cclaw-cli 0.51.28 → 0.51.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/dist/cli.d.ts +6 -1
  2. package/dist/cli.js +117 -64
  3. package/dist/codex-feature-flag.d.ts +1 -1
  4. package/dist/codex-feature-flag.js +1 -1
  5. package/dist/config.js +3 -0
  6. package/dist/content/cancel-command.d.ts +2 -0
  7. package/dist/content/cancel-command.js +25 -0
  8. package/dist/content/closeout-guidance.js +3 -3
  9. package/dist/content/core-agents.js +5 -5
  10. package/dist/content/harness-doc.js +1 -1
  11. package/dist/content/hooks.js +32 -9
  12. package/dist/content/ideate-command.js +12 -7
  13. package/dist/content/meta-skill.js +7 -9
  14. package/dist/content/next-command.d.ts +2 -2
  15. package/dist/content/next-command.js +31 -27
  16. package/dist/content/node-hooks.js +24 -8
  17. package/dist/content/opencode-plugin.js +1 -1
  18. package/dist/content/session-hooks.js +1 -1
  19. package/dist/content/stage-command.js +1 -1
  20. package/dist/content/stage-common-guidance.js +4 -4
  21. package/dist/content/stages/plan.js +2 -2
  22. package/dist/content/stages/review.js +1 -1
  23. package/dist/content/stages/tdd.js +1 -1
  24. package/dist/content/start-command.d.ts +2 -2
  25. package/dist/content/start-command.js +18 -15
  26. package/dist/content/status-command.js +9 -8
  27. package/dist/content/subagents.js +1 -1
  28. package/dist/content/templates.d.ts +1 -1
  29. package/dist/content/templates.js +2 -2
  30. package/dist/content/track-render-context.d.ts +1 -0
  31. package/dist/content/track-render-context.js +2 -0
  32. package/dist/doctor-registry.d.ts +2 -0
  33. package/dist/doctor-registry.js +37 -10
  34. package/dist/doctor.d.ts +2 -1
  35. package/dist/doctor.js +184 -8
  36. package/dist/flow-state.d.ts +1 -1
  37. package/dist/flow-state.js +1 -1
  38. package/dist/fs-utils.js +6 -0
  39. package/dist/harness-adapters.d.ts +2 -2
  40. package/dist/harness-adapters.js +21 -94
  41. package/dist/harness-selection.d.ts +31 -0
  42. package/dist/harness-selection.js +214 -0
  43. package/dist/install.d.ts +4 -1
  44. package/dist/install.js +47 -10
  45. package/dist/internal/advance-stage.js +7 -7
  46. package/dist/managed-resources.d.ts +53 -0
  47. package/dist/managed-resources.js +289 -0
  48. package/dist/policy.js +1 -1
  49. package/dist/run-archive.d.ts +8 -0
  50. package/dist/run-archive.js +23 -9
  51. package/dist/run-persistence.js +1 -1
  52. package/dist/runs.d.ts +1 -1
  53. package/dist/runs.js +1 -1
  54. package/dist/tdd-cycle.js +10 -10
  55. package/dist/tdd-verification-evidence.js +4 -4
  56. package/dist/track-heuristics.d.ts +2 -0
  57. package/dist/track-heuristics.js +11 -3
  58. package/package.json +1 -1
@@ -919,13 +919,13 @@ async function runAdvanceStage(projectRoot, args, io) {
919
919
  });
920
920
  const nextActions = [];
921
921
  if (validation.delegation.missing.length > 0) {
922
- nextActions.push(`Complete or waive mandatory delegation(s): ${validation.delegation.missing.join(", ")}. Helper: \`node .cclaw/hooks/stage-complete.mjs ${args.stage} --waive-delegation=${validation.delegation.missing.join(",")} --waiver-reason="<why safe>"\`.`);
922
+ nextActions.push(`Run mandatory delegation(s) for stage "${args.stage}": ${validation.delegation.missing.join(", ")}. These roles are required by the stage schema before advance. If dispatch is impossible, use the waiver fallback only with a user-visible reason: \`node .cclaw/hooks/stage-complete.mjs ${args.stage} --waive-delegation=${validation.delegation.missing.join(",")} --waiver-reason="<why safe>"\`.`);
923
923
  }
924
924
  if (validation.delegation.missingEvidence.length > 0) {
925
- nextActions.push(`Role-switch fallback completion needs --evidence-ref or escalate to a real isolated dispatch surface.`);
925
+ nextActions.push(`Role-switch fallback completion needs artifact evidenceRefs naming what the role proved; rerun completion with --evidence-ref=<artifact#anchor> or escalate to a real isolated dispatch surface.`);
926
926
  }
927
927
  if (validation.delegation.missingDispatchProof.length > 0) {
928
- nextActions.push(`Isolated completion(s) ${dispatchProofDetails.join(", ") || validation.delegation.missingDispatchProof.join(", ")} lack matching dispatch proof; run the helper lifecycle scheduled -> launched -> acknowledged -> completed with --span-id, --dispatch-id, --dispatch-surface and --agent-definition-path before advancing.`);
928
+ nextActions.push(`Isolated completion(s) ${dispatchProofDetails.join(", ") || validation.delegation.missingDispatchProof.join(", ")} lack event-log dispatch proof. The ledger says completed, but .cclaw/state/delegation-events.jsonl must show scheduled -> launched -> acknowledged -> completed with --span-id, --dispatch-id, --dispatch-surface, --agent-definition-path, ackTs, and completedTs before advancing.`);
929
929
  }
930
930
  if (validation.delegation.legacyInferredCompletions.length > 0) {
931
931
  nextActions.push(`Pre-v3 ledger entries found: ${validation.delegation.legacyInferredCompletions.join(", ")}. Run \`node .cclaw/hooks/delegation-record.mjs --rerecord --span-id=<id> --dispatch-id=<id> --dispatch-surface=<surface> --agent-definition-path=<path>\` to upgrade the row to dispatch-proof shape.`);
@@ -962,15 +962,15 @@ async function runAdvanceStage(projectRoot, args, io) {
962
962
  io.stderr.write(`cclaw internal advance-stage: validation failed for stage "${args.stage}".\n`);
963
963
  if (validation.delegation.missing.length > 0) {
964
964
  io.stderr.write(`- missing delegations: ${validation.delegation.missing.join(", ")}\n`);
965
- io.stderr.write(` next action: complete the delegation, or rerun with --waive-delegation=${validation.delegation.missing.join(",")} --waiver-reason="<why safe>".\n`);
965
+ io.stderr.write(` next action: run the named agent(s) for this stage, or rerun with --waive-delegation=${validation.delegation.missing.join(",")} --waiver-reason="<why safe>" only when the user accepts the safety trade-off.\n`);
966
966
  }
967
967
  if (validation.delegation.missingEvidence.length > 0) {
968
968
  io.stderr.write(`- role-switch evidence missing: ${validation.delegation.missingEvidence.join(", ")}\n`);
969
- io.stderr.write(` next action: include --evidence-ref=<artifact#anchor> when emitting the completed event, or escalate to a true isolated dispatch surface.\n`);
969
+ io.stderr.write(` next action: include --evidence-ref=<artifact#anchor> when emitting the completed event so the artifact shows what was reviewed/proved, or escalate to a true isolated dispatch surface.\n`);
970
970
  }
971
971
  if (validation.delegation.missingDispatchProof.length > 0) {
972
972
  io.stderr.write(`- isolated completion lacks dispatch proof: ${dispatchProofDetails.join(", ") || validation.delegation.missingDispatchProof.join(", ")}\n`);
973
- io.stderr.write(` next action: emit scheduled -> launched -> acknowledged -> completed with --span-id, --dispatch-id, --dispatch-surface, --agent-definition-path before advancing.\n`);
973
+ io.stderr.write(` next action: repair the event log proof by emitting scheduled -> launched -> acknowledged -> completed with --span-id, --dispatch-id, --dispatch-surface, --agent-definition-path, ackTs, and completedTs before advancing.\n`);
974
974
  }
975
975
  if (validation.delegation.legacyInferredCompletions.length > 0) {
976
976
  io.stderr.write(`- legacy-inferred completions need rerecord: ${validation.delegation.legacyInferredCompletions.join(", ")}\n`);
@@ -1422,7 +1422,7 @@ async function runRewind(projectRoot, args, io) {
1422
1422
  nextActions: [
1423
1423
  `Re-run ${args.targetStage} stage work and update its artifact evidence.`,
1424
1424
  `Then run cclaw internal rewind --ack ${args.targetStage}.`,
1425
- "Continue with /cc-next after the stale marker is acknowledged."
1425
+ "Continue with /cc after the stale marker is acknowledged."
1426
1426
  ]
1427
1427
  };
1428
1428
  await appendRewindLog(projectRoot, payload);
@@ -0,0 +1,53 @@
1
+ import { type HarnessId } from "./types.js";
2
+ import type { WriteFileSafeOptions } from "./fs-utils.js";
3
+ export declare const MANAGED_RESOURCE_MANIFEST_REL_PATH = ".cclaw/state/managed-resources.json";
4
+ export interface ManagedResourceEntry {
5
+ path: string;
6
+ sha256: string;
7
+ owner: "cclaw";
8
+ harness?: HarnessId | "core";
9
+ packageVersion: string;
10
+ prunable: boolean;
11
+ safeToOverwrite: boolean;
12
+ updatedAt: string;
13
+ lastBackupPath?: string;
14
+ previousSha256?: string;
15
+ }
16
+ export interface ManagedResourceManifest {
17
+ version: 1;
18
+ generatedAt: string;
19
+ packageVersion: string;
20
+ resources: ManagedResourceEntry[];
21
+ }
22
+ interface ManagedResourceSessionOptions {
23
+ projectRoot: string;
24
+ operation: string;
25
+ }
26
+ export interface ManagedResourceValidationIssue {
27
+ index?: number;
28
+ path?: string;
29
+ field: string;
30
+ message: string;
31
+ }
32
+ export declare function isManagedGeneratedPath(relPath: string): boolean;
33
+ export declare function validateManagedResourceEntry(value: unknown, index?: number): ManagedResourceValidationIssue[];
34
+ export declare function validateManagedResourceManifest(value: unknown): ManagedResourceValidationIssue[];
35
+ export declare function isValidManagedResourceEntry(value: unknown): value is ManagedResourceEntry;
36
+ export declare function readManagedResourceManifest(projectRoot: string): Promise<ManagedResourceManifest | null>;
37
+ export declare class ManagedResourceSession {
38
+ private readonly projectRoot;
39
+ private readonly operation;
40
+ private readonly timestamp;
41
+ private readonly previous;
42
+ private readonly touched;
43
+ private constructor();
44
+ static create(options: ManagedResourceSessionOptions): Promise<ManagedResourceSession>;
45
+ shouldManage(filePath: string): boolean;
46
+ writeFileSafe(filePath: string, content: string, options?: WriteFileSafeOptions): Promise<void>;
47
+ commit(): Promise<ManagedResourceManifest>;
48
+ }
49
+ export declare function getActiveManagedResourceSession(): ManagedResourceSession | null;
50
+ export declare function setActiveManagedResourceSession(session: ManagedResourceSession | null): void;
51
+ export declare function isManagedResourcePath(projectRoot: string, filePath: string): boolean;
52
+ export declare function hashManagedResourceContent(content: string | Buffer): string;
53
+ export {};
@@ -0,0 +1,289 @@
1
+ import crypto from "node:crypto";
2
+ import fs from "node:fs/promises";
3
+ import path from "node:path";
4
+ import { CCLAW_VERSION, RUNTIME_ROOT } from "./constants.js";
5
+ import { HARNESS_IDS } from "./types.js";
6
+ export const MANAGED_RESOURCE_MANIFEST_REL_PATH = `${RUNTIME_ROOT}/state/managed-resources.json`;
7
+ const MANAGED_RESOURCE_HARNESSES = new Set(["core", ...HARNESS_IDS]);
8
+ const SHA256_HEX_PATTERN = /^[a-f0-9]{64}$/iu;
9
+ let activeSession = null;
10
+ function sha256(content) {
11
+ return crypto.createHash("sha256").update(content).digest("hex");
12
+ }
13
+ function normalizeRelPath(projectRoot, filePath) {
14
+ const rel = path.relative(projectRoot, filePath).replace(/\\/gu, "/");
15
+ if (rel.startsWith("../") || rel === ".." || path.isAbsolute(rel))
16
+ return null;
17
+ return rel;
18
+ }
19
+ async function exists(filePath) {
20
+ try {
21
+ await fs.access(filePath);
22
+ return true;
23
+ }
24
+ catch {
25
+ return false;
26
+ }
27
+ }
28
+ async function ensureDir(dirPath) {
29
+ await fs.mkdir(dirPath, { recursive: true });
30
+ }
31
+ async function atomicWrite(filePath, content, options = {}) {
32
+ await ensureDir(path.dirname(filePath));
33
+ const tempPath = path.join(path.dirname(filePath), `.${path.basename(filePath)}.tmp-${process.pid}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`);
34
+ await fs.writeFile(tempPath, content, {
35
+ encoding: "utf8",
36
+ ...(options.mode !== undefined ? { mode: options.mode } : {})
37
+ });
38
+ await fs.rename(tempPath, filePath);
39
+ if (options.mode !== undefined) {
40
+ await fs.chmod(filePath, options.mode).catch(() => undefined);
41
+ }
42
+ }
43
+ function inferHarness(relPath) {
44
+ if (relPath.startsWith(".claude/"))
45
+ return "claude";
46
+ if (relPath.startsWith(".cursor/"))
47
+ return "cursor";
48
+ if (relPath.startsWith(".opencode/"))
49
+ return "opencode";
50
+ if (relPath.startsWith(".codex/") || relPath.startsWith(".agents/skills/"))
51
+ return "codex";
52
+ return "core";
53
+ }
54
+ export function isManagedGeneratedPath(relPath) {
55
+ if (relPath === MANAGED_RESOURCE_MANIFEST_REL_PATH)
56
+ return false;
57
+ if (relPath === `${RUNTIME_ROOT}/config.yaml`)
58
+ return false;
59
+ if (relPath === `${RUNTIME_ROOT}/knowledge.jsonl`)
60
+ return false;
61
+ if (relPath.startsWith(`${RUNTIME_ROOT}/artifacts/`))
62
+ return false;
63
+ if (relPath.startsWith(`${RUNTIME_ROOT}/runs/`))
64
+ return false;
65
+ if (relPath === `${RUNTIME_ROOT}/state/flow-state.json`)
66
+ return false;
67
+ if (relPath === `${RUNTIME_ROOT}/state/.init-in-progress`)
68
+ return false;
69
+ if (relPath.startsWith(`${RUNTIME_ROOT}/state/upgrade-backups/`))
70
+ return false;
71
+ if (relPath.startsWith(`${RUNTIME_ROOT}/state/sync-backups/`))
72
+ return false;
73
+ if (relPath === "AGENTS.md" || relPath === "CLAUDE.md")
74
+ return true;
75
+ if (relPath === `${RUNTIME_ROOT}/state/iron-laws.json`)
76
+ return true;
77
+ for (const prefix of [
78
+ `${RUNTIME_ROOT}/commands/`,
79
+ `${RUNTIME_ROOT}/skills/`,
80
+ `${RUNTIME_ROOT}/templates/`,
81
+ `${RUNTIME_ROOT}/rules/`,
82
+ `${RUNTIME_ROOT}/agents/`,
83
+ `${RUNTIME_ROOT}/hooks/`,
84
+ ".claude/commands/",
85
+ ".cursor/commands/",
86
+ ".opencode/commands/",
87
+ ".opencode/agents/",
88
+ ".codex/agents/",
89
+ ".agents/skills/"
90
+ ]) {
91
+ if (relPath.startsWith(prefix))
92
+ return true;
93
+ }
94
+ return relPath === ".claude/hooks/hooks.json" ||
95
+ relPath === ".cursor/hooks.json" ||
96
+ relPath === ".cursor/rules/cclaw-workflow.mdc" ||
97
+ relPath === ".codex/hooks.json" ||
98
+ relPath === ".opencode/plugins/cclaw-plugin.mjs";
99
+ }
100
+ function validationIssue(index, field, message, pathValue) {
101
+ return {
102
+ ...(index !== undefined ? { index } : {}),
103
+ ...(typeof pathValue === "string" ? { path: pathValue } : {}),
104
+ field,
105
+ message
106
+ };
107
+ }
108
+ function validateSha256(value) {
109
+ return typeof value === "string" && SHA256_HEX_PATTERN.test(value);
110
+ }
111
+ export function validateManagedResourceEntry(value, index) {
112
+ const issues = [];
113
+ if (!value || typeof value !== "object" || Array.isArray(value)) {
114
+ return [validationIssue(index, "entry", "entry must be an object")];
115
+ }
116
+ const entry = value;
117
+ if (typeof entry.path !== "string" || entry.path.trim().length === 0) {
118
+ issues.push(validationIssue(index, "path", "path must be a non-empty string", entry.path));
119
+ }
120
+ else if (entry.path.startsWith("../") || entry.path === ".." || path.isAbsolute(entry.path)) {
121
+ issues.push(validationIssue(index, "path", "path must be project-relative", entry.path));
122
+ }
123
+ else if (!isManagedGeneratedPath(entry.path)) {
124
+ issues.push(validationIssue(index, "path", "path must be a known generated cclaw surface", entry.path));
125
+ }
126
+ if (!validateSha256(entry.sha256)) {
127
+ issues.push(validationIssue(index, "sha256", "sha256 must be a 64-character hex digest", entry.path));
128
+ }
129
+ if (entry.owner !== "cclaw") {
130
+ issues.push(validationIssue(index, "owner", 'owner must be "cclaw"', entry.path));
131
+ }
132
+ if (typeof entry.harness !== "string" || !MANAGED_RESOURCE_HARNESSES.has(entry.harness)) {
133
+ issues.push(validationIssue(index, "harness", `harness must be one of: core, ${HARNESS_IDS.join(", ")}`, entry.path));
134
+ }
135
+ if (typeof entry.packageVersion !== "string" || entry.packageVersion.trim().length === 0) {
136
+ issues.push(validationIssue(index, "packageVersion", "packageVersion must be a non-empty string", entry.path));
137
+ }
138
+ if (typeof entry.prunable !== "boolean") {
139
+ issues.push(validationIssue(index, "prunable", "prunable must be a boolean", entry.path));
140
+ }
141
+ if (typeof entry.safeToOverwrite !== "boolean") {
142
+ issues.push(validationIssue(index, "safeToOverwrite", "safeToOverwrite must be a boolean", entry.path));
143
+ }
144
+ if (typeof entry.updatedAt !== "string" || entry.updatedAt.trim().length === 0) {
145
+ issues.push(validationIssue(index, "updatedAt", "updatedAt must be a non-empty string", entry.path));
146
+ }
147
+ if (entry.lastBackupPath !== undefined && (typeof entry.lastBackupPath !== "string" || entry.lastBackupPath.trim().length === 0)) {
148
+ issues.push(validationIssue(index, "lastBackupPath", "lastBackupPath must be a non-empty string when present", entry.path));
149
+ }
150
+ if (entry.previousSha256 !== undefined && !validateSha256(entry.previousSha256)) {
151
+ issues.push(validationIssue(index, "previousSha256", "previousSha256 must be a 64-character hex digest when present", entry.path));
152
+ }
153
+ return issues;
154
+ }
155
+ export function validateManagedResourceManifest(value) {
156
+ const issues = [];
157
+ if (!value || typeof value !== "object" || Array.isArray(value)) {
158
+ return [validationIssue(undefined, "manifest", "manifest must be an object")];
159
+ }
160
+ const manifest = value;
161
+ if (manifest.version !== 1) {
162
+ issues.push(validationIssue(undefined, "version", "version must be 1"));
163
+ }
164
+ if (typeof manifest.generatedAt !== "string" || manifest.generatedAt.trim().length === 0) {
165
+ issues.push(validationIssue(undefined, "generatedAt", "generatedAt must be a non-empty string"));
166
+ }
167
+ if (typeof manifest.packageVersion !== "string" || manifest.packageVersion.trim().length === 0) {
168
+ issues.push(validationIssue(undefined, "packageVersion", "packageVersion must be a non-empty string"));
169
+ }
170
+ if (!Array.isArray(manifest.resources)) {
171
+ issues.push(validationIssue(undefined, "resources", "resources must be an array"));
172
+ return issues;
173
+ }
174
+ manifest.resources.forEach((entry, index) => {
175
+ issues.push(...validateManagedResourceEntry(entry, index));
176
+ });
177
+ return issues;
178
+ }
179
+ export function isValidManagedResourceEntry(value) {
180
+ return validateManagedResourceEntry(value).length === 0;
181
+ }
182
+ export async function readManagedResourceManifest(projectRoot) {
183
+ const manifestPath = path.join(projectRoot, MANAGED_RESOURCE_MANIFEST_REL_PATH);
184
+ if (!(await exists(manifestPath)))
185
+ return null;
186
+ const parsed = JSON.parse(await fs.readFile(manifestPath, "utf8"));
187
+ if (parsed.version !== 1 || !Array.isArray(parsed.resources))
188
+ return null;
189
+ const resources = parsed.resources.filter(isValidManagedResourceEntry);
190
+ return {
191
+ version: 1,
192
+ generatedAt: typeof parsed.generatedAt === "string" ? parsed.generatedAt : new Date(0).toISOString(),
193
+ packageVersion: typeof parsed.packageVersion === "string" ? parsed.packageVersion : "unknown",
194
+ resources
195
+ };
196
+ }
197
+ export class ManagedResourceSession {
198
+ projectRoot;
199
+ operation;
200
+ timestamp;
201
+ previous = new Map();
202
+ touched = new Map();
203
+ constructor(options, previous) {
204
+ this.projectRoot = options.projectRoot;
205
+ this.operation = options.operation;
206
+ this.timestamp = new Date().toISOString().replace(/[:.]/gu, "-");
207
+ for (const entry of previous?.resources ?? []) {
208
+ this.previous.set(entry.path, entry);
209
+ }
210
+ }
211
+ static async create(options) {
212
+ const previous = await readManagedResourceManifest(options.projectRoot).catch(() => null);
213
+ return new ManagedResourceSession(options, previous);
214
+ }
215
+ shouldManage(filePath) {
216
+ const rel = normalizeRelPath(this.projectRoot, filePath);
217
+ return rel !== null && isManagedGeneratedPath(rel);
218
+ }
219
+ async writeFileSafe(filePath, content, options = {}) {
220
+ const rel = normalizeRelPath(this.projectRoot, filePath);
221
+ if (rel === null || !isManagedGeneratedPath(rel)) {
222
+ await atomicWrite(filePath, content, options);
223
+ return;
224
+ }
225
+ const nextHash = sha256(content);
226
+ const previous = this.previous.get(rel);
227
+ let previousSha256;
228
+ let lastBackupPath;
229
+ if (await exists(filePath)) {
230
+ const current = await fs.readFile(filePath);
231
+ const currentHash = sha256(current);
232
+ previousSha256 = currentHash;
233
+ const knownPrevious = previous?.sha256;
234
+ if (currentHash !== nextHash && (knownPrevious === undefined || currentHash !== knownPrevious)) {
235
+ const backupRoot = path.join(this.projectRoot, RUNTIME_ROOT, "state", this.operation === "upgrade" ? "upgrade-backups" : "sync-backups", this.timestamp);
236
+ const backupPath = path.join(backupRoot, rel);
237
+ await ensureDir(path.dirname(backupPath));
238
+ await fs.copyFile(filePath, backupPath);
239
+ lastBackupPath = normalizeRelPath(this.projectRoot, backupPath) ?? undefined;
240
+ }
241
+ }
242
+ await atomicWrite(filePath, content, options);
243
+ this.touched.set(rel, {
244
+ path: rel,
245
+ sha256: nextHash,
246
+ owner: "cclaw",
247
+ harness: inferHarness(rel),
248
+ packageVersion: CCLAW_VERSION,
249
+ prunable: true,
250
+ safeToOverwrite: true,
251
+ updatedAt: new Date().toISOString(),
252
+ ...(lastBackupPath ? { lastBackupPath } : {}),
253
+ ...(previousSha256 && previousSha256 !== nextHash ? { previousSha256 } : {})
254
+ });
255
+ }
256
+ async commit() {
257
+ const resourcesByPath = new Map(this.previous);
258
+ for (const [rel, entry] of this.touched) {
259
+ resourcesByPath.set(rel, entry);
260
+ }
261
+ const resources = [];
262
+ for (const entry of resourcesByPath.values()) {
263
+ if (await exists(path.join(this.projectRoot, entry.path))) {
264
+ resources.push(entry);
265
+ }
266
+ }
267
+ const manifest = {
268
+ version: 1,
269
+ generatedAt: new Date().toISOString(),
270
+ packageVersion: CCLAW_VERSION,
271
+ resources: resources.sort((a, b) => a.path.localeCompare(b.path))
272
+ };
273
+ await atomicWrite(path.join(this.projectRoot, MANAGED_RESOURCE_MANIFEST_REL_PATH), `${JSON.stringify(manifest, null, 2)}\n`, { mode: 0o600 });
274
+ return manifest;
275
+ }
276
+ }
277
+ export function getActiveManagedResourceSession() {
278
+ return activeSession;
279
+ }
280
+ export function setActiveManagedResourceSession(session) {
281
+ activeSession = session;
282
+ }
283
+ export function isManagedResourcePath(projectRoot, filePath) {
284
+ const rel = normalizeRelPath(projectRoot, filePath);
285
+ return rel !== null && isManagedGeneratedPath(rel);
286
+ }
287
+ export function hashManagedResourceContent(content) {
288
+ return sha256(content);
289
+ }
package/dist/policy.js CHANGED
@@ -127,7 +127,7 @@ export async function policyChecks(projectRoot, options = {}) {
127
127
  });
128
128
  utilitySkillChecks.push({
129
129
  file: ".cursor/rules/cclaw-workflow.mdc",
130
- needle: "/cc-next",
130
+ needle: "/cc",
131
131
  name: "rules:cursor:next_command_guidance"
132
132
  });
133
133
  }
@@ -1,5 +1,7 @@
1
1
  import { type FlowState } from "./flow-state.js";
2
2
  import type { FlowStage } from "./types.js";
3
+ export declare const ARCHIVE_DISPOSITIONS: readonly ["completed", "cancelled", "abandoned"];
4
+ export type ArchiveDisposition = (typeof ARCHIVE_DISPOSITIONS)[number];
3
5
  export interface CclawRunMeta {
4
6
  id: string;
5
7
  title: string;
@@ -12,6 +14,8 @@ export interface ArchiveRunResult {
12
14
  runName: string;
13
15
  resetState: FlowState;
14
16
  snapshottedStateFiles: string[];
17
+ disposition: ArchiveDisposition;
18
+ dispositionReason?: string;
15
19
  /** Knowledge curation hint: total active entries + soft threshold (50). */
16
20
  knowledge: {
17
21
  activeEntryCount: number;
@@ -36,11 +40,15 @@ export interface ArchiveManifest {
36
40
  sourceCurrentStage: FlowStage;
37
41
  sourceCompletedStages: FlowStage[];
38
42
  snapshottedStateFiles: string[];
43
+ disposition: ArchiveDisposition;
44
+ dispositionReason?: string;
39
45
  retro: ArchiveRunResult["retro"];
40
46
  }
41
47
  export interface ArchiveRunOptions {
42
48
  skipRetro?: boolean;
43
49
  skipRetroReason?: string;
50
+ disposition?: ArchiveDisposition;
51
+ dispositionReason?: string;
44
52
  }
45
53
  export declare function listRuns(projectRoot: string): Promise<CclawRunMeta[]>;
46
54
  export declare function archiveRun(projectRoot: string, runName?: string, options?: ArchiveRunOptions): Promise<ArchiveRunResult>;
@@ -6,6 +6,7 @@ import { ensureDir, exists, withDirectoryLock, writeFileSafe } from "./fs-utils.
6
6
  import { readKnowledgeSafely } from "./knowledge-store.js";
7
7
  import { evaluateRetroGate } from "./retro-gate.js";
8
8
  import { ensureRunSystem, flowStateLockPathFor, readFlowState, writeFlowState } from "./run-persistence.js";
9
+ export const ARCHIVE_DISPOSITIONS = ["completed", "cancelled", "abandoned"];
9
10
  const RUNS_DIR_REL_PATH = `${RUNTIME_ROOT}/runs`;
10
11
  const ACTIVE_ARTIFACTS_REL_PATH = `${RUNTIME_ROOT}/artifacts`;
11
12
  const STATE_DIR_REL_PATH = `${RUNTIME_ROOT}/state`;
@@ -205,37 +206,46 @@ export async function archiveRun(projectRoot, runName, options = {}) {
205
206
  const archiveArtifactsPath = path.join(archivePath, "artifacts");
206
207
  let sourceState = await readFlowState(projectRoot);
207
208
  const retroGate = await evaluateRetroGate(projectRoot, sourceState);
209
+ const disposition = options.disposition ?? "completed";
210
+ const dispositionReason = options.dispositionReason?.trim();
211
+ const nonCompletedDisposition = disposition !== "completed";
212
+ if (nonCompletedDisposition && (!dispositionReason || dispositionReason.length === 0)) {
213
+ throw new Error("archive --disposition=cancelled|abandoned requires --reason=<text>.");
214
+ }
208
215
  const shipCompleted = sourceState.completedStages.includes("ship");
209
216
  const skipRetro = options.skipRetro === true;
210
217
  const skipRetroReason = options.skipRetroReason?.trim();
211
218
  if (skipRetro && (!skipRetroReason || skipRetroReason.length === 0)) {
212
219
  throw new Error("archive --skip-retro requires --retro-reason=<text>.");
213
220
  }
221
+ if (nonCompletedDisposition && skipRetro) {
222
+ throw new Error("archive --skip-retro is only valid for completed archives; use --reason with cancelled/abandoned.");
223
+ }
214
224
  const retroSkippedInCloseout = sourceState.closeout.retroSkipped === true &&
215
225
  typeof sourceState.closeout.retroSkipReason === "string" &&
216
226
  sourceState.closeout.retroSkipReason.trim().length > 0;
217
227
  const readyForArchive = sourceState.closeout.shipSubstate === "ready_to_archive";
218
228
  const inShipCloseout = sourceState.currentStage === "ship";
219
- if (readyForArchive && !compoundCloseoutComplete(sourceState)) {
229
+ if (!nonCompletedDisposition && readyForArchive && !compoundCloseoutComplete(sourceState)) {
220
230
  throw new Error("Archive blocked: compound closeout is incomplete. " +
221
231
  "Promote compound guidance or skip compound review with an explicit reason before archiving.");
222
232
  }
223
- if (inShipCloseout && skipRetro) {
233
+ if (!nonCompletedDisposition && inShipCloseout && skipRetro) {
224
234
  throw new Error("Archive blocked: --skip-retro is not allowed while current stage is ship. " +
225
- "Complete closeout to ready_to_archive via /cc-next.");
235
+ "Complete closeout to ready_to_archive via /cc.");
226
236
  }
227
- if (inShipCloseout && !readyForArchive) {
237
+ if (!nonCompletedDisposition && inShipCloseout && !readyForArchive) {
228
238
  throw new Error("Archive blocked: closeout is not ready_to_archive. " +
229
- "Resume /cc-next until closeout reaches ready_to_archive.");
239
+ "Resume /cc until closeout reaches ready_to_archive.");
230
240
  }
231
- if (shipCompleted && !readyForArchive && !skipRetro) {
241
+ if (!nonCompletedDisposition && shipCompleted && !readyForArchive && !skipRetro) {
232
242
  throw new Error("Archive blocked: closeout is not ready_to_archive. " +
233
- "Resume /cc-next until closeout reaches ready_to_archive, " +
243
+ "Resume /cc until closeout reaches ready_to_archive, " +
234
244
  "or run `cclaw archive --skip-retro --retro-reason=<text>` for CLI-only flows.");
235
245
  }
236
- if (retroGate.required && !retroGate.completed && !skipRetro && !retroSkippedInCloseout) {
246
+ if (!nonCompletedDisposition && retroGate.required && !retroGate.completed && !skipRetro && !retroSkippedInCloseout) {
237
247
  throw new Error("Archive blocked: retro gate is required after ship completion. " +
238
- "Run /cc-next (auto-runs retro) or, for CLI-only flows, re-run `cclaw archive --skip-retro --retro-reason=<text>`.");
248
+ "Run /cc (auto-runs retro) or, for CLI-only flows, re-run `cclaw archive --skip-retro --retro-reason=<text>`.");
239
249
  }
240
250
  if (retroGate.completed) {
241
251
  const completedAt = sourceState.retro.completedAt ?? new Date().toISOString();
@@ -291,6 +301,8 @@ export async function archiveRun(projectRoot, runName, options = {}) {
291
301
  sourceCurrentStage: sourceState.currentStage,
292
302
  sourceCompletedStages: sourceState.completedStages,
293
303
  snapshottedStateFiles,
304
+ disposition,
305
+ ...(dispositionReason ? { dispositionReason } : {}),
294
306
  retro: retroSummary
295
307
  };
296
308
  await writeFileSafe(path.join(archivePath, "archive-manifest.json"), `${JSON.stringify(manifest, null, 2)}\n`);
@@ -304,6 +316,8 @@ export async function archiveRun(projectRoot, runName, options = {}) {
304
316
  runName: archiveRunName,
305
317
  resetState,
306
318
  snapshottedStateFiles,
319
+ disposition,
320
+ ...(dispositionReason ? { dispositionReason } : {}),
307
321
  knowledge: knowledgeStats,
308
322
  retro: retroSummary
309
323
  };
@@ -60,7 +60,7 @@ function validateFlowTransition(prev, next) {
60
60
  const isNaturalForward = naturalForward === next.currentStage;
61
61
  const isReviewRewind = prev.currentStage === "review" && next.currentStage === "tdd";
62
62
  if (!isNaturalForward && !isReviewRewind) {
63
- throw new InvalidStageTransitionError(prev.currentStage, next.currentStage, `no transition rule allows "${prev.currentStage}" -> "${next.currentStage}" for track "${prev.track}". Use /cc-next to advance stages or archive the run to reset.`);
63
+ throw new InvalidStageTransitionError(prev.currentStage, next.currentStage, `no transition rule allows "${prev.currentStage}" -> "${next.currentStage}" for track "${prev.track}". Use /cc to advance stages or archive the run to reset.`);
64
64
  }
65
65
  }
66
66
  function flowStatePath(projectRoot) {
package/dist/runs.d.ts CHANGED
@@ -1,2 +1,2 @@
1
1
  export { CorruptFlowStateError, InvalidStageTransitionError, type WriteFlowStateOptions, ensureRunSystem, readFlowState, writeFlowState } from "./run-persistence.js";
2
- export { archiveRun, countActiveKnowledgeEntries, listRuns, type ArchiveManifest, type ArchiveRunOptions, type ArchiveRunResult, type CclawRunMeta } from "./run-archive.js";
2
+ export { ARCHIVE_DISPOSITIONS, archiveRun, countActiveKnowledgeEntries, listRuns, type ArchiveDisposition, type ArchiveManifest, type ArchiveRunOptions, type ArchiveRunResult, type CclawRunMeta } from "./run-archive.js";
package/dist/runs.js CHANGED
@@ -1,2 +1,2 @@
1
1
  export { CorruptFlowStateError, InvalidStageTransitionError, ensureRunSystem, readFlowState, writeFlowState } from "./run-persistence.js";
2
- export { archiveRun, countActiveKnowledgeEntries, listRuns } from "./run-archive.js";
2
+ export { ARCHIVE_DISPOSITIONS, archiveRun, countActiveKnowledgeEntries, listRuns } from "./run-archive.js";
package/dist/tdd-cycle.js CHANGED
@@ -95,7 +95,7 @@ export function validateTddCycleOrder(entries, options = {}) {
95
95
  // cycles could appear to share a RED/GREEN pair.
96
96
  for (const slice of bySlice.keys()) {
97
97
  if (!SLICE_ID_PATTERN.test(slice)) {
98
- issues.push(`slice "${slice}": id must match /^S-\\d+$/ (e.g. S-1)`);
98
+ issues.push(`slice "${slice}": id must match /^S-\\d+$/ (e.g. S-1); repair by re-logging RED/GREEN/REFACTOR with a stable slice id.`);
99
99
  }
100
100
  }
101
101
  for (const [slice, sliceEntries] of bySlice.entries()) {
@@ -103,15 +103,15 @@ export function validateTddCycleOrder(entries, options = {}) {
103
103
  for (const entry of sliceEntries) {
104
104
  if (entry.phase === "red") {
105
105
  if (entry.exitCode === undefined) {
106
- issues.push(`slice ${slice}: red entry must record a non-zero exitCode`);
106
+ issues.push(`slice ${slice}: RED repair needed: red entry must record a non-zero exitCode from a failing test.`);
107
107
  continue;
108
108
  }
109
109
  if (entry.exitCode === 0) {
110
- issues.push(`slice ${slice}: red entry exitCode must be non-zero`);
110
+ issues.push(`slice ${slice}: RED repair needed: red entry exitCode must be non-zero; passing output is not RED evidence.`);
111
111
  continue;
112
112
  }
113
113
  if (state === "red_open") {
114
- issues.push(`slice ${slice}: duplicate red before green`);
114
+ issues.push(`slice ${slice}: RED/GREEN repair needed: duplicate RED before GREEN; record the GREEN pass that closes the prior RED or split into a new slice.`);
115
115
  continue;
116
116
  }
117
117
  state = "red_open";
@@ -119,15 +119,15 @@ export function validateTddCycleOrder(entries, options = {}) {
119
119
  }
120
120
  if (entry.phase === "green") {
121
121
  if (entry.exitCode === undefined) {
122
- issues.push(`slice ${slice}: green entry must record exitCode 0`);
122
+ issues.push(`slice ${slice}: GREEN repair needed: green entry must record exitCode 0 from the verification command.`);
123
123
  continue;
124
124
  }
125
125
  if (entry.exitCode !== 0) {
126
- issues.push(`slice ${slice}: green entry exitCode must be 0`);
126
+ issues.push(`slice ${slice}: GREEN repair needed: green entry exitCode must be 0; fix regressions before advancing.`);
127
127
  continue;
128
128
  }
129
129
  if (state !== "red_open") {
130
- issues.push(`slice ${slice}: green logged before red`);
130
+ issues.push(`slice ${slice}: GREEN repair needed: green logged before RED; add the failing RED test evidence first.`);
131
131
  continue;
132
132
  }
133
133
  state = "green_done";
@@ -135,15 +135,15 @@ export function validateTddCycleOrder(entries, options = {}) {
135
135
  }
136
136
  // refactor — must preserve the passing state established by green.
137
137
  if (entry.exitCode === undefined) {
138
- issues.push(`slice ${slice}: refactor entry must record exitCode 0`);
138
+ issues.push(`slice ${slice}: REFACTOR repair needed: refactor entry must record exitCode 0 proving behavior stayed green.`);
139
139
  continue;
140
140
  }
141
141
  if (entry.exitCode !== 0) {
142
- issues.push(`slice ${slice}: refactor entry exitCode must be 0 (tests must stay green)`);
142
+ issues.push(`slice ${slice}: REFACTOR repair needed: tests must stay green after cleanup; rerun/fix before closing the slice.`);
143
143
  continue;
144
144
  }
145
145
  if (state !== "green_done") {
146
- issues.push(`slice ${slice}: refactor logged before green`);
146
+ issues.push(`slice ${slice}: REFACTOR repair needed: refactor logged before GREEN; prove GREEN first, then cleanup.`);
147
147
  continue;
148
148
  }
149
149
  }
@@ -14,20 +14,20 @@ export async function validateTddVerificationEvidence(projectRoot, evidence, opt
14
14
  const gitPresent = configuredVcs !== "none" && await exists(path.join(projectRoot, ".git"));
15
15
  const issues = [];
16
16
  if (options.requireCommand !== false && !TEST_COMMAND_HINT_PATTERN.test(normalized)) {
17
- issues.push("must include the fresh verification command that was run (for example `npm test`, `pytest`, `go test`, or equivalent).");
17
+ issues.push("GREEN repair needed: include the fresh verification command that was run (for example `npm test`, `pytest`, `go test`, or equivalent).");
18
18
  }
19
19
  if (options.requirePassStatus !== false && !PASS_STATUS_PATTERN.test(normalized)) {
20
- issues.push("must include explicit success status (for example `PASS` or `GREEN`).");
20
+ issues.push("GREEN repair needed: include explicit success status (for example `PASS` or `GREEN`).");
21
21
  }
22
22
  const hasSha = SHA_WITH_LABEL_PATTERN.test(normalized);
23
23
  const hasNoVcs = NO_VCS_ATTESTATION_PATTERN.test(normalized);
24
24
  const hasNoVcsHash = NO_VCS_HASH_PATTERN.test(normalized);
25
25
  if (mode !== "disabled" && configuredVcs === "none") {
26
26
  if (!hasNoVcs) {
27
- issues.push("must include an explicit no-VCS reason because `vcs` is `none`.");
27
+ issues.push("NO_VCS_MODE repair needed: include an explicit no-VCS reason because `vcs` is `none`.");
28
28
  }
29
29
  if (!hasNoVcsHash) {
30
- issues.push("must include a content/artifact hash for no-VCS TDD evidence (for example `artifact-hash: sha256:<hash>`).");
30
+ issues.push("NO_VCS_MODE repair needed: include a content/artifact hash for no-VCS TDD evidence (for example `artifact-hash: sha256:<hash>`).");
31
31
  }
32
32
  }
33
33
  else if (mode === "required" && !hasSha) {
@@ -3,6 +3,8 @@ export interface TrackResolution {
3
3
  track: FlowTrack;
4
4
  reason: string;
5
5
  matchedTokens: string[];
6
+ confidence: "high" | "medium" | "low";
7
+ overrideGuidance: string;
6
8
  }
7
9
  /**
8
10
  * Reference implementation of the track classifier the /cc skill prose