cclaw-cli 0.51.23 → 0.51.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/doctor.js CHANGED
@@ -24,6 +24,7 @@ import { resolveTrackFromPrompt } from "./track-heuristics.js";
24
24
  import { classifyCodexHooksFlag, codexConfigPath, readCodexConfig } from "./codex-feature-flag.js";
25
25
  import { LANGUAGE_RULE_PACK_DIR, LANGUAGE_RULE_PACK_FILES, LEGACY_LANGUAGE_RULE_PACK_FOLDERS } from "./content/utility-skills.js";
26
26
  import { validateHookDocument } from "./hook-schema.js";
27
+ import { HOOK_EVENTS_BY_HARNESS } from "./content/hook-events.js";
27
28
  import { validateKnowledgeEntry } from "./knowledge-store.js";
28
29
  import { readSeedShelf } from "./content/seed-shelf.js";
29
30
  import { evaluateRetroGate } from "./retro-gate.js";
@@ -295,8 +296,12 @@ function opencodeConfigCandidates(projectRoot) {
295
296
  return [
296
297
  path.join(projectRoot, "opencode.json"),
297
298
  path.join(projectRoot, "opencode.jsonc"),
299
+ path.join(projectRoot, "oh-my-opencode.jsonc"),
300
+ path.join(projectRoot, "oh-my-openagent.jsonc"),
298
301
  path.join(projectRoot, ".opencode/opencode.json"),
299
- path.join(projectRoot, ".opencode/opencode.jsonc")
302
+ path.join(projectRoot, ".opencode/opencode.jsonc"),
303
+ path.join(projectRoot, ".opencode/oh-my-opencode.jsonc"),
304
+ path.join(projectRoot, ".opencode/oh-my-openagent.jsonc")
300
305
  ];
301
306
  }
302
307
  function openCodeConfigRegistersPlugin(parsed) {
@@ -360,6 +365,87 @@ function opencodeQuestionEnvCheck() {
360
365
  details: "Set OPENCODE_ENABLE_QUESTION_TOOL=1 for OpenCode ACP clients so permission-gated structured questions can use the question tool."
361
366
  };
362
367
  }
368
/**
 * Build the human-readable detail line for the Codex `codex_hooks` feature
 * flag check.
 *
 * @param {string} configPath - Path to the Codex config file the message refers to.
 * @param {string} state - Flag classification: "enabled", "read-error",
 *   "missing-file", "missing-section", "missing-key", or anything else
 *   (treated as "set to a non-true value").
 * @param {unknown} [error] - Only consulted for "read-error"; its message is
 *   embedded in the detail text.
 * @returns {string} One-sentence status/detail message.
 */
function codexFlagInactiveDetail(configPath, state, error) {
    switch (state) {
        case "enabled":
            return `codex_hooks feature flag is enabled in ${configPath}; Codex hooks are active.`;
        case "read-error": {
            // Surface the underlying read failure; non-Error values are stringified.
            const reason = error instanceof Error ? error.message : String(error);
            return `Codex hooks are inactive: could not read ${configPath} (${reason}).`;
        }
        case "missing-file":
            return `Codex hooks are inactive: ${configPath} does not exist; .codex/hooks.json is ignored until [features] codex_hooks = true is configured.`;
        case "missing-section":
            return `Codex hooks are inactive: ${configPath} has no [features] section; add codex_hooks = true to activate configured hooks.`;
        case "missing-key":
            return `Codex hooks are inactive: ${configPath} is missing codex_hooks under [features]; add codex_hooks = true to activate configured hooks.`;
        default:
            // Any remaining classification means the key exists but is not `true`.
            return `Codex hooks are inactive: ${configPath} sets codex_hooks to a non-true value; set codex_hooks = true under [features].`;
    }
}
386
/**
 * Recursively flatten a hooks array into `{ command, matcher }` pairs.
 *
 * Non-array input yields []. Each array item is normalized through the
 * file-local `toObject`; items that are not objects are skipped. A nested
 * `hooks` array is flattened recursively, and children inherit the parent's
 * matcher when they do not carry one of their own.
 *
 * @param {unknown} value - Candidate hooks array (possibly nested).
 * @returns {{command: string, matcher: (string|undefined)}[]} Flat command list.
 */
function hookCommandsWithMatchers(value) {
    if (!Array.isArray(value)) {
        return [];
    }
    const collected = [];
    for (const item of value) {
        const record = toObject(item);
        if (!record) {
            continue;
        }
        const ownMatcher = typeof record.matcher === "string" ? record.matcher : undefined;
        if (typeof record.command === "string") {
            collected.push({ command: record.command, matcher: ownMatcher });
        }
        // Children keep their own matcher; otherwise they inherit the parent's.
        for (const descendant of hookCommandsWithMatchers(record.hooks)) {
            collected.push({ ...descendant, matcher: descendant.matcher ?? ownMatcher });
        }
    }
    return collected;
}
406
/**
 * Check whether any entry's command invokes the given run-hook handler,
 * via either the Windows (`run-hook.cmd`) or Node (`run-hook.mjs`) launcher.
 *
 * @param {{command: string}[]} entries - Flattened hook command entries.
 * @param {string} handler - Handler name expected after the launcher script.
 * @returns {boolean} True when at least one entry matches.
 */
function commandHasHandler(entries, handler) {
    const needles = [`run-hook.cmd ${handler}`, `run-hook.mjs ${handler}`];
    return entries.some((entry) => needles.some((needle) => entry.command.includes(needle)));
}
409
/**
 * Check that the given handler is wired at least once AND that every wiring
 * of it uses the strict "Bash|bash" matcher.
 *
 * @param {{command: string, matcher: (string|undefined)}[]} entries - Flattened hook entries.
 * @param {string} handler - Handler name to look for after run-hook.cmd/.mjs.
 * @returns {boolean} False when the handler is absent or any match has a
 *   different (or missing) matcher.
 */
function codexBashOnly(entries, handler) {
    const targets = entries.filter((entry) => entry.command.includes(`run-hook.cmd ${handler}`) ||
        entry.command.includes(`run-hook.mjs ${handler}`));
    if (targets.length === 0) {
        return false;
    }
    return targets.every((entry) => entry.matcher === "Bash|bash");
}
413
/**
 * Structurally validate a Codex hooks document: required events, handlers,
 * and matchers, plus the manifest's semantic session_rehydrate mapping.
 *
 * @param {object} codexHooks - Parsed hooks document keyed by event name
 *   (SessionStart, UserPromptSubmit, PreToolUse, PostToolUse, Stop).
 * @returns {{ok: boolean, details: string}} ok=true with a summary string,
 *   or ok=false with all violations joined by "; " in check order.
 */
function codexStructuralWiringCheck(codexHooks) {
    const problems = [];
    // Guard the semantic manifest mapping itself, not just the hooks file:
    // if the constant drifts, the doctor should flag it.
    const expectedSession = HOOK_EVENTS_BY_HARNESS.codex.session_rehydrate;
    if (expectedSession !== "SessionStart matcher=startup|resume") {
        problems.push("semantic session_rehydrate mapping must remain SessionStart matcher=startup|resume");
    }
    // SessionStart needs the session-start handler and at least one entry
    // scoped to the startup|resume matcher.
    const session = hookCommandsWithMatchers(codexHooks.SessionStart);
    if (!commandHasHandler(session, "session-start") || !session.some((entry) => entry.matcher === "startup|resume")) {
        problems.push("SessionStart must run session-start with matcher startup|resume");
    }
    // UserPromptSubmit must run both guard handlers (no matcher constraint).
    const userPrompt = hookCommandsWithMatchers(codexHooks.UserPromptSubmit);
    if (!commandHasHandler(userPrompt, "prompt-guard")) {
        problems.push("UserPromptSubmit must run prompt-guard");
    }
    if (!commandHasHandler(userPrompt, "verify-current-state")) {
        problems.push("UserPromptSubmit must run verify-current-state");
    }
    // Pre/PostToolUse handlers must be present AND uniformly Bash-only
    // (matcher "Bash|bash"); codexBashOnly enforces both conditions.
    const pre = hookCommandsWithMatchers(codexHooks.PreToolUse);
    if (!codexBashOnly(pre, "prompt-guard")) {
        problems.push("PreToolUse prompt-guard must be Bash-only matcher Bash|bash");
    }
    if (!codexBashOnly(pre, "workflow-guard")) {
        problems.push("PreToolUse workflow-guard must be Bash-only matcher Bash|bash");
    }
    const post = hookCommandsWithMatchers(codexHooks.PostToolUse);
    if (!codexBashOnly(post, "context-monitor")) {
        problems.push("PostToolUse context-monitor must be Bash-only matcher Bash|bash");
    }
    // Stop must hand off via stop-handoff.
    const stop = hookCommandsWithMatchers(codexHooks.Stop);
    if (!commandHasHandler(stop, "stop-handoff")) {
        problems.push("Stop must run stop-handoff");
    }
    return problems.length === 0
        ? { ok: true, details: "Codex hook events, matchers, and manifest semantic mappings are structurally valid" }
        : { ok: false, details: problems.join("; ") };
}
363
449
  async function initRecoveryCheck(projectRoot) {
364
450
  const sentinelPath = path.join(projectRoot, RUNTIME_ROOT, "state", ".init-in-progress");
365
451
  if (!(await exists(sentinelPath))) {
@@ -1003,34 +1089,48 @@ export async function doctorChecks(projectRoot, options = {}) {
1003
1089
  ok: codexWiringOk,
1004
1090
  details: `${codexHooksFile} must wire SessionStart, UserPromptSubmit(prompt/verify-current-state), Bash-only PreToolUse(prompt/workflow), Bash-only PostToolUse(context-monitor), and Stop(stop-handoff). Codex workflow-guard is intentionally strict Bash-only.`
1005
1091
  });
1006
- // Feature flag warning: Codex ignores `.codex/hooks.json` unless the
1007
- // user has `[features] codex_hooks = true` in `~/.codex/config.toml`.
1008
- // Advisory warning — not a hard failure, because the skills still
1009
- // work without the flag.
1092
+ const codexStructural = codexStructuralWiringCheck(codexHooks);
1093
+ checks.push({
1094
+ name: "hook:wiring:codex:structure",
1095
+ ok: codexStructural.ok,
1096
+ details: codexStructural.details
1097
+ });
1098
+ // Codex ignores `.codex/hooks.json` unless the user has
1099
+ // `[features] codex_hooks = true` in `~/.codex/config.toml`.
1010
1100
  const codexConfig = codexConfigPath();
1011
- let featureFlagNote = "";
1101
+ let codexFlagState = "read-error";
1102
+ let codexFlagReadError;
1012
1103
  try {
1013
1104
  const content = await readCodexConfig(codexConfig);
1014
- const state = classifyCodexHooksFlag(content);
1015
- featureFlagNote =
1016
- state === "enabled"
1017
- ? `codex_hooks feature flag is enabled in ${codexConfig}`
1018
- : state === "missing-file"
1019
- ? `warning: ${codexConfig} does not exist; .codex/hooks.json will be ignored until you create it with \`[features]\\ncodex_hooks = true\\n\`.`
1020
- : state === "missing-section"
1021
- ? `warning: ${codexConfig} has no [features] section; add \`[features]\\ncodex_hooks = true\\n\` to enable cclaw hooks.`
1022
- : state === "missing-key"
1023
- ? `warning: ${codexConfig} is missing the codex_hooks key under [features]. Add \`codex_hooks = true\` to enable cclaw hooks.`
1024
- : `warning: ${codexConfig} sets codex_hooks to a non-true value; set \`codex_hooks = true\` under [features] to enable cclaw hooks.`;
1105
+ codexFlagState = classifyCodexHooksFlag(content);
1025
1106
  }
1026
1107
  catch (err) {
1027
- featureFlagNote = `warning: could not read ${codexConfig}: ${err instanceof Error ? err.message : String(err)}`;
1108
+ codexFlagReadError = err;
1028
1109
  }
1110
+ const featureFlagNote = codexFlagInactiveDetail(codexConfig, codexFlagState, codexFlagReadError);
1111
+ const featureFlagOk = codexFlagState === "enabled";
1029
1112
  checks.push({
1030
1113
  name: "warning:codex:feature_flag",
1031
- ok: true,
1032
- details: featureFlagNote
1114
+ ok: featureFlagOk,
1115
+ details: featureFlagNote,
1116
+ summary: featureFlagOk
1117
+ ? "Codex hooks are active."
1118
+ : "Codex hooks are inactive; configured hooks will be ignored.",
1119
+ fix: "Set `[features] codex_hooks = true` in the Codex config or run cclaw init/sync with Codex flag repair.",
1120
+ docRef: "docs/harnesses.md"
1033
1121
  });
1122
+ if (parsedConfig?.strictness === "strict") {
1123
+ checks.push({
1124
+ name: "hook:codex:feature_flag_active",
1125
+ ok: featureFlagOk,
1126
+ details: featureFlagNote,
1127
+ summary: featureFlagOk
1128
+ ? "Codex hooks are active for strict runtime enforcement."
1129
+ : "Codex hooks are inactive; strict Codex hook enforcement is not ready.",
1130
+ fix: "Set `[features] codex_hooks = true` in the Codex config so strict Codex hooks can run.",
1131
+ docRef: "docs/harnesses.md"
1132
+ });
1133
+ }
1034
1134
  // Legacy `.codex/commands/*` must not linger from older cclaw installs.
1035
1135
  // (The `.codex/hooks.json` path is now managed and is validated above,
1036
1136
  // so there is no longer a legacy_hooks_json warning.)
@@ -63,6 +63,7 @@ export interface CloseoutState {
63
63
  retroSkipReason?: string;
64
64
  compoundCompletedAt?: string;
65
65
  compoundSkipped?: boolean;
66
+ compoundSkipReason?: string;
66
67
  compoundPromoted: number;
67
68
  }
68
69
  export declare function createInitialCloseoutState(): CloseoutState;
@@ -42,6 +42,7 @@ export function createInitialCloseoutState() {
42
42
  retroSkipReason: undefined,
43
43
  compoundCompletedAt: undefined,
44
44
  compoundSkipped: undefined,
45
+ compoundSkipReason: undefined,
45
46
  compoundPromoted: 0
46
47
  };
47
48
  }
package/dist/install.js CHANGED
@@ -191,15 +191,84 @@ function resolveRepoRoot() {
191
191
  return process.cwd();
192
192
  }
193
193
 
194
/**
 * Detect git's all-zero object id (40 hex digits for SHA-1, 64 for SHA-256),
 * which the pre-push protocol uses for refs being created or deleted.
 *
 * @param {string} value - Candidate object id.
 * @returns {boolean} True for a string of 40–64 zeros, nothing else.
 */
function isZeroSha(value) {
    const allZeroObjectId = /^0{40,64}$/u;
    return allZeroObjectId.test(value);
}
197
+
198
/**
 * Read all of stdin (file descriptor 0) as UTF-8 text.
 *
 * @returns {string} The stdin contents, or "" when the descriptor cannot be
 *   read (e.g. closed or unavailable) — best-effort by design, since the
 *   pre-push caller falls back to git queries when stdin is empty.
 */
function readStdin() {
    let content = "";
    try {
        content = fs.readFileSync(0, "utf8");
    } catch {
        // No readable stdin: treat as empty input rather than failing the hook.
    }
    return content;
}
205
+
206
/**
 * Merge several newline-separated chunks into one deduplicated line list.
 * Lines are trimmed, blanks dropped, first-occurrence order preserved, and
 * the result re-joined with "\n".
 *
 * @param {string[]} chunks - Text chunks (LF or CRLF separated).
 * @returns {string} Deduplicated lines joined by "\n" ("" for no lines).
 */
function uniqueLines(chunks) {
    const seen = new Set();
    for (const chunk of chunks) {
        for (const rawLine of chunk.split(/\r?\n/gu)) {
            const line = rawLine.trim();
            if (line.length > 0) {
                seen.add(line);
            }
        }
    }
    return [...seen].join("\n");
}
212
+ }
213
+
214
/**
 * List file names changed across a git revision range.
 *
 * @param {string} root - Working directory for the git invocation.
 * @param {string} range - Revision range (e.g. "a..b") passed to `git diff`.
 * @returns {string} `git diff --name-only` stdout, or "" on non-zero exit.
 */
function diffNames(root, range) {
    const outcome = runGit(["diff", "--name-only", range], root);
    if (outcome.status !== 0) {
        return "";
    }
    return outcome.stdout;
}
218
+
219
/**
 * Collect the names of files touched by commits reachable from `localSha`
 * but not from any remote-tracking ref (i.e. not yet pushed anywhere).
 *
 * @param {string} root - Working directory for git invocations.
 * @param {string} [localSha="HEAD"] - Tip commit to walk back from.
 * @returns {string} Deduplicated newline-joined file names; "" when rev-list
 *   fails or there are no unpushed commits.
 */
function changedFilesFromUnpushedCommits(root, localSha = "HEAD") {
    const revList = runGit(["rev-list", "--reverse", localSha, "--not", "--remotes"], root);
    if (revList.status !== 0 || revList.stdout.trim().length === 0) {
        return "";
    }
    const commits = revList.stdout
        .split(/\r?\n/gu)
        .map((line) => line.trim())
        .filter((line) => line.length > 0);
    const chunks = [];
    for (const commit of commits) {
        // --root lets diff-tree report files even for a parentless first commit.
        const diffTree = runGit(["diff-tree", "--no-commit-id", "--name-only", "-r", "--root", commit], root);
        if (diffTree.status === 0) {
            chunks.push(diffTree.stdout);
        }
    }
    return uniqueLines(chunks);
}
231
+
232
/**
 * Derive the set of changed files from git's pre-push hook stdin protocol.
 *
 * Each stdin line is `<local-ref> <local-sha> <remote-ref> <remote-sha>`.
 * For every pushed ref, pick the best available range:
 *   1. remote sha known and non-zero  -> diff remote..local
 *   2. otherwise, current branch upstream resolvable -> diff upstream..local
 *   3. otherwise -> files from all unpushed commits reachable from local sha
 *
 * @param {string} root - Working directory for git invocations.
 * @param {string} stdin - Raw pre-push stdin text.
 * @returns {string} Deduplicated newline-joined file names ("" when nothing
 *   usable was found).
 */
function changedFilesFromPrePushStdin(root, stdin) {
    const chunks = [];
    for (const rawLine of stdin.split(/\r?\n/gu)) {
        const parts = rawLine.trim().split(/\s+/u);
        // Malformed or blank lines don't have the 4 protocol fields; skip them.
        if (parts.length < 4) continue;
        const [localRef, localSha, remoteRef, remoteSha] = parts;
        void localRef;
        void remoteRef;
        // A zero local sha means the ref is being deleted: nothing to diff.
        if (!localSha || isZeroSha(localSha)) continue;
        if (remoteSha && !isZeroSha(remoteSha)) {
            // Remote already has this ref: diff exactly what the push adds.
            chunks.push(diffNames(root, remoteSha + ".." + localSha));
            continue;
        }
        // NOTE(review): this resolves the CURRENT branch's upstream, not the
        // upstream of `localRef` being pushed — confirm intended for
        // multi-ref pushes.
        const upstream = runGit(["rev-parse", "--verify", "--quiet", "@{upstream}"], root);
        if (upstream.status === 0 && upstream.stdout.trim().length > 0) {
            chunks.push(diffNames(root, upstream.stdout.trim() + ".." + localSha));
            continue;
        }
        // New ref with no upstream: fall back to every unpushed commit.
        chunks.push(changedFilesFromUnpushedCommits(root, localSha));
    }
    return uniqueLines(chunks);
}
254
+
194
255
  function resolveChangedFiles(root) {
195
256
  if (HOOK_NAME === "pre-commit") {
196
257
  const result = runGit(["diff", "--cached", "--name-only"], root);
197
258
  return result.status === 0 ? result.stdout : "";
198
259
  }
199
- const upstreamResult = runGit(["diff", "--name-only", "@{upstream}...HEAD"], root);
260
+ const stdinChanged = changedFilesFromPrePushStdin(root, readStdin());
261
+ if (stdinChanged.length > 0) {
262
+ return stdinChanged;
263
+ }
264
+ const upstreamResult = runGit(["diff", "--name-only", "@{upstream}..HEAD"], root);
200
265
  if (upstreamResult.status === 0) {
201
266
  return upstreamResult.stdout;
202
267
  }
268
+ const unpushed = changedFilesFromUnpushedCommits(root);
269
+ if (unpushed.length > 0) {
270
+ return unpushed;
271
+ }
203
272
  const fallback = runGit(["diff", "--name-only", "HEAD~1...HEAD"], root);
204
273
  return fallback.status === 0 ? fallback.stdout : "";
205
274
  }
@@ -1272,6 +1341,8 @@ export async function uninstallCclaw(projectRoot) {
1272
1341
  await removeIfEmpty(path.join(projectRoot, ".agents"));
1273
1342
  const managedAgentNames = [
1274
1343
  "planner",
1344
+ "product-manager",
1345
+ "critic",
1275
1346
  "reviewer",
1276
1347
  "security-reviewer",
1277
1348
  "test-author",
@@ -505,18 +505,40 @@ export async function appendKnowledge(projectRoot, seeds, defaults = {}) {
505
505
  appendedEntries
506
506
  };
507
507
  }
508
// Two-letter lowercase tokens that are meaningful technical terms and must
// survive the minimum-length filter below.
const SHORT_TECHNICAL_TOKEN_SET = new Set(["ci", "db", "ui", "qa", "ux"]);
/**
 * Split free text into lowercase alphanumeric tokens.
 *
 * Tokens of 3+ characters are always kept. Shorter tokens survive only when
 * the raw match is exactly two uppercase letters (an acronym like "CI") or
 * the lowercased form is a known short technical term.
 *
 * @param {string|null|undefined} value - Text to tokenize.
 * @returns {string[]} Lowercased tokens in order of appearance ([] for falsy input).
 */
function tokenizeText(value) {
    if (!value) {
        return [];
    }
    const tokens = [];
    for (const [word] of value.matchAll(/[A-Za-z0-9]+/gu)) {
        const lowered = (word ?? "").toLowerCase();
        if (lowered.length >= 3) {
            tokens.push(lowered);
        } else if (/^[A-Z]{2}$/u.test(word) || SHORT_TECHNICAL_TOKEN_SET.has(lowered)) {
            tokens.push(lowered);
        }
    }
    return tokens;
}
517
527
  function uniqueTokens(values) {
518
528
  return [...new Set(values)];
519
529
  }
530
+ function supersededTriggerSet(entries) {
531
+ const superseded = new Set();
532
+ for (const entry of entries) {
533
+ for (const trigger of entry.supersedes ?? []) {
534
+ superseded.add(normalizeText(trigger));
535
+ }
536
+ }
537
+ return superseded;
538
+ }
539
+ function isSupersededLearning(entry, supersededTriggers) {
540
+ return entry.superseded_by !== undefined || supersededTriggers.has(normalizeText(entry.trigger));
541
+ }
520
542
  function pathTokens(paths) {
521
543
  if (!Array.isArray(paths) || paths.length === 0)
522
544
  return [];
@@ -538,7 +560,9 @@ export async function selectRelevantLearnings(projectRoot, options = {}) {
538
560
  const limit = typeof options.limit === "number" && Number.isFinite(options.limit) && options.limit > 0
539
561
  ? Math.floor(options.limit)
540
562
  : 8;
541
- const ranked = entries.map((entry, index) => {
563
+ const staleTriggers = supersededTriggerSet(entries);
564
+ const activeEntries = entries.filter((entry) => !isSupersededLearning(entry, staleTriggers));
565
+ const ranked = activeEntries.map((entry, index) => {
542
566
  let score = 0;
543
567
  let stageScore = 0;
544
568
  if (stage) {
@@ -35,6 +35,13 @@ function stateDirPath(projectRoot) {
35
35
  function archiveLockPath(projectRoot) {
36
36
  return path.join(projectRoot, RUNTIME_ROOT, "state", ".archive.lock");
37
37
  }
38
/**
 * Decide whether the compound leg of closeout is complete for a flow state.
 *
 * Complete when any of: a completion timestamp was recorded, at least one
 * guidance item was promoted, or compound review was explicitly skipped
 * with a non-blank reason string.
 *
 * @param {{closeout: object}} state - Flow state holding the closeout record.
 * @returns {boolean} True when compound closeout counts as done.
 */
function compoundCloseoutComplete(state) {
    const { compoundCompletedAt, compoundPromoted, compoundSkipped, compoundSkipReason } = state.closeout;
    if (compoundCompletedAt !== undefined) {
        return true;
    }
    if (compoundPromoted > 0) {
        return true;
    }
    // Skipping only counts with an explicit, non-blank reason.
    return compoundSkipped === true &&
        typeof compoundSkipReason === "string" &&
        compoundSkipReason.trim().length > 0;
}
38
45
  async function snapshotStateDirectory(projectRoot, destinationRoot) {
39
46
  const sourceDir = stateDirPath(projectRoot);
40
47
  if (!(await exists(sourceDir))) {
@@ -209,6 +216,10 @@ export async function archiveRun(projectRoot, runName, options = {}) {
209
216
  sourceState.closeout.retroSkipReason.trim().length > 0;
210
217
  const readyForArchive = sourceState.closeout.shipSubstate === "ready_to_archive";
211
218
  const inShipCloseout = sourceState.currentStage === "ship";
219
+ if (readyForArchive && !compoundCloseoutComplete(sourceState)) {
220
+ throw new Error("Archive blocked: compound closeout is incomplete. " +
221
+ "Promote compound guidance or skip compound review with an explicit reason before archiving.");
222
+ }
212
223
  if (inShipCloseout && skipRetro) {
213
224
  throw new Error("Archive blocked: --skip-retro is not allowed while current stage is ship. " +
214
225
  "Complete closeout to ready_to_archive via /cc-next.");
@@ -255,21 +255,27 @@ function sanitizeCloseoutState(value) {
255
255
  ? true
256
256
  : undefined;
257
257
  const compoundCompletedAt = typeof typed.compoundCompletedAt === "string" ? typed.compoundCompletedAt : undefined;
258
- const compoundSkipped = typeof typed.compoundSkipped === "boolean" ? typed.compoundSkipped : undefined;
258
+ const compoundSkipReason = typeof typed.compoundSkipReason === "string"
259
+ ? typed.compoundSkipReason.trim() || undefined
260
+ : undefined;
261
+ const compoundSkipped = typed.compoundSkipped === true && compoundSkipReason !== undefined
262
+ ? true
263
+ : undefined;
259
264
  const promotedRaw = typed.compoundPromoted;
260
265
  const compoundPromoted = typeof promotedRaw === "number" && Number.isFinite(promotedRaw) && promotedRaw >= 0
261
266
  ? Math.floor(promotedRaw)
262
267
  : 0;
263
- // Demote shipSubstate when its retro invariant is violated on disk. A
264
- // hand-edited flow-state could claim `ready_to_archive` or `compound_review`
265
- // without ever going through the retro step, which would let `archive`
266
- // proceed and skip the gate. Compound completion is not independently
267
- // tracked in all flows (some runs rely on knowledge.jsonl + the retro
268
- // window), so we only demote when the retro leg is missing outright.
268
+ // Demote shipSubstate when its closeout invariants are violated on disk. A
269
+ // hand-edited flow-state could claim `ready_to_archive` without completing
270
+ // the compound leg, which would let `archive` skip durable closeout proof.
269
271
  const retroDone = retroAcceptedAt !== undefined || retroSkipped === true;
272
+ const compoundDone = compoundCompletedAt !== undefined || compoundPromoted > 0 || compoundSkipped === true;
270
273
  if (!retroDone && (shipSubstate === "ready_to_archive" || shipSubstate === "compound_review")) {
271
274
  shipSubstate = "retro_review";
272
275
  }
276
+ else if (shipSubstate === "ready_to_archive" && !compoundDone) {
277
+ shipSubstate = "compound_review";
278
+ }
273
279
  return {
274
280
  shipSubstate,
275
281
  retroDraftedAt,
@@ -278,6 +284,7 @@ function sanitizeCloseoutState(value) {
278
284
  retroSkipReason,
279
285
  compoundCompletedAt,
280
286
  compoundSkipped,
287
+ compoundSkipReason,
281
288
  compoundPromoted
282
289
  };
283
290
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "cclaw-cli",
3
- "version": "0.51.23",
3
+ "version": "0.51.24",
4
4
  "description": "Installer-first flow toolkit for coding agents",
5
5
  "type": "module",
6
6
  "bin": {