cclaw-cli 0.48.1 → 0.48.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +10 -3
  2. package/dist/artifact-linter.js +2 -8
  3. package/dist/cli.js +8 -1
  4. package/dist/config.d.ts +3 -0
  5. package/dist/config.js +13 -3
  6. package/dist/constants.d.ts +6 -0
  7. package/dist/constants.js +11 -0
  8. package/dist/content/contracts.d.ts +2 -2
  9. package/dist/content/contracts.js +2 -2
  10. package/dist/content/core-agents.d.ts +1 -1
  11. package/dist/content/core-agents.js +1 -1
  12. package/dist/content/hooks.js +16 -15
  13. package/dist/content/next-command.js +4 -2
  14. package/dist/content/observe.d.ts +2 -2
  15. package/dist/content/observe.js +83 -13
  16. package/dist/content/opencode-plugin.js +227 -45
  17. package/dist/content/stage-schema.js +1 -1
  18. package/dist/content/stages/ship.js +2 -5
  19. package/dist/content/templates.js +3 -6
  20. package/dist/delegation.d.ts +5 -1
  21. package/dist/delegation.js +12 -8
  22. package/dist/doctor.js +132 -15
  23. package/dist/eval/runner.js +36 -4
  24. package/dist/feature-system.d.ts +11 -4
  25. package/dist/feature-system.js +54 -10
  26. package/dist/flow-state.d.ts +2 -0
  27. package/dist/flow-state.js +19 -2
  28. package/dist/fs-utils.d.ts +4 -1
  29. package/dist/fs-utils.js +20 -4
  30. package/dist/gate-evidence.d.ts +2 -0
  31. package/dist/gate-evidence.js +13 -4
  32. package/dist/install.js +25 -23
  33. package/dist/internal/advance-stage.js +49 -10
  34. package/dist/knowledge-store.d.ts +8 -0
  35. package/dist/knowledge-store.js +113 -33
  36. package/dist/retro-gate.js +33 -23
  37. package/dist/run-archive.js +166 -128
  38. package/dist/run-persistence.d.ts +8 -1
  39. package/dist/run-persistence.js +7 -6
  40. package/dist/trace-matrix.js +7 -7
  41. package/package.json +1 -1
package/dist/install.js CHANGED
@@ -178,15 +178,15 @@ async function ensureStructure(projectRoot) {
178
178
  await ensureDir(path.join(projectRoot, dir));
179
179
  }
180
180
  }
181
- async function writeCommandContracts(projectRoot) {
182
- for (const stage of FLOW_STAGES) {
183
- await writeFileSafe(runtimePath(projectRoot, "commands", `${stage}.md`), stageCommandContract(stage));
184
- }
181
+ async function writeCommandContracts(projectRoot, track = "standard") {
182
+ await Promise.all(FLOW_STAGES.map(async (stage) => {
183
+ await writeFileSafe(runtimePath(projectRoot, "commands", `${stage}.md`), stageCommandContract(stage, track));
184
+ }));
185
185
  }
186
186
  async function writeArtifactTemplates(projectRoot) {
187
- for (const [fileName, content] of Object.entries(ARTIFACT_TEMPLATES)) {
187
+ await Promise.all(Object.entries(ARTIFACT_TEMPLATES).map(async ([fileName, content]) => {
188
188
  await writeFileSafe(runtimePath(projectRoot, "templates", fileName), content);
189
- }
189
+ }));
190
190
  }
191
191
  /**
192
192
  * Seed the `.cclaw/evals/` scaffold. Only writes files that do not already
@@ -699,7 +699,7 @@ async function writeHooks(projectRoot, config) {
699
699
  async function ensureKnowledgeStore(projectRoot) {
700
700
  const storePath = runtimePath(projectRoot, "knowledge.jsonl");
701
701
  if (!(await exists(storePath))) {
702
- await writeFileSafe(storePath, "");
702
+ await writeFileSafe(storePath, "", { mode: 0o600 });
703
703
  }
704
704
  const legacyMdPath = runtimePath(projectRoot, "knowledge.md");
705
705
  if (await exists(legacyMdPath)) {
@@ -874,7 +874,7 @@ async function ensureSessionStateFiles(projectRoot) {
874
874
  }
875
875
  const activityPath = path.join(stateDir, "stage-activity.jsonl");
876
876
  if (!(await exists(activityPath))) {
877
- await writeFileSafe(activityPath, "");
877
+ await writeFileSafe(activityPath, "", { mode: 0o600 });
878
878
  }
879
879
  const checkpointPath = path.join(stateDir, "checkpoint.json");
880
880
  if (!(await exists(checkpointPath))) {
@@ -887,7 +887,7 @@ async function ensureSessionStateFiles(projectRoot) {
887
887
  blockers: [],
888
888
  timestamp: new Date().toISOString()
889
889
  };
890
- await writeFileSafe(checkpointPath, `${JSON.stringify(initialCheckpoint, null, 2)}\n`);
890
+ await writeFileSafe(checkpointPath, `${JSON.stringify(initialCheckpoint, null, 2)}\n`, { mode: 0o600 });
891
891
  }
892
892
  const suggestionMemoryPath = path.join(stateDir, "suggestion-memory.json");
893
893
  if (!(await exists(suggestionMemoryPath))) {
@@ -897,11 +897,11 @@ async function ensureSessionStateFiles(projectRoot) {
897
897
  lastSuggestedStage: "",
898
898
  lastSuggestedAt: ""
899
899
  };
900
- await writeFileSafe(suggestionMemoryPath, `${JSON.stringify(suggestionMemory, null, 2)}\n`);
900
+ await writeFileSafe(suggestionMemoryPath, `${JSON.stringify(suggestionMemory, null, 2)}\n`, { mode: 0o600 });
901
901
  }
902
902
  const contextModePath = path.join(stateDir, "context-mode.json");
903
903
  if (!(await exists(contextModePath))) {
904
- await writeFileSafe(contextModePath, `${JSON.stringify(createInitialContextModeState(), null, 2)}\n`);
904
+ await writeFileSafe(contextModePath, `${JSON.stringify(createInitialContextModeState(), null, 2)}\n`, { mode: 0o600 });
905
905
  }
906
906
  const knowledgeDigestPath = path.join(stateDir, "knowledge-digest.md");
907
907
  if (!(await exists(knowledgeDigestPath))) {
@@ -909,18 +909,18 @@ async function ensureSessionStateFiles(projectRoot) {
909
909
  }
910
910
  const tddCycleLogPath = path.join(stateDir, "tdd-cycle-log.jsonl");
911
911
  if (!(await exists(tddCycleLogPath))) {
912
- await writeFileSafe(tddCycleLogPath, "");
912
+ await writeFileSafe(tddCycleLogPath, "", { mode: 0o600 });
913
913
  }
914
914
  const reconciliationNoticesPath = path.join(stateDir, "reconciliation-notices.json");
915
915
  if (!(await exists(reconciliationNoticesPath))) {
916
- await writeFileSafe(reconciliationNoticesPath, `${JSON.stringify({ schemaVersion: 1, notices: [] }, null, 2)}\n`);
916
+ await writeFileSafe(reconciliationNoticesPath, `${JSON.stringify({ schemaVersion: 1, notices: [] }, null, 2)}\n`, { mode: 0o600 });
917
917
  }
918
918
  const flowSnapshotPath = path.join(stateDir, "flow-state.snapshot.json");
919
919
  if (!(await exists(flowSnapshotPath))) {
920
920
  await writeFileSafe(flowSnapshotPath, `${JSON.stringify({
921
921
  capturedAt: new Date().toISOString(),
922
922
  state: flow
923
- }, null, 2)}\n`);
923
+ }, null, 2)}\n`, { mode: 0o600 });
924
924
  }
925
925
  }
926
926
  async function writeRulebook(projectRoot) {
@@ -977,8 +977,8 @@ async function writeState(projectRoot, config, forceReset = false) {
977
977
  if (!forceReset && (await exists(statePath))) {
978
978
  return;
979
979
  }
980
- const state = createInitialFlowState("active", config.defaultTrack ?? "standard");
981
- await writeFileSafe(statePath, `${JSON.stringify(state, null, 2)}\n`);
980
+ const state = createInitialFlowState({ track: config.defaultTrack ?? "standard" });
981
+ await writeFileSafe(statePath, `${JSON.stringify(state, null, 2)}\n`, { mode: 0o600 });
982
982
  }
983
983
  async function writeAdapterManifest(projectRoot, harnesses) {
984
984
  const manifest = {
@@ -1198,13 +1198,15 @@ async function materializeRuntime(projectRoot, config, forceStateReset) {
1198
1198
  await ensureStructure(projectRoot);
1199
1199
  await cleanLegacyArtifacts(projectRoot);
1200
1200
  await cleanStaleFiles(projectRoot);
1201
- await writeCommandContracts(projectRoot);
1202
- await writeUtilityCommands(projectRoot, config);
1203
- await writeSkills(projectRoot, config);
1204
- await writeContextModes(projectRoot);
1205
- await writeArtifactTemplates(projectRoot);
1206
- await writeEvalScaffold(projectRoot);
1207
- await writeRulebook(projectRoot);
1201
+ await Promise.all([
1202
+ writeCommandContracts(projectRoot, config.defaultTrack ?? "standard"),
1203
+ writeUtilityCommands(projectRoot, config),
1204
+ writeSkills(projectRoot, config),
1205
+ writeContextModes(projectRoot),
1206
+ writeArtifactTemplates(projectRoot),
1207
+ writeEvalScaffold(projectRoot),
1208
+ writeRulebook(projectRoot)
1209
+ ]);
1208
1210
  await writeState(projectRoot, config, forceStateReset);
1209
1211
  await ensureRunSystem(projectRoot, { createIfMissing: false });
1210
1212
  await ensureSessionStateFiles(projectRoot);
@@ -1,22 +1,51 @@
1
1
  import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
- import { RUNTIME_ROOT } from "../constants.js";
3
+ import { RUNTIME_ROOT, SHIP_FINALIZATION_MODES } from "../constants.js";
4
4
  import { stageSchema } from "../content/stage-schema.js";
5
5
  import { appendDelegation, checkMandatoryDelegations } from "../delegation.js";
6
6
  import { readActiveFeature } from "../feature-system.js";
7
7
  import { verifyCompletedStagesGateClosure, verifyCurrentStageGateEvidence } from "../gate-evidence.js";
8
8
  import { extractMarkdownSectionBody, parseLearningsSection } from "../artifact-linter.js";
9
- import { isFlowTrack, nextStage } from "../flow-state.js";
9
+ import { getAvailableTransitions, getTransitionGuards, isFlowTrack } from "../flow-state.js";
10
10
  import { appendKnowledge } from "../knowledge-store.js";
11
11
  import { readFlowState, writeFlowState } from "../runs.js";
12
12
  import { FLOW_STAGES } from "../types.js";
13
13
  function unique(values) {
14
14
  return [...new Set(values)];
15
15
  }
16
+ function resolveSuccessorTransition(stage, track, transitionTargets, satisfiedGuards, selectedTransitionGuards) {
17
+ const natural = transitionTargets[0] ?? null;
18
+ const specialTargets = transitionTargets.filter((target) => target !== natural);
19
+ for (const target of specialTargets) {
20
+ const guards = getTransitionGuards(stage, target, track);
21
+ if (guards.length === 0)
22
+ continue;
23
+ const selectedSpecial = guards.some((guard) => selectedTransitionGuards.has(guard));
24
+ if (!selectedSpecial)
25
+ continue;
26
+ if (guards.every((guard) => satisfiedGuards.has(guard))) {
27
+ return target;
28
+ }
29
+ }
30
+ if (natural) {
31
+ const guards = getTransitionGuards(stage, natural, track);
32
+ if (guards.every((guard) => satisfiedGuards.has(guard))) {
33
+ return natural;
34
+ }
35
+ }
36
+ for (const target of specialTargets) {
37
+ const guards = getTransitionGuards(stage, target, track);
38
+ if (guards.every((guard) => satisfiedGuards.has(guard))) {
39
+ return target;
40
+ }
41
+ }
42
+ return natural;
43
+ }
16
44
  const TEST_COMMAND_HINT_PATTERN = /\b(?:npm test|pnpm test|yarn test|bun test|vitest|jest|pytest|go test|cargo test|mvn test|gradle test|dotnet test)\b/iu;
17
45
  const SHA_WITH_LABEL_PATTERN = /\b(?:sha|commit)(?:\s*[:=]|\s+)\s*[0-9a-f]{7,40}\b/iu;
18
46
  const PASS_STATUS_PATTERN = /\b(?:pass|passed|green|ok)\b/iu;
19
- const SHIP_FINALIZATION_MODE_PATTERN = /\bFINALIZE_(?:MERGE_LOCAL|OPEN_PR|QUEUE|HANDOFF|SKIP)\b/u;
47
+ const SHIP_FINALIZATION_MODE_PATTERN = new RegExp(`\\b(?:${SHIP_FINALIZATION_MODES.join("|")})\\b`, "u");
48
+ const SHIP_FINALIZATION_MODE_HINT = SHIP_FINALIZATION_MODES.join(", ");
20
49
  // Per-gate validators keyed by `${stage}:${gateId}`. Returning a non-null
21
50
  // string surfaces the reason as an `advance-stage` failure so evidence is
22
51
  // guaranteed to carry the structural breadcrumbs downstream tooling
@@ -36,7 +65,7 @@ const GATE_EVIDENCE_VALIDATORS = {
36
65
  },
37
66
  "ship:ship_finalization_executed": (evidence) => {
38
67
  if (!SHIP_FINALIZATION_MODE_PATTERN.test(evidence)) {
39
- return "must name the finalization mode that ran (for example `FINALIZE_MERGE_LOCAL`, `FINALIZE_OPEN_PR`, `FINALIZE_HANDOFF`, `FINALIZE_QUEUE`, or `FINALIZE_SKIP`).";
68
+ return `must name the finalization mode that ran (for example ${SHIP_FINALIZATION_MODE_HINT}).`;
40
69
  }
41
70
  return null;
42
71
  }
@@ -395,11 +424,18 @@ async function runAdvanceStage(projectRoot, args, io) {
395
424
  const requiredGateIds = schema.requiredGates
396
425
  .filter((gate) => gate.tier === "required")
397
426
  .map((gate) => gate.id);
427
+ const transitionTargets = getAvailableTransitions(args.stage, flowState.track).map((rule) => rule.to);
398
428
  const allowedGateIds = new Set(schema.requiredGates.map((gate) => gate.id));
429
+ const transitionGuardIds = new Set(transitionTargets
430
+ .flatMap((target) => getTransitionGuards(args.stage, target, flowState.track))
431
+ .filter((guardId) => !allowedGateIds.has(guardId)));
432
+ const selectableGateIds = new Set([...allowedGateIds, ...transitionGuardIds]);
399
433
  const selectedGateIds = args.passedGateIds.length > 0
400
- ? args.passedGateIds.filter((gateId) => allowedGateIds.has(gateId))
434
+ ? args.passedGateIds.filter((gateId) => selectableGateIds.has(gateId))
401
435
  : requiredGateIds;
402
- const missingRequired = requiredGateIds.filter((gateId) => !selectedGateIds.includes(gateId));
436
+ const selectedGateIdSet = new Set(selectedGateIds);
437
+ const selectedTransitionGuards = selectedGateIds.filter((gateId) => transitionGuardIds.has(gateId));
438
+ const missingRequired = requiredGateIds.filter((gateId) => !selectedGateIdSet.has(gateId));
403
439
  if (missingRequired.length > 0) {
404
440
  io.stderr.write(`cclaw internal advance-stage: required gates not selected as passed: ${missingRequired.join(", ")}.\n`);
405
441
  return 1;
@@ -429,14 +465,16 @@ async function runAdvanceStage(projectRoot, args, io) {
429
465
  }
430
466
  const catalog = flowState.stageGateCatalog[args.stage];
431
467
  const nextPassed = unique([...catalog.passed, ...selectedGateIds]).filter((gateId) => allowedGateIds.has(gateId));
432
- const nextBlocked = unique(catalog.blocked.filter((gateId) => !nextPassed.includes(gateId))).filter((gateId) => allowedGateIds.has(gateId));
468
+ const nextPassedSet = new Set(nextPassed);
469
+ const nextBlocked = unique(catalog.blocked.filter((gateId) => !nextPassedSet.has(gateId))).filter((gateId) => allowedGateIds.has(gateId));
433
470
  const conditional = new Set(catalog.conditional);
434
471
  const nextTriggered = unique([
435
472
  ...catalog.triggered.filter((gateId) => conditional.has(gateId)),
436
473
  ...nextPassed.filter((gateId) => conditional.has(gateId)),
437
474
  ...nextBlocked.filter((gateId) => conditional.has(gateId))
438
475
  ]);
439
- const missingGuardEvidence = nextPassed.filter((gateId) => {
476
+ const guardEvidenceGateIds = unique([...nextPassed, ...selectedTransitionGuards]);
477
+ const missingGuardEvidence = guardEvidenceGateIds.filter((gateId) => {
440
478
  const existing = flowState.guardEvidence[gateId];
441
479
  if (typeof existing === "string" && existing.trim().length > 0) {
442
480
  return false;
@@ -464,7 +502,7 @@ async function runAdvanceStage(projectRoot, args, io) {
464
502
  return 1;
465
503
  }
466
504
  const nextGuardEvidence = { ...flowState.guardEvidence };
467
- for (const gateId of nextPassed) {
505
+ for (const gateId of guardEvidenceGateIds) {
468
506
  const provided = args.evidenceByGate[gateId];
469
507
  if (typeof provided === "string" && provided.trim().length > 0) {
470
508
  nextGuardEvidence[gateId] = provided.trim();
@@ -508,7 +546,8 @@ async function runAdvanceStage(projectRoot, args, io) {
508
546
  io.stderr.write(`cclaw internal advance-stage: learnings harvest failed for "${schema.artifactFile}". ${learningsHarvest.details}\n`);
509
547
  return 1;
510
548
  }
511
- const successor = nextStage(args.stage, flowState.track);
549
+ const satisfiedGuards = new Set([...nextPassed, ...selectedTransitionGuards]);
550
+ const successor = resolveSuccessorTransition(args.stage, flowState.track, transitionTargets, satisfiedGuards, new Set(selectedTransitionGuards));
512
551
  const completedStages = flowState.completedStages.includes(args.stage)
513
552
  ? [...flowState.completedStages]
514
553
  : [...flowState.completedStages, args.stage];
@@ -58,9 +58,17 @@ export interface AppendKnowledgeResult {
58
58
  errors: string[];
59
59
  appendedEntries: KnowledgeEntry[];
60
60
  }
61
+ export interface ReadKnowledgeOptions {
62
+ lockAware?: boolean;
63
+ }
64
+ export interface ReadKnowledgeResult {
65
+ entries: KnowledgeEntry[];
66
+ malformedLines: number;
67
+ }
61
68
  export declare function validateKnowledgeEntry(entry: unknown): {
62
69
  ok: boolean;
63
70
  errors: string[];
64
71
  };
65
72
  export declare function materializeKnowledgeEntry(seed: KnowledgeSeedEntry, defaults?: AppendKnowledgeDefaults): KnowledgeEntry;
73
+ export declare function readKnowledgeSafely(projectRoot: string, options?: ReadKnowledgeOptions): Promise<ReadKnowledgeResult>;
66
74
  export declare function appendKnowledge(projectRoot: string, seeds: KnowledgeSeedEntry[], defaults?: AppendKnowledgeDefaults): Promise<AppendKnowledgeResult>;
@@ -66,8 +66,78 @@ function dedupeKey(entry) {
66
66
  entry.severity === undefined ? "none" : entry.severity
67
67
  ].join("|");
68
68
  }
69
+ function emptyKnowledgeSnapshot() {
70
+ return {
71
+ lines: [],
72
+ entries: [],
73
+ malformedLines: 0,
74
+ keyToIndex: new Map(),
75
+ entryByIndex: new Map()
76
+ };
77
+ }
78
+ function parseKnowledgeSnapshot(raw) {
79
+ const lines = stripBom(raw).split(/\r?\n/u);
80
+ const entries = [];
81
+ const keyToIndex = new Map();
82
+ const entryByIndex = new Map();
83
+ let malformedLines = 0;
84
+ for (let i = 0; i < lines.length; i += 1) {
85
+ const trimmed = lines[i].trim();
86
+ if (trimmed.length === 0)
87
+ continue;
88
+ try {
89
+ const parsed = JSON.parse(trimmed);
90
+ const validated = validateKnowledgeEntry(parsed);
91
+ if (!validated.ok) {
92
+ malformedLines += 1;
93
+ continue;
94
+ }
95
+ const entry = parsed;
96
+ entries.push(entry);
97
+ const key = dedupeKey(entry);
98
+ if (!keyToIndex.has(key)) {
99
+ keyToIndex.set(key, i);
100
+ }
101
+ entryByIndex.set(i, entry);
102
+ }
103
+ catch {
104
+ malformedLines += 1;
105
+ }
106
+ }
107
+ return {
108
+ lines,
109
+ entries,
110
+ malformedLines,
111
+ keyToIndex,
112
+ entryByIndex
113
+ };
114
+ }
115
+ async function readKnowledgeSnapshot(filePath) {
116
+ try {
117
+ const raw = await fs.readFile(filePath, "utf8");
118
+ return parseKnowledgeSnapshot(raw);
119
+ }
120
+ catch (error) {
121
+ const code = error?.code;
122
+ if (code === "ENOENT") {
123
+ return emptyKnowledgeSnapshot();
124
+ }
125
+ throw error;
126
+ }
127
+ }
128
+ function mergeKnowledgeOccurrence(target, incoming) {
129
+ const mergedFrequency = target.frequency + Math.max(1, incoming.frequency);
130
+ const mergedLastSeen = target.last_seen_ts >= incoming.last_seen_ts
131
+ ? target.last_seen_ts
132
+ : incoming.last_seen_ts;
133
+ return {
134
+ ...target,
135
+ frequency: mergedFrequency,
136
+ last_seen_ts: mergedLastSeen
137
+ };
138
+ }
69
139
  function isIsoUtcTimestamp(value) {
70
- return /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$/u.test(value);
140
+ return /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,3})?Z$/u.test(value);
71
141
  }
72
142
  function isNullableString(value) {
73
143
  return value === null || typeof value === "string";
@@ -176,29 +246,19 @@ export function materializeKnowledgeEntry(seed, defaults = {}) {
176
246
  }
177
247
  return entry;
178
248
  }
179
- async function readExistingKnowledgeKeys(filePath) {
180
- const keys = new Set();
181
- try {
182
- const raw = stripBom(await fs.readFile(filePath, "utf8"));
183
- const lines = raw.split(/\r?\n/u).map((line) => line.trim()).filter((line) => line.length > 0);
184
- for (const line of lines) {
185
- try {
186
- const parsed = JSON.parse(line);
187
- const validated = validateKnowledgeEntry(parsed);
188
- if (!validated.ok)
189
- continue;
190
- const entry = parsed;
191
- keys.add(dedupeKey(entry));
192
- }
193
- catch {
194
- // Ignore malformed historical lines for dedupe indexing.
195
- }
196
- }
197
- }
198
- catch {
199
- // Missing file is fine — treat as empty store.
249
+ export async function readKnowledgeSafely(projectRoot, options = {}) {
250
+ const filePath = knowledgePath(projectRoot);
251
+ const read = async () => {
252
+ const snapshot = await readKnowledgeSnapshot(filePath);
253
+ return {
254
+ entries: snapshot.entries,
255
+ malformedLines: snapshot.malformedLines
256
+ };
257
+ };
258
+ if (options.lockAware === false) {
259
+ return read();
200
260
  }
201
- return keys;
261
+ return withDirectoryLock(knowledgeLockPath(projectRoot), read);
202
262
  }
203
263
  export async function appendKnowledge(projectRoot, seeds, defaults = {}) {
204
264
  if (seeds.length === 0) {
@@ -221,22 +281,42 @@ export async function appendKnowledge(projectRoot, seeds, defaults = {}) {
221
281
  const appendedEntries = [];
222
282
  await withDirectoryLock(knowledgeLockPath(projectRoot), async () => {
223
283
  await fs.mkdir(path.dirname(filePath), { recursive: true });
224
- const existingKeys = await readExistingKnowledgeKeys(filePath);
225
- const batchKeys = new Set();
226
- const linesToAppend = [];
284
+ const snapshot = await readKnowledgeSnapshot(filePath);
285
+ const updatedByIndex = new Map();
286
+ const batchEntries = new Map();
227
287
  for (const entry of materialized) {
228
288
  const key = dedupeKey(entry);
229
- if (existingKeys.has(key) || batchKeys.has(key)) {
289
+ const existingIndex = snapshot.keyToIndex.get(key);
290
+ if (existingIndex !== undefined) {
230
291
  skippedDuplicates += 1;
292
+ const base = updatedByIndex.get(existingIndex) ?? snapshot.entryByIndex.get(existingIndex);
293
+ if (base) {
294
+ updatedByIndex.set(existingIndex, mergeKnowledgeOccurrence(base, entry));
295
+ }
231
296
  continue;
232
297
  }
233
- batchKeys.add(key);
234
- existingKeys.add(key);
235
- appendedEntries.push(entry);
236
- linesToAppend.push(JSON.stringify(entry));
298
+ const existingBatchEntry = batchEntries.get(key);
299
+ if (existingBatchEntry) {
300
+ skippedDuplicates += 1;
301
+ batchEntries.set(key, mergeKnowledgeOccurrence(existingBatchEntry, entry));
302
+ continue;
303
+ }
304
+ batchEntries.set(key, { ...entry });
305
+ }
306
+ appendedEntries.push(...batchEntries.values());
307
+ if (updatedByIndex.size === 0 && batchEntries.size === 0) {
308
+ return;
237
309
  }
238
- if (linesToAppend.length > 0) {
239
- await fs.appendFile(filePath, `${linesToAppend.join("\n")}\n`, "utf8");
310
+ const rewrittenLines = snapshot.lines.map((line, index) => {
311
+ const updated = updatedByIndex.get(index);
312
+ return updated ? JSON.stringify(updated) : line;
313
+ }).filter((line) => line.trim().length > 0);
314
+ const linesToWrite = [
315
+ ...rewrittenLines,
316
+ ...Array.from(batchEntries.values(), (entry) => JSON.stringify(entry))
317
+ ];
318
+ if (linesToWrite.length > 0) {
319
+ await fs.writeFile(filePath, `${linesToWrite.join("\n")}\n`, "utf8");
240
320
  }
241
321
  });
242
322
  return {
@@ -2,6 +2,7 @@ import fs from "node:fs/promises";
2
2
  import path from "node:path";
3
3
  import { RUNTIME_ROOT } from "./constants.js";
4
4
  import { exists, stripBom } from "./fs-utils.js";
5
+ import { readKnowledgeSafely } from "./knowledge-store.js";
5
6
  function activeArtifactsPath(projectRoot) {
6
7
  return path.join(projectRoot, RUNTIME_ROOT, "artifacts");
7
8
  }
@@ -61,35 +62,44 @@ export async function evaluateRetroGate(projectRoot, state) {
61
62
  }
62
63
  }
63
64
  const shouldFallbackScan = compoundEntries <= 0 && (windowStartMs !== null || windowEndMs !== null);
64
- const knowledgeFile = path.join(projectRoot, RUNTIME_ROOT, "knowledge.jsonl");
65
- if (shouldFallbackScan && (await exists(knowledgeFile))) {
65
+ if (shouldFallbackScan) {
66
+ const countIfEligible = (parsed) => {
67
+ if (parsed.type !== "compound") {
68
+ return 0;
69
+ }
70
+ const created = typeof parsed.created === "string" ? parseIsoTimestamp(parsed.created) : null;
71
+ if (created === null || !inInclusiveWindow(created, windowStartMs, windowEndMs)) {
72
+ return 0;
73
+ }
74
+ const source = typeof parsed.source === "string"
75
+ ? parsed.source.trim().toLowerCase()
76
+ : null;
77
+ const legacyRetroStage = parsed.stage === "retro";
78
+ return source === "retro" || legacyRetroStage ? 1 : 0;
79
+ };
66
80
  try {
67
- const raw = stripBom(await fs.readFile(knowledgeFile, "utf8"));
81
+ const knowledgeFile = path.join(projectRoot, RUNTIME_ROOT, "knowledge.jsonl");
82
+ const { entries } = await readKnowledgeSafely(projectRoot);
68
83
  compoundEntries = 0;
69
- for (const line of raw.split(/\r?\n/)) {
70
- const trimmed = line.trim();
71
- if (!trimmed)
72
- continue;
73
- try {
74
- const parsed = JSON.parse(trimmed);
75
- if (parsed.type !== "compound") {
76
- continue;
77
- }
78
- const created = typeof parsed.created === "string" ? parseIsoTimestamp(parsed.created) : null;
79
- if (created === null || !inInclusiveWindow(created, windowStartMs, windowEndMs)) {
84
+ for (const parsed of entries) {
85
+ compoundEntries += countIfEligible(parsed);
86
+ }
87
+ // Backward compatibility for historical/hand-edited rows that don't pass
88
+ // strict knowledge schema validation but still carry retro evidence.
89
+ if (compoundEntries === 0 && (await exists(knowledgeFile))) {
90
+ const raw = stripBom(await fs.readFile(knowledgeFile, "utf8"));
91
+ for (const line of raw.split(/\r?\n/)) {
92
+ const trimmed = line.trim();
93
+ if (!trimmed)
80
94
  continue;
95
+ try {
96
+ const parsed = JSON.parse(trimmed);
97
+ compoundEntries += countIfEligible(parsed);
81
98
  }
82
- const source = typeof parsed.source === "string"
83
- ? parsed.source.trim().toLowerCase()
84
- : null;
85
- const legacyRetroStage = parsed.stage === "retro";
86
- if (source === "retro" || legacyRetroStage) {
87
- compoundEntries += 1;
99
+ catch {
100
+ // ignore malformed lines for retro gate calculation
88
101
  }
89
102
  }
90
- catch {
91
- // ignore malformed lines for retro gate calculation
92
- }
93
103
  }
94
104
  }
95
105
  catch {