@phren/cli 0.0.4 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -150,6 +150,26 @@ export async function handleHookPrompt() {
150
150
  if (!keywords)
151
151
  process.exit(0);
152
152
  debugLog(`hook-prompt keywords: "${keywords}"`);
153
+ // Session momentum: track topic frequencies within the session
154
+ let hotTopics = [];
155
+ if (sessionId) {
156
+ const topicFile = sessionMarker(getPhrenPath(), `topics-${sessionId}.json`);
157
+ let sessionTopics = {};
158
+ try {
159
+ if (fs.existsSync(topicFile)) {
160
+ sessionTopics = JSON.parse(fs.readFileSync(topicFile, 'utf8'));
161
+ }
162
+ }
163
+ catch { /* ignore parse errors */ }
164
+ for (const kw of keywordEntries) {
165
+ sessionTopics[kw] = (sessionTopics[kw] ?? 0) + 1;
166
+ }
167
+ fs.writeFileSync(topicFile, JSON.stringify(sessionTopics));
168
+ // Find hot topics (3+ mentions this session)
169
+ hotTopics = Object.entries(sessionTopics)
170
+ .filter(([, count]) => count >= 3)
171
+ .map(([topic]) => topic);
172
+ }
153
173
  const tIndex0 = Date.now();
154
174
  const db = await buildIndex(getPhrenPath(), profile);
155
175
  stage.indexMs = Date.now() - tIndex0;
@@ -197,9 +217,13 @@ export async function handleHookPrompt() {
197
217
  stage.rankMs = Date.now() - tRank0;
198
218
  if (!rows.length)
199
219
  process.exit(0);
200
- const safeTokenBudget = clampInt(process.env.PHREN_CONTEXT_TOKEN_BUDGET, 550, 180, 10000);
220
+ let safeTokenBudget = clampInt(process.env.PHREN_CONTEXT_TOKEN_BUDGET, 550, 180, 10000);
201
221
  const safeLineBudget = clampInt(process.env.PHREN_CONTEXT_SNIPPET_LINES, 6, 2, 100);
202
222
  const safeCharBudget = clampInt(process.env.PHREN_CONTEXT_SNIPPET_CHARS, 520, 120, 10000);
223
+ // Session momentum: boost token budget for hot topics
224
+ if (hotTopics.length > 0) {
225
+ safeTokenBudget = Math.min(Math.floor(safeTokenBudget * 1.3), parseInt(process.env.PHREN_MAX_INJECT_TOKENS ?? '2000', 10));
226
+ }
203
227
  const tSelect0 = Date.now();
204
228
  const { selected, usedTokens } = selectSnippets(rows, keywords, safeTokenBudget, safeLineBudget, safeCharBudget);
205
229
  stage.selectMs = Date.now() - tSelect0;
@@ -16,7 +16,7 @@ const SEARCH_TYPES = new Set([
16
16
  "task",
17
17
  "changelog",
18
18
  "canonical",
19
- "memory-queue",
19
+ "review-queue",
20
20
  "skill",
21
21
  "other",
22
22
  ]);
@@ -45,7 +45,7 @@ function printSearchUsage() {
45
45
  console.error(" phren search --project <name> [--type <type>] [--limit <n>] [--all]");
46
46
  console.error(" phren search --history Show recent searches");
47
47
  console.error(" phren search --from-history <n> Re-run search #n from history");
48
- console.error(" type: claude|summary|findings|reference|task|changelog|canonical|memory-queue|skill|other");
48
+ console.error(" type: claude|summary|findings|reference|task|changelog|canonical|review-queue|skill|other");
49
49
  }
50
50
  function validateAndNormalizeSearchOptions(phrenPath, queryParts, project, type, limit, showHistory, fromHistory, searchAll) {
51
51
  if (showHistory) {
@@ -1,9 +1,11 @@
1
1
  import * as fs from "fs";
2
+ import { statSync } from "fs";
2
3
  import * as path from "path";
3
4
  import { debugLog, EXEC_TIMEOUT_MS, EXEC_TIMEOUT_QUICK_MS } from "./shared.js";
4
5
  import { errorMessage, runGitOrThrow } from "./utils.js";
5
6
  import { findingIdFromLine } from "./finding-impact.js";
6
7
  import { METADATA_REGEX, isArchiveStart, isArchiveEnd } from "./content-metadata.js";
8
+ import { FINDING_TYPE_DECAY, extractFindingType, parseFindingLifecycle } from "./finding-lifecycle.js";
7
9
  export const FINDING_PROVENANCE_SOURCES = [
8
10
  "human",
9
11
  "agent",
@@ -270,6 +272,16 @@ function confidenceForAge(ageDays, decay) {
270
272
  return d90 - ((d90 - d120) * ((ageDays - 90) / 30));
271
273
  return d120; // don't decay further past d120; TTL handles final expiry
272
274
  }
275
/**
 * Report whether a cited file has been modified after the finding's date.
 * Compares calendar days (UTC, YYYY-MM-DD) rather than full timestamps.
 * @param {string} filePath path recorded in the finding's citation
 * @param {string} findingDate ISO date (YYYY-MM-DD) the finding was created
 * @returns {boolean} true only when the file's mtime day is strictly later
 */
function wasFileModifiedAfter(filePath, findingDate) {
    try {
        const modifiedDay = statSync(filePath).mtime.toISOString().slice(0, 10);
        return modifiedDay > findingDate;
    }
    catch {
        return false; // File doesn't exist or can't stat — handled by citation validation
    }
}
273
285
  export function filterTrustedFindings(content, ttlDays) {
274
286
  return filterTrustedFindingsDetailed(content, { ttlDays }).content;
275
287
  }
@@ -282,6 +294,7 @@ export function filterTrustedFindingsDetailed(content, opts) {
282
294
  ...(options.decay || {}),
283
295
  };
284
296
  const highImpactFindingIds = options.highImpactFindingIds;
297
+ const impactCounts = options.impactCounts;
285
298
  const project = options.project;
286
299
  const lines = content.split("\n");
287
300
  const out = [];
@@ -356,11 +369,41 @@ export function filterTrustedFindingsDetailed(content, opts) {
356
369
  else {
357
370
  confidence = DEFAULT_UNDATED_CONFIDENCE;
358
371
  }
372
+ // Type-specific decay adjustment
373
+ const findingType = extractFindingType(line);
374
+ if (findingType) {
375
+ const typeConfig = FINDING_TYPE_DECAY[findingType];
376
+ if (typeConfig) {
377
+ // Override max age for this type
378
+ if (effectiveDate && typeConfig.maxAgeDays !== Infinity) {
379
+ const age = ageDaysForDate(effectiveDate);
380
+ if (age !== null && age > typeConfig.maxAgeDays) {
381
+ issues.push({ date: effectiveDate || "unknown", bullet: line, reason: "stale" });
382
+ if (citation)
383
+ i++;
384
+ continue;
385
+ }
386
+ }
387
+ // Apply type-specific decay multiplier
388
+ confidence *= typeConfig.decayMultiplier;
389
+ // Decisions and anti-patterns get a floor boost (never drop below 0.6)
390
+ if (typeConfig.maxAgeDays === Infinity) {
391
+ confidence = Math.max(confidence, 0.6);
392
+ }
393
+ }
394
+ }
359
395
  if (citation && !validateFindingCitation(citation)) {
360
396
  issues.push({ date: effectiveDate || "unknown", bullet: line, reason: "invalid_citation" });
361
397
  i++;
362
398
  continue;
363
399
  }
400
+ // If cited file was modified after finding was created, lower confidence
401
+ if (citation && effectiveDate && citation.file) {
402
+ const fileModifiedAfterFinding = wasFileModifiedAfter(citation.file, effectiveDate);
403
+ if (fileModifiedAfterFinding) {
404
+ confidence *= 0.7; // File changed since finding was written — may be stale
405
+ }
406
+ }
364
407
  if (!citation)
365
408
  confidence *= 0.8;
366
409
  const provenance = parseSourceComment(line)?.source ?? "unknown";
@@ -370,9 +413,29 @@ export function filterTrustedFindingsDetailed(content, opts) {
370
413
  confidence *= 0.9;
371
414
  if (project && highImpactFindingIds?.size) {
372
415
  const findingId = findingIdFromLine(line);
373
- if (highImpactFindingIds.has(findingId))
374
- confidence *= 1.15;
416
+ if (highImpactFindingIds.has(findingId)) {
417
+ // Get surface count for graduated boost
418
+ const surfaceCount = impactCounts?.get(findingId) ?? 3;
419
+ // Log-scaled: 3→≈1.16x, 10→≈1.33x, 30→1.4x (capped at 1.4x)
420
+ const boost = Math.min(1.4, 1 + 0.1 * Math.log2(Math.max(3, surfaceCount)));
421
+ confidence *= boost;
422
+ // Decay resistance: confirmed findings decay 3x slower
423
+ if (effectiveDate) {
424
+ const realAge = ageDaysForDate(effectiveDate);
425
+ if (realAge !== null) {
426
+ const slowedAge = Math.floor(realAge / 3);
427
+ confidence = Math.max(confidence, confidenceForAge(slowedAge, decay));
428
+ }
429
+ }
430
+ }
375
431
  }
432
+ const lifecycle = parseFindingLifecycle(line);
433
+ if (lifecycle?.status === "superseded")
434
+ confidence *= 0.25;
435
+ if (lifecycle?.status === "retracted")
436
+ confidence *= 0.1;
437
+ if (lifecycle?.status === "contradicted")
438
+ confidence *= 0.4;
376
439
  confidence = Math.max(0, Math.min(1, confidence));
377
440
  if (confidence < minConfidence) {
378
441
  issues.push({ date: effectiveDate || "unknown", bullet: line, reason: "stale" });
@@ -187,7 +187,7 @@ export function jaccardSimilarity(a, b) {
187
187
  const PROSE_ENTITY_RE = UNIVERSAL_TECH_TERMS_RE;
188
188
  const POSITIVE_RE = /\b(always|prefer|should|must|works|recommend|enable)\b/i;
189
189
  const NEGATIVE_RE = /\b(never|avoid|don't|do not|shouldn't|must not|broken|deprecated|disable)\b/i;
190
- // ── Dynamic entity extraction ─────────────────────────────────────────────────
190
+ // ── Dynamic fragment extraction ────────────────────────────────────────────────
191
191
  const ENTITY_CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
192
192
  // Patterns that suggest a token is a proper noun / tool name:
193
193
  // - CamelCase word (at least one interior uppercase): PhotonMappingEngine, GameKit
@@ -257,7 +257,7 @@ function extractProseEntities(text, dynamicEntities) {
257
257
  let m;
258
258
  while ((m = re.exec(text)) !== null)
259
259
  found.add(m[0].toLowerCase());
260
- // Match additional entity patterns (versions, env keys, file paths, error codes, dates)
260
+ // Match additional fragment patterns (versions, env keys, file paths, error codes, dates)
261
261
  for (const { re: pattern } of EXTRA_ENTITY_PATTERNS) {
262
262
  const pRe = new RegExp(pattern.source, pattern.flags);
263
263
  let pm;
@@ -265,7 +265,7 @@ function extractProseEntities(text, dynamicEntities) {
265
265
  found.add(pm[0].toLowerCase());
266
266
  }
267
267
  if (dynamicEntities) {
268
- // Also check whether any dynamic entity appears (case-insensitive word match)
268
+ // Also check whether any dynamic fragment appears (case-insensitive word match)
269
269
  for (const entity of dynamicEntities) {
270
270
  const escaped = entity.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
271
271
  if (new RegExp(`\\b${escaped}\\b`, "i").test(text)) {
@@ -11,7 +11,7 @@ import { isDuplicateFinding, scanForSecrets, normalizeObservationTags, resolveCo
11
11
  import { validateFindingsFormat, validateFinding } from "./content-validate.js";
12
12
  import { countActiveFindings, autoArchiveToReference } from "./content-archive.js";
13
13
  import { resolveAutoFindingTaskItem, resolveFindingTaskReference, resolveFindingSessionId, } from "./finding-context.js";
14
- import { buildLifecycleComments, parseFindingLifecycle, stripLifecycleComments, } from "./finding-lifecycle.js";
14
+ import { buildLifecycleComments, extractFindingType, parseFindingLifecycle, stripLifecycleComments, } from "./finding-lifecycle.js";
15
15
  import { METADATA_REGEX, } from "./content-metadata.js";
16
16
  /** Default cap for active findings before auto-archiving is triggered. */
17
17
  const DEFAULT_FINDINGS_CAP = 20;
@@ -107,6 +107,22 @@ function resolveFindingCitationInput(phrenPath, project, citationInput) {
107
107
  }
108
108
  return phrenOk(Object.keys(resolved).length > 0 ? resolved : undefined);
109
109
  }
110
/**
 * Heuristically classify free-text learning into a finding type.
 * Checks are ordered; the first matching category wins.
 * @param {string} text raw finding text
 * @returns {string|null} one of 'decision'|'bug'|'workaround'|'pattern'|'pitfall'|'context', or null
 */
export function autoDetectFindingType(text) {
    const lower = text.toLowerCase();
    // Colon-suffixed markers ("decision:", "bug:", …) are kept OUTSIDE the
    // trailing \b group: ":" and a following space are both non-word chars,
    // so "marker:\b" can never match "marker: something" — the previous
    // version silently failed to classify those common forms.
    if (/\b(we decided|chose .+ over|went with)\b|\bdecision:/.test(lower))
        return 'decision';
    if (/\b(bug in|found a bug|broken|crashes|fails when)\b|\bbug:/.test(lower))
        return 'bug';
    if (/\b(work around|temporary fix)\b|\b(workaround|hack):/.test(lower))
        return 'workaround';
    if (/\b(always .+ before|never .+ without|best practice)\b|\bpattern:/.test(lower))
        return 'pattern';
    if (/\b(watch out|careful with)\b|\b(pitfall|gotcha|trap):/.test(lower))
        return 'pitfall';
    if (/\b(currently|as of|right now|at the moment)\b|\bobservation:/.test(lower))
        return 'context';
    return null;
}
110
126
  function prepareFinding(learning, project, fullHistory, extraAnnotations, citationInput, source, nowIso, inferredRepo, headCommit, phrenPath) {
111
127
  const secretType = scanForSecrets(learning);
112
128
  if (secretType) {
@@ -114,10 +130,17 @@ function prepareFinding(learning, project, fullHistory, extraAnnotations, citati
114
130
  }
115
131
  const today = (nowIso ?? new Date().toISOString()).slice(0, 10);
116
132
  const { text: tagNormalized, warning: tagWarning } = normalizeObservationTags(learning);
117
- const normalizedLearning = resolveCoref(tagNormalized, {
133
+ let normalizedLearning = resolveCoref(tagNormalized, {
118
134
  project,
119
135
  file: citationInput?.file,
120
136
  });
137
+ const existingType = extractFindingType('- ' + normalizedLearning);
138
+ if (!existingType) {
139
+ const detected = autoDetectFindingType(normalizedLearning);
140
+ if (detected) {
141
+ normalizedLearning = `[${detected}] ${normalizedLearning}`;
142
+ }
143
+ }
121
144
  const fid = crypto.randomBytes(4).toString("hex");
122
145
  const fidComment = `<!-- fid:${fid} -->`;
123
146
  const createdComment = `<!-- created: ${today} -->`;
@@ -204,20 +227,20 @@ export function upsertCanonical(phrenPath, project, memory) {
204
227
  const resolvedDir = safeProjectPath(phrenPath, project);
205
228
  if (!resolvedDir || !fs.existsSync(resolvedDir))
206
229
  return phrenErr(`Project "${project}" not found in phren.`, PhrenError.PROJECT_NOT_FOUND);
207
- const canonicalPath = path.join(resolvedDir, "CANONICAL_MEMORIES.md");
230
+ const canonicalPath = path.join(resolvedDir, "truths.md");
208
231
  const today = new Date().toISOString().slice(0, 10);
209
232
  const bullet = memory.startsWith("- ") ? memory : `- ${memory}`;
210
233
  withFileLock(canonicalPath, () => {
211
234
  if (!fs.existsSync(canonicalPath)) {
212
- fs.writeFileSync(canonicalPath, `# ${project} Canonical Memories\n\n## Pinned\n\n${bullet} _(pinned ${today})_\n`);
235
+ fs.writeFileSync(canonicalPath, `# ${project} Truths\n\n## Truths\n\n${bullet} _(added ${today})_\n`);
213
236
  }
214
237
  else {
215
238
  const existing = fs.readFileSync(canonicalPath, "utf8");
216
- const line = `${bullet} _(pinned ${today})_`;
239
+ const line = `${bullet} _(added ${today})_`;
217
240
  if (!existing.includes(bullet)) {
218
- const updated = existing.includes("## Pinned")
219
- ? existing.replace("## Pinned", `## Pinned\n\n${line}`)
220
- : `${existing.trimEnd()}\n\n## Pinned\n\n${line}\n`;
241
+ const updated = existing.includes("## Truths")
242
+ ? existing.replace("## Truths", `## Truths\n\n${line}`)
243
+ : `${existing.trimEnd()}\n\n## Truths\n\n${line}\n`;
221
244
  const content = updated.endsWith("\n") ? updated : updated + "\n";
222
245
  const tmpPath = canonicalPath + `.tmp-${crypto.randomUUID()}`;
223
246
  fs.writeFileSync(tmpPath, content);
@@ -226,7 +249,7 @@ export function upsertCanonical(phrenPath, project, memory) {
226
249
  }
227
250
  });
228
251
  appendAuditLog(phrenPath, "pin_memory", `project=${project} memory=${JSON.stringify(memory)}`);
229
- return phrenOk(`Pinned canonical memory in ${project}.`);
252
+ return phrenOk(`Truth saved in ${project}.`);
230
253
  }
231
254
  export function addFindingToFile(phrenPath, project, learning, citationInput, opts) {
232
255
  const findingError = validateFinding(learning);
@@ -54,6 +54,46 @@ function normalizeFindingGroupKey(item) {
54
54
  function findingTimelineDate(item) {
55
55
  return item.status_updated || item.date || "0000-00-00";
56
56
  }
57
+ function collectFindingBulletLines(lines) {
58
+ const bulletLines = [];
59
+ let inArchiveBlock = false;
60
+ for (let i = 0; i < lines.length; i++) {
61
+ const line = lines[i];
62
+ if (isArchiveStart(line)) {
63
+ inArchiveBlock = true;
64
+ continue;
65
+ }
66
+ if (isArchiveEnd(line)) {
67
+ inArchiveBlock = false;
68
+ continue;
69
+ }
70
+ if (!line.startsWith("- "))
71
+ continue;
72
+ bulletLines.push({ line, i, archived: inArchiveBlock });
73
+ }
74
+ return bulletLines;
75
+ }
76
+ function findMatchingFindingBullet(bulletLines, needle, match) {
77
+ const fidNeedle = needle.replace(/^fid:/, "");
78
+ const fidMatch = /^[a-z0-9]{8}$/.test(fidNeedle)
79
+ ? bulletLines.filter(({ line }) => new RegExp(`<!--\\s*fid:${fidNeedle}\\s*-->`).test(line))
80
+ : [];
81
+ const exactMatches = bulletLines.filter(({ line }) => line.replace(/^-\s+/, "").replace(/<!--.*?-->/g, "").trim().toLowerCase() === needle);
82
+ const partialMatches = bulletLines.filter(({ line }) => line.toLowerCase().includes(needle));
83
+ if (fidMatch.length === 1)
84
+ return { kind: "found", idx: fidMatch[0].i };
85
+ if (exactMatches.length === 1)
86
+ return { kind: "found", idx: exactMatches[0].i };
87
+ if (exactMatches.length > 1) {
88
+ return { kind: "ambiguous", error: `"${match}" is ambiguous (${exactMatches.length} exact matches). Use a more specific phrase.` };
89
+ }
90
+ if (partialMatches.length === 1)
91
+ return { kind: "found", idx: partialMatches[0].i };
92
+ if (partialMatches.length > 1) {
93
+ return { kind: "ambiguous", error: `"${match}" is ambiguous (${partialMatches.length} partial matches). Use a more specific phrase.` };
94
+ }
95
+ return { kind: "not_found" };
96
+ }
57
97
  export function readFindings(phrenPath, project, opts = {}) {
58
98
  const ensured = ensureProject(phrenPath, project);
59
99
  if (!ensured.ok)
@@ -210,35 +250,19 @@ export function removeFinding(phrenPath, project, match) {
210
250
  return withSafeLock(filePath, () => {
211
251
  const lines = fs.readFileSync(filePath, "utf8").split("\n");
212
252
  const needle = match.trim().toLowerCase();
213
- const bulletLines = lines.map((line, i) => ({ line, i })).filter(({ line }) => line.startsWith("- "));
214
- // 0) Stable finding ID match (fid:XXXXXXXX or just the 8-char hex)
215
- const fidNeedle = needle.replace(/^fid:/, "");
216
- const fidMatch = /^[a-z0-9]{8}$/.test(fidNeedle)
217
- ? bulletLines.filter(({ line }) => new RegExp(`<!--\\s*fid:${fidNeedle}\\s*-->`).test(line))
218
- : [];
219
- // 1) Exact text match (strip bullet prefix + metadata for comparison)
220
- const exactMatches = bulletLines.filter(({ line }) => line.replace(/^-\s+/, "").replace(/<!--.*?-->/g, "").trim().toLowerCase() === needle);
221
- // 2) Unique partial substring match
222
- const partialMatches = bulletLines.filter(({ line }) => line.toLowerCase().includes(needle));
223
- let idx;
224
- if (fidMatch.length === 1) {
225
- idx = fidMatch[0].i;
226
- }
227
- else if (exactMatches.length === 1) {
228
- idx = exactMatches[0].i;
229
- }
230
- else if (exactMatches.length > 1) {
231
- return phrenErr(`"${match}" is ambiguous (${exactMatches.length} exact matches). Use a more specific phrase.`, PhrenError.AMBIGUOUS_MATCH);
232
- }
233
- else if (partialMatches.length === 1) {
234
- idx = partialMatches[0].i;
235
- }
236
- else if (partialMatches.length > 1) {
237
- return phrenErr(`"${match}" is ambiguous (${partialMatches.length} partial matches). Use a more specific phrase.`, PhrenError.AMBIGUOUS_MATCH);
253
+ const bulletLines = collectFindingBulletLines(lines);
254
+ const activeMatch = findMatchingFindingBullet(bulletLines.filter(({ archived }) => !archived), needle, match);
255
+ if (activeMatch.kind === "ambiguous") {
256
+ return phrenErr(activeMatch.error, PhrenError.AMBIGUOUS_MATCH);
238
257
  }
239
- else {
258
+ if (activeMatch.kind === "not_found") {
259
+ const archivedMatch = findMatchingFindingBullet(bulletLines.filter(({ archived }) => archived), needle, match);
260
+ if (archivedMatch.kind === "ambiguous" || archivedMatch.kind === "found") {
261
+ return phrenErr(`Finding "${match}" is archived and read-only. Restore or re-add it before mutating history.`, PhrenError.VALIDATION_ERROR);
262
+ }
240
263
  return phrenErr(`No finding matching "${match}" in project "${project}". Try a different search term or check :findings view.`, PhrenError.NOT_FOUND);
241
264
  }
265
+ const idx = activeMatch.idx;
242
266
  const removeCount = isCitationLine(lines[idx + 1] || "") ? 2 : 1;
243
267
  const matched = lines[idx];
244
268
  lines.splice(idx, removeCount);
@@ -260,33 +284,19 @@ export function editFinding(phrenPath, project, oldText, newText) {
260
284
  return withSafeLock(findingsPath, () => {
261
285
  const lines = fs.readFileSync(findingsPath, "utf8").split("\n");
262
286
  const needle = oldText.trim().toLowerCase();
263
- const bulletLines = lines.map((line, i) => ({ line, i })).filter(({ line }) => line.startsWith("- "));
264
- // Stable finding ID match
265
- const fidNeedle = needle.replace(/^fid:/, "");
266
- const fidMatch = /^[a-z0-9]{8}$/.test(fidNeedle)
267
- ? bulletLines.filter(({ line }) => new RegExp(`<!--\\s*fid:${fidNeedle}\\s*-->`).test(line))
268
- : [];
269
- const exactMatches = bulletLines.filter(({ line }) => line.replace(/^-\s+/, "").replace(/<!--.*?-->/g, "").trim().toLowerCase() === needle);
270
- const partialMatches = bulletLines.filter(({ line }) => line.toLowerCase().includes(needle));
271
- let idx;
272
- if (fidMatch.length === 1) {
273
- idx = fidMatch[0].i;
274
- }
275
- else if (exactMatches.length === 1) {
276
- idx = exactMatches[0].i;
277
- }
278
- else if (exactMatches.length > 1) {
279
- return phrenErr(`"${oldText}" is ambiguous (${exactMatches.length} exact matches). Use a more specific phrase.`, PhrenError.AMBIGUOUS_MATCH);
287
+ const bulletLines = collectFindingBulletLines(lines);
288
+ const activeMatch = findMatchingFindingBullet(bulletLines.filter(({ archived }) => !archived), needle, oldText);
289
+ if (activeMatch.kind === "ambiguous") {
290
+ return phrenErr(activeMatch.error, PhrenError.AMBIGUOUS_MATCH);
280
291
  }
281
- else if (partialMatches.length === 1) {
282
- idx = partialMatches[0].i;
283
- }
284
- else if (partialMatches.length > 1) {
285
- return phrenErr(`"${oldText}" is ambiguous (${partialMatches.length} partial matches). Use a more specific phrase.`, PhrenError.AMBIGUOUS_MATCH);
286
- }
287
- else {
292
+ if (activeMatch.kind === "not_found") {
293
+ const archivedMatch = findMatchingFindingBullet(bulletLines.filter(({ archived }) => archived), needle, oldText);
294
+ if (archivedMatch.kind === "ambiguous" || archivedMatch.kind === "found") {
295
+ return phrenErr(`Finding "${oldText}" is archived and read-only. Restore or re-add it before mutating history.`, PhrenError.VALIDATION_ERROR);
296
+ }
288
297
  return phrenErr(`No finding matching "${oldText}" in project "${project}".`, PhrenError.NOT_FOUND);
289
298
  }
299
+ const idx = activeMatch.idx;
290
300
  // Preserve existing metadata comment (fid, citations, etc.)
291
301
  const existing = lines[idx];
292
302
  const metaMatch = existing.match(/(<!--.*?-->)/g);
@@ -31,7 +31,7 @@ Usage:
31
31
  phren search <query> [--project <n>] [--type <t>] [--limit <n>]
32
32
  Search what phren remembers
33
33
  phren add-finding <project> "..." Tell phren what you learned
34
- phren pin <project> "..." Pin a canonical memory
34
+ phren pin <project> "..." Save a truth
35
35
  phren tasks Cross-project task view
36
36
  phren skill-list List installed skills
37
37
  phren doctor [--fix] [--check-data] [--agents]
@@ -144,6 +144,29 @@ export function getHighImpactFindings(phrenPath, minSurfaceCount = 3) {
144
144
  };
145
145
  return new Set(ids);
146
146
  }
147
+ export function getImpactSurfaceCounts(phrenPath, minSurfaces = 1) {
148
+ const file = impactLogFile(phrenPath);
149
+ if (!fs.existsSync(file))
150
+ return new Map();
151
+ const lines = fs.readFileSync(file, "utf8").split("\n").filter(Boolean);
152
+ const counts = new Map();
153
+ for (const line of lines) {
154
+ try {
155
+ const entry = JSON.parse(line);
156
+ if (entry.findingId) {
157
+ counts.set(entry.findingId, (counts.get(entry.findingId) ?? 0) + 1);
158
+ }
159
+ }
160
+ catch { }
161
+ }
162
+ // Filter by minimum
163
+ const filtered = new Map();
164
+ for (const [id, count] of counts) {
165
+ if (count >= minSurfaces)
166
+ filtered.set(id, count);
167
+ }
168
+ return filtered;
169
+ }
147
170
  export function markImpactEntriesCompletedForSession(phrenPath, sessionId, project) {
148
171
  if (!sessionId)
149
172
  return 0;
@@ -6,6 +6,24 @@ const LIFECYCLE_PREFIX = "phren";
6
6
  import { withFileLock } from "./governance-locks.js";
7
7
  import { isValidProjectName, safeProjectPath } from "./utils.js";
8
8
  import { parseCreatedDate as parseCreatedDateMeta, parseStatusField, parseStatus, parseSupersession, parseContradiction, parseFindingId as parseFindingIdMeta, stripLifecycleMetadata, stripRelationMetadata, } from "./content-metadata.js";
9
/**
 * Per-type decay policy for findings: `maxAgeDays` caps how old a finding of
 * that type may be before it is considered expired (Infinity = never expires),
 * and `decayMultiplier` scales its confidence.
 */
export const FINDING_TYPE_DECAY = {
    'pattern': { maxAgeDays: 365, decayMultiplier: 1.0 }, // slow decay, long-lived
    'decision': { maxAgeDays: Infinity, decayMultiplier: 1.0 }, // never expires
    'pitfall': { maxAgeDays: 365, decayMultiplier: 1.0 }, // slow decay
    'anti-pattern': { maxAgeDays: Infinity, decayMultiplier: 1.0 }, // never expires
    'observation': { maxAgeDays: 14, decayMultiplier: 0.7 }, // fast decay, short-lived
    'workaround': { maxAgeDays: 60, decayMultiplier: 0.85 }, // medium decay
    'bug': { maxAgeDays: 30, decayMultiplier: 0.8 }, // medium-fast decay
    'tooling': { maxAgeDays: 180, decayMultiplier: 0.95 }, // medium-slow decay
    'context': { maxAgeDays: 30, decayMultiplier: 0.75 }, // fast decay (contextual facts)
};
/**
 * Extract a recognized finding-type tag (e.g. "[bug]", "[anti-pattern]") from
 * a finding line. Only tags present in FINDING_TYPE_DECAY count; anything else
 * (including citation-like brackets) yields null.
 * @param {string} line a finding bullet line
 * @returns {string|null} lowercased type tag, or null when absent/unrecognized
 */
export function extractFindingType(line) {
    const bracketed = /\[(\w[\w-]*)\]/.exec(line);
    if (bracketed === null)
        return null;
    const candidate = bracketed[1].toLowerCase();
    return candidate in FINDING_TYPE_DECAY ? candidate : null;
}
9
27
  export const FINDING_LIFECYCLE_STATUSES = [
10
28
  "active",
11
29
  "superseded",
@@ -387,7 +387,7 @@ export function appendReviewQueue(phrenPath, project, section, entries) {
387
387
  const resolvedDir = safeProjectPath(phrenPath, project);
388
388
  if (!resolvedDir || !fs.existsSync(resolvedDir))
389
389
  return phrenErr(`Project "${project}" not found in phren.`, PhrenError.PROJECT_NOT_FOUND);
390
- const queuePath = path.join(resolvedDir, "MEMORY_QUEUE.md");
390
+ const queuePath = path.join(resolvedDir, "review.md");
391
391
  const today = new Date().toISOString().slice(0, 10);
392
392
  const normalized = [];
393
393
  for (const entry of entries) {
@@ -270,7 +270,7 @@ export function recordInjection(phrenPath, key, sessionId) {
270
270
  debugLog(`Usage log rotation failed: ${errorMessage(err)}`);
271
271
  }
272
272
  }
273
- export function recordFeedback(phrenPath, key, feedback) {
273
+ export function recordFeedback(phrenPath, key, feedback, sessionId) {
274
274
  const delta = {};
275
275
  if (feedback === "helpful")
276
276
  delta.helpful = 1;
@@ -280,6 +280,14 @@ export function recordFeedback(phrenPath, key, feedback) {
280
280
  delta.regressionPenalty = 1;
281
281
  appendScoreJournal(phrenPath, key, delta);
282
282
  appendAuditLog(phrenPath, "memory_feedback", `key=${key} feedback=${feedback}`);
283
+ // When feedback is "helpful", mark correlated query entries for future boost
284
+ if (feedback === "helpful" && sessionId) {
285
+ import("./query-correlation.js").then(({ markCorrelationsHelpful: markHelpful }) => {
286
+ const colonIdx = key.indexOf(":");
287
+ const docKey = colonIdx >= 0 ? key.slice(0, colonIdx) : key;
288
+ markHelpful(phrenPath, sessionId, docKey);
289
+ }).catch(() => { });
290
+ }
283
291
  }
284
292
  // Module-level cache for the journal aggregation used by getQualityMultiplier.
285
293
  // Invalidated whenever flushEntryScores runs (at which point the journal is cleared).
@@ -34,7 +34,7 @@ export function updateFileChecksums(phrenPath, profileName) {
34
34
  const tracked = [];
35
35
  const dirs = getProjectDirs(phrenPath, profileName);
36
36
  for (const dir of dirs) {
37
- for (const name of ["FINDINGS.md", ...TASK_FILE_ALIASES, "CANONICAL_MEMORIES.md"]) {
37
+ for (const name of ["FINDINGS.md", ...TASK_FILE_ALIASES, "truths.md"]) {
38
38
  const full = path.join(dir, name);
39
39
  if (!fs.existsSync(full))
40
40
  continue;
@@ -533,7 +533,7 @@ export async function runDoctor(phrenPath, fix = false, checkData = false) {
533
533
  const projectName = path.basename(projectDir);
534
534
  if (projectName === "global")
535
535
  continue;
536
- for (const mdFile of ["FINDINGS.md", ...TASK_FILE_ALIASES, "MEMORY_QUEUE.md", "CLAUDE.md", "REFERENCE.md"]) {
536
+ for (const mdFile of ["FINDINGS.md", ...TASK_FILE_ALIASES, "review.md", "CLAUDE.md", "REFERENCE.md"]) {
537
537
  const filePath = path.join(projectDir, mdFile);
538
538
  if (!fs.existsSync(filePath))
539
539
  continue;
@@ -10,10 +10,10 @@ export function register(server, ctx) {
10
10
  const { phrenPath, withWriteQueue, updateFileInIndex } = ctx;
11
11
  server.registerTool("pin_memory", {
12
12
  title: "◆ phren · pin memory",
13
- description: "Promote an important memory into CANONICAL_MEMORIES.md so retrieval prioritizes it.",
13
+ description: "Write a truth — a high-confidence, always-inject entry in truths.md that never decays.",
14
14
  inputSchema: z.object({
15
15
  project: z.string().describe("Project name."),
16
- memory: z.string().describe("Canonical memory text to pin."),
16
+ memory: z.string().describe("Truth text."),
17
17
  }),
18
18
  }, async ({ project, memory }) => {
19
19
  if (!isValidProjectName(project))
@@ -22,8 +22,8 @@ export function register(server, ctx) {
22
22
  const result = upsertCanonical(phrenPath, project, memory);
23
23
  if (!result.ok)
24
24
  return mcpResponse({ ok: false, error: result.error });
25
- // Update FTS index so newly pinned memory is immediately searchable
26
- const canonicalPath = path.join(phrenPath, project, "CANONICAL_MEMORIES.md");
25
+ // Update FTS index so newly added truth is immediately searchable
26
+ const canonicalPath = path.join(phrenPath, project, "truths.md");
27
27
  updateFileInIndex(canonicalPath);
28
28
  return mcpResponse({ ok: true, message: result.data, data: { project, memory } });
29
29
  });
@@ -294,7 +294,7 @@ export function register(server, ctx) {
294
294
  // ── get_review_queue ─────────────────────────────────────────────────────
295
295
  server.registerTool("get_review_queue", {
296
296
  title: "◆ phren · get review queue",
297
- description: "List all items in a project's memory review queue (MEMORY_QUEUE.md), or across all projects when omitted. " +
297
+ description: "List all items in a project's review queue (review.md), or across all projects when omitted. " +
298
298
  "Returns items with their id, section (Review/Stale/Conflicts), date, text, confidence, and risky flag.",
299
299
  inputSchema: z.object({
300
300
  project: z.string().optional().describe("Project name. Omit to read the review queue across all projects in the active profile."),