@phren/cli 0.0.9 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -227,6 +227,167 @@ export async function handleConsolidateMemories(args = []) {
227
227
  return;
228
228
  console.log(`Updated backups (${backups.length}): ${backups.join(", ")}`);
229
229
  }
230
+ export async function handleGcMaintain(args = []) {
231
+ const dryRun = args.includes("--dry-run");
232
+ const phrenPath = getPhrenPath();
233
+ const { execSync } = await import("child_process");
234
+ const report = {
235
+ gitGcRan: false,
236
+ commitsSquashed: 0,
237
+ sessionsRemoved: 0,
238
+ runtimeLogsRemoved: 0,
239
+ };
240
+ // 1. Run git gc --aggressive on the ~/.phren repo
241
+ if (dryRun) {
242
+ console.log("[dry-run] Would run: git gc --aggressive");
243
+ }
244
+ else {
245
+ try {
246
+ execSync("git gc --aggressive --quiet", { cwd: phrenPath, stdio: "pipe" });
247
+ report.gitGcRan = true;
248
+ console.log("git gc --aggressive: done");
249
+ }
250
+ catch (err) {
251
+ console.error(`git gc failed: ${errorMessage(err)}`);
252
+ }
253
+ }
254
+ // 2. Squash old auto-save commits (older than 7 days) into weekly summaries
255
+ const sevenDaysAgo = new Date(Date.now() - 7 * 86400000).toISOString().slice(0, 10);
256
+ let oldCommits = [];
257
+ try {
258
+ const raw = execSync(`git log --oneline --before="${sevenDaysAgo}" --format="%H %s"`, { cwd: phrenPath, encoding: "utf8" }).trim();
259
+ if (raw) {
260
+ oldCommits = raw.split("\n").filter((l) => l.includes("auto-save:") || l.includes("[auto]"));
261
+ }
262
+ }
263
+ catch {
264
+ // Not a git repo or no commits — skip silently
265
+ }
266
+ if (oldCommits.length === 0) {
267
+ console.log("Commit squash: no old auto-save commits to squash.");
268
+ }
269
+ else if (dryRun) {
270
+ console.log(`[dry-run] Would squash ${oldCommits.length} auto-save commits older than 7 days into weekly summaries.`);
271
+ report.commitsSquashed = oldCommits.length;
272
+ }
273
+ else {
274
+ // Group by ISO week (YYYY-Www) based on commit timestamp
275
+ const commitsByWeek = new Map();
276
+ for (const line of oldCommits) {
277
+ const hash = line.split(" ")[0];
278
+ try {
279
+ const dateStr = execSync(`git log -1 --format="%ci" ${hash}`, { cwd: phrenPath, encoding: "utf8" }).trim();
280
+ const date = new Date(dateStr);
281
+ const weekStart = new Date(date);
282
+ weekStart.setDate(date.getDate() - date.getDay()); // start of week (Sunday)
283
+ const weekKey = weekStart.toISOString().slice(0, 10);
284
+ if (!commitsByWeek.has(weekKey))
285
+ commitsByWeek.set(weekKey, []);
286
+ commitsByWeek.get(weekKey).push(hash);
287
+ }
288
+ catch {
289
+ // Skip commits we can't resolve
290
+ }
291
+ }
292
+ // For each week with multiple commits, soft-reset to oldest and amend into a summary
293
+ for (const [weekKey, hashes] of commitsByWeek.entries()) {
294
+ if (hashes.length < 2)
295
+ continue;
296
+ try {
297
+ const oldest = hashes[hashes.length - 1];
298
+ const newest = hashes[0];
299
+ // Use git rebase --onto to squash: squash all into the oldest parent
300
+ const parentOfOldest = execSync(`git rev-parse ${oldest}^`, { cwd: phrenPath, encoding: "utf8" }).trim();
301
+ // Build rebase script via env variable to squash all but first to "squash"
302
+ const rebaseScript = hashes
303
+ .map((h, i) => `${i === hashes.length - 1 ? "pick" : "squash"} ${h} auto-save`)
304
+ .reverse()
305
+ .join("\n");
306
+ const scriptPath = path.join(phrenPath, ".runtime", `gc-rebase-${weekKey}.sh`);
307
+ fs.writeFileSync(scriptPath, rebaseScript);
308
+ // Use GIT_SEQUENCE_EDITOR to feed our script
309
+ execSync(`GIT_SEQUENCE_EDITOR="cat ${scriptPath} >" git rebase -i ${parentOfOldest}`, { cwd: phrenPath, stdio: "pipe" });
310
+ fs.unlinkSync(scriptPath);
311
+ report.commitsSquashed += hashes.length - 1;
312
+ console.log(`Squashed ${hashes.length} auto-save commits for week of ${weekKey} (${newest.slice(0, 7)}..${oldest.slice(0, 7)}).`);
313
+ }
314
+ catch {
315
+ // Squashing is best-effort — log and continue
316
+ console.warn(` Could not squash auto-save commits for week ${weekKey} (possibly non-linear history). Skipping.`);
317
+ }
318
+ }
319
+ if (report.commitsSquashed === 0) {
320
+ console.log("Commit squash: all old auto-save weeks have only one commit, nothing to squash.");
321
+ }
322
+ }
323
+ // 3. Prune stale session markers from ~/.phren/.sessions/ older than 30 days
324
+ const sessionsDir = path.join(phrenPath, ".sessions");
325
+ const thirtyDaysAgo = Date.now() - 30 * 86400000;
326
+ if (fs.existsSync(sessionsDir)) {
327
+ const entries = fs.readdirSync(sessionsDir);
328
+ for (const entry of entries) {
329
+ const fullPath = path.join(sessionsDir, entry);
330
+ try {
331
+ const stat = fs.statSync(fullPath);
332
+ if (stat.mtimeMs < thirtyDaysAgo) {
333
+ if (dryRun) {
334
+ console.log(`[dry-run] Would remove session marker: .sessions/${entry}`);
335
+ }
336
+ else {
337
+ fs.unlinkSync(fullPath);
338
+ }
339
+ report.sessionsRemoved++;
340
+ }
341
+ }
342
+ catch {
343
+ // Skip unreadable entries
344
+ }
345
+ }
346
+ }
347
+ const sessionsVerb = dryRun ? "Would remove" : "Removed";
348
+ console.log(`${sessionsVerb} ${report.sessionsRemoved} stale session marker(s) from .sessions/`);
349
+ // 4. Trim runtime logs from ~/.phren/.runtime/ older than 30 days
350
+ const runtimeDir = path.join(phrenPath, ".runtime");
351
+ const logExtensions = new Set([".log", ".jsonl", ".json"]);
352
+ if (fs.existsSync(runtimeDir)) {
353
+ const entries = fs.readdirSync(runtimeDir);
354
+ for (const entry of entries) {
355
+ const ext = path.extname(entry);
356
+ if (!logExtensions.has(ext))
357
+ continue;
358
+ // Never trim the active audit log or telemetry config
359
+ if (entry === "audit.log" || entry === "telemetry.json")
360
+ continue;
361
+ const fullPath = path.join(runtimeDir, entry);
362
+ try {
363
+ const stat = fs.statSync(fullPath);
364
+ if (stat.mtimeMs < thirtyDaysAgo) {
365
+ if (dryRun) {
366
+ console.log(`[dry-run] Would remove runtime log: .runtime/${entry}`);
367
+ }
368
+ else {
369
+ fs.unlinkSync(fullPath);
370
+ }
371
+ report.runtimeLogsRemoved++;
372
+ }
373
+ }
374
+ catch {
375
+ // Skip unreadable entries
376
+ }
377
+ }
378
+ }
379
+ const logsVerb = dryRun ? "Would remove" : "Removed";
380
+ console.log(`${logsVerb} ${report.runtimeLogsRemoved} stale runtime log(s) from .runtime/`);
381
+ // 5. Summary
382
+ if (!dryRun) {
383
+ appendAuditLog(phrenPath, "maintain_gc", `gitGc=${report.gitGcRan} squashed=${report.commitsSquashed} sessions=${report.sessionsRemoved} logs=${report.runtimeLogsRemoved}`);
384
+ }
385
+ console.log(`\nGC complete:${dryRun ? " (dry-run)" : ""}` +
386
+ ` git_gc=${report.gitGcRan}` +
387
+ ` commits_squashed=${report.commitsSquashed}` +
388
+ ` sessions_pruned=${report.sessionsRemoved}` +
389
+ ` logs_pruned=${report.runtimeLogsRemoved}`);
390
+ }
230
391
  // ── Maintain router ──────────────────────────────────────────────────────────
231
392
  export async function handleMaintain(args) {
232
393
  const sub = args[0];
@@ -245,6 +406,8 @@ export async function handleMaintain(args) {
245
406
  return handleExtractMemories(rest[0]);
246
407
  case "restore":
247
408
  return handleRestoreBackup(rest);
409
+ case "gc":
410
+ return handleGcMaintain(rest);
248
411
  default:
249
412
  console.log(`phren maintain - memory maintenance and governance
250
413
 
@@ -258,7 +421,9 @@ Subcommands:
258
421
  Deduplicate FINDINGS.md bullets. Run after a burst of work
259
422
  when findings feel repetitive, or monthly to keep things clean.
260
423
  phren maintain extract [project] Mine git/GitHub signals into memory candidates
261
- phren maintain restore [project] List and restore from .bak files`);
424
+ phren maintain restore [project] List and restore from .bak files
425
+ phren maintain gc [--dry-run] Garbage-collect the ~/.phren repo: git gc, squash old
426
+ auto-save commits, prune stale session markers and runtime logs`);
262
427
  if (sub) {
263
428
  console.error(`\nUnknown maintain subcommand: "${sub}"`);
264
429
  process.exit(1);
@@ -345,7 +345,7 @@ export async function handleHookPrompt() {
345
345
  parts.push(`Findings ready for consolidation:`);
346
346
  parts.push(notices.join("\n"));
347
347
  parts.push(`Run phren-consolidate when ready.`);
348
- parts.push(`</phren-notice>`);
348
+ parts.push(`</phren-notice>`);
349
349
  }
350
350
  if (noticeFile) {
351
351
  try {
@@ -367,7 +367,7 @@ export async function handleHookPrompt() {
367
367
  }
368
368
  catch (err) {
369
369
  const msg = errorMessage(err);
370
- process.stdout.write(`\n<phren-error>phren hook failed: ${msg}. Check ~/.phren/.runtime/debug.log for details.</phren-error>\n`);
370
+ process.stdout.write(`\n<phren-error>phren hook failed: ${msg}. Check ~/.phren/.runtime/debug.log for details.</phren-error>\n`);
371
371
  debugLog(`hook-prompt error: ${msg}`);
372
372
  process.exit(0);
373
373
  }
@@ -5,7 +5,7 @@ import { debugLog, EXEC_TIMEOUT_MS, EXEC_TIMEOUT_QUICK_MS } from "./shared.js";
5
5
  import { errorMessage, runGitOrThrow } from "./utils.js";
6
6
  import { findingIdFromLine } from "./finding-impact.js";
7
7
  import { METADATA_REGEX, isArchiveStart, isArchiveEnd } from "./content-metadata.js";
8
- import { FINDING_TYPE_DECAY, extractFindingType, parseFindingLifecycle } from "./finding-lifecycle.js";
8
+ import { FINDING_TYPE_DECAY, extractFindingType } from "./finding-lifecycle.js";
9
9
  export const FINDING_PROVENANCE_SOURCES = [
10
10
  "human",
11
11
  "agent",
@@ -294,7 +294,6 @@ export function filterTrustedFindingsDetailed(content, opts) {
294
294
  ...(options.decay || {}),
295
295
  };
296
296
  const highImpactFindingIds = options.highImpactFindingIds;
297
- const impactCounts = options.impactCounts;
298
297
  const project = options.project;
299
298
  const lines = content.split("\n");
300
299
  const out = [];
@@ -413,29 +412,20 @@ export function filterTrustedFindingsDetailed(content, opts) {
413
412
  confidence *= 0.9;
414
413
  if (project && highImpactFindingIds?.size) {
415
414
  const findingId = findingIdFromLine(line);
416
- if (highImpactFindingIds.has(findingId)) {
417
- // Get surface count for graduated boost
418
- const surfaceCount = impactCounts?.get(findingId) ?? 3;
419
- // Log-scaled: 3→1.15x, 10→1.28x, 30→1.38x, capped at 1.4x
420
- const boost = Math.min(1.4, 1 + 0.1 * Math.log2(Math.max(3, surfaceCount)));
421
- confidence *= boost;
422
- // Decay resistance: confirmed findings decay 3x slower
423
- if (effectiveDate) {
424
- const realAge = ageDaysForDate(effectiveDate);
425
- if (realAge !== null) {
426
- const slowedAge = Math.floor(realAge / 3);
427
- confidence = Math.max(confidence, confidenceForAge(slowedAge, decay));
428
- }
415
+ if (highImpactFindingIds.has(findingId))
416
+ confidence *= 1.15;
417
+ }
418
+ // Confirmed findings decay 3x slower — recompute confidence with reduced age
419
+ {
420
+ const findingId = findingIdFromLine(line);
421
+ if (findingId && highImpactFindingIds?.has(findingId) && effectiveDate) {
422
+ const realAge = ageDaysForDate(effectiveDate);
423
+ if (realAge !== null) {
424
+ const slowedAge = Math.floor(realAge / 3);
425
+ confidence = Math.max(confidence, confidenceForAge(slowedAge, decay));
429
426
  }
430
427
  }
431
428
  }
432
- const lifecycle = parseFindingLifecycle(line);
433
- if (lifecycle?.status === "superseded")
434
- confidence *= 0.25;
435
- if (lifecycle?.status === "retracted")
436
- confidence *= 0.1;
437
- if (lifecycle?.status === "contradicted")
438
- confidence *= 0.4;
439
429
  confidence = Math.max(0, Math.min(1, confidence));
440
430
  if (confidence < minConfidence) {
441
431
  issues.push({ date: effectiveDate || "unknown", bullet: line, reason: "stale" });
@@ -8,7 +8,7 @@ import { isValidProjectName, queueFilePath, safeProjectPath, errorMessage } from
8
8
  import { parseCitationComment, parseSourceComment, } from "./content-citation.js";
9
9
  import { parseFindingLifecycle, } from "./finding-lifecycle.js";
10
10
  import { METADATA_REGEX, isCitationLine, isArchiveStart, isArchiveEnd, parseFindingId, parseAllContradictions, stripComments, } from "./content-metadata.js";
11
- export { readTasks, readTasksAcrossProjects, resolveTaskItem, addTask, addTasks, completeTasks, completeTask, removeTask, updateTask, linkTaskIssue, pinTask, unpinTask, workNextTask, tidyDoneTasks, taskMarkdown, appendChildFinding, promoteTask, TASKS_FILENAME, TASK_FILE_ALIASES, canonicalTaskFilePath, resolveTaskFilePath, isTaskFileName, } from "./data-tasks.js";
11
+ export { readTasks, readTasksAcrossProjects, resolveTaskItem, addTask, addTasks, completeTasks, completeTask, removeTask, removeTasks, updateTask, linkTaskIssue, pinTask, unpinTask, workNextTask, tidyDoneTasks, taskMarkdown, appendChildFinding, promoteTask, TASKS_FILENAME, TASK_FILE_ALIASES, canonicalTaskFilePath, resolveTaskFilePath, isTaskFileName, } from "./data-tasks.js";
12
12
  export { addProjectToProfile, listMachines, listProfiles, listProjectCards, removeProjectFromProfile, setMachineProfile, } from "./profile-store.js";
13
13
  export { loadShellState, readRuntimeHealth, resetShellState, saveShellState, } from "./shell-state-store.js";
14
14
  function withSafeLock(filePath, fn) {
@@ -528,6 +528,29 @@ export function removeTask(phrenPath, project, match) {
528
528
  return phrenOk(`Removed task from ${project}: ${item.line}`);
529
529
  });
530
530
  }
531
+ export function removeTasks(phrenPath, project, matches) {
532
+ const bPath = canonicalTaskFilePath(phrenPath, project);
533
+ if (!bPath)
534
+ return phrenErr(`Project name "${project}" is not valid.`, PhrenError.INVALID_PROJECT_NAME);
535
+ return withSafeLock(bPath, () => {
536
+ const parsed = readTasks(phrenPath, project);
537
+ if (!parsed.ok)
538
+ return forwardErr(parsed);
539
+ const removed = [];
540
+ const errors = [];
541
+ for (const match of matches) {
542
+ const found = findItemByMatch(parsed.data, match);
543
+ if (found.error || !found.match) {
544
+ errors.push(match);
545
+ continue;
546
+ }
547
+ const [item] = parsed.data.items[found.match.section].splice(found.match.index, 1);
548
+ removed.push(item.line);
549
+ }
550
+ writeTaskDoc(parsed.data);
551
+ return phrenOk({ removed, errors });
552
+ });
553
+ }
531
554
  export function updateTask(phrenPath, project, match, updates) {
532
555
  const bPath = canonicalTaskFilePath(phrenPath, project);
533
556
  if (!bPath)
@@ -3,7 +3,7 @@ import { z } from "zod";
3
3
  import * as fs from "fs";
4
4
  import * as path from "path";
5
5
  import { isValidProjectName } from "./utils.js";
6
- import { addTask as addTaskStore, addTasks as addTasksBatch, taskMarkdown, completeTask as completeTaskStore, completeTasks as completeTasksBatch, removeTask as removeTaskStore, linkTaskIssue, pinTask, workNextTask, tidyDoneTasks, readTasks, readTasksAcrossProjects, resolveTaskItem, TASKS_FILENAME, updateTask as updateTaskStore, promoteTask, } from "./data-access.js";
6
+ import { addTask as addTaskStore, addTasks as addTasksBatch, taskMarkdown, completeTask as completeTaskStore, completeTasks as completeTasksBatch, removeTask as removeTaskStore, removeTasks as removeTasksBatch, linkTaskIssue, pinTask, workNextTask, tidyDoneTasks, readTasks, readTasksAcrossProjects, resolveTaskItem, TASKS_FILENAME, updateTask as updateTaskStore, promoteTask, } from "./data-access.js";
7
7
  import { applyGravity } from "./data-tasks.js";
8
8
  import { buildTaskIssueBody, createGithubIssueForTask, parseGithubIssueUrl, resolveProjectGithubRepo, } from "./tasks-github.js";
9
9
  import { clearTaskCheckpoint } from "./session-checkpoints.js";
@@ -327,6 +327,26 @@ export function register(server, ctx) {
327
327
  return mcpResponse({ ok: true, message: result.data, data: { project, item } });
328
328
  });
329
329
  });
330
+ server.registerTool("remove_tasks", {
331
+ title: "◆ phren · remove tasks (bulk)",
332
+ description: "Remove multiple tasks in one call. Pass an array of partial item texts or IDs.",
333
+ inputSchema: z.object({
334
+ project: z.string().describe("Project name."),
335
+ items: z.array(z.string()).describe("List of partial item texts or IDs to remove."),
336
+ }),
337
+ }, async ({ project, items }) => {
338
+ if (!isValidProjectName(project))
339
+ return mcpResponse({ ok: false, error: `Invalid project name: "${project}"` });
340
+ return withWriteQueue(async () => {
341
+ const result = removeTasksBatch(phrenPath, project, items);
342
+ if (!result.ok)
343
+ return mcpResponse({ ok: false, error: result.error });
344
+ const { removed, errors } = result.data;
345
+ if (removed.length > 0)
346
+ refreshTaskIndex(updateFileInIndex, phrenPath, project);
347
+ return mcpResponse({ ok: removed.length > 0, ...(removed.length === 0 ? { error: `No tasks removed: ${errors.join("; ")}` } : {}), message: `Removed ${removed.length}/${items.length} items`, data: { project, removed, errors } });
348
+ });
349
+ });
330
350
  server.registerTool("update_task", {
331
351
  title: "◆ phren · update task",
332
352
  description: "Update a task's text, priority, context, section, or GitHub metadata by matching text.",
@@ -3,7 +3,7 @@ import { getQualityMultiplier, entryScoreKey, } from "./shared-governance.js";
3
3
  import { queryDocRows, queryRows, cosineFallback, extractSnippet, getDocSourceKey, getEntityBoostDocs, decodeFiniteNumber, rowToDocWithRowid, } from "./shared-index.js";
4
4
  import { filterTrustedFindingsDetailed, } from "./shared-content.js";
5
5
  import { parseCitationComment } from "./content-citation.js";
6
- import { getHighImpactFindings, getImpactSurfaceCounts } from "./finding-impact.js";
6
+ import { getHighImpactFindings } from "./finding-impact.js";
7
7
  import { buildFtsQueryVariants, buildRelaxedFtsQuery, isFeatureEnabled, STOP_WORDS } from "./utils.js";
8
8
  import * as fs from "fs";
9
9
  import * as path from "path";
@@ -36,7 +36,7 @@ const LOW_VALUE_BULLET_FRACTION = 0.5;
36
36
  // ── Intent and scoring helpers ───────────────────────────────────────────────
37
37
  export function detectTaskIntent(prompt) {
38
38
  const p = prompt.toLowerCase();
39
- if (/(^|\s)\/[a-z][a-z0-9_-]{1,63}(?=$|\s|[.,:;!?])/.test(p) || /\b(skill|swarm|lineup|slash command)\b/.test(p))
39
+ if (/(?:^|\s)\/(?!(?:home|usr|var|tmp|etc|opt|api|mnt)\b)[a-z][\w-]*\b/.test(p) || /\bskill\b/.test(p))
40
40
  return "skill";
41
41
  if (/(bug|error|fix|broken|regression|fail|stack trace)/.test(p))
42
42
  return "debug";
@@ -49,8 +49,6 @@ export function detectTaskIntent(prompt) {
49
49
  return "general";
50
50
  }
51
51
  function intentBoost(intent, docType) {
52
- if (intent === "skill" && docType === "skill")
53
- return 4;
54
52
  if (intent === "debug" && (docType === "findings" || docType === "reference"))
55
53
  return 3;
56
54
  if (intent === "review" && (docType === "canonical" || docType === "changelog"))
@@ -347,23 +345,10 @@ export function searchDocuments(db, safeQuery, prompt, keywords, detectedProject
347
345
  if (ftsDocs.length === 0 && relaxedQuery && relaxedQuery !== safeQuery) {
348
346
  runScopedFtsQuery(relaxedQuery);
349
347
  }
350
- // Tier 1.5: Fragment graph expansion
351
- const fragmentExpansionDocs = [];
352
- const queryLower = (prompt + " " + keywords).toLowerCase();
353
- const fragmentBoostDocKeys = getEntityBoostDocs(db, queryLower);
354
- for (const docKey of fragmentBoostDocKeys) {
355
- if (ftsSeenKeys.has(docKey))
356
- continue;
357
- const rows = queryDocRows(db, "SELECT project, filename, type, content, path FROM docs WHERE path = ? LIMIT 1", [docKey]);
358
- if (rows?.length) {
359
- ftsSeenKeys.add(docKey);
360
- fragmentExpansionDocs.push(rows[0]);
361
- }
362
- }
363
348
  // Tier 2: Token-overlap semantic — always run, scored independently
364
349
  const semanticDocs = semanticFallbackDocs(db, `${prompt}\n${keywords}`, detectedProject);
365
350
  // Merge with Reciprocal Rank Fusion so documents found by both tiers rank highest
366
- const merged = rrfMerge([ftsDocs, fragmentExpansionDocs, semanticDocs]);
351
+ const merged = rrfMerge([ftsDocs, semanticDocs]);
367
352
  if (merged.length === 0)
368
353
  return null;
369
354
  return merged.slice(0, 12);
@@ -400,7 +385,7 @@ export async function searchDocumentsAsync(db, safeQuery, prompt, keywords, dete
400
385
  }
401
386
  catch (err) {
402
387
  // Vector search failure is non-fatal — return sync result
403
- if (process.env.PHREN_DEBUG)
388
+ if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
404
389
  process.stderr.write(`[phren] hybridSearch vectorFallback: ${err instanceof Error ? err.message : String(err)}\n`);
405
390
  return syncResult;
406
391
  }
@@ -500,7 +485,7 @@ export async function searchKnowledgeRows(db, options) {
500
485
  }
501
486
  }
502
487
  catch (err) {
503
- if (process.env.PHREN_DEBUG) {
488
+ if (process.env.PHREN_DEBUG) {
504
489
  process.stderr.write(`[phren] vectorFallback: ${err instanceof Error ? err.message : String(err)}\n`);
505
490
  }
506
491
  }
@@ -515,7 +500,6 @@ export function applyTrustFilter(rows, ttlDays, minConfidence, decay, phrenPath)
515
500
  const queueItems = [];
516
501
  const auditEntries = [];
517
502
  const highImpactFindingIds = phrenPath ? getHighImpactFindings(phrenPath, 3) : undefined;
518
- const impactCounts = phrenPath ? getImpactSurfaceCounts(phrenPath, 1) : undefined;
519
503
  const filtered = rows
520
504
  .map((doc) => {
521
505
  if (!TRUST_FILTERED_TYPES.has(doc.type))
@@ -526,7 +510,6 @@ export function applyTrustFilter(rows, ttlDays, minConfidence, decay, phrenPath)
526
510
  decay,
527
511
  project: doc.project,
528
512
  highImpactFindingIds,
529
- impactCounts,
530
513
  });
531
514
  if (trust.issues.length > 0) {
532
515
  const stale = trust.issues.filter((i) => i.reason === "stale").map((i) => i.bullet);
@@ -630,7 +613,7 @@ export function rankResults(rows, intent, gitCtx, detectedProject, phrenPathLoca
630
613
  const scored = ranked.map((doc) => {
631
614
  const globBoost = getProjectGlobBoost(phrenPathLocal, doc.project, cwd, gitCtx?.changedFiles);
632
615
  const key = entryScoreKey(doc.project, doc.filename, doc.content);
633
- const entity = entityBoostPaths.has(doc.path) ? 1.5 : 1;
616
+ const entity = entityBoostPaths.has(doc.path) ? 1.3 : 1;
634
617
  const date = getRecentDate(doc);
635
618
  const fileRel = fileRelevanceBoost(doc.path, changedFiles);
636
619
  const branchMat = branchMatchBoost(doc.content, gitCtx?.branch);
@@ -645,12 +628,7 @@ export function rankResults(rows, intent, gitCtx, detectedProject, phrenPathLoca
645
628
  && queryOverlap < WEAK_CROSS_PROJECT_OVERLAP_MAX
646
629
  ? WEAK_CROSS_PROJECT_OVERLAP_PENALTY
647
630
  : 0;
648
- // Boost skills whose filename matches a query token (e.g. "swarm" matches swarm.md)
649
- const skillNameBoost = doc.type === "skill" && queryTokens.length > 0
650
- ? queryTokens.some((t) => doc.filename.replace(/\.md$/i, "").toLowerCase() === t) ? 4 : 0
651
- : 0;
652
631
  const score = Math.round((intentBoost(intent, doc.type) +
653
- skillNameBoost +
654
632
  fileRel +
655
633
  branchMat +
656
634
  globBoost +
@@ -754,7 +732,7 @@ export function markStaleCitations(snippet) {
754
732
  }
755
733
  }
756
734
  catch (err) {
757
- if (process.env.PHREN_DEBUG)
735
+ if (process.env.PHREN_DEBUG)
758
736
  process.stderr.write(`[phren] applyCitationAnnotations fileRead: ${err instanceof Error ? err.message : String(err)}\n`);
759
737
  stale = true;
760
738
  }
@@ -771,23 +749,6 @@ export function markStaleCitations(snippet) {
771
749
  }
772
750
  return result.join("\n");
773
751
  }
774
- function annotateContradictions(snippet) {
775
- return snippet.split('\n').map(line => {
776
- const conflictMatch = line.match(/<!-- conflicts_with: "(.*?)" -->/);
777
- const contradictMatch = line.match(/<!-- phren:contradicts "(.*?)" -->/);
778
- const statusMatch = line.match(/phren:status "contradicted"/);
779
- if (conflictMatch) {
780
- return line.replace(conflictMatch[0], '') + ` [CONTRADICTED — conflicts with: "${conflictMatch[1]}"]`;
781
- }
782
- if (contradictMatch) {
783
- return line.replace(contradictMatch[0], '') + ` [CONTRADICTED — see: "${contradictMatch[1]}"]`;
784
- }
785
- if (statusMatch) {
786
- return line + ' [CONTRADICTED]';
787
- }
788
- return line;
789
- }).join('\n');
790
- }
791
752
  export function selectSnippets(rows, keywords, tokenBudget, lineBudget, charBudget) {
792
753
  const selected = [];
793
754
  let usedTokens = 36;
@@ -813,7 +774,6 @@ export function selectSnippets(rows, keywords, tokenBudget, lineBudget, charBudg
813
774
  if (TRUST_FILTERED_TYPES.has(doc.type)) {
814
775
  snippet = markStaleCitations(snippet);
815
776
  }
816
- snippet = annotateContradictions(snippet);
817
777
  snippet = dedupSnippetBullets(snippet);
818
778
  if (!snippet.trim())
819
779
  continue;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@phren/cli",
3
- "version": "0.0.9",
3
+ "version": "0.0.10",
4
4
  "description": "Knowledge layer for AI agents. Claude remembers you. Phren remembers your work.",
5
5
  "type": "module",
6
6
  "bin": {