@phren/cli 0.0.3 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,147 @@
1
+ // query-correlation.ts — Lightweight query-to-finding correlation tracker.
2
+ // Tracks which queries led to which documents being selected (and later rated "helpful"),
3
+ // then uses that data to pre-warm results for recurring query patterns.
4
+ //
5
+ // Gated behind PHREN_FEATURE_QUERY_CORRELATION env var (disabled by default).
6
+ // Storage: JSONL append to .runtime/query-correlations.jsonl, last-500 window.
7
+ import * as fs from "fs";
8
+ import { runtimeFile, debugLog } from "./shared.js";
9
+ import { isFeatureEnabled, errorMessage } from "./utils.js";
10
+ const CORRELATION_FILENAME = "query-correlations.jsonl";
11
+ const RECENT_WINDOW = 500;
12
+ const MIN_TOKEN_OVERLAP = 2;
13
+ const MIN_TOKEN_LENGTH = 3;
14
/**
 * Report whether the query-correlation feature has been switched on.
 * Reads the PHREN_FEATURE_QUERY_CORRELATION env var via isFeatureEnabled;
 * the feature is off by default.
 */
export function isQueryCorrelationEnabled() {
    const enabled = isFeatureEnabled("PHREN_FEATURE_QUERY_CORRELATION", false);
    return enabled;
}
20
/**
 * Append query-to-finding correlation records after snippet selection.
 * Called from handleHookPrompt after selectSnippets.
 *
 * One JSONL record is written per selected doc, carrying the (truncated)
 * query keywords, the doc's project/filename, and the session id. No-ops
 * when the feature is disabled or there is nothing meaningful to record;
 * any I/O failure is logged via debugLog and swallowed (best-effort).
 */
export function logCorrelations(phrenPath, keywords, selected, sessionId) {
    if (!isQueryCorrelationEnabled())
        return;
    if (!selected.length || !keywords.trim())
        return;
    try {
        const targetFile = runtimeFile(phrenPath, CORRELATION_FILENAME);
        // One JSON line per selected doc; keywords capped at 200 chars.
        const records = selected.map((sel) => JSON.stringify({
            timestamp: new Date().toISOString(),
            keywords: keywords.slice(0, 200),
            project: sel.doc.project,
            filename: sel.doc.filename,
            sessionId,
        }));
        fs.appendFileSync(targetFile, records.join("\n") + "\n");
    }
    catch (err) {
        debugLog(`query-correlation log failed: ${errorMessage(err)}`);
    }
}
48
/**
 * Mark correlations from a session as "helpful" when positive feedback is received.
 * This retroactively stamps entries so that future correlation lookups weight them higher.
 *
 * Scans the JSONL log line by line; entries matching the given sessionId and
 * doc key (project/filename) that are not yet flagged get helpful=true.
 * The file is only rewritten when at least one entry changed. Unparseable
 * lines are kept verbatim; any I/O failure is logged and swallowed.
 */
export function markCorrelationsHelpful(phrenPath, sessionId, docKey) {
    if (!isQueryCorrelationEnabled())
        return;
    try {
        const correlationFile = runtimeFile(phrenPath, CORRELATION_FILENAME);
        if (!fs.existsSync(correlationFile))
            return;
        const raw = fs.readFileSync(correlationFile, "utf8");
        const sourceLines = raw.split("\n").filter(Boolean);
        let touched = false;
        const rewritten = [];
        for (const line of sourceLines) {
            let replacement = line;
            try {
                const entry = JSON.parse(line);
                const matches = entry.sessionId === sessionId
                    && `${entry.project}/${entry.filename}` === docKey
                    && !entry.helpful;
                if (matches) {
                    entry.helpful = true;
                    touched = true;
                    replacement = JSON.stringify(entry);
                }
            }
            catch {
                // keep original line
            }
            rewritten.push(replacement);
        }
        if (touched) {
            fs.writeFileSync(correlationFile, rewritten.join("\n") + "\n");
        }
    }
    catch (err) {
        debugLog(`query-correlation mark-helpful failed: ${errorMessage(err)}`);
    }
}
86
/**
 * Tokenize a keyword string for overlap comparison.
 * Lowercases, splits on whitespace, and keeps only tokens of at least
 * MIN_TOKEN_LENGTH characters. Returns a Set for O(1) membership checks.
 */
function tokenize(text) {
    const tokens = new Set();
    for (const word of text.toLowerCase().split(/\s+/)) {
        if (word.length >= MIN_TOKEN_LENGTH) {
            tokens.add(word);
        }
    }
    return tokens;
}
95
/**
 * Find documents that historically correlate with the given query keywords.
 * Returns doc keys (project/filename) sorted by correlation strength.
 *
 * Only looks at the last RECENT_WINDOW entries for performance.
 * Entries marked "helpful" get a 2x weight boost.
 *
 * Returns [] when the feature is disabled, the log file is missing, the
 * query yields no usable tokens, or any I/O error occurs (logged via
 * debugLog). Malformed JSONL lines are skipped silently.
 */
export function getCorrelatedDocs(phrenPath, keywords, limit = 3) {
    if (!isQueryCorrelationEnabled())
        return [];
    try {
        const correlationFile = runtimeFile(phrenPath, CORRELATION_FILENAME);
        if (!fs.existsSync(correlationFile))
            return [];
        const raw = fs.readFileSync(correlationFile, "utf8");
        const allLines = raw.split("\n").filter(Boolean);
        // Only look at the last RECENT_WINDOW entries to keep this fast.
        const windowed = allLines.slice(-RECENT_WINDOW);
        const queryTokens = tokenize(keywords);
        if (queryTokens.size === 0)
            return [];
        const scores = new Map();
        for (const line of windowed) {
            try {
                const entry = JSON.parse(line);
                const pastTokens = tokenize(entry.keywords);
                // Overlap = number of query tokens shared with the past query.
                const overlap = [...queryTokens].filter((t) => pastTokens.has(t)).length;
                if (overlap < MIN_TOKEN_OVERLAP)
                    continue;
                const key = `${entry.project}/${entry.filename}`;
                // Helpful entries get a 2x weight boost.
                const weight = entry.helpful ? overlap * 2 : overlap;
                scores.set(key, (scores.get(key) ?? 0) + weight);
            }
            catch {
                // skip malformed lines
            }
        }
        const ranked = [...scores.entries()].sort((a, b) => b[1] - a[1]);
        return ranked.slice(0, limit).map(([key]) => key);
    }
    catch (err) {
        debugLog(`query-correlation lookup failed: ${errorMessage(err)}`);
        return [];
    }
}
@@ -4,5 +4,5 @@ export { filterTrustedFindings, filterTrustedFindingsDetailed, } from "./content
4
4
  export { scanForSecrets, resolveCoref, isDuplicateFinding, detectConflicts, extractDynamicEntities, checkSemanticDedup, checkSemanticConflicts, } from "./content-dedup.js";
5
5
  export { countActiveFindings, autoArchiveToReference, } from "./content-archive.js";
6
6
  export { upsertCanonical, addFindingToFile, addFindingsToFile, } from "./content-learning.js";
7
- export { FINDING_LIFECYCLE_STATUSES, parseFindingLifecycle, buildLifecycleComments, isInactiveFindingLine, } from "./finding-lifecycle.js";
7
+ export { FINDING_LIFECYCLE_STATUSES, FINDING_TYPE_DECAY, extractFindingType, parseFindingLifecycle, buildLifecycleComments, isInactiveFindingLine, } from "./finding-lifecycle.js";
8
8
  export { METADATA_REGEX, parseStatus, parseStatusField, parseSupersession, parseSupersedesRef, parseContradiction, parseAllContradictions, parseFindingId, parseCreatedDate, isCitationLine, isArchiveStart, isArchiveEnd, stripLifecycleMetadata, stripRelationMetadata, stripAllMetadata, stripComments, addMetadata, } from "./content-metadata.js";
@@ -81,8 +81,8 @@ const FILE_TYPE_MAP = {
81
81
  "reference.md": "reference",
82
82
  "tasks.md": "task",
83
83
  "changelog.md": "changelog",
84
- "canonical_memories.md": "canonical",
85
- "memory_queue.md": "memory-queue",
84
+ "truths.md": "canonical",
85
+ "review.md": "review-queue",
86
86
  };
87
87
  function pathHasSegment(relPath, segment) {
88
88
  const parts = relPath.replace(/\\/g, "/").split("/").filter(Boolean);
@@ -282,7 +282,7 @@ function computePhrenHash(phrenPath, profile, preGlobbed) {
282
282
  }
283
283
  }
284
284
  }
285
- // Include manual entity links so graph changes invalidate the cache
285
+ // Include manual fragment links so graph changes invalidate the cache
286
286
  const manualLinksPath = runtimeFile(phrenPath, "manual-links.json");
287
287
  if (fs.existsSync(manualLinksPath)) {
288
288
  try {
@@ -598,7 +598,7 @@ export function updateFileInIndex(db, filePath, phrenPath) {
598
598
  const type = classifyFile(filename, relFile);
599
599
  const entry = { fullPath: resolvedPath, project, filename, type, relFile };
600
600
  if (insertFileIntoIndex(db, entry, phrenPath, { scheduleEmbeddings: true })) {
601
- // Re-extract entities for finding files
601
+ // Re-extract fragments for finding files
602
602
  if (type === "findings") {
603
603
  try {
604
604
  const content = fs.readFileSync(resolvedPath, "utf-8");
@@ -687,7 +687,7 @@ function isSentinelFresh(phrenPath, sentinel) {
687
687
  return true;
688
688
  }
689
689
  /**
690
- * Attempt to restore the entity graph (entities, entity_links, global_entities) from a
690
+ * Attempt to restore the fragment graph (entities, entity_links, global_entities) from a
691
691
  * previously persisted JSON snapshot. Returns true if the graph was loaded, false if the
692
692
  * caller must run full extraction instead.
693
693
  */
@@ -723,7 +723,7 @@ function loadCachedEntityGraph(db, graphPath, allFiles, phrenPath) {
723
723
  // is not empty after a cached-graph rebuild path.
724
724
  if (Array.isArray(graph.globalEntities)) {
725
725
  for (const [entity, project, docKey] of graph.globalEntities) {
726
- // Skip global entities whose source doc no longer exists
726
+ // Skip global fragments whose source doc no longer exists
727
727
  if (docKey && !validDocKeys.has(docKey))
728
728
  continue;
729
729
  try {
@@ -736,7 +736,7 @@ function loadCachedEntityGraph(db, graphPath, allFiles, phrenPath) {
736
736
  }
737
737
  }
738
738
  else {
739
- // Older cache without globalEntities: re-derive from entity_links + entities
739
+ // Older cache without globalEntities: re-derive from entity_links + entities tables
740
740
  try {
741
741
  const rows = db.exec(`SELECT e.name, el.source_doc FROM entity_links el
742
742
  JOIN entities e ON el.target_id = e.id
@@ -769,7 +769,7 @@ function loadCachedEntityGraph(db, graphPath, allFiles, phrenPath) {
769
769
  }
770
770
  return false;
771
771
  }
772
- /** Merge manual entity links (written by link_findings tool) into the live DB. Always runs on
772
+ /** Merge manual fragment links (written by link_findings tool) into the live DB. Always runs on
773
773
  * every build so hand-authored links survive a full index rebuild. */
774
774
  function mergeManualLinks(db, phrenPath) {
775
775
  const manualLinksPath = runtimeFile(phrenPath, 'manual-links.json');
@@ -1001,7 +1001,7 @@ async function buildIndexImpl(phrenPath, profile) {
1001
1001
  extractAndLinkFragments(db, content, getEntrySourceDocKey(entry, phrenPath), phrenPath);
1002
1002
  }
1003
1003
  catch (err) {
1004
- debugLog(`entity extraction failed: ${errorMessage(err)}`);
1004
+ debugLog(`fragment extraction failed: ${errorMessage(err)}`);
1005
1005
  }
1006
1006
  }
1007
1007
  }
@@ -1063,15 +1063,15 @@ async function buildIndexImpl(phrenPath, profile) {
1063
1063
  tokenize = "porter unicode61"
1064
1064
  );
1065
1065
  `);
1066
- // Entity graph tables for lightweight reference graph
1066
+ // Fragment graph tables for lightweight reference graph
1067
1067
  db.run(`CREATE TABLE IF NOT EXISTS entities (id INTEGER PRIMARY KEY, name TEXT NOT NULL, type TEXT NOT NULL, first_seen_at TEXT, UNIQUE(name, type))`);
1068
1068
  db.run(`CREATE TABLE IF NOT EXISTS entity_links (source_id INTEGER REFERENCES entities(id), target_id INTEGER REFERENCES entities(id), rel_type TEXT NOT NULL, source_doc TEXT, PRIMARY KEY (source_id, target_id, rel_type))`);
1069
- // Q20: Cross-project entity index
1069
+ // Q20: Cross-project fragment index
1070
1070
  ensureGlobalEntitiesTable(db);
1071
1071
  const allFiles = globResult.entries;
1072
1072
  const newHashes = {};
1073
1073
  let fileCount = 0;
1074
- // Try loading cached entity graph
1074
+ // Try loading cached fragment graph
1075
1075
  const graphPath = runtimeFile(phrenPath, 'entity-graph.json');
1076
1076
  const entityGraphLoaded = loadCachedEntityGraph(db, graphPath, allFiles, phrenPath);
1077
1077
  for (const entry of allFiles) {
@@ -1084,19 +1084,19 @@ async function buildIndexImpl(phrenPath, profile) {
1084
1084
  }
1085
1085
  if (insertFileIntoIndex(db, entry, phrenPath, { scheduleEmbeddings: true })) {
1086
1086
  fileCount++;
1087
- // Extract entities from finding files (if not loaded from cache)
1087
+ // Extract fragments from finding files (if not loaded from cache)
1088
1088
  if (!entityGraphLoaded && entry.type === "findings") {
1089
1089
  try {
1090
1090
  const content = fs.readFileSync(entry.fullPath, "utf-8");
1091
1091
  extractAndLinkFragments(db, content, getEntrySourceDocKey(entry, phrenPath), phrenPath);
1092
1092
  }
1093
1093
  catch (err) {
1094
- debugLog(`entity extraction failed: ${errorMessage(err)}`);
1094
+ debugLog(`fragment extraction failed: ${errorMessage(err)}`);
1095
1095
  }
1096
1096
  }
1097
1097
  }
1098
1098
  }
1099
- // Persist entity graph for next build
1099
+ // Persist fragment graph for next build
1100
1100
  if (!entityGraphLoaded) {
1101
1101
  try {
1102
1102
  const entityRows = db.exec("SELECT id, name, type FROM entities")[0]?.values ?? [];
@@ -12,6 +12,7 @@ import { vectorFallback } from "./shared-search-fallback.js";
12
12
  import { getOllamaUrl, getCloudEmbeddingUrl } from "./shared-ollama.js";
13
13
  import { keywordFallbackSearch } from "./core-search.js";
14
14
  import { debugLog } from "./shared.js";
15
+ import { getCorrelatedDocs } from "./query-correlation.js";
15
16
  // ── Scoring constants ─────────────────────────────────────────────────────────
16
17
  /** Number of docs sampled for token-overlap semantic fallback search. */
17
18
  const SEMANTIC_FALLBACK_SAMPLE_LIMIT = 100;
@@ -31,6 +32,8 @@ const LOW_FOCUS_SNIPPET_CHAR_FRACTION = 0.55;
31
32
  const TASK_RESCUE_MIN_OVERLAP = 0.3;
32
33
  const TASK_RESCUE_OVERLAP_MARGIN = 0.12;
33
34
  const TASK_RESCUE_SCORE_MARGIN = 0.6;
35
+ /** Boost applied to docs that correlate with recurring query patterns. */
36
+ const CORRELATION_BOOST = 1.5;
34
37
  /** Fraction of bullets that must be low-value before applying the low-value penalty. */
35
38
  const LOW_VALUE_BULLET_FRACTION = 0.5;
36
39
  // ── Intent and scoring helpers ───────────────────────────────────────────────
@@ -605,6 +608,8 @@ export function rankResults(rows, intent, gitCtx, detectedProject, phrenPathLoca
605
608
  }
606
609
  }
607
610
  const getRecentDate = (doc) => recentDateCache.get(doc.path || `${doc.project}/${doc.filename}`) ?? "0000-00-00";
611
+ // Query correlation: pre-warm docs that historically correlated with similar queries
612
+ const correlatedDocKeys = query ? new Set(getCorrelatedDocs(phrenPathLocal, query, 5)) : new Set();
608
613
  // Precompute per-doc ranking metadata once — avoids recomputing inside sort comparator.
609
614
  const changedFiles = gitCtx?.changedFiles || new Set();
610
615
  const FILE_MATCH_BOOST = 1.5;
@@ -626,6 +631,8 @@ export function rankResults(rows, intent, gitCtx, detectedProject, phrenPathLoca
626
631
  && queryOverlap < WEAK_CROSS_PROJECT_OVERLAP_MAX
627
632
  ? WEAK_CROSS_PROJECT_OVERLAP_PENALTY
628
633
  : 0;
634
+ const correlationKey = `${doc.project}/${doc.filename}`;
635
+ const correlationBoost = correlatedDocKeys.has(correlationKey) ? CORRELATION_BOOST : 0;
629
636
  const score = Math.round((intentBoost(intent, doc.type) +
630
637
  fileRel +
631
638
  branchMat +
@@ -633,7 +640,8 @@ export function rankResults(rows, intent, gitCtx, detectedProject, phrenPathLoca
633
640
  qualityMult +
634
641
  entity +
635
642
  queryOverlap * queryOverlapWeight +
636
- recencyBoost(doc.type, date) -
643
+ recencyBoost(doc.type, date) +
644
+ correlationBoost -
637
645
  weakCrossProjectPenalty -
638
646
  lowValuePenalty(doc.content, doc.type)) * crossProjectAgeMultiplier(doc, detectedProject, date) * 10000) / 10000;
639
647
  const fileMatch = fileRel > 0 || branchMat > 0;
@@ -698,6 +706,24 @@ export function rankResults(rows, intent, gitCtx, detectedProject, phrenPathLoca
698
706
  }
699
707
  return ranked;
700
708
  }
709
+ /** Annotate snippet lines that carry contradiction metadata with visible markers. */
710
+ export function annotateContradictions(snippet) {
711
+ return snippet.split('\n').map(line => {
712
+ const conflictMatch = line.match(/<!-- conflicts_with: "(.*?)" -->/);
713
+ const contradictMatch = line.match(/<!-- phren:contradicts "(.*?)" -->/);
714
+ const statusMatch = line.match(/phren:status "contradicted"/);
715
+ if (conflictMatch) {
716
+ return line.replace(conflictMatch[0], '') + ` [CONTRADICTED — conflicts with: "${conflictMatch[1]}"]`;
717
+ }
718
+ if (contradictMatch) {
719
+ return line.replace(contradictMatch[0], '') + ` [CONTRADICTED — see: "${contradictMatch[1]}"]`;
720
+ }
721
+ if (statusMatch) {
722
+ return line + ' [CONTRADICTED]';
723
+ }
724
+ return line;
725
+ }).join('\n');
726
+ }
701
727
  /** Mark snippet lines with stale citations (cited file missing or line content changed). */
702
728
  export function markStaleCitations(snippet) {
703
729
  const lines = snippet.split("\n");
@@ -759,6 +785,8 @@ export function selectSnippets(rows, keywords, tokenBudget, lineBudget, charBudg
759
785
  if (TRUST_FILTERED_TYPES.has(doc.type)) {
760
786
  snippet = markStaleCitations(snippet);
761
787
  }
788
+ // Surface contradiction metadata as visible annotations
789
+ snippet = annotateContradictions(snippet);
762
790
  let focusScore = queryTokens.length > 0
763
791
  ? overlapScore(queryTokens, `${doc.filename}\n${snippet}`)
764
792
  : 1;
@@ -3,7 +3,8 @@
3
3
  * Extracted from shell.ts to keep the orchestrator under 300 lines.
4
4
  */
5
5
  import { PhrenShell } from "./shell.js";
6
- import { style, clearScreen, clearToEnd, shellStartupFrames } from "./shell-render.js";
6
+ import { style, clearScreen, clearToEnd, shellStartupFrames, gradient, badge } from "./shell-render.js";
7
+ import { createPhrenAnimator } from "./phren-art.js";
7
8
  import { errorMessage } from "./utils.js";
8
9
  import { computePhrenLiveStateToken } from "./shared.js";
9
10
  import { VERSION } from "./init-shared.js";
@@ -59,13 +60,55 @@ async function playStartupIntro(phrenPath, plan = resolveStartupIntroPlan(phrenP
59
60
  await sleep(160);
60
61
  }
61
62
  }
62
- renderIntroFrame(frames[frames.length - 1], renderHint);
63
+ // Start animated phren during loading
64
+ const animator = createPhrenAnimator({ facing: "right" });
65
+ animator.start();
66
+ const cols = process.stdout.columns || 80;
67
+ const tagline = style.dim("local memory for working agents");
68
+ const versionBadge = badge(`v${VERSION}`, style.boldBlue);
69
+ const logoLines = [
70
+ "██████╗ ██╗ ██╗██████╗ ███████╗███╗ ██╗",
71
+ "██╔══██╗██║ ██║██╔══██╗██╔════╝████╗ ██║",
72
+ "██████╔╝███████║██████╔╝█████╗ ██╔██╗ ██║",
73
+ "██╔═══╝ ██╔══██║██╔══██╗██╔══╝ ██║╚██╗██║",
74
+ "██║ ██║ ██║██║ ██║███████╗██║ ╚████║",
75
+ "╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═══╝",
76
+ ].map(l => gradient(l));
77
+ const infoLine = `${gradient("◆")} ${style.bold("phren")} ${versionBadge} ${tagline}`;
78
+ function renderAnimatedFrame(hint) {
79
+ const phrenLines = animator.getFrame();
80
+ const rightSide = ["", "", ...logoLines, "", infoLine];
81
+ const charWidth = 26;
82
+ const maxLines = Math.max(phrenLines.length, rightSide.length);
83
+ const merged = [""];
84
+ for (let i = 0; i < maxLines; i++) {
85
+ const left = (i < phrenLines.length ? phrenLines[i] : "").padEnd(charWidth);
86
+ const right = i < rightSide.length ? rightSide[i] : "";
87
+ merged.push(left + right);
88
+ }
89
+ if (hint)
90
+ merged.push("", ` ${hint}`);
91
+ merged.push("");
92
+ renderIntroFrame(merged.join("\n"));
93
+ }
94
+ // Animate during dwell/loading period
63
95
  if (plan.holdForKeypress) {
96
+ const animInterval = setInterval(() => renderAnimatedFrame(renderHint), 200);
97
+ renderAnimatedFrame(renderHint);
64
98
  await waitForAnyKeypress();
99
+ clearInterval(animInterval);
65
100
  }
66
101
  else if (plan.dwellMs > 0) {
67
- await sleep(plan.dwellMs);
102
+ const startTime = Date.now();
103
+ while (Date.now() - startTime < plan.dwellMs) {
104
+ renderAnimatedFrame(renderHint);
105
+ await sleep(200);
106
+ }
107
+ }
108
+ else {
109
+ renderAnimatedFrame(renderHint);
68
110
  }
111
+ animator.stop();
69
112
  if (plan.markSeen) {
70
113
  markStartupIntroSeen(phrenPath);
71
114
  }
@@ -219,17 +219,36 @@ export function shellStartupFrames(version) {
219
219
  const cols = process.stdout.columns || 80;
220
220
  const tagline = style.dim("local memory for working agents");
221
221
  const versionBadge = badge(`v${version}`, style.boldBlue);
222
+ if (cols >= 72) {
223
+ // Side-by-side: phren character on left, logo text on right
224
+ const phrenLines = PHREN_STARTUP;
225
+ const logoLines = PHREN_LOGO.map(line => gradient(line));
226
+ const infoLine = `${gradient("◆")} ${style.bold("phren")} ${versionBadge} ${tagline}`;
227
+ // Logo is 6 lines, pad to align vertically with character center
228
+ const rightSide = [
229
+ "", "", ...logoLines, "", infoLine,
230
+ ];
231
+ // Merge side by side: character left (26 cols), logo right
232
+ const charWidth = 26;
233
+ const maxLines = Math.max(phrenLines.length, rightSide.length);
234
+ const merged = [""];
235
+ for (let i = 0; i < maxLines; i++) {
236
+ const left = (i < phrenLines.length ? phrenLines[i] : "").padEnd(charWidth);
237
+ const right = i < rightSide.length ? rightSide[i] : "";
238
+ merged.push(left + right);
239
+ }
240
+ merged.push("");
241
+ return [
242
+ // Frame 1: Logo with character side by side immediately
243
+ merged.join("\n"),
244
+ ];
245
+ }
222
246
  if (cols >= 56) {
247
+ // Medium terminal: stacked but compact
223
248
  const logo = PHREN_LOGO.map(line => " " + gradient(line));
224
- const phren = PHREN_STARTUP.map(line => " " + line);
225
249
  const sep = gradient("━".repeat(Math.min(52, cols)));
226
250
  return [
227
- // Frame 1: Phren appears
228
- ["", ...phren, "", ` ${versionBadge} ${tagline}`, ""].join("\n"),
229
- // Frame 2: Full logo materializes with phren
230
- ["", ...phren, "", ...logo, "", ` ${versionBadge} ${tagline}`, ""].join("\n"),
231
- // Frame 3: Complete with brand separator
232
- ["", ...phren, "", ...logo, ` ${sep}`, ` ${gradient("◆")} ${style.bold("phren")} ${versionBadge} ${tagline}`, ""].join("\n"),
251
+ ["", ...logo, ` ${sep}`, ` ${gradient("◆")} ${style.bold("phren")} ${versionBadge} ${tagline}`, ""].join("\n"),
233
252
  ];
234
253
  }
235
254
  // Narrow terminal: progressive text reveal with gradient
@@ -39,7 +39,7 @@ function countBullets(filePath) {
39
39
  return content.split("\n").filter((l) => l.startsWith("- ")).length;
40
40
  }
41
41
  function countQueueItems(phrenPath, project) {
42
- const queueFile = path.join(phrenPath, project, "MEMORY_QUEUE.md");
42
+ const queueFile = path.join(phrenPath, project, "review.md");
43
43
  return countBullets(queueFile);
44
44
  }
45
45
  function runGit(cwd, args) {
@@ -7,7 +7,7 @@ const CATEGORY_BY_MODULE = {
7
7
  "mcp-finding": "Finding capture",
8
8
  "mcp-memory": "Memory quality",
9
9
  "mcp-data": "Data management",
10
- "mcp-graph": "Entities and graph",
10
+ "mcp-graph": "Fragments and graph",
11
11
  "mcp-session": "Session management",
12
12
  "mcp-ops": "Operations and review",
13
13
  "mcp-skills": "Skills management",
package/mcp/dist/utils.js CHANGED
@@ -222,7 +222,7 @@ export function safeProjectPath(base, ...segments) {
222
222
  }
223
223
  return resolved;
224
224
  }
225
- const QUEUE_FILENAME = "MEMORY_QUEUE.md";
225
+ const QUEUE_FILENAME = "review.md";
226
226
  export function queueFilePath(phrenPath, project) {
227
227
  if (!isValidProjectName(project)) {
228
228
  throw new Error(`Invalid project name: ${project}`);
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@phren/cli",
3
- "version": "0.0.3",
4
- "description": "Long-term memory for AI agents. Stored as markdown in a git repo you own.",
3
+ "version": "0.0.5",
4
+ "description": "Knowledge layer for AI agents. Claude remembers you. Phren remembers your work.",
5
5
  "type": "module",
6
6
  "bin": {
7
7
  "phren": "mcp/dist/index.js"