@phren/cli 0.0.35 → 0.0.37

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -434,6 +434,34 @@ export async function handleHookStop() {
434
434
  }
435
435
  return;
436
436
  }
437
+ // Check if HEAD has an upstream tracking branch before attempting sync.
438
+ // Detached HEAD or branches without upstream would cause silent push failures.
439
+ const upstream = await runBestEffortGit(["rev-parse", "--abbrev-ref", "@{upstream}"], phrenPath);
440
+ if (!upstream.ok || !upstream.output) {
441
+ const unsyncedCommits = await countUnsyncedCommits(phrenPath);
442
+ const noUpstreamDetail = "commit created; no upstream tracking branch";
443
+ finalizeTaskSession({
444
+ phrenPath,
445
+ sessionId: taskSessionId,
446
+ status: "no-upstream",
447
+ detail: noUpstreamDetail,
448
+ });
449
+ updateRuntimeHealth(phrenPath, {
450
+ lastStopAt: now,
451
+ lastAutoSave: { at: now, status: "no-upstream", detail: noUpstreamDetail },
452
+ lastSync: {
453
+ lastPushAt: now,
454
+ lastPushStatus: "no-upstream",
455
+ lastPushDetail: noUpstreamDetail,
456
+ unsyncedCommits,
457
+ },
458
+ });
459
+ appendAuditLog(phrenPath, "hook_stop", "status=no-upstream");
460
+ if (unsyncedCommits > 3) {
461
+ process.stderr.write(`phren: ${unsyncedCommits} unsynced commits — no upstream tracking branch.\n`);
462
+ }
463
+ return;
464
+ }
437
465
  const unsyncedCommits = await countUnsyncedCommits(phrenPath);
438
466
  const scheduled = scheduleBackgroundSync(phrenPath);
439
467
  const syncDetail = scheduled
@@ -305,7 +305,7 @@ export function addFindingToFile(phrenPath, project, learning, citationInput, op
305
305
  return phrenOk(`Skipped duplicate finding for "${project}": already exists with similar wording.`);
306
306
  }
307
307
  const newContent = `# ${project} Findings\n\n## ${today}\n\n${preparedForNewFile.finding.bullet}\n${preparedForNewFile.finding.citationComment}\n`;
308
- const tmpPath = learningsPath + ".tmp." + process.pid;
308
+ const tmpPath = learningsPath + `.tmp-${crypto.randomUUID()}`;
309
309
  fs.writeFileSync(tmpPath, newContent);
310
310
  fs.renameSync(tmpPath, learningsPath);
311
311
  return phrenOk({
@@ -461,7 +461,7 @@ export function addFindingsToFile(phrenPath, project, learnings, opts) {
461
461
  added.push(learning);
462
462
  }
463
463
  if (added.length > 0) {
464
- const tmpPath = learningsPath + ".tmp." + process.pid;
464
+ const tmpPath = learningsPath + `.tmp-${crypto.randomUUID()}`;
465
465
  fs.writeFileSync(tmpPath, content.endsWith("\n") ? content : `${content}\n`);
466
466
  fs.renameSync(tmpPath, learningsPath);
467
467
  }
@@ -113,6 +113,7 @@ export function readFindings(phrenPath, project, opts = {}) {
113
113
  let date = "unknown";
114
114
  let index = 1;
115
115
  let inArchiveBlock = false;
116
+ let headingTag;
116
117
  const includeArchived = opts.includeArchived ?? false;
117
118
  for (let i = 0; i < lines.length; i++) {
118
119
  const line = lines[i];
@@ -134,6 +135,39 @@ export function readFindings(phrenPath, project, opts = {}) {
134
135
  date = extractedDate;
135
136
  continue;
136
137
  }
138
+ // Support heading-based findings: ## topic / ### title / paragraph
139
+ const h2TagMatch = line.match(/^##\s+([a-z_-]+)\s*$/i);
140
+ if (h2TagMatch && !line.match(/^##\s+\d{4}/)) {
141
+ // Track topic heading (but not date headings like ## 2026-03-22)
142
+ headingTag = h2TagMatch[1].toLowerCase();
143
+ continue;
144
+ }
145
+ const h3Match = line.match(/^###\s+(.+)$/);
146
+ if (h3Match && headingTag) {
147
+ let body = "";
148
+ for (let j = i + 1; j < lines.length; j++) {
149
+ const next = lines[j].trim();
150
+ if (!next)
151
+ continue;
152
+ if (next.startsWith("#") || next.startsWith("- "))
153
+ break;
154
+ body = next;
155
+ break;
156
+ }
157
+ const title = h3Match[1].trim();
158
+ const syntheticText = body ? `[${headingTag}] ${title} — ${body}` : `[${headingTag}] ${title}`;
159
+ items.push({
160
+ id: `L${index}`,
161
+ date,
162
+ text: syntheticText,
163
+ source: "unknown",
164
+ status: "active",
165
+ archived: inArchiveBlock,
166
+ tier: inArchiveBlock ? "archived" : "current",
167
+ });
168
+ index++;
169
+ continue;
170
+ }
137
171
  if (!line.startsWith("- "))
138
172
  continue;
139
173
  const next = lines[i + 1] || "";
@@ -24,40 +24,11 @@ function acquireFileLock(lockPath) {
24
24
  try {
25
25
  const stat = fs.statSync(lockPath);
26
26
  if (Date.now() - stat.mtimeMs > staleThreshold) {
27
- // Verify lock owner PID is dead before removing stale lock
28
- let ownerDead = true;
29
- try {
30
- const lockContent = fs.readFileSync(lockPath, "utf8");
31
- const lockPid = Number.parseInt(lockContent.split("\n")[0], 10);
32
- if (Number.isFinite(lockPid) && lockPid > 0) {
33
- if (process.platform !== 'win32') {
34
- try {
35
- process.kill(lockPid, 0);
36
- ownerDead = false;
37
- }
38
- catch {
39
- ownerDead = true;
40
- }
41
- }
42
- else {
43
- try {
44
- const result = require('child_process').spawnSync('tasklist', ['/FI', `PID eq ${lockPid}`, '/NH'], { encoding: 'utf8', timeout: 2000 });
45
- if (result.stdout && result.stdout.includes(String(lockPid)))
46
- ownerDead = false;
47
- }
48
- catch {
49
- ownerDead = true;
50
- }
51
- }
52
- }
53
- }
54
- catch {
55
- ownerDead = true; // Can't read lock file, treat as dead
56
- }
57
- if (ownerDead) {
58
- fs.unlinkSync(lockPath);
59
- continue;
60
- }
27
+ // Lock file is older than the stale threshold — delete it unconditionally.
28
+ // This handles zombie processes, crashed hooks, and any case where
29
+ // the owning process failed to clean up.
30
+ fs.unlinkSync(lockPath);
31
+ continue;
61
32
  }
62
33
  }
63
34
  catch (statErr) {
@@ -143,7 +143,7 @@ function normalizeRuntimeHealth(data) {
143
143
  normalized.lastPromptAt = data.lastPromptAt;
144
144
  if (typeof data.lastStopAt === "string")
145
145
  normalized.lastStopAt = data.lastStopAt;
146
- if (isRecord(data.lastAutoSave) && typeof data.lastAutoSave.at === "string" && ["clean", "saved-local", "saved-pushed", "error"].includes(String(data.lastAutoSave.status))) {
146
+ if (isRecord(data.lastAutoSave) && typeof data.lastAutoSave.at === "string" && ["clean", "saved-local", "saved-pushed", "no-upstream", "error"].includes(String(data.lastAutoSave.status))) {
147
147
  normalized.lastAutoSave = {
148
148
  at: data.lastAutoSave.at,
149
149
  status: data.lastAutoSave.status,
@@ -169,7 +169,7 @@ function normalizeRuntimeHealth(data) {
169
169
  normalized.lastSync.lastSuccessfulPullAt = data.lastSync.lastSuccessfulPullAt;
170
170
  if (typeof data.lastSync.lastPushAt === "string")
171
171
  normalized.lastSync.lastPushAt = data.lastSync.lastPushAt;
172
- if (["saved-local", "saved-pushed", "error"].includes(String(data.lastSync.lastPushStatus)))
172
+ if (["saved-local", "saved-pushed", "no-upstream", "error"].includes(String(data.lastSync.lastPushStatus)))
173
173
  normalized.lastSync.lastPushStatus = data.lastSync.lastPushStatus;
174
174
  if (typeof data.lastSync.lastPushDetail === "string")
175
175
  normalized.lastSync.lastPushDetail = data.lastSync.lastPushDetail;
@@ -118,7 +118,7 @@ export function parseMcpMode(raw) {
118
118
  function normalizedBootstrapProjectName(projectPath) {
119
119
  return path.basename(projectPath).toLowerCase().replace(/[^a-z0-9_-]/g, "-");
120
120
  }
121
- function getPendingBootstrapTarget(phrenPath, opts) {
121
+ function getPendingBootstrapTarget(phrenPath, _opts) {
122
122
  const cwdProject = detectProjectDir(process.cwd(), phrenPath);
123
123
  if (!cwdProject)
124
124
  return null;
@@ -884,15 +884,25 @@ export function ensureProjectScaffold(projectDir, projectName, domain = "softwar
884
884
  }
885
885
  }
886
886
  export function ensureLocalGitRepo(phrenPath) {
887
+ // Check if phrenPath already has its own git repo (not just being inside a parent)
887
888
  try {
888
- execFileSync("git", ["-C", phrenPath, "rev-parse", "--is-inside-work-tree"], {
889
- stdio: ["ignore", "ignore", "ignore"],
889
+ const topLevel = execFileSync("git", ["-C", phrenPath, "rev-parse", "--show-toplevel"], {
890
+ encoding: "utf8",
891
+ stdio: ["ignore", "pipe", "ignore"],
890
892
  timeout: EXEC_TIMEOUT_QUICK_MS,
891
- });
892
- return { ok: true, initialized: false, detail: "existing git repo" };
893
+ }).trim();
894
+ const resolvedTopLevel = path.resolve(topLevel);
895
+ const resolvedPhrenPath = path.resolve(phrenPath);
896
+ if (resolvedTopLevel === resolvedPhrenPath) {
897
+ // phrenPath IS the repo root — it has its own git repo
898
+ return { ok: true, initialized: false, detail: "existing git repo" };
899
+ }
900
+ // phrenPath is inside a parent repo — skip nested init
901
+ logger.warn("init", `Skipping git init: ${resolvedPhrenPath} is inside existing repo ${resolvedTopLevel}`);
902
+ return { ok: true, initialized: false, detail: `skipped: inside existing repo ${resolvedTopLevel}` };
893
903
  }
894
904
  catch {
895
- // Fall through to initialization below.
905
+ // Not inside any git repo — fall through to initialization below.
896
906
  }
897
907
  try {
898
908
  try {
@@ -170,8 +170,17 @@ function filterAgentHooks(filePath, commandField) {
170
170
  return true;
171
171
  }
172
172
  catch (err) {
173
- debugLog(`filterAgentHooks: failed for ${filePath}: ${errorMessage(err)}`);
174
- return false;
173
+ // JSON parse or other failure — back up the corrupted file so uninstall can proceed
174
+ const bakPath = filePath + ".bak";
175
+ try {
176
+ fs.renameSync(filePath, bakPath);
177
+ log(` Warning: corrupted hook config backed up to ${bakPath} (${errorMessage(err)})`);
178
+ }
179
+ catch (bakErr) {
180
+ debugLog(`filterAgentHooks: backup failed for ${filePath}: ${errorMessage(bakErr)}`);
181
+ log(` Warning: could not process hook config ${filePath}: ${errorMessage(err)}`);
182
+ }
183
+ return true;
175
184
  }
176
185
  }
177
186
  async function promptUninstallConfirm(phrenPath) {
@@ -35,7 +35,16 @@ export function atomicWriteText(filePath, content) {
35
35
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
36
36
  const tmpPath = `${filePath}.tmp-${crypto.randomUUID()}`;
37
37
  fs.writeFileSync(tmpPath, content);
38
- fs.renameSync(tmpPath, filePath);
38
+ try {
39
+ fs.renameSync(tmpPath, filePath);
40
+ }
41
+ catch (err) {
42
+ try {
43
+ fs.unlinkSync(tmpPath);
44
+ }
45
+ catch { }
46
+ throw err;
47
+ }
39
48
  }
40
49
  function isInstallMode(value) {
41
50
  return value === "shared" || value === "project-local";
@@ -232,7 +232,7 @@ function parseUserDefinedFragments(phrenPath, project) {
232
232
  }
233
233
  }
234
234
  /** Clear the user fragment cache (call between index builds). */
235
- function clearUserFragmentCache() {
235
+ function _clearUserFragmentCache() {
236
236
  _userFragmentCache.clear();
237
237
  _buildUserFragmentCache.clear();
238
238
  _activeBuildCacheKeyPrefix = null;
@@ -22,7 +22,13 @@ export { buildSourceDocKey, decodeFiniteNumber, decodeStringRow, extractSnippet,
22
22
  // ── Async embedding queue ───────────────────────────────────────────────────
23
23
  const _embQueue = new Map();
24
24
  let _embTimer = null;
25
+ const MAX_EMB_QUEUE = 500;
25
26
  function scheduleEmbedding(phrenPath, docPath, content) {
27
+ if (_embQueue.size >= MAX_EMB_QUEUE) {
28
+ const oldest = _embQueue.keys().next().value;
29
+ if (oldest !== undefined)
30
+ _embQueue.delete(oldest);
31
+ }
26
32
  _embQueue.set(docPath, { phrenPath, content });
27
33
  if (_embTimer)
28
34
  clearTimeout(_embTimer);
@@ -63,6 +69,7 @@ async function _drainEmbQueue() {
63
69
  }
64
70
  catch (err) {
65
71
  logger.debug("embeddingQueue embedText", errorMessage(err));
72
+ _embQueue.clear();
66
73
  }
67
74
  }
68
75
  try {
@@ -70,6 +77,7 @@ async function _drainEmbQueue() {
70
77
  }
71
78
  catch (err) {
72
79
  logger.debug("embeddingQueue cacheFlush", errorMessage(err));
80
+ _embQueue.clear();
73
81
  }
74
82
  }
75
83
  }
@@ -63,7 +63,7 @@ async function playStartupIntro(phrenPath, plan = resolveStartupIntroPlan(phrenP
63
63
  // Start animated phren during loading
64
64
  const animator = createPhrenAnimator({ facing: "right" });
65
65
  animator.start();
66
- const cols = process.stdout.columns || 80;
66
+ const _cols = process.stdout.columns || 80;
67
67
  const tagline = style.dim("local memory for working agents");
68
68
  const versionBadge = badge(`v${VERSION}`, style.boldBlue);
69
69
  const logoLines = [
@@ -302,7 +302,7 @@ export function finalizeTaskSession(args) {
302
302
  if (!state || state.mode !== "auto")
303
303
  return;
304
304
  const match = state.stableId ? `bid:${state.stableId}` : state.item;
305
- if (args.status === "saved-local" || args.status === "saved-pushed") {
305
+ if (args.status === "saved-local" || args.status === "saved-pushed" || args.status === "no-upstream") {
306
306
  const completed = completeTask(args.phrenPath, state.project, match);
307
307
  if (!completed.ok) {
308
308
  debugLog(`task lifecycle complete ${state.project}: ${completed.error}`);
@@ -456,7 +456,6 @@ async function handleSearchKnowledge(ctx, { query, limit, project, type, tag, si
456
456
  }
457
457
  }
458
458
  async function handleGetProjectSummary(ctx, { name }) {
459
- const { phrenPath } = ctx;
460
459
  const db = ctx.db();
461
460
  const docs = queryDocRows(db, "SELECT project, filename, type, content, path FROM docs WHERE project = ?", [name]);
462
461
  if (!docs) {
@@ -194,8 +194,62 @@ export async function buildGraph(phrenPath, profile, focusProject, existingDb) {
194
194
  const MAX_UNTAGGED = isFocused ? Infinity : 100;
195
195
  let taggedCount = 0;
196
196
  let untaggedAdded = 0;
197
- for (const line of lines) {
198
- // Support legacy tagged findings like [decision], [pitfall], etc.
197
+ // Support heading-based findings: ## topic / ### title / paragraph
198
+ let currentHeadingTag;
199
+ let _currentHeadingTitle;
200
+ for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {
201
+ const line = lines[lineIdx];
202
+ // Track heading context for heading-based findings
203
+ const h2Match = line.match(/^##\s+([a-z_-]+)\s*$/i);
204
+ if (h2Match) {
205
+ currentHeadingTag = h2Match[1].toLowerCase();
206
+ _currentHeadingTitle = undefined;
207
+ continue;
208
+ }
209
+ const h3Match = line.match(/^###\s+(.+)$/);
210
+ if (h3Match && currentHeadingTag) {
211
+ // Read the next non-empty line as the body
212
+ let body = "";
213
+ for (let j = lineIdx + 1; j < lines.length; j++) {
214
+ const next = lines[j].trim();
215
+ if (!next)
216
+ continue;
217
+ if (next.startsWith("#"))
218
+ break;
219
+ body = next;
220
+ break;
221
+ }
222
+ const title = h3Match[1].trim();
223
+ const text = body ? `${title} — ${body}` : title;
224
+ if (text.length >= 10) {
225
+ if (taggedCount >= MAX_TAGGED)
226
+ continue;
227
+ const topic = classifyTopicForText(`[${currentHeadingTag}] ${text}`, projectTopics);
228
+ const scoreKey = entryScoreKey(project, "FINDINGS.md", `[${currentHeadingTag}] ${text}`);
229
+ const nodeId = stableId("finding", scoreKey);
230
+ taggedCount++;
231
+ nodes.push({
232
+ id: nodeId,
233
+ label: text.length > 55 ? `${text.slice(0, 52)}...` : text,
234
+ fullLabel: text,
235
+ group: `topic:${topic.slug}`,
236
+ refCount: taggedCount,
237
+ project,
238
+ tagged: true,
239
+ scoreKey,
240
+ scoreKeys: [scoreKey],
241
+ refDocs: [{ doc: `${project}/FINDINGS.md`, project, scoreKey }],
242
+ topicSlug: topic.slug,
243
+ topicLabel: topic.label,
244
+ });
245
+ links.push({ source: project, target: nodeId });
246
+ for (const other of exactProjectMentions(text, projectSet, project)) {
247
+ links.push({ source: project, target: other });
248
+ }
249
+ }
250
+ continue;
251
+ }
252
+ // Standard bullet-based findings: - [tag] text
199
253
  const tagMatch = line.match(/^-\s+\[([a-z_-]+)\]\s+(.+?)(?:\s*<!--.*-->)?$/);
200
254
  if (tagMatch) {
201
255
  if (taggedCount >= MAX_TAGGED)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@phren/cli",
3
- "version": "0.0.35",
3
+ "version": "0.0.37",
4
4
  "description": "Knowledge layer for AI agents. Phren learns and recalls.",
5
5
  "type": "module",
6
6
  "bin": {