@phren/cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +590 -0
- package/mcp/dist/capabilities/cli.js +61 -0
- package/mcp/dist/capabilities/index.js +15 -0
- package/mcp/dist/capabilities/mcp.js +61 -0
- package/mcp/dist/capabilities/types.js +57 -0
- package/mcp/dist/capabilities/vscode.js +61 -0
- package/mcp/dist/capabilities/web-ui.js +61 -0
- package/mcp/dist/cli-actions.js +302 -0
- package/mcp/dist/cli-config.js +580 -0
- package/mcp/dist/cli-extract.js +305 -0
- package/mcp/dist/cli-govern.js +371 -0
- package/mcp/dist/cli-graph.js +169 -0
- package/mcp/dist/cli-hooks-citations.js +44 -0
- package/mcp/dist/cli-hooks-context.js +56 -0
- package/mcp/dist/cli-hooks-globs.js +83 -0
- package/mcp/dist/cli-hooks-output.js +130 -0
- package/mcp/dist/cli-hooks-retrieval.js +2 -0
- package/mcp/dist/cli-hooks-session.js +1402 -0
- package/mcp/dist/cli-hooks.js +350 -0
- package/mcp/dist/cli-namespaces.js +989 -0
- package/mcp/dist/cli-ops.js +253 -0
- package/mcp/dist/cli-search.js +407 -0
- package/mcp/dist/cli.js +108 -0
- package/mcp/dist/content-archive.js +278 -0
- package/mcp/dist/content-citation.js +391 -0
- package/mcp/dist/content-dedup.js +622 -0
- package/mcp/dist/content-learning.js +472 -0
- package/mcp/dist/content-metadata.js +186 -0
- package/mcp/dist/content-validate.js +462 -0
- package/mcp/dist/core-finding.js +54 -0
- package/mcp/dist/core-project.js +36 -0
- package/mcp/dist/core-search.js +50 -0
- package/mcp/dist/data-access.js +400 -0
- package/mcp/dist/data-tasks.js +821 -0
- package/mcp/dist/embedding.js +344 -0
- package/mcp/dist/entrypoint.js +387 -0
- package/mcp/dist/finding-context.js +172 -0
- package/mcp/dist/finding-impact.js +181 -0
- package/mcp/dist/finding-journal.js +122 -0
- package/mcp/dist/finding-lifecycle.js +259 -0
- package/mcp/dist/governance-audit.js +22 -0
- package/mcp/dist/governance-locks.js +96 -0
- package/mcp/dist/governance-policy.js +648 -0
- package/mcp/dist/governance-scores.js +355 -0
- package/mcp/dist/hooks.js +449 -0
- package/mcp/dist/impact-scoring.js +22 -0
- package/mcp/dist/index-query.js +168 -0
- package/mcp/dist/index.js +205 -0
- package/mcp/dist/init-config.js +336 -0
- package/mcp/dist/init-preferences.js +62 -0
- package/mcp/dist/init-setup.js +1305 -0
- package/mcp/dist/init-shared.js +29 -0
- package/mcp/dist/init.js +1730 -0
- package/mcp/dist/link-checksums.js +62 -0
- package/mcp/dist/link-context.js +257 -0
- package/mcp/dist/link-doctor.js +591 -0
- package/mcp/dist/link-skills.js +212 -0
- package/mcp/dist/link.js +596 -0
- package/mcp/dist/logger.js +15 -0
- package/mcp/dist/machine-identity.js +38 -0
- package/mcp/dist/mcp-config.js +254 -0
- package/mcp/dist/mcp-data.js +315 -0
- package/mcp/dist/mcp-extract-facts.js +78 -0
- package/mcp/dist/mcp-extract.js +133 -0
- package/mcp/dist/mcp-finding.js +557 -0
- package/mcp/dist/mcp-graph.js +339 -0
- package/mcp/dist/mcp-hooks.js +256 -0
- package/mcp/dist/mcp-memory.js +58 -0
- package/mcp/dist/mcp-ops.js +328 -0
- package/mcp/dist/mcp-search.js +628 -0
- package/mcp/dist/mcp-session.js +651 -0
- package/mcp/dist/mcp-skills.js +189 -0
- package/mcp/dist/mcp-tasks.js +551 -0
- package/mcp/dist/mcp-types.js +7 -0
- package/mcp/dist/memory-ui-assets.js +6 -0
- package/mcp/dist/memory-ui-data.js +513 -0
- package/mcp/dist/memory-ui-graph.js +1910 -0
- package/mcp/dist/memory-ui-page.js +353 -0
- package/mcp/dist/memory-ui-scripts.js +1387 -0
- package/mcp/dist/memory-ui-server.js +1218 -0
- package/mcp/dist/memory-ui-styles.js +555 -0
- package/mcp/dist/memory-ui.js +9 -0
- package/mcp/dist/package-metadata.js +13 -0
- package/mcp/dist/phren-art.js +52 -0
- package/mcp/dist/phren-core.js +108 -0
- package/mcp/dist/phren-dotenv.js +67 -0
- package/mcp/dist/phren-paths.js +476 -0
- package/mcp/dist/proactivity.js +172 -0
- package/mcp/dist/profile-store.js +228 -0
- package/mcp/dist/project-config.js +85 -0
- package/mcp/dist/project-locator.js +25 -0
- package/mcp/dist/project-topics.js +1134 -0
- package/mcp/dist/provider-adapters.js +176 -0
- package/mcp/dist/runtime-profile.js +18 -0
- package/mcp/dist/session-checkpoints.js +131 -0
- package/mcp/dist/session-utils.js +68 -0
- package/mcp/dist/shared-content.js +8 -0
- package/mcp/dist/shared-embedding-cache.js +143 -0
- package/mcp/dist/shared-fragment-graph.js +456 -0
- package/mcp/dist/shared-governance.js +4 -0
- package/mcp/dist/shared-index.js +1334 -0
- package/mcp/dist/shared-ollama.js +192 -0
- package/mcp/dist/shared-paths.js +1 -0
- package/mcp/dist/shared-retrieval.js +796 -0
- package/mcp/dist/shared-search-fallback.js +375 -0
- package/mcp/dist/shared-sqljs.js +42 -0
- package/mcp/dist/shared-stemmer.js +171 -0
- package/mcp/dist/shared-vector-index.js +199 -0
- package/mcp/dist/shared.js +114 -0
- package/mcp/dist/shell-entry.js +209 -0
- package/mcp/dist/shell-input.js +943 -0
- package/mcp/dist/shell-palette.js +119 -0
- package/mcp/dist/shell-render.js +252 -0
- package/mcp/dist/shell-state-store.js +81 -0
- package/mcp/dist/shell-types.js +13 -0
- package/mcp/dist/shell-view-list.js +14 -0
- package/mcp/dist/shell-view.js +707 -0
- package/mcp/dist/shell.js +352 -0
- package/mcp/dist/skill-files.js +117 -0
- package/mcp/dist/skill-registry.js +279 -0
- package/mcp/dist/skill-state.js +28 -0
- package/mcp/dist/startup-embedding.js +57 -0
- package/mcp/dist/status.js +323 -0
- package/mcp/dist/synonyms.json +670 -0
- package/mcp/dist/task-hygiene.js +251 -0
- package/mcp/dist/task-lifecycle.js +347 -0
- package/mcp/dist/tasks-github.js +76 -0
- package/mcp/dist/telemetry.js +165 -0
- package/mcp/dist/test-global-setup.js +37 -0
- package/mcp/dist/tool-registry.js +104 -0
- package/mcp/dist/update.js +97 -0
- package/mcp/dist/utils.js +543 -0
- package/package.json +67 -0
- package/skills/README.md +7 -0
- package/skills/consolidate/SKILL.md +152 -0
- package/skills/discover/SKILL.md +175 -0
- package/skills/init/SKILL.md +216 -0
- package/skills/profiles/SKILL.md +121 -0
- package/skills/sync/SKILL.md +261 -0
- package/starter/README.md +74 -0
- package/starter/global/CLAUDE.md +89 -0
- package/starter/global/skills/humanize.md +30 -0
- package/starter/global/skills/pipeline.md +35 -0
- package/starter/global/skills/release.md +35 -0
- package/starter/machines.yaml +8 -0
- package/starter/my-api/.claude/skills/README.md +7 -0
- package/starter/my-api/CLAUDE.md +33 -0
- package/starter/my-api/FINDINGS.md +9 -0
- package/starter/my-api/summary.md +7 -0
- package/starter/my-api/tasks.md +7 -0
- package/starter/my-first-project/.claude/skills/README.md +7 -0
- package/starter/my-first-project/CLAUDE.md +49 -0
- package/starter/my-first-project/FINDINGS.md +24 -0
- package/starter/my-first-project/summary.md +11 -0
- package/starter/my-first-project/tasks.md +25 -0
- package/starter/my-frontend/.claude/skills/README.md +7 -0
- package/starter/my-frontend/CLAUDE.md +33 -0
- package/starter/my-frontend/FINDINGS.md +9 -0
- package/starter/my-frontend/summary.md +7 -0
- package/starter/my-frontend/tasks.md +7 -0
- package/starter/profiles/default.yaml +4 -0
- package/starter/profiles/personal.yaml +4 -0
- package/starter/profiles/work.yaml +4 -0
- package/starter/templates/README.md +7 -0
- package/starter/templates/frontend/CLAUDE.md +23 -0
- package/starter/templates/frontend/FINDINGS.md +7 -0
- package/starter/templates/frontend/reference/README.md +4 -0
- package/starter/templates/frontend/summary.md +7 -0
- package/starter/templates/frontend/tasks.md +11 -0
- package/starter/templates/library/CLAUDE.md +22 -0
- package/starter/templates/library/FINDINGS.md +7 -0
- package/starter/templates/library/reference/README.md +4 -0
- package/starter/templates/library/summary.md +7 -0
- package/starter/templates/library/tasks.md +11 -0
- package/starter/templates/monorepo/CLAUDE.md +21 -0
- package/starter/templates/monorepo/FINDINGS.md +7 -0
- package/starter/templates/monorepo/reference/README.md +4 -0
- package/starter/templates/monorepo/summary.md +7 -0
- package/starter/templates/monorepo/tasks.md +11 -0
- package/starter/templates/python-project/CLAUDE.md +21 -0
- package/starter/templates/python-project/FINDINGS.md +7 -0
- package/starter/templates/python-project/reference/README.md +4 -0
- package/starter/templates/python-project/summary.md +7 -0
- package/starter/templates/python-project/tasks.md +10 -0
|
@@ -0,0 +1,1334 @@
|
|
|
1
|
+
import * as fs from "fs";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import * as os from "os";
|
|
4
|
+
import * as crypto from "crypto";
|
|
5
|
+
import { globSync } from "glob";
|
|
6
|
+
import { debugLog, appendIndexEvent, getProjectDirs, collectNativeMemoryFiles, runtimeFile, homeDir, readRootManifest, } from "./shared.js";
|
|
7
|
+
import { getIndexPolicy, withFileLock } from "./shared-governance.js";
|
|
8
|
+
import { stripTaskDoneSection } from "./shared-content.js";
|
|
9
|
+
import { invalidateDfCache } from "./shared-search-fallback.js";
|
|
10
|
+
import { errorMessage } from "./utils.js";
|
|
11
|
+
import { beginUserFragmentBuildCache, endUserFragmentBuildCache, extractAndLinkFragments, ensureGlobalEntitiesTable, } from "./shared-fragment-graph.js";
|
|
12
|
+
import { bootstrapSqlJs } from "./shared-sqljs.js";
|
|
13
|
+
import { getProjectOwnershipMode, getProjectSourcePath, readProjectConfig } from "./project-config.js";
|
|
14
|
+
import { buildSourceDocKey, queryDocRows, queryRows, } from "./index-query.js";
|
|
15
|
+
import { classifyTopicForText, readProjectTopics, } from "./project-topics.js";
|
|
16
|
+
export { porterStem } from "./shared-stemmer.js";
|
|
17
|
+
export { cosineFallback } from "./shared-search-fallback.js";
|
|
18
|
+
export { queryFragmentLinks, queryFragmentLinks as queryEntityLinks, getFragmentBoostDocs, getFragmentBoostDocs as getEntityBoostDocs, ensureGlobalEntitiesTable, queryCrossProjectFragments, logFragmentMiss, logFragmentMiss as logEntityMiss, extractFragmentNames, extractFragmentNames as extractEntityNames, } from "./shared-fragment-graph.js";
|
|
19
|
+
export { buildSourceDocKey, decodeFiniteNumber, decodeStringRow, extractSnippet, getDocSourceKey, normalizeMemoryId, queryDocBySourceKey, queryDocRows, queryRows, rowToDoc, rowToDocWithRowid, } from "./index-query.js";
|
|
20
|
+
// ── Async embedding queue ───────────────────────────────────────────────────
// Pending embedding work, keyed by doc path so repeated writes to the same
// document coalesce into a single embedding request.
const _embQueue = new Map();
let _embTimer = null;
/**
 * Queue `content` for asynchronous embedding and (re)arm the debounce timer.
 * Calls landing within the 500ms window collapse into one drain pass.
 */
function scheduleEmbedding(phrenPath, docPath, content) {
    _embQueue.set(docPath, { phrenPath, content });
    if (_embTimer !== null) {
        clearTimeout(_embTimer);
    }
    _embTimer = setTimeout(() => {
        _embTimer = null;
        void _drainEmbQueue();
    }, 500);
    // Unref so the timer doesn't keep short-lived CLI processes alive
    _embTimer.unref();
}
|
|
31
|
+
/**
 * Drain the pending embedding queue: for each phren root, load its embedding
 * cache, embed every queued document not already cached for the current
 * model, then flush the cache once. All failures are best-effort — logged
 * under PHREN_DEBUG and never allowed to interrupt the drain.
 */
async function _drainEmbQueue() {
    if (_embQueue.size === 0)
        return;
    const { embedText, getEmbeddingModel } = await import("./shared-ollama.js");
    const { getEmbeddingCache } = await import("./shared-embedding-cache.js");
    const entries = [..._embQueue.entries()];
    _embQueue.clear();
    // Shared debug reporter (fixes the redundant `PHREN_DEBUG || PHREN_DEBUG` checks).
    const debugWrite = (stage, err) => {
        if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] embeddingQueue ${stage}: ${err instanceof Error ? err.message : String(err)}\n`);
    };
    // Group by phrenPath so we flush each cache once after all its entries are set.
    const byPhrenPath = new Map();
    for (const [docPath, { phrenPath, content }] of entries) {
        const bucket = byPhrenPath.get(phrenPath) ?? [];
        bucket.push({ docPath, content });
        byPhrenPath.set(phrenPath, bucket);
    }
    for (const [phrenPath, docs] of byPhrenPath) {
        const cache = getEmbeddingCache(phrenPath);
        try {
            await cache.load();
        }
        catch (err) {
            debugWrite("cacheLoad", err);
        }
        const model = getEmbeddingModel();
        for (const { docPath, content } of docs) {
            try {
                // Skip docs already embedded for this model.
                if (cache.get(docPath, model))
                    continue;
                const vec = await embedText(content);
                if (vec)
                    // Fix: reuse `model` so the set always matches the get check
                    // above, instead of re-querying getEmbeddingModel().
                    cache.set(docPath, model, vec);
            }
            catch (err) {
                debugWrite("embedText", err);
            }
        }
        try {
            await cache.flush();
        }
        catch (err) {
            debugWrite("cacheFlush", err);
        }
    }
}
|
|
77
|
+
// Maps well-known lowercase filenames to their indexed document type.
// Consulted by classifyFile() after directory-based classification.
const FILE_TYPE_MAP = {
    "claude.md": "claude",
    "summary.md": "summary",
    "findings.md": "findings",
    "reference.md": "reference",
    "tasks.md": "task",
    "changelog.md": "changelog",
    "canonical_memories.md": "canonical",
    "memory_queue.md": "memory-queue",
};
|
|
87
|
+
/**
 * True when `segment` appears as a complete path component of `relPath`.
 * Backslashes are treated as separators so Windows-style paths work too.
 */
function pathHasSegment(relPath, segment) {
    const components = relPath.split(/[\\/]/).filter(Boolean);
    return components.includes(segment);
}
|
|
91
|
+
/**
 * Classify an indexed file into a document type.
 * Location wins first: anything under a `reference/` or `skills/` directory
 * is classified by directory. Otherwise well-known filenames resolve through
 * FILE_TYPE_MAP, and everything else is "other".
 */
export function classifyFile(filename, relPath) {
    // Directory takes priority over filename-based classification
    if (pathHasSegment(relPath, "reference")) {
        return "reference";
    }
    if (pathHasSegment(relPath, "skills")) {
        return "skill";
    }
    return FILE_TYPE_MAP[filename.toLowerCase()] ?? "other";
}
|
|
102
|
+
// Matches a whole-line `@import <path>` directive (multiline + global flags).
const IMPORT_RE = /^@import\s+(.+)$/gm;
// Hard cap on nested @import depth to stop runaway recursion.
const MAX_IMPORT_DEPTH = 5;
/**
 * Internal recursive helper for resolveImports. Tracks `seen` (cycle detection) and `depth` (runaway
 * recursion guard) — callers should never pass these; use the public `resolveImports` instead.
 *
 * Each `@import p` is resolved under `<phrenPath>/global`; a lexical prefix
 * check blocks `..` traversal, then a realpath-based check blocks symlink
 * escapes. Blocked/missing/cyclic imports are replaced with HTML comments.
 */
function _resolveImportsRecursive(content, phrenPath, seen, depth) {
    if (depth >= MAX_IMPORT_DEPTH)
        return content;
    return content.replace(IMPORT_RE, (_match, importPath) => {
        const trimmed = importPath.trim();
        const globalRoot = path.resolve(phrenPath, "global");
        const resolved = path.join(globalRoot, trimmed);
        // Use lexical resolution first for the prefix check
        const lexical = path.resolve(resolved);
        if (lexical !== globalRoot && !lexical.startsWith(globalRoot + path.sep)) {
            return `<!-- @import blocked: path traversal -->`;
        }
        // Dereference symlinks before the prefix check to prevent symlink traversal attacks
        // (e.g. global/evil -> /etc/passwd would pass the lexical check but fail here).
        let normalized;
        try {
            normalized = fs.realpathSync.native(resolved);
        }
        catch (err) {
            // Fixed redundant `PHREN_DEBUG || PHREN_DEBUG` check.
            if (process.env.PHREN_DEBUG)
                process.stderr.write(`[phren] resolveImports realpath: ${err instanceof Error ? err.message : String(err)}\n`);
            return `<!-- @import not found: ${trimmed} -->`;
        }
        let normalizedGlobalRoot = globalRoot;
        try {
            normalizedGlobalRoot = fs.realpathSync.native(globalRoot);
        }
        catch {
            // Fall back to the lexical global path if the root cannot be resolved.
        }
        if (normalized !== normalizedGlobalRoot &&
            !normalized.startsWith(normalizedGlobalRoot + path.sep)) {
            return `<!-- @import blocked: symlink traversal -->`;
        }
        if (seen.has(normalized)) {
            return `<!-- @import cycle: ${trimmed} -->`;
        }
        try {
            // Copy `seen` per branch so sibling imports of the same file are allowed;
            // only a file importing (transitively) itself counts as a cycle.
            const childSeen = new Set(seen);
            childSeen.add(normalized);
            const imported = fs.readFileSync(normalized, "utf-8");
            return _resolveImportsRecursive(imported, phrenPath, childSeen, depth + 1);
        }
        catch (err) {
            if (process.env.PHREN_DEBUG)
                process.stderr.write(`[phren] resolveImports fileRead: ${err instanceof Error ? err.message : String(err)}\n`);
            return `<!-- @import error: ${trimmed} -->`;
        }
    });
}
|
|
158
|
+
/**
 * Resolve `@import shared/file.md` directives in document content.
 * The import path is resolved relative to the phren root (e.g. `shared/foo.md` -> `~/.phren/global/shared/foo.md`).
 * Circular imports are detected and skipped. Depth is capped to prevent runaway recursion.
 */
export function resolveImports(content, phrenPath) {
    const initialSeen = new Set();
    return _resolveImportsRecursive(content, phrenPath, initialSeen, 0);
}
|
|
166
|
+
/**
 * Write the current epoch-millis timestamp to `.runtime/phren-sentinel`
 * under the phren root, creating the directory if needed. Used as a cheap
 * freshness marker; failures are non-fatal and only logged under PHREN_DEBUG.
 */
function touchSentinel(phrenPath) {
    const dir = path.join(phrenPath, ".runtime");
    const sentinelPath = path.join(dir, "phren-sentinel");
    try {
        fs.mkdirSync(dir, { recursive: true });
        fs.writeFileSync(sentinelPath, Date.now().toString());
    }
    catch (err) {
        // Fixed redundant `PHREN_DEBUG || PHREN_DEBUG` check.
        if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] touchSentinel: ${err instanceof Error ? err.message : String(err)}\n`);
    }
}
|
|
178
|
+
/**
 * Compute a 16-hex-char fingerprint of everything feeding the search index:
 * project markdown files (path+mtime+size), per-project topic configs,
 * native memory files, global/ files (pulled via @import), manual entity
 * links, the index-policy file, the active profile, and the effective index
 * policy JSON. Any change to these inputs yields a different hash, which
 * invalidates the cached index.
 *
 * @param preGlobbed Optional pre-computed absolute file list; when given,
 *   the per-project glob pass is skipped and those paths are hashed as-is.
 */
function computePhrenHash(phrenPath, profile, preGlobbed) {
    const policy = getIndexPolicy(phrenPath);
    const hash = crypto.createHash("sha1");
    // Fold `keyPrefix + mtime + size` for one file into the hash. Stat
    // failures are skipped (logged under PHREN_DEBUG) so a file vanishing
    // mid-scan never aborts the fingerprint. Replaces five copies of the
    // same try/stat/update/catch and fixes the duplicated
    // `PHREN_DEBUG || PHREN_DEBUG` condition in each.
    const hashStat = (filePath, keyPrefix, label) => {
        try {
            const stat = fs.statSync(filePath);
            hash.update(`${keyPrefix}${stat.mtimeMs}:${stat.size}`);
        }
        catch (err) {
            if (process.env.PHREN_DEBUG)
                process.stderr.write(`[phren] computePhrenHash ${label}: ${err instanceof Error ? err.message : String(err)}\n`);
        }
    };
    const topicConfigEntries = getProjectDirs(phrenPath, profile)
        .map((dir) => path.join(dir, "topic-config.json"))
        .filter((configPath) => fs.existsSync(configPath));
    if (preGlobbed) {
        for (const f of preGlobbed)
            hashStat(f, `${f}:`, "skip");
    }
    else {
        const projectDirs = getProjectDirs(phrenPath, profile);
        const files = [];
        for (const dir of projectDirs) {
            const projectName = path.basename(dir);
            const config = readProjectConfig(phrenPath, projectName);
            const ownership = getProjectOwnershipMode(phrenPath, projectName, config);
            try {
                const matched = new Set();
                for (const pattern of policy.includeGlobs) {
                    const dot = policy.includeHidden || pattern.startsWith(".") || pattern.includes("/.");
                    const mdFiles = globSync(pattern, { cwd: dir, nodir: true, dot, ignore: policy.excludeGlobs });
                    for (const f of mdFiles)
                        matched.add(f);
                }
                for (const f of matched) {
                    // Repo-managed projects own CLAUDE.md in their repo, not here.
                    if (ownership === "repo-managed" && path.basename(f).toLowerCase() === "claude.md")
                        continue;
                    files.push(path.join(dir, f));
                }
                if (ownership === "repo-managed") {
                    for (const entry of getRepoManagedInstructionEntries(phrenPath, projectName)) {
                        files.push(entry.fullPath);
                    }
                }
            }
            catch (err) {
                if (process.env.PHREN_DEBUG)
                    process.stderr.write(`[phren] computePhrenHash globDir: ${err instanceof Error ? err.message : String(err)}\n`);
            }
        }
        files.sort();
        for (const f of files)
            hashStat(f, `${f}:`, "skip");
    }
    // Topic configs were hashed right after the file list in BOTH branches of
    // the original; hoisted here once — the resulting hash input is identical.
    for (const configPath of topicConfigEntries)
        hashStat(configPath, `topic-config:${configPath}:`, "topicConfig");
    for (const mem of collectNativeMemoryFiles())
        hashStat(mem.fullPath, `native:${mem.fullPath}:`, "skip");
    // Include global/ files (pulled via @import) so changes invalidate the cache
    const globalDir = path.join(phrenPath, "global");
    if (fs.existsSync(globalDir)) {
        const globalFiles = globSync("**/*.md", { cwd: globalDir, nodir: true }).sort();
        for (const f of globalFiles)
            hashStat(path.join(globalDir, f), `global:${f}:`, "skip");
    }
    // Include manual entity links so graph changes invalidate the cache
    const manualLinksPath = runtimeFile(phrenPath, "manual-links.json");
    if (fs.existsSync(manualLinksPath))
        hashStat(manualLinksPath, "manual-links:", "skip");
    const indexPolicyPath = path.join(phrenPath, ".governance", "index-policy.json");
    if (fs.existsSync(indexPolicyPath))
        hashStat(indexPolicyPath, "index-policy-file:", "skip");
    if (profile)
        hash.update(`profile:${profile}`);
    hash.update(`index-policy:${JSON.stringify(policy)}`);
    return hash.digest("hex").slice(0, 16);
}
|
|
313
|
+
// Name of the per-runtime JSON file (under `.runtime/`) that records content
// hashes of indexed documents; read by loadHashMap, written by saveHashMap.
const INDEX_HASHES_FILENAME = "index-hashes.json";
const INDEX_SCHEMA_VERSION = 3; // bump when FTS schema changes to force full rebuild
|
|
315
|
+
/**
 * SHA-256 hex digest of a file's UTF-8 text content.
 * Propagates read errors to the caller.
 */
function hashFileContent(filePath) {
    const hasher = crypto.createHash("sha256");
    hasher.update(fs.readFileSync(filePath, "utf-8"));
    return hasher.digest("hex");
}
|
|
319
|
+
/**
 * Load the persisted index hash map from `.runtime/index-hashes.json`.
 * Returns `{ hashes: {} }` when the file is missing or unreadable so callers
 * always receive a usable shape.
 */
function loadHashMap(phrenPath) {
    const runtimeDir = path.join(phrenPath, ".runtime");
    const hashFile = path.join(runtimeDir, INDEX_HASHES_FILENAME);
    try {
        if (fs.existsSync(hashFile)) {
            return JSON.parse(fs.readFileSync(hashFile, "utf-8"));
        }
    }
    catch (err) {
        // Fixed redundant `PHREN_DEBUG || PHREN_DEBUG` check.
        if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] loadHashMap: ${err instanceof Error ? err.message : String(err)}\n`);
    }
    return { hashes: {} };
}
|
|
333
|
+
/**
 * Persist the index hash map to `.runtime/index-hashes.json` under a file
 * lock. Merges with whatever is already on disk (new values win) and prunes
 * entries whose files no longer exist. Failures are logged via debugLog and
 * swallowed — indexing proceeds without a saved hash map.
 */
function saveHashMap(phrenPath, hashes) {
    const runtimeDir = path.join(phrenPath, ".runtime");
    try {
        fs.mkdirSync(runtimeDir, { recursive: true });
        const hashFile = path.join(runtimeDir, INDEX_HASHES_FILENAME);
        withFileLock(hashFile, () => {
            // Read-merge-write: load existing hashes, merge new values (new wins), then write.
            // Prune entries for files that no longer exist to prevent ghost paths from causing
            // repeated full rebuilds when deleted files are found in the hash map.
            let existing = {};
            try {
                const data = JSON.parse(fs.readFileSync(hashFile, "utf-8"));
                if (data.hashes && typeof data.hashes === "object")
                    existing = data.hashes;
            }
            catch (err) {
                // Fixed redundant `PHREN_DEBUG || PHREN_DEBUG` check.
                if (process.env.PHREN_DEBUG)
                    process.stderr.write(`[phren] saveHashMap readExisting: ${err instanceof Error ? err.message : String(err)}\n`);
            }
            const merged = { ...existing, ...hashes };
            // Remove entries for paths that no longer exist on disk
            for (const filePath of Object.keys(merged)) {
                if (!fs.existsSync(filePath)) {
                    delete merged[filePath];
                }
            }
            fs.writeFileSync(hashFile, JSON.stringify({ version: INDEX_SCHEMA_VERSION, hashes: merged }, null, 2));
        });
    }
    catch (err) {
        debugLog(`Failed to save index hash map: ${errorMessage(err)}`);
    }
}
|
|
366
|
+
// Matches legacy `reference/topics/<slug>.md` paths (either separator, case-insensitive);
// the topic slug is captured in group 1. Used by extractLegacyTopicSlug().
const LEGACY_TOPIC_REFERENCE_RE = /^reference[\\/]+topics[\\/]+([a-z0-9_-]+)\.md$/i;
|
|
367
|
+
/**
 * Normalize a path fragment for doc keys: forward slashes only, leading
 * slashes stripped.
 */
function normalizeDocSegment(value) {
    const forwardSlashed = value.replace(/\\/g, "/");
    return forwardSlashed.replace(/^\/+/, "");
}
|
|
370
|
+
/**
 * Stable source-doc key for an index entry: `<project>/<relFile>` when the
 * entry carries a relative file path, otherwise derived from its absolute
 * path via buildSourceDocKey.
 */
function getEntrySourceDocKey(entry, phrenPath) {
    if (!entry.relFile) {
        return buildSourceDocKey(entry.project, entry.fullPath, phrenPath, entry.filename);
    }
    const project = normalizeDocSegment(entry.project);
    const rel = normalizeDocSegment(entry.relFile);
    return `${project}/${rel}`;
}
|
|
376
|
+
/**
 * For a repo-managed project, list the CLAUDE.md instruction files that live
 * inside the project's source repository (repo root and `.claude/`).
 * Returns an empty list when the project has no configured source path or
 * neither candidate file exists.
 */
function getRepoManagedInstructionEntries(phrenPath, project) {
    const repoDir = getProjectSourcePath(phrenPath, project);
    if (!repoDir)
        return [];
    const entries = [];
    for (const relFile of ["CLAUDE.md", path.join(".claude", "CLAUDE.md")]) {
        const fullPath = path.join(repoDir, relFile);
        if (!fs.existsSync(fullPath))
            continue;
        const filename = path.basename(relFile);
        const type = classifyFile(filename, relFile);
        entries.push({ fullPath, project, filename, type, relFile });
    }
    return entries;
}
|
|
397
|
+
/**
 * Enumerate every file eligible for indexing: per-project docs matched by the
 * index policy globs (deduped across patterns, sorted), repo-managed
 * CLAUDE.md instruction files, global skills, and native memory files.
 * Returns structured entries plus a flat sorted list of absolute paths.
 */
function globAllFiles(phrenPath, profile) {
    const indexPolicy = getIndexPolicy(phrenPath);
    const entries = [];
    const allAbsolutePaths = [];
    for (const dir of getProjectDirs(phrenPath, profile)) {
        const projectName = path.basename(dir);
        const config = readProjectConfig(phrenPath, projectName);
        const ownership = getProjectOwnershipMode(phrenPath, projectName, config);
        // Union of all include-glob matches for this project, deduped.
        const seen = new Set();
        for (const pattern of indexPolicy.includeGlobs) {
            const dot = indexPolicy.includeHidden || pattern.startsWith(".") || pattern.includes("/.");
            const matches = globSync(pattern, {
                cwd: dir,
                nodir: true,
                dot,
                ignore: indexPolicy.excludeGlobs,
            });
            for (const rel of matches) {
                seen.add(rel);
            }
        }
        for (const relFile of [...seen].sort()) {
            const filename = path.basename(relFile);
            // Repo-managed projects: CLAUDE.md is sourced from the repo instead.
            if (ownership === "repo-managed" && filename.toLowerCase() === "claude.md")
                continue;
            const fullPath = path.join(dir, relFile);
            const type = classifyFile(filename, relFile);
            entries.push({ fullPath, project: projectName, filename, type, relFile });
            allAbsolutePaths.push(fullPath);
        }
        if (ownership === "repo-managed") {
            for (const entry of getRepoManagedInstructionEntries(phrenPath, projectName)) {
                entries.push(entry);
                allAbsolutePaths.push(entry.fullPath);
            }
        }
    }
    // Index global skills so search_knowledge can find them
    const globalSkillsDir = path.join(phrenPath, "global", "skills");
    if (fs.existsSync(globalSkillsDir)) {
        for (const relFile of globSync("**/*.md", { cwd: globalSkillsDir, nodir: true })) {
            const fullPath = path.join(globalSkillsDir, relFile);
            const filename = path.basename(relFile);
            entries.push({ fullPath, project: "global", filename, type: "skill", relFile: `skills/${relFile}` });
            allAbsolutePaths.push(fullPath);
        }
    }
    for (const mem of collectNativeMemoryFiles()) {
        entries.push({ fullPath: mem.fullPath, project: mem.project, filename: mem.file, type: "findings" });
        allAbsolutePaths.push(mem.fullPath);
    }
    allAbsolutePaths.sort();
    return { filePaths: allAbsolutePaths, entries };
}
|
|
453
|
+
/**
 * Sorted absolute paths of every document the index would cover.
 */
export function listIndexedDocumentPaths(phrenPath, profile) {
    const { filePaths } = globAllFiles(phrenPath, profile);
    return filePaths;
}
|
|
456
|
+
/**
 * Prepare raw document text for indexing: strip archived regions,
 * `<details>` blocks, and phren metadata/citation comments; resolve @import
 * directives; drop the done section of task docs; and optionally truncate
 * to `maxChars`.
 */
export function normalizeIndexedContent(content, type, phrenPath, maxChars) {
    const stripPatterns = [
        /<!-- phren:archive:start -->[\s\S]*?<!-- phren:archive:end -->/g,
        /<details>[\s\S]*?<\/details>/gi,
        /<!--\s*created:\s*.*?-->/g,
        /<!--\s*source:\s*.*?-->/g,
        /<!--\s*phren:cite\s+\{[\s\S]*?\}\s*-->/g,
    ];
    let normalized = content;
    for (const pattern of stripPatterns) {
        normalized = normalized.replace(pattern, "");
    }
    normalized = resolveImports(normalized, phrenPath);
    if (type === "task") {
        normalized = stripTaskDoneSection(normalized);
    }
    if (typeof maxChars === "number" && maxChars >= 0) {
        normalized = normalized.slice(0, maxChars);
    }
    return normalized;
}
|
|
472
|
+
/**
 * Read a file from disk, normalize it for search, and insert it as one row in
 * the FTS `docs` table. Optionally schedules an embedding of the (truncated)
 * indexed content.
 *
 * @param db - live sql.js database handle
 * @param entry - { fullPath, project, filename, type, relFile? } descriptor
 * @param phrenPath - phren root directory
 * @param opts - { scheduleEmbeddings?: boolean }
 * @returns true on success, false when the file could not be read/inserted
 *          (errors are swallowed deliberately — indexing is best-effort).
 */
function insertFileIntoIndex(db, entry, phrenPath, opts) {
  try {
    const raw = fs.readFileSync(entry.fullPath, "utf-8");
    const content = normalizeIndexedContent(raw, entry.type, phrenPath);
    const indexedContent = applyReferenceTopicHints(entry, content, phrenPath);
    db.run("INSERT INTO docs (project, filename, type, content, path) VALUES (?, ?, ?, ?, ?)", [entry.project, entry.filename, entry.type, indexedContent, entry.fullPath]);
    if (opts?.scheduleEmbeddings) {
      // Only the first 8000 chars are embedded to bound embedding cost.
      scheduleEmbedding(phrenPath, entry.fullPath, indexedContent.slice(0, 8000));
    }
    return true;
  }
  catch (err) {
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] insertFileIntoIndex: ${err instanceof Error ? err.message : String(err)}\n`);
    return false;
  }
}
|
|
489
|
+
/** Lowercase a raw topic/keyword string and strip every non-alphanumeric run. */
function normalizeTopicTokenSegment(raw) {
  const lowered = raw.toLowerCase();
  return lowered.replace(/[^a-z0-9]+/g, "");
}
|
|
492
|
+
/**
 * Extract the legacy topic slug from an entry's relative file path when it
 * matches LEGACY_TOPIC_REFERENCE_RE (the historical topic-doc layout).
 * Returns the lowercased slug, or null when the path doesn't match.
 */
function extractLegacyTopicSlug(entry) {
  const normalizedRel = (entry.relFile || "").replace(/\\/g, "/");
  const match = normalizedRel.match(LEGACY_TOPIC_REFERENCE_RE);
  return match ? match[1].toLowerCase() : null;
}
|
|
499
|
+
/**
 * Decide which configured topics a reference document belongs to.
 * Selection order: (1) the legacy filename slug when it still exists in the
 * project's topic config, (2) any topic (except "general") whose keywords
 * appear in the content, (3) a classifier fallback so at least one topic hint
 * is always returned. Non-reference entries get no topics.
 */
function detectReferenceTopics(entry, content, phrenPath) {
  if (entry.type !== "reference")
    return [];
  const { topics } = readProjectTopics(phrenPath, entry.project);
  if (!topics.length)
    return [];
  const topicBySlug = new Map(topics.map((t) => [t.slug, t]));
  const haystack = content.toLowerCase();
  const selected = [];
  const seenSlugs = new Set();
  const pushUnique = (topic) => {
    if (!topic || seenSlugs.has(topic.slug))
      return;
    seenSlugs.add(topic.slug);
    selected.push(topic);
  };
  // Backward compatibility: keep legacy topic docs pinned to their filename slug
  // when that slug still exists in topic-config (or built-in topics).
  const legacySlug = extractLegacyTopicSlug(entry);
  if (legacySlug) {
    pushUnique(topicBySlug.get(legacySlug));
  }
  // Content-based topic tags for any reference doc shape (not only reference/topics/<slug>.md).
  for (const topic of topics) {
    if (topic.slug === "general")
      continue;
    if (topic.keywords.some((keyword) => keyword && haystack.includes(keyword))) {
      pushUnique(topic);
    }
  }
  // Preserve previous behavior: always include at least one topic hint.
  if (!selected.length) {
    pushUnique(classifyTopicForText(content, topics));
  }
  return selected;
}
|
|
534
|
+
/**
 * Append synthetic search-hint tokens (phrentopic<slug>, phrentopickeyword<kw>)
 * to a reference document's indexed content so topic-scoped FTS queries can
 * match it. Returns the content unchanged when no topics are detected.
 */
function applyReferenceTopicHints(entry, content, phrenPath) {
  const topics = detectReferenceTopics(entry, content, phrenPath);
  if (!topics.length)
    return content;
  const hintTokens = new Set();
  for (const topic of topics) {
    // Empty slug normalizes to the "general" bucket.
    hintTokens.add(`phrentopic${normalizeTopicTokenSegment(topic.slug) || "general"}`);
    for (const keyword of topic.keywords) {
      const token = normalizeTopicTokenSegment(keyword);
      if (token) {
        hintTokens.add(`phrentopickeyword${token}`);
      }
    }
  }
  return `${content}\n\n${[...hintTokens].join(" ")}`.trimEnd();
}
|
|
551
|
+
/**
 * Remove all entity-graph rows derived from a single document.
 * Looks up the doc's project/filename from the docs table (falling back to the
 * supplied values when the row is already gone), builds its source-doc key,
 * then deletes matching entity_links and global_entities rows.
 * No-ops when the project cannot be determined.
 */
function deleteEntityLinksForDocPath(db, phrenPath, docPath, fallbackProject, fallbackFilename) {
  const docRows = queryDocRows(db, "SELECT project, filename, type, content, path FROM docs WHERE path = ? LIMIT 1", [docPath]);
  const project = docRows?.[0]?.project ?? fallbackProject;
  if (!project)
    return;
  const filename = docRows?.[0]?.filename ?? fallbackFilename;
  const sourceDoc = buildSourceDocKey(project, docPath, phrenPath, filename);
  db.run("DELETE FROM entity_links WHERE source_doc = ?", [sourceDoc]);
  // Q19: also purge global_entities rows for this doc so cross_project_entities
  // never returns deleted/stale documents.
  try {
    db.run("DELETE FROM global_entities WHERE doc_key = ?", [sourceDoc]);
  }
  catch (err) {
    // global_entities may be absent in older cached DBs; failure here is non-fatal.
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] deleteEntityLinksForDocPath globalEntities: ${err instanceof Error ? err.message : String(err)}\n`);
  }
}
|
|
569
|
+
/**
 * Incrementally update a single file in the FTS index.
 * Deletes the old record (entity links + doc row), re-reads and re-inserts the
 * file if it still exists, re-extracts entities for "findings" docs, refreshes
 * the per-file hash map, and touches the sentinel to invalidate caches.
 * For deleted files, the stale embedding is removed asynchronously.
 * All sub-steps are best-effort: failures are logged (under PHREN_DEBUG) but
 * never abort the update.
 */
export function updateFileInIndex(db, filePath, phrenPath) {
  const resolvedPath = path.resolve(filePath);
  // Delete old record: entity links first, then the doc row itself.
  try {
    deleteEntityLinksForDocPath(db, phrenPath, resolvedPath);
  }
  catch (err) {
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] updateFileInIndex deleteEntityLinks: ${err instanceof Error ? err.message : String(err)}\n`);
  }
  try {
    db.run("DELETE FROM docs WHERE path = ?", [resolvedPath]);
  }
  catch (err) {
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] updateFileInIndex deleteDocs: ${err instanceof Error ? err.message : String(err)}\n`);
  }
  // Re-insert if file still exists
  if (fs.existsSync(resolvedPath)) {
    const filename = path.basename(resolvedPath);
    // Determine project from path: the file should be under phrenPath/<project>/
    const rel = path.relative(path.resolve(phrenPath), resolvedPath);
    const project = rel.split(path.sep)[0];
    const relFile = rel.split(path.sep).slice(1).join(path.sep);
    const type = classifyFile(filename, relFile);
    const entry = { fullPath: resolvedPath, project, filename, type, relFile };
    if (insertFileIntoIndex(db, entry, phrenPath, { scheduleEmbeddings: true })) {
      // Re-extract entities for finding files
      if (type === "findings") {
        try {
          const content = fs.readFileSync(resolvedPath, "utf-8");
          extractAndLinkFragments(db, content, getEntrySourceDocKey(entry, phrenPath), phrenPath);
        }
        catch (err) {
          if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] updateFileInIndex entityExtraction: ${err instanceof Error ? err.message : String(err)}\n`);
        }
      }
    }
    // Update the hash map so the next incremental build sees this file as current.
    try {
      const hashData = loadHashMap(phrenPath);
      hashData.hashes[resolvedPath] = hashFileContent(resolvedPath);
      saveHashMap(phrenPath, hashData.hashes);
    }
    catch (err) {
      if (process.env.PHREN_DEBUG)
        process.stderr.write(`[phren] updateFileInIndex hashMap: ${err instanceof Error ? err.message : String(err)}\n`);
    }
  }
  else {
    // Remove stale embedding if file was deleted. Fire-and-forget: the index
    // update must not block on the embedding cache flush.
    void (async () => {
      try {
        const { getEmbeddingCache } = await import("./shared-embedding-cache.js");
        const cache = getEmbeddingCache(phrenPath);
        cache.delete(resolvedPath);
        await cache.flush();
      }
      catch (err) {
        if (process.env.PHREN_DEBUG)
          process.stderr.write(`[phren] updateFileInIndex embeddingDelete: ${err instanceof Error ? err.message : String(err)}\n`);
      }
    })();
  }
  touchSentinel(phrenPath);
  invalidateDfCache();
}
|
|
642
|
+
/**
 * Read the sentinel that caches the phren hash to skip full recomputation.
 * Returns { hash, computedAt } when the sentinel file exists and is well-formed,
 * null otherwise (missing file, bad JSON, or wrong field types).
 */
function readHashSentinel(phrenPath) {
  try {
    const sentinelPath = runtimeFile(phrenPath, "index-sentinel.json");
    if (!fs.existsSync(sentinelPath))
      return null;
    const data = JSON.parse(fs.readFileSync(sentinelPath, "utf-8"));
    // Validate shape: both fields must be present with the expected types.
    if (typeof data.hash === "string" && typeof data.computedAt === "number") {
      return { hash: data.hash, computedAt: data.computedAt };
    }
  }
  catch (err) {
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] readHashSentinel: ${err instanceof Error ? err.message : String(err)}\n`);
  }
  return null;
}
|
|
659
|
+
/**
 * Persist the phren hash sentinel with the current timestamp.
 * Best-effort: write failures are only logged (under PHREN_DEBUG), never thrown.
 */
function writeHashSentinel(phrenPath, hash) {
  try {
    const sentinelPath = runtimeFile(phrenPath, "index-sentinel.json");
    fs.writeFileSync(sentinelPath, JSON.stringify({ hash, computedAt: Date.now() }));
  }
  catch (err) {
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] writeHashSentinel: ${err instanceof Error ? err.message : String(err)}\n`);
  }
}
|
|
669
|
+
/**
 * Heuristic freshness check for the hash sentinel: if any of the key
 * directories has an mtime newer than sentinel.computedAt, the sentinel is
 * stale. Unreadable/missing directories are skipped (treated as "not newer").
 * NOTE(review): directory mtimes typically only change for direct child
 * add/remove/rename — edits deeper in the tree may not be caught; confirm
 * callers tolerate this.
 */
function isSentinelFresh(phrenPath, sentinel) {
  // Check mtime of key directories — if any are newer than the sentinel, it's stale
  const dirsToCheck = [
    phrenPath,
    path.join(phrenPath, ".governance"),
    path.join(phrenPath, ".runtime"),
  ];
  for (const dir of dirsToCheck) {
    try {
      const stat = fs.statSync(dir);
      if (stat.mtimeMs > sentinel.computedAt)
        return false;
    }
    catch (err) {
      if (process.env.PHREN_DEBUG)
        process.stderr.write(`[phren] isSentinelFresh statDir: ${err instanceof Error ? err.message : String(err)}\n`);
    }
  }
  return true;
}
|
|
689
|
+
/**
 * Attempt to restore the entity graph (entities, entity_links, global_entities) from a
 * previously persisted JSON snapshot. Returns true if the graph was loaded, false if the
 * caller must run full extraction instead.
 * The cache is considered stale when any currently-indexed file is newer than
 * the snapshot file (or cannot be stat'ed). Links and global entities whose
 * source doc is no longer in the current file set are skipped on restore.
 */
function loadCachedEntityGraph(db, graphPath, allFiles, phrenPath) {
  if (!fs.existsSync(graphPath))
    return false;
  try {
    const graph = JSON.parse(fs.readFileSync(graphPath, 'utf8'));
    const graphMtime = fs.statSync(graphPath).mtimeMs;
    const anyNewer = allFiles.some(f => {
      try {
        return fs.statSync(f.fullPath).mtimeMs > graphMtime;
      }
      catch (err) {
        // Unreadable file: treat as newer so we fall back to full extraction.
        if (process.env.PHREN_DEBUG)
          process.stderr.write(`[phren] loadCachedEntityGraph statFile: ${err instanceof Error ? err.message : String(err)}\n`);
        return true;
      }
    });
    if (!anyNewer && graph.entities && graph.links) {
      // Build set of valid source doc keys from current file set
      const validDocKeys = new Set(allFiles.map(f => getEntrySourceDocKey(f, phrenPath)));
      for (const [id, name, type] of graph.entities) {
        db.run("INSERT OR IGNORE INTO entities (id, name, type, first_seen_at) VALUES (?, ?, ?, ?)", [id, name, type, new Date().toISOString().slice(0, 10)]);
      }
      for (const [sourceId, targetId, relType, sourceDoc] of graph.links) {
        // Skip links for docs that no longer exist in the current file set
        if (sourceDoc && !validDocKeys.has(sourceDoc))
          continue;
        db.run("INSERT OR IGNORE INTO entity_links (source_id, target_id, rel_type, source_doc) VALUES (?, ?, ?, ?)", [sourceId, targetId, relType, sourceDoc]);
      }
      // Q19: also restore global_entities from cached graph so cross_project_entities
      // is not empty after a cached-graph rebuild path.
      if (Array.isArray(graph.globalEntities)) {
        for (const [entity, project, docKey] of graph.globalEntities) {
          // Skip global entities whose source doc no longer exists
          if (docKey && !validDocKeys.has(docKey))
            continue;
          try {
            db.run("INSERT OR IGNORE INTO global_entities (entity, project, doc_key) VALUES (?, ?, ?)", [entity, project, docKey]);
          }
          catch (err) {
            if (process.env.PHREN_DEBUG)
              process.stderr.write(`[phren] loadCachedEntityGraph globalEntitiesInsert2: ${err instanceof Error ? err.message : String(err)}\n`);
          }
        }
      }
      else {
        // Older cache without globalEntities: re-derive from entity_links + entities
        try {
          const rows = db.exec(`SELECT e.name, el.source_doc FROM entity_links el
JOIN entities e ON el.target_id = e.id
WHERE el.source_doc IS NOT NULL`)[0]?.values ?? [];
          for (const [name, sourceDoc] of rows) {
            // The project is the first path segment of the source-doc key.
            const projectMatch = typeof sourceDoc === "string" ? sourceDoc.match(/^([^/]+)\//) : null;
            const proj = projectMatch ? projectMatch[1] : null;
            if (proj && name) {
              try {
                db.run("INSERT OR IGNORE INTO global_entities (entity, project, doc_key) VALUES (?, ?, ?)", [name, proj, sourceDoc]);
              }
              catch (err) {
                if (process.env.PHREN_DEBUG)
                  process.stderr.write(`[phren] loadCachedEntityGraph globalEntitiesInsert: ${err instanceof Error ? err.message : String(err)}\n`);
              }
            }
          }
        }
        catch (err) {
          if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] entityGraph globalEntitiesRestore: ${err instanceof Error ? err.message : String(err)}\n`);
        }
      }
      return true;
    }
  }
  catch (err) {
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] entityGraph cacheLoad: ${err instanceof Error ? err.message : String(err)}\n`);
  }
  return false;
}
|
|
772
|
+
/** Merge manual entity links (written by link_findings tool) into the live DB. Always runs on
 * every build so hand-authored links survive a full index rebuild.
 * Stale links (whose source doc is no longer indexed) are pruned and the
 * manual-links.json file is atomically rewritten under a file lock.
 * Per-link failures are isolated: one bad link never blocks the rest. */
function mergeManualLinks(db, phrenPath) {
  const manualLinksPath = runtimeFile(phrenPath, 'manual-links.json');
  if (!fs.existsSync(manualLinksPath))
    return;
  try {
    const manualLinks = JSON.parse(fs.readFileSync(manualLinksPath, 'utf8'));
    let pruned = false;
    const validLinks = [];
    for (const link of manualLinks) {
      try {
        // Validate: skip manual links whose sourceDoc no longer exists in the index.
        // NOTE(review): the docs FTS table created in this file has columns
        // (project, filename, type, content, path) — "source_key" is not among
        // them. If the live schema really lacks it, this query throws and every
        // link falls into the catch below instead of being merged — confirm.
        const docCheck = queryRows(db, "SELECT 1 FROM docs WHERE source_key = ? LIMIT 1", [link.sourceDoc]);
        if (!docCheck || docCheck.length === 0) {
          if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] manualLinks: pruning stale link to "${link.sourceDoc}"\n`);
          pruned = true;
          continue;
        }
        validLinks.push(link);
        // Ensure both endpoints exist as entity rows, then link them.
        db.run("INSERT OR IGNORE INTO entities (name, type, first_seen_at) VALUES (?, ?, ?)", [link.entity, link.entityType, new Date().toISOString().slice(0, 10)]);
        db.run("INSERT OR IGNORE INTO entities (name, type, first_seen_at) VALUES (?, ?, ?)", [link.sourceDoc, "document", new Date().toISOString().slice(0, 10)]);
        const eRes = db.exec("SELECT id FROM entities WHERE name = ? AND type = ?", [link.entity, link.entityType]);
        const dRes = db.exec("SELECT id FROM entities WHERE name = ? AND type = ?", [link.sourceDoc, "document"]);
        const eId = eRes?.[0]?.values?.[0]?.[0];
        const dId = dRes?.[0]?.values?.[0]?.[0];
        if (eId != null && dId != null) {
          db.run("INSERT OR IGNORE INTO entity_links (source_id, target_id, rel_type, source_doc) VALUES (?, ?, ?, ?)", [dId, eId, link.relType, link.sourceDoc]);
        }
        // Also populate global_entities so manual links are discoverable via cross_project_entities
        const projectMatch = link.sourceDoc.match(/^([^/]+)\//);
        if (projectMatch) {
          try {
            db.run("INSERT OR IGNORE INTO global_entities (entity, project, doc_key) VALUES (?, ?, ?)", [link.entity, projectMatch[1], link.sourceDoc]);
          }
          catch (err) {
            if (process.env.PHREN_DEBUG)
              process.stderr.write(`[phren] manualLinks globalEntities: ${err instanceof Error ? err.message : String(err)}\n`);
          }
        }
      }
      catch (err) {
        if (process.env.PHREN_DEBUG)
          process.stderr.write(`[phren] manualLinks entry: ${err instanceof Error ? err.message : String(err)}\n`);
      }
    }
    // Rewrite manual-links.json if stale entries were pruned (tmp + rename for atomicity).
    if (pruned) {
      try {
        withFileLock(manualLinksPath, () => {
          const tmpPath = manualLinksPath + `.tmp-${crypto.randomUUID()}`;
          fs.writeFileSync(tmpPath, JSON.stringify(validLinks, null, 2));
          fs.renameSync(tmpPath, manualLinksPath);
        });
      }
      catch (err) {
        if (process.env.PHREN_DEBUG)
          process.stderr.write(`[phren] manualLinks prune write: ${err instanceof Error ? err.message : String(err)}\n`);
      }
    }
  }
  catch (err) {
    if (process.env.PHREN_DEBUG)
      process.stderr.write(`[phren] mergeManualLinks: ${err instanceof Error ? err.message : String(err)}\n`);
  }
}
|
|
839
|
+
async function buildIndexImpl(phrenPath, profile) {
|
|
840
|
+
const t0 = Date.now();
|
|
841
|
+
beginUserFragmentBuildCache(phrenPath, getProjectDirs(phrenPath, profile).map(dir => path.basename(dir)));
|
|
842
|
+
try {
|
|
843
|
+
// ── Cache dir + hash sentinel ─────────────────────────────────────────────
|
|
844
|
+
let userSuffix;
|
|
845
|
+
try {
|
|
846
|
+
userSuffix = String(os.userInfo().uid);
|
|
847
|
+
}
|
|
848
|
+
catch (err) {
|
|
849
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
850
|
+
process.stderr.write(`[phren] buildIndexImpl userInfo: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
851
|
+
userSuffix = crypto.createHash("sha1").update(homeDir()).digest("hex").slice(0, 12);
|
|
852
|
+
}
|
|
853
|
+
const cacheDir = path.join(os.tmpdir(), `phren-fts-${userSuffix}`);
|
|
854
|
+
// Fast path: if the sentinel is fresh, skip the expensive glob + hash computation
|
|
855
|
+
const sentinel = readHashSentinel(phrenPath);
|
|
856
|
+
let hash;
|
|
857
|
+
let globResult;
|
|
858
|
+
if (sentinel && isSentinelFresh(phrenPath, sentinel)) {
|
|
859
|
+
hash = sentinel.hash;
|
|
860
|
+
const cacheFile = path.join(cacheDir, `${hash}.db`);
|
|
861
|
+
if (fs.existsSync(cacheFile)) {
|
|
862
|
+
// Sentinel cache hit — defer full glob until we actually need it
|
|
863
|
+
globResult = globAllFiles(phrenPath, profile);
|
|
864
|
+
}
|
|
865
|
+
else {
|
|
866
|
+
// Cache file was cleaned up, fall through to full computation
|
|
867
|
+
globResult = globAllFiles(phrenPath, profile);
|
|
868
|
+
hash = computePhrenHash(phrenPath, profile, globResult.filePaths);
|
|
869
|
+
writeHashSentinel(phrenPath, hash);
|
|
870
|
+
}
|
|
871
|
+
}
|
|
872
|
+
else {
|
|
873
|
+
globResult = globAllFiles(phrenPath, profile);
|
|
874
|
+
hash = computePhrenHash(phrenPath, profile, globResult.filePaths);
|
|
875
|
+
writeHashSentinel(phrenPath, hash);
|
|
876
|
+
}
|
|
877
|
+
const cacheFile = path.join(cacheDir, `${hash}.db`);
|
|
878
|
+
const SQL = await bootstrapSqlJs();
|
|
879
|
+
// ── Incremental update (cache hit path) ───────────────────────────────────
|
|
880
|
+
// Load saved per-file hashes for incremental updates
|
|
881
|
+
const savedHashData = loadHashMap(phrenPath);
|
|
882
|
+
const savedHashes = savedHashData.hashes;
|
|
883
|
+
const schemaChanged = savedHashData.version !== INDEX_SCHEMA_VERSION;
|
|
884
|
+
// Try loading cached DB for incremental update
|
|
885
|
+
if (!schemaChanged && fs.existsSync(cacheFile)) {
|
|
886
|
+
try {
|
|
887
|
+
const cached = fs.readFileSync(cacheFile);
|
|
888
|
+
let db;
|
|
889
|
+
let shouldCloseDb = true;
|
|
890
|
+
try {
|
|
891
|
+
db = new SQL.Database(cached);
|
|
892
|
+
// If OS cleaned /tmp and the file was recreated as empty, the DB will have
|
|
893
|
+
// 0 docs even though savedHashes has full content. Treat as cache miss so
|
|
894
|
+
// the stale hash map doesn't drive an incremental update against an empty DB.
|
|
895
|
+
const docCountResult = db.exec("SELECT COUNT(*) FROM docs");
|
|
896
|
+
const docCount = docCountResult?.[0]?.values?.[0]?.[0] ?? 0;
|
|
897
|
+
if (docCount === 0 && globResult.entries.length > 0) {
|
|
898
|
+
throw new Error("cached DB is empty, forcing full rebuild");
|
|
899
|
+
}
|
|
900
|
+
// Schema migration: add first_seen_at column if missing
|
|
901
|
+
try {
|
|
902
|
+
db.run("ALTER TABLE entities ADD COLUMN first_seen_at TEXT");
|
|
903
|
+
}
|
|
904
|
+
catch { /* column already exists — expected */ }
|
|
905
|
+
// Compute current file hashes and determine what changed
|
|
906
|
+
const allFiles = globResult.entries;
|
|
907
|
+
const currentHashes = {};
|
|
908
|
+
const changedFiles = [];
|
|
909
|
+
const newFiles = [];
|
|
910
|
+
for (const entry of allFiles) {
|
|
911
|
+
try {
|
|
912
|
+
const fileHash = hashFileContent(entry.fullPath);
|
|
913
|
+
currentHashes[entry.fullPath] = fileHash;
|
|
914
|
+
if (!(entry.fullPath in savedHashes)) {
|
|
915
|
+
newFiles.push(entry);
|
|
916
|
+
}
|
|
917
|
+
else if (savedHashes[entry.fullPath] !== fileHash) {
|
|
918
|
+
changedFiles.push(entry);
|
|
919
|
+
}
|
|
920
|
+
}
|
|
921
|
+
catch (err) {
|
|
922
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
923
|
+
process.stderr.write(`[phren] buildIndex hashFile: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
924
|
+
}
|
|
925
|
+
}
|
|
926
|
+
// Check for files missing from the index (deleted files)
|
|
927
|
+
const currentPaths = new Set(Object.keys(currentHashes));
|
|
928
|
+
const missingFromIndex = Object.keys(savedHashes).filter(p => !currentPaths.has(p));
|
|
929
|
+
// Force full rebuild if >20% of saved files are missing
|
|
930
|
+
const totalSaved = Object.keys(savedHashes).length;
|
|
931
|
+
if (totalSaved > 0 && missingFromIndex.length / totalSaved > 0.2) {
|
|
932
|
+
debugLog(`>20% files missing (${missingFromIndex.length}/${totalSaved}), forcing full rebuild`);
|
|
933
|
+
// Fall through to full rebuild below
|
|
934
|
+
}
|
|
935
|
+
else if (changedFiles.length === 0 && newFiles.length === 0 && missingFromIndex.length === 0) {
|
|
936
|
+
// Nothing changed, pure cache hit
|
|
937
|
+
debugLog(`Loaded FTS index from cache (${hash.slice(0, 8)}) in ${Date.now() - t0}ms`);
|
|
938
|
+
appendIndexEvent(phrenPath, {
|
|
939
|
+
event: "build_index",
|
|
940
|
+
cache: "hit",
|
|
941
|
+
hash: hash.slice(0, 12),
|
|
942
|
+
elapsedMs: Date.now() - t0,
|
|
943
|
+
profile: profile || "",
|
|
944
|
+
});
|
|
945
|
+
shouldCloseDb = false;
|
|
946
|
+
return db;
|
|
947
|
+
}
|
|
948
|
+
else {
|
|
949
|
+
// Incremental update: apply each file change atomically to avoid losing docs on crash.
|
|
950
|
+
const changedPaths = new Set(changedFiles.map(entry => entry.fullPath));
|
|
951
|
+
db.run("BEGIN");
|
|
952
|
+
try {
|
|
953
|
+
for (const missingPath of missingFromIndex) {
|
|
954
|
+
try {
|
|
955
|
+
deleteEntityLinksForDocPath(db, phrenPath, missingPath);
|
|
956
|
+
}
|
|
957
|
+
catch (err) {
|
|
958
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
959
|
+
process.stderr.write(`[phren] buildIndex deleteEntityLinksForMissing: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
960
|
+
}
|
|
961
|
+
try {
|
|
962
|
+
db.run("DELETE FROM docs WHERE path = ?", [missingPath]);
|
|
963
|
+
}
|
|
964
|
+
catch (err) {
|
|
965
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
966
|
+
process.stderr.write(`[phren] buildIndex deleteDocForMissing: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
967
|
+
}
|
|
968
|
+
}
|
|
969
|
+
db.run("COMMIT");
|
|
970
|
+
}
|
|
971
|
+
catch (err) {
|
|
972
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
973
|
+
process.stderr.write(`[phren] buildIndex incrementalDeleteCommit: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
974
|
+
try {
|
|
975
|
+
db.run("ROLLBACK");
|
|
976
|
+
}
|
|
977
|
+
catch (e2) {
|
|
978
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
979
|
+
process.stderr.write(`[phren] buildIndex incrementalDeleteRollback: ${e2 instanceof Error ? e2.message : String(e2)}\n`);
|
|
980
|
+
}
|
|
981
|
+
}
|
|
982
|
+
let updatedCount = 0;
|
|
983
|
+
for (const entry of [...changedFiles, ...newFiles]) {
|
|
984
|
+
db.run("BEGIN");
|
|
985
|
+
try {
|
|
986
|
+
if (changedPaths.has(entry.fullPath)) {
|
|
987
|
+
const sourceDocKey = getEntrySourceDocKey(entry, phrenPath);
|
|
988
|
+
db.run("DELETE FROM entity_links WHERE source_doc = ?", [sourceDocKey]);
|
|
989
|
+
// Q19: keep global_entities in sync with entity_links on updates
|
|
990
|
+
try {
|
|
991
|
+
db.run("DELETE FROM global_entities WHERE doc_key = ?", [sourceDocKey]);
|
|
992
|
+
}
|
|
993
|
+
catch { /* table may not exist in older cached DBs */ }
|
|
994
|
+
db.run("DELETE FROM docs WHERE path = ?", [entry.fullPath]);
|
|
995
|
+
}
|
|
996
|
+
if (insertFileIntoIndex(db, entry, phrenPath, { scheduleEmbeddings: true })) {
|
|
997
|
+
updatedCount++;
|
|
998
|
+
if (entry.type === "findings") {
|
|
999
|
+
try {
|
|
1000
|
+
const content = fs.readFileSync(entry.fullPath, "utf-8");
|
|
1001
|
+
extractAndLinkFragments(db, content, getEntrySourceDocKey(entry, phrenPath), phrenPath);
|
|
1002
|
+
}
|
|
1003
|
+
catch (err) {
|
|
1004
|
+
debugLog(`entity extraction failed: ${errorMessage(err)}`);
|
|
1005
|
+
}
|
|
1006
|
+
}
|
|
1007
|
+
}
|
|
1008
|
+
db.run("COMMIT");
|
|
1009
|
+
}
|
|
1010
|
+
catch (err) {
|
|
1011
|
+
try {
|
|
1012
|
+
db.run("ROLLBACK");
|
|
1013
|
+
}
|
|
1014
|
+
catch (e2) {
|
|
1015
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
1016
|
+
process.stderr.write(`[phren] buildIndex perFileRollback: ${e2 instanceof Error ? e2.message : String(e2)}\n`);
|
|
1017
|
+
}
|
|
1018
|
+
throw err;
|
|
1019
|
+
}
|
|
1020
|
+
}
|
|
1021
|
+
saveHashMap(phrenPath, currentHashes);
|
|
1022
|
+
touchSentinel(phrenPath);
|
|
1023
|
+
invalidateDfCache();
|
|
1024
|
+
// Save updated cache
|
|
1025
|
+
try {
|
|
1026
|
+
fs.mkdirSync(cacheDir, { recursive: true });
|
|
1027
|
+
fs.writeFileSync(cacheFile, db.export());
|
|
1028
|
+
}
|
|
1029
|
+
catch (err) {
|
|
1030
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
1031
|
+
process.stderr.write(`[phren] buildIndex incrementalCacheSave: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
1032
|
+
}
|
|
1033
|
+
const incMs = Date.now() - t0;
|
|
1034
|
+
debugLog(`Incremental FTS update: ${updatedCount} changed, ${missingFromIndex.length} removed in ${incMs}ms`);
|
|
1035
|
+
appendIndexEvent(phrenPath, {
|
|
1036
|
+
event: "build_index",
|
|
1037
|
+
cache: "incremental",
|
|
1038
|
+
hash: hash.slice(0, 12),
|
|
1039
|
+
files: updatedCount,
|
|
1040
|
+
removed: missingFromIndex.length,
|
|
1041
|
+
elapsedMs: incMs,
|
|
1042
|
+
profile: profile || "",
|
|
1043
|
+
});
|
|
1044
|
+
shouldCloseDb = false;
|
|
1045
|
+
return db;
|
|
1046
|
+
}
|
|
1047
|
+
}
|
|
1048
|
+
finally {
|
|
1049
|
+
if (shouldCloseDb) {
|
|
1050
|
+
db?.close();
|
|
1051
|
+
}
|
|
1052
|
+
}
|
|
1053
|
+
}
|
|
1054
|
+
catch (err) {
|
|
1055
|
+
debugLog(`Cache load failed, rebuilding index: ${errorMessage(err)}`);
|
|
1056
|
+
}
|
|
1057
|
+
}
|
|
1058
|
+
// ── Full rebuild ──────────────────────────────────────────────────────────
|
|
1059
|
+
const db = new SQL.Database();
|
|
1060
|
+
db.run(`
|
|
1061
|
+
CREATE VIRTUAL TABLE docs USING fts5(
|
|
1062
|
+
project, filename, type, content, path,
|
|
1063
|
+
tokenize = "porter unicode61"
|
|
1064
|
+
);
|
|
1065
|
+
`);
|
|
1066
|
+
// Entity graph tables for lightweight reference graph
|
|
1067
|
+
db.run(`CREATE TABLE IF NOT EXISTS entities (id INTEGER PRIMARY KEY, name TEXT NOT NULL, type TEXT NOT NULL, first_seen_at TEXT, UNIQUE(name, type))`);
|
|
1068
|
+
db.run(`CREATE TABLE IF NOT EXISTS entity_links (source_id INTEGER REFERENCES entities(id), target_id INTEGER REFERENCES entities(id), rel_type TEXT NOT NULL, source_doc TEXT, PRIMARY KEY (source_id, target_id, rel_type))`);
|
|
1069
|
+
// Q20: Cross-project entity index
|
|
1070
|
+
ensureGlobalEntitiesTable(db);
|
|
1071
|
+
const allFiles = globResult.entries;
|
|
1072
|
+
const newHashes = {};
|
|
1073
|
+
let fileCount = 0;
|
|
1074
|
+
// Try loading cached entity graph
|
|
1075
|
+
const graphPath = runtimeFile(phrenPath, 'entity-graph.json');
|
|
1076
|
+
const entityGraphLoaded = loadCachedEntityGraph(db, graphPath, allFiles, phrenPath);
|
|
1077
|
+
for (const entry of allFiles) {
|
|
1078
|
+
try {
|
|
1079
|
+
newHashes[entry.fullPath] = hashFileContent(entry.fullPath);
|
|
1080
|
+
}
|
|
1081
|
+
catch (err) {
|
|
1082
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
1083
|
+
process.stderr.write(`[phren] computePhrenHash skip: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
1084
|
+
}
|
|
1085
|
+
if (insertFileIntoIndex(db, entry, phrenPath, { scheduleEmbeddings: true })) {
|
|
1086
|
+
fileCount++;
|
|
1087
|
+
// Extract entities from finding files (if not loaded from cache)
|
|
1088
|
+
if (!entityGraphLoaded && entry.type === "findings") {
|
|
1089
|
+
try {
|
|
1090
|
+
const content = fs.readFileSync(entry.fullPath, "utf-8");
|
|
1091
|
+
extractAndLinkFragments(db, content, getEntrySourceDocKey(entry, phrenPath), phrenPath);
|
|
1092
|
+
}
|
|
1093
|
+
catch (err) {
|
|
1094
|
+
debugLog(`entity extraction failed: ${errorMessage(err)}`);
|
|
1095
|
+
}
|
|
1096
|
+
}
|
|
1097
|
+
}
|
|
1098
|
+
}
|
|
1099
|
+
// Persist entity graph for next build
|
|
1100
|
+
if (!entityGraphLoaded) {
|
|
1101
|
+
try {
|
|
1102
|
+
const entityRows = db.exec("SELECT id, name, type FROM entities")[0]?.values ?? [];
|
|
1103
|
+
const linkRows = db.exec("SELECT source_id, target_id, rel_type, source_doc FROM entity_links")[0]?.values ?? [];
|
|
1104
|
+
// Q19: also persist global_entities so the cached-graph rebuild path can
|
|
1105
|
+
// restore it without re-running extraction on every file.
|
|
1106
|
+
const globalEntityRows = db.exec("SELECT entity, project, doc_key FROM global_entities")[0]?.values ?? [];
|
|
1107
|
+
fs.writeFileSync(graphPath, JSON.stringify({ entities: entityRows, links: linkRows, globalEntities: globalEntityRows, ts: Date.now() }));
|
|
1108
|
+
}
|
|
1109
|
+
catch (err) {
|
|
1110
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
1111
|
+
process.stderr.write(`[phren] buildIndex entityGraphPersist: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
1112
|
+
}
|
|
1113
|
+
}
|
|
1114
|
+
// Always merge manual links (survive rebuild)
|
|
1115
|
+
mergeManualLinks(db, phrenPath);
|
|
1116
|
+
// ── Finalize: persist hashes, save cache, log ─────────────────────────────
|
|
1117
|
+
saveHashMap(phrenPath, newHashes);
|
|
1118
|
+
touchSentinel(phrenPath);
|
|
1119
|
+
invalidateDfCache();
|
|
1120
|
+
const buildMs = Date.now() - t0;
|
|
1121
|
+
debugLog(`Built FTS index: ${fileCount} files from ${getProjectDirs(phrenPath, profile).length} projects in ${buildMs}ms`);
|
|
1122
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
1123
|
+
console.error(`Indexed ${fileCount} files from ${getProjectDirs(phrenPath, profile).length} projects`);
|
|
1124
|
+
appendIndexEvent(phrenPath, {
|
|
1125
|
+
event: "build_index",
|
|
1126
|
+
cache: "miss",
|
|
1127
|
+
hash: hash.slice(0, 12),
|
|
1128
|
+
files: fileCount,
|
|
1129
|
+
projects: getProjectDirs(phrenPath, profile).length,
|
|
1130
|
+
elapsedMs: buildMs,
|
|
1131
|
+
profile: profile || "",
|
|
1132
|
+
});
|
|
1133
|
+
try {
|
|
1134
|
+
fs.mkdirSync(cacheDir, { recursive: true });
|
|
1135
|
+
fs.writeFileSync(cacheFile, db.export());
|
|
1136
|
+
for (const f of fs.readdirSync(cacheDir)) {
|
|
1137
|
+
if (!f.endsWith(".db") || f === `${hash}.db`)
|
|
1138
|
+
continue;
|
|
1139
|
+
try {
|
|
1140
|
+
fs.unlinkSync(path.join(cacheDir, f));
|
|
1141
|
+
}
|
|
1142
|
+
catch (err) {
|
|
1143
|
+
if ((process.env.PHREN_DEBUG || process.env.PHREN_DEBUG))
|
|
1144
|
+
process.stderr.write(`[phren] buildIndex staleCacheCleanup: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
1145
|
+
}
|
|
1146
|
+
}
|
|
1147
|
+
debugLog(`Saved FTS index cache (${hash.slice(0, 8)}) — total ${Date.now() - t0}ms`);
|
|
1148
|
+
}
|
|
1149
|
+
catch (err) {
|
|
1150
|
+
debugLog(`Failed to save FTS index cache: ${errorMessage(err)}`);
|
|
1151
|
+
}
|
|
1152
|
+
return db;
|
|
1153
|
+
}
|
|
1154
|
+
finally {
|
|
1155
|
+
endUserFragmentBuildCache(phrenPath);
|
|
1156
|
+
}
|
|
1157
|
+
}
|
|
1158
|
+
/**
 * Build a brand-new, empty index database carrying the full schema: the FTS5
 * docs table, the entity-graph tables, and the cross-project global-entities
 * table (created via ensureGlobalEntitiesTable).
 * @param {object} SQL - Initialized sql.js module.
 * @returns {object} A fresh sql.js Database with empty tables.
 */
function createEmptyIndexDb(SQL) {
    const db = new SQL.Database();
    db.run(`
    CREATE VIRTUAL TABLE docs USING fts5(
      project, filename, type, content, path,
      tokenize = "porter unicode61"
    );
  `);
    const entityDdl = [
        `CREATE TABLE IF NOT EXISTS entities (id INTEGER PRIMARY KEY, name TEXT NOT NULL, type TEXT NOT NULL, first_seen_at TEXT, UNIQUE(name, type))`,
        `CREATE TABLE IF NOT EXISTS entity_links (source_id INTEGER REFERENCES entities(id), target_id INTEGER REFERENCES entities(id), rel_type TEXT NOT NULL, source_doc TEXT, PRIMARY KEY (source_id, target_id, rel_type))`,
    ];
    for (const ddl of entityDdl) {
        db.run(ddl);
    }
    ensureGlobalEntitiesTable(db);
    return db;
}
|
|
1171
|
+
/**
 * Check whether another process currently holds the cross-process FTS rebuild
 * lock. A lock file is considered held only while its mtime is within the
 * staleness window (PHREN_FILE_LOCK_STALE_MS, default 30000ms), so a crashed
 * process cannot wedge rebuilds forever.
 * @param {string} phrenPath - Root of the phren data directory.
 * @returns {boolean} True if a fresh lock file exists.
 */
function isRebuildLockHeld(phrenPath) {
    const lockTarget = runtimeFile(phrenPath, "index-rebuild");
    const lockPath = lockTarget + ".lock";
    try {
        const stat = fs.statSync(lockPath);
        // Double fallback: the "|| 30000" also covers parseInt returning NaN.
        const staleThreshold = Number.parseInt(process.env.PHREN_FILE_LOCK_STALE_MS || "30000", 10) || 30000;
        return Date.now() - stat.mtimeMs <= staleThreshold;
    }
    catch (err) {
        // A missing lock file (ENOENT) is the normal "not held" case — only
        // log genuinely unexpected stat failures. (Fixed: the condition used
        // to test process.env.PHREN_DEBUG twice.)
        if (err?.code !== "ENOENT" && process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] isRebuildLockHeld stat: ${err instanceof Error ? err.message : String(err)}\n`);
        return false;
    }
}
|
|
1185
|
+
/**
 * Open the cached FTS snapshot for this phrenPath/profile if one exists on
 * disk, otherwise return a fresh empty index database. Used when another
 * process holds the rebuild lock so callers never block on a full rebuild.
 * @param {string} phrenPath - Root of the phren data directory.
 * @param {string|undefined} profile - Optional profile narrowing the file set.
 * @returns {Promise<object>} A sql.js Database (cached snapshot or empty schema).
 */
async function loadIndexSnapshotOrEmpty(phrenPath, profile) {
    const SQL = await bootstrapSqlJs();
    // Per-user cache-dir suffix: uid where available, otherwise a stable hash
    // of the home directory (os.userInfo() can throw, e.g. no uid on Windows).
    let userSuffix;
    try {
        userSuffix = String(os.userInfo().uid);
    }
    catch (err) {
        // Fixed: condition used to test process.env.PHREN_DEBUG twice.
        if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] loadIndexSnapshotOrEmpty userInfo: ${err instanceof Error ? err.message : String(err)}\n`);
        userSuffix = crypto.createHash("sha1").update(homeDir()).digest("hex").slice(0, 12);
    }
    const cacheDir = path.join(os.tmpdir(), `phren-fts-${userSuffix}`);
    // The cache key hashes the profile plus the globbed file set, so any file
    // addition/removal selects a different snapshot file.
    const globResult = globAllFiles(phrenPath, profile);
    const hash = computePhrenHash(phrenPath, profile, globResult.filePaths);
    const cacheFile = path.join(cacheDir, `${hash}.db`);
    if (fs.existsSync(cacheFile)) {
        try {
            return new SQL.Database(fs.readFileSync(cacheFile));
        }
        catch (err) {
            // Corrupt/unreadable snapshot: fall through to the empty DB.
            debugLog(`Failed to open cached FTS snapshot while rebuild lock held: ${errorMessage(err)}`);
        }
    }
    debugLog("FTS rebuild already in progress; returning empty snapshot");
    return createEmptyIndexDb(SQL);
}
|
|
1211
|
+
// In-process serialization of buildIndex calls: every new build chains onto
// this promise so two overlapping calls never touch SQLite concurrently.
let buildLock = Promise.resolve(null);
// Staleness debounce: a rebuild completed within this window lets buildIndex
// hand back the cached DB without re-running the glob + hash pipeline.
// Tunable through PHREN_INDEX_DEBOUNCE_MS (default 5000ms, capped at 60s).
const INDEX_DEBOUNCE_DEFAULT_MS = 5000;
let _lastBuiltDb = null;
let _lastBuildTimestamp = 0;
let _lastBuildKey = "";
/**
 * Resolve the debounce window (ms) from PHREN_INDEX_DEBOUNCE_MS.
 * Unset, non-numeric, or negative values fall back to the default;
 * anything larger than 60000 is clamped to 60000.
 * @returns {number} Debounce window in milliseconds.
 */
function getIndexDebounceMs() {
    const raw = process.env.PHREN_INDEX_DEBOUNCE_MS;
    if (!raw) {
        return INDEX_DEBOUNCE_DEFAULT_MS;
    }
    const parsed = Number.parseInt(raw, 10);
    const invalid = Number.isNaN(parsed) || parsed < 0;
    return invalid ? INDEX_DEBOUNCE_DEFAULT_MS : Math.min(parsed, 60000);
}
|
|
1229
|
+
/**
 * Probe whether a database handle is still usable by issuing a trivial
 * query; a closed sql.js handle throws on exec.
 * @param {object} db - sql.js Database instance.
 * @returns {boolean} True when the probe query succeeds.
 */
function isDbOpen(db) {
    try {
        db.exec("SELECT 1");
    }
    catch {
        return false;
    }
    return true;
}
|
|
1238
|
+
/**
 * Public entry point: build (or reuse) the FTS index database.
 * Fast path — if the last build targeted the same phrenPath/profile, finished
 * inside the debounce window, and its DB handle is still open, return it
 * directly. Slow path — chain onto the in-process build lock so concurrent
 * callers serialize, then record the result for future debounce hits.
 * @param {string} phrenPath - Root of the phren data directory.
 * @param {string|undefined} profile - Optional profile narrowing the file set.
 * @returns {Promise<object>} The sql.js index Database.
 */
export async function buildIndex(phrenPath, profile) {
    const debounceMs = getIndexDebounceMs();
    const buildKey = `${phrenPath}|${profile ?? ""}`;
    const age = Date.now() - _lastBuildTimestamp;
    const reusable = debounceMs > 0
        && _lastBuiltDb !== null
        && _lastBuildKey === buildKey
        && age < debounceMs
        && isDbOpen(_lastBuiltDb);
    if (reusable) {
        debugLog(`buildIndex debounce hit (${age}ms < ${debounceMs}ms)`);
        return _lastBuiltDb;
    }
    const pending = buildLock.then(() => _buildIndexGuarded(phrenPath, profile));
    // Advance the lock chain; swallow rejections there so one failed build
    // does not stall every later caller (the rejection still reaches us below).
    buildLock = pending.catch(() => null);
    const db = await pending;
    _lastBuiltDb = db;
    _lastBuildTimestamp = Date.now();
    _lastBuildKey = buildKey;
    return db;
}
|
|
1258
|
+
/**
 * Acquire the cross-process rebuild lock and run buildIndexImpl under a 30s
 * timeout. If another process already holds the lock (detected up front or
 * via the lock-acquisition error), fall back to a cached snapshot or an
 * empty database instead of blocking.
 * @param {string} phrenPath - Root of the phren data directory.
 * @param {string|undefined} profile - Optional profile narrowing the file set.
 * @returns {Promise<object>} The sql.js index Database.
 * @throws Re-throws any build failure other than lock contention.
 */
async function _buildIndexGuarded(phrenPath, profile) {
    const lockTarget = runtimeFile(phrenPath, "index-rebuild");
    if (isRebuildLockHeld(phrenPath)) {
        return loadIndexSnapshotOrEmpty(phrenPath, profile);
    }
    try {
        return await withFileLock(lockTarget, async () => {
            let timer;
            const timeout = new Promise((_, reject) => {
                timer = setTimeout(() => reject(new Error("buildIndex timed out after 30s")), 30000);
            });
            // Start the build once and observe its eventual rejection: if the
            // timeout wins the race, the still-running build promise would
            // otherwise reject unobserved and crash the process with an
            // unhandled rejection on modern Node.
            const build = buildIndexImpl(phrenPath, profile);
            build.catch(() => { });
            try {
                return await Promise.race([build, timeout]);
            }
            finally {
                clearTimeout(timer);
            }
        });
    }
    catch (err) {
        const message = errorMessage(err);
        // Lock contention is expected under concurrency — degrade gracefully.
        if (message.includes("could not acquire lock")) {
            debugLog(`FTS rebuild skipped because another process holds the rebuild lock: ${message}`);
            return loadIndexSnapshotOrEmpty(phrenPath, profile);
        }
        throw err;
    }
}
|
|
1286
|
+
/**
 * Find the FTS cache file for a specific phrenPath+profile.
 * Recomputes the same content hash buildIndex uses, so a hit means the cache
 * is current for the present file set.
 * @param {string} phrenPath - Root of the phren data directory.
 * @param {string|undefined} profile - Optional profile narrowing the file set.
 * @returns {{exists: boolean, sizeBytes?: number}} Existence flag plus size when present.
 */
export function findFtsCacheForPath(phrenPath, profile) {
    // Per-user cache-dir suffix: uid where available, otherwise a stable hash
    // of the home directory (os.userInfo() can throw on some platforms).
    let userSuffix;
    try {
        userSuffix = String(os.userInfo().uid);
    }
    catch (err) {
        // Fixed: condition used to test process.env.PHREN_DEBUG twice.
        if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] findFtsCacheForPath userInfo: ${err instanceof Error ? err.message : String(err)}\n`);
        userSuffix = crypto.createHash("sha1").update(homeDir()).digest("hex").slice(0, 12);
    }
    const cacheDir = path.join(os.tmpdir(), `phren-fts-${userSuffix}`);
    try {
        const globResult = globAllFiles(phrenPath, profile);
        const hash = computePhrenHash(phrenPath, profile, globResult.filePaths);
        const cacheFile = path.join(cacheDir, `${hash}.db`);
        if (fs.existsSync(cacheFile)) {
            const stat = fs.statSync(cacheFile);
            return { exists: true, sizeBytes: stat.size };
        }
    }
    catch (err) {
        // Best-effort probe: treat any glob/hash/stat failure as "no cache".
        if (process.env.PHREN_DEBUG)
            process.stderr.write(`[phren] findFtsCacheForPath: ${err instanceof Error ? err.message : String(err)}\n`);
    }
    return { exists: false };
}
|
|
1313
|
+
/**
 * Determine which registered project contains the given working directory.
 * In project-local install mode the manifest's primary project wins outright;
 * otherwise the project whose source path is the longest prefix of cwd is
 * chosen (deepest match wins).
 * @param {string} phrenPath - Root of the phren data directory.
 * @param {string} cwd - Working directory to classify.
 * @param {string|undefined} profile - Optional profile narrowing the project set.
 * @returns {string|null} Project name, or null when nothing matches.
 */
export function detectProject(phrenPath, cwd, profile) {
    const manifest = readRootManifest(phrenPath);
    if (manifest?.installMode === "project-local") {
        return manifest.primaryProject || null;
    }
    const resolvedCwd = path.resolve(cwd);
    let winner = null;
    let winnerLen = -1;
    for (const dir of getProjectDirs(phrenPath, profile)) {
        const projectName = path.basename(dir);
        const sourcePath = getProjectSourcePath(phrenPath, projectName);
        if (!sourcePath) {
            continue;
        }
        // cwd must equal the source path or live strictly beneath it.
        const inside = resolvedCwd === sourcePath || resolvedCwd.startsWith(sourcePath + path.sep);
        if (inside && sourcePath.length > winnerLen) {
            winner = projectName;
            winnerLen = sourcePath.length;
        }
    }
    return winner || null;
}
|