lore-memory 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +666 -0
  3. package/bin/lore.js +108 -0
  4. package/package.json +53 -0
  5. package/src/commands/drafts.js +144 -0
  6. package/src/commands/edit.js +30 -0
  7. package/src/commands/embed.js +63 -0
  8. package/src/commands/export.js +76 -0
  9. package/src/commands/graph.js +80 -0
  10. package/src/commands/init.js +110 -0
  11. package/src/commands/log.js +149 -0
  12. package/src/commands/mine.js +38 -0
  13. package/src/commands/onboard.js +112 -0
  14. package/src/commands/score.js +88 -0
  15. package/src/commands/search.js +49 -0
  16. package/src/commands/serve.js +21 -0
  17. package/src/commands/stale.js +41 -0
  18. package/src/commands/status.js +59 -0
  19. package/src/commands/watch.js +67 -0
  20. package/src/commands/why.js +58 -0
  21. package/src/lib/budget.js +57 -0
  22. package/src/lib/config.js +52 -0
  23. package/src/lib/drafts.js +104 -0
  24. package/src/lib/embeddings.js +97 -0
  25. package/src/lib/entries.js +59 -0
  26. package/src/lib/format.js +23 -0
  27. package/src/lib/git.js +18 -0
  28. package/src/lib/graph.js +51 -0
  29. package/src/lib/guard.js +13 -0
  30. package/src/lib/index.js +84 -0
  31. package/src/lib/nlp.js +106 -0
  32. package/src/lib/relevance.js +81 -0
  33. package/src/lib/scorer.js +188 -0
  34. package/src/lib/sessions.js +51 -0
  35. package/src/lib/stale.js +27 -0
  36. package/src/mcp/server.js +52 -0
  37. package/src/mcp/tools/drafts.js +54 -0
  38. package/src/mcp/tools/log.js +93 -0
  39. package/src/mcp/tools/overview.js +141 -0
  40. package/src/mcp/tools/search.js +96 -0
  41. package/src/mcp/tools/stale.js +88 -0
  42. package/src/mcp/tools/why.js +91 -0
  43. package/src/watcher/comments.js +113 -0
  44. package/src/watcher/graph.js +149 -0
  45. package/src/watcher/index.js +134 -0
  46. package/src/watcher/signals.js +217 -0
  47. package/src/watcher/staleness.js +104 -0
@@ -0,0 +1,96 @@
1
+ 'use strict';
2
+
3
+ const { readIndex } = require('../../lib/index');
4
+ const { readEntry } = require('../../lib/entries');
5
+ const { enforceBudget, formatEntry } = require('../../lib/budget');
6
+ const { readConfig } = require('../../lib/config');
7
+
8
// MCP tool descriptor for keyword/semantic search over Lore entries.
const toolDefinition = {
  name: 'lore_search',
  description: 'Search Lore entries by keyword or semantic meaning. Returns matching architectural decisions, invariants, gotchas, and graveyard entries.',
  inputSchema: {
    type: 'object',
    properties: {
      query: {
        type: 'string',
        description: 'Search query — keyword or natural language description',
      },
      // Restricts results to a single entry category when provided.
      type: {
        type: 'string',
        enum: ['decision', 'invariant', 'gotcha', 'graveyard'],
        description: 'Optional: filter by entry type',
      },
    },
    required: ['query'],
  },
};
27
+
28
/**
 * Handle a lore_search call.
 *
 * Performs a case-insensitive substring search over each entry's title,
 * context, alternatives, tradeoffs and tags. When the text search finds
 * nothing, falls back to embedding-based semantic search (best-effort;
 * skipped silently when the embeddings backend is unavailable).
 *
 * @param {{ query: string, type?: string }} args - tool arguments
 * @returns {Promise<object>} MCP content response; `isError` is set on failure
 */
async function handler(args) {
  const { query, type: filterType } = args;
  const config = readConfig();
  const budget = (config.mcp && config.mcp.tokenBudget) ? config.mcp.tokenBudget : 4000;

  try {
    const index = readIndex();
    const q = query.toLowerCase();
    const matches = [];

    for (const entryPath of Object.values(index.entries)) {
      const entry = readEntry(entryPath);
      if (!entry) continue;
      if (filterType && entry.type !== filterType) continue;

      const searchable = [
        entry.title,
        entry.context,
        ...(entry.alternatives || []),
        entry.tradeoffs || '',
        ...(entry.tags || []),
      ].join(' ').toLowerCase();

      if (searchable.includes(q)) {
        matches.push(entry);
      }
    }

    // Try semantic search if text search found nothing and embeddings exist
    if (matches.length === 0) {
      try {
        // Lazy require: the embeddings backend (Ollama) is optional.
        const { findSimilar } = require('../../lib/embeddings');
        const allIds = Object.keys(index.entries);
        const similar = await findSimilar(query, allIds, 5);
        for (const { id, score } of similar) {
          if (score < 0.5) continue; // drop weak semantic matches
          const entry = readEntry(index.entries[id]);
          if (entry && (!filterType || entry.type === filterType)) {
            matches.push(entry);
          }
        }
      } catch (e) {
        // Ollama not available — skip semantic search silently
      }
    }

    if (matches.length === 0) {
      return {
        content: [{ type: 'text', text: `No entries found for: "${query}"` }],
      };
    }

    // Prefer the budget-trimmed rendering; only format every match as a
    // fallback when enforceBudget yields nothing. (Previously the full
    // formatting ran unconditionally, even when its result was discarded.)
    const text = enforceBudget(matches, budget)
      || matches.map(e => formatEntry(e)).join('\n\n---\n\n');

    return {
      content: [{ type: 'text', text }],
    };
  } catch (e) {
    return {
      content: [{ type: 'text', text: `Error: ${e.message}` }],
      isError: true,
    };
  }
}

module.exports = { toolDefinition, handler };
@@ -0,0 +1,88 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs-extra');
4
+ const path = require('path');
5
+ const { readIndex } = require('../../lib/index');
6
+ const { readEntry } = require('../../lib/entries');
7
+ const { checkStaleness } = require('../../lib/stale');
8
+ const { checkPatternStaleness } = require('../../watcher/staleness');
9
+
10
// MCP tool descriptor for staleness checks. Takes no arguments.
const toolDefinition = {
  name: 'lore_stale',
  description: 'Check which Lore entries may be outdated. Combines mtime-based staleness (files changed since entry was written) with pattern-based semantic checks (e.g. new HTTP calls in performance paths, architecture shifts).',
  inputSchema: {
    type: 'object',
    properties: {},
    required: [],
  },
};
19
+
20
/**
 * Handle a lore_stale call.
 *
 * Walks every indexed entry and reports files referenced by the entry that
 * changed after the entry was written (mtime staleness), augmenting each
 * report with pattern-based checks against the file's current contents.
 *
 * @returns {Promise<object>} MCP content response; `isError` is set on failure
 */
async function handler() {
  try {
    const index = readIndex();
    const projectRoot = process.cwd();
    const staleItems = [];

    // Only the entry paths are needed; the index keys were previously
    // destructured into an unused `id` binding.
    for (const entryPath of Object.values(index.entries)) {
      const entry = readEntry(entryPath);
      if (!entry) continue;

      // mtime-based staleness (existing)
      const staleFiles = checkStaleness(entry);
      for (const s of staleFiles) {
        const daysText = s.daysAgo === 0 ? 'today' : `${s.daysAgo} day${s.daysAgo === 1 ? '' : 's'} ago`;

        // Pattern-based semantic staleness on the changed file
        const reasons = [];
        try {
          const absPath = path.resolve(projectRoot, s.filepath);
          if (fs.existsSync(absPath)) {
            const code = fs.readFileSync(absPath, 'utf8');
            reasons.push(...checkPatternStaleness(entry, s.filepath, code));
          }
        } catch (e) { /* best-effort: unreadable files contribute no reasons */ }

        staleItems.push({
          id: entry.id,
          title: entry.title,
          type: entry.type,
          file: s.filepath,
          changedDaysAgo: s.daysAgo,
          daysText,
          reasons,
        });
      }
    }

    if (staleItems.length === 0) {
      return {
        content: [{ type: 'text', text: 'All Lore entries are up to date.' }],
      };
    }

    const lines = ['The following Lore entries may be outdated:\n'];
    for (const item of staleItems) {
      lines.push(`• [${item.type.toUpperCase()}] ${item.title} (${item.id})`);
      lines.push(` File changed: ${item.file} (${item.daysText})`);
      // Iterating an empty reasons array is a no-op, so no guard is needed.
      for (const r of item.reasons) {
        lines.push(` ⚠ ${r}`);
      }
      lines.push(` Review with: lore edit ${item.id}`);
      lines.push('');
    }

    return {
      content: [{ type: 'text', text: lines.join('\n') }],
    };
  } catch (e) {
    return {
      content: [{ type: 'text', text: `Error: ${e.message}` }],
      isError: true,
    };
  }
}

module.exports = { toolDefinition, handler };
@@ -0,0 +1,91 @@
1
+ 'use strict';
2
+
3
+ const path = require('path');
4
+ const { readIndex } = require('../../lib/index');
5
+ const { readEntry } = require('../../lib/entries');
6
+ const { loadGraph } = require('../../lib/graph');
7
+ const { enforceBudget } = require('../../lib/budget');
8
+ const { readConfig } = require('../../lib/config');
9
+
10
// MCP tool descriptor for file/directory context lookup.
const toolDefinition = {
  name: 'lore_why',
  description: 'Retrieve architectural decisions and context for a specific file or directory. Returns all relevant Lore entries, including graph-propagated context from imported and importing modules.',
  inputSchema: {
    type: 'object',
    properties: {
      filepath: {
        type: 'string',
        description: 'The file or directory path to look up context for (relative to project root)',
      },
    },
    required: ['filepath'],
  },
};
24
+
25
/**
 * Handle a lore_why call.
 *
 * Collects Lore entries relevant to a path: exact file/directory matches
 * weigh 1.0, ancestor directories 0.7, imported modules 0.3 and importing
 * modules 0.2 (an entry keeps its strongest weight). Results are returned
 * highest-weight first, trimmed to the configured token budget.
 *
 * @param {{ filepath: string }} args - tool arguments
 * @returns {Promise<object>} MCP content response; `isError` is set on failure
 */
async function handler(args) {
  const { filepath } = args;
  const config = readConfig();
  const budget = (config.mcp && config.mcp.tokenBudget) ? config.mcp.tokenBudget : 4000;

  try {
    const index = readIndex();
    const graph = loadGraph();
    const normalized = filepath.replace(/^\.\//, '');

    // entry id → strongest weight observed so far
    const weightById = new Map();
    const accumulate = (ids, weight) => {
      for (const id of ids || []) {
        const previous = weightById.get(id) || 0;
        if (weight > previous) weightById.set(id, weight);
      }
    };

    // Exact file or directory match (1.0)
    accumulate(index.files[normalized], 1.0);
    accumulate(index.files[normalized + '/'], 1.0);

    // Every ancestor directory up to (but excluding) the root (0.7)
    let dir = normalized;
    for (;;) {
      const parent = path.dirname(dir);
      if (parent === dir || parent === '.') break;
      accumulate(index.files[parent + '/'], 0.7);
      dir = parent;
    }

    // Modules this file imports (0.3)
    for (const dep of graph.imports[normalized] || []) {
      accumulate(index.files[dep], 0.3);
    }

    // Modules importing this file (0.2)
    for (const dep of graph.importedBy[normalized] || []) {
      accumulate(index.files[dep], 0.2);
    }

    if (weightById.size === 0) {
      return {
        content: [{ type: 'text', text: `No Lore entries found for: ${filepath}` }],
      };
    }

    // Load each entry, tag it with its weight, and order strongest-first.
    const entries = [...weightById.entries()]
      .map(([id, weight]) => {
        const entry = readEntry(index.entries[id]);
        return entry ? Object.assign({}, entry, { _score: weight }) : null;
      })
      .filter(Boolean)
      .sort((a, b) => b._score - a._score);

    const context = enforceBudget(entries, budget);
    return { content: [{ type: 'text', text: context }] };
  } catch (e) {
    return {
      content: [{ type: 'text', text: `Error: ${e.message}` }],
      isError: true,
    };
  }
}

module.exports = { toolDefinition, handler };
@@ -0,0 +1,113 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs-extra');
4
+ const path = require('path');
5
+ const { detectType, extractTitle, scoreComment } = require('../lib/nlp');
6
+ const { saveDraft } = require('../lib/drafts');
7
+
8
/**
 * Extract raw comment strings from source code.
 * Tries Babel AST for JS/TS, falls back to regex for all languages.
 * @param {string} code - source text to scan
 * @param {string} filePath - used only to choose the parsing strategy
 * @returns {string[]} trimmed, non-empty comment bodies
 */
function extractComments(code, filePath) {
  if (/\.(js|ts|jsx|tsx)$/.test(filePath)) {
    try {
      const babelParser = require('@babel/parser');
      const ast = babelParser.parse(code, {
        sourceType: 'unambiguous',
        plugins: ['typescript', 'jsx', 'decorators-legacy'],
        errorRecovery: true,
      });
      return (ast.comments || []).map(c => c.value.trim()).filter(Boolean);
    } catch (e) {
      // AST parse unavailable or failed — use the regex scan below.
    }
  }

  // Regex fallback: comment markers covering JS, Python, Go, Rust, shell.
  const markers = [
    /\/\/([^\n]+)/g,        // // line comments
    /\/\*([\s\S]*?)\*\//g,  // /* block comments */
    /#([^\n]+)/g,           // # line comments
    /--([^\n]+)/g,          // -- line comments
  ];
  const found = [];
  for (const marker of markers) {
    for (const match of code.matchAll(marker)) {
      const body = match[1].trim();
      if (body) found.push(body);
    }
  }
  return found;
}
44
+
45
/**
 * Mine a single file for lore-worthy comments.
 * Saves passing comments as drafts.
 * Reads the file, extracts comments, scores each one, and saves every
 * comment scoring >= 0.5 with a usable title as a pending draft.
 * @param {string} absFilePath
 * @param {string} projectRoot
 * @returns {object[]} created drafts
 */
function mineFile(absFilePath, projectRoot) {
  let code = '';
  // Unreadable file (deleted, permissions, binary): mine nothing.
  try { code = fs.readFileSync(absFilePath, 'utf8'); } catch (e) { return []; }

  const relativePath = path.relative(projectRoot, absFilePath).replace(/\\/g, '/');
  const comments = extractComments(code, absFilePath);
  const created = [];

  // TODO(review): no deduplication is implemented yet — re-mining the same
  // file re-creates drafts for the same comments.
  for (const comment of comments) {
    const score = scoreComment(comment);
    if (score < 0.5) continue; // below the lore-worthiness threshold

    const { type, confidence } = detectType(comment);
    const title = extractTitle(comment);
    if (!title || title.length < 3) continue; // no usable title

    const draft = {
      draftId: `draft-${Date.now()}-${Math.random().toString(36).slice(2, 7)}`,
      suggestedType: type,
      suggestedTitle: title,
      // Cap stored evidence at 300 chars to keep drafts small.
      evidence: comment.length > 300 ? comment.slice(0, 300) + '…' : comment,
      files: [relativePath],
      // Blend of the raw lore score and the type-detection confidence.
      confidence: Math.min(1, (score + confidence) / 2),
      createdAt: new Date().toISOString(),
      status: 'pending',
      source: 'comment-mine',
    };

    saveDraft(draft);
    created.push(draft);
  }

  return created;
}
87
+
88
/**
 * Mine all source files in a directory recursively.
 * @param {string} absDirPath - directory to scan
 * @param {string} projectRoot - root used for project-relative draft paths
 * @param {string[]} [ignore] - directory names to skip at any depth
 * @returns {number} total drafts created
 */
function mineDirectory(absDirPath, projectRoot, ignore) {
  const { globSync } = require('glob');
  const ignoreList = ignore || ['node_modules', 'dist', '.git', '.lore', 'coverage'];
  // `**/name/**` skips the directory at any depth (`**` matches zero
  // segments, so top-level directories are still covered). The previous
  // bare `name/**` form only excluded top-level directories, leaving
  // nested node_modules etc. to be scanned.
  const ignorePats = ignoreList.map(i => `**/${i}/**`);

  const files = globSync('**/*.{js,ts,jsx,tsx,py,go,rs}', {
    cwd: absDirPath,
    ignore: ignorePats,
    absolute: true,
  });

  let total = 0;
  for (const file of files) {
    total += mineFile(file, projectRoot).length;
  }
  return total;
}

module.exports = { extractComments, mineFile, mineDirectory };
@@ -0,0 +1,149 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs-extra');
4
+ const path = require('path');
5
+ const { loadGraph, saveGraph } = require('../lib/graph');
6
+
7
/**
 * Resolve a relative import path to a project-relative file path.
 * Bare package specifiers yield null; otherwise each candidate
 * extension / index-file suffix is probed on disk in order, and the
 * first existing candidate is returned with forward slashes.
 */
function resolveImport(fromFile, importPath, projectRoot) {
  const isPathImport = importPath.startsWith('.') || importPath.startsWith('/');
  if (!isPathImport) return null;

  const importerDir = path.dirname(path.join(projectRoot, fromFile));
  const target = path.relative(projectRoot, path.resolve(importerDir, importPath));

  const suffixes = ['', '.js', '.ts', '.jsx', '.tsx', '/index.js', '/index.ts', '/index.jsx', '/index.tsx'];
  const hit = suffixes
    .map(suffix => target + suffix)
    .find(candidate => fs.existsSync(path.join(projectRoot, candidate)));

  return hit ? hit.replace(/\\/g, '/') : null;
}
26
+
27
/**
 * Extract all imports from a source file.
 * Uses @babel/parser for JS/TS, falls back to regex.
 * @param {string} code - source text to scan
 * @param {string} filePath - project-relative path of the importing file
 * @param {string} projectRoot - root used to resolve relative specifiers
 * @returns {string[]} project-relative paths of resolved local imports;
 *   bare package imports and unresolvable specifiers are dropped
 */
function extractImports(code, filePath, projectRoot) {
  const imports = [];

  if (/\.(js|ts|jsx|tsx)$/.test(filePath)) {
    try {
      const babelParser = require('@babel/parser');
      const ast = babelParser.parse(code, {
        sourceType: 'unambiguous',
        plugins: ['typescript', 'jsx', 'decorators-legacy'],
        errorRecovery: true,
      });

      // Depth-first walk over the AST, collecting import specifiers.
      function visit(node) {
        if (!node || typeof node !== 'object') return;

        // ES import: import x from 'y'
        if (node.type === 'ImportDeclaration' && node.source && node.source.value) {
          const r = resolveImport(filePath, node.source.value, projectRoot);
          if (r) imports.push(r);
        }

        // CJS require: require('y')
        // Only string-literal arguments count; dynamic expressions are skipped.
        if (
          node.type === 'CallExpression' &&
          node.callee && node.callee.name === 'require' &&
          node.arguments && node.arguments[0] &&
          node.arguments[0].type === 'StringLiteral'
        ) {
          const r = resolveImport(filePath, node.arguments[0].value, projectRoot);
          if (r) imports.push(r);
        }

        // Recurse into child nodes; skip positional metadata keys.
        for (const key of Object.keys(node)) {
          if (key === 'type' || key === 'loc' || key === 'start' || key === 'end') continue;
          const val = node[key];
          if (Array.isArray(val)) val.forEach(v => v && v.type && visit(v));
          else if (val && typeof val === 'object' && val.type) visit(val);
        }
      }

      visit(ast.program);
      return imports;
    } catch (e) {
      // fall through to regex
    }
  }

  // Regex fallback for non-JS files or when the AST parse failed.
  // NOTE(review): this can match specifier-like text inside strings or
  // comments; resolveImport filters out anything that doesn't exist on disk.
  const requireRe = /require\(['"]([^'"]+)['"]\)/g;
  const importRe = /import(?:[^'"]*from)?\s*['"]([^'"]+)['"]/g;
  for (const re of [requireRe, importRe]) {
    let m;
    while ((m = re.exec(code)) !== null) {
      const r = resolveImport(filePath, m[1], projectRoot);
      if (r) imports.push(r);
    }
  }
  return imports;
}
90
+
91
/**
 * Update the graph incrementally for a single changed file.
 * Drops the file's previous outgoing edges, re-parses its imports,
 * and persists the refreshed graph. If the file cannot be read, the
 * stored graph is left untouched.
 */
function updateGraphForFile(absFilePath, projectRoot) {
  const graph = loadGraph();
  const relPath = path.relative(projectRoot, absFilePath).replace(/\\/g, '/');

  // Detach this file from the reverse edges of everything it used to import.
  for (const dep of graph.imports[relPath] || []) {
    const importers = graph.importedBy[dep];
    if (importers) {
      graph.importedBy[dep] = importers.filter(f => f !== relPath);
    }
  }

  let source = '';
  try {
    source = fs.readFileSync(absFilePath, 'utf8');
  } catch (e) {
    // Unreadable (e.g. deleted mid-event): bail without saving.
    return;
  }

  const deps = extractImports(source, relPath, projectRoot);
  graph.imports[relPath] = deps;

  // Re-attach reverse edges for the current import set.
  for (const dep of deps) {
    const importers = (graph.importedBy[dep] = graph.importedBy[dep] || []);
    if (!importers.includes(relPath)) importers.push(relPath);
  }

  saveGraph(graph);
}
122
+
123
/**
 * Build the full import graph by scanning all JS/TS files.
 * @param {string} projectRoot - directory to scan
 * @param {string[]} [ignorePatterns] - directory names to skip at any depth
 * @returns {{imports: Object<string,string[]>, importedBy: Object<string,string[]>, lastUpdated: string}}
 */
function buildFullGraph(projectRoot, ignorePatterns) {
  const { globSync } = require('glob');
  // `**/name/**` excludes the directory at any depth (`**` matches zero
  // segments, so top-level directories are still covered). The previous
  // bare `name/**` form only excluded top-level directories, so nested
  // node_modules (e.g. in workspaces) were still scanned.
  const ignore = (ignorePatterns || ['node_modules', 'dist', '.git', '.lore', 'coverage']).map(i => `**/${i}/**`);

  const graph = { imports: {}, importedBy: {}, lastUpdated: new Date().toISOString() };
  const files = globSync('**/*.{js,ts,jsx,tsx}', { cwd: projectRoot, ignore, absolute: false });

  for (const file of files) {
    let code = '';
    // Skip files that vanish or cannot be read during the scan.
    try { code = fs.readFileSync(path.join(projectRoot, file), 'utf8'); } catch (e) { continue; }

    const imports = extractImports(code, file, projectRoot);
    graph.imports[file] = imports;

    // Maintain the reverse (importedBy) edges alongside the forward ones.
    for (const dep of imports) {
      if (!graph.importedBy[dep]) graph.importedBy[dep] = [];
      if (!graph.importedBy[dep].includes(file)) graph.importedBy[dep].push(file);
    }
  }

  return graph;
}

module.exports = { updateGraphForFile, buildFullGraph, extractImports };
@@ -0,0 +1,134 @@
1
+ 'use strict';
2
+
3
+ const path = require('path');
4
+ const fs = require('fs-extra');
5
+ const chalk = require('chalk');
6
+ const chokidar = require('chokidar');
7
+ const { readConfig } = require('../lib/config');
8
+ const { LORE_DIR } = require('../lib/index');
9
+ const signals = require('./signals');
10
+ const { mineFile } = require('./comments');
11
+ const { updateGraphForFile } = require('./graph');
12
+
13
/**
 * Current local time formatted as HH:MM (24-hour).
 * Uses an explicit hourCycle of 'h23' so midnight renders as "00:xx";
 * `hour12: false` alone selected the legacy h24 cycle ("24:xx") on some
 * older engines/locales.
 */
function timestamp() {
  return new Date().toLocaleTimeString('en-US', { hourCycle: 'h23', hour: '2-digit', minute: '2-digit' });
}
16
+
17
/**
 * Start the file watcher.
 *
 * Watches the project tree and turns detected signals (file/directory
 * deletions, new files, repeated edits, package.json changes, mined
 * comments, commit messages) into pending drafts via the signals module.
 *
 * @param {object} options - { quiet, logFile }
 * @returns {object} the main chokidar watcher instance
 */
function startWatcher(options = {}) {
  const projectRoot = process.cwd();
  const config = readConfig();
  const ignore = config.watchIgnore || ['node_modules', 'dist', '.git', 'coverage'];

  // Ignore matcher. Escape ALL regex metacharacters in each name and anchor
  // the alternatives to path-segment boundaries: the previous pattern used
  // String.replace with a string argument (escaping only the FIRST '.') and
  // matched bare substrings, so e.g. 'dist' also ignored 'distance.js'.
  const escapeRe = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const ignoreRe = new RegExp(
    `(^|[\\\\/])(${[...ignore, '.lore'].map(escapeRe).join('|')})([\\\\/]|$)`
  );

  let draftCount = 0;

  // When logFile is set, strip ANSI color codes and append timestamped
  // lines to the file instead of writing to stdout.
  const log = options.logFile
    ? (msg) => {
        const plain = msg.replace(/\x1B\[[0-9;]*m/g, '');
        fs.appendFileSync(options.logFile, `${new Date().toISOString()} ${plain}\n`);
      }
    : (msg) => console.log(msg);

  if (!options.quiet) {
    console.log(chalk.cyan('📖 Lore Watcher started'));
    console.log(chalk.dim(` Watching: ${projectRoot}`));
    console.log(chalk.dim(` Ignoring: ${ignore.join(', ')}`));
    console.log(chalk.dim(' Press Ctrl+C to stop'));
    console.log();
  }

  // Count a queued draft and log where the signal came from.
  function recordDraft(draft, filepath) {
    draftCount++;
    const rel = path.relative(projectRoot, filepath) || filepath;
    log(`${chalk.dim(`[${timestamp()}]`)} Signal detected in ${chalk.yellow(rel)} — queued for review`);
  }

  const watcher = chokidar.watch('.', {
    cwd: projectRoot,
    ignored: ignoreRe,
    persistent: true,
    ignoreInitial: true,
    // Debounce rapid writes so partially-written files are not processed.
    awaitWriteFinish: { stabilityThreshold: 500, pollInterval: 100 },
  });

  watcher.on('unlink', (relPath) => {
    const abs = path.join(projectRoot, relPath);
    const draft = signals.onFileDeletion(abs, projectRoot);
    if (draft) recordDraft(draft, abs);
  });

  watcher.on('unlinkDir', (relPath) => {
    const abs = path.join(projectRoot, relPath);
    const draft = signals.onDirectoryDeletion(abs, projectRoot);
    if (draft) recordDraft(draft, abs);
  });

  watcher.on('add', (relPath) => {
    const abs = path.join(projectRoot, relPath);
    const draft = signals.onNewFile(abs, projectRoot);
    if (draft) recordDraft(draft, abs);
  });

  watcher.on('change', (relPath) => {
    const abs = path.join(projectRoot, relPath);

    // Repeated edit tracking
    const editDraft = signals.trackFileEdit(abs, projectRoot);
    if (editDraft) recordDraft(editDraft, abs);

    // package.json changes
    if (relPath.endsWith('package.json')) {
      const pkgDrafts = signals.onPackageJsonChange(abs, projectRoot);
      for (const d of pkgDrafts) recordDraft(d, abs);
    }

    // Comment mining + graph update for source files
    if (/\.(js|ts|jsx|tsx|py|go|rs)$/.test(relPath)) {
      const commentDrafts = mineFile(abs, projectRoot);
      if (commentDrafts.length > 0) {
        draftCount += commentDrafts.length;
        log(`${chalk.dim(`[${timestamp()}]`)} Mined ${commentDrafts.length} comment${commentDrafts.length === 1 ? '' : 's'} from ${chalk.yellow(relPath)} — queued for review`);
      }

      // Best-effort: a broken graph update must not kill the watcher.
      try { updateGraphForFile(abs, projectRoot); } catch (e) {}
    }
  });

  // Watch COMMIT_EDITMSG to detect new commits
  const commitMsgPath = path.join(projectRoot, '.git', 'COMMIT_EDITMSG');
  let gitWatcher = null;
  if (fs.existsSync(path.join(projectRoot, '.git'))) {
    gitWatcher = chokidar.watch(commitMsgPath, { persistent: true, ignoreInitial: true });
    gitWatcher.on('change', () => {
      try {
        const message = fs.readFileSync(commitMsgPath, 'utf8').trim();
        const drafts = signals.onCommitMessage(message, projectRoot);
        for (const d of drafts) {
          draftCount++;
          log(`${chalk.dim(`[${timestamp()}]`)} Commit signal: "${message.slice(0, 60)}" — queued for review`);
        }
      } catch (e) { /* best-effort: ignore unreadable commit messages */ }
    });
  }

  // Close both watchers and print the session summary (unless quiet).
  function shutdown() {
    watcher.close();
    if (gitWatcher) gitWatcher.close();
    if (!options.quiet) {
      console.log();
      console.log(chalk.cyan(`📖 Lore captured ${draftCount} draft${draftCount === 1 ? '' : 's'} today. Review with: lore drafts`));
    }
  }

  // SIGTERM previously duplicated the close calls inline and skipped the
  // summary; both signals now share the same shutdown path.
  process.on('SIGINT', () => { shutdown(); process.exit(0); });
  process.on('SIGTERM', () => { shutdown(); process.exit(0); });

  return watcher;
}

module.exports = { startWatcher };