claude-mneme 2.10.3 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client/mneme-client.mjs +223 -0
- package/package.json +5 -2
- package/scripts/utils.mjs +23 -69
- package/server/batch-queue.mjs +81 -0
- package/server/deduplicator.mjs +80 -0
- package/server/log-service.mjs +153 -0
- package/server/memory-cache.mjs +144 -0
- package/server/mneme-server.mjs +485 -0
- package/server/summarization-service.mjs +261 -0
- package/server/test-log-service.mjs +228 -0
- package/server/test-server.mjs +117 -0
- package/server/test-summarization.mjs +51 -0
- package/server/throttler.mjs +103 -0
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Mneme Client
|
|
3
|
+
*
|
|
4
|
+
* Thin HTTP client for hooks to communicate with the Mneme server.
|
|
5
|
+
* Handles auto-start of server if not running.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { request } from 'http';
|
|
9
|
+
import { existsSync, readFileSync, unlinkSync } from 'fs';
|
|
10
|
+
import { spawn } from 'child_process';
|
|
11
|
+
import { join, dirname } from 'path';
|
|
12
|
+
import { fileURLToPath } from 'url';
|
|
13
|
+
import { homedir } from 'os';
|
|
14
|
+
|
|
15
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
16
|
+
const __dirname = dirname(__filename);
|
|
17
|
+
|
|
18
|
+
const MEMORY_BASE = join(homedir(), '.claude-mneme');
|
|
19
|
+
const PID_FILE = join(MEMORY_BASE, '.server.pid');
|
|
20
|
+
const SERVER_SCRIPT = join(__dirname, '../server/mneme-server.mjs');
|
|
21
|
+
|
|
22
|
+
/**
 * Probe the server's /health endpoint.
 *
 * @param {string} host - Server hostname.
 * @param {number} port - Server port.
 * @returns {Promise<boolean>} true when the server answers 200 within 1s.
 */
async function pingServer(host, port) {
  return new Promise((resolve) => {
    const options = {
      host,
      port,
      path: '/health',
      method: 'GET',
      timeout: 1000
    };

    const probe = request(options, (res) => resolve(res.statusCode === 200));

    // Any transport failure or timeout counts as "not running".
    probe.on('error', () => resolve(false));
    probe.on('timeout', () => {
      probe.destroy();
      resolve(false);
    });

    probe.end();
  });
}
|
|
46
|
+
|
|
47
|
+
/**
 * Ensure the Mneme server is running, starting it if needed.
 *
 * Reads the PID file to find an existing server, verifies the process is
 * alive and the HTTP endpoint responds; otherwise spawns a fresh detached
 * server process and polls until it is ready.
 *
 * @param {number} [maxRetries=3] - How many stale/invalid PID files to
 *   tolerate before giving up on reuse and spawning a new server.
 * @returns {Promise<{host: string, port: number}>} Connection info.
 * @throws {Error} If a newly spawned server is not responsive within 3s.
 */
export async function ensureServer(maxRetries = 3) {
  // BUGFIX: bounded loop instead of unbounded recursion — a PID file that is
  // repeatedly recreated in a bad state can no longer hang the caller, and a
  // racing unlink by another client no longer throws out of this function.
  for (let attempt = 0; attempt < maxRetries && existsSync(PID_FILE); attempt++) {
    try {
      const { pid, host, port } = JSON.parse(readFileSync(PID_FILE, 'utf-8'));

      // Signal 0 checks process existence without sending anything.
      process.kill(pid, 0);

      // Process is alive — make sure it actually answers HTTP.
      if (await pingServer(host, port)) {
        return { host, port };
      }
    } catch {
      // Invalid PID file or dead process — fall through to cleanup below.
    }

    // Stale or unresponsive server: remove the PID file and re-check.
    // unlink may race with another client doing the same cleanup.
    try { unlinkSync(PID_FILE); } catch {}
  }

  // Start server (detached so it outlives this hook process).
  const child = spawn('node', [SERVER_SCRIPT], {
    detached: true,
    stdio: 'ignore'
  });
  child.unref();

  // Wait for server to be ready (max 3 seconds, polling every 100ms).
  for (let i = 0; i < 30; i++) {
    await new Promise(r => setTimeout(r, 100));

    if (existsSync(PID_FILE)) {
      try {
        const { host, port } = JSON.parse(readFileSync(PID_FILE, 'utf-8'));
        if (await pingServer(host, port)) {
          return { host, port };
        }
      } catch {
        // Not ready yet, continue waiting.
      }
    }
  }

  throw new Error('Server failed to start within 3 seconds');
}
|
|
106
|
+
|
|
107
|
+
/**
 * HTTP client for the Mneme server.
 *
 * Thin wrapper around node's http.request that speaks JSON to the local
 * Mneme server and exposes one helper per server endpoint.
 */
export class MnemeClient {
  /**
   * @param {string} host - Server hostname.
   * @param {number} port - Server port.
   */
  constructor(host, port) {
    this.host = host;
    this.port = port;
    this.timeout = 2000; // 2 second timeout per request
  }

  /**
   * Make an HTTP request to the server.
   *
   * @param {string} method - HTTP verb ('GET', 'POST', ...).
   * @param {string} path - Request path (e.g. '/health').
   * @param {object|null} [body=null] - JSON-serializable request body.
   * @returns {Promise<object>} Parsed JSON response.
   * @throws {Error} On timeout, network error, HTTP >= 400, or a success
   *   response whose body is not valid JSON.
   */
  async request(method, path, body = null) {
    return new Promise((resolve, reject) => {
      const options = {
        host: this.host,
        port: this.port,
        path,
        method,
        headers: body ? { 'Content-Type': 'application/json' } : {},
        timeout: this.timeout
      };

      const req = request(options, (res) => {
        let data = '';
        res.on('data', chunk => data += chunk);
        res.on('end', () => {
          const isError = res.statusCode >= 400;
          let parsed;
          try {
            parsed = JSON.parse(data);
          } catch (e) {
            // BUGFIX: a non-JSON error body (e.g. a plain-text 500) used to
            // surface as the misleading 'Invalid JSON response'. Report the
            // HTTP status for error responses instead.
            reject(isError
              ? new Error(`HTTP ${res.statusCode}`)
              : new Error('Invalid JSON response'));
            return;
          }
          if (isError) {
            reject(new Error(parsed.error || `HTTP ${res.statusCode}`));
          } else {
            resolve(parsed);
          }
        });
      });

      req.on('error', reject);
      req.on('timeout', () => {
        req.destroy();
        reject(new Error('Request timeout'));
      });

      if (body) {
        req.write(JSON.stringify(body));
      }

      req.end();
    });
  }

  /** POST request helper. */
  async post(path, body) {
    return this.request('POST', path, body);
  }

  /** GET request helper. */
  async get(path) {
    return this.request('GET', path);
  }

  // Session Management

  /** Register a session with the server. */
  async registerSession(sessionId, cwd) {
    return this.post('/session/register', { sessionId, cwd });
  }

  /** Unregister a session. */
  async unregisterSession(sessionId) {
    return this.post('/session/unregister', { sessionId });
  }

  // Health Check

  /** Fetch server health status. */
  async health() {
    return this.get('/health');
  }

  // Log Operations

  /** Queue a log entry for a project (server batches and deduplicates). */
  async appendLog(project, entry) {
    return this.post('/log/append', { project, entry });
  }

  /** Force the server to flush pending log entries (all projects if null). */
  async flushLog(project = null) {
    return this.post('/log/flush', { project });
  }

  // Summarization Operations

  /** Ask the server to summarize a project's log (force bypasses throttling). */
  async triggerSummarize(project, force = false) {
    return this.post('/summarize/trigger', { project, force });
  }

  /** Get summarization status for a project. */
  async getSummarizeStatus(project) {
    return this.post('/summarize/status', { project });
  }

  /** Fetch the current summary for a project. */
  async getSummary(project) {
    return this.post('/summary/get', { project });
  }
}
|
|
216
|
+
|
|
217
|
+
/**
 * Get a connected client, auto-starting the server when needed.
 *
 * @returns {Promise<MnemeClient>} Client bound to the running server.
 */
export async function getClient() {
  const server = await ensureServer();
  return new MnemeClient(server.host, server.port);
}
|
package/package.json
CHANGED
|
@@ -1,11 +1,14 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "claude-mneme",
|
|
3
|
-
"version": "
|
|
3
|
+
"version": "3.0.0",
|
|
4
4
|
"license": "MIT",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"scripts": {
|
|
7
7
|
"install-deps": "npm install",
|
|
8
|
-
"test": "node --test scripts/utils.test.mjs"
|
|
8
|
+
"test": "node --test scripts/utils.test.mjs",
|
|
9
|
+
"test:server": "node server/test-server.mjs",
|
|
10
|
+
"test:log": "node server/test-log-service.mjs",
|
|
11
|
+
"test:summarization": "node server/test-summarization.mjs"
|
|
9
12
|
},
|
|
10
13
|
"dependencies": {
|
|
11
14
|
"@anthropic-ai/claude-agent-sdk": "^0.2.31"
|
package/scripts/utils.mjs
CHANGED
|
@@ -1842,26 +1842,27 @@ export function scoreEntriesByRelevance(entries, cwd, config) {
|
|
|
1842
1842
|
}
|
|
1843
1843
|
|
|
1844
1844
|
/**
 * Append a log entry via the Mneme server (batched, deduplicated).
 * Also extracts and indexes entities from the entry.
 *
 * @param {object} entry - Log entry to record.
 * @param {string} [cwd=process.cwd()] - Working directory used to resolve
 *   the project root.
 */
export function appendLogEntry(entry, cwd = process.cwd()) {
  const cfg = loadConfig();
  const project = getProjectRoot(cwd);

  // Fire-and-forget send to the server; hooks must never block on logging.
  import('../client/mneme-client.mjs')
    .then(({ getClient }) => getClient())
    .then((client) => client.appendLog(project, entry))
    .catch((err) => {
      // Server unavailable — best-effort, record and move on.
      logError(err, 'appendLogEntry');
    });

  // Extract and index entities from the entry (TODO: move to server in Phase 4)
  updateEntityIndex(entry, cwd, cfg);

  // Invalidate cache since data has changed (TODO: move to server in Phase 3)
  invalidateCache(cwd);
}
|
|
1866
1867
|
|
|
1867
1868
|
/**
|
|
@@ -1938,67 +1939,20 @@ export function flushPendingLog(cwd = process.cwd(), throttleMs = 0) {
|
|
|
1938
1939
|
}
|
|
1939
1940
|
|
|
1940
1941
|
/**
 * Trigger summarization via server (throttled, queued).
 * Call this after appending to the log.
 *
 * @param {string} [cwd=process.cwd()] - Working directory used to resolve
 *   the project root.
 */
export function maybeSummarize(cwd = process.cwd()) {
  const project = getProjectRoot(cwd);

  // Delegate to the server, which owns throttling and the entry-count check.
  // Fire-and-forget: summarization is non-critical for the caller.
  import('../client/mneme-client.mjs')
    .then(({ getClient }) => getClient())
    .then((client) => client.triggerSummarize(project, false))
    .catch((err) => {
      // Server unavailable or throttled — fail silently (non-critical).
      logError(err, 'maybeSummarize');
    });
}
|
|
2003
1957
|
|
|
2004
1958
|
// ============================================================================
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* BatchQueue
|
|
3
|
+
*
|
|
4
|
+
* Collects items and flushes them in batches based on:
|
|
5
|
+
* - Size threshold (max items)
|
|
6
|
+
* - Time threshold (max wait time)
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
export class BatchQueue {
|
|
10
|
+
constructor({ maxSize = 100, maxWaitMs = 1000, processor }) {
|
|
11
|
+
this.maxSize = maxSize;
|
|
12
|
+
this.maxWaitMs = maxWaitMs;
|
|
13
|
+
this.processor = processor; // async function(batch)
|
|
14
|
+
this.batch = [];
|
|
15
|
+
this.timer = null;
|
|
16
|
+
this.flushing = false;
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Add item to queue
|
|
21
|
+
*/
|
|
22
|
+
add(item) {
|
|
23
|
+
this.batch.push(item);
|
|
24
|
+
|
|
25
|
+
// Flush immediately if batch is full
|
|
26
|
+
if (this.batch.length >= this.maxSize) {
|
|
27
|
+
this.flush();
|
|
28
|
+
return;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
// Schedule flush if not already scheduled
|
|
32
|
+
if (!this.timer) {
|
|
33
|
+
this.timer = setTimeout(() => this.flush(), this.maxWaitMs);
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Flush current batch
|
|
39
|
+
*/
|
|
40
|
+
async flush() {
|
|
41
|
+
if (this.flushing || this.batch.length === 0) return;
|
|
42
|
+
|
|
43
|
+
// Clear timer
|
|
44
|
+
if (this.timer) {
|
|
45
|
+
clearTimeout(this.timer);
|
|
46
|
+
this.timer = null;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
// Take current batch
|
|
50
|
+
const items = this.batch;
|
|
51
|
+
this.batch = [];
|
|
52
|
+
this.flushing = true;
|
|
53
|
+
|
|
54
|
+
try {
|
|
55
|
+
await this.processor(items);
|
|
56
|
+
} catch (err) {
|
|
57
|
+
// Log error but don't throw (non-critical, best-effort)
|
|
58
|
+
console.error('[batch-queue] Flush error:', err.message);
|
|
59
|
+
} finally {
|
|
60
|
+
this.flushing = false;
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Get current queue depth
|
|
66
|
+
*/
|
|
67
|
+
depth() {
|
|
68
|
+
return this.batch.length;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* Shutdown queue (flush and clear timer)
|
|
73
|
+
*/
|
|
74
|
+
async shutdown() {
|
|
75
|
+
await this.flush();
|
|
76
|
+
if (this.timer) {
|
|
77
|
+
clearTimeout(this.timer);
|
|
78
|
+
this.timer = null;
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deduplicator
|
|
3
|
+
*
|
|
4
|
+
* Detects duplicate entries within a sliding time window.
|
|
5
|
+
* Uses content-based hashing (ignores timestamp).
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
export class Deduplicator {
|
|
9
|
+
constructor({ windowMs = 5000 } = {}) {
|
|
10
|
+
this.windowMs = windowMs;
|
|
11
|
+
this.recentHashes = new Map(); // hash -> timestamp
|
|
12
|
+
this.cleanupInterval = setInterval(() => this.cleanup(), 60000); // Clean every minute
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Check if entry is a duplicate
|
|
17
|
+
* Returns true if duplicate, false otherwise
|
|
18
|
+
*/
|
|
19
|
+
isDuplicate(entry) {
|
|
20
|
+
const hash = this.hashEntry(entry);
|
|
21
|
+
const lastSeen = this.recentHashes.get(hash);
|
|
22
|
+
const now = Date.now();
|
|
23
|
+
|
|
24
|
+
if (lastSeen && now - lastSeen < this.windowMs) {
|
|
25
|
+
// Duplicate within window, update timestamp
|
|
26
|
+
this.recentHashes.set(hash, now);
|
|
27
|
+
return true;
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
// Not a duplicate, record it
|
|
31
|
+
this.recentHashes.set(hash, now);
|
|
32
|
+
return false;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Hash entry based on type and content (ignore timestamp)
|
|
37
|
+
*/
|
|
38
|
+
hashEntry(entry) {
|
|
39
|
+
const { type, content, action, outcome, subject } = entry;
|
|
40
|
+
|
|
41
|
+
// For task entries, include action/outcome/subject
|
|
42
|
+
if (type === 'task') {
|
|
43
|
+
return `task:${action || ''}:${outcome || ''}:${subject || ''}`;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// For other entries, use type + content
|
|
47
|
+
return `${type}:${content || ''}`;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
/**
|
|
51
|
+
* Clean up old entries (older than window + 1 minute buffer)
|
|
52
|
+
*/
|
|
53
|
+
cleanup() {
|
|
54
|
+
const now = Date.now();
|
|
55
|
+
const expiry = this.windowMs + 60000; // Window + 1 minute buffer
|
|
56
|
+
|
|
57
|
+
for (const [hash, timestamp] of this.recentHashes) {
|
|
58
|
+
if (now - timestamp > expiry) {
|
|
59
|
+
this.recentHashes.delete(hash);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Get current cache size
|
|
66
|
+
*/
|
|
67
|
+
size() {
|
|
68
|
+
return this.recentHashes.size;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* Shutdown deduplicator
|
|
73
|
+
*/
|
|
74
|
+
shutdown() {
|
|
75
|
+
if (this.cleanupInterval) {
|
|
76
|
+
clearInterval(this.cleanupInterval);
|
|
77
|
+
this.cleanupInterval = null;
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
}
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LogService
|
|
3
|
+
*
|
|
4
|
+
* Handles log entry batching, deduplication, and file writes.
|
|
5
|
+
* Centralizes all file I/O to eliminate lock contention.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { existsSync, appendFileSync, mkdirSync } from 'fs';
|
|
9
|
+
import { join } from 'path';
|
|
10
|
+
import { createHash } from 'crypto';
|
|
11
|
+
import { homedir } from 'os';
|
|
12
|
+
import { BatchQueue } from './batch-queue.mjs';
|
|
13
|
+
import { Deduplicator } from './deduplicator.mjs';
|
|
14
|
+
|
|
15
|
+
export class LogService {
  /**
   * @param {object} config - Server config; `batching.log.{maxSize,maxWaitMs}`
   *   tune the underlying BatchQueue.
   * @param {object} logger - Structured logger exposing debug() and error().
   */
  constructor(config, logger) {
    this.config = config;
    this.logger = logger;
    this.deduplicator = new Deduplicator({ windowMs: 5000 });
    this.stats = {
      entriesReceived: 0,
      entriesDeduplicated: 0,
      entriesWritten: 0,
      batchesFlushed: 0,
      writeErrors: 0
    };

    // Batch queue drives all writes through processBatch().
    const batchCfg = config.batching?.log;
    this.queue = new BatchQueue({
      maxSize: batchCfg?.maxSize || 100,
      maxWaitMs: batchCfg?.maxWaitMs || 1000,
      processor: (batch) => this.processBatch(batch)
    });
  }

  /**
   * Append a log entry: duplicates within the dedup window are dropped,
   * everything else is queued for a batched write.
   */
  append(project, entry) {
    this.stats.entriesReceived++;

    if (this.deduplicator.isDuplicate(entry)) {
      this.stats.entriesDeduplicated++;
      return { ok: true, deduplicated: true, queued: false };
    }

    this.queue.add({ project, entry });
    return { ok: true, deduplicated: false, queued: true };
  }

  /**
   * Force an immediate flush of the queue.
   * NOTE(review): `entriesFlushed` reports the cumulative written count,
   * not just this flush — confirm callers expect that.
   */
  async flush(project = null) {
    await this.queue.flush();
    return { ok: true, entriesFlushed: this.stats.entriesWritten };
  }

  /**
   * Group a batch of {project, entry} items by project and append each
   * group to that project's log file, updating counters as we go.
   */
  async processBatch(items) {
    const byProject = new Map();
    for (const { project, entry } of items) {
      const bucket = byProject.get(project);
      if (bucket) {
        bucket.push(entry);
      } else {
        byProject.set(project, [entry]);
      }
    }

    for (const [project, entries] of byProject) {
      try {
        this.writeToLog(project, entries);
        this.stats.entriesWritten += entries.length;
      } catch (err) {
        // A failed write for one project must not abort the others.
        this.stats.writeErrors++;
        this.logger.error('log-write-failed', {
          project,
          entryCount: entries.length,
          error: err.message
        });
      }
    }

    this.stats.batchesFlushed++;

    this.logger.debug('log-batch-flushed', {
      projects: byProject.size,
      totalEntries: items.length,
      batchNum: this.stats.batchesFlushed
    });
  }

  /**
   * Append serialized entries to a project's log.jsonl, creating the
   * directory on first use. Single append call — this service is the
   * only writer, so no file locking is needed.
   */
  writeToLog(project, entries) {
    const projectDir = this.getProjectMemoryDir(project);

    if (!existsSync(projectDir)) {
      mkdirSync(projectDir, { recursive: true });
    }

    const payload = entries.map((e) => JSON.stringify(e)).join('\n') + '\n';
    appendFileSync(join(projectDir, 'log.jsonl'), payload);
  }

  /**
   * Resolve the per-project memory directory (~/.claude-mneme/<hash>),
   * keyed by a truncated sha256 of the project path (same scheme as the
   * rest of the package).
   */
  getProjectMemoryDir(project) {
    const hash = createHash('sha256').update(project).digest('hex').slice(0, 16);
    const memoryBase = homedir() + '/.claude-mneme';
    return join(memoryBase, hash);
  }

  /** Current number of queued-but-unwritten entries. */
  queueDepth() {
    return this.queue.depth();
  }

  /** Snapshot of service counters plus live queue/dedup sizes. */
  getStats() {
    return {
      ...this.stats,
      queueDepth: this.queue.depth(),
      deduplicatorSize: this.deduplicator.size()
    };
  }

  /** Flush outstanding entries and stop background timers. */
  async shutdown() {
    await this.queue.shutdown();
    this.deduplicator.shutdown();
  }
}
|