cf-memory-mcp 3.8.2 → 3.8.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +71 -0
- package/bin/cf-memory-mcp-indexer.js +410 -0
- package/bin/cf-memory-mcp.js +35 -0
- package/package.json +5 -3
package/README.md
CHANGED
|
@@ -600,6 +600,77 @@ DEBUG=1 npx cf-memory-mcp
|
|
|
600
600
|
- `DEBUG=1` - Enable debug logging
|
|
601
601
|
- `MCP_DEBUG=1` - Enable MCP-specific debug logging
|
|
602
602
|
|
|
603
|
+
## 🧠 NEW: Assistant Memory Features (v4.0.0)
|
|
604
|
+
|
|
605
|
+
**AI Assistant Memory Management** - Long-term memory for AI assistants with session tracking, context bootstrapping, and entity relationships.
|
|
606
|
+
|
|
607
|
+
### Assistant Memory Tools
|
|
608
|
+
|
|
609
|
+
| Tool | Description |
|
|
610
|
+
|------|-------------|
|
|
611
|
+
| `store_memory` | Store facts, preferences, or important context |
|
|
612
|
+
| `retrieve_memories` | Semantic search over memories |
|
|
613
|
+
| `get_context_bootstrap` | Get memories to pre-load on session start |
|
|
614
|
+
| `start_session` | Begin a conversation session |
|
|
615
|
+
| `end_session` | End session, extract key facts |
|
|
616
|
+
| `store_entity` | Store structured entities (people, projects, etc.) |
|
|
617
|
+
|
|
618
|
+
### Memory Types
|
|
619
|
+
- **fact** - General knowledge about the user
|
|
620
|
+
- **preference** - User preferences and settings
|
|
621
|
+
- **task** - Active or completed tasks
|
|
622
|
+
- **entity** - Structured data about people, projects, companies
|
|
623
|
+
- **session_summary** - Summarized conversation sessions
|
|
624
|
+
|
|
625
|
+
### Example Usage
|
|
626
|
+
|
|
627
|
+
```typescript
|
|
628
|
+
// Store a memory
|
|
629
|
+
const result = await store_memory({
|
|
630
|
+
content: "John is building a RAG pipeline with 4x H100 GPUs",
|
|
631
|
+
type: "fact",
|
|
632
|
+
importance: 0.9,
|
|
633
|
+
confidence: 1.0,
|
|
634
|
+
source: "user_explicit",
|
|
635
|
+
tags: ["hardware", "rag", "work"]
|
|
636
|
+
});
|
|
637
|
+
|
|
638
|
+
// Retrieve relevant memories
|
|
639
|
+
const memories = await retrieve_memories({
|
|
640
|
+
query: "what hardware is John using?",
|
|
641
|
+
limit: 5,
|
|
642
|
+
min_importance: 0.7
|
|
643
|
+
});
|
|
644
|
+
|
|
645
|
+
// Bootstrap context on session start
|
|
646
|
+
const context = await get_context_bootstrap({
|
|
647
|
+
max_tokens: 4000,
|
|
648
|
+
recent_sessions: 3,
|
|
649
|
+
current_context: "discussing RAG pipeline"
|
|
650
|
+
});
|
|
651
|
+
```
|
|
652
|
+
|
|
653
|
+
### Database Schema
|
|
654
|
+
|
|
655
|
+
Run the migration to add assistant memory tables:
|
|
656
|
+
```bash
|
|
657
|
+
wrangler d1 execute MEMORY_DB --file=./migrations/002_assistant_memory.sql
|
|
658
|
+
```
|
|
659
|
+
|
|
660
|
+
Create the vectorize index:
|
|
661
|
+
```bash
|
|
662
|
+
wrangler vectorize create assistant-memory-index --dimensions=1024 --metric=cosine
|
|
663
|
+
```
|
|
664
|
+
|
|
665
|
+
Add to `wrangler.toml`:
|
|
666
|
+
```toml
|
|
667
|
+
[[vectorize]]
|
|
668
|
+
binding = "VECTORIZE_ASSISTANT"
|
|
669
|
+
index_name = "assistant-memory-index"
|
|
670
|
+
```
|
|
671
|
+
|
|
672
|
+
---
|
|
673
|
+
|
|
603
674
|
## 📋 Requirements
|
|
604
675
|
|
|
605
676
|
- **Node.js** 16.0.0 or higher
|
|
@@ -0,0 +1,410 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* CF-Memory-MCP Local Indexer
|
|
5
|
+
*
|
|
6
|
+
* Fast incremental indexing with local file watching and hash caching.
|
|
7
|
+
* Only sends changed files to the server, reducing indexing time by 90%+.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* npx cf-memory-mcp-indexer watch /path/to/project
|
|
11
|
+
* npx cf-memory-mcp-indexer index /path/to/project --once
|
|
12
|
+
*
|
|
13
|
+
* Features:
|
|
14
|
+
* - Local file hash cache (stored in ~/.cache/cf-memory-indexer/)
|
|
15
|
+
* - Only uploads changed files
|
|
16
|
+
* - File watching with debouncing
|
|
17
|
+
* - Batch file uploads (faster than individual file processing)
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
const fs = require('fs');
|
|
21
|
+
const path = require('path');
|
|
22
|
+
const crypto = require('crypto');
|
|
23
|
+
const https = require('https');
|
|
24
|
+
const { URL } = require('url');
|
|
25
|
+
const os = require('os');
|
|
26
|
+
const process = require('process');
|
|
27
|
+
|
|
28
|
+
const API_URL = process.env.CF_MEMORY_API_URL || 'https://cf-memory-mcp-simplified.johnlam90.workers.dev';
|
|
29
|
+
const API_KEY = process.env.CF_MEMORY_API_KEY;
|
|
30
|
+
const CACHE_DIR = path.join(os.homedir(), '.cache', 'cf-memory-indexer');
|
|
31
|
+
|
|
32
|
+
// File patterns to include/exclude
|
|
33
|
+
const DEFAULT_INCLUDE = [
|
|
34
|
+
'**/*.ts', '**/*.tsx', '**/*.js', '**/*.jsx',
|
|
35
|
+
'**/*.py', '**/*.go', '**/*.rs', '**/*.java',
|
|
36
|
+
'**/*.md', '**/*.json'
|
|
37
|
+
];
|
|
38
|
+
const DEFAULT_EXCLUDE = [
|
|
39
|
+
'**/node_modules/**', '**/.git/**', '**/dist/**',
|
|
40
|
+
'**/build/**', '**/.next/**', '**/coverage/**',
|
|
41
|
+
'**/*.min.js', '**/*.d.ts'
|
|
42
|
+
];
|
|
43
|
+
|
|
44
|
+
class IncrementalIndexer {
  /**
   * Incremental project indexer for the cf-memory API.
   *
   * Keeps a per-project cache (mtime, size, content hash for every file)
   * under CACHE_DIR so repeat runs only upload files that actually changed,
   * and can detect deletions.
   *
   * @param {string} projectPath - Root directory of the project to index.
   * @param {object} [options]
   * @param {string} [options.name] - Project name; defaults to the directory basename.
   * @param {string[]} [options.include] - Glob patterns of files to index.
   * @param {string[]} [options.exclude] - Glob patterns of files/dirs to skip.
   * @param {boolean} [options.dryRun] - Report what would be indexed without uploading.
   * @param {number} [options.batchSize] - Number of files per upload request.
   */
  constructor(projectPath, options = {}) {
    this.projectPath = path.resolve(projectPath);
    this.projectName = options.name || path.basename(this.projectPath);
    // Cache file name is derived from the absolute project path so
    // different projects never share a cache entry.
    this.cacheFile = path.join(CACHE_DIR, `${this.hashString(this.projectPath)}.json`);
    this.include = options.include || DEFAULT_INCLUDE;
    this.exclude = options.exclude || DEFAULT_EXCLUDE;
    this.dryRun = options.dryRun || false;
    this.batchSize = options.batchSize || 50;

    // Ensure cache directory exists
    if (!fs.existsSync(CACHE_DIR)) {
      fs.mkdirSync(CACHE_DIR, { recursive: true });
    }

    this.cache = this.loadCache();
  }

  /**
   * Hash a string to a hex digest. MD5 is used purely as a fast change
   * detector / cache key here, not for anything security-sensitive.
   * @param {string} str
   * @returns {string} 32-character hex digest
   */
  hashString(str) {
    return crypto.createHash('md5').update(str).digest('hex');
  }

  /**
   * Load the cached file metadata for this project. Returns a fresh,
   * empty cache when the file is missing or unreadable/corrupt.
   */
  loadCache() {
    try {
      if (fs.existsSync(this.cacheFile)) {
        return JSON.parse(fs.readFileSync(this.cacheFile, 'utf8'));
      }
    } catch (err) {
      console.error('Failed to load cache:', err.message);
    }
    return { files: {}, lastIndexed: null, projectId: null };
  }

  /** Persist the in-memory cache to disk (best-effort; errors are logged). */
  saveCache() {
    try {
      fs.writeFileSync(this.cacheFile, JSON.stringify(this.cache, null, 2));
    } catch (err) {
      console.error('Failed to save cache:', err.message);
    }
  }

  /**
   * Recursively collect all files under the project root that match at
   * least one include pattern and no exclude pattern.
   * @returns {Promise<string[]>} absolute file paths
   */
  async scanFiles() {
    const files = [];
    const walk = (dir) => {
      const entries = fs.readdirSync(dir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);
        const relativePath = path.relative(this.projectPath, fullPath);

        if (entry.isDirectory()) {
          // Prune excluded directories early. Also matching the path with
          // a trailing slash lets `**/node_modules/**`-style patterns
          // exclude the directory itself, not only the files inside it.
          const shouldExclude = this.exclude.some(pattern =>
            this.matchGlob(relativePath, pattern) ||
            this.matchGlob(`${relativePath}/`, pattern)
          );
          if (!shouldExclude) {
            walk(fullPath);
          }
        } else {
          // Check if file should be included
          const shouldInclude = this.include.some(pattern =>
            this.matchGlob(relativePath, pattern)
          );
          const shouldExclude = this.exclude.some(pattern =>
            this.matchGlob(relativePath, pattern)
          );

          if (shouldInclude && !shouldExclude) {
            files.push(fullPath);
          }
        }
      }
    };

    walk(this.projectPath);
    return files;
  }

  /**
   * Minimal glob matcher supporting `**` (any depth, including zero
   * directories), `*` (within one path segment) and `?` (single
   * non-separator character).
   *
   * Fixes over the previous version:
   * - regex metacharacters (notably `.`) are escaped, so `*.ts` no longer
   *   matches names like `appxts` via a wildcard dot;
   * - a leading/inner `**` followed by `/` matches zero segments, so
   *   `**` + `/*.ts` matches root-level files and the node_modules
   *   exclude pattern matches a top-level node_modules directory;
   * - Windows path separators are normalized to `/` before matching.
   *
   * @param {string} filepath - Path relative to the project root.
   * @param {string} pattern - Glob pattern.
   * @returns {boolean}
   */
  matchGlob(filepath, pattern) {
    const normalized = filepath.split(path.sep).join('/');
    const regexSource =
      '^' +
      pattern
        // Escape regex metacharacters, keeping the glob tokens * and ?.
        .replace(/[.+^${}()|[\]\\]/g, '\\$&')
        // `**` followed by `/` matches zero or more whole path segments.
        .replace(/\*\*\//g, '<<<GLOBSTAR_SLASH>>>')
        .replace(/\*\*/g, '<<<GLOBSTAR>>>')
        .replace(/\*/g, '[^/]*')
        .replace(/\?/g, '[^/]')
        .replace(/<<<GLOBSTAR_SLASH>>>/g, '(?:.*/)?')
        .replace(/<<<GLOBSTAR>>>/g, '.*') +
      '$';
    return new RegExp(regexSource).test(normalized);
  }

  /**
   * Compare the current file tree against the cache.
   * @returns {Promise<{changedFiles: Array<{path: string, fullPath: string, content: string, size: number, mtime: number}>, unchangedFiles: string[], deletedFiles: string[], newCache: object}>}
   */
  async getChangedFiles() {
    const allFiles = await this.scanFiles();
    const changedFiles = [];
    const unchangedFiles = [];
    // Copy the files map too: a plain shallow spread shared the `files`
    // object with this.cache, so updates leaked into the live cache
    // before the index run committed.
    const newCache = { ...this.cache, files: { ...this.cache.files } };

    for (const filePath of allFiles) {
      const relativePath = path.relative(this.projectPath, filePath);
      const stats = fs.statSync(filePath);
      const mtime = stats.mtime.getTime();
      const size = stats.size;

      // Skip files larger than 1MB
      if (size > 1024 * 1024) {
        console.log(`⚠️ Skipping large file: ${relativePath}`);
        continue;
      }

      const cached = this.cache.files[relativePath];

      if (!cached || cached.mtime !== mtime || cached.size !== size) {
        // File is new, or its metadata changed — confirm via content hash.
        const content = fs.readFileSync(filePath, 'utf8');
        const hash = this.hashString(content);

        if (!cached || cached.hash !== hash) {
          changedFiles.push({
            path: relativePath,
            fullPath: filePath,
            content,
            size,
            mtime
          });
        } else {
          // Content same but mtime/size changed (git checkout, etc.)
          unchangedFiles.push(relativePath);
          newCache.files[relativePath] = { hash, mtime, size };
        }
      } else {
        unchangedFiles.push(relativePath);
      }
    }

    // Detect deleted files
    const currentFiles = new Set(allFiles.map(f => path.relative(this.projectPath, f)));
    const deletedFiles = Object.keys(this.cache.files).filter(f => !currentFiles.has(f));

    return { changedFiles, unchangedFiles, deletedFiles, newCache };
  }

  /**
   * Run one incremental index pass: scan, diff against the cache, upload
   * changed files, then commit the refreshed cache.
   */
  async index() {
    console.log(`🔍 Scanning ${this.projectPath}...`);
    const { changedFiles, unchangedFiles, deletedFiles, newCache } = await this.getChangedFiles();

    console.log(`📊 Found ${changedFiles.length} changed, ${unchangedFiles.length} unchanged, ${deletedFiles.length} deleted`);

    if (changedFiles.length === 0 && deletedFiles.length === 0) {
      console.log('✅ Everything up to date!');
      return;
    }

    if (this.dryRun) {
      console.log('🔍 Dry run - would index:');
      changedFiles.forEach(f => console.log(`  - ${f.path}`));
      return;
    }

    // Upload changed files in batches
    if (changedFiles.length > 0) {
      console.log(`📤 Uploading ${changedFiles.length} files...`);
      await this.uploadFiles(changedFiles);
      // uploadBatch() may have stored the server-assigned project id on
      // this.cache; carry it into the snapshot we commit below, otherwise
      // it would be lost when this.cache is replaced.
      newCache.projectId = this.cache.projectId;
    }

    // Update cache entries for the files we just uploaded
    for (const file of changedFiles) {
      newCache.files[file.path] = {
        hash: this.hashString(file.content),
        mtime: file.mtime,
        size: file.size
      };
    }

    // Remove deleted files from cache
    for (const file of deletedFiles) {
      delete newCache.files[file];
    }

    newCache.lastIndexed = Date.now();
    this.cache = newCache;
    this.saveCache();

    console.log('✅ Index complete!');
  }

  /**
   * Upload files to the server in sequential batches of `this.batchSize`.
   * @param {Array<{path: string, content: string}>} files
   */
  async uploadFiles(files) {
    for (let i = 0; i < files.length; i += this.batchSize) {
      const batch = files.slice(i, i + this.batchSize);
      console.log(`  Batch ${Math.floor(i / this.batchSize) + 1}/${Math.ceil(files.length / this.batchSize)} (${batch.length} files)`);

      await this.uploadBatch(batch);
    }
  }

  /**
   * POST one batch of files to the `index_project` tool endpoint.
   * Records the server-assigned project id on this.cache when present.
   *
   * Wraps the callback-based https API in a Promise (the one legitimate
   * use of `new Promise`).
   *
   * @param {Array<{path: string, content: string}>} files
   * @returns {Promise<object>} parsed JSON response
   */
  async uploadBatch(files) {
    return new Promise((resolve, reject) => {
      const payload = JSON.stringify({
        project_path: this.projectPath,
        project_name: this.projectName,
        files: files.map(f => ({
          path: f.path,
          content: f.content
        }))
      });

      const url = new URL(`${API_URL}/mcp/tools/index_project`);
      const options = {
        hostname: url.hostname,
        path: url.pathname,
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${API_KEY}`,
          'Content-Length': Buffer.byteLength(payload)
        },
        timeout: 120000
      };

      const req = https.request(options, (res) => {
        let data = '';
        res.on('data', chunk => data += chunk);
        res.on('end', () => {
          if (res.statusCode >= 200 && res.statusCode < 300) {
            try {
              const result = JSON.parse(data);
              // MCP tool responses wrap their JSON result in content[0].text.
              if (result.content && result.content[0]) {
                const content = JSON.parse(result.content[0].text);
                if (content.project_id) {
                  this.cache.projectId = content.project_id;
                }
              }
              resolve(result);
            } catch (err) {
              reject(new Error(`Invalid response: ${err.message}`));
            }
          } else {
            reject(new Error(`HTTP ${res.statusCode}: ${data}`));
          }
        });
      });

      req.on('error', reject);
      req.on('timeout', () => {
        req.destroy();
        reject(new Error('Request timeout'));
      });

      req.write(payload);
      req.end();
    });
  }

  /**
   * Index once, then watch the project for changes and re-index with a
   * 1-second debounce. Runs until SIGINT.
   */
  async watch() {
    console.log(`👁️ Watching ${this.projectPath} for changes...`);
    console.log('Press Ctrl+C to stop\n');

    // Initial index
    await this.index();

    // Watch for changes
    const chokidar = await this.loadChokidar();

    const watcher = chokidar.watch(this.projectPath, {
      ignored: this.exclude,
      persistent: true,
      ignoreInitial: true
    });

    let debounceTimer = null;

    // Debounce bursts of fs events into a single re-index. The re-index
    // promise is explicitly caught so a failed run cannot surface as an
    // unhandled rejection inside the timer callback.
    const handleChange = () => {
      if (debounceTimer) clearTimeout(debounceTimer);
      debounceTimer = setTimeout(() => {
        console.log('\n🔄 Changes detected, re-indexing...');
        this.index()
          .then(() => console.log(''))
          .catch(err => console.error('Re-index failed:', err.message));
      }, 1000);
    };

    watcher
      .on('add', handleChange)
      .on('change', handleChange)
      .on('unlink', handleChange);

    // Keep process alive
    process.on('SIGINT', () => {
      console.log('\n👋 Stopping watcher...');
      watcher.close();
      process.exit(0);
    });
  }

  /**
   * Require chokidar, installing it on the fly if missing.
   * NOTE(review): installs into this package's own directory via npm;
   * assumes npm is on PATH and the directory is writable.
   */
  async loadChokidar() {
    try {
      return require('chokidar');
    } catch (err) {
      console.log('Installing chokidar for file watching...');
      const { execSync } = require('child_process');
      execSync('npm install chokidar --save-dev', { cwd: __dirname, stdio: 'inherit' });
      return require('chokidar');
    }
  }
}
|
|
349
|
+
|
|
350
|
+
// CLI
|
|
351
|
+
/**
 * CLI entry point. Determines the command from the invoked bin name
 * (cf-memory-index / cf-memory-watch) or the first positional argument,
 * then runs a one-shot index or a persistent watch.
 *
 * Exits with code 1 when CF_MEMORY_API_KEY is unset or no valid command
 * was given.
 */
async function main() {
  if (!API_KEY) {
    console.error('❌ CF_MEMORY_API_KEY environment variable required');
    process.exit(1);
  }

  const args = process.argv.slice(2);
  // Separate flags from positional arguments, so an invocation like
  // `cf-memory-index --dry-run` no longer mistakes the flag for the
  // project path.
  const flags = args.filter(a => a.startsWith('--'));
  const positional = args.filter(a => !a.startsWith('--'));

  // Detect command from how the script was called (cf-memory-index vs cf-memory-watch)
  const scriptName = path.basename(process.argv[1]);
  let command = positional[0];
  let projectPath = positional[1] || '.';

  if (scriptName.includes('watch')) {
    command = 'watch';
    projectPath = positional[0] || '.';
  } else if (scriptName.includes('index')) {
    command = 'index';
    projectPath = positional[0] || '.';
  }

  const options = {
    dryRun: flags.includes('--dry-run'),
    once: flags.includes('--once')
  };

  const indexer = new IncrementalIndexer(projectPath, options);

  if (command === 'watch') {
    await indexer.watch();
  } else if (command === 'index') {
    await indexer.index();
  } else {
    console.log(`
CF-Memory-MCP Incremental Indexer

Usage:
  cf-memory-mcp-indexer index <path>   Index project once
  cf-memory-mcp-indexer watch <path>   Watch and auto-index on changes

Options:
  --dry-run    Show what would be indexed without uploading

Environment:
  CF_MEMORY_API_KEY    Required. Your API key
  CF_MEMORY_API_URL    Optional. Custom API endpoint
`);
    process.exit(1);
  }
}
|
|
401
|
+
|
|
402
|
+
// Run the CLI only when this file is executed directly (node, or via the
// cf-memory-index / cf-memory-watch bin shims); a plain require() skips
// main() and only evaluates the export below.
if (require.main === module) {
  main().catch(err => {
    console.error('Error:', err.message);
    process.exit(1);
  });
}

// Export for programmatic use (e.g. the MCP server's auto-watcher).
module.exports = { IncrementalIndexer };
|
package/bin/cf-memory-mcp.js
CHANGED
|
@@ -45,6 +45,7 @@ class CFMemoryMCP {
|
|
|
45
45
|
this.legacyServerUrl = LEGACY_SERVER_URL;
|
|
46
46
|
this.userAgent = `cf-memory-mcp/${PACKAGE_VERSION} (${os.platform()} ${os.arch()}; Node.js ${process.version})`;
|
|
47
47
|
this.useStreamableHttp = true; // Try Streamable HTTP first
|
|
48
|
+
this.autoWatcher = null; // Background file watcher
|
|
48
49
|
|
|
49
50
|
// Handle process termination gracefully
|
|
50
51
|
process.on('SIGINT', () => this.shutdown('SIGINT'));
|
|
@@ -91,12 +92,46 @@ class CFMemoryMCP {
|
|
|
91
92
|
try {
|
|
92
93
|
// Skip connectivity test in MCP mode - it will be tested when first request is made
|
|
93
94
|
this.logDebug('Starting MCP message processing...');
|
|
95
|
+
|
|
96
|
+
// Start auto-watcher if CF_MEMORY_AUTO_WATCH is set
|
|
97
|
+
if (process.env.CF_MEMORY_AUTO_WATCH === '1' || process.env.CF_MEMORY_AUTO_WATCH === 'true') {
|
|
98
|
+
this.startAutoWatcher();
|
|
99
|
+
}
|
|
100
|
+
|
|
94
101
|
await this.processStdio();
|
|
95
102
|
} catch (error) {
|
|
96
103
|
this.logError('Failed to start MCP server:', error);
|
|
97
104
|
process.exit(1);
|
|
98
105
|
}
|
|
99
106
|
}
|
|
107
|
+
|
|
108
|
+
/**
|
|
109
|
+
* Start auto-watching files for changes
|
|
110
|
+
*/
|
|
111
|
+
async startAutoWatcher() {
|
|
112
|
+
// Use CF_MEMORY_WATCH_PATH if set, otherwise use current working directory
|
|
113
|
+
const watchPath = process.env.CF_MEMORY_WATCH_PATH || process.cwd();
|
|
114
|
+
const projectName = process.env.CF_MEMORY_PROJECT_NAME || path.basename(watchPath);
|
|
115
|
+
|
|
116
|
+
this.logDebug(`Starting auto-watch for: ${watchPath} (${projectName})`);
|
|
117
|
+
|
|
118
|
+
try {
|
|
119
|
+
// Dynamically import the indexer
|
|
120
|
+
const { IncrementalIndexer } = require('./cf-memory-mcp-indexer.js');
|
|
121
|
+
this.autoWatcher = new IncrementalIndexer(watchPath, {
|
|
122
|
+
name: projectName
|
|
123
|
+
});
|
|
124
|
+
|
|
125
|
+
// Start watching in background
|
|
126
|
+
this.autoWatcher.watch().catch(err => {
|
|
127
|
+
this.logError('Auto-watch error:', err);
|
|
128
|
+
});
|
|
129
|
+
|
|
130
|
+
this.logDebug('Auto-watch started successfully');
|
|
131
|
+
} catch (err) {
|
|
132
|
+
this.logError('Failed to start auto-watch:', err);
|
|
133
|
+
}
|
|
134
|
+
}
|
|
100
135
|
|
|
101
136
|
/**
|
|
102
137
|
* Test connectivity to the Cloudflare Worker
|
package/package.json
CHANGED
|
@@ -1,10 +1,12 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "cf-memory-mcp",
|
|
3
|
-
"version": "3.8.2",
|
|
3
|
+
"version": "3.8.4",
|
|
4
4
|
"description": "Best-in-class MCP server with CONTEXTUAL CHUNKING (Anthropic-style, 35-67% better retrieval), Optimized LLM stack (Llama-3.1-8B), BGE-M3 embeddings, Query Expansion Caching, Hybrid Embedding Strategy, and Unified Project Intelligence",
|
|
5
5
|
"main": "bin/cf-memory-mcp.js",
|
|
6
6
|
"bin": {
|
|
7
|
-
"cf-memory-mcp": "bin/cf-memory-mcp.js"
|
|
7
|
+
"cf-memory-mcp": "bin/cf-memory-mcp.js",
|
|
8
|
+
"cf-memory-index": "bin/cf-memory-mcp-indexer.js",
|
|
9
|
+
"cf-memory-watch": "bin/cf-memory-mcp-indexer.js"
|
|
8
10
|
},
|
|
9
11
|
"author": {
|
|
10
12
|
"name": "John Lam",
|
|
@@ -130,6 +132,6 @@
|
|
|
130
132
|
"jest-environment-node": "^30.0.4",
|
|
131
133
|
"ts-jest": "^29.4.0",
|
|
132
134
|
"typescript": "^5.7.3",
|
|
133
|
-
"wrangler": "^4.
|
|
135
|
+
"wrangler": "^4.67.1"
|
|
134
136
|
}
|
|
135
137
|
}
|