cf-memory-mcp 3.8.2 → 3.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -600,6 +600,77 @@ DEBUG=1 npx cf-memory-mcp
600
600
  - `DEBUG=1` - Enable debug logging
601
601
  - `MCP_DEBUG=1` - Enable MCP-specific debug logging
602
602
 
603
+ ## 🧠 NEW: Assistant Memory Features (v4.0.0)
604
+
605
+ **AI Assistant Memory Management** - Long-term memory for AI assistants with session tracking, context bootstrapping, and entity relationships.
606
+
607
+ ### Assistant Memory Tools
608
+
609
+ | Tool | Description |
610
+ |------|-------------|
611
+ | `store_memory` | Store facts, preferences, or important context |
612
+ | `retrieve_memories` | Semantic search over memories |
613
+ | `get_context_bootstrap` | Get memories to pre-load on session start |
614
+ | `start_session` | Begin a conversation session |
615
+ | `end_session` | End session, extract key facts |
616
+ | `store_entity` | Store structured entities (people, projects, etc.) |
617
+
618
+ ### Memory Types
619
+ - **fact** - General knowledge about the user
620
+ - **preference** - User preferences and settings
621
+ - **task** - Active or completed tasks
622
+ - **entity** - Structured data about people, projects, companies
623
+ - **session_summary** - Summarized conversation sessions
624
+
625
+ ### Example Usage
626
+
627
+ ```typescript
628
+ // Store a memory
629
+ const result = await store_memory({
630
+ content: "John is building a RAG pipeline with 4x H100 GPUs",
631
+ type: "fact",
632
+ importance: 0.9,
633
+ confidence: 1.0,
634
+ source: "user_explicit",
635
+ tags: ["hardware", "rag", "work"]
636
+ });
637
+
638
+ // Retrieve relevant memories
639
+ const memories = await retrieve_memories({
640
+ query: "what hardware is John using?",
641
+ limit: 5,
642
+ min_importance: 0.7
643
+ });
644
+
645
+ // Bootstrap context on session start
646
+ const context = await get_context_bootstrap({
647
+ max_tokens: 4000,
648
+ recent_sessions: 3,
649
+ current_context: "discussing RAG pipeline"
650
+ });
651
+ ```
652
+
653
+ ### Database Schema
654
+
655
+ Run the migration to add assistant memory tables:
656
+ ```bash
657
+ wrangler d1 execute MEMORY_DB --file=./migrations/002_assistant_memory.sql
658
+ ```
659
+
660
+ Create the vectorize index:
661
+ ```bash
662
+ wrangler vectorize create assistant-memory-index --dimensions=1024 --metric=cosine
663
+ ```
664
+
665
+ Add to `wrangler.toml`:
666
+ ```toml
667
+ [[vectorize]]
668
+ binding = "VECTORIZE_ASSISTANT"
669
+ index_name = "assistant-memory-index"
670
+ ```
671
+
672
+ ---
673
+
603
674
  ## 📋 Requirements
604
675
 
605
676
  - **Node.js** 16.0.0 or higher
@@ -0,0 +1,394 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * CF-Memory-MCP Local Indexer
5
+ *
6
+ * Fast incremental indexing with local file watching and hash caching.
7
+ * Only sends changed files to the server, reducing indexing time by 90%+.
8
+ *
9
+ * Usage:
10
+ * npx cf-memory-mcp-indexer watch /path/to/project
11
+ * npx cf-memory-mcp-indexer index /path/to/project --once
12
+ *
13
+ * Features:
14
+ * - Local file hash cache (stored in ~/.cache/cf-memory-indexer/)
15
+ * - Only uploads changed files
16
+ * - File watching with debouncing
17
+ * - Batch file uploads (faster than individual file processing)
18
+ */
19
+
20
+ const fs = require('fs');
21
+ const path = require('path');
22
+ const crypto = require('crypto');
23
+ const https = require('https');
24
+ const { URL } = require('url');
25
+ const os = require('os');
26
+ const process = require('process');
27
+
28
+ const API_URL = process.env.CF_MEMORY_API_URL || 'https://cf-memory-mcp-simplified.johnlam90.workers.dev';
29
+ const API_KEY = process.env.CF_MEMORY_API_KEY;
30
+ const CACHE_DIR = path.join(os.homedir(), '.cache', 'cf-memory-indexer');
31
+
32
+ // File patterns to include/exclude
33
+ const DEFAULT_INCLUDE = [
34
+ '**/*.ts', '**/*.tsx', '**/*.js', '**/*.jsx',
35
+ '**/*.py', '**/*.go', '**/*.rs', '**/*.java',
36
+ '**/*.md', '**/*.json'
37
+ ];
38
+ const DEFAULT_EXCLUDE = [
39
+ '**/node_modules/**', '**/.git/**', '**/dist/**',
40
+ '**/build/**', '**/.next/**', '**/coverage/**',
41
+ '**/*.min.js', '**/*.d.ts'
42
+ ];
43
+
44
+ class IncrementalIndexer {
45
+ constructor(projectPath, options = {}) {
46
+ this.projectPath = path.resolve(projectPath);
47
+ this.projectName = options.name || path.basename(this.projectPath);
48
+ this.cacheFile = path.join(CACHE_DIR, `${this.hashString(this.projectPath)}.json`);
49
+ this.include = options.include || DEFAULT_INCLUDE;
50
+ this.exclude = options.exclude || DEFAULT_EXCLUDE;
51
+ this.dryRun = options.dryRun || false;
52
+ this.batchSize = options.batchSize || 50;
53
+
54
+ // Ensure cache directory exists
55
+ if (!fs.existsSync(CACHE_DIR)) {
56
+ fs.mkdirSync(CACHE_DIR, { recursive: true });
57
+ }
58
+
59
+ this.cache = this.loadCache();
60
+ }
61
+
62
+ hashString(str) {
63
+ return crypto.createHash('md5').update(str).digest('hex');
64
+ }
65
+
66
+ loadCache() {
67
+ try {
68
+ if (fs.existsSync(this.cacheFile)) {
69
+ return JSON.parse(fs.readFileSync(this.cacheFile, 'utf8'));
70
+ }
71
+ } catch (err) {
72
+ console.error('Failed to load cache:', err.message);
73
+ }
74
+ return { files: {}, lastIndexed: null, projectId: null };
75
+ }
76
+
77
+ saveCache() {
78
+ try {
79
+ fs.writeFileSync(this.cacheFile, JSON.stringify(this.cache, null, 2));
80
+ } catch (err) {
81
+ console.error('Failed to save cache:', err.message);
82
+ }
83
+ }
84
+
85
+ async scanFiles() {
86
+ const files = [];
87
+ const walk = (dir) => {
88
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
89
+ for (const entry of entries) {
90
+ const fullPath = path.join(dir, entry.name);
91
+ const relativePath = path.relative(this.projectPath, fullPath);
92
+
93
+ if (entry.isDirectory()) {
94
+ // Check if directory should be excluded
95
+ const shouldExclude = this.exclude.some(pattern =>
96
+ this.matchGlob(relativePath, pattern.replace('**/', ''))
97
+ );
98
+ if (!shouldExclude) {
99
+ walk(fullPath);
100
+ }
101
+ } else {
102
+ // Check if file should be included
103
+ const shouldInclude = this.include.some(pattern =>
104
+ this.matchGlob(relativePath, pattern)
105
+ );
106
+ const shouldExclude = this.exclude.some(pattern =>
107
+ this.matchGlob(relativePath, pattern)
108
+ );
109
+
110
+ if (shouldInclude && !shouldExclude) {
111
+ files.push(fullPath);
112
+ }
113
+ }
114
+ }
115
+ };
116
+
117
+ walk(this.projectPath);
118
+ return files;
119
+ }
120
+
121
+ matchGlob(filepath, pattern) {
122
+ // Simple glob matching
123
+ const regex = new RegExp(
124
+ '^' +
125
+ pattern
126
+ .replace(/\*\*/g, '<<<GLOBSTAR>>>')
127
+ .replace(/\*/g, '[^/]*')
128
+ .replace(/<<<GLOBSTAR>>>/g, '.*')
129
+ .replace(/\?/g, '.')
130
+ + '$'
131
+ );
132
+ return regex.test(filepath);
133
+ }
134
+
135
+ async getChangedFiles() {
136
+ const allFiles = await this.scanFiles();
137
+ const changedFiles = [];
138
+ const unchangedFiles = [];
139
+ const newCache = { ...this.cache };
140
+
141
+ for (const filePath of allFiles) {
142
+ const relativePath = path.relative(this.projectPath, filePath);
143
+ const stats = fs.statSync(filePath);
144
+ const mtime = stats.mtime.getTime();
145
+ const size = stats.size;
146
+
147
+ // Skip files larger than 1MB
148
+ if (size > 1024 * 1024) {
149
+ console.log(`āš ļø Skipping large file: ${relativePath}`);
150
+ continue;
151
+ }
152
+
153
+ const cached = this.cache.files[relativePath];
154
+
155
+ if (!cached || cached.mtime !== mtime || cached.size !== size) {
156
+ // File is new or changed
157
+ const content = fs.readFileSync(filePath, 'utf8');
158
+ const hash = this.hashString(content);
159
+
160
+ // Double-check with content hash
161
+ if (!cached || cached.hash !== hash) {
162
+ changedFiles.push({
163
+ path: relativePath,
164
+ fullPath: filePath,
165
+ content,
166
+ size,
167
+ mtime
168
+ });
169
+ } else {
170
+ // Content same but mtime/size changed (git checkout, etc.)
171
+ unchangedFiles.push(relativePath);
172
+ newCache.files[relativePath] = { hash, mtime, size };
173
+ }
174
+ } else {
175
+ unchangedFiles.push(relativePath);
176
+ }
177
+ }
178
+
179
+ // Detect deleted files
180
+ const currentFiles = new Set(allFiles.map(f => path.relative(this.projectPath, f)));
181
+ const deletedFiles = Object.keys(this.cache.files).filter(f => !currentFiles.has(f));
182
+
183
+ return { changedFiles, unchangedFiles, deletedFiles, newCache };
184
+ }
185
+
186
+ async index() {
187
+ console.log(`šŸ” Scanning ${this.projectPath}...`);
188
+ const { changedFiles, unchangedFiles, deletedFiles, newCache } = await this.getChangedFiles();
189
+
190
+ console.log(`šŸ“Š Found ${changedFiles.length} changed, ${unchangedFiles.length} unchanged, ${deletedFiles.length} deleted`);
191
+
192
+ if (changedFiles.length === 0 && deletedFiles.length === 0) {
193
+ console.log('āœ… Everything up to date!');
194
+ return;
195
+ }
196
+
197
+ if (this.dryRun) {
198
+ console.log('šŸ” Dry run - would index:');
199
+ changedFiles.forEach(f => console.log(` - ${f.path}`));
200
+ return;
201
+ }
202
+
203
+ // Upload changed files in batches
204
+ if (changedFiles.length > 0) {
205
+ console.log(`šŸ“¤ Uploading ${changedFiles.length} files...`);
206
+ await this.uploadFiles(changedFiles);
207
+ }
208
+
209
+ // Update cache
210
+ for (const file of changedFiles) {
211
+ newCache.files[file.path] = {
212
+ hash: this.hashString(file.content),
213
+ mtime: file.mtime,
214
+ size: file.size
215
+ };
216
+ }
217
+
218
+ // Remove deleted files from cache
219
+ for (const file of deletedFiles) {
220
+ delete newCache.files[file];
221
+ }
222
+
223
+ newCache.lastIndexed = Date.now();
224
+ this.cache = newCache;
225
+ this.saveCache();
226
+
227
+ console.log('āœ… Index complete!');
228
+ }
229
+
230
+ async uploadFiles(files) {
231
+ // Process in batches
232
+ for (let i = 0; i < files.length; i += this.batchSize) {
233
+ const batch = files.slice(i, i + this.batchSize);
234
+ console.log(` Batch ${Math.floor(i / this.batchSize) + 1}/${Math.ceil(files.length / this.batchSize)} (${batch.length} files)`);
235
+
236
+ await this.uploadBatch(batch);
237
+ }
238
+ }
239
+
240
+ async uploadBatch(files) {
241
+ return new Promise((resolve, reject) => {
242
+ const payload = JSON.stringify({
243
+ project_path: this.projectPath,
244
+ project_name: this.projectName,
245
+ files: files.map(f => ({
246
+ path: f.path,
247
+ content: f.content
248
+ }))
249
+ });
250
+
251
+ const url = new URL(`${API_URL}/mcp/tools/index_project`);
252
+ const options = {
253
+ hostname: url.hostname,
254
+ path: url.pathname,
255
+ method: 'POST',
256
+ headers: {
257
+ 'Content-Type': 'application/json',
258
+ 'Authorization': `Bearer ${API_KEY}`,
259
+ 'Content-Length': Buffer.byteLength(payload)
260
+ },
261
+ timeout: 120000
262
+ };
263
+
264
+ const req = https.request(options, (res) => {
265
+ let data = '';
266
+ res.on('data', chunk => data += chunk);
267
+ res.on('end', () => {
268
+ if (res.statusCode >= 200 && res.statusCode < 300) {
269
+ try {
270
+ const result = JSON.parse(data);
271
+ if (result.content && result.content[0]) {
272
+ const content = JSON.parse(result.content[0].text);
273
+ if (content.project_id) {
274
+ this.cache.projectId = content.project_id;
275
+ }
276
+ }
277
+ resolve(result);
278
+ } catch (err) {
279
+ reject(new Error(`Invalid response: ${err.message}`));
280
+ }
281
+ } else {
282
+ reject(new Error(`HTTP ${res.statusCode}: ${data}`));
283
+ }
284
+ });
285
+ });
286
+
287
+ req.on('error', reject);
288
+ req.on('timeout', () => {
289
+ req.destroy();
290
+ reject(new Error('Request timeout'));
291
+ });
292
+
293
+ req.write(payload);
294
+ req.end();
295
+ });
296
+ }
297
+
298
+ async watch() {
299
+ console.log(`šŸ‘ļø Watching ${this.projectPath} for changes...`);
300
+ console.log('Press Ctrl+C to stop\n');
301
+
302
+ // Initial index
303
+ await this.index();
304
+
305
+ // Watch for changes
306
+ const chokidar = await this.loadChokidar();
307
+
308
+ const watcher = chokidar.watch(this.projectPath, {
309
+ ignored: this.exclude,
310
+ persistent: true,
311
+ ignoreInitial: true
312
+ });
313
+
314
+ let debounceTimer = null;
315
+
316
+ const handleChange = async () => {
317
+ if (debounceTimer) clearTimeout(debounceTimer);
318
+ debounceTimer = setTimeout(async () => {
319
+ console.log('\nšŸ”„ Changes detected, re-indexing...');
320
+ await this.index();
321
+ console.log('');
322
+ }, 1000);
323
+ };
324
+
325
+ watcher
326
+ .on('add', path => handleChange())
327
+ .on('change', path => handleChange())
328
+ .on('unlink', path => handleChange());
329
+
330
+ // Keep process alive
331
+ process.on('SIGINT', () => {
332
+ console.log('\nšŸ‘‹ Stopping watcher...');
333
+ watcher.close();
334
+ process.exit(0);
335
+ });
336
+ }
337
+
338
+ async loadChokidar() {
339
+ try {
340
+ return require('chokidar');
341
+ } catch (err) {
342
+ console.log('Installing chokidar for file watching...');
343
+ const { execSync } = require('child_process');
344
+ execSync('npm install chokidar --save-dev', { cwd: __dirname, stdio: 'inherit' });
345
+ return require('chokidar');
346
+ }
347
+ }
348
+ }
349
+
350
+ // CLI
351
+ async function main() {
352
+ if (!API_KEY) {
353
+ console.error('āŒ CF_MEMORY_API_KEY environment variable required');
354
+ process.exit(1);
355
+ }
356
+
357
+ const args = process.argv.slice(2);
358
+ const command = args[0];
359
+ const projectPath = args[1] || '.';
360
+
361
+ const options = {
362
+ dryRun: args.includes('--dry-run'),
363
+ once: args.includes('--once')
364
+ };
365
+
366
+ const indexer = new IncrementalIndexer(projectPath, options);
367
+
368
+ if (command === 'watch') {
369
+ await indexer.watch();
370
+ } else if (command === 'index') {
371
+ await indexer.index();
372
+ } else {
373
+ console.log(`
374
+ CF-Memory-MCP Incremental Indexer
375
+
376
+ Usage:
377
+ cf-memory-mcp-indexer index <path> Index project once
378
+ cf-memory-mcp-indexer watch <path> Watch and auto-index on changes
379
+
380
+ Options:
381
+ --dry-run Show what would be indexed without uploading
382
+
383
+ Environment:
384
+ CF_MEMORY_API_KEY Required. Your API key
385
+ CF_MEMORY_API_URL Optional. Custom API endpoint
386
+ `);
387
+ process.exit(1);
388
+ }
389
+ }
390
+
391
+ main().catch(err => {
392
+ console.error('Error:', err.message);
393
+ process.exit(1);
394
+ });
package/package.json CHANGED
@@ -1,10 +1,12 @@
1
1
  {
2
2
  "name": "cf-memory-mcp",
3
- "version": "3.8.2",
3
+ "version": "3.8.3",
4
4
  "description": "Best-in-class MCP server with CONTEXTUAL CHUNKING (Anthropic-style, 35-67% better retrieval), Optimized LLM stack (Llama-3.1-8B), BGE-M3 embeddings, Query Expansion Caching, Hybrid Embedding Strategy, and Unified Project Intelligence",
5
5
  "main": "bin/cf-memory-mcp.js",
6
6
  "bin": {
7
- "cf-memory-mcp": "bin/cf-memory-mcp.js"
7
+ "cf-memory-mcp": "bin/cf-memory-mcp.js",
8
+ "cf-memory-index": "bin/cf-memory-mcp-indexer.js",
9
+ "cf-memory-watch": "bin/cf-memory-mcp-indexer.js"
8
10
  },
9
11
  "author": {
10
12
  "name": "John Lam",
@@ -130,6 +132,6 @@
130
132
  "jest-environment-node": "^30.0.4",
131
133
  "ts-jest": "^29.4.0",
132
134
  "typescript": "^5.7.3",
133
- "wrangler": "^4.21.2"
135
+ "wrangler": "^4.67.1"
134
136
  }
135
137
  }