mcp-memory-keeper 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +433 -0
- package/LICENSE +21 -0
- package/README.md +1051 -0
- package/bin/mcp-memory-keeper +52 -0
- package/dist/__tests__/helpers/database-test-helper.js +160 -0
- package/dist/__tests__/helpers/test-server.js +92 -0
- package/dist/__tests__/integration/advanced-features.test.js +614 -0
- package/dist/__tests__/integration/backward-compatibility.test.js +245 -0
- package/dist/__tests__/integration/batchOperationsE2E.test.js +396 -0
- package/dist/__tests__/integration/batchOperationsHandler.test.js +1230 -0
- package/dist/__tests__/integration/channelManagementHandler.test.js +1291 -0
- package/dist/__tests__/integration/channels.test.js +376 -0
- package/dist/__tests__/integration/checkpoint.test.js +251 -0
- package/dist/__tests__/integration/concurrent-access.test.js +190 -0
- package/dist/__tests__/integration/context-operations.test.js +243 -0
- package/dist/__tests__/integration/contextDiff.test.js +852 -0
- package/dist/__tests__/integration/contextDiffHandler.test.js +976 -0
- package/dist/__tests__/integration/contextExportHandler.test.js +510 -0
- package/dist/__tests__/integration/contextGetPaginationDefaults.test.js +298 -0
- package/dist/__tests__/integration/contextReassignChannelHandler.test.js +908 -0
- package/dist/__tests__/integration/contextRelationshipsHandler.test.js +1151 -0
- package/dist/__tests__/integration/contextSearch.test.js +938 -0
- package/dist/__tests__/integration/contextSearchHandler.test.js +552 -0
- package/dist/__tests__/integration/contextWatchActual.test.js +165 -0
- package/dist/__tests__/integration/contextWatchHandler.test.js +1500 -0
- package/dist/__tests__/integration/cross-session-sharing.test.js +302 -0
- package/dist/__tests__/integration/database-initialization.test.js +134 -0
- package/dist/__tests__/integration/enhanced-context-operations.test.js +1082 -0
- package/dist/__tests__/integration/enhancedContextGetHandler.test.js +915 -0
- package/dist/__tests__/integration/enhancedContextTimelineHandler.test.js +716 -0
- package/dist/__tests__/integration/error-cases.test.js +407 -0
- package/dist/__tests__/integration/export-import.test.js +367 -0
- package/dist/__tests__/integration/feature-flags.test.js +542 -0
- package/dist/__tests__/integration/file-operations.test.js +264 -0
- package/dist/__tests__/integration/git-integration.test.js +237 -0
- package/dist/__tests__/integration/index-tools.test.js +496 -0
- package/dist/__tests__/integration/issue11-actual-bug-demo.test.js +304 -0
- package/dist/__tests__/integration/issue11-search-filters-bug.test.js +561 -0
- package/dist/__tests__/integration/issue12-checkpoint-restore-behavior.test.js +621 -0
- package/dist/__tests__/integration/issue13-key-validation.test.js +433 -0
- package/dist/__tests__/integration/knowledge-graph.test.js +338 -0
- package/dist/__tests__/integration/migrations.test.js +528 -0
- package/dist/__tests__/integration/multi-agent.test.js +546 -0
- package/dist/__tests__/integration/pagination-critical-fix.test.js +296 -0
- package/dist/__tests__/integration/paginationDefaultsHandler.test.js +600 -0
- package/dist/__tests__/integration/project-directory.test.js +283 -0
- package/dist/__tests__/integration/resource-cleanup.test.js +149 -0
- package/dist/__tests__/integration/retention.test.js +513 -0
- package/dist/__tests__/integration/search.test.js +333 -0
- package/dist/__tests__/integration/semantic-search.test.js +266 -0
- package/dist/__tests__/integration/server-initialization.test.js +307 -0
- package/dist/__tests__/integration/session-management.test.js +219 -0
- package/dist/__tests__/integration/simplified-sharing.test.js +346 -0
- package/dist/__tests__/integration/smart-compaction.test.js +230 -0
- package/dist/__tests__/integration/summarization.test.js +308 -0
- package/dist/__tests__/integration/watcher-migration-validation.test.js +544 -0
- package/dist/__tests__/security/input-validation.test.js +115 -0
- package/dist/__tests__/utils/agents.test.js +473 -0
- package/dist/__tests__/utils/database.test.js +177 -0
- package/dist/__tests__/utils/git.test.js +122 -0
- package/dist/__tests__/utils/knowledge-graph.test.js +297 -0
- package/dist/__tests__/utils/migrationHealthCheck.test.js +302 -0
- package/dist/__tests__/utils/project-directory-messages.test.js +188 -0
- package/dist/__tests__/utils/timezone-safe-dates.js +119 -0
- package/dist/__tests__/utils/validation.test.js +200 -0
- package/dist/__tests__/utils/vector-store.test.js +231 -0
- package/dist/handlers/contextWatchHandlers.js +206 -0
- package/dist/index.js +4310 -0
- package/dist/index.phase1.backup.js +410 -0
- package/dist/index.phase2.backup.js +704 -0
- package/dist/migrations/003_add_channels.js +174 -0
- package/dist/migrations/004_add_context_watch.js +151 -0
- package/dist/migrations/005_add_context_watch.js +98 -0
- package/dist/migrations/simplify-sharing.js +117 -0
- package/dist/repositories/BaseRepository.js +30 -0
- package/dist/repositories/CheckpointRepository.js +140 -0
- package/dist/repositories/ContextRepository.js +1873 -0
- package/dist/repositories/FileRepository.js +104 -0
- package/dist/repositories/RepositoryManager.js +62 -0
- package/dist/repositories/SessionRepository.js +66 -0
- package/dist/repositories/WatcherRepository.js +252 -0
- package/dist/repositories/index.js +15 -0
- package/dist/server.js +384 -0
- package/dist/test-helpers/database-helper.js +128 -0
- package/dist/types/entities.js +3 -0
- package/dist/utils/agents.js +791 -0
- package/dist/utils/channels.js +150 -0
- package/dist/utils/database.js +731 -0
- package/dist/utils/feature-flags.js +476 -0
- package/dist/utils/git.js +145 -0
- package/dist/utils/knowledge-graph.js +264 -0
- package/dist/utils/migrationHealthCheck.js +373 -0
- package/dist/utils/migrations.js +452 -0
- package/dist/utils/retention.js +460 -0
- package/dist/utils/timestamps.js +112 -0
- package/dist/utils/validation.js +296 -0
- package/dist/utils/vector-store.js +247 -0
- package/package.json +84 -0
|
@@ -0,0 +1,367 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
const database_1 = require("../../utils/database");
|
|
37
|
+
const os = __importStar(require("os"));
|
|
38
|
+
const path = __importStar(require("path"));
|
|
39
|
+
const fs = __importStar(require("fs"));
|
|
40
|
+
const uuid_1 = require("uuid");
|
|
41
|
+
// Integration tests for session export/import. The tests drive the SQLite
// database directly through the project's DatabaseManager handle (the
// prepare().run()/.get()/.all() call shape suggests a better-sqlite3-style
// synchronous API — TODO confirm against ../../utils/database) rather than
// going through the MCP tool handlers, so they pin the storage-level
// behavior of export/import, not the handler wiring.
describe('Export/Import Integration Tests', () => {
    let dbManager;      // project DatabaseManager instance (closed in afterEach)
    let tempDbPath;     // throwaway SQLite file under the OS temp dir
    let tempExportPath; // throwaway directory that receives export artifacts
    let db;             // raw database handle obtained from dbManager
    // Fresh database + export directory per test. Date.now() in both names
    // keeps repeated runs from colliding on the same temp paths.
    beforeEach(() => {
        tempDbPath = path.join(os.tmpdir(), `test-export-${Date.now()}.db`);
        tempExportPath = path.join(os.tmpdir(), `test-exports-${Date.now()}`);
        dbManager = new database_1.DatabaseManager({
            filename: tempDbPath,
            maxSize: 10 * 1024 * 1024, // 10 MB cap passed to the manager
            walMode: true,             // WAL mode => -wal/-shm side files (cleaned up below)
        });
        db = dbManager.getDatabase();
        // Create export directory
        fs.mkdirSync(tempExportPath, { recursive: true });
    });
    // Best-effort cleanup: close the DB, then delete the db file, its
    // WAL/SHM companions, and the export directory. Errors are deliberately
    // swallowed — leftover temp files must not fail the test run.
    afterEach(() => {
        dbManager.close();
        try {
            fs.unlinkSync(tempDbPath);
            fs.unlinkSync(`${tempDbPath}-wal`);
            fs.unlinkSync(`${tempDbPath}-shm`);
            fs.rmSync(tempExportPath, { recursive: true, force: true });
        }
        catch (_e) {
            // Ignore
        }
    });
    describe('context_export', () => {
        it('should export session data to JSON', () => {
            // Create test data: one session, two context items, one cached file.
            const sessionId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO sessions (id, name, description) VALUES (?, ?, ?)').run(sessionId, 'Export Test', 'Test session for export');
            // Add context items
            const items = [
                { key: 'task1', value: 'Complete export feature', category: 'task', priority: 'high' },
                {
                    key: 'note1',
                    value: 'Export format should be JSON',
                    category: 'note',
                    priority: 'normal',
                },
            ];
            items.forEach(item => {
                db.prepare('INSERT INTO context_items (id, session_id, key, value, category, priority) VALUES (?, ?, ?, ?, ?, ?)').run((0, uuid_1.v4)(), sessionId, item.key, item.value, item.category, item.priority);
            });
            // Add file cache
            db.prepare('INSERT INTO file_cache (id, session_id, file_path, content, hash) VALUES (?, ?, ?, ?, ?)').run((0, uuid_1.v4)(), sessionId, '/test.ts', 'test content', 'hash123');
            // Export data: versioned envelope with the session row plus all
            // rows belonging to it (the expected export document shape).
            const exportData = {
                version: '1.0',
                timestamp: new Date().toISOString(),
                session: db.prepare('SELECT * FROM sessions WHERE id = ?').get(sessionId),
                context_items: db
                    .prepare('SELECT * FROM context_items WHERE session_id = ?')
                    .all(sessionId),
                file_cache: db.prepare('SELECT * FROM file_cache WHERE session_id = ?').all(sessionId),
                checkpoints: db.prepare('SELECT * FROM checkpoints WHERE session_id = ?').all(sessionId),
            };
            const exportPath = path.join(tempExportPath, `session-${sessionId}.json`);
            fs.writeFileSync(exportPath, JSON.stringify(exportData, null, 2));
            // Verify export: file exists and round-trips through JSON with the
            // two items and one cached file written above.
            expect(fs.existsSync(exportPath)).toBe(true);
            const exported = JSON.parse(fs.readFileSync(exportPath, 'utf-8'));
            expect(exported.version).toBe('1.0');
            expect(exported.context_items).toHaveLength(2);
            expect(exported.file_cache).toHaveLength(1);
        });
        it('should export with checkpoints and linked items', () => {
            const sessionId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO sessions (id, name) VALUES (?, ?)').run(sessionId, 'Checkpoint Export');
            // Add context items
            const itemId1 = (0, uuid_1.v4)();
            const itemId2 = (0, uuid_1.v4)();
            db.prepare('INSERT INTO context_items (id, session_id, key, value) VALUES (?, ?, ?, ?)').run(itemId1, sessionId, 'item1', 'value1');
            db.prepare('INSERT INTO context_items (id, session_id, key, value) VALUES (?, ?, ?, ?)').run(itemId2, sessionId, 'item2', 'value2');
            // Create checkpoint
            const checkpointId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO checkpoints (id, session_id, name) VALUES (?, ?, ?)').run(checkpointId, sessionId, 'Test Checkpoint');
            // Link items to checkpoint (checkpoint_items is the join table)
            db.prepare('INSERT INTO checkpoint_items (id, checkpoint_id, context_item_id) VALUES (?, ?, ?)').run((0, uuid_1.v4)(), checkpointId, itemId1);
            db.prepare('INSERT INTO checkpoint_items (id, checkpoint_id, context_item_id) VALUES (?, ?, ?)').run((0, uuid_1.v4)(), checkpointId, itemId2);
            // Export with checkpoint data; checkpoint_items are fetched via a
            // join because the join table has no session_id column of its own.
            const exportData = {
                version: '1.0',
                timestamp: new Date().toISOString(),
                session: db.prepare('SELECT * FROM sessions WHERE id = ?').get(sessionId),
                context_items: db
                    .prepare('SELECT * FROM context_items WHERE session_id = ?')
                    .all(sessionId),
                checkpoints: db.prepare('SELECT * FROM checkpoints WHERE session_id = ?').all(sessionId),
                checkpoint_items: db
                    .prepare(`
          SELECT cpi.* FROM checkpoint_items cpi
          JOIN checkpoints cp ON cpi.checkpoint_id = cp.id
          WHERE cp.session_id = ?
        `)
                    .all(sessionId),
            };
            const exportPath = path.join(tempExportPath, 'checkpoint-export.json');
            fs.writeFileSync(exportPath, JSON.stringify(exportData, null, 2));
            const exported = JSON.parse(fs.readFileSync(exportPath, 'utf-8'));
            expect(exported.checkpoints).toHaveLength(1);
            expect(exported.checkpoint_items).toHaveLength(2);
        });
        it('should compress large exports', () => {
            const sessionId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO sessions (id, name) VALUES (?, ?)').run(sessionId, 'Large Export');
            // Add many items (1000 rows of repetitive text => very compressible)
            const items = [];
            for (let i = 0; i < 1000; i++) {
                const id = (0, uuid_1.v4)();
                items.push(id);
                db.prepare('INSERT INTO context_items (id, session_id, key, value) VALUES (?, ?, ?, ?)').run(id, sessionId, `key${i}`, `This is a longer value to make the export larger: ${i}`);
            }
            // Export data
            const exportData = {
                version: '1.0',
                timestamp: new Date().toISOString(),
                session: db.prepare('SELECT * FROM sessions WHERE id = ?').get(sessionId),
                context_items: db
                    .prepare('SELECT * FROM context_items WHERE session_id = ?')
                    .all(sessionId),
            };
            // Test both compressed and uncompressed
            const uncompressedPath = path.join(tempExportPath, 'large-export.json');
            const compressedPath = path.join(tempExportPath, 'large-export.json.gz');
            fs.writeFileSync(uncompressedPath, JSON.stringify(exportData, null, 2));
            // Simulate compression (using zlib)
            const zlib = require('zlib');
            const compressed = zlib.gzipSync(JSON.stringify(exportData));
            fs.writeFileSync(compressedPath, compressed);
            const uncompressedSize = fs.statSync(uncompressedPath).size;
            const compressedSize = fs.statSync(compressedPath).size;
            // NOTE(review): the 0.5 ratio also benefits from the compressed
            // copy being un-pretty-printed (no `null, 2` indentation).
            expect(compressedSize).toBeLessThan(uncompressedSize * 0.5); // Should compress well
        });
    });
    describe('context_import', () => {
        it('should import session data from JSON', () => {
            // Create export data by hand (session_id fields are intentionally
            // empty: import assigns fresh ids and a fresh session below).
            const exportData = {
                version: '1.0',
                timestamp: new Date().toISOString(),
                session: {
                    id: (0, uuid_1.v4)(),
                    name: 'Imported Session',
                    description: 'Test import',
                    created_at: new Date().toISOString(),
                },
                context_items: [
                    {
                        id: (0, uuid_1.v4)(),
                        session_id: '',
                        key: 'imported1',
                        value: 'value1',
                        category: 'task',
                        priority: 'high',
                    },
                    {
                        id: (0, uuid_1.v4)(),
                        session_id: '',
                        key: 'imported2',
                        value: 'value2',
                        category: 'note',
                        priority: 'normal',
                    },
                ],
                file_cache: [
                    {
                        id: (0, uuid_1.v4)(),
                        session_id: '',
                        file_path: '/imported.ts',
                        content: 'imported content',
                        hash: 'hash456',
                    },
                ],
            };
            const importPath = path.join(tempExportPath, 'import-test.json');
            fs.writeFileSync(importPath, JSON.stringify(exportData, null, 2));
            // Import data
            const importedData = JSON.parse(fs.readFileSync(importPath, 'utf-8'));
            // Create new session for import; name gets an "(Imported)" suffix
            const newSessionId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO sessions (id, name, description) VALUES (?, ?, ?)').run(newSessionId, `${importedData.session.name} (Imported)`, importedData.session.description);
            // Import context items (fresh UUIDs, re-parented to the new session)
            importedData.context_items.forEach((item) => {
                db.prepare('INSERT INTO context_items (id, session_id, key, value, category, priority) VALUES (?, ?, ?, ?, ?, ?)').run((0, uuid_1.v4)(), newSessionId, item.key, item.value, item.category, item.priority);
            });
            // Import file cache
            importedData.file_cache.forEach((file) => {
                db.prepare('INSERT INTO file_cache (id, session_id, file_path, content, hash) VALUES (?, ?, ?, ?, ?)').run((0, uuid_1.v4)(), newSessionId, file.file_path, file.content, file.hash);
            });
            // Verify import
            const session = db.prepare('SELECT * FROM sessions WHERE id = ?').get(newSessionId);
            expect(session.name).toContain('Imported');
            const items = db
                .prepare('SELECT * FROM context_items WHERE session_id = ?')
                .all(newSessionId);
            expect(items).toHaveLength(2);
            expect(items.map((i) => i.key)).toContain('imported1');
            const files = db
                .prepare('SELECT * FROM file_cache WHERE session_id = ?')
                .all(newSessionId);
            expect(files).toHaveLength(1);
            expect(files[0].file_path).toBe('/imported.ts');
        });
        it('should validate import data format', () => {
            const invalidData = {
                // Missing version
                timestamp: new Date().toISOString(),
                session: { name: 'Invalid' },
            };
            const importPath = path.join(tempExportPath, 'invalid.json');
            fs.writeFileSync(importPath, JSON.stringify(invalidData));
            // Validation function: a document is importable only if it carries
            // a version and a named session.
            const validateImport = (data) => {
                return !!(data.version && data.session && data.session.name);
            };
            const importedData = JSON.parse(fs.readFileSync(importPath, 'utf-8'));
            expect(validateImport(importedData)).toBe(false);
        });
        it('should handle duplicate imports', () => {
            const sessionId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO sessions (id, name) VALUES (?, ?)').run(sessionId, 'Original');
            // Add existing item
            db.prepare('INSERT INTO context_items (id, session_id, key, value) VALUES (?, ?, ?, ?)').run((0, uuid_1.v4)(), sessionId, 'existing_key', 'original value');
            // Import data with same key
            const importData = {
                version: '1.0',
                timestamp: new Date().toISOString(),
                context_items: [
                    { key: 'existing_key', value: 'imported value', category: 'task', priority: 'high' },
                ],
            };
            // Strategy 1: Skip duplicates — insert only when the (session, key)
            // pair is not already present.
            const existingItem = db
                .prepare('SELECT * FROM context_items WHERE session_id = ? AND key = ?')
                .get(sessionId, 'existing_key');
            if (!existingItem) {
                db.prepare('INSERT INTO context_items (id, session_id, key, value, category, priority) VALUES (?, ?, ?, ?, ?, ?)').run((0, uuid_1.v4)(), sessionId, importData.context_items[0].key, importData.context_items[0].value, importData.context_items[0].category, importData.context_items[0].priority);
            }
            // Verify original was kept
            const item = db
                .prepare('SELECT * FROM context_items WHERE session_id = ? AND key = ?')
                .get(sessionId, 'existing_key');
            expect(item.value).toBe('original value');
            // Strategy 2: Replace duplicates — reuse the existing row id so
            // INSERT OR REPLACE overwrites it in place.
            db.prepare('INSERT OR REPLACE INTO context_items (id, session_id, key, value, category, priority) VALUES (?, ?, ?, ?, ?, ?)').run(item.id, sessionId, 'existing_key', 'replaced value', 'task', 'high');
            const replaced = db
                .prepare('SELECT * FROM context_items WHERE session_id = ? AND key = ?')
                .get(sessionId, 'existing_key');
            expect(replaced.value).toBe('replaced value');
        });
        it('should merge imports into existing session', () => {
            const sessionId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO sessions (id, name) VALUES (?, ?)').run(sessionId, 'Merge Target');
            // Add existing items
            db.prepare('INSERT INTO context_items (id, session_id, key, value) VALUES (?, ?, ?, ?)').run((0, uuid_1.v4)(), sessionId, 'existing1', 'value1');
            // Import additional items
            const importData = {
                version: '1.0',
                context_items: [
                    { key: 'imported1', value: 'new value1' },
                    { key: 'imported2', value: 'new value2' },
                ],
            };
            // Merge import: imported rows are added alongside existing ones
            importData.context_items.forEach(item => {
                db.prepare('INSERT INTO context_items (id, session_id, key, value) VALUES (?, ?, ?, ?)').run((0, uuid_1.v4)(), sessionId, item.key, item.value);
            });
            // Verify merge
            const allItems = db
                .prepare('SELECT * FROM context_items WHERE session_id = ? ORDER BY key')
                .all(sessionId);
            expect(allItems).toHaveLength(3);
            expect(allItems.map((i) => i.key)).toEqual(['existing1', 'imported1', 'imported2']);
        });
    });
    describe('Export/Import formats', () => {
        it('should support markdown export format', () => {
            const sessionId = (0, uuid_1.v4)();
            db.prepare('INSERT INTO sessions (id, name, description) VALUES (?, ?, ?)').run(sessionId, 'Markdown Export', 'Testing markdown format');
            // Add context items
            const items = [
                { key: 'task1', value: 'Implement feature X', category: 'task', priority: 'high' },
                { key: 'decision1', value: 'Use TypeScript', category: 'decision', priority: 'high' },
                {
                    key: 'note1',
                    value: 'Remember to test edge cases',
                    category: 'note',
                    priority: 'normal',
                },
            ];
            items.forEach(item => {
                db.prepare('INSERT INTO context_items (id, session_id, key, value, category, priority) VALUES (?, ?, ?, ?, ?, ?)').run((0, uuid_1.v4)(), sessionId, item.key, item.value, item.category, item.priority);
            });
            // Generate markdown: H1 session name, description, then one H3
            // section per category with a bullet per item.
            const session = db.prepare('SELECT * FROM sessions WHERE id = ?').get(sessionId);
            const contextItems = db
                .prepare('SELECT * FROM context_items WHERE session_id = ? ORDER BY category, priority DESC')
                .all(sessionId);
            const markdown = [`# ${session.name}`, '', session.description, '', '## Context Items', ''];
            // Group rows by category (plain-object accumulator keyed by category)
            const itemsByCategory = contextItems.reduce((acc, item) => {
                if (!acc[item.category])
                    acc[item.category] = [];
                acc[item.category].push(item);
                return acc;
            }, {});
            Object.entries(itemsByCategory).forEach(([category, items]) => {
                // Section heading pluralizes naively by appending 's'
                markdown.push(`### ${category.charAt(0).toUpperCase() + category.slice(1)}s`);
                markdown.push('');
                items.forEach((item) => {
                    markdown.push(`- **${item.key}** (${item.priority}): ${item.value}`);
                });
                markdown.push('');
            });
            const markdownPath = path.join(tempExportPath, 'export.md');
            fs.writeFileSync(markdownPath, markdown.join('\n'));
            // Verify markdown
            const content = fs.readFileSync(markdownPath, 'utf-8');
            expect(content).toContain('# Markdown Export');
            expect(content).toContain('### Tasks');
            expect(content).toContain('**task1** (high): Implement feature X');
        });
    });
});
|