mcp-memory-keeper 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98):
  1. package/CHANGELOG.md +433 -0
  2. package/LICENSE +21 -0
  3. package/README.md +1051 -0
  4. package/bin/mcp-memory-keeper +52 -0
  5. package/dist/__tests__/helpers/database-test-helper.js +160 -0
  6. package/dist/__tests__/helpers/test-server.js +92 -0
  7. package/dist/__tests__/integration/advanced-features.test.js +614 -0
  8. package/dist/__tests__/integration/backward-compatibility.test.js +245 -0
  9. package/dist/__tests__/integration/batchOperationsE2E.test.js +396 -0
  10. package/dist/__tests__/integration/batchOperationsHandler.test.js +1230 -0
  11. package/dist/__tests__/integration/channelManagementHandler.test.js +1291 -0
  12. package/dist/__tests__/integration/channels.test.js +376 -0
  13. package/dist/__tests__/integration/checkpoint.test.js +251 -0
  14. package/dist/__tests__/integration/concurrent-access.test.js +190 -0
  15. package/dist/__tests__/integration/context-operations.test.js +243 -0
  16. package/dist/__tests__/integration/contextDiff.test.js +852 -0
  17. package/dist/__tests__/integration/contextDiffHandler.test.js +976 -0
  18. package/dist/__tests__/integration/contextExportHandler.test.js +510 -0
  19. package/dist/__tests__/integration/contextGetPaginationDefaults.test.js +298 -0
  20. package/dist/__tests__/integration/contextReassignChannelHandler.test.js +908 -0
  21. package/dist/__tests__/integration/contextRelationshipsHandler.test.js +1151 -0
  22. package/dist/__tests__/integration/contextSearch.test.js +938 -0
  23. package/dist/__tests__/integration/contextSearchHandler.test.js +552 -0
  24. package/dist/__tests__/integration/contextWatchActual.test.js +165 -0
  25. package/dist/__tests__/integration/contextWatchHandler.test.js +1500 -0
  26. package/dist/__tests__/integration/cross-session-sharing.test.js +302 -0
  27. package/dist/__tests__/integration/database-initialization.test.js +134 -0
  28. package/dist/__tests__/integration/enhanced-context-operations.test.js +1082 -0
  29. package/dist/__tests__/integration/enhancedContextGetHandler.test.js +915 -0
  30. package/dist/__tests__/integration/enhancedContextTimelineHandler.test.js +716 -0
  31. package/dist/__tests__/integration/error-cases.test.js +407 -0
  32. package/dist/__tests__/integration/export-import.test.js +367 -0
  33. package/dist/__tests__/integration/feature-flags.test.js +542 -0
  34. package/dist/__tests__/integration/file-operations.test.js +264 -0
  35. package/dist/__tests__/integration/git-integration.test.js +237 -0
  36. package/dist/__tests__/integration/index-tools.test.js +496 -0
  37. package/dist/__tests__/integration/issue11-actual-bug-demo.test.js +304 -0
  38. package/dist/__tests__/integration/issue11-search-filters-bug.test.js +561 -0
  39. package/dist/__tests__/integration/issue12-checkpoint-restore-behavior.test.js +621 -0
  40. package/dist/__tests__/integration/issue13-key-validation.test.js +433 -0
  41. package/dist/__tests__/integration/knowledge-graph.test.js +338 -0
  42. package/dist/__tests__/integration/migrations.test.js +528 -0
  43. package/dist/__tests__/integration/multi-agent.test.js +546 -0
  44. package/dist/__tests__/integration/pagination-critical-fix.test.js +296 -0
  45. package/dist/__tests__/integration/paginationDefaultsHandler.test.js +600 -0
  46. package/dist/__tests__/integration/project-directory.test.js +283 -0
  47. package/dist/__tests__/integration/resource-cleanup.test.js +149 -0
  48. package/dist/__tests__/integration/retention.test.js +513 -0
  49. package/dist/__tests__/integration/search.test.js +333 -0
  50. package/dist/__tests__/integration/semantic-search.test.js +266 -0
  51. package/dist/__tests__/integration/server-initialization.test.js +307 -0
  52. package/dist/__tests__/integration/session-management.test.js +219 -0
  53. package/dist/__tests__/integration/simplified-sharing.test.js +346 -0
  54. package/dist/__tests__/integration/smart-compaction.test.js +230 -0
  55. package/dist/__tests__/integration/summarization.test.js +308 -0
  56. package/dist/__tests__/integration/watcher-migration-validation.test.js +544 -0
  57. package/dist/__tests__/security/input-validation.test.js +115 -0
  58. package/dist/__tests__/utils/agents.test.js +473 -0
  59. package/dist/__tests__/utils/database.test.js +177 -0
  60. package/dist/__tests__/utils/git.test.js +122 -0
  61. package/dist/__tests__/utils/knowledge-graph.test.js +297 -0
  62. package/dist/__tests__/utils/migrationHealthCheck.test.js +302 -0
  63. package/dist/__tests__/utils/project-directory-messages.test.js +188 -0
  64. package/dist/__tests__/utils/timezone-safe-dates.js +119 -0
  65. package/dist/__tests__/utils/validation.test.js +200 -0
  66. package/dist/__tests__/utils/vector-store.test.js +231 -0
  67. package/dist/handlers/contextWatchHandlers.js +206 -0
  68. package/dist/index.js +4310 -0
  69. package/dist/index.phase1.backup.js +410 -0
  70. package/dist/index.phase2.backup.js +704 -0
  71. package/dist/migrations/003_add_channels.js +174 -0
  72. package/dist/migrations/004_add_context_watch.js +151 -0
  73. package/dist/migrations/005_add_context_watch.js +98 -0
  74. package/dist/migrations/simplify-sharing.js +117 -0
  75. package/dist/repositories/BaseRepository.js +30 -0
  76. package/dist/repositories/CheckpointRepository.js +140 -0
  77. package/dist/repositories/ContextRepository.js +1873 -0
  78. package/dist/repositories/FileRepository.js +104 -0
  79. package/dist/repositories/RepositoryManager.js +62 -0
  80. package/dist/repositories/SessionRepository.js +66 -0
  81. package/dist/repositories/WatcherRepository.js +252 -0
  82. package/dist/repositories/index.js +15 -0
  83. package/dist/server.js +384 -0
  84. package/dist/test-helpers/database-helper.js +128 -0
  85. package/dist/types/entities.js +3 -0
  86. package/dist/utils/agents.js +791 -0
  87. package/dist/utils/channels.js +150 -0
  88. package/dist/utils/database.js +731 -0
  89. package/dist/utils/feature-flags.js +476 -0
  90. package/dist/utils/git.js +145 -0
  91. package/dist/utils/knowledge-graph.js +264 -0
  92. package/dist/utils/migrationHealthCheck.js +373 -0
  93. package/dist/utils/migrations.js +452 -0
  94. package/dist/utils/retention.js +460 -0
  95. package/dist/utils/timestamps.js +112 -0
  96. package/dist/utils/validation.js +296 -0
  97. package/dist/utils/vector-store.js +247 -0
  98. package/package.json +84 -0
@@ -0,0 +1,1230 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ const globals_1 = require("@jest/globals");
37
+ const database_1 = require("../../utils/database");
38
+ const ContextRepository_1 = require("../../repositories/ContextRepository");
39
+ const os = __importStar(require("os"));
40
+ const path = __importStar(require("path"));
41
+ const fs = __importStar(require("fs"));
42
+ const uuid_1 = require("uuid");
43
+ const validation_1 = require("../../utils/validation");
44
+ (0, globals_1.describe)('Batch Operations Handler Integration Tests', () => {
45
+ let dbManager;
46
+ let tempDbPath;
47
+ let db;
48
+ let _contextRepo;
49
+ let testSessionId;
50
+ let secondSessionId;
51
+ (0, globals_1.beforeEach)(() => {
52
+ tempDbPath = path.join(os.tmpdir(), `test-batch-operations-${Date.now()}.db`);
53
+ dbManager = new database_1.DatabaseManager({
54
+ filename: tempDbPath,
55
+ maxSize: 10 * 1024 * 1024,
56
+ walMode: true,
57
+ });
58
+ db = dbManager.getDatabase();
59
+ _contextRepo = new ContextRepository_1.ContextRepository(dbManager);
60
+ // Create test sessions
61
+ testSessionId = (0, uuid_1.v4)();
62
+ secondSessionId = (0, uuid_1.v4)();
63
+ db.prepare('INSERT INTO sessions (id, name) VALUES (?, ?)').run(testSessionId, 'Test Session');
64
+ db.prepare('INSERT INTO sessions (id, name) VALUES (?, ?)').run(secondSessionId, 'Second Session');
65
+ });
66
+ (0, globals_1.afterEach)(() => {
67
+ dbManager.close();
68
+ try {
69
+ fs.unlinkSync(tempDbPath);
70
+ fs.unlinkSync(`${tempDbPath}-wal`);
71
+ fs.unlinkSync(`${tempDbPath}-shm`);
72
+ }
73
+ catch (_e) {
74
+ // Ignore
75
+ }
76
+ });
77
+ (0, globals_1.describe)('Batch Save Operations', () => {
78
+ (0, globals_1.it)('should save multiple items in a single batch', () => {
79
+ const items = [
80
+ {
81
+ key: 'batch.config.db',
82
+ value: 'postgresql://localhost:5432/app',
83
+ category: 'config',
84
+ priority: 'high',
85
+ channel: 'main',
86
+ },
87
+ {
88
+ key: 'batch.config.cache',
89
+ value: 'redis://localhost:6379',
90
+ category: 'config',
91
+ priority: 'normal',
92
+ channel: 'main',
93
+ },
94
+ {
95
+ key: 'batch.task.deploy',
96
+ value: 'Deploy to production',
97
+ category: 'task',
98
+ priority: 'high',
99
+ channel: 'deployment',
100
+ },
101
+ ];
102
+ // Simulate batch save handler logic
103
+ const results = [];
104
+ const errors = [];
105
+ db.prepare('BEGIN TRANSACTION').run();
106
+ try {
107
+ const stmt = db.prepare(`
108
+ INSERT INTO context_items (
109
+ id, session_id, key, value, category, priority, channel,
110
+ created_at, updated_at, size
111
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
112
+ `);
113
+ items.forEach((item, index) => {
114
+ try {
115
+ const id = (0, uuid_1.v4)();
116
+ const now = new Date().toISOString();
117
+ const size = Buffer.byteLength(item.value, 'utf8');
118
+ stmt.run(id, testSessionId, item.key, item.value, item.category || null, item.priority || 'normal', item.channel || 'general', now, now, size);
119
+ results.push({
120
+ index,
121
+ key: item.key,
122
+ success: true,
123
+ id,
124
+ });
125
+ }
126
+ catch (error) {
127
+ errors.push({
128
+ index,
129
+ key: item.key,
130
+ error: error.message,
131
+ });
132
+ }
133
+ });
134
+ db.prepare('COMMIT').run();
135
+ }
136
+ catch (error) {
137
+ db.prepare('ROLLBACK').run();
138
+ throw error;
139
+ }
140
+ (0, globals_1.expect)(results.length).toBe(3);
141
+ (0, globals_1.expect)(errors.length).toBe(0);
142
+ // Verify items were saved
143
+ const savedItems = db
144
+ .prepare('SELECT * FROM context_items WHERE session_id = ? ORDER BY key')
145
+ .all(testSessionId);
146
+ (0, globals_1.expect)(savedItems.length).toBe(3);
147
+ (0, globals_1.expect)(savedItems.map((item) => item.key)).toEqual([
148
+ 'batch.config.cache',
149
+ 'batch.config.db',
150
+ 'batch.task.deploy',
151
+ ]);
152
+ // Handler response
153
+ const handlerResponse = {
154
+ content: [
155
+ {
156
+ type: 'text',
157
+ text: JSON.stringify({
158
+ operation: 'batch_save',
159
+ totalItems: items.length,
160
+ succeeded: results.length,
161
+ failed: errors.length,
162
+ results: results,
163
+ errors: errors,
164
+ }, null, 2),
165
+ },
166
+ ],
167
+ };
168
+ const parsed = JSON.parse(handlerResponse.content[0].text);
169
+ (0, globals_1.expect)(parsed.succeeded).toBe(3);
170
+ (0, globals_1.expect)(parsed.failed).toBe(0);
171
+ });
172
+ (0, globals_1.it)('should handle duplicate keys in batch save', () => {
173
+ // First, create an existing item
174
+ db.prepare(`
175
+ INSERT INTO context_items (id, session_id, key, value)
176
+ VALUES (?, ?, ?, ?)
177
+ `).run((0, uuid_1.v4)(), testSessionId, 'existing.key', 'Original value');
178
+ const items = [
179
+ {
180
+ key: 'new.key.1',
181
+ value: 'New value 1',
182
+ },
183
+ {
184
+ key: 'existing.key', // Duplicate
185
+ value: 'Updated value',
186
+ },
187
+ {
188
+ key: 'new.key.2',
189
+ value: 'New value 2',
190
+ },
191
+ ];
192
+ const results = [];
193
+ const errors = [];
194
+ db.prepare('BEGIN TRANSACTION').run();
195
+ try {
196
+ items.forEach((item, index) => {
197
+ try {
198
+ // Check if key exists
199
+ const existing = db
200
+ .prepare('SELECT id FROM context_items WHERE session_id = ? AND key = ?')
201
+ .get(testSessionId, item.key);
202
+ if (existing) {
203
+ // Update existing
204
+ db.prepare(`
205
+ UPDATE context_items
206
+ SET value = ?, updated_at = CURRENT_TIMESTAMP
207
+ WHERE session_id = ? AND key = ?
208
+ `).run(item.value, testSessionId, item.key);
209
+ results.push({
210
+ index,
211
+ key: item.key,
212
+ success: true,
213
+ action: 'updated',
214
+ });
215
+ }
216
+ else {
217
+ // Insert new
218
+ const id = (0, uuid_1.v4)();
219
+ db.prepare(`
220
+ INSERT INTO context_items (id, session_id, key, value)
221
+ VALUES (?, ?, ?, ?)
222
+ `).run(id, testSessionId, item.key, item.value);
223
+ results.push({
224
+ index,
225
+ key: item.key,
226
+ success: true,
227
+ action: 'created',
228
+ id,
229
+ });
230
+ }
231
+ }
232
+ catch (error) {
233
+ errors.push({
234
+ index,
235
+ key: item.key,
236
+ error: error.message,
237
+ });
238
+ }
239
+ });
240
+ db.prepare('COMMIT').run();
241
+ }
242
+ catch (error) {
243
+ db.prepare('ROLLBACK').run();
244
+ throw error;
245
+ }
246
+ (0, globals_1.expect)(results.length).toBe(3);
247
+ (0, globals_1.expect)(results.filter(r => r.action === 'created').length).toBe(2);
248
+ (0, globals_1.expect)(results.filter(r => r.action === 'updated').length).toBe(1);
249
+ // Verify the update
250
+ const updated = db
251
+ .prepare('SELECT * FROM context_items WHERE key = ?')
252
+ .get('existing.key');
253
+ (0, globals_1.expect)(updated.value).toBe('Updated value');
254
+ });
255
+ (0, globals_1.it)('should validate batch save items', () => {
256
+ const invalidItems = [
257
+ {
258
+ // Missing key
259
+ value: 'No key provided',
260
+ },
261
+ {
262
+ key: '', // Empty key
263
+ value: 'Empty key',
264
+ },
265
+ {
266
+ key: 'no.value',
267
+ // Missing value
268
+ },
269
+ {
270
+ key: 'invalid.category',
271
+ value: 'Invalid category',
272
+ category: 'invalid-category', // Invalid category
273
+ },
274
+ {
275
+ key: 'invalid.priority',
276
+ value: 'Invalid priority',
277
+ priority: 'urgent', // Invalid priority
278
+ },
279
+ ];
280
+ const errors = [];
281
+ invalidItems.forEach((item, index) => {
282
+ try {
283
+ // Validate key
284
+ if (!item.key || !item.key.trim()) {
285
+ throw new validation_1.ValidationError('Key is required and cannot be empty');
286
+ }
287
+ // Validate value
288
+ if (!item.value) {
289
+ throw new validation_1.ValidationError('Value is required');
290
+ }
291
+ // Validate category
292
+ if (item.category) {
293
+ const validCategories = ['task', 'decision', 'progress', 'note', 'error', 'warning'];
294
+ if (!validCategories.includes(item.category)) {
295
+ throw new validation_1.ValidationError(`Invalid category: ${item.category}`);
296
+ }
297
+ }
298
+ // Validate priority
299
+ if (item.priority) {
300
+ const validPriorities = ['high', 'normal', 'low'];
301
+ if (!validPriorities.includes(item.priority)) {
302
+ throw new validation_1.ValidationError(`Invalid priority: ${item.priority}`);
303
+ }
304
+ }
305
+ }
306
+ catch (error) {
307
+ errors.push({
308
+ index,
309
+ key: item.key || 'undefined',
310
+ error: error.message,
311
+ });
312
+ }
313
+ });
314
+ (0, globals_1.expect)(errors.length).toBe(5);
315
+ (0, globals_1.expect)(errors[0].error).toContain('Key is required');
316
+ (0, globals_1.expect)(errors[1].error).toContain('Key is required');
317
+ (0, globals_1.expect)(errors[2].error).toContain('Value is required');
318
+ (0, globals_1.expect)(errors[3].error).toContain('Invalid category');
319
+ (0, globals_1.expect)(errors[4].error).toContain('Invalid priority');
320
+ });
321
+ (0, globals_1.it)('should handle partial batch save failures', () => {
322
+ const items = [
323
+ {
324
+ key: 'valid.item.1',
325
+ value: 'Valid value 1',
326
+ },
327
+ {
328
+ key: '', // Will fail validation
329
+ value: 'Invalid key',
330
+ },
331
+ {
332
+ key: 'valid.item.2',
333
+ value: 'Valid value 2',
334
+ },
335
+ {
336
+ key: 'invalid.priority',
337
+ value: 'Invalid priority',
338
+ priority: 'urgent', // Will fail validation
339
+ },
340
+ {
341
+ key: 'valid.item.3',
342
+ value: 'Valid value 3',
343
+ },
344
+ ];
345
+ const results = [];
346
+ const errors = [];
347
+ items.forEach((item, index) => {
348
+ try {
349
+ // Validate
350
+ if (!item.key || !item.key.trim()) {
351
+ throw new validation_1.ValidationError('Key is required');
352
+ }
353
+ if (item.priority) {
354
+ const validPriorities = ['high', 'normal', 'low'];
355
+ if (!validPriorities.includes(item.priority)) {
356
+ throw new validation_1.ValidationError(`Invalid priority: ${item.priority}`);
357
+ }
358
+ }
359
+ // Save
360
+ const id = (0, uuid_1.v4)();
361
+ db.prepare(`
362
+ INSERT INTO context_items (id, session_id, key, value, priority)
363
+ VALUES (?, ?, ?, ?, ?)
364
+ `).run(id, testSessionId, item.key, item.value, item.priority || 'normal');
365
+ results.push({
366
+ index,
367
+ key: item.key,
368
+ success: true,
369
+ id,
370
+ });
371
+ }
372
+ catch (error) {
373
+ errors.push({
374
+ index,
375
+ key: item.key || 'undefined',
376
+ error: error.message,
377
+ });
378
+ }
379
+ });
380
+ (0, globals_1.expect)(results.length).toBe(3); // 3 valid items
381
+ (0, globals_1.expect)(errors.length).toBe(2); // 2 invalid items
382
+ // Handler response
383
+ const handlerResponse = {
384
+ content: [
385
+ {
386
+ type: 'text',
387
+ text: JSON.stringify({
388
+ operation: 'batch_save',
389
+ totalItems: items.length,
390
+ succeeded: results.length,
391
+ failed: errors.length,
392
+ results: results,
393
+ errors: errors,
394
+ }, null, 2),
395
+ },
396
+ ],
397
+ };
398
+ const parsed = JSON.parse(handlerResponse.content[0].text);
399
+ (0, globals_1.expect)(parsed.succeeded).toBe(3);
400
+ (0, globals_1.expect)(parsed.failed).toBe(2);
401
+ });
402
+ (0, globals_1.it)('should enforce batch size limits', () => {
403
+ const maxBatchSize = 100;
404
+ const items = Array.from({ length: 150 }, (_, i) => ({
405
+ key: `batch.item.${i}`,
406
+ value: `Value ${i}`,
407
+ }));
408
+ try {
409
+ if (items.length > maxBatchSize) {
410
+ throw new validation_1.ValidationError(`Batch size ${items.length} exceeds maximum allowed size of ${maxBatchSize}`);
411
+ }
412
+ }
413
+ catch (error) {
414
+ (0, globals_1.expect)(error).toBeInstanceOf(validation_1.ValidationError);
415
+ (0, globals_1.expect)(error.message).toContain('exceeds maximum allowed size');
416
+ }
417
+ });
418
+ (0, globals_1.it)('should calculate total size for batch operations', () => {
419
+ const items = [
420
+ {
421
+ key: 'small.item',
422
+ value: 'Small',
423
+ },
424
+ {
425
+ key: 'medium.item',
426
+ value: 'This is a medium sized value with more content',
427
+ },
428
+ {
429
+ key: 'large.item',
430
+ value: 'A'.repeat(1000), // 1KB
431
+ },
432
+ ];
433
+ let totalSize = 0;
434
+ const results = [];
435
+ items.forEach((item, index) => {
436
+ const size = Buffer.byteLength(item.value, 'utf8');
437
+ totalSize += size;
438
+ const id = (0, uuid_1.v4)();
439
+ db.prepare(`
440
+ INSERT INTO context_items (id, session_id, key, value, size)
441
+ VALUES (?, ?, ?, ?, ?)
442
+ `).run(id, testSessionId, item.key, item.value, size);
443
+ results.push({
444
+ index,
445
+ key: item.key,
446
+ success: true,
447
+ id,
448
+ size,
449
+ });
450
+ });
451
+ (0, globals_1.expect)(totalSize).toBeGreaterThan(1000);
452
+ // Handler response with size information
453
+ const handlerResponse = {
454
+ content: [
455
+ {
456
+ type: 'text',
457
+ text: JSON.stringify({
458
+ operation: 'batch_save',
459
+ totalItems: items.length,
460
+ succeeded: results.length,
461
+ failed: 0,
462
+ totalSize: totalSize,
463
+ averageSize: Math.round(totalSize / items.length),
464
+ results: results,
465
+ }, null, 2),
466
+ },
467
+ ],
468
+ };
469
+ const parsed = JSON.parse(handlerResponse.content[0].text);
470
+ (0, globals_1.expect)(parsed.totalSize).toBe(totalSize);
471
+ (0, globals_1.expect)(parsed.averageSize).toBeGreaterThan(0);
472
+ });
473
+ });
474
+ (0, globals_1.describe)('Batch Delete Operations', () => {
475
+ (0, globals_1.beforeEach)(() => {
476
+ // Create test items
477
+ const items = [
478
+ 'delete.item.1',
479
+ 'delete.item.2',
480
+ 'delete.item.3',
481
+ 'keep.item.1',
482
+ 'keep.item.2',
483
+ ];
484
+ items.forEach(key => {
485
+ db.prepare(`
486
+ INSERT INTO context_items (id, session_id, key, value)
487
+ VALUES (?, ?, ?, ?)
488
+ `).run((0, uuid_1.v4)(), testSessionId, key, `Value for ${key}`);
489
+ });
490
+ // Create item in another session
491
+ db.prepare(`
492
+ INSERT INTO context_items (id, session_id, key, value)
493
+ VALUES (?, ?, ?, ?)
494
+ `).run((0, uuid_1.v4)(), secondSessionId, 'delete.item.1', 'Another session item');
495
+ });
496
+ (0, globals_1.it)('should delete multiple items by keys', () => {
497
+ const keysToDelete = ['delete.item.1', 'delete.item.2', 'delete.item.3'];
498
+ // Simulate batch delete handler
499
+ const results = [];
500
+ db.prepare('BEGIN TRANSACTION').run();
501
+ try {
502
+ keysToDelete.forEach((key, index) => {
503
+ const result = db
504
+ .prepare('DELETE FROM context_items WHERE session_id = ? AND key = ?')
505
+ .run(testSessionId, key);
506
+ results.push({
507
+ index,
508
+ key,
509
+ deleted: result.changes > 0,
510
+ count: result.changes,
511
+ });
512
+ });
513
+ db.prepare('COMMIT').run();
514
+ }
515
+ catch (error) {
516
+ db.prepare('ROLLBACK').run();
517
+ throw error;
518
+ }
519
+ (0, globals_1.expect)(results.every(r => r.deleted)).toBe(true);
520
+ (0, globals_1.expect)(results.reduce((sum, r) => sum + r.count, 0)).toBe(3);
521
+ // Verify items were deleted
522
+ const remainingItems = db
523
+ .prepare('SELECT key FROM context_items WHERE session_id = ?')
524
+ .all(testSessionId);
525
+ (0, globals_1.expect)(remainingItems.length).toBe(2);
526
+ (0, globals_1.expect)(remainingItems.map((item) => item.key).sort()).toEqual([
527
+ 'keep.item.1',
528
+ 'keep.item.2',
529
+ ]);
530
+ // Verify item from other session wasn't deleted
531
+ const otherSessionItem = db
532
+ .prepare('SELECT * FROM context_items WHERE session_id = ? AND key = ?')
533
+ .get(secondSessionId, 'delete.item.1');
534
+ (0, globals_1.expect)(otherSessionItem).toBeTruthy();
535
+ // Handler response
536
+ const handlerResponse = {
537
+ content: [
538
+ {
539
+ type: 'text',
540
+ text: JSON.stringify({
541
+ operation: 'batch_delete',
542
+ keys: keysToDelete,
543
+ totalDeleted: 3,
544
+ results: results,
545
+ }, null, 2),
546
+ },
547
+ ],
548
+ };
549
+ const parsed = JSON.parse(handlerResponse.content[0].text);
550
+ (0, globals_1.expect)(parsed.totalDeleted).toBe(3);
551
+ });
552
+ (0, globals_1.it)('should handle non-existent keys in batch delete', () => {
553
+ const keysToDelete = ['delete.item.1', 'non.existent.1', 'delete.item.2', 'non.existent.2'];
554
+ const results = [];
555
+ keysToDelete.forEach((key, index) => {
556
+ const result = db
557
+ .prepare('DELETE FROM context_items WHERE session_id = ? AND key = ?')
558
+ .run(testSessionId, key);
559
+ results.push({
560
+ index,
561
+ key,
562
+ deleted: result.changes > 0,
563
+ count: result.changes,
564
+ });
565
+ });
566
+ const deletedCount = results.filter(r => r.deleted).length;
567
+ (0, globals_1.expect)(deletedCount).toBe(2);
568
+ // Handler response
569
+ const handlerResponse = {
570
+ content: [
571
+ {
572
+ type: 'text',
573
+ text: JSON.stringify({
574
+ operation: 'batch_delete',
575
+ keys: keysToDelete,
576
+ totalRequested: keysToDelete.length,
577
+ totalDeleted: deletedCount,
578
+ notFound: results.filter(r => !r.deleted).map(r => r.key),
579
+ results: results,
580
+ }, null, 2),
581
+ },
582
+ ],
583
+ };
584
+ const parsed = JSON.parse(handlerResponse.content[0].text);
585
+ (0, globals_1.expect)(parsed.totalDeleted).toBe(2);
586
+ (0, globals_1.expect)(parsed.notFound).toHaveLength(2);
587
+ });
588
+ (0, globals_1.it)('should validate keys before deletion', () => {
589
+ const invalidKeys = ['', ' ', null, undefined];
590
+ const errors = [];
591
+ invalidKeys.forEach((key, index) => {
592
+ try {
593
+ if (!key || !key.trim()) {
594
+ throw new validation_1.ValidationError('Key cannot be empty');
595
+ }
596
+ }
597
+ catch (error) {
598
+ errors.push({
599
+ index,
600
+ key: key || 'undefined',
601
+ error: error.message,
602
+ });
603
+ }
604
+ });
605
+ (0, globals_1.expect)(errors.length).toBe(4);
606
+ });
607
+ (0, globals_1.it)('should handle batch delete with pattern matching', () => {
608
+ const pattern = 'delete.item.*';
609
+ // Convert to SQL pattern
610
+ const sqlPattern = pattern.replace(/\*/g, '%');
611
+ const result = db
612
+ .prepare('DELETE FROM context_items WHERE session_id = ? AND key LIKE ?')
613
+ .run(testSessionId, sqlPattern);
614
+ (0, globals_1.expect)(result.changes).toBe(3);
615
+ // Verify only keep.item.* remain
616
+ const remainingItems = db
617
+ .prepare('SELECT key FROM context_items WHERE session_id = ?')
618
+ .all(testSessionId);
619
+ (0, globals_1.expect)(remainingItems.every((item) => item.key.startsWith('keep.'))).toBe(true);
620
+ // Handler response
621
+ const handlerResponse = {
622
+ content: [
623
+ {
624
+ type: 'text',
625
+ text: JSON.stringify({
626
+ operation: 'batch_delete',
627
+ pattern: pattern,
628
+ totalDeleted: result.changes,
629
+ }, null, 2),
630
+ },
631
+ ],
632
+ };
633
+ const parsed = JSON.parse(handlerResponse.content[0].text);
634
+ (0, globals_1.expect)(parsed.totalDeleted).toBe(3);
635
+ });
636
+ (0, globals_1.it)('should support dry run for batch delete', () => {
637
+ const keysToDelete = ['delete.item.1', 'delete.item.2'];
638
+ const _dryRun = true;
639
+ // In dry run, SELECT instead of DELETE
640
+ const itemsToDelete = db
641
+ .prepare(`SELECT key, value, category, priority FROM context_items
642
+ WHERE session_id = ? AND key IN (${keysToDelete.map(() => '?').join(',')})`)
643
+ .all(testSessionId, ...keysToDelete);
644
+ (0, globals_1.expect)(itemsToDelete.length).toBe(2);
645
+ // Verify no actual deletion
646
+ const count = db
647
+ .prepare('SELECT COUNT(*) as count FROM context_items WHERE session_id = ?')
648
+ .get(testSessionId).count;
649
+ (0, globals_1.expect)(count).toBe(5); // All items still exist
650
+ // Handler response for dry run
651
+ const handlerResponse = {
652
+ content: [
653
+ {
654
+ type: 'text',
655
+ text: JSON.stringify({
656
+ operation: 'batch_delete',
657
+ dryRun: true,
658
+ keys: keysToDelete,
659
+ itemsToDelete: itemsToDelete.map((item) => ({
660
+ key: item.key,
661
+ value: item.value.substring(0, 50) + (item.value.length > 50 ? '...' : ''),
662
+ category: item.category,
663
+ priority: item.priority,
664
+ })),
665
+ totalItems: itemsToDelete.length,
666
+ }, null, 2),
667
+ },
668
+ ],
669
+ };
670
+ const parsed = JSON.parse(handlerResponse.content[0].text);
671
+ (0, globals_1.expect)(parsed.dryRun).toBe(true);
672
+ (0, globals_1.expect)(parsed.itemsToDelete).toHaveLength(2);
673
+ });
674
+ });
675
+ (0, globals_1.describe)('Batch Update Operations', () => {
676
+ (0, globals_1.beforeEach)(() => {
677
+ // Create test items with various properties
678
+ const items = [
679
+ {
680
+ key: 'update.item.1',
681
+ value: 'Original value 1',
682
+ category: 'task',
683
+ priority: 'normal',
684
+ channel: 'main',
685
+ },
686
+ {
687
+ key: 'update.item.2',
688
+ value: 'Original value 2',
689
+ category: 'note',
690
+ priority: 'low',
691
+ channel: 'main',
692
+ },
693
+ {
694
+ key: 'update.item.3',
695
+ value: 'Original value 3',
696
+ category: 'config',
697
+ priority: 'high',
698
+ channel: 'development',
699
+ },
700
+ ];
701
+ items.forEach(item => {
702
+ db.prepare(`
703
+ INSERT INTO context_items (id, session_id, key, value, category, priority, channel)
704
+ VALUES (?, ?, ?, ?, ?, ?, ?)
705
+ `).run((0, uuid_1.v4)(), testSessionId, item.key, item.value, item.category, item.priority, item.channel);
706
+ });
707
+ });
708
+ (0, globals_1.it)('should update multiple items with different changes', () => {
709
+ const updates = [
710
+ {
711
+ key: 'update.item.1',
712
+ updates: {
713
+ value: 'Updated value 1',
714
+ priority: 'high',
715
+ },
716
+ },
717
+ {
718
+ key: 'update.item.2',
719
+ updates: {
720
+ category: 'task',
721
+ channel: 'production',
722
+ },
723
+ },
724
+ {
725
+ key: 'update.item.3',
726
+ updates: {
727
+ value: 'Completely new value',
728
+ category: 'note',
729
+ priority: 'normal',
730
+ },
731
+ },
732
+ ];
733
+ const results = [];
734
+ db.prepare('BEGIN TRANSACTION').run();
735
+ try {
736
+ updates.forEach((update, index) => {
737
+ // Build dynamic UPDATE statement
738
+ const setClauses = [];
739
+ const values = [];
740
+ if (update.updates.value !== undefined) {
741
+ setClauses.push('value = ?');
742
+ values.push(update.updates.value);
743
+ }
744
+ if (update.updates.category !== undefined) {
745
+ setClauses.push('category = ?');
746
+ values.push(update.updates.category);
747
+ }
748
+ if (update.updates.priority !== undefined) {
749
+ setClauses.push('priority = ?');
750
+ values.push(update.updates.priority);
751
+ }
752
+ if (update.updates.channel !== undefined) {
753
+ setClauses.push('channel = ?');
754
+ values.push(update.updates.channel);
755
+ }
756
+ setClauses.push('updated_at = CURRENT_TIMESTAMP');
757
+ const sql = `
758
+ UPDATE context_items
759
+ SET ${setClauses.join(', ')}
760
+ WHERE session_id = ? AND key = ?
761
+ `;
762
+ values.push(testSessionId, update.key);
763
+ const result = db.prepare(sql).run(...values);
764
+ results.push({
765
+ index,
766
+ key: update.key,
767
+ updated: result.changes > 0,
768
+ fields: Object.keys(update.updates),
769
+ });
770
+ });
771
+ db.prepare('COMMIT').run();
772
+ }
773
+ catch (error) {
774
+ db.prepare('ROLLBACK').run();
775
+ throw error;
776
+ }
777
+ (0, globals_1.expect)(results.every(r => r.updated)).toBe(true);
778
+ // Verify updates
779
+ const updatedItems = db
780
+ .prepare('SELECT * FROM context_items WHERE session_id = ? ORDER BY key')
781
+ .all(testSessionId);
782
+ (0, globals_1.expect)(updatedItems[0].value).toBe('Updated value 1');
783
+ (0, globals_1.expect)(updatedItems[0].priority).toBe('high');
784
+ (0, globals_1.expect)(updatedItems[1].category).toBe('task');
785
+ (0, globals_1.expect)(updatedItems[1].channel).toBe('production');
786
+ (0, globals_1.expect)(updatedItems[2].value).toBe('Completely new value');
787
+ // Handler response
788
+ const handlerResponse = {
789
+ content: [
790
+ {
791
+ type: 'text',
792
+ text: JSON.stringify({
793
+ operation: 'batch_update',
794
+ totalItems: updates.length,
795
+ succeeded: results.filter(r => r.updated).length,
796
+ failed: results.filter(r => !r.updated).length,
797
+ results: results,
798
+ }, null, 2),
799
+ },
800
+ ],
801
+ };
802
+ const parsed = JSON.parse(handlerResponse.content[0].text);
803
+ (0, globals_1.expect)(parsed.succeeded).toBe(3);
804
+ });
805
(0, globals_1.it)('should validate update fields', () => {
    // Each entry is deliberately malformed in exactly one way; the
    // validation pass below must reject every one of them.
    const badUpdates = [
        { key: 'update.item.1', updates: { category: 'invalid-category' } }, // Invalid
        { key: 'update.item.2', updates: { priority: 'urgent' } }, // Invalid
        { key: 'update.item.3', updates: { value: '' } }, // Empty value
        { key: 'update.item.1', updates: {} }, // No updates
    ];
    const allowedCategories = ['task', 'decision', 'progress', 'note', 'error', 'warning'];
    const allowedPriorities = ['high', 'normal', 'low'];
    const failures = [];
    for (const [index, entry] of badUpdates.entries()) {
        try {
            const { category, priority, value } = entry.updates;
            // Validate category
            if (category && !allowedCategories.includes(category)) {
                throw new validation_1.ValidationError(`Invalid category: ${category}`);
            }
            // Validate priority
            if (priority && !allowedPriorities.includes(priority)) {
                throw new validation_1.ValidationError(`Invalid priority: ${priority}`);
            }
            // Validate value
            if (value !== undefined && value === '') {
                throw new validation_1.ValidationError('Value cannot be empty');
            }
            // Validate at least one update
            if (Object.keys(entry.updates).length === 0) {
                throw new validation_1.ValidationError('No updates provided');
            }
        }
        catch (error) {
            failures.push({ index, key: entry.key, error: error.message });
        }
    }
    // All four entries must fail, in input order, with the expected messages.
    (0, globals_1.expect)(failures.length).toBe(4);
    (0, globals_1.expect)(failures[0].error).toContain('Invalid category');
    (0, globals_1.expect)(failures[1].error).toContain('Invalid priority');
    (0, globals_1.expect)(failures[2].error).toContain('Value cannot be empty');
    (0, globals_1.expect)(failures[3].error).toContain('No updates provided');
});
870
(0, globals_1.it)('should handle partial update failures', () => {
    // Mixed batch: two valid updates, one missing key, one invalid priority.
    const updates = [
        { key: 'update.item.1', updates: { value: 'Valid update' } },
        { key: 'non.existent.key', updates: { value: 'Update for non-existent' } }, // Will fail
        { key: 'update.item.2', updates: { priority: 'urgent' } }, // Invalid priority
        { key: 'update.item.3', updates: { channel: 'staging' } },
    ];
    const succeededList = [];
    const failedList = [];
    updates.forEach((entry, index) => {
        try {
            // Validate priority
            const { priority } = entry.updates;
            if (priority && !['high', 'normal', 'low'].includes(priority)) {
                throw new validation_1.ValidationError(`Invalid priority: ${priority}`);
            }
            // Build the SET clause from whatever fields this entry carries.
            const fields = Object.entries(entry.updates);
            const assignments = fields.map(([column]) => `${column} = ?`);
            const params = fields.map(([, fieldValue]) => fieldValue);
            assignments.push('updated_at = CURRENT_TIMESTAMP');
            params.push(testSessionId, entry.key);
            const outcome = db
                .prepare(`UPDATE context_items SET ${assignments.join(', ')} WHERE session_id = ? AND key = ?`)
                .run(...params);
            // A zero-row update means the key does not exist in this session.
            if (outcome.changes === 0) {
                throw new Error('Item not found');
            }
            succeededList.push({ index, key: entry.key, updated: true });
        }
        catch (error) {
            failedList.push({ index, key: entry.key, error: error.message });
        }
    });
    (0, globals_1.expect)(succeededList.length).toBe(2); // 2 successful
    (0, globals_1.expect)(failedList.length).toBe(2); // 2 failed
});
940
(0, globals_1.it)('should support metadata updates', () => {
    // Metadata is stored as a JSON string in the metadata column.
    const metadataUpdates = [
        {
            key: 'update.item.1',
            updates: {
                metadata: { tags: ['important', 'reviewed'], lastReviewed: new Date().toISOString() },
            },
        },
        {
            key: 'update.item.2',
            updates: {
                metadata: { environment: 'production', version: '1.0.0' },
            },
        },
    ];
    const outcomes = metadataUpdates.map((entry, index) => {
        const serialized = JSON.stringify(entry.updates.metadata);
        const res = db
            .prepare(`UPDATE context_items
               SET metadata = ?, updated_at = CURRENT_TIMESTAMP
               WHERE session_id = ? AND key = ?`)
            .run(serialized, testSessionId, entry.key);
        return { index, key: entry.key, updated: res.changes > 0 };
    });
    (0, globals_1.expect)(outcomes.every(r => r.updated)).toBe(true);
    // Verify metadata round-trips as parseable JSON for both rows.
    const stored = db
        .prepare('SELECT key, metadata FROM context_items WHERE session_id = ? AND metadata IS NOT NULL')
        .all(testSessionId);
    (0, globals_1.expect)(stored.length).toBe(2);
    for (const row of stored) {
        const parsedMetadata = JSON.parse(row.metadata);
        (0, globals_1.expect)(parsedMetadata).toBeTruthy();
    }
});
980
(0, globals_1.it)('should update items matching pattern', () => {
    // GLOB-based bulk update: every key matching the pattern gets the
    // same priority/channel in a single statement.
    const pattern = 'update.item.*';
    const updates = {
        priority: 'high',
        channel: 'production',
    };
    const result = db
        .prepare(`UPDATE context_items
             SET priority = ?, channel = ?, updated_at = CURRENT_TIMESTAMP
             WHERE session_id = ? AND key GLOB ?`)
        .run(updates.priority, updates.channel, testSessionId, pattern);
    (0, globals_1.expect)(result.changes).toBe(3);
    // Re-read the matching rows and confirm both fields were applied everywhere.
    const matched = db
        .prepare('SELECT * FROM context_items WHERE session_id = ? AND key GLOB ?')
        .all(testSessionId, pattern);
    const allHigh = matched.every((item) => item.priority === 'high');
    const allProduction = matched.every((item) => item.channel === 'production');
    (0, globals_1.expect)(allHigh).toBe(true);
    (0, globals_1.expect)(allProduction).toBe(true);
    // Handler response
    const payload = {
        operation: 'batch_update',
        pattern: pattern,
        updates: updates,
        itemsUpdated: result.changes,
    };
    const handlerResponse = {
        content: [
            {
                type: 'text',
                text: JSON.stringify(payload, null, 2),
            },
        ],
    };
    const parsed = JSON.parse(handlerResponse.content[0].text);
    (0, globals_1.expect)(parsed.itemsUpdated).toBe(3);
});
1015
+ });
1016
(0, globals_1.describe)('Performance and Transaction Handling', () => {
    (0, globals_1.it)('should handle large batch operations efficiently', () => {
        // 500 inserts inside one explicit transaction should be fast.
        const batchSize = 500;
        const items = [];
        for (let i = 0; i < batchSize; i++) {
            items.push({
                key: `perf.item.${String(i).padStart(4, '0')}`,
                value: `Performance test value ${i}`,
                category: i % 2 === 0 ? 'task' : 'note',
                priority: i % 3 === 0 ? 'high' : i % 3 === 1 ? 'normal' : 'low',
            });
        }
        const startedAt = Date.now();
        db.prepare('BEGIN TRANSACTION').run();
        try {
            const insert = db.prepare(`
          INSERT INTO context_items (id, session_id, key, value, category, priority)
          VALUES (?, ?, ?, ?, ?, ?)
        `);
            for (const item of items) {
                insert.run((0, uuid_1.v4)(), testSessionId, item.key, item.value, item.category, item.priority);
            }
            db.prepare('COMMIT').run();
        }
        catch (error) {
            db.prepare('ROLLBACK').run();
            throw error;
        }
        const finishedAt = Date.now();
        (0, globals_1.expect)(finishedAt - startedAt).toBeLessThan(2000); // Should complete within 2 seconds
        // Verify all items were saved
        const count = db
            .prepare('SELECT COUNT(*) as count FROM context_items WHERE session_id = ?')
            .get(testSessionId).count;
        (0, globals_1.expect)(count).toBe(batchSize);
    });
    (0, globals_1.it)('should rollback entire batch on error', () => {
        const pending = [
            { key: 'rollback.1', value: 'Value 1' },
            { key: 'rollback.2', value: 'Value 2' },
            { key: 'rollback.3', value: 'Value 3' },
        ];
        try {
            db.prepare('BEGIN TRANSACTION').run();
            // Insert first two items successfully, then fail before the third.
            for (const item of pending.slice(0, 2)) {
                db.prepare(`
            INSERT INTO context_items (id, session_id, key, value)
            VALUES (?, ?, ?, ?)
          `).run((0, uuid_1.v4)(), testSessionId, item.key, item.value);
            }
            // Simulate error on third item
            throw new Error('Simulated error during batch operation');
        }
        catch (_error) {
            db.prepare('ROLLBACK').run();
        }
        // After rollback, none of the partial inserts may remain.
        const leftover = db
            .prepare(`SELECT COUNT(*) as count FROM context_items
           WHERE session_id = ? AND key LIKE 'rollback.%'`)
            .get(testSessionId).count;
        (0, globals_1.expect)(leftover).toBe(0);
    });
    (0, globals_1.it)('should handle concurrent batch operations safely', () => {
        // This test simulates what would happen with concurrent operations.
        // In a real scenario, SQLite's transaction isolation would serialize
        // the two transactions; here they run back-to-back.
        const buildItems = (keyPrefix, valuePrefix) => Array.from({ length: 50 }, (_, i) => ({
            key: `${keyPrefix}.${i}`,
            value: `${valuePrefix} ${i}`,
        }));
        // Runs one whole batch in its own transaction; returns success flag.
        const insertBatch = (items) => {
            try {
                db.prepare('BEGIN TRANSACTION').run();
                const stmt = db.prepare('INSERT INTO context_items (id, session_id, key, value) VALUES (?, ?, ?, ?)');
                for (const item of items) {
                    stmt.run((0, uuid_1.v4)(), testSessionId, item.key, item.value);
                }
                db.prepare('COMMIT').run();
                return true;
            }
            catch (_error) {
                db.prepare('ROLLBACK').run();
                return false;
            }
        };
        const batch1Success = insertBatch(buildItems('concurrent.batch1', 'Batch 1 value'));
        const batch2Success = insertBatch(buildItems('concurrent.batch2', 'Batch 2 value'));
        (0, globals_1.expect)(batch1Success).toBe(true);
        (0, globals_1.expect)(batch2Success).toBe(true);
        // Verify both batches succeeded
        const count = db
            .prepare(`SELECT COUNT(*) as count FROM context_items
           WHERE session_id = ? AND key LIKE 'concurrent.%'`)
            .get(testSessionId).count;
        (0, globals_1.expect)(count).toBe(100);
    });
});
1127
(0, globals_1.describe)('Handler Response Formats', () => {
    (0, globals_1.it)('should provide detailed batch save response', () => {
        const items = [
            { key: 'response.1', value: 'Value 1', category: 'task' },
            { key: 'response.2', value: 'Value 2', priority: 'high' },
        ];
        // Simulate the per-item result records a handler would produce.
        const results = [];
        items.forEach((item, index) => {
            results.push({
                index,
                key: item.key,
                success: true,
                id: (0, uuid_1.v4)(),
                action: 'created',
                size: Buffer.byteLength(item.value, 'utf8'),
            });
        });
        let succeededCount = 0;
        let totalSize = 0;
        for (const r of results) {
            if (r.success) {
                succeededCount += 1;
            }
            totalSize += r.size;
        }
        const payload = {
            operation: 'batch_save',
            totalItems: items.length,
            succeeded: succeededCount,
            failed: 0,
            totalSize: totalSize,
            results: results,
            timestamp: new Date().toISOString(),
        };
        const handlerResponse = {
            content: [{ type: 'text', text: JSON.stringify(payload, null, 2) }],
        };
        const parsed = JSON.parse(handlerResponse.content[0].text);
        (0, globals_1.expect)(parsed.operation).toBe('batch_save');
        (0, globals_1.expect)(parsed.totalItems).toBe(2);
        (0, globals_1.expect)(parsed.succeeded).toBe(2);
        (0, globals_1.expect)(parsed.results).toHaveLength(2);
        (0, globals_1.expect)(parsed.timestamp).toBeTruthy();
    });
    (0, globals_1.it)('should provide summary for large batch operations', () => {
        const itemCount = 1000;
        const succeeded = 950;
        const failed = 50;
        const rate = ((succeeded / itemCount) * 100).toFixed(1);
        const payload = {
            operation: 'batch_save',
            totalItems: itemCount,
            succeeded: succeeded,
            failed: failed,
            successRate: `${rate}%`,
            summary: {
                categories: {
                    task: 400,
                    note: 300,
                    config: 250,
                },
                priorities: {
                    high: 300,
                    normal: 400,
                    low: 250,
                },
            },
            // Don't include individual results for large batches
            message: 'Large batch operation completed. Individual results omitted for brevity.',
        };
        const handlerResponse = {
            content: [{ type: 'text', text: JSON.stringify(payload, null, 2) }],
        };
        const parsed = JSON.parse(handlerResponse.content[0].text);
        (0, globals_1.expect)(parsed.successRate).toBe('95.0%');
        (0, globals_1.expect)(parsed.summary).toBeTruthy();
        (0, globals_1.expect)(parsed.results).toBeUndefined(); // Omitted for large batches
    });
    (0, globals_1.it)('should handle mixed operation results', () => {
        const operations = {
            save: { attempted: 10, succeeded: 8 },
            update: { attempted: 5, succeeded: 5 },
            delete: { attempted: 3, succeeded: 2 },
        };
        const payload = {
            operation: 'batch_mixed',
            operations: operations,
            totals: {
                attempted: 18,
                succeeded: 15,
                failed: 3,
            },
            summary: 'Completed batch operations: 8/10 saved, 5/5 updated, 2/3 deleted',
        };
        const handlerResponse = {
            content: [{ type: 'text', text: JSON.stringify(payload, null, 2) }],
        };
        const parsed = JSON.parse(handlerResponse.content[0].text);
        (0, globals_1.expect)(parsed.totals.succeeded).toBe(15);
        (0, globals_1.expect)(parsed.totals.failed).toBe(3);
        (0, globals_1.expect)(parsed.summary).toContain('8/10 saved');
    });
});
1230
+ });