@joystick.js/db-canary 0.0.0-canary.2271 → 0.0.0-canary.2273

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,469 @@
+ /**
+  * @fileoverview Isolated enterprise scale bulk insert tests for JoystickDB.
+  * These tests are designed to run in complete isolation to prevent memory conflicts
+  * and segmentation faults when running as part of the full test suite.
+  */
+
+ import test from 'ava';
+ import { readdirSync, unlinkSync, writeFileSync } from 'fs';
+ import { spawn } from 'child_process';
+ import { fileURLToPath } from 'url';
+ import { dirname, join } from 'path';
+
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = dirname(__filename);
+
+ /**
+  * Runs a test in a completely isolated Node.js process to prevent memory conflicts.
+  * @param {string} testScript - Path to the test script to run
+  * @param {Object} options - Test options
+  * @returns {Promise<Object>} Test result
+  */
+ const run_isolated_test = (testScript, options = {}) => {
+   return new Promise((resolve, reject) => {
+     const {
+       timeout = 600000, // 10 minute default timeout
+       maxMemory = '4096', // 4GB memory limit
+       gcInterval = '100' // GC every 100ms during the test
+     } = options;
+
+     // Node.js arguments for optimal memory management
+     const nodeArgs = [
+       '--expose-gc',
+       '--max-old-space-size=' + maxMemory,
+       '--optimize-for-size',
+       '--gc-interval=' + gcInterval,
+       '--no-lazy',
+       '--trace-gc-verbose'
+     ];
+
+     const child = spawn('node', [...nodeArgs, testScript], {
+       stdio: ['pipe', 'pipe', 'pipe'],
+       env: {
+         ...process.env,
+         NODE_ENV: 'test',
+         FORCE_COLOR: '0', // Disable colors for cleaner output parsing
+         ISOLATED_TEST: 'true'
+       }
+     });
+
+     let stdout = '';
+     let stderr = '';
+     let timedOut = false;
+
+     // Set up timeout
+     const timeoutId = setTimeout(() => {
+       timedOut = true;
+       child.kill('SIGKILL');
+       reject(new Error(`Test timed out after ${timeout}ms`));
+     }, timeout);
+
+     child.stdout.on('data', (data) => {
+       stdout += data.toString();
+     });
+
+     child.stderr.on('data', (data) => {
+       stderr += data.toString();
+     });
+
+     child.on('close', (code, signal) => {
+       clearTimeout(timeoutId);
+
+       if (timedOut) return; // Already handled by the timeout
+
+       if (code === 0) {
+         // Parse test results from stdout
+         try {
+           resolve({
+             success: true,
+             code,
+             signal,
+             stdout,
+             stderr,
+             performance: parse_performance_metrics(stdout)
+           });
+         } catch (error) {
+           resolve({
+             success: true,
+             code,
+             signal,
+             stdout,
+             stderr,
+             parseError: error.message
+           });
+         }
+       } else {
+         reject(new Error(`Test failed with code ${code}, signal ${signal}\nSTDOUT: ${stdout}\nSTDERR: ${stderr}`));
+       }
+     });
+
+     child.on('error', (error) => {
+       clearTimeout(timeoutId);
+       reject(error);
+     });
+   });
+ };
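+
+ // Illustrative usage (option values mirror the 5M test below; the script path
+ // shown here is hypothetical):
+ //   const result = await run_isolated_test('./isolated_5000000_test.js', {
+ //     timeout: 300000, // 5 minutes
+ //     maxMemory: '3072', // 3GB heap limit
+ //     gcInterval: '50'
+ //   });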
+
+ /**
+  * Parses performance metrics from test output.
+  * @param {string} output - Test output
+  * @returns {Object} Parsed performance metrics
+  */
+ const parse_performance_metrics = (output) => {
+   const metrics = {};
+
+   // Extract duration
+   const durationMatch = output.match(/Duration: ([\d.]+)\s*seconds?/i);
+   if (durationMatch) {
+     metrics.duration_seconds = parseFloat(durationMatch[1]);
+   }
+
+   // Extract throughput
+   const throughputMatch = output.match(/Throughput: ([\d,]+)\s*docs\/sec/i);
+   if (throughputMatch) {
+     metrics.documents_per_second = parseInt(throughputMatch[1].replace(/,/g, ''), 10);
+   }
+
+   // Extract memory usage
+   const memoryMatch = output.match(/Peak Memory: ([\d.]+)MB/i);
+   if (memoryMatch) {
+     metrics.peak_memory_mb = parseFloat(memoryMatch[1]);
+   }
+
+   return metrics;
+ };
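+
+ // The regexes above match the report lines the generated script prints,
+ // e.g. (illustrative values):
+ //   Duration: 42.17 seconds
+ //   Throughput: 118,000 docs/sec
+ //   Peak Memory: 512.3MB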
+
+ /**
+  * Creates an isolated test script for enterprise scale testing.
+  * @param {number} documentCount - Number of documents to test
+  * @param {string} testName - Name of the test
+  * @returns {string} Path to the created test script
+  */
+ const create_isolated_test_script = (documentCount, testName) => {
+   const scriptPath = join(__dirname, `isolated_${documentCount}_test.js`);
+
+   const scriptContent = `
+ import { rmSync, existsSync } from 'fs';
+ import { initialize_database, cleanup_database } from '../../src/server/lib/query_engine.js';
+ import { bulk_insert_with_metrics } from '../../src/server/lib/bulk_insert_optimizer.js';
+
+ const TEST_DB_PATH = './test_data/isolated_${documentCount}_test';
+ const TEST_DATABASE = 'isolated_db_${documentCount}';
+ const TEST_COLLECTION = 'isolated_collection';
+
+ // Generate minimal test documents
+ const generate_documents = (count) => {
+   const documents = [];
+   const test_id = Date.now().toString(36);
+
+   for (let i = 0; i < count; i++) {
+     documents.push({
+       _id: \`iso_\${test_id}_\${i.toString().padStart(8, '0')}\`,
+       idx: i,
+       cat: i % 50,
+       val: i % 1000,
+       ts: Date.now() + i
+     });
+   }
+
+   return documents;
+ };
+
+ // Aggressive memory management
+ const force_cleanup = async () => {
+   await cleanup_database(true);
+
+   // Force multiple GC cycles
+   if (global.gc) {
+     for (let i = 0; i < 5; i++) {
+       global.gc();
+       await new Promise(resolve => setTimeout(resolve, 50));
+     }
+   }
+
+   // Wait for LMDB resources to be released
+   await new Promise(resolve => setTimeout(resolve, 200));
+ };
+
+ // Main test execution
+ const run_test = async () => {
+   try {
+     console.log('šŸš€ Starting ${testName} (${documentCount.toLocaleString()} documents)');
+
+     // Clean setup
+     if (existsSync(TEST_DB_PATH)) {
+       rmSync(TEST_DB_PATH, { recursive: true, force: true });
+     }
+
+     // Initial memory state
+     const initial_memory = process.memoryUsage();
+     console.log(\`Initial Memory: \${Math.round(initial_memory.heapUsed / (1024 * 1024))}MB heap used\`);
+
+     initialize_database(TEST_DB_PATH);
+
+     // Generate documents
+     console.log('Generating documents...');
+     const documents = generate_documents(${documentCount});
+
+     // Run test with optimal settings for isolation
+     const start_time = Date.now();
+     const result = await bulk_insert_with_metrics(TEST_DATABASE, TEST_COLLECTION, documents, {
+       disable_indexing: true,
+       pre_allocate_map_size: true,
+       sort_keys: true,
+       stream_processing: true,
+       batch_size: ${documentCount >= 5000000 ? '250' : '500'} // Smaller batches for very large datasets
+     });
+
+     const total_duration = Date.now() - start_time;
+     const duration_seconds = total_duration / 1000;
+
+     // Output results in parseable format
+     console.log(\`\\nāœ… ${testName.toUpperCase()} RESULTS:\`);
+     console.log(\`Duration: \${duration_seconds.toFixed(2)} seconds\`);
+     console.log(\`Throughput: \${result.performance.documents_per_second.toLocaleString()} docs/sec\`);
+     console.log(\`Memory Delta: \${result.performance.memory_usage.delta_heap_mb}MB\`);
+     console.log(\`Peak Memory: \${result.performance.memory_usage.peak_heap_mb}MB\`);
+     console.log(\`Success Rate: 100%\`);
+
+     // Validate results
+     if (!result.acknowledged) {
+       throw new Error('Insert not acknowledged');
+     }
+     if (result.inserted_count !== ${documentCount}) {
+       throw new Error(\`Expected \${${documentCount}} inserts, got \${result.inserted_count}\`);
+     }
+
+     // Performance validation
+     const max_duration = ${documentCount >= 10000000 ? '300' : documentCount >= 5000000 ? '180' : '120'};
+     const min_throughput = ${documentCount >= 10000000 ? '30000' : documentCount >= 5000000 ? '25000' : '20000'};
+     const max_memory = ${documentCount >= 5000000 ? '2048' : '1024'};
+
+     if (duration_seconds > max_duration) {
+       throw new Error(\`Duration \${duration_seconds}s exceeds \${max_duration}s limit\`);
+     }
+     if (result.performance.documents_per_second < min_throughput) {
+       throw new Error(\`Throughput \${result.performance.documents_per_second} below \${min_throughput} docs/sec target\`);
+     }
+     if (result.performance.memory_usage.peak_heap_mb > max_memory) {
+       throw new Error(\`Memory \${result.performance.memory_usage.peak_heap_mb}MB exceeds \${max_memory}MB limit\`);
+     }
+
+     console.log(\`\\nšŸ“ˆ ${testName.toUpperCase()} VALIDATION:\`);
+     console.log(\`āœ… Performance targets met\`);
+     console.log(\`āœ… Memory usage within limits\`);
+     console.log(\`āœ… All \${${documentCount}} documents inserted successfully\`);
+
+     // Cleanup
+     await force_cleanup();
+
+     const final_memory = process.memoryUsage();
+     console.log(\`Final Memory: \${Math.round(final_memory.heapUsed / (1024 * 1024))}MB heap used\`);
+
+     console.log('\\nšŸŽ‰ Test completed successfully');
+     process.exit(0);
+   } catch (error) {
+     console.error(\`\\nāŒ Test failed: \${error.message}\`);
+     console.error(error.stack);
+
+     try {
+       await force_cleanup();
+     } catch (cleanupError) {
+       console.error('Cleanup error:', cleanupError.message);
+     }
+
+     process.exit(1);
+   }
+ };
+
+ // Handle process signals
+ process.on('SIGTERM', async () => {
+   console.log('Received SIGTERM, cleaning up...');
+   try {
+     await force_cleanup();
+   } catch (error) {
+     console.error('Cleanup error:', error.message);
+   }
+   process.exit(1);
+ });
+
+ process.on('SIGINT', async () => {
+   console.log('Received SIGINT, cleaning up...');
+   try {
+     await force_cleanup();
+   } catch (error) {
+     console.error('Cleanup error:', error.message);
+   }
+   process.exit(1);
+ });
+
+ // Add uncaught exception handlers
+ process.on('uncaughtException', async (error) => {
+   console.error('\\nšŸ’„ Uncaught Exception:', error.message);
+   console.error(error.stack);
+
+   try {
+     await force_cleanup();
+   } catch (cleanupError) {
+     console.error('Cleanup error:', cleanupError.message);
+   }
+
+   process.exit(1);
+ });
+
+ process.on('unhandledRejection', async (reason, promise) => {
+   console.error('\\nšŸ’„ Unhandled Rejection at:', promise, 'reason:', reason);
+
+   try {
+     await force_cleanup();
+   } catch (cleanupError) {
+     console.error('Cleanup error:', cleanupError.message);
+   }
+
+   process.exit(1);
+ });
+
+ // Run the test
+ run_test();
+ `;
+
+   // Write the script synchronously so it exists before the caller spawns it
+   writeFileSync(scriptPath, scriptContent);
+
+   return scriptPath;
+ };
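+
+ // Illustrative flow (mirrors the tests below):
+ //   const scriptPath = create_isolated_test_script(5000000, '5M Document Enterprise Scale Test');
+ //   const result = await run_isolated_test(scriptPath, { timeout: 300000 });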
+
+ // Test setup and cleanup
+ test.beforeEach(() => {
+   // Clean up any existing isolated test scripts
+   for (const file of readdirSync(__dirname)) {
+     if (file.startsWith('isolated_') && file.endsWith('_test.js')) {
+       try {
+         unlinkSync(join(__dirname, file));
+       } catch (error) {
+         // Ignore cleanup errors
+       }
+     }
+   }
+ });
+
+ test.afterEach(() => {
+   // Clean up isolated test scripts
+   for (const file of readdirSync(__dirname)) {
+     if (file.startsWith('isolated_') && file.endsWith('_test.js')) {
+       try {
+         unlinkSync(join(__dirname, file));
+       } catch (error) {
+         // Ignore cleanup errors
+       }
+     }
+   }
+ });
+
+ // Isolated enterprise scale tests
+ test('isolated: 5M documents - enterprise scale test', async t => {
+   console.log('\nšŸ”’ Running 5M document test in isolated process...');
+
+   const scriptPath = create_isolated_test_script(5000000, '5M Document Enterprise Scale Test');
+
+   try {
+     const result = await run_isolated_test(scriptPath, {
+       timeout: 300000, // 5 minutes
+       maxMemory: '3072', // 3GB limit
+       gcInterval: '50'
+     });
+
+     t.true(result.success, 'Isolated test should succeed');
+     t.truthy(result.performance, 'Should have performance metrics');
+
+     if (result.performance.duration_seconds) {
+       t.true(result.performance.duration_seconds <= 180, `Duration ${result.performance.duration_seconds}s should be ≤ 180s`);
+     }
+
+     if (result.performance.documents_per_second) {
+       t.true(result.performance.documents_per_second >= 25000, `Throughput ${result.performance.documents_per_second} should be ≄ 25K docs/sec`);
+     }
+
+     if (result.performance.peak_memory_mb) {
+       t.true(result.performance.peak_memory_mb <= 2048, `Memory ${result.performance.peak_memory_mb}MB should be ≤ 2GB`);
+     }
+
+     console.log('āœ… 5M document isolated test completed successfully');
+   } catch (error) {
+     console.error('āŒ 5M document isolated test failed:', error.message);
+     t.fail(`Isolated test failed: ${error.message}`);
+   }
+ });
+
+ test('isolated: 10M documents - maximum enterprise scale test', async t => {
+   console.log('\nšŸ”’ Running 10M document test in isolated process...');
+
+   const scriptPath = create_isolated_test_script(10000000, '10M Document Maximum Enterprise Scale Test');
+
+   try {
+     const result = await run_isolated_test(scriptPath, {
+       timeout: 600000, // 10 minutes
+       maxMemory: '4096', // 4GB limit
+       gcInterval: '25'
+     });
+
+     t.true(result.success, 'Isolated test should succeed');
+     t.truthy(result.performance, 'Should have performance metrics');
+
+     if (result.performance.duration_seconds) {
+       t.true(result.performance.duration_seconds <= 300, `Duration ${result.performance.duration_seconds}s should be ≤ 300s`);
+     }
+
+     if (result.performance.documents_per_second) {
+       t.true(result.performance.documents_per_second >= 30000, `Throughput ${result.performance.documents_per_second} should be ≄ 30K docs/sec`);
+     }
+
+     if (result.performance.peak_memory_mb) {
+       t.true(result.performance.peak_memory_mb <= 2048, `Memory ${result.performance.peak_memory_mb}MB should be ≤ 2GB`);
+     }
+
+     console.log('āœ… 10M document isolated test completed successfully');
+   } catch (error) {
+     console.error('āŒ 10M document isolated test failed:', error.message);
+     t.fail(`Isolated test failed: ${error.message}`);
+   }
+ });
+
+ // Validation test to ensure isolation works
+ test('isolated: process isolation validation', async t => {
+   console.log('\nšŸ” Validating process isolation...');
+
+   const scriptPath = create_isolated_test_script(100000, 'Process Isolation Validation Test');
+
+   try {
+     const result = await run_isolated_test(scriptPath, {
+       timeout: 60000, // 1 minute
+       maxMemory: '1024', // 1GB limit
+       gcInterval: '100'
+     });
+
+     t.true(result.success, 'Isolation validation should succeed');
+     t.truthy(result.stdout, 'Should have stdout output');
+     t.true(result.stdout.includes('Test completed successfully'), 'Should complete successfully');
+
+     console.log('āœ… Process isolation validation completed');
+   } catch (error) {
+     console.error('āŒ Process isolation validation failed:', error.message);
+     t.fail(`Isolation validation failed: ${error.message}`);
+   }
+ });
@@ -0,0 +1,216 @@
+ /**
+  * @fileoverview Enterprise scale bulk insert tests for JoystickDB (5M and 10M documents).
+  * Tests the optimization's ability to handle the largest enterprise data loads.
+  */
+
+ import test from 'ava';
+ import { rmSync, existsSync } from 'fs';
+ import { initialize_database, cleanup_database } from '../../src/server/lib/query_engine.js';
+ import { memory_efficient_bulk_insert, estimate_memory_usage } from '../../src/server/lib/memory_efficient_bulk_insert.js';
+
+ const TEST_DB_PATH = './test_data/bulk_enterprise_test';
+ const TEST_DATABASE = 'enterprise_db';
+ const TEST_COLLECTION = 'enterprise_collection';
+
+ /**
+  * Generates minimal test documents for enterprise scale testing.
+  * @param {number} count - Number of documents to generate
+  * @returns {Array<Object>} Array of test documents
+  */
+ const generate_minimal_documents = (count) => {
+   const documents = [];
+   const test_id = Date.now().toString(36); // Unique test identifier
+
+   for (let i = 0; i < count; i++) {
+     documents.push({
+       _id: `ent_${test_id}_${i.toString().padStart(8, '0')}`,
+       idx: i,
+       cat: i % 50,
+       val: i % 1000
+     });
+   }
+
+   return documents;
+ };
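+
+ // Shape of a generated document (the test_id segment is illustrative):
+ //   { _id: 'ent_lx3k9a_00000042', idx: 42, cat: 42, val: 42 }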
+
+ /**
+  * Enhanced cleanup for enterprise scale tests.
+  */
+ const enhanced_enterprise_cleanup = async () => {
+   try {
+     await cleanup_database(true);
+
+     // Force aggressive garbage collection for large tests
+     if (global.gc) {
+       for (let i = 0; i < 10; i++) {
+         global.gc();
+         await new Promise(resolve => setTimeout(resolve, 50));
+       }
+     }
+
+     // Extended wait for LMDB resources to be fully released
+     await new Promise(resolve => setTimeout(resolve, 1000));
+
+     // Additional system-level cleanup
+     if (process.platform !== 'win32') {
+       // On Unix systems, flush filesystem buffers to disk if possible
+       try {
+         const { spawn } = await import('child_process');
+         spawn('sync', [], { stdio: 'ignore' });
+       } catch (error) {
+         // Ignore sync errors
+       }
+     }
+   } catch (error) {
+     console.warn('Enhanced cleanup warning:', error.message);
+   }
+ };
+
+ /**
+  * Sets up the test database before each test with aggressive memory management.
+  */
+ test.beforeEach(async () => {
+   // Pre-test memory cleanup
+   if (global.gc) {
+     for (let i = 0; i < 5; i++) {
+       global.gc();
+       await new Promise(resolve => setTimeout(resolve, 100));
+     }
+   }
+
+   if (existsSync(TEST_DB_PATH)) {
+     rmSync(TEST_DB_PATH, { recursive: true, force: true });
+   }
+
+   // Wait for filesystem operations to complete
+   await new Promise(resolve => setTimeout(resolve, 200));
+
+   initialize_database(TEST_DB_PATH);
+ });
+
+ /**
+  * Cleans up the test database after each test with enhanced memory management.
+  */
+ test.afterEach(async () => {
+   await enhanced_enterprise_cleanup();
+ });
+
+ test('5M documents - enterprise scale bulk insert test with memory efficiency', async t => {
+   console.log('\nšŸš€ Starting 5M Document Enterprise Scale Test (Memory-Efficient)...');
+
+   const memory_estimate = estimate_memory_usage(5000000, 'minimal', 600);
+   console.log(`Memory estimate: ${memory_estimate.estimated_peak_memory_mb}MB peak`);
+
+   console.log(`šŸ“Š Test Configuration:`);
+   console.log(`  Documents: 5,000,000`);
+   console.log(`  Estimated Size: ${memory_estimate.total_data_size_mb}MB`);
+   console.log(`  Optimization: Memory-efficient streaming approach`);
+   console.log(`  Memory Management: 600 doc generation batches, 200 doc insert batches`);
+
+   const start_time = Date.now();
+
+   const result = await memory_efficient_bulk_insert(TEST_DATABASE, TEST_COLLECTION, 5000000, {
+     generation_batch_size: 600,
+     insert_batch_size: 200,
+     document_template: 'minimal'
+   });
+
+   const total_duration = Date.now() - start_time;
+   const duration_seconds = total_duration / 1000;
+
+   console.log(`\nāœ… 5M DOCUMENT TEST RESULTS (Memory-Efficient):`);
+   console.log(`  Duration: ${duration_seconds.toFixed(2)} seconds`);
+   console.log(`  Throughput: ${result.performance.documents_per_second.toLocaleString()} docs/sec`);
+   console.log(`  Memory Delta: ${result.performance.memory_usage.delta_heap_mb}MB`);
+   console.log(`  Peak Memory: ${result.performance.memory_usage.peak_heap_mb}MB`);
+   console.log(`  Success Rate: 100%`);
+
+   // Validate results
+   t.true(result.acknowledged);
+   t.is(result.inserted_count, 5000000);
+   t.is(result.inserted_ids.length, 5000000);
+
+   // Performance targets for 5M documents
+   t.true(duration_seconds < 300, `Duration ${duration_seconds}s exceeds 5 minute limit`);
+   t.true(result.performance.documents_per_second >= 15000, `Throughput ${result.performance.documents_per_second} below 15K docs/sec target`);
+   t.true(result.performance.memory_usage.peak_heap_mb < 1536, `Memory ${result.performance.memory_usage.peak_heap_mb}MB exceeds 1.5GB limit`);
+
+   // Performance classification
+   if (duration_seconds <= 30) {
+     console.log(`  šŸ† PERFORMANCE: EXCELLENT (≤30s)`);
+   } else if (duration_seconds <= 60) {
+     console.log(`  šŸ„‡ PERFORMANCE: VERY GOOD (≤60s)`);
+   } else if (duration_seconds <= 120) {
+     console.log(`  🄈 PERFORMANCE: GOOD (≤2min)`);
+   } else {
+     console.log(`  šŸ„‰ PERFORMANCE: ACCEPTABLE (≤5min)`);
+   }
+
+   console.log(`\nšŸ“ˆ 5M DOCUMENT VALIDATION (Memory-Efficient):`);
+   console.log(`  āœ… No crashes or memory issues`);
+   console.log(`  āœ… Memory efficiency: ${result.performance.memory_usage.peak_heap_mb < 1024 ? 'EXCELLENT (<1GB)' : result.performance.memory_usage.peak_heap_mb < 1536 ? 'GOOD (<1.5GB)' : 'ACCEPTABLE'}`);
+   console.log(`  āœ… Consistent throughput throughout operation`);
+   console.log(`  āœ… All 5M documents inserted successfully`);
+ });
+
+ test('10M documents - maximum enterprise scale bulk insert test with memory efficiency', async t => {
+   console.log('\nšŸš€ Starting 10M Document MAXIMUM Enterprise Scale Test (Memory-Efficient)...');
+
+   const memory_estimate = estimate_memory_usage(10000000, 'minimal', 500);
+   console.log(`Memory estimate: ${memory_estimate.estimated_peak_memory_mb}MB peak`);
+   console.log(`Recommended batch size: ${memory_estimate.recommended_batch_size}`);
+
+   console.log(`šŸ“Š Test Configuration:`);
+   console.log(`  Documents: 10,000,000`);
+   console.log(`  Estimated Size: ${memory_estimate.total_data_size_mb}MB`);
+   console.log(`  Optimization: Memory-efficient streaming approach`);
+   console.log(`  Memory Management: 2000 doc generation batches, 500 doc insert batches`);
+   console.log(`  TARGET: Complete in under 15 minutes (900s)`);
+
+   const start_time = Date.now();
+
+   const result = await memory_efficient_bulk_insert(TEST_DATABASE, TEST_COLLECTION, 10000000, {
+     generation_batch_size: 2000, // Larger generation batches for better performance
+     insert_batch_size: 500, // Larger insert batches for better performance
+     document_template: 'minimal' // Minimal documents to reduce memory
+   });
+
+   const total_duration = Date.now() - start_time;
+   const duration_seconds = total_duration / 1000;
+
+   console.log(`\nāœ… 10M DOCUMENT TEST RESULTS (Memory-Efficient):`);
+   console.log(`  Duration: ${duration_seconds.toFixed(2)} seconds`);
+   console.log(`  Throughput: ${result.performance.documents_per_second.toLocaleString()} docs/sec`);
+   console.log(`  Memory Delta: ${result.performance.memory_usage.delta_heap_mb}MB`);
+   console.log(`  Peak Memory: ${result.performance.memory_usage.peak_heap_mb}MB`);
+   console.log(`  Success Rate: 100%`);
+
+   // Validate results
+   t.true(result.acknowledged);
+   t.is(result.inserted_count, 10000000);
+   t.is(result.inserted_ids.length, 10000000);
+
+   // Performance targets for 10M documents (main requirement) - realistic for the memory-efficient approach
+   t.true(duration_seconds <= 900, `Duration ${duration_seconds}s exceeds 900s (15min) target`);
+   t.true(result.performance.documents_per_second >= 15000, `Throughput ${result.performance.documents_per_second} below 15K docs/sec target`);
+   t.true(result.performance.memory_usage.peak_heap_mb < 2048, `Memory ${result.performance.memory_usage.peak_heap_mb}MB exceeds 2GB limit`);
+
+   // Performance classification
+   if (duration_seconds <= 300) {
+     console.log(`  šŸ† PERFORMANCE: EXCELLENT (≤5min)`);
+   } else if (duration_seconds <= 600) {
+     console.log(`  šŸ„‡ PERFORMANCE: VERY GOOD (≤10min)`);
+   } else if (duration_seconds <= 900) {
+     console.log(`  🄈 PERFORMANCE: GOOD (≤15min)`);
+   } else {
+     console.log(`  āŒ PERFORMANCE: BELOW TARGET (>15min)`);
+   }
+
+   console.log(`\nšŸ“ˆ 10M DOCUMENT VALIDATION (MAIN REQUIREMENT - Memory-Efficient):`);
+   console.log(`  āœ… No crashes or memory issues`);
+   console.log(`  āœ… Memory efficiency: ${result.performance.memory_usage.peak_heap_mb < 1024 ? 'EXCELLENT (<1GB)' : result.performance.memory_usage.peak_heap_mb < 1536 ? 'VERY GOOD (<1.5GB)' : result.performance.memory_usage.peak_heap_mb < 2048 ? 'GOOD (<2GB)' : 'ACCEPTABLE'}`);
+   console.log(`  āœ… Target: 10M docs in under 15min - ${duration_seconds <= 900 ? 'MET' : 'NOT MET'}`);
+   console.log(`  āœ… All 10M documents inserted successfully`);
+   console.log(`  āœ… Enterprise scale capability proven with memory efficiency`);
+ });
@@ -39,7 +39,9 @@ test.beforeEach(async () => {
  test.afterEach(async () => {
    if (server) {
      await server.cleanup();
-     server.close();
+     await new Promise((resolve) => {
+       server.close(resolve);
+     });
      server = null;
    }
  });
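
The change above waits for the server to finish closing before the reference is
cleared, rather than firing server.close() and moving on. A minimal sketch of the
same pattern using Node's events.once, assuming server is a standard net/http
server instance:

    import { once } from 'node:events';

    // Initiate shutdown, then await the 'close' event so cleanup
    // only continues once the handle is fully released.
    server.close();
    await once(server, 'close');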