@joystick.js/db-canary 0.0.0-canary.2274 → 0.0.0-canary.2276
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -104
- package/debug_test_runner.js +208 -0
- package/dist/client/index.js +1 -1
- package/dist/server/cluster/master.js +2 -2
- package/dist/server/cluster/worker.js +1 -1
- package/dist/server/index.js +1 -1
- package/dist/server/lib/auto_index_manager.js +1 -1
- package/dist/server/lib/bulk_insert_optimizer.js +1 -1
- package/dist/server/lib/http_server.js +3 -3
- package/dist/server/lib/operation_dispatcher.js +1 -1
- package/dist/server/lib/operations/admin.js +1 -1
- package/dist/server/lib/operations/update_one.js +1 -1
- package/dist/server/lib/simple_sync_manager.js +1 -0
- package/dist/server/lib/sync_receiver.js +1 -0
- package/full_debug_test_runner.js +197 -0
- package/package.json +10 -7
- package/src/client/index.js +1 -0
- package/src/server/cluster/master.js +8 -2
- package/src/server/cluster/worker.js +9 -3
- package/src/server/index.js +25 -24
- package/src/server/lib/auto_index_manager.js +8 -3
- package/src/server/lib/bulk_insert_optimizer.js +79 -0
- package/src/server/lib/http_server.js +7 -0
- package/src/server/lib/operation_dispatcher.js +16 -10
- package/src/server/lib/operations/admin.js +64 -31
- package/src/server/lib/operations/update_one.js +251 -1
- package/src/server/lib/simple_sync_manager.js +444 -0
- package/src/server/lib/sync_receiver.js +461 -0
- package/tests/client/index.test.js +7 -0
- package/tests/performance/isolated_5000000_test.js +184 -0
- package/tests/server/lib/http_server.test.js +3 -12
- package/tests/server/lib/operations/update_one.test.js +161 -0
- package/tests/server/lib/simple_sync_system.test.js +124 -0
- package/dist/server/lib/replication_manager.js +0 -1
- package/dist/server/lib/write_forwarder.js +0 -1
- package/src/server/lib/replication_manager.js +0 -727
- package/src/server/lib/write_forwarder.js +0 -636
- package/tests/server/lib/replication_manager.test.js +0 -202
- package/tests/server/lib/write_forwarder.test.js +0 -258
package/src/server/lib/sync_receiver.js

```diff
@@ -0,0 +1,461 @@
+/**
+ * @fileoverview Sync receiver for secondary nodes in JoystickDB.
+ * Receives and processes sync messages from primary nodes with API_KEY authentication.
+ * Secondary nodes are read-only except for authenticated sync operations.
+ */
+
+import net from 'net';
+import fs from 'fs/promises';
+import { get_settings } from './load_settings.js';
+import { create_message_parser, encode_message } from './tcp_protocol.js';
+import create_logger from './logger.js';
+import insert_one from './operations/insert_one.js';
+import update_one from './operations/update_one.js';
+import delete_one from './operations/delete_one.js';
+import delete_many from './operations/delete_many.js';
+import bulk_write from './operations/bulk_write.js';
+import create_index_operation from './operations/create_index.js';
+import drop_index_operation from './operations/drop_index.js';
+
+const { create_context_logger } = create_logger('sync_receiver');
+
+/**
+ * Sync receiver that processes authenticated sync messages from primary nodes.
+ * Validates API_KEY and applies operations to local database.
+ */
+class SyncReceiver {
+  /**
+   * Creates a new SyncReceiver instance.
+   */
+  constructor() {
+    /** @type {boolean} Whether this node is configured as secondary */
+    this.is_secondary = false;
+
+    /** @type {string|null} API_KEY loaded from file for authentication */
+    this.api_key = null;
+
+    /** @type {string|null} Path to API_KEY file */
+    this.api_key_file_path = null;
+
+    /** @type {net.Server|null} TCP server for receiving sync messages */
+    this.server = null;
+
+    /** @type {number} TCP port for sync operations */
+    this.sync_port = 1985;
+
+    /** @type {Object} Logger instance */
+    this.log = create_context_logger();
+
+    /** @type {Object} Sync statistics */
+    this.stats = {
+      total_received: 0,
+      successful_syncs: 0,
+      failed_syncs: 0,
+      auth_failures: 0,
+      operations_applied: 0
+    };
+  }
+
+  /**
+   * Initializes the sync receiver with settings configuration.
+   */
+  async initialize() {
+    try {
+      const settings = get_settings();
+
+      if (settings.primary === true) {
+        this.log.info('Node configured as primary - sync receiver disabled');
+        return;
+      }
+
+      if (settings.primary === false) {
+        this.is_secondary = true;
+        this.api_key_file_path = settings.secondary_sync_key;
+        this.sync_port = settings.sync_port || 1985;
+
+        if (!this.api_key_file_path) {
+          this.log.error('Secondary node missing secondary_sync_key configuration');
+          return;
+        }
+
+        await this.load_api_key();
+
+        if (!this.api_key) {
+          this.log.error('Failed to load API_KEY - sync receiver disabled');
+          return;
+        }
+
+        this.log.info('Initializing sync receiver for secondary node', {
+          api_key_file: this.api_key_file_path,
+          sync_port: this.sync_port
+        });
+
+        this.start_server();
+      }
+
+    } catch (error) {
+      this.log.warn('Could not initialize sync receiver - settings not loaded', {
+        error: error.message
+      });
+    }
+  }
+
+  /**
+   * Loads API_KEY from configured file path.
+   */
+  async load_api_key() {
+    try {
+      const key_content = await fs.readFile(this.api_key_file_path, 'utf8');
+      this.api_key = key_content.trim();
+
+      this.log.info('API_KEY loaded successfully', {
+        file_path: this.api_key_file_path,
+        key_length: this.api_key.length
+      });
+
+    } catch (error) {
+      this.log.error('Failed to load API_KEY from file', {
+        file_path: this.api_key_file_path,
+        error: error.message
+      });
+    }
+  }
+
+  /**
+   * Starts the TCP server to receive sync messages.
+   */
+  start_server() {
+    try {
+      this.server = net.createServer((socket) => {
+        this.log.debug('Sync connection established', {
+          remote_address: socket.remoteAddress,
+          remote_port: socket.remotePort
+        });
+
+        const message_parser = create_message_parser();
+
+        socket.on('data', (data) => {
+          try {
+            const messages = message_parser.parse_messages(data);
+            for (const message of messages) {
+              this.handle_sync_message(socket, message).catch((error) => {
+                this.log.error('Failed to handle sync message', {
+                  error: error.message,
+                  remote_address: socket.remoteAddress
+                });
+              });
+            }
+          } catch (error) {
+            this.log.error('Failed to parse sync message', {
+              error: error.message,
+              remote_address: socket.remoteAddress
+            });
+          }
+        });
+
+        socket.on('error', (error) => {
+          this.log.error('Sync connection error', {
+            error: error.message,
+            remote_address: socket.remoteAddress
+          });
+        });
+
+        socket.on('close', () => {
+          this.log.debug('Sync connection closed', {
+            remote_address: socket.remoteAddress
+          });
+        });
+      });
+
+      this.server.listen(this.sync_port, () => {
+        this.log.info('Sync receiver server started', {
+          port: this.sync_port
+        });
+      });
+
+      this.server.on('error', (error) => {
+        this.log.error('Sync receiver server error', {
+          error: error.message,
+          port: this.sync_port
+        });
+      });
+    } catch (error) {
+      this.log.error('Failed to start sync receiver server', {
+        error: error.message,
+        port: this.sync_port
+      });
+    }
+  }
+
+  /**
+   * Handles incoming sync message from primary node.
+   * @param {net.Socket} socket - Connection socket
+   * @param {Object|string} raw_message - Raw sync message
+   */
+  async handle_sync_message(socket, raw_message) {
+    this.stats.total_received++;
+
+    let message;
+    try {
+      message = typeof raw_message === 'string' ? JSON.parse(raw_message) : raw_message;
+    } catch (error) {
+      this.send_sync_response(socket, null, 'error', 'Invalid JSON message');
+      return;
+    }
+
+    const { type, api_key, sequence, operation, collection, data } = message;
+
+    if (type !== 'operation_sync') {
+      this.send_sync_response(socket, sequence, 'error', 'Invalid message type');
+      return;
+    }
+
+    // Validate API_KEY
+    if (!this.validate_api_key(api_key)) {
+      this.stats.auth_failures++;
+      this.log.error('Sync authentication failed', {
+        sequence,
+        operation,
+        remote_address: socket.remoteAddress
+      });
+      this.send_sync_response(socket, sequence, 'auth_failed', 'Invalid API_KEY');
+      return;
+    }
+
+    // Apply operation to local database
+    try {
+      await this.apply_sync_operation(operation, collection, data);
+      this.stats.successful_syncs++;
+      this.stats.operations_applied++;
+
+      this.log.debug('Sync operation applied successfully', {
+        sequence,
+        operation,
+        collection
+      });
+
+      this.send_sync_response(socket, sequence, 'success', null);
+
+    } catch (error) {
+      this.stats.failed_syncs++;
+
+      this.log.error('Failed to apply sync operation', {
+        sequence,
+        operation,
+        collection,
+        error: error.message
+      });
+
+      this.send_sync_response(socket, sequence, 'error', error.message);
+    }
+  }
+
+  /**
+   * Validates API_KEY from sync message.
+   * @param {string} provided_key - API_KEY from sync message
+   * @returns {boolean} True if key is valid
+   */
+  validate_api_key(provided_key) {
+    if (!provided_key || !this.api_key) {
+      return false;
+    }
+
+    return provided_key === this.api_key;
+  }
+
+  /**
+   * Applies sync operation to local database.
+   * @param {string} operation - Operation type
+   * @param {string} collection - Collection name
+   * @param {Object} data - Operation data
+   */
+  async apply_sync_operation(operation, collection, data) {
+    const database_name = data.database || 'default';
+
+    switch (operation) {
+      case 'insert_one':
+        return await insert_one(database_name, collection, data.document, data.options);
+
+      case 'update_one':
+        return await update_one(database_name, collection, data.filter, data.update, data.options);
+
+      case 'delete_one':
+        return await delete_one(database_name, collection, data.filter, data.options);
+
+      case 'delete_many':
+        return await delete_many(database_name, collection, data.filter, data.options);
+
+      case 'bulk_write':
+        return await bulk_write(database_name, collection, data.operations, data.options);
+
+      case 'create_index':
+        return await create_index_operation(database_name, collection, data.field, data.options);
+
+      case 'drop_index':
+        return await drop_index_operation(database_name, collection, data.field);
+
+      default:
+        throw new Error(`Unsupported sync operation: ${operation}`);
+    }
+  }
+
+  /**
+   * Sends sync response back to primary node.
+   * @param {net.Socket} socket - Connection socket
+   * @param {number|null} sequence - Sequence number
+   * @param {string} status - Response status
+   * @param {string|null} error - Error message if any
+   */
+  send_sync_response(socket, sequence, status, error) {
+    const response = {
+      type: 'sync_acknowledged',
+      sequence,
+      status,
+      timestamp: Date.now()
+    };
+
+    if (error) {
+      response.error = error;
+    }
+
+    try {
+      const encoded_response = encode_message(response);
+      socket.write(encoded_response);
+    } catch (write_error) {
+      this.log.error('Failed to send sync response', {
+        sequence,
+        status,
+        error: write_error.message
+      });
+    }
+  }
+
+  /**
+   * Checks if a client operation should be blocked on secondary.
+   * @param {string} operation_type - Type of operation
+   * @returns {boolean} True if operation should be blocked
+   */
+  should_block_client_operation(operation_type) {
+    if (!this.is_secondary) {
+      return false;
+    }
+
+    // Allow read operations on secondary
+    const read_operations = ['find', 'find_one', 'count_documents', 'get_indexes'];
+
+    if (read_operations.includes(operation_type)) {
+      return false;
+    }
+
+    // Block all write operations for direct clients
+    return true;
+  }
+
+  /**
+   * Gets current sync receiver status and statistics.
+   * @returns {Object} Sync receiver status
+   */
+  get_sync_status() {
+    return {
+      is_secondary: this.is_secondary,
+      api_key_loaded: !!this.api_key,
+      api_key_file: this.api_key_file_path,
+      server_running: !!this.server && this.server.listening,
+      sync_port: this.sync_port,
+      stats: this.stats
+    };
+  }
+
+  /**
+   * Reloads API_KEY from file (for key rotation).
+   */
+  async reload_api_key() {
+    if (!this.api_key_file_path) {
+      throw new Error('No API_KEY file path configured');
+    }
+
+    const old_key_length = this.api_key ? this.api_key.length : 0;
+    await this.load_api_key();
+
+    this.log.info('API_KEY reloaded', {
+      old_key_length,
+      new_key_length: this.api_key ? this.api_key.length : 0
+    });
+  }
+
+  /**
+   * Promotes secondary to primary (manual failover support).
+   */
+  promote_to_primary() {
+    if (!this.is_secondary) {
+      throw new Error('Node is not configured as secondary');
+    }
+
+    this.log.info('Promoting secondary to primary');
+
+    // Stop sync receiver server
+    if (this.server) {
+      this.server.close(() => {
+        this.log.info('Sync receiver server stopped for primary promotion');
+      });
+      this.server = null;
+    }
+
+    this.is_secondary = false;
+
+    this.log.info('Node promoted to primary - sync receiver disabled');
+  }
+
+  /**
+   * Shuts down the sync receiver and closes server.
+   * @returns {Promise<void>}
+   */
+  async shutdown() {
+    this.log.info('Shutting down sync receiver');
+
+    if (this.server) {
+      return new Promise((resolve) => {
+        this.server.close(() => {
+          this.log.info('Sync receiver server closed');
+          resolve();
+        });
+      });
+    }
+
+    this.log.info('Sync receiver shutdown complete');
+  }
+}
+
+/** @type {SyncReceiver|null} Singleton instance */
+let sync_receiver_instance = null;
+
+/**
+ * Gets the sync receiver singleton instance.
+ * @returns {SyncReceiver} Sync receiver instance
+ */
+export const get_sync_receiver = () => {
+  if (!sync_receiver_instance) {
+    sync_receiver_instance = new SyncReceiver();
+  }
+  return sync_receiver_instance;
+};
+
+/**
+ * Initializes the sync receiver singleton.
+ * @returns {Promise<void>}
+ */
+export const initialize_sync_receiver = async () => {
+  const receiver = get_sync_receiver();
+  await receiver.initialize();
+};
+
+/**
+ * Shuts down the sync receiver singleton.
+ * @returns {Promise<void>}
+ */
+export const shutdown_sync_receiver = async () => {
+  if (sync_receiver_instance) {
+    await sync_receiver_instance.shutdown();
+    sync_receiver_instance = null;
+  }
+};
```
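For reference, the wire contract implied by `handle_sync_message` and `send_sync_response` above looks roughly like the sketch below. The field names come straight from the destructuring in the new file; the concrete values (port, sequence, document) are illustrative only, and the actual on-socket framing is handled by `encode_message`/`create_message_parser` from `tcp_protocol.js`, which this diff does not include.

```js
// Illustrative shape of a sync message a primary would send
// (anything other than type: 'operation_sync' is rejected).
const example_sync_message = {
  type: 'operation_sync',
  api_key: 'contents-of-the-secondary_sync_key-file', // compared verbatim by validate_api_key
  sequence: 42,                // echoed back so the primary can correlate replies
  operation: 'insert_one',     // see apply_sync_operation for the supported set
  collection: 'posts',
  data: {
    database: 'default',       // optional; apply_sync_operation falls back to 'default'
    document: { _id: 'abc123', title: 'Hello' },
    options: {}
  }
};

// Illustrative shape of the acknowledgement built by send_sync_response.
const example_sync_ack = {
  type: 'sync_acknowledged',
  sequence: 42,
  status: 'success',           // or 'auth_failed' / 'error'
  timestamp: 1735689600000     // Date.now() on the secondary; an error field is added only on failure
};
```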
package/tests/client/index.test.js

```diff
@@ -18,6 +18,13 @@ test.beforeEach(async () => {
 
   // Clean up database files to ensure test isolation
   try {
+    if (existsSync('./.joystick/data/joystickdb_1983/data.mdb')) {
+      unlinkSync('./.joystick/data/joystickdb_1983/data.mdb');
+    }
+    if (existsSync('./.joystick/data/joystickdb_1983/lock.mdb')) {
+      unlinkSync('./.joystick/data/joystickdb_1983/lock.mdb');
+    }
+    // Also clean up old location for backward compatibility
     if (existsSync('./data/data.mdb')) {
       unlinkSync('./data/data.mdb');
     }
```
package/tests/performance/isolated_5000000_test.js

```diff
@@ -0,0 +1,184 @@
+
+import { rmSync, existsSync } from 'fs';
+import { initialize_database, cleanup_database } from '../../src/server/lib/query_engine.js';
+import { bulk_insert_with_metrics } from '../../src/server/lib/bulk_insert_optimizer.js';
+
+const TEST_DB_PATH = './test_data/isolated_5000000_test';
+const TEST_DATABASE = 'isolated_db_5000000';
+const TEST_COLLECTION = 'isolated_collection';
+
+// Generate minimal test documents
+const generate_documents = (count) => {
+  const documents = [];
+  const test_id = Date.now().toString(36);
+
+  for (let i = 0; i < count; i++) {
+    documents.push({
+      _id: `iso_${test_id}_${i.toString().padStart(8, '0')}`,
+      idx: i,
+      cat: i % 50,
+      val: i % 1000,
+      ts: Date.now() + i
+    });
+  }
+
+  return documents;
+};
+
+// Aggressive memory management
+const force_cleanup = async () => {
+  await cleanup_database(true);
+
+  // Force multiple GC cycles
+  if (global.gc) {
+    for (let i = 0; i < 5; i++) {
+      global.gc();
+      await new Promise(resolve => setTimeout(resolve, 50));
+    }
+  }
+
+  // Wait for LMDB resources to be released
+  await new Promise(resolve => setTimeout(resolve, 200));
+};
+
+// Main test execution
+const run_test = async () => {
+  try {
+    console.log('🚀 Starting 5M Document Enterprise Scale Test (5,000,000 documents)');
+
+    // Clean setup
+    if (existsSync(TEST_DB_PATH)) {
+      rmSync(TEST_DB_PATH, { recursive: true, force: true });
+    }
+
+    // Initial memory state
+    const initial_memory = process.memoryUsage();
+    console.log(`Initial Memory: ${Math.round(initial_memory.heapUsed / (1024 * 1024))}MB heap used`);
+
+    initialize_database(TEST_DB_PATH);
+
+    // Generate documents
+    console.log('Generating documents...');
+    const documents = generate_documents(5000000);
+
+    // Run test with optimal settings for isolation
+    const start_time = Date.now();
+    const result = await bulk_insert_with_metrics(TEST_DATABASE, TEST_COLLECTION, documents, {
+      disable_indexing: true,
+      pre_allocate_map_size: true,
+      sort_keys: true,
+      stream_processing: true,
+      batch_size: 250 // Smaller batches for very large datasets
+    });
+
+    const total_duration = Date.now() - start_time;
+    const duration_seconds = total_duration / 1000;
+
+    // Output results in parseable format
+    console.log(`\n✅ 5M DOCUMENT ENTERPRISE SCALE TEST RESULTS:`);
+    console.log(`Duration: ${duration_seconds.toFixed(2)} seconds`);
+    console.log(`Throughput: ${result.performance.documents_per_second.toLocaleString()} docs/sec`);
+    console.log(`Memory Delta: ${result.performance.memory_usage.delta_heap_mb}MB`);
+    console.log(`Peak Memory: ${result.performance.memory_usage.peak_heap_mb}MB`);
+    console.log(`Success Rate: 100%`);
+
+    // Validate results
+    if (!result.acknowledged) {
+      throw new Error('Insert not acknowledged');
+    }
+    if (result.inserted_count !== 5000000) {
+      throw new Error(`Expected ${5000000} inserts, got ${result.inserted_count}`);
+    }
+
+    // Performance validation
+    const max_duration = 180;
+    const min_throughput = 25000;
+    const max_memory = 2048;
+
+    if (duration_seconds > max_duration) {
+      throw new Error(`Duration ${duration_seconds}s exceeds ${max_duration}s limit`);
+    }
+    if (result.performance.documents_per_second < min_throughput) {
+      throw new Error(`Throughput ${result.performance.documents_per_second} below ${min_throughput} docs/sec target`);
+    }
+    if (result.performance.memory_usage.peak_heap_mb > max_memory) {
+      throw new Error(`Memory ${result.performance.memory_usage.peak_heap_mb}MB exceeds ${max_memory}MB limit`);
+    }
+
+    console.log(`\n🎯 5M DOCUMENT ENTERPRISE SCALE TEST VALIDATION:`);
+    console.log(`✅ Performance targets met`);
+    console.log(`✅ Memory usage within limits`);
+    console.log(`✅ All ${5000000} documents inserted successfully`);
+
+    // Cleanup
+    await force_cleanup();
+
+    const final_memory = process.memoryUsage();
+    console.log(`Final Memory: ${Math.round(final_memory.heapUsed / (1024 * 1024))}MB heap used`);
+
+    console.log('\n🎉 Test completed successfully');
+    process.exit(0);
+
+  } catch (error) {
+    console.error(`\n❌ Test failed: ${error.message}`);
+    console.error(error.stack);
+
+    try {
+      await force_cleanup();
+    } catch (cleanupError) {
+      console.error('Cleanup error:', cleanupError.message);
+    }
+
+    process.exit(1);
+  }
+};
+
+// Handle process signals
+process.on('SIGTERM', async () => {
+  console.log('Received SIGTERM, cleaning up...');
+  try {
+    await force_cleanup();
+  } catch (error) {
+    console.error('Cleanup error:', error.message);
+  }
+  process.exit(1);
+});
+
+process.on('SIGINT', async () => {
+  console.log('Received SIGINT, cleaning up...');
+  try {
+    await force_cleanup();
+  } catch (error) {
+    console.error('Cleanup error:', error.message);
+  }
+  process.exit(1);
+});
+
+// Add uncaught exception handlers
+process.on('uncaughtException', async (error) => {
+  console.error('\n💥 Uncaught Exception:', error.message);
+  console.error(error.stack);
+
+  try {
+    await force_cleanup();
+  } catch (cleanupError) {
+    console.error('Cleanup error:', cleanupError.message);
+  }
+
+  process.exit(1);
+});
+
+process.on('unhandledRejection', async (reason, promise) => {
+  console.error('\n💥 Unhandled Rejection at:', promise, 'reason:', reason);
+
+  try {
+    await force_cleanup();
+  } catch (cleanupError) {
+    console.error('Cleanup error:', cleanupError.message);
+  }
+
+  process.exit(1);
+});
+
+// Run the test
+run_test();
```
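A note on running this script: `force_cleanup` only forces GC cycles when `global.gc` is defined, and in Node.js that global exists only when the process is launched with the `--expose-gc` flag (for example, `node --expose-gc tests/performance/isolated_5000000_test.js`). Without the flag, the cleanup still waits for LMDB resources to be released but skips the explicit collections.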
package/tests/server/lib/http_server.test.js

```diff
@@ -290,18 +290,9 @@ test('HTTP server handles port conflicts gracefully', async (t) => {
   // NOTE: Wait a moment to ensure the first server is fully established.
   await new Promise(resolve => setTimeout(resolve, 100));
 
-  // NOTE: Try to start second server on same port - this should
-
-
-    await start_http_server(test_port);
-    t.fail('Should throw error for port conflict');
-  } catch (error) {
-    error_caught = true;
-    t.true(error.message.includes('EADDRINUSE') || error.code === 'EADDRINUSE');
-  }
-
-  // NOTE: Ensure we caught the expected error.
-  t.true(error_caught, 'Expected EADDRINUSE error was not caught');
+  // NOTE: Try to start second server on same port - in test environment this should return null.
+  const server2 = await start_http_server(test_port);
+  t.is(server2, null, 'Second server should return null due to port conflict in test environment');
 
   // NOTE: Verify first server is still running and accessible.
   const setup_info = get_setup_info();
```