@joystick.js/db-canary 0.0.0-canary.2275 ā 0.0.0-canary.2277
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -104
- package/debug_test_runner.js +208 -0
- package/dist/server/index.js +1 -1
- package/dist/server/lib/operation_dispatcher.js +1 -1
- package/dist/server/lib/operations/admin.js +1 -1
- package/dist/server/lib/simple_sync_manager.js +1 -0
- package/dist/server/lib/sync_receiver.js +1 -0
- package/full_debug_test_runner.js +197 -0
- package/package.json +2 -2
- package/src/server/index.js +25 -24
- package/src/server/lib/operation_dispatcher.js +16 -10
- package/src/server/lib/operations/admin.js +64 -31
- package/src/server/lib/simple_sync_manager.js +444 -0
- package/src/server/lib/sync_receiver.js +461 -0
- package/tests/server/lib/simple_sync_system.test.js +124 -0
- package/dist/server/lib/replication_manager.js +0 -1
- package/dist/server/lib/write_forwarder.js +0 -1
- package/src/server/lib/replication_manager.js +0 -727
- package/src/server/lib/write_forwarder.js +0 -636
- package/tests/server/lib/replication_manager.test.js +0 -202
- package/tests/server/lib/write_forwarder.test.js +0 -258
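Taken together, the file list replaces the old replication_manager / write_forwarder pair with a much simpler two-piece sync system: simple_sync_manager pushes writes from the primary, and sync_receiver applies them on secondaries. Judging from the dist build of sync_receiver.js shown below, a secondary is driven by a `primary` flag, a `secondary_sync_key` file path, and an optional `sync_port` (default 1985). A minimal sketch of that settings shape — the settings loader (load_settings.js) is not part of this diff, so the structure around these fields is an assumption:

// Hypothetical secondary-node settings sketch. The field names primary,
// secondary_sync_key, and sync_port appear in dist/server/lib/sync_receiver.js
// below; the surrounding file format and the example path are assumed.
const secondary_settings = {
  primary: false,                                  // false activates the sync receiver
  secondary_sync_key: '/etc/joystickdb/sync_key',  // example path to the shared API_KEY file
  sync_port: 1985                                  // TCP port the receiver listens on (1985 is the build's default)
};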
package/dist/server/lib/sync_receiver.js
ADDED
@@ -0,0 +1 @@
import y from"net";import h from"fs/promises";import{get_settings as d}from"./load_settings.js";import{create_message_parser as f,encode_message as m}from"./tcp_protocol.js";import g from"./logger.js";import u from"./operations/insert_one.js";import v from"./operations/update_one.js";import w from"./operations/delete_one.js";import k from"./operations/delete_many.js";import S from"./operations/bulk_write.js";import b from"./operations/create_index.js";import A from"./operations/drop_index.js";const{create_context_logger:I}=g("sync_receiver");class x{constructor(){this.is_secondary=!1,this.api_key=null,this.api_key_file_path=null,this.server=null,this.sync_port=1985,this.log=I(),this.stats={total_received:0,successful_syncs:0,failed_syncs:0,auth_failures:0,operations_applied:0}}async initialize(){try{const e=d();if(e.primary===!0){this.log.info("Node configured as primary - sync receiver disabled");return}if(e.primary===!1){if(this.is_secondary=!0,this.api_key_file_path=e.secondary_sync_key,this.sync_port=e.sync_port||1985,!this.api_key_file_path){this.log.error("Secondary node missing secondary_sync_key configuration");return}if(await this.load_api_key(),!this.api_key){this.log.error("Failed to load API_KEY - sync receiver disabled");return}this.log.info("Initializing sync receiver for secondary node",{api_key_file:this.api_key_file_path,sync_port:this.sync_port}),this.start_server()}}catch(e){this.log.warn("Could not initialize sync receiver - settings not loaded",{error:e.message})}}async load_api_key(){try{const e=await h.readFile(this.api_key_file_path,"utf8");this.api_key=e.trim(),this.log.info("API_KEY loaded successfully",{file_path:this.api_key_file_path,key_length:this.api_key.length})}catch(e){this.log.error("Failed to load API_KEY from file",{file_path:this.api_key_file_path,error:e.message})}}start_server(){try{this.server=y.createServer(e=>{this.log.debug("Sync connection established",{remote_address:e.remoteAddress,remote_port:e.remotePort});const s=f();e.on("data",r=>{try{const t=s.parse_messages(r);for(const o of t)this.handle_sync_message(e,o).catch(i=>{this.log.error("Failed to handle sync message",{error:i.message,remote_address:e.remoteAddress})})}catch(t){this.log.error("Failed to parse sync message",{error:t.message,remote_address:e.remoteAddress})}}),e.on("error",r=>{this.log.error("Sync connection error",{error:r.message,remote_address:e.remoteAddress})}),e.on("close",()=>{this.log.debug("Sync connection closed",{remote_address:e.remoteAddress})})}),this.server.listen(this.sync_port,()=>{this.log.info("Sync receiver server started",{port:this.sync_port})}),this.server.on("error",e=>{this.log.error("Sync receiver server error",{error:e.message,port:this.sync_port})})}catch(e){this.log.error("Failed to start sync receiver server",{error:e.message,port:this.sync_port})}}async handle_sync_message(e,s){this.stats.total_received++;let r;try{r=typeof s=="string"?JSON.parse(s):s}catch{this.send_sync_response(e,null,"error","Invalid JSON message");return}const{type:t,api_key:o,sequence:i,operation:a,collection:_,data:p}=r;if(t!=="operation_sync"){this.send_sync_response(e,i,"error","Invalid message type");return}if(!this.validate_api_key(o)){this.stats.auth_failures++,this.log.error("Sync authentication failed",{sequence:i,operation:a,remote_address:e.remoteAddress}),this.send_sync_response(e,i,"auth_failed","Invalid API_KEY");return}try{await this.apply_sync_operation(a,_,p),this.stats.successful_syncs++,this.stats.operations_applied++,this.log.debug("Sync operation applied 
successfully",{sequence:i,operation:a,collection:_}),this.send_sync_response(e,i,"success",null)}catch(c){this.stats.failed_syncs++,this.log.error("Failed to apply sync operation",{sequence:i,operation:a,collection:_,error:c.message}),this.send_sync_response(e,i,"error",c.message)}}validate_api_key(e){return!e||!this.api_key?!1:e===this.api_key}async apply_sync_operation(e,s,r){const t=r.database||"default";switch(e){case"insert_one":return await u(t,s,r.document,r.options);case"update_one":return await v(t,s,r.filter,r.update,r.options);case"delete_one":return await w(t,s,r.filter,r.options);case"delete_many":return await k(t,s,r.filter,r.options);case"bulk_write":return await S(t,s,r.operations,r.options);case"create_index":return await b(t,s,r.field,r.options);case"drop_index":return await A(t,s,r.field);default:throw new Error(`Unsupported sync operation: ${e}`)}}send_sync_response(e,s,r,t){const o={type:"sync_acknowledged",sequence:s,status:r,timestamp:Date.now()};t&&(o.error=t);try{const i=m(o);e.write(i)}catch(i){this.log.error("Failed to send sync response",{sequence:s,status:r,error:i.message})}}should_block_client_operation(e){return!(!this.is_secondary||["find","find_one","count_documents","get_indexes"].includes(e))}get_sync_status(){return{is_secondary:this.is_secondary,api_key_loaded:!!this.api_key,api_key_file:this.api_key_file_path,server_running:!!this.server&&this.server.listening,sync_port:this.sync_port,stats:this.stats}}async reload_api_key(){if(!this.api_key_file_path)throw new Error("No API_KEY file path configured");const e=this.api_key?this.api_key.length:0;await this.load_api_key(),this.log.info("API_KEY reloaded",{old_key_length:e,new_key_length:this.api_key?this.api_key.length:0})}promote_to_primary(){if(!this.is_secondary)throw new Error("Node is not configured as secondary");this.log.info("Promoting secondary to primary"),this.server&&(this.server.close(()=>{this.log.info("Sync receiver server stopped for primary promotion")}),this.server=null),this.is_secondary=!1,this.log.info("Node promoted to primary - sync receiver disabled")}async shutdown(){if(this.log.info("Shutting down sync receiver"),this.server)return new Promise(e=>{this.server.close(()=>{this.log.info("Sync receiver server closed"),e()})});this.log.info("Sync receiver shutdown complete")}}let n=null;const E=()=>(n||(n=new x),n),$=async()=>{await E().initialize()},j=async()=>{n&&(await n.shutdown(),n=null)};export{E as get_sync_receiver,$ as initialize_sync_receiver,j as shutdown_sync_receiver};
package/full_debug_test_runner.js
ADDED
@@ -0,0 +1,197 @@
+ #!/usr/bin/env node
+
+ /**
+  * @fileoverview Full debug test runner that mimics the original test runner
+  * but with enhanced exception tracking to identify problematic tests.
+  */
+
+ import { spawn } from 'child_process';
+
+ // Track uncaught exceptions with detailed context
+ const uncaught_exceptions = [];
+ let current_test_phase = 'startup';
+ let test_start_time = null;
+
+ // Enhanced exception handlers that log but don't exit
+ process.on('uncaughtException', (error) => {
+   const exception_info = {
+     type: 'uncaughtException',
+     phase: current_test_phase,
+     message: error.message,
+     stack: error.stack,
+     timestamp: new Date().toISOString(),
+     elapsed_ms: test_start_time ? Date.now() - test_start_time : 0
+   };
+
+   uncaught_exceptions.push(exception_info);
+
+   console.error(`\nš„ UNCAUGHT EXCEPTION #${uncaught_exceptions.length}:`);
+   console.error(`š Phase: ${current_test_phase}`);
+   console.error(`ā° Elapsed: ${exception_info.elapsed_ms}ms`);
+   console.error(`š„ Error: ${error.message}`);
+   console.error(`š Stack (first 5 lines):`);
+   const stack_lines = error.stack.split('\n').slice(0, 5);
+   stack_lines.forEach(line => console.error(` ${line}`));
+   console.error(`ā° Time: ${exception_info.timestamp}\n`);
+ });
+
+ process.on('unhandledRejection', (reason, promise) => {
+   const exception_info = {
+     type: 'unhandledRejection',
+     phase: current_test_phase,
+     reason: reason?.toString() || 'Unknown reason',
+     stack: reason?.stack || 'No stack available',
+     timestamp: new Date().toISOString(),
+     elapsed_ms: test_start_time ? Date.now() - test_start_time : 0
+   };
+
+   uncaught_exceptions.push(exception_info);
+
+   console.error(`\nš„ UNHANDLED REJECTION #${uncaught_exceptions.length}:`);
+   console.error(`š Phase: ${current_test_phase}`);
+   console.error(`ā° Elapsed: ${exception_info.elapsed_ms}ms`);
+   console.error(`š„ Reason: ${reason}`);
+   console.error(`š Stack (first 5 lines):`);
+   const stack_lines = (reason?.stack || 'No stack available').split('\n').slice(0, 5);
+   stack_lines.forEach(line => console.error(` ${line}`));
+   console.error(`ā° Time: ${exception_info.timestamp}\n`);
+ });
+
+ /**
+  * Runs the full test suite exactly like the original test runner.
+  * @returns {Promise<number>} Exit code
+  */
+ const run_full_test_suite_debug = () => {
+   return new Promise((resolve) => {
+     current_test_phase = 'full-test-suite';
+     test_start_time = Date.now();
+
+     console.log(`š Running FULL TEST SUITE with debug tracking`);
+     console.log(`š This mimics the exact command: npm test`);
+     console.log(`š» Command: ./node_modules/.bin/ava --serial --verbose tests/client/**/*.test.js tests/server/**/*.test.js`);
+     console.log(`š§ NODE_OPTIONS: --expose-gc --max-old-space-size=4096`);
+     console.log(`ā° Started at: ${new Date().toISOString()}\n`);
+
+     const command = './node_modules/.bin/ava';
+     const args = ['--serial', '--verbose', 'tests/client/**/*.test.js', 'tests/server/**/*.test.js'];
+
+     const child = spawn(command, args, {
+       stdio: 'pipe', // Capture output so we can track progress
+       env: {
+         ...process.env,
+         NODE_ENV: 'test',
+         NODE_OPTIONS: '--expose-gc --max-old-space-size=4096'
+       }
+     });
+
+     let output_buffer = '';
+     let test_count = 0;
+
+     child.stdout.on('data', (data) => {
+       const text = data.toString();
+       output_buffer += text;
+       process.stdout.write(text);
+
+       // Track test progress
+       const test_matches = text.match(/ā/g);
+       if (test_matches) {
+         test_count += test_matches.length;
+         current_test_phase = `test-${test_count}`;
+       }
+     });
+
+     child.stderr.on('data', (data) => {
+       const text = data.toString();
+       output_buffer += text;
+       process.stderr.write(text);
+     });
+
+     child.on('close', (code) => {
+       const elapsed = Date.now() - test_start_time;
+
+       console.log(`\nā Full test suite completed with exit code: ${code}`);
+       console.log(`ā° Total elapsed: ${elapsed}ms`);
+       console.log(`š Total tests detected: ${test_count}`);
+
+       // Exception analysis
+       console.log(`\nš UNCAUGHT EXCEPTION ANALYSIS:`);
+       console.log(`Total exceptions detected: ${uncaught_exceptions.length}`);
+
+       if (uncaught_exceptions.length > 0) {
+         console.log('\nš„ Exception Timeline:');
+
+         uncaught_exceptions.forEach((exc, index) => {
+           console.log(`\nException #${index + 1}:`);
+           console.log(` Type: ${exc.type}`);
+           console.log(` Phase: ${exc.phase}`);
+           console.log(` Elapsed: ${exc.elapsed_ms}ms`);
+           console.log(` Message: ${exc.message}`);
+           console.log(` Time: ${exc.timestamp}`);
+
+           if (exc.stack) {
+             console.log(` Key Stack Lines:`);
+             const stack_lines = exc.stack.split('\n')
+               .filter(line => line.includes('db/src/') || line.includes('db/tests/'))
+               .slice(0, 3);
+             stack_lines.forEach(line => console.log(` ${line.trim()}`));
+           }
+         });
+
+         // Try to correlate with test timing
+         console.log('\nš Exception Timing Analysis:');
+         uncaught_exceptions.forEach((exc, index) => {
+           const test_number_estimate = Math.floor((exc.elapsed_ms / elapsed) * test_count);
+           console.log(` Exception #${index + 1} occurred around test #${test_number_estimate} (${exc.elapsed_ms}ms elapsed)`);
+         });
+       }
+
+       resolve(code);
+     });
+
+     child.on('error', (error) => {
+       console.error(`\nā Full test suite failed: ${error.message}`);
+       resolve(1);
+     });
+   });
+ };
+
+ /**
+  * Main execution function.
+  */
+ const main = async () => {
+   const args = process.argv.slice(2);
+
+   if (args.includes('--help') || args.includes('-h')) {
+     console.log(`
+ š Full Debug Test Runner for JoystickDB
+
+ Usage: node full_debug_test_runner.js
+
+ This runner executes the complete test suite exactly like 'npm test'
+ but captures uncaught exceptions and unhandled rejections with detailed
+ timing and context information to identify problematic tests.
+     `);
+     process.exit(0);
+   }
+
+   console.log('šÆ Selected strategy: Full Test Suite Debug Analysis');
+
+   const exit_code = await run_full_test_suite_debug();
+
+   if (exit_code === 0 && uncaught_exceptions.length === 0) {
+     console.log(`\nš All tests passed with NO uncaught exceptions!`);
+   } else if (exit_code === 0 && uncaught_exceptions.length > 0) {
+     console.log(`\nā ļø All tests passed but ${uncaught_exceptions.length} uncaught exceptions detected`);
+   } else {
+     console.log(`\nš„ Tests failed with exit code: ${exit_code}`);
+   }
+
+   process.exit(exit_code);
+ };
+
+ // Run the main function
+ main().catch(error => {
+   console.error(`\nš„ Debug runner error: ${error.message}`);
+   console.error(error.stack);
+   process.exit(1);
+ });

package/package.json
CHANGED
@@ -1,8 +1,8 @@
  {
    "name": "@joystick.js/db-canary",
    "type": "module",
-   "version": "0.0.0-canary.2275",
-   "canary_version": "0.0.0-canary.
+   "version": "0.0.0-canary.2277",
+   "canary_version": "0.0.0-canary.2276",
    "description": "JoystickDB - A minimalist database server for the Joystick framework",
    "main": "./dist/server/index.js",
    "scripts": {

package/src/server/index.js
CHANGED
@@ -42,13 +42,13 @@ import {
    stop_backup_schedule
  } from './lib/backup_manager.js';
  import {
-
-
-  } from './lib/
+   initialize_simple_sync_manager,
+   shutdown_simple_sync_manager
+ } from './lib/simple_sync_manager.js';
  import {
-
-
-  } from './lib/
+   initialize_sync_receiver,
+   shutdown_sync_receiver
+ } from './lib/sync_receiver.js';
  import { handle_database_operation, handle_admin_operation, handle_ping_operation } from './lib/operation_dispatcher.js';
  import { start_http_server, stop_http_server } from './lib/http_server.js';
  import {
@@ -326,29 +326,30 @@ const initialize_server_components = async (settings) => {
  };

  /**
-  * Initializes
+  * Initializes simple sync manager with error handling.
   * @param {Function} log - Logger function
   */
- const
+ const initialize_sync_manager_with_logging = (log) => {
    try {
-
-     log.info('
-   } catch (
-     log.warn('Failed to initialize
+     initialize_simple_sync_manager();
+     log.info('Simple sync manager initialized');
+   } catch (sync_error) {
+     log.warn('Failed to initialize simple sync manager', { error: sync_error.message });
    }
  };

  /**
-  * Initializes
+  * Initializes sync receiver with error handling.
   * @param {Function} log - Logger function
   */
- const
-
-
-
-
-
-
+ const initialize_sync_receiver_with_logging = (log) => {
+   initialize_sync_receiver()
+     .then(() => {
+       log.info('Sync receiver initialized');
+     })
+     .catch((receiver_error) => {
+       log.warn('Failed to initialize sync receiver', { error: receiver_error.message });
+     });
  };

  /**
@@ -658,8 +659,8 @@ const create_server_cleanup_function = () => {
      try {
        await stop_http_server();
        stop_backup_schedule();
-       await
-       await
+       await shutdown_simple_sync_manager();
+       await shutdown_sync_receiver();

        if (connection_manager) {
          connection_manager.shutdown();
@@ -699,8 +700,8 @@ export const create_server = async () => {
      await attempt_startup_restore(settings, log);
      await initialize_server_components(settings);

-
-
+     initialize_sync_manager_with_logging(log);
+     initialize_sync_receiver_with_logging(log);
      start_backup_scheduling(settings, log);

      connection_manager = create_server_connection_manager();

package/src/server/lib/operation_dispatcher.js
CHANGED
@@ -7,8 +7,8 @@
   */

  import { encode_message } from './tcp_protocol.js';
- import {
- import {
+ import { get_simple_sync_manager } from './simple_sync_manager.js';
+ import { get_sync_receiver } from './sync_receiver.js';
  import { check_and_grow_map_size } from './query_engine.js';
  import { performance_monitor } from './performance_monitor.js';
  import create_logger from './logger.js';
@@ -236,17 +236,17 @@ const is_write_operation = (op_type) => {
  };

  /**
-  * Queues operation for
+  * Queues operation for sync if it's a write operation.
   * @param {string} op_type - Operation type
   * @param {Object} data - Operation data
   */
- const
+ const queue_sync_if_write_operation = (op_type, data) => {
    if (!is_write_operation(op_type)) {
      return;
    }

-   const
-
+   const sync_manager = get_simple_sync_manager();
+   sync_manager.queue_sync(op_type, data.collection, data);

    setImmediate(() => check_and_grow_map_size());
  };
@@ -292,7 +292,7 @@ handle_successful_operation = (socket, op_type, data, result, start_time,
      response_size
    );

-
+   queue_sync_if_write_operation(op_type, data);
  };

  /**
@@ -364,10 +364,16 @@ export const handle_database_operation = async (socket, op_type, data, check_aut
      return;
    }

-   const
-   const
+   const sync_receiver = get_sync_receiver();
+   const should_block = sync_receiver.should_block_client_operation(op_type);

-   if (
+   if (should_block) {
+     const response = {
+       ok: 0,
+       error: 'Write operations not allowed on secondary node. Use primary node for write operations.'
+     };
+     send_encoded_response(socket, response);
+     log_operation_performance(socket.id, op_type, data.collection, 0, 'error', 'Write operation blocked on secondary', raw_data_size, 0);
      return;
    }

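The dispatcher change above means a secondary node now rejects client writes with `{ ok: 0, error: 'Write operations not allowed on secondary node. ...' }` instead of forwarding them, while reads (find, find_one, count_documents, get_indexes) keep working. A hedged sketch of how a caller might surface that rejection — `send_operation` and its transport are assumptions, only the response shape comes from the diff above:

// Hypothetical client-side guard; only the { ok: 0, error: ... } rejection
// shape is taken from the dispatcher diff above.
const write_via_primary = async (send_operation, primary_node, op_type, payload) => {
  const response = await send_operation(primary_node, op_type, payload);

  if (response.ok === 0 && /not allowed on secondary node/.test(response.error || '')) {
    // The node we targeted has since been demoted to secondary; a caller could
    // refresh its topology (for example via the get_sync_status admin action) and retry.
    throw new Error(`Targeted node is a secondary: ${response.error}`);
  }

  return response;
};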
package/src/server/lib/operations/admin.js
CHANGED
@@ -28,8 +28,8 @@ import {
    restore_backup,
    cleanup_old_backups
  } from '../backup_manager.js';
- import {
- import {
+ import { get_simple_sync_manager } from '../simple_sync_manager.js';
+ import { get_sync_receiver } from '../sync_receiver.js';
  import create_logger from '../logger.js';
  import { performance_monitor } from '../performance_monitor.js';

@@ -869,46 +869,79 @@ export default async (admin_action, data = {}, connection_manager, authenticated
        result = { indexes: get_indexes(data.database || 'default', data.collection) };
        break;

-     case '
-       const
-
+     case 'get_sync_status':
+       const sync_manager = get_simple_sync_manager();
+       const sync_receiver = get_sync_receiver();
+       result = {
+         sync_manager: sync_manager.get_sync_status(),
+         sync_receiver: sync_receiver.get_sync_status()
+       };
        break;

-     case '
-       if (!
-         throw new Error('
+     case 'update_secondary_nodes':
+       if (!Array.isArray(data.secondary_nodes)) {
+         throw new Error('secondary_nodes array is required for update_secondary_nodes operation');
        }
-       const
-
-
-
-
-
-
-       });
+       const update_sync_manager = get_simple_sync_manager();
+       update_sync_manager.update_secondary_nodes(data.secondary_nodes);
+       result = {
+         success: true,
+         message: 'Secondary nodes updated successfully',
+         secondary_nodes: data.secondary_nodes
+       };
        break;

-     case '
-
-
-       }
-       const remove_replication_manager = get_replication_manager();
-       result = remove_replication_manager.remove_secondary(data.secondary_id);
+     case 'force_sync':
+       const force_sync_manager = get_simple_sync_manager();
+       result = await force_sync_manager.force_sync();
        break;

-     case '
-
-
+     case 'set_primary_role':
+       if (typeof data.primary !== 'boolean') {
+         throw new Error('primary boolean value is required for set_primary_role operation');
+       }
+
+       if (data.primary) {
+         // Promoting to primary - stop sync receiver, start sync manager
+         const receiver = get_sync_receiver();
+         receiver.promote_to_primary();
+         result = {
+           success: true,
+           message: 'Node promoted to primary successfully',
+           role: 'primary'
+         };
+       } else {
+         // Demoting to secondary - this would require restart with new config
+         result = {
+           success: false,
+           message: 'Demoting primary to secondary requires server restart with updated configuration',
+           role: 'primary'
+         };
+       }
        break;

-     case '
-       const
-
+     case 'reload_sync_key':
+       const key_sync_receiver = get_sync_receiver();
+       if (!key_sync_receiver.is_secondary) {
+         throw new Error('reload_sync_key can only be used on secondary nodes');
+       }
+       await key_sync_receiver.reload_api_key();
+       result = {
+         success: true,
+         message: 'API_KEY reloaded successfully'
+       };
        break;

-     case '
-       const
-
+     case 'get_secondary_auth_status':
+       const auth_sync_manager = get_simple_sync_manager();
+       const auth_status = auth_sync_manager.get_sync_status();
+       result = {
+         secondary_count: auth_status.secondary_count,
+         auth_failures: auth_status.stats.auth_failures,
+         successful_syncs: auth_status.stats.successful_syncs,
+         failed_syncs: auth_status.stats.failed_syncs,
+         secondaries: auth_status.secondaries
+       };
        break;

      default:
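The new admin cases above give operators a small control surface for the sync system. A sketch of the payloads each action appears to expect, based only on the action names and field checks visible in this diff — how these admin operations are transported to handle_admin_operation is not shown here, and the shape of a secondary_nodes entry is an assumption:

// Example admin_action / data pairs accepted by the switch above.
// Only the field names validated in the diff are shown; values are illustrative,
// and the host/port entry shape for secondary_nodes is assumed.
const sync_admin_examples = [
  { admin_action: 'get_sync_status', data: {} },
  { admin_action: 'update_secondary_nodes', data: { secondary_nodes: [{ host: '10.0.0.2', port: 1985 }] } },
  { admin_action: 'force_sync', data: {} },
  { admin_action: 'set_primary_role', data: { primary: true } },  // primary: false returns a "requires restart" result
  { admin_action: 'reload_sync_key', data: {} },                  // secondary nodes only
  { admin_action: 'get_secondary_auth_status', data: {} }
];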