@joystick.js/db-canary 0.0.0-canary.2275 → 0.0.0-canary.2276
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -104
- package/debug_test_runner.js +208 -0
- package/dist/server/index.js +1 -1
- package/dist/server/lib/operation_dispatcher.js +1 -1
- package/dist/server/lib/operations/admin.js +1 -1
- package/dist/server/lib/simple_sync_manager.js +1 -0
- package/dist/server/lib/sync_receiver.js +1 -0
- package/full_debug_test_runner.js +197 -0
- package/package.json +2 -2
- package/src/server/index.js +25 -24
- package/src/server/lib/operation_dispatcher.js +16 -10
- package/src/server/lib/operations/admin.js +64 -31
- package/src/server/lib/simple_sync_manager.js +444 -0
- package/src/server/lib/sync_receiver.js +461 -0
- package/tests/server/lib/simple_sync_system.test.js +124 -0
- package/dist/server/lib/replication_manager.js +0 -1
- package/dist/server/lib/write_forwarder.js +0 -1
- package/src/server/lib/replication_manager.js +0 -727
- package/src/server/lib/write_forwarder.js +0 -636
- package/tests/server/lib/replication_manager.test.js +0 -202
- package/tests/server/lib/write_forwarder.test.js +0 -258
package/README.md
CHANGED
@@ -213,11 +213,13 @@ export JOYSTICK_DB_SETTINGS='{
       "window_ms": 300000
     }
   },
-  "
-
-  "
-  "
-
+  "primary": true,
+  "secondary_nodes": [
+    { "ip": "192.168.1.100" },
+    { "ip": "192.168.1.101" }
+  ],
+  "secondary_sync_key": "/path/to/sync.key",
+  "sync_port": 1985,
   "backup": {
     "enabled": true,
     "schedule": "0 2 * * *",
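The replication block in `JOYSTICK_DB_SETTINGS` is now just four top-level fields. As a rough illustration (not part of the package), a deployment script could sanity-check the parsed settings before starting the server; `validate_sync_settings` below is a hypothetical helper.

```javascript
// Illustration only: sanity-checking the simplified replication fields after
// parsing JOYSTICK_DB_SETTINGS. validate_sync_settings is a hypothetical
// helper, not part of the package.
const settings = JSON.parse(process.env.JOYSTICK_DB_SETTINGS || '{}');

const validate_sync_settings = (config) => {
  const problems = [];
  if (config.primary === true && !Array.isArray(config.secondary_nodes)) {
    problems.push('a primary node should list secondary_nodes to sync to');
  }
  if ('primary' in config && !config.secondary_sync_key) {
    problems.push('secondary_sync_key (path to the shared sync key file) is missing');
  }
  if (config.sync_port !== undefined && !Number.isInteger(config.sync_port)) {
    problems.push('sync_port should be an integer, e.g. 1985');
  }
  return problems;
};

console.log(validate_sync_settings(settings)); // [] when the replication block looks sane
```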
@@ -931,67 +933,58 @@ The HTTP API returns consistent error responses:
 
 ## Replication
 
-
+JoystickDB features a simplified primary/secondary replication system for high availability and read scaling. The replication system maintains database separation across all nodes and uses API key authentication for secure sync operations.
 
-### How Replication Works
+### How Simplified Replication Works
 
-- **Primary Node**:
-- **Secondary Nodes**: Read-only copies that
-- **
-- **
-- **
+- **Primary Node**: Main database server that accepts both read and write operations for all databases
+- **Secondary Nodes**: Read-only copies that receive synchronized data from the primary for all databases
+- **API Key Authentication**: All sync operations between nodes use API key authentication for security
+- **Read-only Secondaries**: Secondary nodes block write operations from clients, only accepting authenticated sync from the primary
+- **Manual Failover**: Admin operations allow promoting a secondary to primary when needed
+- **Database Isolation**: Replication maintains complete database separation across all nodes
 
-### Setting Up Replication
+### Setting Up Simplified Replication
 
 #### 1. Configure Primary Node
 
 ```bash
-# Primary server configuration
+# Primary server configuration
 export JOYSTICK_DB_SETTINGS='{
   "port": 1983,
-  "
-
-  "
-  "
-
-
-
-  "id": "secondary-1",
-  "ip": "192.168.1.100",
-  "port": 1984,
-  "private_key": "base64-encoded-private-key",
-  "enabled": true
-  },
-  {
-  "id": "secondary-2",
-  "ip": "192.168.1.101",
-  "port": 1984,
-  "private_key": "base64-encoded-private-key",
-  "enabled": true
-  }
-  ]
-}
+  "primary": true,
+  "secondary_nodes": [
+    { "ip": "192.168.1.100" },
+    { "ip": "192.168.1.101" }
+  ],
+  "secondary_sync_key": "/path/to/sync.key",
+  "sync_port": 1985
 }'
 ```
 
 #### 2. Configure Secondary Nodes
 
 ```bash
-# Secondary server configuration
+# Secondary server configuration
 export JOYSTICK_DB_SETTINGS='{
-  "port":
-  "
-
-
-  "ip": "192.168.1.10",
-  "port": 1983,
-  "private_key": "base64-encoded-private-key"
-  }
-}
+  "port": 1983,
+  "primary": false,
+  "secondary_sync_key": "/path/to/sync.key",
+  "sync_port": 1985
 }'
 ```
 
-
+#### 3. Create Sync Key File
+
+The sync key file contains the API key for authenticated sync operations:
+
+```bash
+# Generate and save sync key (same key for all nodes)
+echo "your-secure-sync-api-key" > /path/to/sync.key
+chmod 600 /path/to/sync.key
+```
+
+### Managing Simplified Replication
 
 ```javascript
 // Connect to primary node
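For context on what the primary actually sends over `sync_port`, the minified `dist/server/lib/simple_sync_manager.js` included in this version builds an `operation_sync` message per write. The sketch below mirrors those field names; the helper name and sample values are illustrative, not part of the published API.

```javascript
// Sketch of the per-operation sync message a primary sends to each secondary on
// sync_port, based on the minified dist/server/lib/simple_sync_manager.js bundled
// in this release. Field names come from that bundle; the helper name and the
// sample values below are illustrative.
const build_sync_message = (api_key, operation, collection, data, sequence) => ({
  type: 'operation_sync',
  api_key,          // shared sync API key (the value stored in the sync key file)
  sequence,         // incrementing sequence number kept by the primary
  timestamp: Date.now(),
  operation,        // e.g. 'insert_one', 'update_one', 'delete_many'
  collection,
  data,             // payload of the original write operation
});

console.log(build_sync_message('your-secure-sync-api-key', 'insert_one', 'users', { document: { name: 'Ada' } }, 1));
```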
@@ -1003,38 +996,35 @@ const client = joystickdb.client({
   }
 });
 
-// Check
-const
-
-console.log('📡 Connected secondaries:', status.connected_secondaries);
-console.log('📊 Queue length:', status.queue_length);
-
-// Add a new secondary node dynamically
-await client.add_secondary({
-  id: 'secondary-3',
-  ip: '192.168.1.102',
-  port: 1984,
-  private_key: 'base64-encoded-private-key'
+// Check sync system status (admin operation)
+const sync_status = await client.admin_operation({
+  operation: 'get_sync_status'
 });
 
-
-
-console.log('
-console.log('📈 Total secondaries:', health.total_secondaries);
+console.log('🔄 Sync enabled:', sync_status.enabled);
+console.log('📡 Connected secondaries:', sync_status.connected_nodes);
+console.log('📊 Pending operations:', sync_status.pending_count);
 
-//
-await client.
-
+// Promote secondary to primary (manual failover)
+const promote_result = await client.admin_operation({
+  operation: 'promote_to_primary',
+  secondary_ip: '192.168.1.100'
+});
+
+console.log('🔄 Failover completed:', promote_result.success);
+
+// Force sync to all secondaries
+const sync_result = await client.admin_operation({
+  operation: 'force_sync'
+});
 
-
-await client.remove_secondary('secondary-1');
-console.log('➖ Secondary removed');
+console.log('🔄 Manual sync completed:', sync_result.synced_nodes);
 ```
 
-### Using Replication for Read Scaling
+### Using Simplified Replication for Read Scaling
 
 ```javascript
-// Connect to primary for writes
+// Connect to primary for writes (all databases supported)
 const primary = joystickdb.client({
   host: '192.168.1.10',
   port: 1983,
@@ -1044,17 +1034,17 @@ const primary = joystickdb.client({
   }
 });
 
-// Connect to secondary for reads (
+// Connect to secondary for reads only (all databases replicated)
 const secondary = joystickdb.client({
   host: '192.168.1.100',
-  port:
+  port: 1983,
   authentication: {
     username: 'admin',
     password: 'your-password'
   }
 });
 
-// Write to primary (
+// Write to primary (automatically synced to secondaries)
 const user_db = primary.db('user_management');
 const inventory_db = primary.db('inventory');
 
@@ -1068,7 +1058,7 @@ await inventory_db.collection('products').insert_one({
   price: 99.99
 });
 
-// Read from secondary (reduces load on primary
+// Read from secondary (reduces load on primary)
 const secondary_user_db = secondary.db('user_management');
 const secondary_inventory_db = secondary.db('inventory');
 
@@ -1077,6 +1067,13 @@ const products = await secondary_inventory_db.collection('products').find({});
 
 console.log('👥 Users from secondary:', users.documents.length);
 console.log('📦 Products from secondary:', products.documents.length);
+
+// Note: Write operations to secondary will be rejected
+try {
+  await secondary_user_db.collection('users').insert_one({ name: 'Test' });
+} catch (error) {
+  console.log('❌ Secondary is read-only:', error.message);
+}
 ```
 
 ## Administration
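The read-scaling pattern above leaves routing to the application. A minimal sketch, assuming only the client API shown in this README (`db()`, `collection()`, `find()`): prefer the secondary for reads and fall back to the primary if it is unreachable. `read_collection` is a hypothetical helper, not a package export.

```javascript
// Hypothetical read-routing helper for the pattern shown above: reads prefer a
// secondary and fall back to the primary when the secondary read fails.
const read_collection = async (primary, secondary, database, collection, filter = {}) => {
  try {
    return await secondary.db(database).collection(collection).find(filter);
  } catch (error) {
    console.log('⚠️ Secondary read failed, falling back to primary:', error.message);
    return await primary.db(database).collection(collection).find(filter);
  }
};

// Usage with the clients created above:
// const users = await read_collection(primary, secondary, 'user_management', 'users');
```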
@@ -1315,25 +1312,13 @@ setInterval(test_backup, 30 * 24 * 60 * 60 * 1000); // 30 days
     }
   },
 
-  "
-
-  "
-  "
-
-
-
-  },
-
-  "write_forwarder": {
-    "enabled": false,
-    "primary": {
-      "ip": "127.0.0.1",
-      "port": 1983,
-      "private_key": "base64-encoded-key"
-    },
-    "timeout_ms": 5000,
-    "retry_attempts": 3
-  },
+  "primary": true,
+  "secondary_nodes": [
+    { "ip": "192.168.1.100" },
+    { "ip": "192.168.1.101" }
+  ],
+  "secondary_sync_key": "/path/to/sync.key",
+  "sync_port": 1985,
 
   "s3": {
     "region": "us-east-1",
@@ -1379,9 +1364,10 @@ JOYSTICKDB_DATABASE_PATH=./data
 JOYSTICKDB_DATABASE_AUTO_MAP_SIZE=true
 JOYSTICKDB_DATABASE_MAX_DBS=100
 
-# Replication settings
-
-
+# Simplified Replication settings
+JOYSTICKDB_PRIMARY=true
+JOYSTICKDB_SECONDARY_SYNC_KEY=/path/to/sync.key
+JOYSTICKDB_SYNC_PORT=1985
 
 # S3 settings
 JOYSTICKDB_S3_REGION=us-east-1
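Assuming these environment variables map one-to-one onto the corresponding `JOYSTICK_DB_SETTINGS` fields (the precedence between the two is not documented in this diff), a startup script could reconstruct the replication block like this; purely illustrative.

```javascript
// Illustrative only: assembling the simplified replication settings from the
// environment variables listed above. Assumes a one-to-one mapping onto the
// JOYSTICK_DB_SETTINGS fields shown earlier in this README.
const replication_settings = {
  primary: process.env.JOYSTICKDB_PRIMARY === 'true',
  secondary_sync_key: process.env.JOYSTICKDB_SECONDARY_SYNC_KEY, // e.g. /path/to/sync.key
  sync_port: parseInt(process.env.JOYSTICKDB_SYNC_PORT || '1985', 10),
};

console.log(replication_settings);
```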
@@ -1675,13 +1661,10 @@ const monitor_performance = async () => {
 #### Auto-Indexing Management
 - `client.get_auto_index_stats()` - Get automatic indexing statistics
 
-#### Replication Management
-- `client.
-- `client.
-- `client.
-- `client.sync_secondaries()` - Force synchronization with secondaries
-- `client.get_secondary_health()` - Get health status of secondary nodes
-- `client.get_forwarder_status()` - Get write forwarder status (for secondary nodes)
+#### Simplified Replication Management (via Admin Operations)
+- `client.admin_operation({ operation: 'get_sync_status' })` - Get sync system status and statistics
+- `client.admin_operation({ operation: 'promote_to_primary', secondary_ip: 'ip' })` - Promote secondary to primary (manual failover)
+- `client.admin_operation({ operation: 'force_sync' })` - Force synchronization with all secondaries
 
 #### Administration
 - `client.get_stats()` - Get comprehensive server statistics
package/debug_test_runner.js
ADDED
@@ -0,0 +1,208 @@
#!/usr/bin/env node

/**
 * @fileoverview Debug test runner for identifying uncaught exceptions in JoystickDB tests.
 * This runner provides detailed exception tracking to identify problematic tests.
 */

import { spawn } from 'child_process';
import { existsSync } from 'fs';

// Track uncaught exceptions with detailed context
const uncaught_exceptions = [];
let current_test_context = 'startup';

// Enhanced exception handlers that log but don't exit
process.on('uncaughtException', (error) => {
  const exception_info = {
    type: 'uncaughtException',
    context: current_test_context,
    message: error.message,
    stack: error.stack,
    timestamp: new Date().toISOString()
  };

  uncaught_exceptions.push(exception_info);

  console.error(`\n🔥 UNCAUGHT EXCEPTION #${uncaught_exceptions.length}:`);
  console.error(`📍 Context: ${current_test_context}`);
  console.error(`💥 Error: ${error.message}`);
  console.error(`📚 Stack: ${error.stack}`);
  console.error(`⏰ Time: ${exception_info.timestamp}\n`);
});

process.on('unhandledRejection', (reason, promise) => {
  const exception_info = {
    type: 'unhandledRejection',
    context: current_test_context,
    reason: reason?.toString() || 'Unknown reason',
    stack: reason?.stack || 'No stack available',
    timestamp: new Date().toISOString()
  };

  uncaught_exceptions.push(exception_info);

  console.error(`\n🔥 UNHANDLED REJECTION #${uncaught_exceptions.length}:`);
  console.error(`📍 Context: ${current_test_context}`);
  console.error(`💥 Reason: ${reason}`);
  console.error(`📚 Stack: ${reason?.stack || 'No stack available'}`);
  console.error(`⏰ Time: ${exception_info.timestamp}\n`);
});

/**
 * Runs a specific test file or pattern with enhanced debugging.
 * @param {string} test_pattern - Test pattern to run
 * @param {string} context_name - Context name for tracking
 * @returns {Promise<number>} Exit code
 */
const run_debug_test = (test_pattern, context_name) => {
  return new Promise((resolve) => {
    current_test_context = context_name;

    console.log(`\n🔍 Debug Test: ${context_name}`);
    console.log(`📁 Pattern: ${test_pattern}`);
    console.log(`⏰ Started: ${new Date().toISOString()}`);

    const command = './node_modules/.bin/ava';
    const args = ['--serial', '--verbose', test_pattern];

    const child = spawn(command, args, {
      stdio: 'inherit',
      env: {
        ...process.env,
        NODE_ENV: 'test',
        NODE_OPTIONS: '--expose-gc --max-old-space-size=4096'
      }
    });

    child.on('close', (code) => {
      console.log(`\n✅ Debug test completed: ${context_name} (exit code: ${code})`);
      console.log(`📊 Exceptions during this test: ${uncaught_exceptions.filter(e => e.context === context_name).length}`);
      resolve(code);
    });

    child.on('error', (error) => {
      console.error(`\n❌ Debug test failed: ${context_name} - ${error.message}`);
      resolve(1);
    });
  });
};

/**
 * Runs tests in isolated groups to identify exception sources.
 */
const run_isolated_debug_tests = async () => {
  console.log('🔍 Starting isolated debug test analysis...\n');

  const test_groups = [
    {
      name: 'client-tests',
      pattern: 'tests/client/**/*.test.js',
      description: 'Client-side tests'
    },
    {
      name: 'server-basic-tests',
      pattern: 'tests/server/index.test.js',
      description: 'Basic server tests'
    },
    {
      name: 'server-lib-tests',
      pattern: 'tests/server/lib/**/*.test.js',
      description: 'Server library tests (including sync system)'
    },
    {
      name: 'server-integration-tests',
      pattern: 'tests/server/integration/**/*.test.js',
      description: 'Server integration tests'
    },
    {
      name: 'sync-system-only',
      pattern: 'tests/server/lib/simple_sync_system.test.js',
      description: 'Sync system tests only'
    }
  ];

  for (const group of test_groups) {
    if (existsSync(group.pattern.replace('**/*.test.js', '').replace('*.test.js', ''))) {
      await run_debug_test(group.pattern, group.name);

      // Wait between test groups
      await new Promise(resolve => setTimeout(resolve, 2000));
    } else {
      console.log(`⚠️ Skipping ${group.name} - path not found`);
    }
  }

  // Summary report
  console.log('\n📊 EXCEPTION ANALYSIS SUMMARY:');
  console.log(`Total uncaught exceptions detected: ${uncaught_exceptions.length}`);

  if (uncaught_exceptions.length > 0) {
    console.log('\n🔥 Exception breakdown by test context:');
    const by_context = {};

    uncaught_exceptions.forEach(exc => {
      by_context[exc.context] = (by_context[exc.context] || 0) + 1;
    });

    Object.entries(by_context).forEach(([context, count]) => {
      console.log(`  ${context}: ${count} exceptions`);
    });

    console.log('\n📝 Detailed exception information:');
    uncaught_exceptions.forEach((exc, index) => {
      console.log(`\nException #${index + 1}:`);
      console.log(`  Type: ${exc.type}`);
      console.log(`  Context: ${exc.context}`);
      console.log(`  Message: ${exc.message}`);
      console.log(`  Time: ${exc.timestamp}`);
      if (exc.stack) {
        console.log(`  Stack (first 3 lines):`);
        const stack_lines = exc.stack.split('\n').slice(0, 3);
        stack_lines.forEach(line => console.log(`    ${line}`));
      }
    });
  } else {
    console.log('🎉 No uncaught exceptions detected in isolated tests!');
  }
};

/**
 * Main execution function.
 */
const main = async () => {
  const args = process.argv.slice(2);

  if (args.includes('--help') || args.includes('-h')) {
    console.log(`
🔍 Debug Test Runner for JoystickDB

Usage: node debug_test_runner.js [isolated]

Commands:
  isolated  - Run tests in isolated groups to identify exception sources

This runner captures uncaught exceptions and unhandled rejections
without terminating the process, allowing us to identify which
specific tests are causing problems.
    `);
    process.exit(0);
  }

  if (args[0] === 'isolated' || args.length === 0) {
    await run_isolated_debug_tests();
  } else {
    console.error('❌ Unknown command. Use --help for usage information.');
    process.exit(1);
  }

  console.log('\n🔍 Debug analysis complete.');
  process.exit(0);
};

// Run the main function
main().catch(error => {
  console.error(`\n💥 Debug runner error: ${error.message}`);
  console.error(error.stack);
  process.exit(1);
});
package/dist/server/index.js
CHANGED
@@ -1 +1 @@
-
import S from"net";import{decode as k}from"msgpackr";import x from"./lib/op_types.js";import g from"./lib/safe_json_parse.js";import{load_settings as
+
import S from"net";import{decode as k}from"msgpackr";import x from"./lib/op_types.js";import g from"./lib/safe_json_parse.js";import{load_settings as m,get_settings as l,get_port_configuration as c}from"./lib/load_settings.js";import{send_error as _}from"./lib/send_response.js";import{start_cluster as T}from"./cluster/index.js";import h from"./lib/logger.js";import{initialize_database as z,cleanup_database as O}from"./lib/query_engine.js";import{create_message_parser as I,encode_message as y}from"./lib/tcp_protocol.js";import{create_connection_manager as R}from"./lib/connection_manager.js";import{shutdown_write_queue as C}from"./lib/write_queue.js";import{setup_authentication as E,verify_password as q,get_client_ip as N,is_rate_limited as $,initialize_auth_manager as A,reset_auth_state as B}from"./lib/auth_manager.js";import{initialize_api_key_manager as D}from"./lib/api_key_manager.js";import{is_development_mode as v,display_development_startup_message as F,warn_undefined_node_env as J}from"./lib/development_mode.js";import{restore_backup as K,start_backup_schedule as P,stop_backup_schedule as j}from"./lib/backup_manager.js";import{initialize_simple_sync_manager as G,shutdown_simple_sync_manager as M}from"./lib/simple_sync_manager.js";import{initialize_sync_receiver as H,shutdown_sync_receiver as U}from"./lib/sync_receiver.js";import{handle_database_operation as V,handle_admin_operation as W,handle_ping_operation as Y}from"./lib/operation_dispatcher.js";import{start_http_server as L,stop_http_server as Q}from"./lib/http_server.js";import{create_recovery_token as X,initialize_recovery_manager as w,reset_recovery_state as Z}from"./lib/recovery_manager.js";import{has_settings as ee}from"./lib/load_settings.js";const i=new Set;let a=null;const re=e=>e&&e.password,d=e=>({ok:0,error:e}),te=()=>({ok:1,version:"1.0.0",message:"Authentication successful"}),u=(e,r)=>{const t=y(r);e.write(t),e.end()},p=(e,r)=>{const t=y(r);e.write(t)},ne=async(e,r={})=>{if(!re(r)){const t=d("Authentication operation requires password to be set in data.");u(e,t);return}try{const t=N(e);if($(t)){const o=d("Too many failed attempts. Please try again later.");u(e,o);return}if(!await q(r.password,t)){const o=d("Authentication failed");u(e,o);return}i.add(e.id);const s=te();p(e,s)}catch(t){const n=d(`Authentication error: ${t.message}`);u(e,n)}},se=e=>({ok:1,password:e,message:"Authentication setup completed successfully. 
Save this password - it will not be shown again."}),oe=e=>({ok:0,error:`Setup error: ${e}`}),ae=async(e,r={})=>{try{const t=E(),n=se(t);p(e,n)}catch(t){const n=oe(t.message);p(e,n)}},ie=(e="")=>{if(!e)throw new Error("Must pass an op type for operation.");return x.includes(e)},ce=e=>g(e),_e=e=>{try{const r=k(e);return typeof r=="string"?g(r):r}catch{return null}},or=e=>{try{return typeof e=="string"?ce(e):Buffer.isBuffer(e)?_e(e):e}catch{return null}},f=e=>v()?!0:i.has(e.id),pe=async(e,r)=>{if(e?.restore_from)try{r.info("Startup restore requested",{backup_filename:e.restore_from});const t=await K(e.restore_from);r.info("Startup restore completed",{backup_filename:e.restore_from,duration_ms:t.duration_ms});const n={...e};delete n.restore_from,process.env.JOYSTICK_DB_SETTINGS=JSON.stringify(n),m(),r.info("Removed restore_from from settings after successful restore")}catch(t){r.error("Startup restore failed",{backup_filename:e.restore_from,error:t.message}),r.info("Continuing with fresh database after restore failure")}},de=()=>{try{return m(),l()}catch{return null}},ue=async e=>{const{tcp_port:r}=c(),t=e?.data_path||`./.joystick/data/joystickdb_${r}`;z(t),A(),await D(),w()},me=e=>{try{G(),e.info("Simple sync manager initialized")}catch(r){e.warn("Failed to initialize simple sync manager",{error:r.message})}},le=e=>{H().then(()=>{e.info("Sync receiver initialized")}).catch(r=>{e.warn("Failed to initialize sync receiver",{error:r.message})})},fe=(e,r)=>{if(e?.s3)try{P(),r.info("Backup scheduling started")}catch(t){r.warn("Failed to start backup scheduling",{error:t.message})}},ge=async(e,r)=>{try{const t=await L(e);return t&&r.info("HTTP server started",{http_port:e}),t}catch(t){return r.warn("Failed to start HTTP server",{error:t.message}),null}},he=()=>{if(v()){const{tcp_port:e,http_port:r}=c();F(e,r)}else J()},ye=()=>R({max_connections:1e3,idle_timeout:600*1e3,request_timeout:5*1e3}),ve=async(e,r,t,n)=>{a.update_activity(e.id);try{const s=t.parse_messages(r);for(const o of s)await we(e,o,r.length,n)}catch(s){n.error("Message parsing failed",{client_id:e.id,error:s.message}),_(e,{message:"Invalid message format"}),e.end()}},we=async(e,r,t,n)=>{const s=r,o=s?.op||null;if(!o){_(e,{message:"Missing operation type"});return}if(!ie(o)){_(e,{message:"Invalid operation type"});return}const b=a.create_request_timeout(e.id,o);try{await be(e,o,s,t)}finally{clearTimeout(b)}},be=async(e,r,t,n)=>{const s=t?.data||{};switch(r){case"authentication":await ne(e,s);break;case"setup":await ae(e,s);break;case"insert_one":case"update_one":case"delete_one":case"delete_many":case"bulk_write":case"find_one":case"find":case"count_documents":case"create_index":case"drop_index":case"get_indexes":await V(e,r,s,f,n,a,i);break;case"ping":Y(e);break;case"admin":await W(e,s,f,a,i);break;case"reload":await Se(e);break;default:_(e,{message:`Operation ${r} not implemented`})}},Se=async e=>{if(!f(e)){_(e,{message:"Authentication required"});return}try{const r=ke(),t=await xe(),n=Te(r,t);p(e,n)}catch(r){const t={ok:0,error:`Reload operation failed: ${r.message}`};p(e,t)}},ke=()=>{try{return l()}catch{return null}},xe=async()=>{try{return await m(),l()}catch{return{port:1983,authentication:{}}}},Te=(e,r)=>({ok:1,status:"success",message:"Configuration reloaded successfully",changes:{port_changed:e?e.port!==r.port:!1,authentication_changed:e?e.authentication?.password_hash!==r.authentication?.password_hash:!1},timestamp:new Date().toISOString()}),ze=(e,r)=>{r.info("Client 
disconnected",{socket_id:e.id}),i.delete(e.id),a.remove_connection(e.id)},Oe=(e,r,t)=>{t.error("Socket error",{socket_id:e.id,error:r.message}),i.delete(e.id),a.remove_connection(e.id)},Ie=(e,r,t)=>{e.on("data",async n=>{await ve(e,n,r,t)}),e.on("end",()=>{ze(e,t)}),e.on("error",n=>{Oe(e,n,t)})},Re=(e,r)=>{if(!a.add_connection(e))return;const t=I();Ie(e,t,r)},Ce=()=>async()=>{try{await Q(),j(),await M(),await U(),a&&a.shutdown(),i.clear(),await C(),await new Promise(e=>setTimeout(e,100)),await O(),B(),Z()}catch{}},ar=async()=>{const{create_context_logger:e}=h("server"),r=e(),t=de();await pe(t,r),await ue(t),me(r),le(r),fe(t,r),a=ye();const{http_port:n}=c();await ge(n,r),he();const s=S.createServer((o={})=>{Re(o,r)});return s.cleanup=Ce(),s},Ee=e=>{try{w();const r=X();console.log("Emergency Recovery Token Generated"),console.log(`Visit: ${r.url}`),console.log("Token expires in 10 minutes"),e.info("Recovery token generated via CLI",{expires_at:new Date(r.expires_at).toISOString()}),process.exit(0)}catch(r){console.error("Failed to generate recovery token:",r.message),e.error("Recovery token generation failed",{error:r.message}),process.exit(1)}},qe=()=>{const{tcp_port:e}=c();return{worker_count:process.env.WORKER_COUNT?parseInt(process.env.WORKER_COUNT):void 0,port:e,environment:process.env.NODE_ENV||"development"}},Ne=(e,r)=>{const{tcp_port:t,http_port:n}=c(),s=ee();r.info("Starting JoystickDB server...",{workers:e.worker_count||"auto",tcp_port:t,http_port:n,environment:e.environment,has_settings:s,port_source:s?"JOYSTICK_DB_SETTINGS":"default"})};if(import.meta.url===`file://${process.argv[1]}`){const{create_context_logger:e}=h("main"),r=e();process.argv.includes("--generate-recovery-token")&&Ee(r);const t=qe();Ne(t,r),T(t)}export{ne as authentication,ie as check_op_type,ar as create_server,or as parse_data,ae as setup};
package/dist/server/lib/operation_dispatcher.js
CHANGED
@@ -1 +1 @@
-
import{encode_message as f}from"./tcp_protocol.js";import{
+
import{encode_message as f}from"./tcp_protocol.js";import{get_simple_sync_manager as h}from"./simple_sync_manager.js";import{get_sync_receiver as w}from"./sync_receiver.js";import{check_and_grow_map_size as v}from"./query_engine.js";import{performance_monitor as x}from"./performance_monitor.js";import D from"./logger.js";import b from"./operations/insert_one.js";import y from"./operations/update_one.js";import k from"./operations/delete_one.js";import q from"./operations/delete_many.js";import A from"./operations/bulk_write.js";import I from"./operations/find_one.js";import $ from"./operations/find.js";import U from"./operations/count_documents.js";import W from"./operations/create_index.js";import C from"./operations/drop_index.js";import E from"./operations/get_indexes.js";import L from"./operations/admin.js";const{create_context_logger:d}=D("operation_dispatcher"),Z=e=>e.length>64,j=e=>["admin","config","local"].includes(e.toLowerCase()),B=e=>/^[a-zA-Z0-9_-]+$/.test(e),F=e=>!e||typeof e!="string"||Z(e)||j(e)?!1:B(e),G=()=>({ok:0,error:"Authentication required"}),H=()=>({ok:0,error:"Invalid database name. Database names must be alphanumeric with underscores/hyphens, max 64 characters, and cannot be reserved names (admin, config, local)."}),_=(e,n)=>{const o=f(n);e.write(o)},a=(e,n,o,s,t,r,c,i)=>{x.log_structured_operation(e,n,o,s,t,r,c,i)},g=(e,n,o,s,t,r,c,i,l=null)=>{const m={client_id:n,op:o,collection:s,duration_ms:t,status:r,request_size:c};r==="success"?(m.response_size=i,e.info("Database operation completed",m)):(m.error=l,e.error("Database operation failed",m))},J=async(e,n,o)=>{switch(e){case"insert_one":return await b(n,o.collection,o.document,o.options);case"update_one":return await y(n,o.collection,o.filter,o.update,o.options);case"delete_one":return await k(n,o.collection,o.filter,o.options);case"delete_many":return await q(n,o.collection,o.filter,o.options);case"bulk_write":return await A(n,o.collection,o.operations,o.options);case"find_one":return await I(n,o.collection,o.filter,o.options);case"find":return await $(n,o.collection,o.filter,o.options);case"count_documents":return await U(n,o.collection,o.filter,o.options);case"create_index":return await W(n,o.collection,o.field,o.options);case"drop_index":return await C(n,o.collection,o.field);case"get_indexes":return await E(n,o.collection);default:throw new Error(`Unsupported operation: ${e}`)}},K=(e,n)=>e==="find_one"?{ok:1,document:n}:e==="find"?{ok:1,documents:n}:e==="count_documents"?{ok:1,...n}:{ok:1,...n},M=e=>!["find","find_one","count_documents","get_indexes"].includes(e),N=(e,n)=>{if(!M(e))return;h().queue_sync(e,n.collection,n),setImmediate(()=>v())},O=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t,l=K(n,s),p=f(l).length;_(e,l),a(e.id,n,o.collection,i,"success",null,r,p),g(c,e.id,n,o.collection,i,"success",r,p),N(n,o)},P=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t;a(e.id,n,o.collection,i,"error",s.message,r,0),g(c,e.id,n,o.collection,i,"error",r,0,s.message);const l={ok:0,error:s.message};_(e,l)},pe=async(e,n,o,s,t=0,r=null,c=null)=>{const i=Date.now();if(!s(e)){const u=G();_(e,u),a(e.id,n,null,0,"error","Authentication required",t,0);return}const l=o.database||"default";if(!F(l)){const u=H();_(e,u),a(e.id,n,o.collection,0,"error","Invalid database name",t,0);return}if(w().should_block_client_operation(n)){_(e,{ok:0,error:"Write operations not allowed on secondary node. 
Use primary node for write operations."}),a(e.id,n,o.collection,0,"error","Write operation blocked on secondary",t,0);return}try{const u=await J(n,l,o);O(e,n,o,u,i,t)}catch(u){P(e,n,o,u,i,t)}},Q=()=>({ok:!1,error:"Authentication required"}),R=(e,n)=>e?{ok:1,...n}:{ok:!0,...n},S=e=>({ok:0,error:`Admin operation failed: ${e}`}),fe=async(e,n,o,s=null,t=null)=>{if(!o(e)){const r=Q();_(e,r);return}try{const r=n?.admin_action,i=await L(r,n||{},s,t),l=R(r,i);_(e,l)}catch(r){const c=S(r.message);_(e,c)}},de=e=>{const n=Date.now(),o={ok:1,response_time_ms:Date.now()-n};_(e,o)};export{fe as handle_admin_operation,pe as handle_database_operation,de as handle_ping_operation};
package/dist/server/lib/operations/admin.js
CHANGED
@@ -1 +1 @@
-
import{get_database as y}from"../query_engine.js";import{get_settings as $}from"../load_settings.js";import{get_write_queue as v}from"../write_queue.js";import{get_auth_stats as q}from"../auth_manager.js";import{get_query_statistics as E,get_auto_index_statistics as D,force_index_evaluation as O,remove_automatic_indexes as P}from"../auto_index_manager.js";import{create_index as N,drop_index as R,get_indexes as U}from"../index_manager.js";import{test_s3_connection as j,create_backup as T,list_backups as W,restore_backup as A,cleanup_old_backups as J}from"../backup_manager.js";import{get_replication_manager as b}from"../replication_manager.js";import{get_write_forwarder as B}from"../write_forwarder.js";import L from"../logger.js";import{performance_monitor as w}from"../performance_monitor.js";const{create_context_logger:h}=L("admin"),I=()=>{try{return $()}catch{return{port:1983}}},G=t=>{try{const e=t.getStats?t.getStats():{};return{pageSize:e.pageSize||0,treeDepth:e.treeDepth||0,treeBranchPages:e.treeBranchPages||0,treeLeafPages:e.treeLeafPages||0,entryCount:e.entryCount||0,mapSize:e.mapSize||0,lastPageNumber:e.lastPageNumber||0}}catch{return{error:"Could not retrieve database stats"}}},H=(t,e)=>{const o={};let r=0;try{for(const{key:n}of t.getRange())if(typeof n=="string"&&n.includes(":")&&!n.startsWith("_")){const a=n.split(":")[0];o[a]=(o[a]||0)+1,r++}}catch(n){e.warn("Could not iterate database range for stats",{error:n.message})}return{collections:o,total_documents:r}},z=()=>{const t=process.memoryUsage();return{rss:Math.round(t.rss/1024/1024),heapTotal:Math.round(t.heapTotal/1024/1024),heapUsed:Math.round(t.heapUsed/1024/1024),external:Math.round(t.external/1024/1024)}},K=t=>t.mapSize>0?Math.round(t.lastPageNumber*t.pageSize/t.mapSize*100):0,Q=t=>({uptime:Math.floor(process.uptime()),uptime_formatted:M(process.uptime()),memory_usage:t,memory_usage_raw:process.memoryUsage(),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid,cpu_usage:process.cpuUsage()}),V=(t,e,o,r)=>({total_documents:t,total_collections:Object.keys(e).length,collections:e,stats:o,map_size_usage_percent:r,disk_usage:{map_size_mb:Math.round((o.mapSize||0)/1024/1024),used_space_mb:Math.round((o.lastPageNumber||0)*(o.pageSize||0)/1024/1024)}}),X=()=>{const t=h();try{const e=y(),o=I(),r=G(e),{collections:n,total_documents:a}=H(e,t),s=z(),i=K(r);return{server:Q(s),database:V(a,n,r,i),performance:{ops_per_second:F(),avg_response_time_ms:C()}}}catch(e){throw t.error("Failed to get enhanced stats",{error:e.message}),e}},M=t=>{const e=Math.floor(t/86400),o=Math.floor(t%86400/3600),r=Math.floor(t%3600/60),n=Math.floor(t%60);return e>0?`${e}d ${o}h ${r}m ${n}s`:o>0?`${o}h ${r}m ${n}s`:r>0?`${r}m ${n}s`:`${n}s`};let k=0,S=0,Y=Date.now();const F=()=>{const t=(Date.now()-Y)/1e3;return t>0?Math.round(k/t):0},C=()=>k>0?Math.round(S/k):0,Z=t=>{k++,S+=t},ee=t=>({name:t,document_count:0,indexes:[],estimated_size_bytes:0}),te=(t,e,o)=>{const r={};let n=0;try{for(const{key:a}of t.getRange())if(typeof a=="string"&&a.includes(":")&&!a.startsWith("_")){const s=a.split(":");if(s.length>=3){const i=s[0],c=s[1];i===e&&(r[c]||(r[c]=ee(c)),r[c].document_count++,n++)}}}catch(a){o.warn("Could not iterate database range for collections",{error:a.message})}return{collections_map:r,total_documents:n}},re=(t,e,o)=>{const 
r=["admin_test","test_collection","queue_test","users","products","orders","sessions","logs","analytics","settings","another_collection","list_test","pagination_test","get_test","query_test","admin_insert_test","admin_update_test","admin_delete_test"];let n=0;for(const a of r)try{const s=`${e}:${a}:`,i=t.getRange({start:s,end:s+"\xFF"});let c=0;for(const _ of i)c++,n++;c>0&&(o[a]={name:a,document_count:c,indexes:[],estimated_size_bytes:c*100})}catch{continue}return n},oe=(t,e,o,r)=>{try{const n=`index:${e}:`,a=t.getRange({start:n,end:n+"\xFF"});for(const{key:s,value:i}of a)if(typeof s=="string"&&s.startsWith(n)){const c=s.substring(n.length),_=c.split(":")[0],u=c.split(":")[1];o[_]&&u&&(o[_].indexes.includes(u)||o[_].indexes.push(u))}}catch(n){r.warn("Could not iterate index range",{error:n.message})}},se=(t="default")=>{const e=h();try{const o=y();let{collections_map:r,total_documents:n}=te(o,t,e);Object.keys(r).length===0&&(n+=re(o,t,r)),oe(o,t,r,e);const a=Object.values(r);return{collections:a,total_collections:a.length,total_documents:n}}catch(o){throw e.error("Failed to list collections",{error:o.message}),o}},ne=(t,e={})=>{const o=h();if(!t)throw new Error("Collection name is required");try{const r=y(),{limit:n=50,skip:a=0,sort_field:s,sort_order:i="asc",database:c="default"}=e,_=[],u=`${c}:${t}:`;let m=0,g=0;for(const{key:d,value:f}of r.getRange({start:u,end:u+"\xFF"}))if(typeof d=="string"&&d.startsWith(u)){if(g<a){g++;continue}if(m>=n)break;try{const l=JSON.parse(f),p=d.substring(u.length);_.push({_id:p,...l}),m++}catch(l){o.warn("Could not parse document",{collection:t,key:d,error:l.message})}}return s&&_.length>0&&_.sort((d,f)=>{const l=d[s],p=f[s];return i==="desc"?p>l?1:p<l?-1:0:l>p?1:l<p?-1:0}),{collection:t,documents:_,count:_.length,skip:a,limit:n,has_more:m===n}}catch(r){throw o.error("Failed to list documents",{collection:t,error:r.message}),r}},ae=(t,e,o="default")=>{const r=h();if(!t||!e)throw new Error("Collection name and document ID are required");try{const n=y(),a=`${o}:${t}:${e}`,s=n.get(a);if(!s)return{found:!1,collection:t,document_id:e};const i=JSON.parse(s);return{found:!0,collection:t,document_id:e,document:{_id:e,...i}}}catch(n){throw r.error("Failed to get document",{collection:t,document_id:e,error:n.message}),n}},ce=(t,e,o,r)=>{switch(t){case"$gt":return o>e;case"$gte":return o>=e;case"$lt":return o<e;case"$lte":return o<=e;case"$ne":return o!==e;case"$in":return Array.isArray(e)&&e.includes(o);case"$regex":const n=r.$options||"";return new RegExp(e,n).test(String(o));default:return o===r}},ie=(t,e)=>Object.keys(e).every(o=>{const r=e[o],n=t[o];return typeof r=="object"&&r!==null?Object.keys(r).every(a=>{const s=r[a];return ce(a,s,n,r)}):n===r}),_e=(t,e,o,r,n)=>{try{const a=JSON.parse(e),i={_id:t.substring(o.length),...a};return ie(i,r)?i:null}catch(a){return n.warn("Could not parse document during query",{key:t,error:a.message}),null}},ue=(t,e={},o={})=>{const r=h();if(!t)throw new Error("Collection name is required");try{const n=y(),{limit:a=100,skip:s=0,database:i="default"}=o,c=[],_=`${i}:${t}:`;let u=0,m=0,g=0;for(const{key:d,value:f}of n.getRange({start:_,end:_+"\xFF"}))if(typeof d=="string"&&d.startsWith(_)){g++;const l=_e(d,f,_,e,r);if(l){if(m<s){m++;continue}if(u>=a)break;c.push(l),u++}}return{collection:t,filter:e,documents:c,count:c.length,total_examined:g,skip:s,limit:a,has_more:u===a}}catch(n){throw r.error("Failed to query documents",{collection:t,filter:e,error:n.message}),n}},le=async(t,e,o,r={})=>await(await 
import("./insert_one.js")).default(t,e,o,r),de=async(t,e,o,r,n={})=>await(await import("./update_one.js")).default(t,e,o,r,n),me=async(t,e,o,r={})=>await(await import("./delete_one.js")).default(t,e,o,r);var qe=async(t,e={},o,r)=>{const n=h(),a=Date.now();try{let s;switch(t){case"stats":const c=z();s={server:{uptime:Math.floor(process.uptime()),uptime_formatted:M(process.uptime()),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid},memory:{heap_used_mb:c.heapUsed,heap_total_mb:c.heapTotal,rss_mb:c.rss,external_mb:c.external,heap_used_percent:c.heapTotal>0?Math.round(c.heapUsed/c.heapTotal*100):0},database:{...w.get_database_stats(),map_size_mb:Math.round((w.get_database_stats()?.map_size||0)/1024/1024),used_space_mb:Math.round((w.get_database_stats()?.used_space||0)/1024/1024),usage_percent:w.get_database_stats()?.usage_percent||0},performance:{ops_per_second:F(),avg_response_time_ms:C()},system:w.get_system_stats(),connections:o?.get_stats()||{active:r?.size||0,total:r?.size||0},write_queue:v()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...q()},settings:(()=>{try{return{port:$().port||1983}}catch{return{port:1983}}})()};break;case"list_collections":s=se();break;case"list_documents":s=ne(e.collection,{limit:e.limit,skip:e.skip,sort_field:e.sort_field,sort_order:e.sort_order});break;case"get_document":s=ae(e.collection,e.document_id);break;case"query_documents":s=ue(e.collection,e.filter,{limit:e.limit,skip:e.skip});break;case"insert_document":s=await le(e.database||"default",e.collection,e.document,e.options);break;case"update_document":const _=e.document_id?{_id:e.document_id}:e.filter;s=await de(e.database||"default",e.collection,_,e.update,e.options);break;case"delete_document":const u=e.document_id?{_id:e.document_id}:e.filter;s=await me(e.database||"default",e.collection,u,e.options);break;case"test_s3_connection":s=await j();break;case"backup_now":s=await T();break;case"list_backups":s=await W();break;case"restore_backup":if(!e.backup_filename)throw new Error("backup_filename is required for restore operation");s=await A(e.backup_filename);break;case"cleanup_backups":s=await J();break;case"get_auto_index_stats":s=D();break;case"get_query_stats":s=E(e.collection);break;case"evaluate_auto_indexes":s=await O(e.collection);break;case"remove_auto_indexes":if(!e.collection)throw new Error("collection is required for remove_auto_indexes operation");s=await P(e.collection,e.field_names);break;case"create_index":if(!e.collection||!e.field)throw new Error("collection and field are required for create_index operation");s=await N(e.database||"default",e.collection,e.field,e.options);break;case"drop_index":if(!e.collection||!e.field)throw new Error("collection and field are required for drop_index operation");s=await R(e.database||"default",e.collection,e.field);break;case"get_indexes":if(!e.collection)throw new Error("collection is required for get_indexes operation");s={indexes:U(e.database||"default",e.collection)};break;case"get_replication_status":s=b().get_replication_status();break;case"add_secondary":if(!e.id||!e.ip||!e.port||!e.private_key)throw new Error("id, ip, port, and private_key are required for add_secondary operation");s=await b().add_secondary({id:e.id,ip:e.ip,port:e.port,private_key:e.private_key,enabled:!0});break;case"remove_secondary":if(!e.secondary_id)throw new Error("secondary_id is required for remove_secondary operation");s=b().remove_secondary(e.secondary_id);break;case"sync_secondaries":s=await 
b().sync_secondaries();break;case"get_secondary_health":s=b().get_secondary_health();break;case"get_forwarder_status":s=B().get_forwarder_status();break;default:s={...X(),connections:o?.get_stats()||{},write_queue:v()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...q()},settings:(()=>{try{return{port:$().port||1983}}catch{return{port:1983}}})()}}const i=Date.now()-a;return Z(i),n.info("Admin operation completed",{admin_action:t||"default",duration_ms:i,status:"success"}),s}catch(s){const i=Date.now()-a;throw n.error("Admin operation failed",{admin_action:t||"default",duration_ms:i,status:"error",error:s.message}),s}};export{qe as default,Z as track_operation};
+
import{get_database as b}from"../query_engine.js";import{get_settings as q}from"../load_settings.js";import{get_write_queue as z}from"../write_queue.js";import{get_auth_stats as M}from"../auth_manager.js";import{get_query_statistics as P,get_auto_index_statistics as N,force_index_evaluation as O,remove_automatic_indexes as A}from"../auto_index_manager.js";import{create_index as R,drop_index as U,get_indexes as j}from"../index_manager.js";import{test_s3_connection as T,create_backup as W,list_backups as J,restore_backup as B,cleanup_old_backups as I}from"../backup_manager.js";import{get_simple_sync_manager as x}from"../simple_sync_manager.js";import{get_sync_receiver as v}from"../sync_receiver.js";import L from"../logger.js";import{performance_monitor as w}from"../performance_monitor.js";const{create_context_logger:y}=L("admin"),K=()=>{try{return q()}catch{return{port:1983}}},Y=t=>{try{const e=t.getStats?t.getStats():{};return{pageSize:e.pageSize||0,treeDepth:e.treeDepth||0,treeBranchPages:e.treeBranchPages||0,treeLeafPages:e.treeLeafPages||0,entryCount:e.entryCount||0,mapSize:e.mapSize||0,lastPageNumber:e.lastPageNumber||0}}catch{return{error:"Could not retrieve database stats"}}},G=(t,e)=>{const o={};let r=0;try{for(const{key:n}of t.getRange())if(typeof n=="string"&&n.includes(":")&&!n.startsWith("_")){const a=n.split(":")[0];o[a]=(o[a]||0)+1,r++}}catch(n){e.warn("Could not iterate database range for stats",{error:n.message})}return{collections:o,total_documents:r}},S=()=>{const t=process.memoryUsage();return{rss:Math.round(t.rss/1024/1024),heapTotal:Math.round(t.heapTotal/1024/1024),heapUsed:Math.round(t.heapUsed/1024/1024),external:Math.round(t.external/1024/1024)}},H=t=>t.mapSize>0?Math.round(t.lastPageNumber*t.pageSize/t.mapSize*100):0,Q=t=>({uptime:Math.floor(process.uptime()),uptime_formatted:E(process.uptime()),memory_usage:t,memory_usage_raw:process.memoryUsage(),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid,cpu_usage:process.cpuUsage()}),V=(t,e,o,r)=>({total_documents:t,total_collections:Object.keys(e).length,collections:e,stats:o,map_size_usage_percent:r,disk_usage:{map_size_mb:Math.round((o.mapSize||0)/1024/1024),used_space_mb:Math.round((o.lastPageNumber||0)*(o.pageSize||0)/1024/1024)}}),X=()=>{const t=y();try{const e=b(),o=K(),r=Y(e),{collections:n,total_documents:a}=G(e,t),s=S(),i=H(r);return{server:Q(s),database:V(a,n,r,i),performance:{ops_per_second:C(),avg_response_time_ms:D()}}}catch(e){throw t.error("Failed to get enhanced stats",{error:e.message}),e}},E=t=>{const e=Math.floor(t/86400),o=Math.floor(t%86400/3600),r=Math.floor(t%3600/60),n=Math.floor(t%60);return e>0?`${e}d ${o}h ${r}m ${n}s`:o>0?`${o}h ${r}m ${n}s`:r>0?`${r}m ${n}s`:`${n}s`};let $=0,F=0,Z=Date.now();const C=()=>{const t=(Date.now()-Z)/1e3;return t>0?Math.round($/t):0},D=()=>$>0?Math.round(F/$):0,ee=t=>{$++,F+=t},te=t=>({name:t,document_count:0,indexes:[],estimated_size_bytes:0}),re=(t,e,o)=>{const r={};let n=0;try{for(const{key:a}of t.getRange())if(typeof a=="string"&&a.includes(":")&&!a.startsWith("_")){const s=a.split(":");if(s.length>=3){const i=s[0],c=s[1];i===e&&(r[c]||(r[c]=te(c)),r[c].document_count++,n++)}}}catch(a){o.warn("Could not iterate database range for collections",{error:a.message})}return{collections_map:r,total_documents:n}},se=(t,e,o)=>{const 
r=["admin_test","test_collection","queue_test","users","products","orders","sessions","logs","analytics","settings","another_collection","list_test","pagination_test","get_test","query_test","admin_insert_test","admin_update_test","admin_delete_test"];let n=0;for(const a of r)try{const s=`${e}:${a}:`,i=t.getRange({start:s,end:s+"\xFF"});let c=0;for(const _ of i)c++,n++;c>0&&(o[a]={name:a,document_count:c,indexes:[],estimated_size_bytes:c*100})}catch{continue}return n},oe=(t,e,o,r)=>{try{const n=`index:${e}:`,a=t.getRange({start:n,end:n+"\xFF"});for(const{key:s,value:i}of a)if(typeof s=="string"&&s.startsWith(n)){const c=s.substring(n.length),_=c.split(":")[0],u=c.split(":")[1];o[_]&&u&&(o[_].indexes.includes(u)||o[_].indexes.push(u))}}catch(n){r.warn("Could not iterate index range",{error:n.message})}},ne=(t="default")=>{const e=y();try{const o=b();let{collections_map:r,total_documents:n}=re(o,t,e);Object.keys(r).length===0&&(n+=se(o,t,r)),oe(o,t,r,e);const a=Object.values(r);return{collections:a,total_collections:a.length,total_documents:n}}catch(o){throw e.error("Failed to list collections",{error:o.message}),o}},ae=(t,e={})=>{const o=y();if(!t)throw new Error("Collection name is required");try{const r=b(),{limit:n=50,skip:a=0,sort_field:s,sort_order:i="asc",database:c="default"}=e,_=[],u=`${c}:${t}:`;let m=0,p=0;for(const{key:d,value:g}of r.getRange({start:u,end:u+"\xFF"}))if(typeof d=="string"&&d.startsWith(u)){if(p<a){p++;continue}if(m>=n)break;try{const l=JSON.parse(g),f=d.substring(u.length);_.push({_id:f,...l}),m++}catch(l){o.warn("Could not parse document",{collection:t,key:d,error:l.message})}}return s&&_.length>0&&_.sort((d,g)=>{const l=d[s],f=g[s];return i==="desc"?f>l?1:f<l?-1:0:l>f?1:l<f?-1:0}),{collection:t,documents:_,count:_.length,skip:a,limit:n,has_more:m===n}}catch(r){throw o.error("Failed to list documents",{collection:t,error:r.message}),r}},ce=(t,e,o="default")=>{const r=y();if(!t||!e)throw new Error("Collection name and document ID are required");try{const n=b(),a=`${o}:${t}:${e}`,s=n.get(a);if(!s)return{found:!1,collection:t,document_id:e};const i=JSON.parse(s);return{found:!0,collection:t,document_id:e,document:{_id:e,...i}}}catch(n){throw r.error("Failed to get document",{collection:t,document_id:e,error:n.message}),n}},ie=(t,e,o,r)=>{switch(t){case"$gt":return o>e;case"$gte":return o>=e;case"$lt":return o<e;case"$lte":return o<=e;case"$ne":return o!==e;case"$in":return Array.isArray(e)&&e.includes(o);case"$regex":const n=r.$options||"";return new RegExp(e,n).test(String(o));default:return o===r}},_e=(t,e)=>Object.keys(e).every(o=>{const r=e[o],n=t[o];return typeof r=="object"&&r!==null?Object.keys(r).every(a=>{const s=r[a];return ie(a,s,n,r)}):n===r}),ue=(t,e,o,r,n)=>{try{const a=JSON.parse(e),i={_id:t.substring(o.length),...a};return _e(i,r)?i:null}catch(a){return n.warn("Could not parse document during query",{key:t,error:a.message}),null}},le=(t,e={},o={})=>{const r=y();if(!t)throw new Error("Collection name is required");try{const n=b(),{limit:a=100,skip:s=0,database:i="default"}=o,c=[],_=`${i}:${t}:`;let u=0,m=0,p=0;for(const{key:d,value:g}of n.getRange({start:_,end:_+"\xFF"}))if(typeof d=="string"&&d.startsWith(_)){p++;const l=ue(d,g,_,e,r);if(l){if(m<s){m++;continue}if(u>=a)break;c.push(l),u++}}return{collection:t,filter:e,documents:c,count:c.length,total_examined:p,skip:s,limit:a,has_more:u===a}}catch(n){throw r.error("Failed to query documents",{collection:t,filter:e,error:n.message}),n}},de=async(t,e,o,r={})=>await(await 
import("./insert_one.js")).default(t,e,o,r),me=async(t,e,o,r,n={})=>await(await import("./update_one.js")).default(t,e,o,r,n),pe=async(t,e,o,r={})=>await(await import("./delete_one.js")).default(t,e,o,r);var ze=async(t,e={},o,r)=>{const n=y(),a=Date.now();try{let s;switch(t){case"stats":const c=S();s={server:{uptime:Math.floor(process.uptime()),uptime_formatted:E(process.uptime()),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid},memory:{heap_used_mb:c.heapUsed,heap_total_mb:c.heapTotal,rss_mb:c.rss,external_mb:c.external,heap_used_percent:c.heapTotal>0?Math.round(c.heapUsed/c.heapTotal*100):0},database:{...w.get_database_stats(),map_size_mb:Math.round((w.get_database_stats()?.map_size||0)/1024/1024),used_space_mb:Math.round((w.get_database_stats()?.used_space||0)/1024/1024),usage_percent:w.get_database_stats()?.usage_percent||0},performance:{ops_per_second:C(),avg_response_time_ms:D()},system:w.get_system_stats(),connections:o?.get_stats()||{active:r?.size||0,total:r?.size||0},write_queue:z()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...M()},settings:(()=>{try{return{port:q().port||1983}}catch{return{port:1983}}})()};break;case"list_collections":s=ne();break;case"list_documents":s=ae(e.collection,{limit:e.limit,skip:e.skip,sort_field:e.sort_field,sort_order:e.sort_order});break;case"get_document":s=ce(e.collection,e.document_id);break;case"query_documents":s=le(e.collection,e.filter,{limit:e.limit,skip:e.skip});break;case"insert_document":s=await de(e.database||"default",e.collection,e.document,e.options);break;case"update_document":const _=e.document_id?{_id:e.document_id}:e.filter;s=await me(e.database||"default",e.collection,_,e.update,e.options);break;case"delete_document":const u=e.document_id?{_id:e.document_id}:e.filter;s=await pe(e.database||"default",e.collection,u,e.options);break;case"test_s3_connection":s=await T();break;case"backup_now":s=await W();break;case"list_backups":s=await J();break;case"restore_backup":if(!e.backup_filename)throw new Error("backup_filename is required for restore operation");s=await B(e.backup_filename);break;case"cleanup_backups":s=await I();break;case"get_auto_index_stats":s=N();break;case"get_query_stats":s=P(e.collection);break;case"evaluate_auto_indexes":s=await O(e.collection);break;case"remove_auto_indexes":if(!e.collection)throw new Error("collection is required for remove_auto_indexes operation");s=await A(e.collection,e.field_names);break;case"create_index":if(!e.collection||!e.field)throw new Error("collection and field are required for create_index operation");s=await R(e.database||"default",e.collection,e.field,e.options);break;case"drop_index":if(!e.collection||!e.field)throw new Error("collection and field are required for drop_index operation");s=await U(e.database||"default",e.collection,e.field);break;case"get_indexes":if(!e.collection)throw new Error("collection is required for get_indexes operation");s={indexes:j(e.database||"default",e.collection)};break;case"get_sync_status":const m=x(),p=v();s={sync_manager:m.get_sync_status(),sync_receiver:p.get_sync_status()};break;case"update_secondary_nodes":if(!Array.isArray(e.secondary_nodes))throw new Error("secondary_nodes array is required for update_secondary_nodes operation");x().update_secondary_nodes(e.secondary_nodes),s={success:!0,message:"Secondary nodes updated successfully",secondary_nodes:e.secondary_nodes};break;case"force_sync":s=await x().force_sync();break;case"set_primary_role":if(typeof 
e.primary!="boolean")throw new Error("primary boolean value is required for set_primary_role operation");e.primary?(v().promote_to_primary(),s={success:!0,message:"Node promoted to primary successfully",role:"primary"}):s={success:!1,message:"Demoting primary to secondary requires server restart with updated configuration",role:"primary"};break;case"reload_sync_key":const l=v();if(!l.is_secondary)throw new Error("reload_sync_key can only be used on secondary nodes");await l.reload_api_key(),s={success:!0,message:"API_KEY reloaded successfully"};break;case"get_secondary_auth_status":const h=x().get_sync_status();s={secondary_count:h.secondary_count,auth_failures:h.stats.auth_failures,successful_syncs:h.stats.successful_syncs,failed_syncs:h.stats.failed_syncs,secondaries:h.secondaries};break;default:s={...X(),connections:o?.get_stats()||{},write_queue:z()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...M()},settings:(()=>{try{return{port:q().port||1983}}catch{return{port:1983}}})()}}const i=Date.now()-a;return ee(i),n.info("Admin operation completed",{admin_action:t||"default",duration_ms:i,status:"success"}),s}catch(s){const i=Date.now()-a;throw n.error("Admin operation failed",{admin_action:t||"default",duration_ms:i,status:"error",error:s.message}),s}};export{ze as default,ee as track_operation};
package/dist/server/lib/simple_sync_manager.js
ADDED
@@ -0,0 +1 @@
+
import l from"net";import{get_settings as y}from"./load_settings.js";import{encode_message as g}from"./tcp_protocol.js";import u from"./logger.js";const{create_context_logger:m}=u("simple_sync");class p{constructor(){this.is_primary=!1,this.secondary_nodes=[],this.connections=new Map,this.sync_port=1985,this.sync_timeout_ms=5e3,this.sync_retries=2,this.sequence_number=0,this.log=m(),this.stats={total_synced:0,successful_syncs:0,failed_syncs:0,auth_failures:0}}initialize(){try{const s=y();if(!s.primary){this.log.info("Node not configured as primary - sync disabled");return}this.is_primary=s.primary,this.secondary_nodes=s.secondary_nodes||[],this.sync_port=s.sync_port||1985,this.sync_timeout_ms=s.sync_timeout_ms||5e3,this.sync_retries=s.sync_retries||2,this.log.info("Initializing simple sync manager",{is_primary:this.is_primary,secondary_count:this.secondary_nodes.length,sync_port:this.sync_port,timeout_ms:this.sync_timeout_ms,retries:this.sync_retries}),this.connect_to_secondaries()}catch(s){this.log.warn("Could not initialize sync manager - settings not loaded",{error:s.message})}}connect_to_secondaries(){for(const s of this.secondary_nodes)this.connect_to_secondary(s)}connect_to_secondary(s){const{ip:n}=s,e=`${n}:${this.sync_port}`;if(!this.connections.has(e)){this.log.info("Connecting to secondary node",{ip:n,port:this.sync_port});try{const t=new l.Socket;t.connect(this.sync_port,n,()=>{this.log.info("Connected to secondary node",{ip:n,port:this.sync_port})}),t.on("error",o=>{this.log.error("Secondary connection error",{ip:n,error:o.message}),this.connections.delete(e),setTimeout(()=>{try{this.connect_to_secondary(s)}catch(r){this.log.error("Failed to retry secondary connection",{ip:n,error:r.message})}},5e3)}),t.on("close",()=>{this.log.warn("Secondary connection closed",{ip:n}),this.connections.delete(e),setTimeout(()=>{try{this.connect_to_secondary(s)}catch(o){this.log.error("Failed to retry secondary connection",{ip:n,error:o.message})}},5e3)}),t.on("data",o=>{try{const r=JSON.parse(o.toString());this.handle_sync_response(e,r)}catch(r){this.log.error("Failed to parse sync response",{connection_id:e,error:r.message})}}),this.connections.set(e,{socket:t,ip:n,connected:!0,last_sync:null})}catch(t){this.log.error("Failed to connect to secondary",{ip:n,error:t.message})}}}handle_sync_response(s,n){const{type:e,status:t,sequence:o,error:r}=n;e==="sync_acknowledged"&&(t==="success"?(this.stats.successful_syncs++,this.log.debug("Sync acknowledged",{connection_id:s,sequence:o})):t==="auth_failed"?(this.stats.auth_failures++,this.log.error("Sync authentication failed",{connection_id:s,sequence:o,error:r})):(this.stats.failed_syncs++,this.log.error("Sync failed",{connection_id:s,sequence:o,error:r})))}queue_sync(s,n,e){if(!this.is_primary||this.connections.size===0)return;const t=++this.sequence_number;this.log.debug("Queuing sync operation",{operation:s,collection:n,sequence:t,secondary_count:this.connections.size}),this.stats.total_synced++,this.send_sync_to_secondaries(s,n,e,t)}send_sync_to_secondaries(s,n,e,t){try{const o=y();if(!o.api_key){this.log.error("No API_KEY configured for sync operations");return}const r={type:"operation_sync",api_key:o.api_key,sequence:t,timestamp:Date.now(),operation:s,collection:n,data:e},h=g(r);for(const[a,i]of this.connections)if(!(!i.connected||!i.socket))try{i.socket.write(h),i.last_sync=Date.now(),this.log.debug("Sent sync to secondary",{connection_id:a,operation:s,sequence:t})}catch(d){this.log.error("Failed to send sync to 
secondary",{connection_id:a,error:d.message})}}catch(o){this.log.error("Failed to send sync to secondaries",{operation:s,sequence:t,error:o.message})}}update_secondary_nodes(s){this.log.info("Updating secondary nodes configuration",{old_count:this.secondary_nodes.length,new_count:s.length});for(const[n,e]of this.connections)try{e.socket.end()}catch(t){this.log.warn("Error closing secondary connection",{connection_id:n,error:t.message})}this.connections.clear(),this.secondary_nodes=s,this.connect_to_secondaries()}async force_sync(){if(!this.is_primary)throw new Error("Node is not configured as primary");const s=[];for(const[n,e]of this.connections)try{e.connected?s.push({connection_id:n,status:"sync_initiated"}):s.push({connection_id:n,status:"not_connected"})}catch(t){s.push({connection_id:n,status:"error",error:t.message})}return{success:!0,message:"Force sync initiated",results:s}}get_sync_status(){const s=[];for(const[n,e]of this.connections)s.push({connection_id:n,ip:e.ip,connected:e.connected,last_sync:e.last_sync});return{is_primary:this.is_primary,secondary_count:this.connections.size,stats:this.stats,secondaries:s}}async shutdown(){this.log.info("Shutting down simple sync manager");for(const[s,n]of this.connections)try{n.socket.end()}catch(e){this.log.warn("Error closing secondary connection during shutdown",{connection_id:s,error:e.message})}this.connections.clear(),this.is_primary=!1,this.log.info("Simple sync manager shutdown complete")}}let c=null;const f=()=>(c||(c=new p),c),F=()=>{f().initialize()},b=async()=>{c&&(await c.shutdown(),c=null)};export{f as get_simple_sync_manager,F as initialize_simple_sync_manager,b as shutdown_simple_sync_manager};
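For orientation, the minified bundles above assemble the `get_sync_status` admin result from two parts: the sync manager (primary side) and the sync receiver (secondary side). The shape below is read from `dist/server/lib/operations/admin.js` and `simple_sync_manager.js` in this version; the concrete values are placeholders.

```javascript
// Approximate shape of the `get_sync_status` admin result, as assembled in the
// minified dist/server/lib/operations/admin.js and simple_sync_manager.js above.
// Values here are illustrative placeholders.
const example_sync_status = {
  sync_manager: {                 // from get_simple_sync_manager().get_sync_status()
    is_primary: true,
    secondary_count: 2,
    stats: { total_synced: 120, successful_syncs: 118, failed_syncs: 2, auth_failures: 0 },
    secondaries: [
      { connection_id: '192.168.1.100:1985', ip: '192.168.1.100', connected: true, last_sync: 1700000000000 },
    ],
  },
  sync_receiver: {},              // from get_sync_receiver().get_sync_status() (secondary-side state, not shown in this diff)
};

console.log(example_sync_status.sync_manager.secondary_count);
```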