@joystick.js/db-canary 0.0.0-canary.2274 → 0.0.0-canary.2276

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. package/README.md +87 -104
  2. package/debug_test_runner.js +208 -0
  3. package/dist/client/index.js +1 -1
  4. package/dist/server/cluster/master.js +2 -2
  5. package/dist/server/cluster/worker.js +1 -1
  6. package/dist/server/index.js +1 -1
  7. package/dist/server/lib/auto_index_manager.js +1 -1
  8. package/dist/server/lib/bulk_insert_optimizer.js +1 -1
  9. package/dist/server/lib/http_server.js +3 -3
  10. package/dist/server/lib/operation_dispatcher.js +1 -1
  11. package/dist/server/lib/operations/admin.js +1 -1
  12. package/dist/server/lib/operations/update_one.js +1 -1
  13. package/dist/server/lib/simple_sync_manager.js +1 -0
  14. package/dist/server/lib/sync_receiver.js +1 -0
  15. package/full_debug_test_runner.js +197 -0
  16. package/package.json +10 -7
  17. package/src/client/index.js +1 -0
  18. package/src/server/cluster/master.js +8 -2
  19. package/src/server/cluster/worker.js +9 -3
  20. package/src/server/index.js +25 -24
  21. package/src/server/lib/auto_index_manager.js +8 -3
  22. package/src/server/lib/bulk_insert_optimizer.js +79 -0
  23. package/src/server/lib/http_server.js +7 -0
  24. package/src/server/lib/operation_dispatcher.js +16 -10
  25. package/src/server/lib/operations/admin.js +64 -31
  26. package/src/server/lib/operations/update_one.js +251 -1
  27. package/src/server/lib/simple_sync_manager.js +444 -0
  28. package/src/server/lib/sync_receiver.js +461 -0
  29. package/tests/client/index.test.js +7 -0
  30. package/tests/performance/isolated_5000000_test.js +184 -0
  31. package/tests/server/lib/http_server.test.js +3 -12
  32. package/tests/server/lib/operations/update_one.test.js +161 -0
  33. package/tests/server/lib/simple_sync_system.test.js +124 -0
  34. package/dist/server/lib/replication_manager.js +0 -1
  35. package/dist/server/lib/write_forwarder.js +0 -1
  36. package/src/server/lib/replication_manager.js +0 -727
  37. package/src/server/lib/write_forwarder.js +0 -636
  38. package/tests/server/lib/replication_manager.test.js +0 -202
  39. package/tests/server/lib/write_forwarder.test.js +0 -258
@@ -1 +1 @@
1
- import p from"net";import u from"../lib/op_types.js";import{send_success as l,send_error as o,send_message as g}from"../lib/send_response.js";import{shutdown_write_queue as m}from"../lib/write_queue.js";import{create_message_parser as w,encode_message as h}from"../lib/tcp_protocol.js";import f from"../lib/logger.js";import{initialize_database as y,cleanup_database as b}from"../lib/query_engine.js";import{handle_admin_operation as v,handle_ping_operation as k}from"../lib/operation_dispatcher.js";import{get_settings as $}from"../lib/load_settings.js";import{is_development_mode as S}from"../lib/development_mode.js";class D{constructor(){this.server=null,this.connections=new Map,this.connection_count=0,this.settings=null,this.port=null,this.write_id_counter=0,this.pending_writes=new Map,this.authenticated_clients=new Set,this.heartbeat_interval=null;const{create_context_logger:e}=f("worker");this.log=e({worker_pid:process.pid}),this.setup_worker()}setup_worker(){process.on("message",e=>{this.handle_master_message(e)}),process.on("SIGTERM",()=>{this.shutdown()}),process.on("SIGINT",()=>{this.shutdown()}),this.send_heartbeat(),this.heartbeat_interval=setInterval(()=>{this.send_heartbeat()},5e3),process.connected&&process.send({type:"worker_ready"})}handle_master_message(e){switch(e.type){case"config":this.handle_config(e);break;case"write_response":this.handle_write_response(e);break;case"auth_response":this.handle_auth_response(e);break;case"setup_response":this.handle_setup_response(e);break;case"write_notification":this.handle_write_notification(e);break;case"shutdown":this.shutdown();break;default:this.log.warn("Unknown message type received from master",{message_type:e.type})}}handle_config(e){const t=e.data.master_id;if(this.master_id&&this.master_id!==t){this.log.info("Worker already configured by different master, ignoring config 
message",{current_master_id:this.master_id,incoming_master_id:t,current_port:this.port,new_port:e.data.port});return}if(this.port!==null&&this.master_id===t){this.log.info("Worker already configured by same master, ignoring duplicate config message",{master_id:t,current_port:this.port,new_port:e.data.port});return}this.log.info("Received config message",{port:e.data.port,master_id:t}),this.port=e.data.port,this.settings=e.data.settings,this.master_id=t;try{let s="./data";try{const r=$();r?.data_path&&(s=r.data_path)}catch{}y(s),this.log.info("Database initialized in worker process",{database_path:s})}catch(s){this.log.error("Failed to initialize database in worker process",{error:s.message})}this.log.info("Starting server",{port:this.port}),this.start_server()}start_server(){this.server=p.createServer(e=>{this.handle_connection(e)}),this.server.listen(this.port,()=>{this.log.info("Server listening",{port:this.port}),process.connected&&process.send({type:"server_ready"})}),this.server.on("error",e=>{this.log.error("Server error",{error:e.message})})}handle_connection(e){const t=`${process.pid}_${Date.now()}_${Math.random()}`;e.id=t,e.message_parser=w(),this.connections.set(t,e),this.connection_count++,this.update_connection_count(),e.on("data",s=>{this.handle_socket_data(e,s)}),e.on("end",()=>{this.handle_socket_end(e)}),e.on("error",s=>{this.log.error("Socket error",{socket_id:t,error_message:s.message}),this.handle_socket_end(e)})}process_single_message(e,t){const s=t?.op||null;return s?this.check_op_type(s)?(this.route_operation(e,s,t?.data||{}),!0):(o(e,{message:"Invalid operation type"}),!1):(o(e,{message:"Missing operation type"}),!1)}handle_socket_data(e,t){try{const s=e.message_parser.parse_messages(t);for(const r of s)this.process_single_message(e,r)}catch(s){this.log.error("Data parsing error",{socket_id:e.id,error_message:s.message}),o(e,{message:"Invalid data 
format"})}}handle_socket_end(e){e.id&&(this.connections.delete(e.id),this.authenticated_clients.delete(e.id),this.connection_count--,this.update_connection_count()),this.log.info("Client disconnected",{socket_id:e.id})}check_op_type(e=""){return e?u.includes(e):!1}route_operation(e,t,s){switch(t){case"authentication":this.handle_authentication(e,s);break;case"setup":this.handle_setup(e,s);break;case"find_one":case"find":case"count_documents":case"get_indexes":this.handle_read_operation(e,t,s);break;case"create_index":case"drop_index":this.handle_write_operation(e,t,s);break;case"insert_one":case"update_one":case"delete_one":case"delete_many":case"bulk_write":this.handle_write_operation(e,t,s);break;case"ping":k(e);break;case"admin":v(e,s,this.is_authenticated.bind(this));break;default:o(e,{message:`Unsupported operation: ${t}`})}}handle_authentication(e,t){if(this.is_authenticated(e))g(e,"Already authenticated");else{const s=`${e.id}_${Date.now()}`;process.send({type:"auth_request",data:{auth_id:s,socket_id:e.id,password:t.password}}),this.pending_writes.set(s,{socket:e,type:"auth"})}}handle_setup(e,t){const s=`${e.id}_${Date.now()}`;process.send({type:"setup_request",data:{setup_id:s,socket_id:e.id}}),this.pending_writes.set(s,{socket:e,type:"setup"})}handle_read_operation(e,t,s){if(!this.is_authenticated(e)){o(e,{message:"Authentication required"});return}const r=`${e.id}_${++this.write_id_counter}`;process.send({type:"write_request",data:{write_id:r,op_type:t,data:s,socket_id:e.id}}),this.pending_writes.set(r,{socket:e,type:"read",op_type:t})}handle_write_operation(e,t,s){if(!this.is_authenticated(e)){o(e,{message:"Authentication required"});return}const 
r=`${e.id}_${++this.write_id_counter}`;process.send({type:"write_request",data:{write_id:r,op_type:t,data:s,socket_id:e.id}}),this.pending_writes.set(r,{socket:e,type:"write",op_type:t})}handle_write_response(e){const{write_id:t,success:s,result:r,error:_}=e.data,i=this.pending_writes.get(t);if(!i){this.log.warn("No pending write found",{write_id:t});return}const{socket:n,op_type:a}=i;if(this.pending_writes.delete(t),n.destroyed||!n.writable){this.log.warn("Socket disconnected before response could be sent",{write_id:t});return}try{if(s){let d;a==="find_one"?d={ok:1,document:r}:a==="find"?d={ok:1,documents:r}:d={ok:1,...r};const c=h(d);n.write(c)}else{const c=h({ok:0,error:_});n.write(c)}}catch(d){this.log.error("Error sending response to client",{write_id:t,error:d.message})}}handle_auth_response(e){const{auth_id:t,success:s,message:r}=e.data,_=this.pending_writes.get(t);if(!_){this.log.warn("No pending auth found",{auth_id:t});return}const{socket:i}=_;if(this.pending_writes.delete(t),i.destroyed||!i.writable){this.log.warn("Socket disconnected before auth response could be sent",{auth_id:t});return}try{if(s){this.authenticated_clients.add(i.id);const a=h({ok:1,version:"1.0.0",message:r});i.write(a)}else o(i,{message:r}),i.end()}catch(n){this.log.error("Error sending auth response to client",{auth_id:t,error:n.message})}}handle_setup_response(e){const{setup_id:t,success:s,password:r,message:_,error:i}=e.data,n=this.pending_writes.get(t);if(!n){this.log.warn("No pending setup found",{setup_id:t});return}const{socket:a}=n;this.pending_writes.delete(t),s?l(a,{password:r,message:_}):o(a,{message:i})}handle_write_notification(e){this.log.info("Received write notification",{op_type:e.data.op_type,timestamp:e.data.timestamp})}is_authenticated(e){return 
S()?!0:this.authenticated_clients.has(e.id)}update_connection_count(){process.connected&&process.send({type:"connection_count",data:{count:this.connection_count}})}send_heartbeat(){if(process.connected)try{process.send({type:"heartbeat",data:{timestamp:Date.now()}})}catch{clearInterval(this.heartbeat_interval)}}async shutdown(){const e=Date.now();this.log.info("Initiating graceful shutdown");try{await m(),this.log.info("Write queue shutdown complete")}catch(s){this.log.error("Error shutting down write queue",{error:s.message})}try{await b(),this.log.info("Database cleanup complete")}catch(s){this.log.error("Error cleaning up database",{error:s.message})}this.server&&this.server.close(()=>{this.log.info("Server closed")});for(const[s,r]of this.connections)r.end();const t=process.env.NODE_ENV==="test"?100:5e3;setTimeout(()=>{const s=Date.now()-e;this.log.info("Worker shutdown complete",{shutdown_duration_ms:s}),process.exit(0)},t)}}const C=new D;
1
+ import p from"net";import u from"../lib/op_types.js";import{send_success as l,send_error as a,send_message as g}from"../lib/send_response.js";import{shutdown_write_queue as m}from"../lib/write_queue.js";import{create_message_parser as w,encode_message as h}from"../lib/tcp_protocol.js";import f from"../lib/logger.js";import{initialize_database as y,cleanup_database as b}from"../lib/query_engine.js";import{handle_admin_operation as v,handle_ping_operation as k}from"../lib/operation_dispatcher.js";import{get_settings as $}from"../lib/load_settings.js";import{is_development_mode as S}from"../lib/development_mode.js";class D{constructor(){this.server=null,this.connections=new Map,this.connection_count=0,this.settings=null,this.port=null,this.write_id_counter=0,this.pending_writes=new Map,this.authenticated_clients=new Set,this.heartbeat_interval=null;const{create_context_logger:e}=f("worker");this.log=e({worker_pid:process.pid}),this.setup_worker()}setup_worker(){process.on("message",e=>{this.handle_master_message(e)}),process.on("SIGTERM",()=>{this.shutdown()}),process.on("SIGINT",()=>{this.shutdown()}),this.send_heartbeat(),this.heartbeat_interval=setInterval(()=>{this.send_heartbeat()},5e3),process.connected&&process.send({type:"worker_ready"})}handle_master_message(e){switch(e.type){case"config":this.handle_config(e);break;case"write_response":this.handle_write_response(e);break;case"auth_response":this.handle_auth_response(e);break;case"setup_response":this.handle_setup_response(e);break;case"write_notification":this.handle_write_notification(e);break;case"shutdown":this.shutdown();break;default:this.log.warn("Unknown message type received from master",{message_type:e.type})}}async handle_config(e){const s=e.data.master_id;if(this.master_id&&this.master_id!==s){this.log.info("Worker already configured by different master, ignoring config 
message",{current_master_id:this.master_id,incoming_master_id:s,current_port:this.port,new_port:e.data.port});return}if(this.port!==null&&this.master_id===s){this.log.info("Worker already configured by same master, ignoring duplicate config message",{master_id:s,current_port:this.port,new_port:e.data.port});return}this.log.info("Received config message",{port:e.data.port,master_id:s}),this.port=e.data.port,this.settings=e.data.settings,this.master_id=s;try{let t;try{const r=$();if(r?.data_path)t=r.data_path;else{const{get_port_configuration:o}=await import("../lib/load_settings.js"),{tcp_port:i}=o();t=`./.joystick/data/joystickdb_${i}`}}catch{t=`./.joystick/data/joystickdb_${this.port}`}y(t),this.log.info("Database initialized in worker process",{database_path:t})}catch(t){this.log.error("Failed to initialize database in worker process",{error:t.message})}this.log.info("Starting server",{port:this.port}),this.start_server()}start_server(){this.server=p.createServer(e=>{this.handle_connection(e)}),this.server.listen(this.port,()=>{this.log.info("Server listening",{port:this.port}),process.connected&&process.send({type:"server_ready"})}),this.server.on("error",e=>{this.log.error("Server error",{error:e.message})})}handle_connection(e){const s=`${process.pid}_${Date.now()}_${Math.random()}`;e.id=s,e.message_parser=w(),this.connections.set(s,e),this.connection_count++,this.update_connection_count(),e.on("data",t=>{this.handle_socket_data(e,t)}),e.on("end",()=>{this.handle_socket_end(e)}),e.on("error",t=>{this.log.error("Socket error",{socket_id:s,error_message:t.message}),this.handle_socket_end(e)})}process_single_message(e,s){const t=s?.op||null;return t?this.check_op_type(t)?(this.route_operation(e,t,s?.data||{}),!0):(a(e,{message:"Invalid operation type"}),!1):(a(e,{message:"Missing operation type"}),!1)}handle_socket_data(e,s){try{const t=e.message_parser.parse_messages(s);for(const r of t)this.process_single_message(e,r)}catch(t){this.log.error("Data parsing 
error",{socket_id:e.id,error_message:t.message}),a(e,{message:"Invalid data format"})}}handle_socket_end(e){e.id&&(this.connections.delete(e.id),this.authenticated_clients.delete(e.id),this.connection_count--,this.update_connection_count()),this.log.info("Client disconnected",{socket_id:e.id})}check_op_type(e=""){return e?u.includes(e):!1}route_operation(e,s,t){switch(s){case"authentication":this.handle_authentication(e,t);break;case"setup":this.handle_setup(e,t);break;case"find_one":case"find":case"count_documents":case"get_indexes":this.handle_read_operation(e,s,t);break;case"create_index":case"drop_index":this.handle_write_operation(e,s,t);break;case"insert_one":case"update_one":case"delete_one":case"delete_many":case"bulk_write":this.handle_write_operation(e,s,t);break;case"ping":k(e);break;case"admin":v(e,t,this.is_authenticated.bind(this));break;default:a(e,{message:`Unsupported operation: ${s}`})}}handle_authentication(e,s){if(this.is_authenticated(e))g(e,"Already authenticated");else{const t=`${e.id}_${Date.now()}`;process.send({type:"auth_request",data:{auth_id:t,socket_id:e.id,password:s.password}}),this.pending_writes.set(t,{socket:e,type:"auth"})}}handle_setup(e,s){const t=`${e.id}_${Date.now()}`;process.send({type:"setup_request",data:{setup_id:t,socket_id:e.id}}),this.pending_writes.set(t,{socket:e,type:"setup"})}handle_read_operation(e,s,t){if(!this.is_authenticated(e)){a(e,{message:"Authentication required"});return}const r=`${e.id}_${++this.write_id_counter}`;process.send({type:"write_request",data:{write_id:r,op_type:s,data:t,socket_id:e.id}}),this.pending_writes.set(r,{socket:e,type:"read",op_type:s})}handle_write_operation(e,s,t){if(!this.is_authenticated(e)){a(e,{message:"Authentication required"});return}const 
r=`${e.id}_${++this.write_id_counter}`;process.send({type:"write_request",data:{write_id:r,op_type:s,data:t,socket_id:e.id}}),this.pending_writes.set(r,{socket:e,type:"write",op_type:s})}handle_write_response(e){const{write_id:s,success:t,result:r,error:o}=e.data,i=this.pending_writes.get(s);if(!i){this.log.warn("No pending write found",{write_id:s});return}const{socket:n,op_type:d}=i;if(this.pending_writes.delete(s),n.destroyed||!n.writable){this.log.warn("Socket disconnected before response could be sent",{write_id:s});return}try{if(t){let _;d==="find_one"?_={ok:1,document:r}:d==="find"?_={ok:1,documents:r}:_={ok:1,...r};const c=h(_);n.write(c)}else{const c=h({ok:0,error:o});n.write(c)}}catch(_){this.log.error("Error sending response to client",{write_id:s,error:_.message})}}handle_auth_response(e){const{auth_id:s,success:t,message:r}=e.data,o=this.pending_writes.get(s);if(!o){this.log.warn("No pending auth found",{auth_id:s});return}const{socket:i}=o;if(this.pending_writes.delete(s),i.destroyed||!i.writable){this.log.warn("Socket disconnected before auth response could be sent",{auth_id:s});return}try{if(t){this.authenticated_clients.add(i.id);const d=h({ok:1,version:"1.0.0",message:r});i.write(d)}else a(i,{message:r}),i.end()}catch(n){this.log.error("Error sending auth response to client",{auth_id:s,error:n.message})}}handle_setup_response(e){const{setup_id:s,success:t,password:r,message:o,error:i}=e.data,n=this.pending_writes.get(s);if(!n){this.log.warn("No pending setup found",{setup_id:s});return}const{socket:d}=n;this.pending_writes.delete(s),t?l(d,{password:r,message:o}):a(d,{message:i})}handle_write_notification(e){this.log.info("Received write notification",{op_type:e.data.op_type,timestamp:e.data.timestamp})}is_authenticated(e){return 
S()?!0:this.authenticated_clients.has(e.id)}update_connection_count(){process.connected&&process.send({type:"connection_count",data:{count:this.connection_count}})}send_heartbeat(){if(process.connected)try{process.send({type:"heartbeat",data:{timestamp:Date.now()}})}catch{clearInterval(this.heartbeat_interval)}}async shutdown(){const e=Date.now();this.log.info("Initiating graceful shutdown");try{await m(),this.log.info("Write queue shutdown complete")}catch(t){this.log.error("Error shutting down write queue",{error:t.message})}try{await b(),this.log.info("Database cleanup complete")}catch(t){this.log.error("Error cleaning up database",{error:t.message})}this.server&&this.server.close(()=>{this.log.info("Server closed")});for(const[t,r]of this.connections)r.end();const s=process.env.NODE_ENV==="test"?100:5e3;setTimeout(()=>{const t=Date.now()-e;this.log.info("Worker shutdown complete",{shutdown_duration_ms:t}),process.exit(0)},s)}}const T=new D;
@@ -1 +1 @@
1
- import S from"net";import{decode as k}from"msgpackr";import x from"./lib/op_types.js";import g from"./lib/safe_json_parse.js";import{load_settings as l,get_settings as m,get_port_configuration as c}from"./lib/load_settings.js";import{send_error as _}from"./lib/send_response.js";import{start_cluster as T}from"./cluster/index.js";import h from"./lib/logger.js";import{initialize_database as z,cleanup_database as O}from"./lib/query_engine.js";import{create_message_parser as I,encode_message as w}from"./lib/tcp_protocol.js";import{create_connection_manager as R}from"./lib/connection_manager.js";import{shutdown_write_queue as C}from"./lib/write_queue.js";import{setup_authentication as E,verify_password as q,get_client_ip as N,is_rate_limited as $,initialize_auth_manager as A,reset_auth_state as B}from"./lib/auth_manager.js";import{initialize_api_key_manager as D}from"./lib/api_key_manager.js";import{is_development_mode as y,display_development_startup_message as F,warn_undefined_node_env as J}from"./lib/development_mode.js";import{restore_backup as K,start_backup_schedule as P,stop_backup_schedule as j}from"./lib/backup_manager.js";import{initialize_replication_manager as G,shutdown_replication_manager as M}from"./lib/replication_manager.js";import{initialize_write_forwarder as W,shutdown_write_forwarder as H}from"./lib/write_forwarder.js";import{handle_database_operation as U,handle_admin_operation as V,handle_ping_operation as Y}from"./lib/operation_dispatcher.js";import{start_http_server as L,stop_http_server as Q}from"./lib/http_server.js";import{create_recovery_token as X,initialize_recovery_manager as v,reset_recovery_state as Z}from"./lib/recovery_manager.js";import{has_settings as ee}from"./lib/load_settings.js";const i=new Set;let s=null;const re=e=>e&&e.password,d=e=>({ok:0,error:e}),te=()=>({ok:1,version:"1.0.0",message:"Authentication successful"}),u=(e,r)=>{const t=w(r);e.write(t),e.end()},p=(e,r)=>{const 
t=w(r);e.write(t)},ne=async(e,r={})=>{if(!re(r)){const t=d("Authentication operation requires password to be set in data.");u(e,t);return}try{const t=N(e);if($(t)){const a=d("Too many failed attempts. Please try again later.");u(e,a);return}if(!await q(r.password,t)){const a=d("Authentication failed");u(e,a);return}i.add(e.id);const o=te();p(e,o)}catch(t){const n=d(`Authentication error: ${t.message}`);u(e,n)}},oe=e=>({ok:1,password:e,message:"Authentication setup completed successfully. Save this password - it will not be shown again."}),ae=e=>({ok:0,error:`Setup error: ${e}`}),se=async(e,r={})=>{try{const t=E(),n=oe(t);p(e,n)}catch(t){const n=ae(t.message);p(e,n)}},ie=(e="")=>{if(!e)throw new Error("Must pass an op type for operation.");return x.includes(e)},ce=e=>g(e),_e=e=>{try{const r=k(e);return typeof r=="string"?g(r):r}catch{return null}},ar=e=>{try{return typeof e=="string"?ce(e):Buffer.isBuffer(e)?_e(e):e}catch{return null}},f=e=>y()?!0:i.has(e.id),pe=async(e,r)=>{if(e?.restore_from)try{r.info("Startup restore requested",{backup_filename:e.restore_from});const t=await K(e.restore_from);r.info("Startup restore completed",{backup_filename:e.restore_from,duration_ms:t.duration_ms});const n={...e};delete n.restore_from,process.env.JOYSTICK_DB_SETTINGS=JSON.stringify(n),l(),r.info("Removed restore_from from settings after successful restore")}catch(t){r.error("Startup restore failed",{backup_filename:e.restore_from,error:t.message}),r.info("Continuing with fresh database after restore failure")}},de=()=>{try{return l(),m()}catch{return null}},ue=async e=>{const{tcp_port:r}=c(),t=e?.data_path||`./.joystick/data/joystickdb_${r}`;z(t),A(),await D(),v()},le=e=>{try{G(),e.info("Replication manager initialized")}catch(r){e.warn("Failed to initialize replication manager",{error:r.message})}},me=e=>{try{W(),e.info("Write forwarder initialized")}catch(r){e.warn("Failed to initialize write forwarder",{error:r.message})}},fe=(e,r)=>{if(e?.s3)try{P(),r.info("Backup 
scheduling started")}catch(t){r.warn("Failed to start backup scheduling",{error:t.message})}},ge=async(e,r)=>{try{const t=await L(e);return t&&r.info("HTTP server started",{http_port:e}),t}catch(t){return r.warn("Failed to start HTTP server",{error:t.message}),null}},he=()=>{if(y()){const{tcp_port:e,http_port:r}=c();F(e,r)}else J()},we=()=>R({max_connections:1e3,idle_timeout:600*1e3,request_timeout:5*1e3}),ye=async(e,r,t,n)=>{s.update_activity(e.id);try{const o=t.parse_messages(r);for(const a of o)await ve(e,a,r.length,n)}catch(o){n.error("Message parsing failed",{client_id:e.id,error:o.message}),_(e,{message:"Invalid message format"}),e.end()}},ve=async(e,r,t,n)=>{const o=r,a=o?.op||null;if(!a){_(e,{message:"Missing operation type"});return}if(!ie(a)){_(e,{message:"Invalid operation type"});return}const b=s.create_request_timeout(e.id,a);try{await be(e,a,o,t)}finally{clearTimeout(b)}},be=async(e,r,t,n)=>{const o=t?.data||{};switch(r){case"authentication":await ne(e,o);break;case"setup":await se(e,o);break;case"insert_one":case"update_one":case"delete_one":case"delete_many":case"bulk_write":case"find_one":case"find":case"count_documents":case"create_index":case"drop_index":case"get_indexes":await U(e,r,o,f,n,s,i);break;case"ping":Y(e);break;case"admin":await V(e,o,f,s,i);break;case"reload":await Se(e);break;default:_(e,{message:`Operation ${r} not implemented`})}},Se=async e=>{if(!f(e)){_(e,{message:"Authentication required"});return}try{const r=ke(),t=await xe(),n=Te(r,t);p(e,n)}catch(r){const t={ok:0,error:`Reload operation failed: ${r.message}`};p(e,t)}},ke=()=>{try{return m()}catch{return null}},xe=async()=>{try{return await l(),m()}catch{return{port:1983,authentication:{}}}},Te=(e,r)=>({ok:1,status:"success",message:"Configuration reloaded successfully",changes:{port_changed:e?e.port!==r.port:!1,authentication_changed:e?e.authentication?.password_hash!==r.authentication?.password_hash:!1},timestamp:new Date().toISOString()}),ze=(e,r)=>{r.info("Client 
disconnected",{socket_id:e.id}),i.delete(e.id),s.remove_connection(e.id)},Oe=(e,r,t)=>{t.error("Socket error",{socket_id:e.id,error:r.message}),i.delete(e.id),s.remove_connection(e.id)},Ie=(e,r,t)=>{e.on("data",async n=>{await ye(e,n,r,t)}),e.on("end",()=>{ze(e,t)}),e.on("error",n=>{Oe(e,n,t)})},Re=(e,r)=>{if(!s.add_connection(e))return;const t=I();Ie(e,t,r)},Ce=()=>async()=>{try{await Q(),j(),await M(),await H(),s&&s.shutdown(),i.clear(),await C(),await new Promise(e=>setTimeout(e,100)),await O(),B(),Z()}catch{}},sr=async()=>{const{create_context_logger:e}=h("server"),r=e(),t=de();await pe(t,r),await ue(t),le(r),me(r),fe(t,r),s=we();const{http_port:n}=c();await ge(n,r),he();const o=S.createServer((a={})=>{Re(a,r)});return o.cleanup=Ce(),o},Ee=e=>{try{v();const r=X();console.log("Emergency Recovery Token Generated"),console.log(`Visit: ${r.url}`),console.log("Token expires in 10 minutes"),e.info("Recovery token generated via CLI",{expires_at:new Date(r.expires_at).toISOString()}),process.exit(0)}catch(r){console.error("Failed to generate recovery token:",r.message),e.error("Recovery token generation failed",{error:r.message}),process.exit(1)}},qe=()=>{const{tcp_port:e}=c();return{worker_count:process.env.WORKER_COUNT?parseInt(process.env.WORKER_COUNT):void 0,port:e,environment:process.env.NODE_ENV||"development"}},Ne=(e,r)=>{const{tcp_port:t,http_port:n}=c(),o=ee();r.info("Starting JoystickDB server...",{workers:e.worker_count||"auto",tcp_port:t,http_port:n,environment:e.environment,has_settings:o,port_source:o?"JOYSTICK_DB_SETTINGS":"default"})};if(import.meta.url===`file://${process.argv[1]}`){const{create_context_logger:e}=h("main"),r=e();process.argv.includes("--generate-recovery-token")&&Ee(r);const t=qe();Ne(t,r),T(t)}export{ne as authentication,ie as check_op_type,sr as create_server,ar as parse_data,se as setup};
1
+ import S from"net";import{decode as k}from"msgpackr";import x from"./lib/op_types.js";import g from"./lib/safe_json_parse.js";import{load_settings as m,get_settings as l,get_port_configuration as c}from"./lib/load_settings.js";import{send_error as _}from"./lib/send_response.js";import{start_cluster as T}from"./cluster/index.js";import h from"./lib/logger.js";import{initialize_database as z,cleanup_database as O}from"./lib/query_engine.js";import{create_message_parser as I,encode_message as y}from"./lib/tcp_protocol.js";import{create_connection_manager as R}from"./lib/connection_manager.js";import{shutdown_write_queue as C}from"./lib/write_queue.js";import{setup_authentication as E,verify_password as q,get_client_ip as N,is_rate_limited as $,initialize_auth_manager as A,reset_auth_state as B}from"./lib/auth_manager.js";import{initialize_api_key_manager as D}from"./lib/api_key_manager.js";import{is_development_mode as v,display_development_startup_message as F,warn_undefined_node_env as J}from"./lib/development_mode.js";import{restore_backup as K,start_backup_schedule as P,stop_backup_schedule as j}from"./lib/backup_manager.js";import{initialize_simple_sync_manager as G,shutdown_simple_sync_manager as M}from"./lib/simple_sync_manager.js";import{initialize_sync_receiver as H,shutdown_sync_receiver as U}from"./lib/sync_receiver.js";import{handle_database_operation as V,handle_admin_operation as W,handle_ping_operation as Y}from"./lib/operation_dispatcher.js";import{start_http_server as L,stop_http_server as Q}from"./lib/http_server.js";import{create_recovery_token as X,initialize_recovery_manager as w,reset_recovery_state as Z}from"./lib/recovery_manager.js";import{has_settings as ee}from"./lib/load_settings.js";const i=new Set;let a=null;const re=e=>e&&e.password,d=e=>({ok:0,error:e}),te=()=>({ok:1,version:"1.0.0",message:"Authentication successful"}),u=(e,r)=>{const t=y(r);e.write(t),e.end()},p=(e,r)=>{const t=y(r);e.write(t)},ne=async(e,r={})=>{if(!re(r)){const 
t=d("Authentication operation requires password to be set in data.");u(e,t);return}try{const t=N(e);if($(t)){const o=d("Too many failed attempts. Please try again later.");u(e,o);return}if(!await q(r.password,t)){const o=d("Authentication failed");u(e,o);return}i.add(e.id);const s=te();p(e,s)}catch(t){const n=d(`Authentication error: ${t.message}`);u(e,n)}},se=e=>({ok:1,password:e,message:"Authentication setup completed successfully. Save this password - it will not be shown again."}),oe=e=>({ok:0,error:`Setup error: ${e}`}),ae=async(e,r={})=>{try{const t=E(),n=se(t);p(e,n)}catch(t){const n=oe(t.message);p(e,n)}},ie=(e="")=>{if(!e)throw new Error("Must pass an op type for operation.");return x.includes(e)},ce=e=>g(e),_e=e=>{try{const r=k(e);return typeof r=="string"?g(r):r}catch{return null}},or=e=>{try{return typeof e=="string"?ce(e):Buffer.isBuffer(e)?_e(e):e}catch{return null}},f=e=>v()?!0:i.has(e.id),pe=async(e,r)=>{if(e?.restore_from)try{r.info("Startup restore requested",{backup_filename:e.restore_from});const t=await K(e.restore_from);r.info("Startup restore completed",{backup_filename:e.restore_from,duration_ms:t.duration_ms});const n={...e};delete n.restore_from,process.env.JOYSTICK_DB_SETTINGS=JSON.stringify(n),m(),r.info("Removed restore_from from settings after successful restore")}catch(t){r.error("Startup restore failed",{backup_filename:e.restore_from,error:t.message}),r.info("Continuing with fresh database after restore failure")}},de=()=>{try{return m(),l()}catch{return null}},ue=async e=>{const{tcp_port:r}=c(),t=e?.data_path||`./.joystick/data/joystickdb_${r}`;z(t),A(),await D(),w()},me=e=>{try{G(),e.info("Simple sync manager initialized")}catch(r){e.warn("Failed to initialize simple sync manager",{error:r.message})}},le=e=>{H().then(()=>{e.info("Sync receiver initialized")}).catch(r=>{e.warn("Failed to initialize sync receiver",{error:r.message})})},fe=(e,r)=>{if(e?.s3)try{P(),r.info("Backup scheduling started")}catch(t){r.warn("Failed to start 
backup scheduling",{error:t.message})}},ge=async(e,r)=>{try{const t=await L(e);return t&&r.info("HTTP server started",{http_port:e}),t}catch(t){return r.warn("Failed to start HTTP server",{error:t.message}),null}},he=()=>{if(v()){const{tcp_port:e,http_port:r}=c();F(e,r)}else J()},ye=()=>R({max_connections:1e3,idle_timeout:600*1e3,request_timeout:5*1e3}),ve=async(e,r,t,n)=>{a.update_activity(e.id);try{const s=t.parse_messages(r);for(const o of s)await we(e,o,r.length,n)}catch(s){n.error("Message parsing failed",{client_id:e.id,error:s.message}),_(e,{message:"Invalid message format"}),e.end()}},we=async(e,r,t,n)=>{const s=r,o=s?.op||null;if(!o){_(e,{message:"Missing operation type"});return}if(!ie(o)){_(e,{message:"Invalid operation type"});return}const b=a.create_request_timeout(e.id,o);try{await be(e,o,s,t)}finally{clearTimeout(b)}},be=async(e,r,t,n)=>{const s=t?.data||{};switch(r){case"authentication":await ne(e,s);break;case"setup":await ae(e,s);break;case"insert_one":case"update_one":case"delete_one":case"delete_many":case"bulk_write":case"find_one":case"find":case"count_documents":case"create_index":case"drop_index":case"get_indexes":await V(e,r,s,f,n,a,i);break;case"ping":Y(e);break;case"admin":await W(e,s,f,a,i);break;case"reload":await Se(e);break;default:_(e,{message:`Operation ${r} not implemented`})}},Se=async e=>{if(!f(e)){_(e,{message:"Authentication required"});return}try{const r=ke(),t=await xe(),n=Te(r,t);p(e,n)}catch(r){const t={ok:0,error:`Reload operation failed: ${r.message}`};p(e,t)}},ke=()=>{try{return l()}catch{return null}},xe=async()=>{try{return await m(),l()}catch{return{port:1983,authentication:{}}}},Te=(e,r)=>({ok:1,status:"success",message:"Configuration reloaded successfully",changes:{port_changed:e?e.port!==r.port:!1,authentication_changed:e?e.authentication?.password_hash!==r.authentication?.password_hash:!1},timestamp:new Date().toISOString()}),ze=(e,r)=>{r.info("Client 
disconnected",{socket_id:e.id}),i.delete(e.id),a.remove_connection(e.id)},Oe=(e,r,t)=>{t.error("Socket error",{socket_id:e.id,error:r.message}),i.delete(e.id),a.remove_connection(e.id)},Ie=(e,r,t)=>{e.on("data",async n=>{await ve(e,n,r,t)}),e.on("end",()=>{ze(e,t)}),e.on("error",n=>{Oe(e,n,t)})},Re=(e,r)=>{if(!a.add_connection(e))return;const t=I();Ie(e,t,r)},Ce=()=>async()=>{try{await Q(),j(),await M(),await U(),a&&a.shutdown(),i.clear(),await C(),await new Promise(e=>setTimeout(e,100)),await O(),B(),Z()}catch{}},ar=async()=>{const{create_context_logger:e}=h("server"),r=e(),t=de();await pe(t,r),await ue(t),me(r),le(r),fe(t,r),a=ye();const{http_port:n}=c();await ge(n,r),he();const s=S.createServer((o={})=>{Re(o,r)});return s.cleanup=Ce(),s},Ee=e=>{try{w();const r=X();console.log("Emergency Recovery Token Generated"),console.log(`Visit: ${r.url}`),console.log("Token expires in 10 minutes"),e.info("Recovery token generated via CLI",{expires_at:new Date(r.expires_at).toISOString()}),process.exit(0)}catch(r){console.error("Failed to generate recovery token:",r.message),e.error("Recovery token generation failed",{error:r.message}),process.exit(1)}},qe=()=>{const{tcp_port:e}=c();return{worker_count:process.env.WORKER_COUNT?parseInt(process.env.WORKER_COUNT):void 0,port:e,environment:process.env.NODE_ENV||"development"}},Ne=(e,r)=>{const{tcp_port:t,http_port:n}=c(),s=ee();r.info("Starting JoystickDB server...",{workers:e.worker_count||"auto",tcp_port:t,http_port:n,environment:e.environment,has_settings:s,port_source:s?"JOYSTICK_DB_SETTINGS":"default"})};if(import.meta.url===`file://${process.argv[1]}`){const{create_context_logger:e}=h("main"),r=e();process.argv.includes("--generate-recovery-token")&&Ee(r);const t=qe();Ne(t,r),T(t)}export{ne as authentication,ie as check_op_type,ar as create_server,or as parse_data,ae as setup};
@@ -1 +1 @@
1
- import{get_database as I}from"./query_engine.js";import{create_index as M,get_indexes as q,drop_index as w}from"./index_manager.js";import{get_settings as S}from"./load_settings.js";import A from"./logger.js";const{create_context_logger:_}=A("auto_index_manager");let u=null,d=new Map,c=new Map,m=null;const k=()=>(u||(u=I().openDB("auto_indexes",{create:!0}),d.clear(),c.clear(),N(),C(),O()),u),g=()=>{if(!u)throw new Error("Auto index database not initialized. Call initialize_auto_index_database first.");return u},f=()=>{try{return S().auto_indexing||{enabled:!0,frequency_threshold:100,performance_threshold_ms:50,max_auto_indexes_per_collection:10,monitoring_window_hours:24,cleanup_unused_after_hours:168,excluded_fields:["_id","created_at"],included_collections:["*"],excluded_collections:[]}}catch{return{enabled:!0,frequency_threshold:100,performance_threshold_ms:50,max_auto_indexes_per_collection:10,monitoring_window_hours:24,cleanup_unused_after_hours:168,excluded_fields:["_id","created_at"],included_collections:["*"],excluded_collections:[]}}},z=e=>{const o=f();return o.excluded_collections.includes(e)?!1:o.included_collections.includes("*")?!0:o.included_collections.includes(e)},Q=e=>!f().excluded_fields.includes(e),R=e=>{const o=[];if(!e||typeof e!="object")return o;for(const[t,a]of Object.entries(e))Q(t)&&o.push(t);return o},E=(e,o,t,a=!1,n=null)=>{const r=_(),s=f();if(!s.enabled||!z(e)||!u)return;const i=R(o),x=new Date;d.has(e)||d.set(e,new Map);const y=d.get(e);for(const h of i){y.has(h)||y.set(h,{query_count:0,total_time_ms:0,avg_time_ms:0,last_queried:x,slow_query_count:0,used_index_count:0});const l=y.get(h);l.query_count++,l.total_time_ms+=t,l.avg_time_ms=l.total_time_ms/l.query_count,l.last_queried=x,t>s.performance_threshold_ms&&l.slow_query_count++,a&&(n===h||n===null)&&l.used_index_count++}r.debug("Query recorded for auto-indexing analysis",{collection:e,fields:i,execution_time_ms:t,used_index:a,indexed_field:n})},B=()=>{const e=_();try{const 
o=g(),t={};for(const[a,n]of d.entries()){t[a]={};for(const[r,s]of n.entries())t[a][r]={...s,last_queried:s.last_queried.toISOString()}}o.put("query_stats",t),e.debug("Query statistics saved to database")}catch(o){e.error("Failed to save query statistics",{error:o.message})}},C=()=>{const e=_();try{const t=g().get("query_stats");if(t){d.clear();for(const[a,n]of Object.entries(t)){const r=new Map;for(const[s,i]of Object.entries(n))r.set(s,{...i,last_queried:new Date(i.last_queried)});d.set(a,r)}e.debug("Query statistics loaded from database")}}catch(o){e.error("Failed to load query statistics",{error:o.message})}},p=()=>{const e=_();try{const o=g(),t={};for(const[a,n]of c.entries()){t[a]={};for(const[r,s]of n.entries())t[a][r]={...s,created_at:s.created_at.toISOString(),last_used:s.last_used?s.last_used.toISOString():null}}o.put("auto_index_metadata",t),e.debug("Auto index metadata saved to database")}catch(o){e.error("Failed to save auto index metadata",{error:o.message})}},N=()=>{const e=_();try{const t=g().get("auto_index_metadata");if(t){c.clear();for(const[a,n]of Object.entries(t)){const r=new Map;for(const[s,i]of Object.entries(n))r.set(s,{...i,created_at:new Date(i.created_at),last_used:i.last_used?new Date(i.last_used):null});c.set(a,r)}e.debug("Auto index metadata loaded from database")}}catch(o){e.error("Failed to load auto index metadata",{error:o.message})}},v=(e,o)=>{try{const t=c.get(e);return!!(t&&t.has(o))}catch{return!1}},P=(e,o)=>q("default",e).filter(n=>v(e,n.field)).length>=o.max_auto_indexes_per_collection,T=(e,o)=>new Date-e.last_queried<=o,G=(e,o)=>o.some(t=>t.field===e),H=(e,o)=>{const t=e.query_count>=o.frequency_threshold,a=e.avg_time_ms>=o.performance_threshold_ms,n=e.slow_query_count>0;return t||a&&n},J=(e,o)=>e.slow_query_count*2+e.query_count/o.frequency_threshold,K=(e,o,t,a)=>({collection:e,field:o,stats:{...t},priority:J(t,a)}),b=()=>{const e=f(),o=[],t=e.monitoring_window_hours*60*60*1e3;for(const[a,n]of 
d.entries()){if(P(a,e))continue;const r=q("default",a);for(const[s,i]of n.entries())T(i,t)&&(G(s,r)||H(i,e)&&o.push(K(a,s,i,e)))}return o.sort((a,n)=>n.priority-a.priority)},F=async(e,o,t)=>{const a=_();try{return await M("default",e,o,{sparse:!0}),c.has(e)||c.set(e,new Map),c.get(e).set(o,{created_at:new Date,query_count_at_creation:t.query_count,avg_performance_improvement_ms:0,last_used:null,usage_count:0,auto_created:!0}),p(),a.info("Automatic index created",{collection:e,field:o,query_count:t.query_count,avg_time_ms:t.avg_time_ms,slow_query_count:t.slow_query_count}),!0}catch(n){return a.error("Failed to create automatic index",{collection:e,field:o,error:n.message}),!1}},D=async()=>{const e=_();if(f().enabled)try{const t=b();if(t.length===0){e.debug("No automatic index candidates found");return}e.info("Evaluating automatic index candidates",{candidate_count:t.length});for(const a of t.slice(0,5))await F(a.collection,a.field,a.stats)&&await new Promise(r=>setTimeout(r,100))}catch(t){e.error("Failed to evaluate automatic indexes",{error:t.message})}},L=async()=>{const e=_(),o=f(),t=new Date,a=o.cleanup_unused_after_hours*60*60*1e3;try{for(const[n,r]of c.entries())for(const[s,i]of r.entries())i.last_used?t-i.last_used>a&&(await w("default",n,s),r.delete(s),e.info("Removed unused automatic index",{collection:n,field:s,last_used:i.last_used,usage_count:i.usage_count})):t-i.created_at>a&&(await w("default",n,s),r.delete(s),e.info("Removed unused automatic index",{collection:n,field:s,created_at:i.created_at,usage_count:i.usage_count}));p()}catch(n){e.error("Failed to cleanup unused indexes",{error:n.message})}},U=(e,o)=>{const t=c.get(e);if(t&&t.has(o)){const a=t.get(o);a.last_used=new Date,a.usage_count++}},O=()=>{const e=f();m&&clearInterval(m),e.enabled&&(m=setInterval(async()=>{B(),await D(),await L()},6e4))},j=()=>{m&&(clearInterval(m),m=null)},V=(e=null)=>{if(e){const t=d.get(e);if(!t)return{};const a={};for(const[n,r]of t.entries())a[n]={...r};return a}const 
o={};for(const[t,a]of d.entries()){o[t]={};for(const[n,r]of a.entries())o[t][n]={...r}}return o},W=()=>{const e={total_auto_indexes:0,collections:{}};for(const[o,t]of c.entries()){e.collections[o]={};for(const[a,n]of t.entries())e.total_auto_indexes++,e.collections[o][a]={...n}}return e},X=async(e=null)=>{const o=_();try{if(e){const t=b().filter(a=>a.collection===e);for(const a of t)await F(a.collection,a.field,a.stats);o.info("Forced index evaluation completed",{collection:e,candidates_processed:t.length})}else await D(),o.info("Forced index evaluation completed for all collections");return{acknowledged:!0}}catch(t){throw o.error("Failed to force index evaluation",{error:t.message}),t}},Y=async(e,o=null)=>{const t=_();try{const a=c.get(e);if(!a)return{acknowledged:!0,removed_count:0};const n=o||Array.from(a.keys());let r=0;for(const s of n)a.has(s)&&(await w("default",e,s),a.delete(s),r++,t.info("Removed automatic index",{collection:e,field:s}));return p(),{acknowledged:!0,removed_count:r}}catch(a){throw t.error("Failed to remove automatic indexes",{collection:e,field_names:o,error:a.message}),a}},Z=()=>{if(j(),d.clear(),c.clear(),u){try{u.remove("query_stats"),u.remove("auto_index_metadata")}catch{}u=null}};export{Z as cleanup_auto_index_database,X as force_index_evaluation,g as get_auto_index_database,W as get_auto_index_statistics,V as get_query_statistics,k as initialize_auto_index_database,v as is_auto_created_index,U as record_index_usage,E as record_query,Y as remove_automatic_indexes,O as start_evaluation_timer,j as stop_evaluation_timer};
1
+ import{get_database as I}from"./query_engine.js";import{create_index as M,get_indexes as q,drop_index as w}from"./index_manager.js";import{get_settings as S}from"./load_settings.js";import A from"./logger.js";const{create_context_logger:_}=A("auto_index_manager");let u=null,d=new Map,c=new Map,m=null;const k=()=>(u||(u=I().openDB("auto_indexes",{create:!0}),d.clear(),c.clear(),N(),C(),O()),u),g=()=>{if(!u)throw new Error("Auto index database not initialized. Call initialize_auto_index_database first.");return u},f=()=>{try{return S().auto_indexing||{enabled:!0,frequency_threshold:100,performance_threshold_ms:50,max_auto_indexes_per_collection:10,monitoring_window_hours:24,cleanup_unused_after_hours:168,excluded_fields:["_id","created_at"],included_collections:["*"],excluded_collections:[]}}catch{return{enabled:!0,frequency_threshold:100,performance_threshold_ms:50,max_auto_indexes_per_collection:10,monitoring_window_hours:24,cleanup_unused_after_hours:168,excluded_fields:["_id","created_at"],included_collections:["*"],excluded_collections:[]}}},z=e=>{const o=f();return o.excluded_collections.includes(e)?!1:o.included_collections.includes("*")?!0:o.included_collections.includes(e)},Q=e=>!f().excluded_fields.includes(e),R=e=>{const o=[];if(!e||typeof e!="object")return o;try{for(const[t,a]of Object.entries(e))Q(t)&&o.push(t)}catch{return[]}return o},E=(e,o,t,a=!1,n=null)=>{const r=_(),s=f();if(!s.enabled||!z(e)||!u)return;const i=R(o),x=new Date;d.has(e)||d.set(e,new Map);const y=d.get(e);for(const h of i){y.has(h)||y.set(h,{query_count:0,total_time_ms:0,avg_time_ms:0,last_queried:x,slow_query_count:0,used_index_count:0});const l=y.get(h);l.query_count++,l.total_time_ms+=t,l.avg_time_ms=l.total_time_ms/l.query_count,l.last_queried=x,t>s.performance_threshold_ms&&l.slow_query_count++,a&&(n===h||n===null)&&l.used_index_count++}r.debug("Query recorded for auto-indexing analysis",{collection:e,fields:i,execution_time_ms:t,used_index:a,indexed_field:n})},B=()=>{const 
e=_();try{const o=g(),t={};for(const[a,n]of d.entries()){t[a]={};for(const[r,s]of n.entries())t[a][r]={...s,last_queried:s.last_queried.toISOString()}}o.put("query_stats",t),e.debug("Query statistics saved to database")}catch(o){e.error("Failed to save query statistics",{error:o.message})}},C=()=>{const e=_();try{const t=g().get("query_stats");if(t){d.clear();for(const[a,n]of Object.entries(t)){const r=new Map;for(const[s,i]of Object.entries(n))r.set(s,{...i,last_queried:new Date(i.last_queried)});d.set(a,r)}e.debug("Query statistics loaded from database")}}catch(o){e.error("Failed to load query statistics",{error:o.message})}},p=()=>{const e=_();try{const o=g(),t={};for(const[a,n]of c.entries()){t[a]={};for(const[r,s]of n.entries())t[a][r]={...s,created_at:s.created_at.toISOString(),last_used:s.last_used?s.last_used.toISOString():null}}o.put("auto_index_metadata",t),e.debug("Auto index metadata saved to database")}catch(o){e.error("Failed to save auto index metadata",{error:o.message})}},N=()=>{const e=_();try{const t=g().get("auto_index_metadata");if(t){c.clear();for(const[a,n]of Object.entries(t)){const r=new Map;for(const[s,i]of Object.entries(n))r.set(s,{...i,created_at:new Date(i.created_at),last_used:i.last_used?new Date(i.last_used):null});c.set(a,r)}e.debug("Auto index metadata loaded from database")}}catch(o){e.error("Failed to load auto index metadata",{error:o.message})}},v=(e,o)=>{try{const t=c.get(e);return!!(t&&t.has(o))}catch{return!1}},P=(e,o)=>q("default",e).filter(n=>v(e,n.field)).length>=o.max_auto_indexes_per_collection,T=(e,o)=>new Date-e.last_queried<=o,G=(e,o)=>o.some(t=>t.field===e),H=(e,o)=>{const t=e.query_count>=o.frequency_threshold,a=e.avg_time_ms>=o.performance_threshold_ms,n=e.slow_query_count>0;return t||a&&n},J=(e,o)=>e.slow_query_count*2+e.query_count/o.frequency_threshold,K=(e,o,t,a)=>({collection:e,field:o,stats:{...t},priority:J(t,a)}),b=()=>{const e=f(),o=[],t=e.monitoring_window_hours*60*60*1e3;for(const[a,n]of 
d.entries()){if(P(a,e))continue;const r=q("default",a);for(const[s,i]of n.entries())T(i,t)&&(G(s,r)||H(i,e)&&o.push(K(a,s,i,e)))}return o.sort((a,n)=>n.priority-a.priority)},F=async(e,o,t)=>{const a=_();try{return await M("default",e,o,{sparse:!0}),c.has(e)||c.set(e,new Map),c.get(e).set(o,{created_at:new Date,query_count_at_creation:t.query_count,avg_performance_improvement_ms:0,last_used:null,usage_count:0,auto_created:!0}),p(),a.info("Automatic index created",{collection:e,field:o,query_count:t.query_count,avg_time_ms:t.avg_time_ms,slow_query_count:t.slow_query_count}),!0}catch(n){return a.error("Failed to create automatic index",{collection:e,field:o,error:n.message}),!1}},D=async()=>{const e=_();if(f().enabled)try{const t=b();if(t.length===0){e.debug("No automatic index candidates found");return}e.info("Evaluating automatic index candidates",{candidate_count:t.length});for(const a of t.slice(0,5))await F(a.collection,a.field,a.stats)&&await new Promise(r=>setTimeout(r,100))}catch(t){e.error("Failed to evaluate automatic indexes",{error:t.message})}},L=async()=>{const e=_(),o=f(),t=new Date,a=o.cleanup_unused_after_hours*60*60*1e3;try{for(const[n,r]of c.entries())for(const[s,i]of r.entries())i.last_used?t-i.last_used>a&&(await w("default",n,s),r.delete(s),e.info("Removed unused automatic index",{collection:n,field:s,last_used:i.last_used,usage_count:i.usage_count})):t-i.created_at>a&&(await w("default",n,s),r.delete(s),e.info("Removed unused automatic index",{collection:n,field:s,created_at:i.created_at,usage_count:i.usage_count}));p()}catch(n){e.error("Failed to cleanup unused indexes",{error:n.message})}},U=(e,o)=>{const t=c.get(e);if(t&&t.has(o)){const a=t.get(o);a.last_used=new Date,a.usage_count++}},O=()=>{const e=f();m&&clearInterval(m),e.enabled&&(m=setInterval(async()=>{B(),await D(),await L()},6e4))},j=()=>{m&&(clearInterval(m),m=null)},V=(e=null)=>{if(e){const t=d.get(e);if(!t)return{};const a={};for(const[n,r]of t.entries())a[n]={...r};return a}const 
o={};for(const[t,a]of d.entries()){o[t]={};for(const[n,r]of a.entries())o[t][n]={...r}}return o},W=()=>{const e={total_auto_indexes:0,collections:{}};for(const[o,t]of c.entries()){e.collections[o]={};for(const[a,n]of t.entries())e.total_auto_indexes++,e.collections[o][a]={...n}}return e},X=async(e=null)=>{const o=_();try{if(e){const t=b().filter(a=>a.collection===e);for(const a of t)await F(a.collection,a.field,a.stats);o.info("Forced index evaluation completed",{collection:e,candidates_processed:t.length})}else await D(),o.info("Forced index evaluation completed for all collections");return{acknowledged:!0}}catch(t){throw o.error("Failed to force index evaluation",{error:t.message}),t}},Y=async(e,o=null)=>{const t=_();try{const a=c.get(e);if(!a)return{acknowledged:!0,removed_count:0};const n=o||Array.from(a.keys());let r=0;for(const s of n)a.has(s)&&(await w("default",e,s),a.delete(s),r++,t.info("Removed automatic index",{collection:e,field:s}));return p(),{acknowledged:!0,removed_count:r}}catch(a){throw t.error("Failed to remove automatic indexes",{collection:e,field_names:o,error:a.message}),a}},Z=()=>{if(j(),d.clear(),c.clear(),u){try{u.remove("query_stats"),u.remove("auto_index_metadata")}catch{}u=null}};export{Z as cleanup_auto_index_database,X as force_index_evaluation,g as get_auto_index_database,W as get_auto_index_statistics,V as get_query_statistics,k as initialize_auto_index_database,v as is_auto_created_index,U as record_index_usage,E as record_query,Y as remove_automatic_indexes,O as start_evaluation_timer,j as stop_evaluation_timer};
@@ -1 +1 @@
1
- import{get_database as f,build_collection_key as b,check_and_grow_map_size as v}from"./query_engine.js";import{get_write_queue as B}from"./write_queue.js";import"./auto_index_manager.js";import R from"./logger.js";const{create_context_logger:w}=R("bulk_insert_optimizer"),C=100*1024*1024,U=1e3,D=1e4,I=(e,r=100)=>{const t=e.slice(0,Math.min(r,e.length)),n=t.reduce((o,s)=>o+Buffer.byteLength(JSON.stringify(s),"utf8"),0);return Math.ceil(n/t.length)},P=(e,r)=>{const t=e*r,n=2,o=1024*1024*1024*10;return Math.max(t*n,o)},J=async e=>{const r=w();if(e.length===0)return;const t=I(e),n=P(e.length,t);r.info("Pre-allocating map size for bulk insert",{document_count:e.length,avg_document_size:t,required_map_size:n,required_map_size_gb:Math.round(n/(1024*1024*1024)*100)/100}),await v();const o=f();if(o.resize)try{o.resize(n),r.info("Map size pre-allocated successfully",{new_map_size:n,new_map_size_gb:Math.round(n/(1024*1024*1024)*100)/100})}catch(s){r.warn("Failed to pre-allocate map size",{error:s.message})}},O=(e,r=C)=>{const t=[];let n=[],o=0;for(const s of e){const i=Buffer.byteLength(JSON.stringify(s),"utf8");o+i>r&&n.length>0?(t.push(n),n=[s],o=i):(n.push(s),o+=i)}return n.length>0&&t.push(n),t},T=(()=>{let e=Date.now()*1e3;return()=>(++e).toString(36).padStart(12,"0")})(),A=(e,r,t)=>e.map(n=>({...n,_id:n._id||T()})).sort((n,o)=>{const s=b(r,t,n._id),i=b(r,t,o._id);return s.localeCompare(i)}),y=(e,r,t)=>{const n=new Date().toISOString();return e.map(o=>{const s=o._id||T(),i={...o,_id:s,_created_at:o._created_at||n,_updated_at:o._updated_at||n},l=JSON.stringify(i);return{key:b(r,t,s),value:l,document_id:s}})},E=async(e,r)=>{const t=[];return await e.transaction(()=>{for(const{key:n,value:o,document_id:s}of r){if(e.get(n))throw new Error(`Document with _id ${s} already exists`);e.put(n,o),t.push(s)}}),t},F=async function*(e,r,t,n=U){const o=f();for(let s=0;s<e.length;s+=n){const i=e.slice(s,s+n),l=y(i,r,t);yield await E(o,l),i.length=0,l.length=0;const 
c=Math.floor(s/n);e.length>=5e6?(c%5===0&&global.gc&&(global.gc(),await new Promise(a=>setTimeout(a,100))),await new Promise(a=>setImmediate(a))):e.length>=1e6?(c%8===0&&global.gc&&(global.gc(),await new Promise(a=>setTimeout(a,75))),await new Promise(a=>setImmediate(a))):e.length>1e5?(c%25===0&&global.gc&&(global.gc(),await new Promise(a=>setTimeout(a,25))),await new Promise(a=>setImmediate(a))):c%10===0&&await new Promise(a=>setImmediate(a))}},G=()=>!1,Z=e=>{},j=async(e,r)=>{w().debug("Index rebuilding skipped (not implemented)",{database:e,collection:r})},p=async(e,r,t,n={})=>{const{disable_indexing:o=!0,pre_allocate_map_size:s=!0,sort_keys:i=!0,stream_processing:l=!0,batch_size:_=U}=n,c=w(),a=Date.now(),h=process.memoryUsage();if(!e||!r)throw new Error("Database name and collection name are required");if(!Array.isArray(t)||t.length===0)throw new Error("Documents must be a non-empty array");c.info("Starting optimized bulk insert",{database:e,collection:r,document_count:t.length,options:n});let k=!1;try{s&&await J(t),o&&(k=G());let u=t;i&&(u=A(t,e,r));const m=[];let d=0;if(l)for await(const g of F(u,e,r,_))m.push(...g),d+=g.length,d%D===0&&c.info("Bulk insert progress",{processed:d,total:t.length,percentage:Math.round(d/t.length*100)});else{const g=O(u),q=f();for(const L of g){const N=y(L,e,r),S=await E(q,N);m.push(...S),d+=S.length,d%D===0&&c.info("Bulk insert progress",{processed:d,total:t.length,percentage:Math.round(d/t.length*100)})}}o&&await j(e,r);const z=Date.now(),M=process.memoryUsage(),x={duration_ms:z-a,documents_per_second:Math.round(t.length/((z-a)/1e3)),memory_delta_mb:Math.round((M.heapUsed-h.heapUsed)/(1024*1024)),peak_memory_mb:Math.round(M.heapUsed/(1024*1024))};return c.info("Optimized bulk insert completed",{database:e,collection:r,inserted_count:m.length,performance:x}),{acknowledged:!0,inserted_count:m.length,inserted_ids:m,performance:x}}catch(u){throw c.error("Optimized bulk insert 
failed",{database:e,collection:r,error:u.message}),u}finally{o&&Z(k)}},H=async(e,r,t,n={})=>{const{chunk_size:o=1e4}=n,s={acknowledged:!0,inserted_count:0,inserted_ids:[],performance:{duration_ms:0,documents_per_second:0,memory_delta_mb:0,peak_memory_mb:0}},i=Date.now();for(let _=0;_<t.length;_+=o){const c=t.slice(_,_+o),a=await p(e,r,c,n);s.inserted_count+=a.inserted_count,s.inserted_ids.push(...a.inserted_ids),await new Promise(h=>setImmediate(h))}const l=Date.now();return s.performance.duration_ms=l-i,s.performance.documents_per_second=Math.round(t.length/((l-i)/1e3)),s},V=async(e,r,t,n={})=>{const o=Date.now(),s=process.memoryUsage(),i=await p(e,r,t,n),l=Date.now(),_=process.memoryUsage();return{...i,performance:{...i.performance,total_duration_ms:l-o,memory_usage:{start_heap_mb:Math.round(s.heapUsed/(1024*1024)),end_heap_mb:Math.round(_.heapUsed/(1024*1024)),delta_heap_mb:Math.round((_.heapUsed-s.heapUsed)/(1024*1024)),peak_heap_mb:Math.round(_.heapUsed/(1024*1024))}}}},$=async(e,r,t,n={})=>{const o=B(),s={operation:"bulk_insert_optimized",database:e,collection:r,document_count:t.length};return await o.enqueue_write_operation(()=>p(e,r,t,n),s)};export{$ as bulk_insert,p as bulk_insert_optimized,V as bulk_insert_with_metrics,I as calculate_average_document_size,P as calculate_bulk_map_size,O as create_size_based_batches,H as non_blocking_bulk_insert,y as pre_encode_documents,A as sort_documents_by_key};
1
+ import{get_database as f,build_collection_key as w,check_and_grow_map_size as v}from"./query_engine.js";import{get_write_queue as B}from"./write_queue.js";import"./auto_index_manager.js";import R from"./logger.js";const{create_context_logger:y}=R("bulk_insert_optimizer"),J=100*1024*1024,D=1e3,I=1e4,O=(e,n=100)=>{const t=e.slice(0,Math.min(n,e.length)),r=t.reduce((o,s)=>o+Buffer.byteLength(JSON.stringify(s),"utf8"),0);return Math.ceil(r/t.length)},P=(e,n)=>{const t=e*n,r=2,o=1024*1024*1024*10;return Math.max(t*r,o)},C=async e=>{const n=y();if(e.length===0)return;const t=O(e),r=P(e.length,t);n.info("Pre-allocating map size for bulk insert",{document_count:e.length,avg_document_size:t,required_map_size:r,required_map_size_gb:Math.round(r/(1024*1024*1024)*100)/100}),await v();const o=f();if(o.resize)try{o.resize(r),n.info("Map size pre-allocated successfully",{new_map_size:r,new_map_size_gb:Math.round(r/(1024*1024*1024)*100)/100})}catch(s){n.warn("Failed to pre-allocate map size",{error:s.message})}},E=(e,n=J)=>{const t=[];let r=[],o=0;for(const s of e){const i=Buffer.byteLength(JSON.stringify(s),"utf8");o+i>n&&r.length>0?(t.push(r),r=[s],o=i):(r.push(s),o+=i)}return r.length>0&&t.push(r),t},k=(()=>{let e=Date.now()*1e3;return()=>(++e).toString(36).padStart(12,"0")})(),T=(e,n,t)=>e.map(r=>({...r,_id:r._id||k()})).sort((r,o)=>{const s=w(n,t,r._id),i=w(n,t,o._id);return s.localeCompare(i)}),z=(e,n,t)=>{const r=new Date().toISOString();return e.map(o=>{const s=o._id||k(),i={...o,_id:s,_created_at:o._created_at||r,_updated_at:o._updated_at||r},c=JSON.stringify(i);return{key:w(n,t,s),value:c,document_id:s}})},A=async(e,n)=>{const t=[];return await e.transaction(()=>{for(const{key:r,value:o,document_id:s}of n){if(e.get(r))throw new Error(`Document with _id ${s} already exists`);e.put(r,o),t.push(s)}}),t},F=async function*(e,n,t,r=D){const o=f();for(let s=0;s<e.length;s+=r){const i=e.slice(s,s+r),c=z(i,n,t);yield await A(o,c),i.length=0,c.length=0;const 
_=Math.floor(s/r);e.length>=5e6?(_%5===0&&global.gc&&(global.gc(),await new Promise(a=>setTimeout(a,100))),await new Promise(a=>setImmediate(a))):e.length>=1e6?(_%8===0&&global.gc&&(global.gc(),await new Promise(a=>setTimeout(a,75))),await new Promise(a=>setImmediate(a))):e.length>1e5?(_%25===0&&global.gc&&(global.gc(),await new Promise(a=>setTimeout(a,25))),await new Promise(a=>setImmediate(a))):_%10===0&&await new Promise(a=>setImmediate(a))}},G=()=>!1,Z=e=>{},$=async(e,n)=>{y().debug("Index rebuilding skipped (not implemented)",{database:e,collection:n})},j=async(e,n,t)=>{const r=f(),o=new Date().toISOString(),s=[];return await r.transaction(()=>{for(const i of t){const c=i._id||k(),d={...i,_id:c,_created_at:i._created_at||o,_updated_at:i._updated_at||o},_=w(e,n,c);if(r.get(_))throw new Error(`Document with _id ${c} already exists`);r.put(_,JSON.stringify(d)),s.push(c)}}),s},b=async(e,n,t,r={})=>{const{disable_indexing:o=!0,pre_allocate_map_size:s=!0,sort_keys:i=!0,stream_processing:c=!0,batch_size:d=D}=r,_=y(),a=Date.now(),p=process.memoryUsage();if(!e||!n)throw new Error("Database name and collection name are required");if(!Array.isArray(t)||t.length===0)throw new Error("Documents must be a non-empty array");if(t.length<5e3){_.debug("Using fast path for small dataset",{database:e,collection:n,document_count:t.length});try{const l=await j(e,n,t),m=Date.now(),u=process.memoryUsage(),g={duration_ms:m-a,documents_per_second:Math.round(t.length/((m-a)/1e3)),memory_delta_mb:Math.round((u.heapUsed-p.heapUsed)/(1024*1024)),peak_memory_mb:Math.round(u.heapUsed/(1024*1024))};return{acknowledged:!0,inserted_count:l.length,inserted_ids:l,performance:g}}catch(l){throw _.error("Fast path bulk insert failed",{database:e,collection:n,error:l.message}),l}}_.info("Starting optimized bulk insert",{database:e,collection:n,document_count:t.length,options:r});let M=!1;try{s&&await C(t),o&&(M=G());let l=t;i&&(l=T(t,e,n));const m=[];let u=0;if(c)for await(const h of 
F(l,e,n,d))m.push(...h),u+=h.length,u%I===0&&_.info("Bulk insert progress",{processed:u,total:t.length,percentage:Math.round(u/t.length*100)});else{const h=E(l),q=f();for(const N of h){const L=z(N,e,n),S=await A(q,L);m.push(...S),u+=S.length,u%I===0&&_.info("Bulk insert progress",{processed:u,total:t.length,percentage:Math.round(u/t.length*100)})}}o&&await $(e,n);const g=Date.now(),U=process.memoryUsage(),x={duration_ms:g-a,documents_per_second:Math.round(t.length/((g-a)/1e3)),memory_delta_mb:Math.round((U.heapUsed-p.heapUsed)/(1024*1024)),peak_memory_mb:Math.round(U.heapUsed/(1024*1024))};return _.info("Optimized bulk insert completed",{database:e,collection:n,inserted_count:m.length,performance:x}),{acknowledged:!0,inserted_count:m.length,inserted_ids:m,performance:x}}catch(l){throw _.error("Optimized bulk insert failed",{database:e,collection:n,error:l.message}),l}finally{o&&Z(M)}},H=async(e,n,t,r={})=>{const{chunk_size:o=1e4}=r,s={acknowledged:!0,inserted_count:0,inserted_ids:[],performance:{duration_ms:0,documents_per_second:0,memory_delta_mb:0,peak_memory_mb:0}},i=Date.now();for(let d=0;d<t.length;d+=o){const _=t.slice(d,d+o),a=await b(e,n,_,r);s.inserted_count+=a.inserted_count,s.inserted_ids.push(...a.inserted_ids),await new Promise(p=>setImmediate(p))}const c=Date.now();return s.performance.duration_ms=c-i,s.performance.documents_per_second=Math.round(t.length/((c-i)/1e3)),s},V=async(e,n,t,r={})=>{const o=Date.now(),s=process.memoryUsage(),i=await b(e,n,t,r),c=Date.now(),d=process.memoryUsage();return{...i,performance:{...i.performance,total_duration_ms:c-o,memory_usage:{start_heap_mb:Math.round(s.heapUsed/(1024*1024)),end_heap_mb:Math.round(d.heapUsed/(1024*1024)),delta_heap_mb:Math.round((d.heapUsed-s.heapUsed)/(1024*1024)),peak_heap_mb:Math.round(d.heapUsed/(1024*1024))}}}},K=async(e,n,t,r={})=>{const o=B(),s={operation:"bulk_insert_optimized",database:e,collection:n,document_count:t.length};return await 
o.enqueue_write_operation(()=>b(e,n,t,r),s)};export{K as bulk_insert,b as bulk_insert_optimized,V as bulk_insert_with_metrics,O as calculate_average_document_size,P as calculate_bulk_map_size,E as create_size_based_batches,H as non_blocking_bulk_insert,z as pre_encode_documents,T as sort_documents_by_key};
@@ -1,4 +1,4 @@
1
- import _ from"http";import{URL as x}from"url";import k from"crypto";import T from"./logger.js";import{setup_authentication as P,get_auth_stats as A}from"./auth_manager.js";import{is_token_valid as C,record_failed_recovery_attempt as S,change_password as I}from"./recovery_manager.js";import{validate_api_key as D,create_user as E,get_all_users as H,update_user as O,delete_user as R}from"./api_key_manager.js";import{is_development_mode as l}from"./development_mode.js";const{create_context_logger:B}=T("http_server"),a=B();let u=null,c=null,m=!1,h=new Map;const U=60*1e3,J=10,Y=()=>k.randomUUID(),g=()=>!A().configured,$=t=>{const e=Date.now(),r=(h.get(t)||[]).filter(n=>e-n<U);return h.set(t,r),r.length>=J},j=t=>{const e=Date.now(),o=h.get(t)||[];o.push(e),h.set(t,o)},N=(t,e=null)=>`<!DOCTYPE html>
1
+ import _ from"http";import{URL as x}from"url";import k from"crypto";import T from"./logger.js";import{setup_authentication as P,get_auth_stats as A}from"./auth_manager.js";import{is_token_valid as C,record_failed_recovery_attempt as S,change_password as I}from"./recovery_manager.js";import{validate_api_key as D,create_user as E,get_all_users as H,update_user as O,delete_user as R}from"./api_key_manager.js";import{is_development_mode as l}from"./development_mode.js";const{create_context_logger:B}=T("http_server"),a=B();let u=null,c=null,m=!1,h=new Map;const U=60*1e3,J=10,Y=()=>k.randomUUID(),g=()=>!A().configured,$=t=>{const e=Date.now(),r=(h.get(t)||[]).filter(n=>e-n<U);return h.set(t,r),r.length>=J},N=t=>{const e=Date.now(),o=h.get(t)||[];o.push(e),h.set(t,o)},j=(t,e=null)=>`<!DOCTYPE html>
2
2
  <html>
3
3
  <head>
4
4
  <title>JoystickDB Setup</title>
@@ -395,11 +395,11 @@ await client.ping();
395
395
  <p><strong>Your JoystickDB server is ready with the new password!</strong></p>
396
396
  </div>
397
397
  </body>
398
- </html>`,G=t=>new Promise((e,o)=>{let r="";t.on("data",n=>{r+=n.toString()}),t.on("end",()=>{try{const n=new URLSearchParams(r),s={};for(const[d,f]of n)s[d]=f;e(s)}catch(n){o(n)}}),t.on("error",n=>{o(n)})}),v=t=>new Promise((e,o)=>{let r="";t.on("data",n=>{r+=n.toString()}),t.on("end",()=>{try{const n=JSON.parse(r);e(n)}catch(n){o(n)}}),t.on("error",n=>{o(n)})}),w=t=>{if(l())return!0;const e=t.headers["x-joystick-db-api-key"];return D(e)},i=(t,e,o)=>{t.writeHead(e,{"Content-Type":"application/json"}),t.end(JSON.stringify(o))},z=async(t,e)=>{const o=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const r=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for user creation",{client_ip:o,development_mode:l()}),i(e,403,{error:r});return}try{const r=await v(t),n=await E(r);a.info("User created via API",{username:n.username,client_ip:o}),i(e,201,{ok:1,user:n})}catch(r){a.error("User creation failed via API",{client_ip:o,error:r.message}),i(e,400,{error:r.message})}},F=async(t,e)=>{const o=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const r=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for get users",{client_ip:o,development_mode:l()}),i(e,403,{error:r});return}try{const r=H();a.info("Users retrieved via API",{count:r.length,client_ip:o}),i(e,200,{ok:1,users:r})}catch(r){a.error("Get users failed via API",{client_ip:o,error:r.message}),i(e,500,{error:r.message})}},K=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const n=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. 
A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for user update",{client_ip:r,username:o,development_mode:l()}),i(e,403,{error:n});return}try{const n=await v(t),s=await O(o,n);a.info("User updated via API",{username:o,client_ip:r}),i(e,200,{ok:1,user:s})}catch(n){a.error("User update failed via API",{client_ip:r,username:o,error:n.message});const s=n.message==="User not found"?404:400;i(e,s,{error:n.message})}},L=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const n=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for user deletion",{client_ip:r,username:o,development_mode:l()}),i(e,403,{error:n});return}try{R(o),a.info("User deleted via API",{username:o,client_ip:r}),i(e,200,{ok:1,message:"User deleted successfully"})}catch(n){a.error("User deletion failed via API",{client_ip:r,username:o,error:n.message});const s=n.message==="User not found"?404:400;i(e,s,{error:n.message})}},V=async(t,e,o)=>{if(t.method==="POST"&&o.length===0){await z(t,e);return}if(t.method==="GET"&&o.length===0){await F(t,e);return}if(t.method==="PUT"&&o.length===1){const r=o[0];await K(t,e,r);return}if(t.method==="DELETE"&&o.length===1){const r=o[0];await L(t,e,r);return}i(e,405,{error:"Method not allowed"})},X=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if($(r)){e.writeHead(429,{"Content-Type":"text/html"}),e.end(p("Too many setup attempts. 
Please try again later."));return}if(j(r),!g()){e.writeHead(400,{"Content-Type":"text/html"}),e.end(p("Setup has already been completed."));return}if(!c||o!==c){a.warn("Invalid setup token attempt",{client_ip:r,provided_token:o}),e.writeHead(403,{"Content-Type":"text/html"}),e.end(p("Invalid or missing setup token."));return}if(t.method==="GET"){e.writeHead(200,{"Content-Type":"text/html"}),e.end(N(c));return}if(t.method==="POST"){try{const n=P();m=!0,c=null,a.info("Setup completed successfully via HTTP interface",{client_ip:r}),e.writeHead(200,{"Content-Type":"text/html"}),e.end(W(n))}catch(n){a.error("Setup failed via HTTP interface",{client_ip:r,error:n.message}),e.writeHead(500,{"Content-Type":"text/html"}),e.end(p(n.message))}return}e.writeHead(405,{"Content-Type":"text/html"}),e.end(p("Method not allowed."))},Q=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if(a.info("Recovery request received",{client_ip:r,method:t.method}),!o){e.writeHead(400,{"Content-Type":"text/html"}),e.end(p("Recovery token is required."));return}const n=C(o);if(!n.valid){S(r);let s="Invalid recovery token.";n.reason==="expired"?s="Recovery token has expired. Generate a new token using --generate-recovery-token.":n.reason==="locked"?s="Recovery is locked due to too many failed attempts. Please try again later.":n.reason==="no_token"&&(s="No active recovery token found. 
Generate a new token using --generate-recovery-token."),a.warn("Invalid recovery token attempt",{client_ip:r,reason:n.reason,provided_token:o}),e.writeHead(403,{"Content-Type":"text/html"}),e.end(p(s));return}if(t.method==="GET"){e.writeHead(200,{"Content-Type":"text/html"}),e.end(y(o));return}if(t.method==="POST"){try{const s=await G(t),{password:d,confirm_password:f}=s;if(!d||!f){e.writeHead(400,{"Content-Type":"text/html"}),e.end(y(o,"Both password fields are required."));return}if(d!==f){e.writeHead(400,{"Content-Type":"text/html"}),e.end(y(o,"Passwords do not match."));return}const b=await I(d,r,()=>{a.info("Password change completed, existing connections should be terminated")});a.info("Emergency password change completed via HTTP interface",{client_ip:r,timestamp:b.timestamp}),e.writeHead(200,{"Content-Type":"text/html"}),e.end(M(b.timestamp))}catch(s){a.error("Emergency password change failed via HTTP interface",{client_ip:r,error:s.message}),e.writeHead(400,{"Content-Type":"text/html"}),e.end(y(o,s.message))}return}e.writeHead(405,{"Content-Type":"text/html"}),e.end(p("Method not allowed."))},Z=(t=1984)=>{const e=_.createServer(async(r,n)=>{try{const s=new x(r.url,`http://localhost:${t}`);if(s.pathname==="/setup"){const d=s.searchParams.get("token");await X(r,n,d);return}if(s.pathname==="/recovery"){const d=s.searchParams.get("token");await Q(r,n,d);return}if(s.pathname.startsWith("/api/users")){const d=s.pathname.split("/").slice(3);await V(r,n,d);return}n.writeHead(404,{"Content-Type":"text/html"}),n.end(`<!DOCTYPE html>
398
+ </html>`,G=t=>new Promise((e,o)=>{let r="";t.on("data",n=>{r+=n.toString()}),t.on("end",()=>{try{const n=new URLSearchParams(r),s={};for(const[d,f]of n)s[d]=f;e(s)}catch(n){o(n)}}),t.on("error",n=>{o(n)})}),v=t=>new Promise((e,o)=>{let r="";t.on("data",n=>{r+=n.toString()}),t.on("end",()=>{try{const n=JSON.parse(r);e(n)}catch(n){o(n)}}),t.on("error",n=>{o(n)})}),w=t=>{if(l())return!0;const e=t.headers["x-joystick-db-api-key"];return D(e)},i=(t,e,o)=>{t.writeHead(e,{"Content-Type":"application/json"}),t.end(JSON.stringify(o))},z=async(t,e)=>{const o=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const r=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for user creation",{client_ip:o,development_mode:l()}),i(e,403,{error:r});return}try{const r=await v(t),n=await E(r);a.info("User created via API",{username:n.username,client_ip:o}),i(e,201,{ok:1,user:n})}catch(r){a.error("User creation failed via API",{client_ip:o,error:r.message}),i(e,400,{error:r.message})}},F=async(t,e)=>{const o=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const r=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for get users",{client_ip:o,development_mode:l()}),i(e,403,{error:r});return}try{const r=H();a.info("Users retrieved via API",{count:r.length,client_ip:o}),i(e,200,{ok:1,users:r})}catch(r){a.error("Get users failed via API",{client_ip:o,error:r.message}),i(e,500,{error:r.message})}},K=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const n=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. 
A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for user update",{client_ip:r,username:o,development_mode:l()}),i(e,403,{error:n});return}try{const n=await v(t),s=await O(o,n);a.info("User updated via API",{username:o,client_ip:r}),i(e,200,{ok:1,user:s})}catch(n){a.error("User update failed via API",{client_ip:r,username:o,error:n.message});const s=n.message==="User not found"?404:400;i(e,s,{error:n.message})}},L=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if(!w(t)){const n=l()?"API key validation failed (this should not happen in development mode)":"Database setup incomplete. A valid API key must be passed until an admin user has been created.";a.warn("Invalid API key for user deletion",{client_ip:r,username:o,development_mode:l()}),i(e,403,{error:n});return}try{R(o),a.info("User deleted via API",{username:o,client_ip:r}),i(e,200,{ok:1,message:"User deleted successfully"})}catch(n){a.error("User deletion failed via API",{client_ip:r,username:o,error:n.message});const s=n.message==="User not found"?404:400;i(e,s,{error:n.message})}},V=async(t,e,o)=>{if(t.method==="POST"&&o.length===0){await z(t,e);return}if(t.method==="GET"&&o.length===0){await F(t,e);return}if(t.method==="PUT"&&o.length===1){const r=o[0];await K(t,e,r);return}if(t.method==="DELETE"&&o.length===1){const r=o[0];await L(t,e,r);return}i(e,405,{error:"Method not allowed"})},X=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if($(r)){e.writeHead(429,{"Content-Type":"text/html"}),e.end(p("Too many setup attempts. 
Please try again later."));return}if(N(r),!g()){e.writeHead(400,{"Content-Type":"text/html"}),e.end(p("Setup has already been completed."));return}if(!c||o!==c){a.warn("Invalid setup token attempt",{client_ip:r,provided_token:o}),e.writeHead(403,{"Content-Type":"text/html"}),e.end(p("Invalid or missing setup token."));return}if(t.method==="GET"){e.writeHead(200,{"Content-Type":"text/html"}),e.end(j(c));return}if(t.method==="POST"){try{const n=P();m=!0,c=null,a.info("Setup completed successfully via HTTP interface",{client_ip:r}),e.writeHead(200,{"Content-Type":"text/html"}),e.end(W(n))}catch(n){a.error("Setup failed via HTTP interface",{client_ip:r,error:n.message}),e.writeHead(500,{"Content-Type":"text/html"}),e.end(p(n.message))}return}e.writeHead(405,{"Content-Type":"text/html"}),e.end(p("Method not allowed."))},Q=async(t,e,o)=>{const r=t.socket.remoteAddress||"127.0.0.1";if(a.info("Recovery request received",{client_ip:r,method:t.method}),!o){e.writeHead(400,{"Content-Type":"text/html"}),e.end(p("Recovery token is required."));return}const n=C(o);if(!n.valid){S(r);let s="Invalid recovery token.";n.reason==="expired"?s="Recovery token has expired. Generate a new token using --generate-recovery-token.":n.reason==="locked"?s="Recovery is locked due to too many failed attempts. Please try again later.":n.reason==="no_token"&&(s="No active recovery token found. 
Generate a new token using --generate-recovery-token."),a.warn("Invalid recovery token attempt",{client_ip:r,reason:n.reason,provided_token:o}),e.writeHead(403,{"Content-Type":"text/html"}),e.end(p(s));return}if(t.method==="GET"){e.writeHead(200,{"Content-Type":"text/html"}),e.end(y(o));return}if(t.method==="POST"){try{const s=await G(t),{password:d,confirm_password:f}=s;if(!d||!f){e.writeHead(400,{"Content-Type":"text/html"}),e.end(y(o,"Both password fields are required."));return}if(d!==f){e.writeHead(400,{"Content-Type":"text/html"}),e.end(y(o,"Passwords do not match."));return}const b=await I(d,r,()=>{a.info("Password change completed, existing connections should be terminated")});a.info("Emergency password change completed via HTTP interface",{client_ip:r,timestamp:b.timestamp}),e.writeHead(200,{"Content-Type":"text/html"}),e.end(M(b.timestamp))}catch(s){a.error("Emergency password change failed via HTTP interface",{client_ip:r,error:s.message}),e.writeHead(400,{"Content-Type":"text/html"}),e.end(y(o,s.message))}return}e.writeHead(405,{"Content-Type":"text/html"}),e.end(p("Method not allowed."))},Z=(t=1984)=>{const e=_.createServer(async(r,n)=>{try{const s=new x(r.url,`http://localhost:${t}`);if(s.pathname==="/setup"){const d=s.searchParams.get("token");await X(r,n,d);return}if(s.pathname==="/recovery"){const d=s.searchParams.get("token");await Q(r,n,d);return}if(s.pathname.startsWith("/api/users")){const d=s.pathname.split("/").slice(3);await V(r,n,d);return}n.writeHead(404,{"Content-Type":"text/html"}),n.end(`<!DOCTYPE html>
399
399
  <html>
400
400
  <head><title>404 Not Found</title></head>
401
401
  <body>
402
402
  <h1>404 Not Found</h1>
403
403
  <p>The requested resource was not found on this server.</p>
404
404
  </body>
405
- </html>`)}catch(s){a.error("HTTP request error",{error:s.message,url:r.url}),n.writeHead(500,{"Content-Type":"text/html"}),n.end(p("Internal server error."))}}),o=new Set;return e.on("connection",r=>{o.add(r),r.on("close",()=>{o.delete(r)})}),e._connections=o,e.on("error",r=>{a.error("HTTP server error",{error:r.message})}),e},q=(t=1984)=>{const e=g();e&&(c=Y(),m=!1);const o=Z(t);return new Promise((r,n)=>{o.once("error",s=>{e&&(c=null,m=!1),a.error("Failed to start HTTP server",{port:t,error:s.message}),n(s)}),o.listen(t,()=>{u=o,e?(a.info("JoystickDB Setup Required"),a.info(`Visit: http://localhost:${t}/setup?token=${c}`)):a.info("HTTP server started for recovery operations",{port:t}),r(o)})})},ee=()=>new Promise(t=>{if(!u){t();return}const e=u,o=e._connections||new Set;u=null,c=null,m=!1,h.clear(),o.forEach(r=>{try{r.destroy()}catch{}}),e.close(r=>{r?a.warn("HTTP server close error",{error:r.message}):a.info("HTTP server stopped"),setTimeout(()=>{t()},250)}),setTimeout(()=>{a.warn("HTTP server forced shutdown after timeout"),t()},2e3)}),te=()=>({setup_required:g(),setup_token:c,setup_completed:m,http_server_running:!!u});export{te as get_setup_info,g as is_setup_required,q as start_http_server,ee as stop_http_server};
405
+ </html>`)}catch(s){a.error("HTTP request error",{error:s.message,url:r.url}),n.writeHead(500,{"Content-Type":"text/html"}),n.end(p("Internal server error."))}}),o=new Set;return e.on("connection",r=>{o.add(r),r.on("close",()=>{o.delete(r)})}),e._connections=o,e.on("error",r=>{a.error("HTTP server error",{error:r.message})}),e},q=(t=1984)=>{const e=g();e&&(c=Y(),m=!1);const o=Z(t);return new Promise((r,n)=>{o.once("error",s=>{if(process.env.NODE_ENV==="test"){a.warn("Failed to start HTTP server",{error:s.message}),r(null);return}e&&(c=null,m=!1),a.error("Failed to start HTTP server",{port:t,error:s.message}),n(s)}),o.listen(t,()=>{u=o,e?(a.info("JoystickDB Setup Required"),a.info(`Visit: http://localhost:${t}/setup?token=${c}`)):a.info("HTTP server started for recovery operations",{port:t}),r(o)})})},ee=()=>new Promise(t=>{if(!u){t();return}const e=u,o=e._connections||new Set;u=null,c=null,m=!1,h.clear(),o.forEach(r=>{try{r.destroy()}catch{}}),e.close(r=>{r?a.warn("HTTP server close error",{error:r.message}):a.info("HTTP server stopped"),setTimeout(()=>{t()},250)}),setTimeout(()=>{a.warn("HTTP server forced shutdown after timeout"),t()},2e3)}),te=()=>({setup_required:g(),setup_token:c,setup_completed:m,http_server_running:!!u});export{te as get_setup_info,g as is_setup_required,q as start_http_server,ee as stop_http_server};
@@ -1 +1 @@
1
- import{encode_message as f}from"./tcp_protocol.js";import{get_write_forwarder as g}from"./write_forwarder.js";import{get_replication_manager as h}from"./replication_manager.js";import{check_and_grow_map_size as x}from"./query_engine.js";import{performance_monitor as v}from"./performance_monitor.js";import D from"./logger.js";import q from"./operations/insert_one.js";import A from"./operations/update_one.js";import I from"./operations/delete_one.js";import $ from"./operations/delete_many.js";import b from"./operations/bulk_write.js";import k from"./operations/find_one.js";import C from"./operations/find.js";import E from"./operations/count_documents.js";import L from"./operations/create_index.js";import U from"./operations/drop_index.js";import Z from"./operations/get_indexes.js";import j from"./operations/admin.js";const{create_context_logger:d}=D("operation_dispatcher"),B=e=>e.length>64,F=e=>["admin","config","local"].includes(e.toLowerCase()),G=e=>/^[a-zA-Z0-9_-]+$/.test(e),H=e=>!e||typeof e!="string"||B(e)||F(e)?!1:G(e),J=()=>({ok:0,error:"Authentication required"}),K=()=>({ok:0,error:"Invalid database name. 
Database names must be alphanumeric with underscores/hyphens, max 64 characters, and cannot be reserved names (admin, config, local)."}),_=(e,n)=>{const o=f(n);e.write(o)},u=(e,n,o,s,t,r,c,i)=>{v.log_structured_operation(e,n,o,s,t,r,c,i)},w=(e,n,o,s,t,r,c,i,l=null)=>{const m={client_id:n,op:o,collection:s,duration_ms:t,status:r,request_size:c};r==="success"?(m.response_size=i,e.info("Database operation completed",m)):(m.error=l,e.error("Database operation failed",m))},M=async(e,n,o)=>{switch(e){case"insert_one":return await q(n,o.collection,o.document,o.options);case"update_one":return await A(n,o.collection,o.filter,o.update,o.options);case"delete_one":return await I(n,o.collection,o.filter,o.options);case"delete_many":return await $(n,o.collection,o.filter,o.options);case"bulk_write":return await b(n,o.collection,o.operations,o.options);case"find_one":return await k(n,o.collection,o.filter,o.options);case"find":return await C(n,o.collection,o.filter,o.options);case"count_documents":return await E(n,o.collection,o.filter,o.options);case"create_index":return await L(n,o.collection,o.field,o.options);case"drop_index":return await U(n,o.collection,o.field);case"get_indexes":return await Z(n,o.collection);default:throw new Error(`Unsupported operation: ${e}`)}},N=(e,n)=>e==="find_one"?{ok:1,document:n}:e==="find"?{ok:1,documents:n}:e==="count_documents"?{ok:1,...n}:{ok:1,...n},O=e=>!["find","find_one","count_documents","get_indexes"].includes(e),P=(e,n)=>{if(!O(e))return;h().queue_replication(e,n.collection,n),setImmediate(()=>x())},Q=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t,l=N(n,s),p=f(l).length;_(e,l),u(e.id,n,o.collection,i,"success",null,r,p),w(c,e.id,n,o.collection,i,"success",r,p),P(n,o)},R=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t;u(e.id,n,o.collection,i,"error",s.message,r,0),w(c,e.id,n,o.collection,i,"error",r,0,s.message);const l={ok:0,error:s.message};_(e,l)},pe=async(e,n,o,s,t=0,r=null,c=null)=>{const i=Date.now();if(!s(e)){const 
a=J();_(e,a),u(e.id,n,null,0,"error","Authentication required",t,0);return}const l=o.database||"default";if(!H(l)){const a=K();_(e,a),u(e.id,n,o.collection,0,"error","Invalid database name",t,0);return}if(!await g().forward_operation(e,n,o))try{const a=await M(n,l,o);Q(e,n,o,a,i,t)}catch(a){R(e,n,o,a,i,t)}},S=()=>({ok:!1,error:"Authentication required"}),T=(e,n)=>e?{ok:1,...n}:{ok:!0,...n},V=e=>({ok:0,error:`Admin operation failed: ${e}`}),fe=async(e,n,o,s=null,t=null)=>{if(!o(e)){const r=S();_(e,r);return}try{const r=n?.admin_action,i=await j(r,n||{},s,t),l=T(r,i);_(e,l)}catch(r){const c=V(r.message);_(e,c)}},de=e=>{const n=Date.now(),o={ok:1,response_time_ms:Date.now()-n};_(e,o)};export{fe as handle_admin_operation,pe as handle_database_operation,de as handle_ping_operation};
1
+ import{encode_message as f}from"./tcp_protocol.js";import{get_simple_sync_manager as h}from"./simple_sync_manager.js";import{get_sync_receiver as w}from"./sync_receiver.js";import{check_and_grow_map_size as v}from"./query_engine.js";import{performance_monitor as x}from"./performance_monitor.js";import D from"./logger.js";import b from"./operations/insert_one.js";import y from"./operations/update_one.js";import k from"./operations/delete_one.js";import q from"./operations/delete_many.js";import A from"./operations/bulk_write.js";import I from"./operations/find_one.js";import $ from"./operations/find.js";import U from"./operations/count_documents.js";import W from"./operations/create_index.js";import C from"./operations/drop_index.js";import E from"./operations/get_indexes.js";import L from"./operations/admin.js";const{create_context_logger:d}=D("operation_dispatcher"),Z=e=>e.length>64,j=e=>["admin","config","local"].includes(e.toLowerCase()),B=e=>/^[a-zA-Z0-9_-]+$/.test(e),F=e=>!e||typeof e!="string"||Z(e)||j(e)?!1:B(e),G=()=>({ok:0,error:"Authentication required"}),H=()=>({ok:0,error:"Invalid database name. 
Database names must be alphanumeric with underscores/hyphens, max 64 characters, and cannot be reserved names (admin, config, local)."}),_=(e,n)=>{const o=f(n);e.write(o)},a=(e,n,o,s,t,r,c,i)=>{x.log_structured_operation(e,n,o,s,t,r,c,i)},g=(e,n,o,s,t,r,c,i,l=null)=>{const m={client_id:n,op:o,collection:s,duration_ms:t,status:r,request_size:c};r==="success"?(m.response_size=i,e.info("Database operation completed",m)):(m.error=l,e.error("Database operation failed",m))},J=async(e,n,o)=>{switch(e){case"insert_one":return await b(n,o.collection,o.document,o.options);case"update_one":return await y(n,o.collection,o.filter,o.update,o.options);case"delete_one":return await k(n,o.collection,o.filter,o.options);case"delete_many":return await q(n,o.collection,o.filter,o.options);case"bulk_write":return await A(n,o.collection,o.operations,o.options);case"find_one":return await I(n,o.collection,o.filter,o.options);case"find":return await $(n,o.collection,o.filter,o.options);case"count_documents":return await U(n,o.collection,o.filter,o.options);case"create_index":return await W(n,o.collection,o.field,o.options);case"drop_index":return await C(n,o.collection,o.field);case"get_indexes":return await E(n,o.collection);default:throw new Error(`Unsupported operation: ${e}`)}},K=(e,n)=>e==="find_one"?{ok:1,document:n}:e==="find"?{ok:1,documents:n}:e==="count_documents"?{ok:1,...n}:{ok:1,...n},M=e=>!["find","find_one","count_documents","get_indexes"].includes(e),N=(e,n)=>{if(!M(e))return;h().queue_sync(e,n.collection,n),setImmediate(()=>v())},O=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t,l=K(n,s),p=f(l).length;_(e,l),a(e.id,n,o.collection,i,"success",null,r,p),g(c,e.id,n,o.collection,i,"success",r,p),N(n,o)},P=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t;a(e.id,n,o.collection,i,"error",s.message,r,0),g(c,e.id,n,o.collection,i,"error",r,0,s.message);const l={ok:0,error:s.message};_(e,l)},pe=async(e,n,o,s,t=0,r=null,c=null)=>{const i=Date.now();if(!s(e)){const 
u=G();_(e,u),a(e.id,n,null,0,"error","Authentication required",t,0);return}const l=o.database||"default";if(!F(l)){const u=H();_(e,u),a(e.id,n,o.collection,0,"error","Invalid database name",t,0);return}if(w().should_block_client_operation(n)){_(e,{ok:0,error:"Write operations not allowed on secondary node. Use primary node for write operations."}),a(e.id,n,o.collection,0,"error","Write operation blocked on secondary",t,0);return}try{const u=await J(n,l,o);O(e,n,o,u,i,t)}catch(u){P(e,n,o,u,i,t)}},Q=()=>({ok:!1,error:"Authentication required"}),R=(e,n)=>e?{ok:1,...n}:{ok:!0,...n},S=e=>({ok:0,error:`Admin operation failed: ${e}`}),fe=async(e,n,o,s=null,t=null)=>{if(!o(e)){const r=Q();_(e,r);return}try{const r=n?.admin_action,i=await L(r,n||{},s,t),l=R(r,i);_(e,l)}catch(r){const c=S(r.message);_(e,c)}},de=e=>{const n=Date.now(),o={ok:1,response_time_ms:Date.now()-n};_(e,o)};export{fe as handle_admin_operation,pe as handle_database_operation,de as handle_ping_operation};
@@ -1 +1 @@
1
- import{get_database as y}from"../query_engine.js";import{get_settings as $}from"../load_settings.js";import{get_write_queue as v}from"../write_queue.js";import{get_auth_stats as q}from"../auth_manager.js";import{get_query_statistics as E,get_auto_index_statistics as D,force_index_evaluation as O,remove_automatic_indexes as P}from"../auto_index_manager.js";import{create_index as N,drop_index as R,get_indexes as U}from"../index_manager.js";import{test_s3_connection as j,create_backup as T,list_backups as W,restore_backup as A,cleanup_old_backups as J}from"../backup_manager.js";import{get_replication_manager as b}from"../replication_manager.js";import{get_write_forwarder as B}from"../write_forwarder.js";import L from"../logger.js";import{performance_monitor as w}from"../performance_monitor.js";const{create_context_logger:h}=L("admin"),I=()=>{try{return $()}catch{return{port:1983}}},G=t=>{try{const e=t.getStats?t.getStats():{};return{pageSize:e.pageSize||0,treeDepth:e.treeDepth||0,treeBranchPages:e.treeBranchPages||0,treeLeafPages:e.treeLeafPages||0,entryCount:e.entryCount||0,mapSize:e.mapSize||0,lastPageNumber:e.lastPageNumber||0}}catch{return{error:"Could not retrieve database stats"}}},H=(t,e)=>{const o={};let r=0;try{for(const{key:n}of t.getRange())if(typeof n=="string"&&n.includes(":")&&!n.startsWith("_")){const a=n.split(":")[0];o[a]=(o[a]||0)+1,r++}}catch(n){e.warn("Could not iterate database range for stats",{error:n.message})}return{collections:o,total_documents:r}},z=()=>{const 
t=process.memoryUsage();return{rss:Math.round(t.rss/1024/1024),heapTotal:Math.round(t.heapTotal/1024/1024),heapUsed:Math.round(t.heapUsed/1024/1024),external:Math.round(t.external/1024/1024)}},K=t=>t.mapSize>0?Math.round(t.lastPageNumber*t.pageSize/t.mapSize*100):0,Q=t=>({uptime:Math.floor(process.uptime()),uptime_formatted:M(process.uptime()),memory_usage:t,memory_usage_raw:process.memoryUsage(),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid,cpu_usage:process.cpuUsage()}),V=(t,e,o,r)=>({total_documents:t,total_collections:Object.keys(e).length,collections:e,stats:o,map_size_usage_percent:r,disk_usage:{map_size_mb:Math.round((o.mapSize||0)/1024/1024),used_space_mb:Math.round((o.lastPageNumber||0)*(o.pageSize||0)/1024/1024)}}),X=()=>{const t=h();try{const e=y(),o=I(),r=G(e),{collections:n,total_documents:a}=H(e,t),s=z(),i=K(r);return{server:Q(s),database:V(a,n,r,i),performance:{ops_per_second:F(),avg_response_time_ms:C()}}}catch(e){throw t.error("Failed to get enhanced stats",{error:e.message}),e}},M=t=>{const e=Math.floor(t/86400),o=Math.floor(t%86400/3600),r=Math.floor(t%3600/60),n=Math.floor(t%60);return e>0?`${e}d ${o}h ${r}m ${n}s`:o>0?`${o}h ${r}m ${n}s`:r>0?`${r}m ${n}s`:`${n}s`};let k=0,S=0,Y=Date.now();const F=()=>{const t=(Date.now()-Y)/1e3;return t>0?Math.round(k/t):0},C=()=>k>0?Math.round(S/k):0,Z=t=>{k++,S+=t},ee=t=>({name:t,document_count:0,indexes:[],estimated_size_bytes:0}),te=(t,e,o)=>{const r={};let n=0;try{for(const{key:a}of t.getRange())if(typeof a=="string"&&a.includes(":")&&!a.startsWith("_")){const s=a.split(":");if(s.length>=3){const i=s[0],c=s[1];i===e&&(r[c]||(r[c]=ee(c)),r[c].document_count++,n++)}}}catch(a){o.warn("Could not iterate database range for collections",{error:a.message})}return{collections_map:r,total_documents:n}},re=(t,e,o)=>{const 
r=["admin_test","test_collection","queue_test","users","products","orders","sessions","logs","analytics","settings","another_collection","list_test","pagination_test","get_test","query_test","admin_insert_test","admin_update_test","admin_delete_test"];let n=0;for(const a of r)try{const s=`${e}:${a}:`,i=t.getRange({start:s,end:s+"\xFF"});let c=0;for(const _ of i)c++,n++;c>0&&(o[a]={name:a,document_count:c,indexes:[],estimated_size_bytes:c*100})}catch{continue}return n},oe=(t,e,o,r)=>{try{const n=`index:${e}:`,a=t.getRange({start:n,end:n+"\xFF"});for(const{key:s,value:i}of a)if(typeof s=="string"&&s.startsWith(n)){const c=s.substring(n.length),_=c.split(":")[0],u=c.split(":")[1];o[_]&&u&&(o[_].indexes.includes(u)||o[_].indexes.push(u))}}catch(n){r.warn("Could not iterate index range",{error:n.message})}},se=(t="default")=>{const e=h();try{const o=y();let{collections_map:r,total_documents:n}=te(o,t,e);Object.keys(r).length===0&&(n+=re(o,t,r)),oe(o,t,r,e);const a=Object.values(r);return{collections:a,total_collections:a.length,total_documents:n}}catch(o){throw e.error("Failed to list collections",{error:o.message}),o}},ne=(t,e={})=>{const o=h();if(!t)throw new Error("Collection name is required");try{const r=y(),{limit:n=50,skip:a=0,sort_field:s,sort_order:i="asc",database:c="default"}=e,_=[],u=`${c}:${t}:`;let m=0,g=0;for(const{key:d,value:f}of r.getRange({start:u,end:u+"\xFF"}))if(typeof d=="string"&&d.startsWith(u)){if(g<a){g++;continue}if(m>=n)break;try{const l=JSON.parse(f),p=d.substring(u.length);_.push({_id:p,...l}),m++}catch(l){o.warn("Could not parse document",{collection:t,key:d,error:l.message})}}return s&&_.length>0&&_.sort((d,f)=>{const l=d[s],p=f[s];return i==="desc"?p>l?1:p<l?-1:0:l>p?1:l<p?-1:0}),{collection:t,documents:_,count:_.length,skip:a,limit:n,has_more:m===n}}catch(r){throw o.error("Failed to list documents",{collection:t,error:r.message}),r}},ae=(t,e,o="default")=>{const r=h();if(!t||!e)throw new Error("Collection name and document ID are 
required");try{const n=y(),a=`${o}:${t}:${e}`,s=n.get(a);if(!s)return{found:!1,collection:t,document_id:e};const i=JSON.parse(s);return{found:!0,collection:t,document_id:e,document:{_id:e,...i}}}catch(n){throw r.error("Failed to get document",{collection:t,document_id:e,error:n.message}),n}},ce=(t,e,o,r)=>{switch(t){case"$gt":return o>e;case"$gte":return o>=e;case"$lt":return o<e;case"$lte":return o<=e;case"$ne":return o!==e;case"$in":return Array.isArray(e)&&e.includes(o);case"$regex":const n=r.$options||"";return new RegExp(e,n).test(String(o));default:return o===r}},ie=(t,e)=>Object.keys(e).every(o=>{const r=e[o],n=t[o];return typeof r=="object"&&r!==null?Object.keys(r).every(a=>{const s=r[a];return ce(a,s,n,r)}):n===r}),_e=(t,e,o,r,n)=>{try{const a=JSON.parse(e),i={_id:t.substring(o.length),...a};return ie(i,r)?i:null}catch(a){return n.warn("Could not parse document during query",{key:t,error:a.message}),null}},ue=(t,e={},o={})=>{const r=h();if(!t)throw new Error("Collection name is required");try{const n=y(),{limit:a=100,skip:s=0,database:i="default"}=o,c=[],_=`${i}:${t}:`;let u=0,m=0,g=0;for(const{key:d,value:f}of n.getRange({start:_,end:_+"\xFF"}))if(typeof d=="string"&&d.startsWith(_)){g++;const l=_e(d,f,_,e,r);if(l){if(m<s){m++;continue}if(u>=a)break;c.push(l),u++}}return{collection:t,filter:e,documents:c,count:c.length,total_examined:g,skip:s,limit:a,has_more:u===a}}catch(n){throw r.error("Failed to query documents",{collection:t,filter:e,error:n.message}),n}},le=async(t,e,o,r={})=>await(await import("./insert_one.js")).default(t,e,o,r),de=async(t,e,o,r,n={})=>await(await import("./update_one.js")).default(t,e,o,r,n),me=async(t,e,o,r={})=>await(await import("./delete_one.js")).default(t,e,o,r);var qe=async(t,e={},o,r)=>{const n=h(),a=Date.now();try{let s;switch(t){case"stats":const 
c=z();s={server:{uptime:Math.floor(process.uptime()),uptime_formatted:M(process.uptime()),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid},memory:{heap_used_mb:c.heapUsed,heap_total_mb:c.heapTotal,rss_mb:c.rss,external_mb:c.external,heap_used_percent:c.heapTotal>0?Math.round(c.heapUsed/c.heapTotal*100):0},database:{...w.get_database_stats(),map_size_mb:Math.round((w.get_database_stats()?.map_size||0)/1024/1024),used_space_mb:Math.round((w.get_database_stats()?.used_space||0)/1024/1024),usage_percent:w.get_database_stats()?.usage_percent||0},performance:{ops_per_second:F(),avg_response_time_ms:C()},system:w.get_system_stats(),connections:o?.get_stats()||{active:r?.size||0,total:r?.size||0},write_queue:v()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...q()},settings:(()=>{try{return{port:$().port||1983}}catch{return{port:1983}}})()};break;case"list_collections":s=se();break;case"list_documents":s=ne(e.collection,{limit:e.limit,skip:e.skip,sort_field:e.sort_field,sort_order:e.sort_order});break;case"get_document":s=ae(e.collection,e.document_id);break;case"query_documents":s=ue(e.collection,e.filter,{limit:e.limit,skip:e.skip});break;case"insert_document":s=await le(e.database||"default",e.collection,e.document,e.options);break;case"update_document":const _=e.document_id?{_id:e.document_id}:e.filter;s=await de(e.database||"default",e.collection,_,e.update,e.options);break;case"delete_document":const u=e.document_id?{_id:e.document_id}:e.filter;s=await me(e.database||"default",e.collection,u,e.options);break;case"test_s3_connection":s=await j();break;case"backup_now":s=await T();break;case"list_backups":s=await W();break;case"restore_backup":if(!e.backup_filename)throw new Error("backup_filename is required for restore operation");s=await A(e.backup_filename);break;case"cleanup_backups":s=await 
J();break;case"get_auto_index_stats":s=D();break;case"get_query_stats":s=E(e.collection);break;case"evaluate_auto_indexes":s=await O(e.collection);break;case"remove_auto_indexes":if(!e.collection)throw new Error("collection is required for remove_auto_indexes operation");s=await P(e.collection,e.field_names);break;case"create_index":if(!e.collection||!e.field)throw new Error("collection and field are required for create_index operation");s=await N(e.database||"default",e.collection,e.field,e.options);break;case"drop_index":if(!e.collection||!e.field)throw new Error("collection and field are required for drop_index operation");s=await R(e.database||"default",e.collection,e.field);break;case"get_indexes":if(!e.collection)throw new Error("collection is required for get_indexes operation");s={indexes:U(e.database||"default",e.collection)};break;case"get_replication_status":s=b().get_replication_status();break;case"add_secondary":if(!e.id||!e.ip||!e.port||!e.private_key)throw new Error("id, ip, port, and private_key are required for add_secondary operation");s=await b().add_secondary({id:e.id,ip:e.ip,port:e.port,private_key:e.private_key,enabled:!0});break;case"remove_secondary":if(!e.secondary_id)throw new Error("secondary_id is required for remove_secondary operation");s=b().remove_secondary(e.secondary_id);break;case"sync_secondaries":s=await b().sync_secondaries();break;case"get_secondary_health":s=b().get_secondary_health();break;case"get_forwarder_status":s=B().get_forwarder_status();break;default:s={...X(),connections:o?.get_stats()||{},write_queue:v()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...q()},settings:(()=>{try{return{port:$().port||1983}}catch{return{port:1983}}})()}}const i=Date.now()-a;return Z(i),n.info("Admin operation completed",{admin_action:t||"default",duration_ms:i,status:"success"}),s}catch(s){const i=Date.now()-a;throw n.error("Admin operation 
failed",{admin_action:t||"default",duration_ms:i,status:"error",error:s.message}),s}};export{qe as default,Z as track_operation};
1
+ import{get_database as b}from"../query_engine.js";import{get_settings as q}from"../load_settings.js";import{get_write_queue as z}from"../write_queue.js";import{get_auth_stats as M}from"../auth_manager.js";import{get_query_statistics as P,get_auto_index_statistics as N,force_index_evaluation as O,remove_automatic_indexes as A}from"../auto_index_manager.js";import{create_index as R,drop_index as U,get_indexes as j}from"../index_manager.js";import{test_s3_connection as T,create_backup as W,list_backups as J,restore_backup as B,cleanup_old_backups as I}from"../backup_manager.js";import{get_simple_sync_manager as x}from"../simple_sync_manager.js";import{get_sync_receiver as v}from"../sync_receiver.js";import L from"../logger.js";import{performance_monitor as w}from"../performance_monitor.js";const{create_context_logger:y}=L("admin"),K=()=>{try{return q()}catch{return{port:1983}}},Y=t=>{try{const e=t.getStats?t.getStats():{};return{pageSize:e.pageSize||0,treeDepth:e.treeDepth||0,treeBranchPages:e.treeBranchPages||0,treeLeafPages:e.treeLeafPages||0,entryCount:e.entryCount||0,mapSize:e.mapSize||0,lastPageNumber:e.lastPageNumber||0}}catch{return{error:"Could not retrieve database stats"}}},G=(t,e)=>{const o={};let r=0;try{for(const{key:n}of t.getRange())if(typeof n=="string"&&n.includes(":")&&!n.startsWith("_")){const a=n.split(":")[0];o[a]=(o[a]||0)+1,r++}}catch(n){e.warn("Could not iterate database range for stats",{error:n.message})}return{collections:o,total_documents:r}},S=()=>{const 
t=process.memoryUsage();return{rss:Math.round(t.rss/1024/1024),heapTotal:Math.round(t.heapTotal/1024/1024),heapUsed:Math.round(t.heapUsed/1024/1024),external:Math.round(t.external/1024/1024)}},H=t=>t.mapSize>0?Math.round(t.lastPageNumber*t.pageSize/t.mapSize*100):0,Q=t=>({uptime:Math.floor(process.uptime()),uptime_formatted:E(process.uptime()),memory_usage:t,memory_usage_raw:process.memoryUsage(),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid,cpu_usage:process.cpuUsage()}),V=(t,e,o,r)=>({total_documents:t,total_collections:Object.keys(e).length,collections:e,stats:o,map_size_usage_percent:r,disk_usage:{map_size_mb:Math.round((o.mapSize||0)/1024/1024),used_space_mb:Math.round((o.lastPageNumber||0)*(o.pageSize||0)/1024/1024)}}),X=()=>{const t=y();try{const e=b(),o=K(),r=Y(e),{collections:n,total_documents:a}=G(e,t),s=S(),i=H(r);return{server:Q(s),database:V(a,n,r,i),performance:{ops_per_second:C(),avg_response_time_ms:D()}}}catch(e){throw t.error("Failed to get enhanced stats",{error:e.message}),e}},E=t=>{const e=Math.floor(t/86400),o=Math.floor(t%86400/3600),r=Math.floor(t%3600/60),n=Math.floor(t%60);return e>0?`${e}d ${o}h ${r}m ${n}s`:o>0?`${o}h ${r}m ${n}s`:r>0?`${r}m ${n}s`:`${n}s`};let $=0,F=0,Z=Date.now();const C=()=>{const t=(Date.now()-Z)/1e3;return t>0?Math.round($/t):0},D=()=>$>0?Math.round(F/$):0,ee=t=>{$++,F+=t},te=t=>({name:t,document_count:0,indexes:[],estimated_size_bytes:0}),re=(t,e,o)=>{const r={};let n=0;try{for(const{key:a}of t.getRange())if(typeof a=="string"&&a.includes(":")&&!a.startsWith("_")){const s=a.split(":");if(s.length>=3){const i=s[0],c=s[1];i===e&&(r[c]||(r[c]=te(c)),r[c].document_count++,n++)}}}catch(a){o.warn("Could not iterate database range for collections",{error:a.message})}return{collections_map:r,total_documents:n}},se=(t,e,o)=>{const 
r=["admin_test","test_collection","queue_test","users","products","orders","sessions","logs","analytics","settings","another_collection","list_test","pagination_test","get_test","query_test","admin_insert_test","admin_update_test","admin_delete_test"];let n=0;for(const a of r)try{const s=`${e}:${a}:`,i=t.getRange({start:s,end:s+"\xFF"});let c=0;for(const _ of i)c++,n++;c>0&&(o[a]={name:a,document_count:c,indexes:[],estimated_size_bytes:c*100})}catch{continue}return n},oe=(t,e,o,r)=>{try{const n=`index:${e}:`,a=t.getRange({start:n,end:n+"\xFF"});for(const{key:s,value:i}of a)if(typeof s=="string"&&s.startsWith(n)){const c=s.substring(n.length),_=c.split(":")[0],u=c.split(":")[1];o[_]&&u&&(o[_].indexes.includes(u)||o[_].indexes.push(u))}}catch(n){r.warn("Could not iterate index range",{error:n.message})}},ne=(t="default")=>{const e=y();try{const o=b();let{collections_map:r,total_documents:n}=re(o,t,e);Object.keys(r).length===0&&(n+=se(o,t,r)),oe(o,t,r,e);const a=Object.values(r);return{collections:a,total_collections:a.length,total_documents:n}}catch(o){throw e.error("Failed to list collections",{error:o.message}),o}},ae=(t,e={})=>{const o=y();if(!t)throw new Error("Collection name is required");try{const r=b(),{limit:n=50,skip:a=0,sort_field:s,sort_order:i="asc",database:c="default"}=e,_=[],u=`${c}:${t}:`;let m=0,p=0;for(const{key:d,value:g}of r.getRange({start:u,end:u+"\xFF"}))if(typeof d=="string"&&d.startsWith(u)){if(p<a){p++;continue}if(m>=n)break;try{const l=JSON.parse(g),f=d.substring(u.length);_.push({_id:f,...l}),m++}catch(l){o.warn("Could not parse document",{collection:t,key:d,error:l.message})}}return s&&_.length>0&&_.sort((d,g)=>{const l=d[s],f=g[s];return i==="desc"?f>l?1:f<l?-1:0:l>f?1:l<f?-1:0}),{collection:t,documents:_,count:_.length,skip:a,limit:n,has_more:m===n}}catch(r){throw o.error("Failed to list documents",{collection:t,error:r.message}),r}},ce=(t,e,o="default")=>{const r=y();if(!t||!e)throw new Error("Collection name and document ID are 
required");try{const n=b(),a=`${o}:${t}:${e}`,s=n.get(a);if(!s)return{found:!1,collection:t,document_id:e};const i=JSON.parse(s);return{found:!0,collection:t,document_id:e,document:{_id:e,...i}}}catch(n){throw r.error("Failed to get document",{collection:t,document_id:e,error:n.message}),n}},ie=(t,e,o,r)=>{switch(t){case"$gt":return o>e;case"$gte":return o>=e;case"$lt":return o<e;case"$lte":return o<=e;case"$ne":return o!==e;case"$in":return Array.isArray(e)&&e.includes(o);case"$regex":const n=r.$options||"";return new RegExp(e,n).test(String(o));default:return o===r}},_e=(t,e)=>Object.keys(e).every(o=>{const r=e[o],n=t[o];return typeof r=="object"&&r!==null?Object.keys(r).every(a=>{const s=r[a];return ie(a,s,n,r)}):n===r}),ue=(t,e,o,r,n)=>{try{const a=JSON.parse(e),i={_id:t.substring(o.length),...a};return _e(i,r)?i:null}catch(a){return n.warn("Could not parse document during query",{key:t,error:a.message}),null}},le=(t,e={},o={})=>{const r=y();if(!t)throw new Error("Collection name is required");try{const n=b(),{limit:a=100,skip:s=0,database:i="default"}=o,c=[],_=`${i}:${t}:`;let u=0,m=0,p=0;for(const{key:d,value:g}of n.getRange({start:_,end:_+"\xFF"}))if(typeof d=="string"&&d.startsWith(_)){p++;const l=ue(d,g,_,e,r);if(l){if(m<s){m++;continue}if(u>=a)break;c.push(l),u++}}return{collection:t,filter:e,documents:c,count:c.length,total_examined:p,skip:s,limit:a,has_more:u===a}}catch(n){throw r.error("Failed to query documents",{collection:t,filter:e,error:n.message}),n}},de=async(t,e,o,r={})=>await(await import("./insert_one.js")).default(t,e,o,r),me=async(t,e,o,r,n={})=>await(await import("./update_one.js")).default(t,e,o,r,n),pe=async(t,e,o,r={})=>await(await import("./delete_one.js")).default(t,e,o,r);var ze=async(t,e={},o,r)=>{const n=y(),a=Date.now();try{let s;switch(t){case"stats":const 
c=S();s={server:{uptime:Math.floor(process.uptime()),uptime_formatted:E(process.uptime()),node_version:process.version,platform:process.platform,arch:process.arch,pid:process.pid},memory:{heap_used_mb:c.heapUsed,heap_total_mb:c.heapTotal,rss_mb:c.rss,external_mb:c.external,heap_used_percent:c.heapTotal>0?Math.round(c.heapUsed/c.heapTotal*100):0},database:{...w.get_database_stats(),map_size_mb:Math.round((w.get_database_stats()?.map_size||0)/1024/1024),used_space_mb:Math.round((w.get_database_stats()?.used_space||0)/1024/1024),usage_percent:w.get_database_stats()?.usage_percent||0},performance:{ops_per_second:C(),avg_response_time_ms:D()},system:w.get_system_stats(),connections:o?.get_stats()||{active:r?.size||0,total:r?.size||0},write_queue:z()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...M()},settings:(()=>{try{return{port:q().port||1983}}catch{return{port:1983}}})()};break;case"list_collections":s=ne();break;case"list_documents":s=ae(e.collection,{limit:e.limit,skip:e.skip,sort_field:e.sort_field,sort_order:e.sort_order});break;case"get_document":s=ce(e.collection,e.document_id);break;case"query_documents":s=le(e.collection,e.filter,{limit:e.limit,skip:e.skip});break;case"insert_document":s=await de(e.database||"default",e.collection,e.document,e.options);break;case"update_document":const _=e.document_id?{_id:e.document_id}:e.filter;s=await me(e.database||"default",e.collection,_,e.update,e.options);break;case"delete_document":const u=e.document_id?{_id:e.document_id}:e.filter;s=await pe(e.database||"default",e.collection,u,e.options);break;case"test_s3_connection":s=await T();break;case"backup_now":s=await W();break;case"list_backups":s=await J();break;case"restore_backup":if(!e.backup_filename)throw new Error("backup_filename is required for restore operation");s=await B(e.backup_filename);break;case"cleanup_backups":s=await 
I();break;case"get_auto_index_stats":s=N();break;case"get_query_stats":s=P(e.collection);break;case"evaluate_auto_indexes":s=await O(e.collection);break;case"remove_auto_indexes":if(!e.collection)throw new Error("collection is required for remove_auto_indexes operation");s=await A(e.collection,e.field_names);break;case"create_index":if(!e.collection||!e.field)throw new Error("collection and field are required for create_index operation");s=await R(e.database||"default",e.collection,e.field,e.options);break;case"drop_index":if(!e.collection||!e.field)throw new Error("collection and field are required for drop_index operation");s=await U(e.database||"default",e.collection,e.field);break;case"get_indexes":if(!e.collection)throw new Error("collection is required for get_indexes operation");s={indexes:j(e.database||"default",e.collection)};break;case"get_sync_status":const m=x(),p=v();s={sync_manager:m.get_sync_status(),sync_receiver:p.get_sync_status()};break;case"update_secondary_nodes":if(!Array.isArray(e.secondary_nodes))throw new Error("secondary_nodes array is required for update_secondary_nodes operation");x().update_secondary_nodes(e.secondary_nodes),s={success:!0,message:"Secondary nodes updated successfully",secondary_nodes:e.secondary_nodes};break;case"force_sync":s=await x().force_sync();break;case"set_primary_role":if(typeof e.primary!="boolean")throw new Error("primary boolean value is required for set_primary_role operation");e.primary?(v().promote_to_primary(),s={success:!0,message:"Node promoted to primary successfully",role:"primary"}):s={success:!1,message:"Demoting primary to secondary requires server restart with updated configuration",role:"primary"};break;case"reload_sync_key":const l=v();if(!l.is_secondary)throw new Error("reload_sync_key can only be used on secondary nodes");await l.reload_api_key(),s={success:!0,message:"API_KEY reloaded successfully"};break;case"get_secondary_auth_status":const 
h=x().get_sync_status();s={secondary_count:h.secondary_count,auth_failures:h.stats.auth_failures,successful_syncs:h.stats.successful_syncs,failed_syncs:h.stats.failed_syncs,secondaries:h.secondaries};break;default:s={...X(),connections:o?.get_stats()||{},write_queue:z()?.get_stats()||{},authentication:{authenticated_clients:r?.size||0,...M()},settings:(()=>{try{return{port:q().port||1983}}catch{return{port:1983}}})()}}const i=Date.now()-a;return ee(i),n.info("Admin operation completed",{admin_action:t||"default",duration_ms:i,status:"success"}),s}catch(s){const i=Date.now()-a;throw n.error("Admin operation failed",{admin_action:t||"default",duration_ms:i,status:"error",error:s.message}),s}};export{ze as default,ee as track_operation};
@@ -1 +1 @@
1
- import{get_database as v,build_collection_key as O,generate_document_id as x}from"../query_engine.js";import{update_indexes_on_update as $,update_indexes_on_insert as S}from"../index_manager.js";import{get_write_queue as q}from"../write_queue.js";import E from"../logger.js";const{create_context_logger:J}=E("update_one"),w=(t,n,e)=>{const r=n.split("."),o={...t};let s=o;for(let c=0;c<r.length-1;c++){const u=r[c];!(u in s)||typeof s[u]!="object"||s[u]===null?s[u]={}:s[u]={...s[u]},s=s[u]}return s[r[r.length-1]]=e,o},N=(t,n)=>{let e={...t};for(const[r,o]of Object.entries(n))r.includes(".")?e=w(e,r,o):e[r]=o;return e},A=(t,n)=>{const e=n.split(".");let r=t;for(const o of e){if(r==null||typeof r!="object")return;r=r[o]}return r},F=(t,n)=>{const e=n.split("."),r={...t};let o=r;for(let s=0;s<e.length-1;s++){const c=e[s];if(!(c in o)||typeof o[c]!="object"||o[c]===null)return r;o[c]={...o[c]},o=o[c]}return delete o[e[e.length-1]],r},U=(t,n)=>{let e={...t};for(const r of Object.keys(n))r.includes(".")?e=F(e,r):delete e[r];return e},D=(t,n)=>{let e={...t};for(const[r,o]of Object.entries(n))if(r.includes(".")){const s=A(e,r)||0;e=w(e,r,s+o)}else e[r]=(e[r]||0)+o;return e},C=(t,n)=>{const e={...t};for(const[r,o]of Object.entries(n))Array.isArray(e[r])||(e[r]=[]),e[r]=[...e[r],o];return e},I=(t,n)=>{const e={...t};for(const[r,o]of Object.entries(n))Array.isArray(e[r])&&(e[r]=e[r].filter(s=>s!==o));return e},b=(t,n)=>{let e={...t};for(const[r,o]of Object.entries(n))switch(r){case"$set":e=N(e,o);break;case"$unset":e=U(e,o);break;case"$inc":e=D(e,o);break;case"$push":e=C(e,o);break;case"$pull":e=I(e,o);break;default:throw new Error(`Unsupported update operator: ${r}`)}return e},R=(t,n,e)=>t[n]===e,z=(t,n)=>{if(!n||Object.keys(n).length===0)return!0;for(const[e,r]of Object.entries(n))if(!R(t,e,r))return!1;return!0},B=t=>{if(!t)throw new Error("Database name is required")},G=t=>{if(!t)throw new Error("Collection name is required")},H=t=>{if(!t||typeof t!="object")throw new 
Error("Filter must be a valid object")},K=t=>{if(!t||typeof t!="object")throw new Error("Update must be a valid object")},L=(t,n,e,r)=>{B(t),G(n),H(e),K(r)},M=t=>{try{return JSON.parse(t)}catch{return null}},g=()=>new Date().toISOString(),P=t=>({...t,_updated_at:g()}),Q=(t,n)=>JSON.stringify(t)!==JSON.stringify(n),T=(t,n)=>{const e=x(),r=g(),o={...t,_id:e,_created_at:r,_updated_at:r};return b(o,n)},V=(t,n,e,r,o,s)=>{let c=0,u=0,i=null,d=null,l=null,a=null;const p=`${n}:${e}:`;let f=!1;const k=t.getRange({start:p,end:p+"\xFF"});for(const{key:m,value:h}of k){const _=M(h);if(_&&z(_,r)){f=!0,c=1;const j=b(_,o),y=P(j);Q(_,y)&&(t.put(m,JSON.stringify(y)),d=_,l=y,u=1);break}}if(!f&&s.upsert){a=T(r,o);const m=O(n,e,a._id);t.put(m,JSON.stringify(a)),i=a._id,c=0,u=0}return{matched_count:c,modified_count:u,upserted_id:i,old_document:d,new_document:l,upserted_document:a}},W=async(t,n,e,r)=>{e&&r&&await $(t,n,e,r)},X=async(t,n,e)=>{e&&await S(t,n,e)},Y=(t,n,e,r,o,s)=>{t.info("Update operation completed",{database:n,collection:e,matched_count:r,modified_count:o,upserted_id:s})},Z=(t,n,e)=>{const r={acknowledged:!0,matched_count:t,modified_count:n};return e&&(r.upserted_id=e),r},ee=(t,n,e)=>({operation:"update_one",database:t,collection:n,filter_keys:Object.keys(e||{})}),te=async(t,n,e,r,o={})=>{const s=J();L(t,n,e,r);const c=v(),u=await c.transaction(()=>V(c,t,n,e,r,o)),{matched_count:i,modified_count:d,upserted_id:l,old_document:a,new_document:p,upserted_document:f}=u;return await W(t,n,a,p),await X(t,n,f),Y(s,t,n,i,d,l),Z(i,d,l)},re=async(t,n,e,r,o={})=>{const s=q(),c=ee(t,n,e);return await s.enqueue_write_operation(()=>te(t,n,e,r,o),c)};var ue=re;export{ue as default};
1
+ import{get_database as $,build_collection_key as A,generate_document_id as x}from"../query_engine.js";import{update_indexes_on_update as S,update_indexes_on_insert as q}from"../index_manager.js";import{get_write_queue as E}from"../write_queue.js";import J from"../logger.js";const{create_context_logger:N}=J("update_one"),u=(n,o,e)=>{const t=o.split("."),r={...n};let s=r;for(let c=0;c<t.length-1;c++){const a=t[c];!(a in s)||typeof s[a]!="object"||s[a]===null?s[a]={}:s[a]={...s[a]},s=s[a]}return s[t[t.length-1]]=e,r},D=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))t.includes(".")?e=u(e,t,r):e[t]=r;return e},d=(n,o)=>{const e=o.split(".");let t=n;for(const r of e){if(t==null||typeof t!="object")return;t=t[r]}return t},w=(n,o)=>{const e=o.split("."),t={...n};let r=t;for(let s=0;s<e.length-1;s++){const c=e[s];if(!(c in r)||typeof r[c]!="object"||r[c]===null)return t;r[c]={...r[c]},r=r[c]}return delete r[e[e.length-1]],t},F=(n,o)=>{let e={...n};for(const t of Object.keys(o))t.includes(".")?e=w(e,t):delete e[t];return e},U=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))if(t.includes(".")){const s=d(e,t)||0;e=u(e,t,s+r)}else e[t]=(e[t]||0)+r;return e},I=(n,o)=>{const e={...n};for(const[t,r]of Object.entries(o))Array.isArray(e[t])||(e[t]=[]),e[t]=[...e[t],r];return e},C=(n,o)=>{const e={...n};for(const[t,r]of Object.entries(o))Array.isArray(e[t])&&(e[t]=e[t].filter(s=>s!==r));return e},R=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))if(t.includes(".")){let s=d(e,t);Array.isArray(s)||(s=[]),s.includes(r)||(s=[...s,r]),e=u(e,t,s)}else Array.isArray(e[t])||(e[t]=[]),e[t].includes(r)||(e[t]=[...e[t],r]);return e},z=(n,o)=>{const e={...n};for(const[t,r]of Object.entries(o))Array.isArray(e[t])&&Array.isArray(r)&&(e[t]=e[t].filter(s=>!r.includes(s)));return e},B=(n,o)=>{const e={...n};for(const[t,r]of Object.entries(o))Array.isArray(e[t])&&e[t].length>0&&(r===1||r==="1"?e[t]=e[t].slice(0,-1):(r===-1||r==="-1")&&(e[t]=e[t].slice(1)));return 
e},G=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))if(t.includes(".")||r.includes(".")){const s=d(e,t);s!==void 0&&(e=u(e,r,s),e=w(e,t))}else t in e&&(e[r]=e[t],delete e[t]);return e},H=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))if(t.includes(".")){const s=d(e,t);(s===void 0||r<s)&&(e=u(e,t,r))}else(!(t in e)||r<e[t])&&(e[t]=r);return e},K=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))if(t.includes(".")){const s=d(e,t);(s===void 0||r>s)&&(e=u(e,t,r))}else(!(t in e)||r>e[t])&&(e[t]=r);return e},L=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))if(t.includes(".")){const s=d(e,t)||0;e=u(e,t,s*r)}else e[t]=(e[t]||0)*r;return e},M=(n,o)=>{let e={...n};const t=new Date;for(const[r,s]of Object.entries(o)){let c;s===!0||typeof s=="object"&&s!==null&&s.$type==="date"?c=t:typeof s=="object"&&s!==null&&s.$type==="timestamp"?c=t.toISOString():c=t,r.includes(".")?e=u(e,r,c):e[r]=c}return e},g=(n,o)=>{let e={...n};for(const[t,r]of Object.entries(o))switch(t){case"$set":e=D(e,r);break;case"$unset":e=F(e,r);break;case"$inc":e=U(e,r);break;case"$push":e=I(e,r);break;case"$pull":e=C(e,r);break;case"$add_to_set":e=R(e,r);break;case"$pull_all":e=z(e,r);break;case"$pop":e=B(e,r);break;case"$rename":e=G(e,r);break;case"$min":e=H(e,r);break;case"$max":e=K(e,r);break;case"$mul":e=L(e,r);break;case"$current_date":e=M(e,r);break;default:throw new Error(`Unsupported update operator: ${t}`)}return e},P=(n,o,e)=>n[o]===e,Q=(n,o)=>{if(!o||Object.keys(o).length===0)return!0;for(const[e,t]of Object.entries(o))if(!P(n,e,t))return!1;return!0},T=n=>{if(!n)throw new Error("Database name is required")},V=n=>{if(!n)throw new Error("Collection name is required")},W=n=>{if(!n||typeof n!="object")throw new Error("Filter must be a valid object")},X=n=>{if(!n||typeof n!="object")throw new Error("Update must be a valid object")},Y=(n,o,e,t)=>{T(n),V(o),W(e),X(t)},Z=n=>{try{return JSON.parse(n)}catch{return null}},j=()=>new 
Date().toISOString(),ee=n=>({...n,_updated_at:j()}),te=(n,o)=>JSON.stringify(n)!==JSON.stringify(o),re=(n,o)=>{const e=x(),t=j(),r={...n,_id:e,_created_at:t,_updated_at:t};return g(r,o)},ne=(n,o,e,t,r,s)=>{let c=0,a=0,l=null,_=null,p=null,i=null;const y=`${o}:${e}:`;let m=!1;const v=n.getRange({start:y,end:y+"\xFF"});for(const{key:b,value:O}of v){const f=Z(O);if(f&&Q(f,t)){m=!0,c=1;const h=g(f,r),k=ee(h);te(f,k)&&(n.put(b,JSON.stringify(k)),_=f,p=k,a=1);break}}if(!m&&s.upsert){i=re(t,r);const b=A(o,e,i._id);n.put(b,JSON.stringify(i)),l=i._id,c=0,a=0}return{matched_count:c,modified_count:a,upserted_id:l,old_document:_,new_document:p,upserted_document:i}},oe=async(n,o,e,t)=>{e&&t&&await S(n,o,e,t)},se=async(n,o,e)=>{e&&await q(n,o,e)},ce=(n,o,e,t,r,s)=>{n.info("Update operation completed",{database:o,collection:e,matched_count:t,modified_count:r,upserted_id:s})},ae=(n,o,e)=>{const t={acknowledged:!0,matched_count:n,modified_count:o};return e&&(t.upserted_id=e),t},ue=(n,o,e)=>({operation:"update_one",database:n,collection:o,filter_keys:Object.keys(e||{})}),ie=async(n,o,e,t,r={})=>{const s=N();Y(n,o,e,t);const c=$(),a=await c.transaction(()=>ne(c,n,o,e,t,r)),{matched_count:l,modified_count:_,upserted_id:p,old_document:i,new_document:y,upserted_document:m}=a;return await oe(n,o,i,y),await se(n,o,m),ce(s,n,o,l,_,p),ae(l,_,p)},de=async(n,o,e,t,r={})=>{const s=E(),c=ue(n,o,e);return await s.enqueue_write_operation(()=>ie(n,o,e,t,r),c)};var ye=de;export{ye as default};
@@ -0,0 +1 @@
1
// NOTE(review): minified build artifact of src/server/lib/simple_sync_manager.js — do not hand-edit; regenerate from src/.
// Primary-side sync fan-out: opens one TCP socket per configured secondary node (sync_port, default 1985),
// schedules a reconnect 5s after socket "error"/"close", and writes encoded "operation_sync" messages
// (including the configured api_key and an incrementing sequence number) for each queued write operation.
// "sync_acknowledged" responses update the total/success/failure/auth-failure stats counters.
+ import l from"net";import{get_settings as y}from"./load_settings.js";import{encode_message as g}from"./tcp_protocol.js";import u from"./logger.js";const{create_context_logger:m}=u("simple_sync");class p{constructor(){this.is_primary=!1,this.secondary_nodes=[],this.connections=new Map,this.sync_port=1985,this.sync_timeout_ms=5e3,this.sync_retries=2,this.sequence_number=0,this.log=m(),this.stats={total_synced:0,successful_syncs:0,failed_syncs:0,auth_failures:0}}initialize(){try{const s=y();if(!s.primary){this.log.info("Node not configured as primary - sync disabled");return}this.is_primary=s.primary,this.secondary_nodes=s.secondary_nodes||[],this.sync_port=s.sync_port||1985,this.sync_timeout_ms=s.sync_timeout_ms||5e3,this.sync_retries=s.sync_retries||2,this.log.info("Initializing simple sync manager",{is_primary:this.is_primary,secondary_count:this.secondary_nodes.length,sync_port:this.sync_port,timeout_ms:this.sync_timeout_ms,retries:this.sync_retries}),this.connect_to_secondaries()}catch(s){this.log.warn("Could not initialize sync manager - settings not loaded",{error:s.message})}}connect_to_secondaries(){for(const s of this.secondary_nodes)this.connect_to_secondary(s)}connect_to_secondary(s){const{ip:n}=s,e=`${n}:${this.sync_port}`;if(!this.connections.has(e)){this.log.info("Connecting to secondary node",{ip:n,port:this.sync_port});try{const t=new l.Socket;t.connect(this.sync_port,n,()=>{this.log.info("Connected to secondary node",{ip:n,port:this.sync_port})}),t.on("error",o=>{this.log.error("Secondary connection error",{ip:n,error:o.message}),this.connections.delete(e),setTimeout(()=>{try{this.connect_to_secondary(s)}catch(r){this.log.error("Failed to retry secondary connection",{ip:n,error:r.message})}},5e3)}),t.on("close",()=>{this.log.warn("Secondary connection closed",{ip:n}),this.connections.delete(e),setTimeout(()=>{try{this.connect_to_secondary(s)}catch(o){this.log.error("Failed to retry secondary 
connection",{ip:n,error:o.message})}},5e3)}),t.on("data",o=>{try{const r=JSON.parse(o.toString());this.handle_sync_response(e,r)}catch(r){this.log.error("Failed to parse sync response",{connection_id:e,error:r.message})}}),this.connections.set(e,{socket:t,ip:n,connected:!0,last_sync:null})}catch(t){this.log.error("Failed to connect to secondary",{ip:n,error:t.message})}}}handle_sync_response(s,n){const{type:e,status:t,sequence:o,error:r}=n;e==="sync_acknowledged"&&(t==="success"?(this.stats.successful_syncs++,this.log.debug("Sync acknowledged",{connection_id:s,sequence:o})):t==="auth_failed"?(this.stats.auth_failures++,this.log.error("Sync authentication failed",{connection_id:s,sequence:o,error:r})):(this.stats.failed_syncs++,this.log.error("Sync failed",{connection_id:s,sequence:o,error:r})))}queue_sync(s,n,e){if(!this.is_primary||this.connections.size===0)return;const t=++this.sequence_number;this.log.debug("Queuing sync operation",{operation:s,collection:n,sequence:t,secondary_count:this.connections.size}),this.stats.total_synced++,this.send_sync_to_secondaries(s,n,e,t)}send_sync_to_secondaries(s,n,e,t){try{const o=y();if(!o.api_key){this.log.error("No API_KEY configured for sync operations");return}const r={type:"operation_sync",api_key:o.api_key,sequence:t,timestamp:Date.now(),operation:s,collection:n,data:e},h=g(r);for(const[a,i]of this.connections)if(!(!i.connected||!i.socket))try{i.socket.write(h),i.last_sync=Date.now(),this.log.debug("Sent sync to secondary",{connection_id:a,operation:s,sequence:t})}catch(d){this.log.error("Failed to send sync to secondary",{connection_id:a,error:d.message})}}catch(o){this.log.error("Failed to send sync to secondaries",{operation:s,sequence:t,error:o.message})}}update_secondary_nodes(s){this.log.info("Updating secondary nodes configuration",{old_count:this.secondary_nodes.length,new_count:s.length});for(const[n,e]of this.connections)try{e.socket.end()}catch(t){this.log.warn("Error closing secondary 
connection",{connection_id:n,error:t.message})}this.connections.clear(),this.secondary_nodes=s,this.connect_to_secondaries()}async force_sync(){if(!this.is_primary)throw new Error("Node is not configured as primary");const s=[];for(const[n,e]of this.connections)try{e.connected?s.push({connection_id:n,status:"sync_initiated"}):s.push({connection_id:n,status:"not_connected"})}catch(t){s.push({connection_id:n,status:"error",error:t.message})}return{success:!0,message:"Force sync initiated",results:s}}get_sync_status(){const s=[];for(const[n,e]of this.connections)s.push({connection_id:n,ip:e.ip,connected:e.connected,last_sync:e.last_sync});return{is_primary:this.is_primary,secondary_count:this.connections.size,stats:this.stats,secondaries:s}}async shutdown(){this.log.info("Shutting down simple sync manager");for(const[s,n]of this.connections)try{n.socket.end()}catch(e){this.log.warn("Error closing secondary connection during shutdown",{connection_id:s,error:e.message})}this.connections.clear(),this.is_primary=!1,this.log.info("Simple sync manager shutdown complete")}}let c=null;const f=()=>(c||(c=new p),c),F=()=>{f().initialize()},b=async()=>{c&&(await c.shutdown(),c=null)};export{f as get_simple_sync_manager,F as initialize_simple_sync_manager,b as shutdown_simple_sync_manager};
@@ -0,0 +1 @@
1
// NOTE(review): minified build artifact of src/server/lib/sync_receiver.js — do not hand-edit; regenerate from src/.
// Secondary-side receiver: when settings.primary === false, loads an API key from the file named by
// secondary_sync_key, listens on sync_port (default 1985), and applies authenticated "operation_sync"
// messages by dispatching to the local insert/update/delete/bulk-write/index operations, replying with
// "sync_acknowledged" (success / auth_failed / error) per message.
// NOTE(review): validate_api_key compares keys with a plain === — not timing-safe; consider
// crypto.timingSafeEqual if the sync port can be reached by untrusted peers — verify threat model.
+ import y from"net";import h from"fs/promises";import{get_settings as d}from"./load_settings.js";import{create_message_parser as f,encode_message as m}from"./tcp_protocol.js";import g from"./logger.js";import u from"./operations/insert_one.js";import v from"./operations/update_one.js";import w from"./operations/delete_one.js";import k from"./operations/delete_many.js";import S from"./operations/bulk_write.js";import b from"./operations/create_index.js";import A from"./operations/drop_index.js";const{create_context_logger:I}=g("sync_receiver");class x{constructor(){this.is_secondary=!1,this.api_key=null,this.api_key_file_path=null,this.server=null,this.sync_port=1985,this.log=I(),this.stats={total_received:0,successful_syncs:0,failed_syncs:0,auth_failures:0,operations_applied:0}}async initialize(){try{const e=d();if(e.primary===!0){this.log.info("Node configured as primary - sync receiver disabled");return}if(e.primary===!1){if(this.is_secondary=!0,this.api_key_file_path=e.secondary_sync_key,this.sync_port=e.sync_port||1985,!this.api_key_file_path){this.log.error("Secondary node missing secondary_sync_key configuration");return}if(await this.load_api_key(),!this.api_key){this.log.error("Failed to load API_KEY - sync receiver disabled");return}this.log.info("Initializing sync receiver for secondary node",{api_key_file:this.api_key_file_path,sync_port:this.sync_port}),this.start_server()}}catch(e){this.log.warn("Could not initialize sync receiver - settings not loaded",{error:e.message})}}async load_api_key(){try{const e=await h.readFile(this.api_key_file_path,"utf8");this.api_key=e.trim(),this.log.info("API_KEY loaded successfully",{file_path:this.api_key_file_path,key_length:this.api_key.length})}catch(e){this.log.error("Failed to load API_KEY from file",{file_path:this.api_key_file_path,error:e.message})}}start_server(){try{this.server=y.createServer(e=>{this.log.debug("Sync connection established",{remote_address:e.remoteAddress,remote_port:e.remotePort});const 
s=f();e.on("data",r=>{try{const t=s.parse_messages(r);for(const o of t)this.handle_sync_message(e,o).catch(i=>{this.log.error("Failed to handle sync message",{error:i.message,remote_address:e.remoteAddress})})}catch(t){this.log.error("Failed to parse sync message",{error:t.message,remote_address:e.remoteAddress})}}),e.on("error",r=>{this.log.error("Sync connection error",{error:r.message,remote_address:e.remoteAddress})}),e.on("close",()=>{this.log.debug("Sync connection closed",{remote_address:e.remoteAddress})})}),this.server.listen(this.sync_port,()=>{this.log.info("Sync receiver server started",{port:this.sync_port})}),this.server.on("error",e=>{this.log.error("Sync receiver server error",{error:e.message,port:this.sync_port})})}catch(e){this.log.error("Failed to start sync receiver server",{error:e.message,port:this.sync_port})}}async handle_sync_message(e,s){this.stats.total_received++;let r;try{r=typeof s=="string"?JSON.parse(s):s}catch{this.send_sync_response(e,null,"error","Invalid JSON message");return}const{type:t,api_key:o,sequence:i,operation:a,collection:_,data:p}=r;if(t!=="operation_sync"){this.send_sync_response(e,i,"error","Invalid message type");return}if(!this.validate_api_key(o)){this.stats.auth_failures++,this.log.error("Sync authentication failed",{sequence:i,operation:a,remote_address:e.remoteAddress}),this.send_sync_response(e,i,"auth_failed","Invalid API_KEY");return}try{await this.apply_sync_operation(a,_,p),this.stats.successful_syncs++,this.stats.operations_applied++,this.log.debug("Sync operation applied successfully",{sequence:i,operation:a,collection:_}),this.send_sync_response(e,i,"success",null)}catch(c){this.stats.failed_syncs++,this.log.error("Failed to apply sync operation",{sequence:i,operation:a,collection:_,error:c.message}),this.send_sync_response(e,i,"error",c.message)}}validate_api_key(e){return!e||!this.api_key?!1:e===this.api_key}async apply_sync_operation(e,s,r){const 
t=r.database||"default";switch(e){case"insert_one":return await u(t,s,r.document,r.options);case"update_one":return await v(t,s,r.filter,r.update,r.options);case"delete_one":return await w(t,s,r.filter,r.options);case"delete_many":return await k(t,s,r.filter,r.options);case"bulk_write":return await S(t,s,r.operations,r.options);case"create_index":return await b(t,s,r.field,r.options);case"drop_index":return await A(t,s,r.field);default:throw new Error(`Unsupported sync operation: ${e}`)}}send_sync_response(e,s,r,t){const o={type:"sync_acknowledged",sequence:s,status:r,timestamp:Date.now()};t&&(o.error=t);try{const i=m(o);e.write(i)}catch(i){this.log.error("Failed to send sync response",{sequence:s,status:r,error:i.message})}}should_block_client_operation(e){return!(!this.is_secondary||["find","find_one","count_documents","get_indexes"].includes(e))}get_sync_status(){return{is_secondary:this.is_secondary,api_key_loaded:!!this.api_key,api_key_file:this.api_key_file_path,server_running:!!this.server&&this.server.listening,sync_port:this.sync_port,stats:this.stats}}async reload_api_key(){if(!this.api_key_file_path)throw new Error("No API_KEY file path configured");const e=this.api_key?this.api_key.length:0;await this.load_api_key(),this.log.info("API_KEY reloaded",{old_key_length:e,new_key_length:this.api_key?this.api_key.length:0})}promote_to_primary(){if(!this.is_secondary)throw new Error("Node is not configured as secondary");this.log.info("Promoting secondary to primary"),this.server&&(this.server.close(()=>{this.log.info("Sync receiver server stopped for primary promotion")}),this.server=null),this.is_secondary=!1,this.log.info("Node promoted to primary - sync receiver disabled")}async shutdown(){if(this.log.info("Shutting down sync receiver"),this.server)return new Promise(e=>{this.server.close(()=>{this.log.info("Sync receiver server closed"),e()})});this.log.info("Sync receiver shutdown complete")}}let n=null;const E=()=>(n||(n=new x),n),$=async()=>{await 
E().initialize()},j=async()=>{n&&(await n.shutdown(),n=null)};export{E as get_sync_receiver,$ as initialize_sync_receiver,j as shutdown_sync_receiver};
@@ -0,0 +1,197 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * @fileoverview Full debug test runner that mimics the original test runner
5
+ * but with enhanced exception tracking to identify problematic tests.
6
+ */
7
+
8
+ import { spawn } from 'child_process';
9
+
10
// Shared, ordered record of every uncaught exception / unhandled
// rejection observed while the suite runs; analyzed after the run.
const uncaught_exceptions = [];
let current_test_phase = 'startup';
let test_start_time = null;

// Milliseconds since the suite started (0 before the first run begins).
const elapsed_since_start = () => (test_start_time ? Date.now() - test_start_time : 0);

// Prints the first five frames of a stack trace, indented.
const print_stack_excerpt = (stack) => {
  for (const frame of stack.split('\n').slice(0, 5)) {
    console.error(`   ${frame}`);
  }
};

// Both handlers record the failure and deliberately keep the process
// alive so the rest of the suite can finish running.
process.on('uncaughtException', (error) => {
  const exception_info = {
    type: 'uncaughtException',
    phase: current_test_phase,
    message: error.message,
    stack: error.stack,
    timestamp: new Date().toISOString(),
    elapsed_ms: elapsed_since_start()
  };
  uncaught_exceptions.push(exception_info);

  console.error(`\nšŸ”„ UNCAUGHT EXCEPTION #${uncaught_exceptions.length}:`);
  console.error(`šŸ“ Phase: ${current_test_phase}`);
  console.error(`ā° Elapsed: ${exception_info.elapsed_ms}ms`);
  console.error(`šŸ’„ Error: ${error.message}`);
  console.error(`šŸ“š Stack (first 5 lines):`);
  print_stack_excerpt(error.stack);
  console.error(`ā° Time: ${exception_info.timestamp}\n`);
});

process.on('unhandledRejection', (reason, promise) => {
  const exception_info = {
    type: 'unhandledRejection',
    phase: current_test_phase,
    reason: reason?.toString() || 'Unknown reason',
    stack: reason?.stack || 'No stack available',
    timestamp: new Date().toISOString(),
    elapsed_ms: elapsed_since_start()
  };
  uncaught_exceptions.push(exception_info);

  console.error(`\nšŸ”„ UNHANDLED REJECTION #${uncaught_exceptions.length}:`);
  console.error(`šŸ“ Phase: ${current_test_phase}`);
  console.error(`ā° Elapsed: ${exception_info.elapsed_ms}ms`);
  console.error(`šŸ’„ Reason: ${reason}`);
  console.error(`šŸ“š Stack (first 5 lines):`);
  print_stack_excerpt(exception_info.stack);
  console.error(`ā° Time: ${exception_info.timestamp}\n`);
});
59
+
60
/**
 * Runs the full test suite exactly like the original test runner
 * (the `npm test` command), streaming child output through so progress
 * stays visible while the process-level exception handlers record
 * anything that escapes.
 * @returns {Promise<number>} Exit code of the ava run; 1 when the child
 * could not be spawned or was terminated by a signal.
 */
const run_full_test_suite_debug = () => {
  return new Promise((resolve) => {
    current_test_phase = 'full-test-suite';
    test_start_time = Date.now();

    console.log(`šŸ” Running FULL TEST SUITE with debug tracking`);
    console.log(`šŸ“ This mimics the exact command: npm test`);
    console.log(`šŸ’» Command: ./node_modules/.bin/ava --serial --verbose tests/client/**/*.test.js tests/server/**/*.test.js`);
    console.log(`šŸ”§ NODE_OPTIONS: --expose-gc --max-old-space-size=4096`);
    console.log(`ā° Started at: ${new Date().toISOString()}\n`);

    const command = './node_modules/.bin/ava';
    const args = ['--serial', '--verbose', 'tests/client/**/*.test.js', 'tests/server/**/*.test.js'];

    const child = spawn(command, args, {
      stdio: 'pipe', // Capture output so we can track progress
      env: {
        ...process.env,
        NODE_ENV: 'test',
        NODE_OPTIONS: '--expose-gc --max-old-space-size=4096'
      }
    });

    let test_count = 0;

    child.stdout.on('data', (data) => {
      const text = data.toString();
      process.stdout.write(text);

      // Each āœ” in ava's verbose output marks a completed test; use the
      // running count to label which test an exception occurred near.
      const test_matches = text.match(/āœ”/g);
      if (test_matches) {
        test_count += test_matches.length;
        current_test_phase = `test-${test_count}`;
      }
    });

    child.stderr.on('data', (data) => {
      process.stderr.write(data.toString());
    });

    child.on('close', (code) => {
      const elapsed = Date.now() - test_start_time;

      console.log(`\nāœ… Full test suite completed with exit code: ${code}`);
      console.log(`ā° Total elapsed: ${elapsed}ms`);
      console.log(`šŸ“Š Total tests detected: ${test_count}`);

      // Exception analysis
      console.log(`\nšŸ“Š UNCAUGHT EXCEPTION ANALYSIS:`);
      console.log(`Total exceptions detected: ${uncaught_exceptions.length}`);

      if (uncaught_exceptions.length > 0) {
        console.log('\nšŸ”„ Exception Timeline:');

        uncaught_exceptions.forEach((exc, index) => {
          console.log(`\nException #${index + 1}:`);
          console.log(`  Type: ${exc.type}`);
          console.log(`  Phase: ${exc.phase}`);
          console.log(`  Elapsed: ${exc.elapsed_ms}ms`);
          // Rejection records store `reason` instead of `message`; fall
          // back so the timeline never prints "undefined".
          console.log(`  Message: ${exc.message ?? exc.reason}`);
          console.log(`  Time: ${exc.timestamp}`);

          if (exc.stack) {
            console.log(`  Key Stack Lines:`);
            const stack_lines = exc.stack.split('\n')
              .filter(line => line.includes('db/src/') || line.includes('db/tests/'))
              .slice(0, 3);
            stack_lines.forEach(line => console.log(`    ${line.trim()}`));
          }
        });

        // Try to correlate with test timing
        console.log('\nšŸ•’ Exception Timing Analysis:');
        uncaught_exceptions.forEach((exc, index) => {
          // Guard against a zero-length run so 0 / 0 cannot produce NaN.
          const test_number_estimate = elapsed > 0
            ? Math.floor((exc.elapsed_ms / elapsed) * test_count)
            : 0;
          console.log(`  Exception #${index + 1} occurred around test #${test_number_estimate} (${exc.elapsed_ms}ms elapsed)`);
        });
      }

      // `code` is null when the child was killed by a signal; report
      // that as failure rather than letting process.exit(null) imply
      // success in main().
      resolve(code ?? 1);
    });

    child.on('error', (error) => {
      console.error(`\nāŒ Full test suite failed: ${error.message}`);
      resolve(1);
    });
  });
};
157
+
158
/**
 * Entry point: prints usage for --help/-h, otherwise runs the full
 * suite under debug tracking, summarizes the outcome, and exits with
 * the suite's exit code.
 */
const main = async () => {
  const cli_args = process.argv.slice(2);

  if (cli_args.includes('--help') || cli_args.includes('-h')) {
    console.log(`
šŸ” Full Debug Test Runner for JoystickDB

Usage: node full_debug_test_runner.js

This runner executes the complete test suite exactly like 'npm test'
but captures uncaught exceptions and unhandled rejections with detailed
timing and context information to identify problematic tests.
`);
    process.exit(0);
  }

  console.log('šŸŽÆ Selected strategy: Full Test Suite Debug Analysis');

  const exit_code = await run_full_test_suite_debug();

  // Summarize: failure first, then clean pass, then pass-with-exceptions.
  if (exit_code !== 0) {
    console.log(`\nšŸ’„ Tests failed with exit code: ${exit_code}`);
  } else if (uncaught_exceptions.length === 0) {
    console.log(`\nšŸŽ‰ All tests passed with NO uncaught exceptions!`);
  } else {
    console.log(`\nāš ļø All tests passed but ${uncaught_exceptions.length} uncaught exceptions detected`);
  }

  process.exit(exit_code);
};

// Kick off the runner; surface any error that escapes main() and exit
// non-zero so callers treat it as a failure.
main().catch((err) => {
  console.error(`\nšŸ’„ Debug runner error: ${err.message}`);
  console.error(err.stack);
  process.exit(1);
});