@joystick.js/db-canary 0.0.0-canary.2262 → 0.0.0-canary.2264

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
@@ -1 +1 @@
- import h from"net";import{EventEmitter as l}from"events";import{encode as m,decode as p}from"msgpackr";import f from"./database.js";const _=()=>({useFloat32:!1,int64AsType:"number",mapsAsObjects:!0}),g=s=>{const e=m(s,_()),t=Buffer.allocUnsafe(4);return t.writeUInt32BE(e.length,0),Buffer.concat([t,e])},q=(s,e)=>{const t=s.slice(0,e),n=s.slice(e);try{return{message:p(t,_()),buffer:n}}catch(i){throw new Error(`Invalid message format: ${i.message}`)}},y=s=>{if(s.length<4)return{expected_length:null,buffer:s};const e=s.readUInt32BE(0),t=s.slice(4);return{expected_length:e,buffer:t}},b=()=>{let s=Buffer.alloc(0),e=null;return{parse_messages:i=>{s=Buffer.concat([s,i]);const r=[];for(;s.length>0;){if(e===null){const c=y(s);if(e=c.expected_length,s=c.buffer,e===null)break}if(s.length<e)break;const a=q(s,e);r.push(a.message),s=a.buffer,e=null}return r},reset:()=>{s=Buffer.alloc(0),e=null}}},w=(s,e)=>Math.min(e*Math.pow(2,s-1),3e4),k=(s={})=>({host:s.host||"localhost",port:s.port||1983,password:s.password||null,timeout:s.timeout||5e3,reconnect:s.reconnect!==!1,max_reconnect_attempts:s.max_reconnect_attempts||10,reconnect_delay:s.reconnect_delay||1e3,auto_connect:s.auto_connect!==!1}),x=(s,e,t)=>setTimeout(()=>{s&&!s.destroyed&&(s.destroy(),e(new Error("Connection timeout")))},t),E=(s,e,t)=>setTimeout(()=>{const n=s.get(e);n&&(s.delete(e),n.reject(new Error("Request timeout")))},t),u=(s,e)=>{for(const[t,{reject:n,timeout:i}]of s)clearTimeout(i),n(new Error(e));s.clear()},T=s=>s.ok===1||s.ok===!0,v=s=>s.ok===0||s.ok===!1,B=s=>typeof s.error=="string"?s.error:JSON.stringify(s.error)||"Operation failed";class d extends l{constructor(e={}){super();const t=k(e);this.host=t.host,this.port=t.port,this.password=t.password,this.timeout=t.timeout,this.reconnect=t.reconnect,this.max_reconnect_attempts=t.max_reconnect_attempts,this.reconnect_delay=t.reconnect_delay,this.socket=null,this.message_parser=null,this.is_connected=!1,this.is_authenticated=!1,this.is_connecting=!1,this.reconnect_attempts=0,this.reconnect_timeout=null,this.pending_requests=new Map,this.request_id_counter=0,this.request_queue=[],t.auto_connect&&this.connect()}connect(){if(this.is_connecting||this.is_connected)return;this.is_connecting=!0,this.socket=new h.Socket,this.message_parser=b();const e=x(this.socket,this.handle_connection_error.bind(this),this.timeout);this.setup_socket_handlers(e),this.socket.connect(this.port,this.host,()=>{this.handle_successful_connection(e)})}setup_socket_handlers(e){this.socket.on("data",t=>{this.handle_incoming_data(t)}),this.socket.on("error",t=>{clearTimeout(e),this.handle_connection_error(t)}),this.socket.on("close",()=>{clearTimeout(e),this.handle_disconnect()})}handle_successful_connection(e){clearTimeout(e),this.is_connected=!0,this.is_connecting=!1,this.reconnect_attempts=0,this.emit("connect"),this.password?this.authenticate():this.handle_authentication_complete()}handle_authentication_complete(){this.is_authenticated=!0,this.emit("authenticated"),this.process_request_queue()}handle_incoming_data(e){try{const t=this.message_parser.parse_messages(e);for(const n of t)this.handle_message(n)}catch(t){this.emit("error",new Error(`Message parsing failed: ${t.message}`))}}async authenticate(){if(!this.password){this.emit("error",new Error('Password required for authentication. 
Provide password in client options: joystickdb.client({ password: "your_password" })')),this.disconnect();return}try{if((await this.send_request("authentication",{password:this.password})).ok===1)this.handle_authentication_complete();else throw new Error("Authentication failed")}catch(e){this.emit("error",new Error(`Authentication error: ${e.message}`)),this.disconnect()}}handle_message(e){this.pending_requests.size>0?this.handle_pending_request_response(e):this.emit("response",e)}handle_pending_request_response(e){const[t,{resolve:n,reject:i,timeout:r}]=this.pending_requests.entries().next().value;if(clearTimeout(r),this.pending_requests.delete(t),T(e))n(e);else if(v(e)){const a=B(e);i(new Error(a))}else n(e)}handle_connection_error(e){this.reset_connection_state(),u(this.pending_requests,"Connection lost"),this.emit("error",e),this.should_attempt_reconnect()?this.schedule_reconnect():this.emit("disconnect")}handle_disconnect(){this.reset_connection_state(),u(this.pending_requests,"Connection closed"),this.should_attempt_reconnect()?this.schedule_reconnect():this.emit("disconnect")}reset_connection_state(){this.is_connecting=!1,this.is_connected=!1,this.is_authenticated=!1,this.socket&&(this.socket.removeAllListeners(),this.socket.destroy(),this.socket=null),this.message_parser&&this.message_parser.reset()}should_attempt_reconnect(){return this.reconnect&&this.reconnect_attempts<this.max_reconnect_attempts}schedule_reconnect(){this.reconnect_attempts++;const e=w(this.reconnect_attempts,this.reconnect_delay);this.emit("reconnecting",{attempt:this.reconnect_attempts,delay:e}),this.reconnect_timeout=setTimeout(()=>{this.connect()},e)}send_request(e,t={},n=!0){return new Promise((i,r)=>{const a=++this.request_id_counter,o={message:{op:e,data:t},resolve:i,reject:r,request_id:a};if(this.should_queue_request(e,n)){this.request_queue.push(o);return}this.send_request_now(o)})}should_queue_request(e,t){const i=!["authentication","setup","ping"].includes(e);return(!this.is_connected||i&&!this.is_authenticated)&&t}send_request_now(e){const{message:t,resolve:n,reject:i,request_id:r}=e,a=E(this.pending_requests,r,this.timeout);this.pending_requests.set(r,{resolve:n,reject:i,timeout:a});try{const c=g(t);this.socket.write(c)}catch(c){clearTimeout(a),this.pending_requests.delete(r),i(c)}}process_request_queue(){for(;this.request_queue.length>0&&this.is_connected&&this.is_authenticated;){const e=this.request_queue.shift();this.send_request_now(e)}}disconnect(){this.reconnect=!1,this.reconnect_timeout&&(clearTimeout(this.reconnect_timeout),this.reconnect_timeout=null),this.socket&&this.socket.end()}async backup_now(){return this.send_request("admin",{admin_action:"backup_now"})}async list_backups(){return this.send_request("admin",{admin_action:"list_backups"})}async restore_backup(e){return this.send_request("admin",{admin_action:"restore_backup",backup_name:e})}async get_replication_status(){return this.send_request("admin",{admin_action:"get_replication_status"})}async add_secondary(e){return this.send_request("admin",{admin_action:"add_secondary",...e})}async remove_secondary(e){return this.send_request("admin",{admin_action:"remove_secondary",secondary_id:e})}async sync_secondaries(){return this.send_request("admin",{admin_action:"sync_secondaries"})}async get_secondary_health(){return this.send_request("admin",{admin_action:"get_secondary_health"})}async get_forwarder_status(){return this.send_request("admin",{admin_action:"get_forwarder_status"})}async ping(){return 
this.send_request("ping",{},!1)}async reload(){return this.send_request("reload")}async get_auto_index_stats(){return this.send_request("admin",{admin_action:"get_auto_index_stats"})}async setup(){const e=await this.send_request("setup",{},!1);return e.data&&e.data.instructions&&console.log(e.data.instructions),e}async delete_many(e,t={},n={}){return this.send_request("delete_many",{collection:e,filter:t,options:n})}db(e){return new f(this,e)}async list_databases(){return this.send_request("admin",{admin_action:"list_databases"})}async get_stats(){return this.send_request("admin",{admin_action:"stats"})}}class j{constructor(e,t,n){this.client=e,this.database_name=t,this.collection_name=n}async insert_one(e,t={}){return this.client.send_request("insert_one",{database:this.database_name,collection:this.collection_name,document:e,options:t})}async find_one(e={},t={}){return(await this.client.send_request("find_one",{database:this.database_name,collection:this.collection_name,filter:e,options:t})).document}async find(e={},t={}){return(await this.client.send_request("find",{database:this.database_name,collection:this.collection_name,filter:e,options:t})).documents||[]}async update_one(e,t,n={}){return this.client.send_request("update_one",{database:this.database_name,collection:this.collection_name,filter:e,update:t,options:n})}async delete_one(e,t={}){return this.client.send_request("delete_one",{database:this.database_name,collection:this.collection_name,filter:e,options:t})}async delete_many(e={},t={}){return this.client.send_request("delete_many",{database:this.database_name,collection:this.collection_name,filter:e,options:t})}async bulk_write(e,t={}){return this.client.send_request("bulk_write",{database:this.database_name,collection:this.collection_name,operations:e,options:t})}async create_index(e,t={}){return this.client.send_request("create_index",{database:this.database_name,collection:this.collection_name,field:e,options:t})}async upsert_index(e,t={}){return this.client.send_request("create_index",{database:this.database_name,collection:this.collection_name,field:e,options:{...t,upsert:!0}})}async drop_index(e){return this.client.send_request("drop_index",{database:this.database_name,collection:this.collection_name,field:e})}async get_indexes(){return this.client.send_request("get_indexes",{database:this.database_name,collection:this.collection_name})}}d.Collection=j;const C={client:s=>new d(s)};var P=C;export{P as default};
+ import h from"net";import{EventEmitter as l}from"events";import{encode as m,decode as p}from"msgpackr";import f from"./database.js";const _=()=>({useFloat32:!1,int64AsType:"number",mapsAsObjects:!0}),g=s=>{const e=m(s,_()),t=Buffer.allocUnsafe(4);return t.writeUInt32BE(e.length,0),Buffer.concat([t,e])},q=(s,e)=>{const t=s.slice(0,e),n=s.slice(e);try{return{message:p(t,_()),buffer:n}}catch(i){throw new Error(`Invalid message format: ${i.message}`)}},y=s=>{if(s.length<4)return{expected_length:null,buffer:s};const e=s.readUInt32BE(0),t=s.slice(4);return{expected_length:e,buffer:t}},b=()=>{let s=Buffer.alloc(0),e=null;return{parse_messages:i=>{s=Buffer.concat([s,i]);const r=[];for(;s.length>0;){if(e===null){const a=y(s);if(e=a.expected_length,s=a.buffer,e===null)break}if(s.length<e)break;const c=q(s,e);r.push(c.message),s=c.buffer,e=null}return r},reset:()=>{s=Buffer.alloc(0),e=null}}},w=(s,e)=>Math.min(e*Math.pow(2,s-1),3e4),k=(s={})=>({host:s.host||"localhost",port:s.port||1983,password:s.password||null,timeout:s.timeout||5e3,reconnect:s.reconnect!==!1,max_reconnect_attempts:s.max_reconnect_attempts||10,reconnect_delay:s.reconnect_delay||1e3,auto_connect:s.auto_connect!==!1}),x=(s,e,t)=>setTimeout(()=>{s&&!s.destroyed&&(s.destroy(),e(new Error("Connection timeout")))},t),E=(s,e,t)=>setTimeout(()=>{const n=s.get(e);n&&(s.delete(e),n.reject(new Error("Request timeout")))},t),u=(s,e)=>{for(const[t,{reject:n,timeout:i}]of s)clearTimeout(i),n(new Error(e));s.clear()},T=s=>s.ok===1||s.ok===!0,v=s=>s.ok===0||s.ok===!1,B=s=>typeof s.error=="string"?s.error:JSON.stringify(s.error)||"Operation failed";class d extends l{constructor(e={}){super();const t=k(e);this.host=t.host,this.port=t.port,this.password=t.password,this.timeout=t.timeout,this.reconnect=t.reconnect,this.max_reconnect_attempts=t.max_reconnect_attempts,this.reconnect_delay=t.reconnect_delay,this.socket=null,this.message_parser=null,this.is_connected=!1,this.is_authenticated=!1,this.is_connecting=!1,this.reconnect_attempts=0,this.reconnect_timeout=null,this.pending_requests=new Map,this.request_id_counter=0,this.request_queue=[],t.auto_connect&&this.connect()}connect(){if(this.is_connecting||this.is_connected)return;this.is_connecting=!0,this.socket=new h.Socket,this.message_parser=b();const e=x(this.socket,this.handle_connection_error.bind(this),this.timeout);this.setup_socket_handlers(e),this.socket.connect(this.port,this.host,()=>{this.handle_successful_connection(e)})}setup_socket_handlers(e){this.socket.on("data",t=>{this.handle_incoming_data(t)}),this.socket.on("error",t=>{clearTimeout(e),this.handle_connection_error(t)}),this.socket.on("close",()=>{clearTimeout(e),this.handle_disconnect()})}handle_successful_connection(e){clearTimeout(e),this.is_connected=!0,this.is_connecting=!1,this.reconnect_attempts=0,this.emit("connect"),this.password?this.authenticate():this.handle_authentication_complete()}handle_authentication_complete(){this.is_authenticated=!0,this.emit("authenticated"),this.process_request_queue()}handle_incoming_data(e){try{const t=this.message_parser.parse_messages(e);for(const n of t)this.handle_message(n)}catch(t){this.emit("error",new Error(`Message parsing failed: ${t.message}`))}}async authenticate(){if(!this.password){this.emit("error",new Error('Password required for authentication. 
Provide password in client options: joystickdb.client({ password: "your_password" })')),this.disconnect();return}try{if((await this.send_request("authentication",{password:this.password})).ok===1)this.handle_authentication_complete();else throw new Error("Authentication failed")}catch(e){this.emit("error",new Error(`Authentication error: ${e.message}`)),this.disconnect()}}handle_message(e){this.pending_requests.size>0?this.handle_pending_request_response(e):this.emit("response",e)}handle_pending_request_response(e){const[t,{resolve:n,reject:i,timeout:r}]=this.pending_requests.entries().next().value;if(clearTimeout(r),this.pending_requests.delete(t),T(e))n(e);else if(v(e)){const c=B(e);i(new Error(c))}else n(e)}handle_connection_error(e){this.reset_connection_state(),u(this.pending_requests,"Connection lost"),this.emit("error",e),this.should_attempt_reconnect()?this.schedule_reconnect():this.emit("disconnect")}handle_disconnect(){this.reset_connection_state(),u(this.pending_requests,"Connection closed"),this.should_attempt_reconnect()?this.schedule_reconnect():this.emit("disconnect")}reset_connection_state(){this.is_connecting=!1,this.is_connected=!1,this.is_authenticated=!1,this.socket&&(this.socket.removeAllListeners(),this.socket.destroy(),this.socket=null),this.message_parser&&this.message_parser.reset()}should_attempt_reconnect(){return this.reconnect&&this.reconnect_attempts<this.max_reconnect_attempts}schedule_reconnect(){this.reconnect_attempts++;const e=w(this.reconnect_attempts,this.reconnect_delay);this.emit("reconnecting",{attempt:this.reconnect_attempts,delay:e}),this.reconnect_timeout=setTimeout(()=>{this.connect()},e)}send_request(e,t={},n=!0){return new Promise((i,r)=>{const c=++this.request_id_counter,o={message:{op:e,data:t},resolve:i,reject:r,request_id:c};if(this.should_queue_request(e,n)){this.request_queue.push(o);return}this.send_request_now(o)})}should_queue_request(e,t){const i=!["authentication","setup","ping"].includes(e);return(!this.is_connected||i&&!this.is_authenticated)&&t}send_request_now(e){const{message:t,resolve:n,reject:i,request_id:r}=e,c=E(this.pending_requests,r,this.timeout);this.pending_requests.set(r,{resolve:n,reject:i,timeout:c});try{const a=g(t);this.socket.write(a)}catch(a){clearTimeout(c),this.pending_requests.delete(r),i(a)}}process_request_queue(){for(;this.request_queue.length>0&&this.is_connected&&this.is_authenticated;){const e=this.request_queue.shift();this.send_request_now(e)}}disconnect(){this.reconnect=!1,this.reconnect_timeout&&(clearTimeout(this.reconnect_timeout),this.reconnect_timeout=null),this.socket&&this.socket.end()}async backup_now(){return this.send_request("admin",{admin_action:"backup_now"})}async list_backups(){return this.send_request("admin",{admin_action:"list_backups"})}async restore_backup(e){return this.send_request("admin",{admin_action:"restore_backup",backup_name:e})}async get_replication_status(){return this.send_request("admin",{admin_action:"get_replication_status"})}async add_secondary(e){return this.send_request("admin",{admin_action:"add_secondary",...e})}async remove_secondary(e){return this.send_request("admin",{admin_action:"remove_secondary",secondary_id:e})}async sync_secondaries(){return this.send_request("admin",{admin_action:"sync_secondaries"})}async get_secondary_health(){return this.send_request("admin",{admin_action:"get_secondary_health"})}async get_forwarder_status(){return this.send_request("admin",{admin_action:"get_forwarder_status"})}async ping(){return 
this.send_request("ping",{},!1)}async reload(){return this.send_request("reload")}async get_auto_index_stats(){return this.send_request("admin",{admin_action:"get_auto_index_stats"})}async setup(){const e=await this.send_request("setup",{},!1);return e.data&&e.data.instructions&&console.log(e.data.instructions),e}async delete_many(e,t={},n={}){return this.send_request("delete_many",{collection:e,filter:t,options:n})}db(e){return new f(this,e)}async list_databases(){return this.send_request("admin",{admin_action:"list_databases"})}async get_stats(){return this.send_request("admin",{admin_action:"stats"})}}class j{constructor(e,t,n){this.client=e,this.database_name=t,this.collection_name=n}async insert_one(e,t={}){return this.client.send_request("insert_one",{database:this.database_name,collection:this.collection_name,document:e,options:t})}async find_one(e={},t={}){return(await this.client.send_request("find_one",{database:this.database_name,collection:this.collection_name,filter:e,options:t})).document}async find(e={},t={}){return(await this.client.send_request("find",{database:this.database_name,collection:this.collection_name,filter:e,options:t})).documents||[]}async count_documents(e={},t={}){return(await this.client.send_request("count_documents",{database:this.database_name,collection:this.collection_name,filter:e,options:t})).count}async update_one(e,t,n={}){return this.client.send_request("update_one",{database:this.database_name,collection:this.collection_name,filter:e,update:t,options:n})}async delete_one(e,t={}){return this.client.send_request("delete_one",{database:this.database_name,collection:this.collection_name,filter:e,options:t})}async delete_many(e={},t={}){return this.client.send_request("delete_many",{database:this.database_name,collection:this.collection_name,filter:e,options:t})}async bulk_write(e,t={}){return this.client.send_request("bulk_write",{database:this.database_name,collection:this.collection_name,operations:e,options:t})}async create_index(e,t={}){return this.client.send_request("create_index",{database:this.database_name,collection:this.collection_name,field:e,options:t})}async upsert_index(e,t={}){return this.client.send_request("create_index",{database:this.database_name,collection:this.collection_name,field:e,options:{...t,upsert:!0}})}async drop_index(e){return this.client.send_request("drop_index",{database:this.database_name,collection:this.collection_name,field:e})}async get_indexes(){return this.client.send_request("get_indexes",{database:this.database_name,collection:this.collection_name})}}d.Collection=j;const C={client:s=>new d(s)};var P=C;export{P as default};
@@ -1 +1 @@
- import S from"net";import{decode as k}from"msgpackr";import x from"./lib/op_types.js";import g from"./lib/safe_json_parse.js";import{load_settings as l,get_settings as m,get_port_configuration as p}from"./lib/load_settings.js";import{send_error as c}from"./lib/send_response.js";import{start_cluster as T}from"./cluster/index.js";import h from"./lib/logger.js";import{initialize_database as z,cleanup_database as O}from"./lib/query_engine.js";import{create_message_parser as I,encode_message as w}from"./lib/tcp_protocol.js";import{create_connection_manager as R}from"./lib/connection_manager.js";import{shutdown_write_queue as C}from"./lib/write_queue.js";import{setup_authentication as E,verify_password as q,get_client_ip as N,is_rate_limited as A,initialize_auth_manager as B,reset_auth_state as D}from"./lib/auth_manager.js";import{initialize_api_key_manager as $}from"./lib/api_key_manager.js";import{is_development_mode as y,display_development_startup_message as F,warn_undefined_node_env as J}from"./lib/development_mode.js";import{restore_backup as K,start_backup_schedule as P,stop_backup_schedule as G}from"./lib/backup_manager.js";import{initialize_replication_manager as M,shutdown_replication_manager as W}from"./lib/replication_manager.js";import{initialize_write_forwarder as H,shutdown_write_forwarder as U}from"./lib/write_forwarder.js";import{handle_database_operation as V,handle_admin_operation as Y,handle_ping_operation as j}from"./lib/operation_dispatcher.js";import{start_http_server as L,stop_http_server as Q}from"./lib/http_server.js";import{create_recovery_token as X,initialize_recovery_manager as v,reset_recovery_state as Z}from"./lib/recovery_manager.js";import{has_settings as ee}from"./lib/load_settings.js";const i=new Set;let s=null;const re=e=>e&&e.password,d=e=>({ok:0,error:e}),te=()=>({ok:1,version:"1.0.0",message:"Authentication successful"}),u=(e,r)=>{const t=w(r);e.write(t),e.end()},_=(e,r)=>{const t=w(r);e.write(t)},ne=async(e,r={})=>{if(!re(r)){const t=d("Authentication operation requires password to be set in data.");u(e,t);return}try{const t=N(e);if(A(t)){const a=d("Too many failed attempts. Please try again later.");u(e,a);return}if(!await q(r.password,t)){const a=d("Authentication failed");u(e,a);return}i.add(e.id);const o=te();_(e,o)}catch(t){const n=d(`Authentication error: ${t.message}`);u(e,n)}},oe=e=>({ok:1,password:e,message:"Authentication setup completed successfully. 
Save this password - it will not be shown again."}),ae=e=>({ok:0,error:`Setup error: ${e}`}),se=async(e,r={})=>{try{const t=E(),n=oe(t);_(e,n)}catch(t){const n=ae(t.message);_(e,n)}},ie=(e="")=>{if(!e)throw new Error("Must pass an op type for operation.");return x.includes(e)},ce=e=>g(e),_e=e=>{try{const r=k(e);return typeof r=="string"?g(r):r}catch{return null}},ar=e=>{try{return typeof e=="string"?ce(e):Buffer.isBuffer(e)?_e(e):e}catch{return null}},f=e=>y()?!0:i.has(e.id),pe=async(e,r)=>{if(e?.restore_from)try{r.info("Startup restore requested",{backup_filename:e.restore_from});const t=await K(e.restore_from);r.info("Startup restore completed",{backup_filename:e.restore_from,duration_ms:t.duration_ms});const n={...e};delete n.restore_from,process.env.JOYSTICK_DB_SETTINGS=JSON.stringify(n),l(),r.info("Removed restore_from from settings after successful restore")}catch(t){r.error("Startup restore failed",{backup_filename:e.restore_from,error:t.message}),r.info("Continuing with fresh database after restore failure")}},de=()=>{try{return l(),m()}catch{return null}},ue=async e=>{const r=e?.data_path||"./data";z(r),B(),await $(),v()},le=e=>{try{M(),e.info("Replication manager initialized")}catch(r){e.warn("Failed to initialize replication manager",{error:r.message})}},me=e=>{try{H(),e.info("Write forwarder initialized")}catch(r){e.warn("Failed to initialize write forwarder",{error:r.message})}},fe=(e,r)=>{if(e?.s3)try{P(),r.info("Backup scheduling started")}catch(t){r.warn("Failed to start backup scheduling",{error:t.message})}},ge=async(e,r)=>{try{const t=await L(e);return t&&r.info("HTTP server started",{http_port:e}),t}catch(t){return r.warn("Failed to start HTTP server",{error:t.message}),null}},he=()=>{if(y()){const{tcp_port:e,http_port:r}=p();F(e,r)}else J()},we=()=>R({max_connections:1e3,idle_timeout:600*1e3,request_timeout:5*1e3}),ye=async(e,r,t,n)=>{s.update_activity(e.id);try{const o=t.parse_messages(r);for(const a of o)await ve(e,a,r.length,n)}catch(o){n.error("Message parsing failed",{client_id:e.id,error:o.message}),c(e,{message:"Invalid message format"}),e.end()}},ve=async(e,r,t,n)=>{const o=r,a=o?.op||null;if(!a){c(e,{message:"Missing operation type"});return}if(!ie(a)){c(e,{message:"Invalid operation type"});return}const b=s.create_request_timeout(e.id,a);try{await be(e,a,o,t)}finally{clearTimeout(b)}},be=async(e,r,t,n)=>{const o=t?.data||{};switch(r){case"authentication":await ne(e,o);break;case"setup":await se(e,o);break;case"insert_one":case"update_one":case"delete_one":case"delete_many":case"bulk_write":case"find_one":case"find":case"create_index":case"drop_index":case"get_indexes":await V(e,r,o,f,n,s,i);break;case"ping":j(e);break;case"admin":await Y(e,o,f,s,i);break;case"reload":await Se(e);break;default:c(e,{message:`Operation ${r} not implemented`})}},Se=async e=>{if(!f(e)){c(e,{message:"Authentication required"});return}try{const r=ke(),t=await xe(),n=Te(r,t);_(e,n)}catch(r){const t={ok:0,error:`Reload operation failed: ${r.message}`};_(e,t)}},ke=()=>{try{return m()}catch{return null}},xe=async()=>{try{return await l(),m()}catch{return{port:1983,authentication:{}}}},Te=(e,r)=>({ok:1,status:"success",message:"Configuration reloaded successfully",changes:{port_changed:e?e.port!==r.port:!1,authentication_changed:e?e.authentication?.password_hash!==r.authentication?.password_hash:!1},timestamp:new Date().toISOString()}),ze=(e,r)=>{r.info("Client disconnected",{socket_id:e.id}),i.delete(e.id),s.remove_connection(e.id)},Oe=(e,r,t)=>{t.error("Socket 
error",{socket_id:e.id,error:r.message}),i.delete(e.id),s.remove_connection(e.id)},Ie=(e,r,t)=>{e.on("data",async n=>{await ye(e,n,r,t)}),e.on("end",()=>{ze(e,t)}),e.on("error",n=>{Oe(e,n,t)})},Re=(e,r)=>{if(!s.add_connection(e))return;const t=I();Ie(e,t,r)},Ce=()=>async()=>{try{await Q(),G(),await W(),await U(),s&&s.shutdown(),i.clear(),await C(),await new Promise(e=>setTimeout(e,100)),await O(),D(),Z()}catch{}},sr=async()=>{const{create_context_logger:e}=h("server"),r=e(),t=de();await pe(t,r),await ue(t),le(r),me(r),fe(t,r),s=we();const{http_port:n}=p();await ge(n,r),he();const o=S.createServer((a={})=>{Re(a,r)});return o.cleanup=Ce(),o},Ee=e=>{try{v();const r=X();console.log("Emergency Recovery Token Generated"),console.log(`Visit: ${r.url}`),console.log("Token expires in 10 minutes"),e.info("Recovery token generated via CLI",{expires_at:new Date(r.expires_at).toISOString()}),process.exit(0)}catch(r){console.error("Failed to generate recovery token:",r.message),e.error("Recovery token generation failed",{error:r.message}),process.exit(1)}},qe=()=>{const{tcp_port:e}=p();return{worker_count:process.env.WORKER_COUNT?parseInt(process.env.WORKER_COUNT):void 0,port:e,environment:process.env.NODE_ENV||"development"}},Ne=(e,r)=>{const{tcp_port:t,http_port:n}=p(),o=ee();r.info("Starting JoystickDB server...",{workers:e.worker_count||"auto",tcp_port:t,http_port:n,environment:e.environment,has_settings:o,port_source:o?"JOYSTICK_DB_SETTINGS":"default"})};if(import.meta.url===`file://${process.argv[1]}`){const{create_context_logger:e}=h("main"),r=e();process.argv.includes("--generate-recovery-token")&&Ee(r);const t=qe();Ne(t,r),T(t)}export{ne as authentication,ie as check_op_type,sr as create_server,ar as parse_data,se as setup};
+ import S from"net";import{decode as k}from"msgpackr";import x from"./lib/op_types.js";import g from"./lib/safe_json_parse.js";import{load_settings as l,get_settings as m,get_port_configuration as p}from"./lib/load_settings.js";import{send_error as c}from"./lib/send_response.js";import{start_cluster as T}from"./cluster/index.js";import h from"./lib/logger.js";import{initialize_database as z,cleanup_database as O}from"./lib/query_engine.js";import{create_message_parser as I,encode_message as w}from"./lib/tcp_protocol.js";import{create_connection_manager as R}from"./lib/connection_manager.js";import{shutdown_write_queue as C}from"./lib/write_queue.js";import{setup_authentication as E,verify_password as q,get_client_ip as N,is_rate_limited as A,initialize_auth_manager as B,reset_auth_state as D}from"./lib/auth_manager.js";import{initialize_api_key_manager as $}from"./lib/api_key_manager.js";import{is_development_mode as y,display_development_startup_message as F,warn_undefined_node_env as J}from"./lib/development_mode.js";import{restore_backup as K,start_backup_schedule as P,stop_backup_schedule as G}from"./lib/backup_manager.js";import{initialize_replication_manager as M,shutdown_replication_manager as W}from"./lib/replication_manager.js";import{initialize_write_forwarder as H,shutdown_write_forwarder as U}from"./lib/write_forwarder.js";import{handle_database_operation as V,handle_admin_operation as Y,handle_ping_operation as j}from"./lib/operation_dispatcher.js";import{start_http_server as L,stop_http_server as Q}from"./lib/http_server.js";import{create_recovery_token as X,initialize_recovery_manager as v,reset_recovery_state as Z}from"./lib/recovery_manager.js";import{has_settings as ee}from"./lib/load_settings.js";const i=new Set;let s=null;const re=e=>e&&e.password,d=e=>({ok:0,error:e}),te=()=>({ok:1,version:"1.0.0",message:"Authentication successful"}),u=(e,r)=>{const t=w(r);e.write(t),e.end()},_=(e,r)=>{const t=w(r);e.write(t)},ne=async(e,r={})=>{if(!re(r)){const t=d("Authentication operation requires password to be set in data.");u(e,t);return}try{const t=N(e);if(A(t)){const a=d("Too many failed attempts. Please try again later.");u(e,a);return}if(!await q(r.password,t)){const a=d("Authentication failed");u(e,a);return}i.add(e.id);const o=te();_(e,o)}catch(t){const n=d(`Authentication error: ${t.message}`);u(e,n)}},oe=e=>({ok:1,password:e,message:"Authentication setup completed successfully. 
Save this password - it will not be shown again."}),ae=e=>({ok:0,error:`Setup error: ${e}`}),se=async(e,r={})=>{try{const t=E(),n=oe(t);_(e,n)}catch(t){const n=ae(t.message);_(e,n)}},ie=(e="")=>{if(!e)throw new Error("Must pass an op type for operation.");return x.includes(e)},ce=e=>g(e),_e=e=>{try{const r=k(e);return typeof r=="string"?g(r):r}catch{return null}},ar=e=>{try{return typeof e=="string"?ce(e):Buffer.isBuffer(e)?_e(e):e}catch{return null}},f=e=>y()?!0:i.has(e.id),pe=async(e,r)=>{if(e?.restore_from)try{r.info("Startup restore requested",{backup_filename:e.restore_from});const t=await K(e.restore_from);r.info("Startup restore completed",{backup_filename:e.restore_from,duration_ms:t.duration_ms});const n={...e};delete n.restore_from,process.env.JOYSTICK_DB_SETTINGS=JSON.stringify(n),l(),r.info("Removed restore_from from settings after successful restore")}catch(t){r.error("Startup restore failed",{backup_filename:e.restore_from,error:t.message}),r.info("Continuing with fresh database after restore failure")}},de=()=>{try{return l(),m()}catch{return null}},ue=async e=>{const r=e?.data_path||"./data";z(r),B(),await $(),v()},le=e=>{try{M(),e.info("Replication manager initialized")}catch(r){e.warn("Failed to initialize replication manager",{error:r.message})}},me=e=>{try{H(),e.info("Write forwarder initialized")}catch(r){e.warn("Failed to initialize write forwarder",{error:r.message})}},fe=(e,r)=>{if(e?.s3)try{P(),r.info("Backup scheduling started")}catch(t){r.warn("Failed to start backup scheduling",{error:t.message})}},ge=async(e,r)=>{try{const t=await L(e);return t&&r.info("HTTP server started",{http_port:e}),t}catch(t){return r.warn("Failed to start HTTP server",{error:t.message}),null}},he=()=>{if(y()){const{tcp_port:e,http_port:r}=p();F(e,r)}else J()},we=()=>R({max_connections:1e3,idle_timeout:600*1e3,request_timeout:5*1e3}),ye=async(e,r,t,n)=>{s.update_activity(e.id);try{const o=t.parse_messages(r);for(const a of o)await ve(e,a,r.length,n)}catch(o){n.error("Message parsing failed",{client_id:e.id,error:o.message}),c(e,{message:"Invalid message format"}),e.end()}},ve=async(e,r,t,n)=>{const o=r,a=o?.op||null;if(!a){c(e,{message:"Missing operation type"});return}if(!ie(a)){c(e,{message:"Invalid operation type"});return}const b=s.create_request_timeout(e.id,a);try{await be(e,a,o,t)}finally{clearTimeout(b)}},be=async(e,r,t,n)=>{const o=t?.data||{};switch(r){case"authentication":await ne(e,o);break;case"setup":await se(e,o);break;case"insert_one":case"update_one":case"delete_one":case"delete_many":case"bulk_write":case"find_one":case"find":case"count_documents":case"create_index":case"drop_index":case"get_indexes":await V(e,r,o,f,n,s,i);break;case"ping":j(e);break;case"admin":await Y(e,o,f,s,i);break;case"reload":await Se(e);break;default:c(e,{message:`Operation ${r} not implemented`})}},Se=async e=>{if(!f(e)){c(e,{message:"Authentication required"});return}try{const r=ke(),t=await xe(),n=Te(r,t);_(e,n)}catch(r){const t={ok:0,error:`Reload operation failed: ${r.message}`};_(e,t)}},ke=()=>{try{return m()}catch{return null}},xe=async()=>{try{return await l(),m()}catch{return{port:1983,authentication:{}}}},Te=(e,r)=>({ok:1,status:"success",message:"Configuration reloaded successfully",changes:{port_changed:e?e.port!==r.port:!1,authentication_changed:e?e.authentication?.password_hash!==r.authentication?.password_hash:!1},timestamp:new Date().toISOString()}),ze=(e,r)=>{r.info("Client disconnected",{socket_id:e.id}),i.delete(e.id),s.remove_connection(e.id)},Oe=(e,r,t)=>{t.error("Socket 
error",{socket_id:e.id,error:r.message}),i.delete(e.id),s.remove_connection(e.id)},Ie=(e,r,t)=>{e.on("data",async n=>{await ye(e,n,r,t)}),e.on("end",()=>{ze(e,t)}),e.on("error",n=>{Oe(e,n,t)})},Re=(e,r)=>{if(!s.add_connection(e))return;const t=I();Ie(e,t,r)},Ce=()=>async()=>{try{await Q(),G(),await W(),await U(),s&&s.shutdown(),i.clear(),await C(),await new Promise(e=>setTimeout(e,100)),await O(),D(),Z()}catch{}},sr=async()=>{const{create_context_logger:e}=h("server"),r=e(),t=de();await pe(t,r),await ue(t),le(r),me(r),fe(t,r),s=we();const{http_port:n}=p();await ge(n,r),he();const o=S.createServer((a={})=>{Re(a,r)});return o.cleanup=Ce(),o},Ee=e=>{try{v();const r=X();console.log("Emergency Recovery Token Generated"),console.log(`Visit: ${r.url}`),console.log("Token expires in 10 minutes"),e.info("Recovery token generated via CLI",{expires_at:new Date(r.expires_at).toISOString()}),process.exit(0)}catch(r){console.error("Failed to generate recovery token:",r.message),e.error("Recovery token generation failed",{error:r.message}),process.exit(1)}},qe=()=>{const{tcp_port:e}=p();return{worker_count:process.env.WORKER_COUNT?parseInt(process.env.WORKER_COUNT):void 0,port:e,environment:process.env.NODE_ENV||"development"}},Ne=(e,r)=>{const{tcp_port:t,http_port:n}=p(),o=ee();r.info("Starting JoystickDB server...",{workers:e.worker_count||"auto",tcp_port:t,http_port:n,environment:e.environment,has_settings:o,port_source:o?"JOYSTICK_DB_SETTINGS":"default"})};if(import.meta.url===`file://${process.argv[1]}`){const{create_context_logger:e}=h("main"),r=e();process.argv.includes("--generate-recovery-token")&&Ee(r);const t=qe();Ne(t,r),T(t)}export{ne as authentication,ie as check_op_type,sr as create_server,ar as parse_data,se as setup};
@@ -1 +1 @@
- const e=["authentication","setup","find_one","find","insert_one","update_one","delete_one","delete_many","bulk_write","create_index","drop_index","get_indexes","admin","ping","reload"];var n=e;export{n as default};
+ const e=["authentication","setup","find_one","find","count_documents","insert_one","update_one","delete_one","delete_many","bulk_write","create_index","drop_index","get_indexes","admin","ping","reload"];var n=e;export{n as default};
@@ -1 +1 @@
- import{encode_message as f}from"./tcp_protocol.js";import{get_write_forwarder as g}from"./write_forwarder.js";import{get_replication_manager as h}from"./replication_manager.js";import{check_and_grow_map_size as x}from"./query_engine.js";import{performance_monitor as v}from"./performance_monitor.js";import D from"./logger.js";import q from"./operations/insert_one.js";import A from"./operations/update_one.js";import b from"./operations/delete_one.js";import I from"./operations/delete_many.js";import $ from"./operations/bulk_write.js";import C from"./operations/find_one.js";import E from"./operations/find.js";import L from"./operations/create_index.js";import U from"./operations/drop_index.js";import Z from"./operations/get_indexes.js";import j from"./operations/admin.js";const{create_context_logger:d}=D("operation_dispatcher"),k=e=>e.length>64,B=e=>["admin","config","local"].includes(e.toLowerCase()),F=e=>/^[a-zA-Z0-9_-]+$/.test(e),G=e=>!e||typeof e!="string"||k(e)||B(e)?!1:F(e),H=()=>({ok:0,error:"Authentication required"}),J=()=>({ok:0,error:"Invalid database name. Database names must be alphanumeric with underscores/hyphens, max 64 characters, and cannot be reserved names (admin, config, local)."}),_=(e,n)=>{const o=f(n);e.write(o)},p=(e,n,o,s,t,r,c,i)=>{v.log_structured_operation(e,n,o,s,t,r,c,i)},w=(e,n,o,s,t,r,c,i,l=null)=>{const m={client_id:n,op:o,collection:s,duration_ms:t,status:r,request_size:c};r==="success"?(m.response_size=i,e.info("Database operation completed",m)):(m.error=l,e.error("Database operation failed",m))},K=async(e,n,o)=>{switch(e){case"insert_one":return await q(n,o.collection,o.document,o.options);case"update_one":return await A(n,o.collection,o.filter,o.update,o.options);case"delete_one":return await b(n,o.collection,o.filter,o.options);case"delete_many":return await I(n,o.collection,o.filter,o.options);case"bulk_write":return await $(n,o.collection,o.operations,o.options);case"find_one":return await C(n,o.collection,o.filter,o.options);case"find":return await E(n,o.collection,o.filter,o.options);case"create_index":return await L(n,o.collection,o.field,o.options);case"drop_index":return await U(n,o.collection,o.field);case"get_indexes":return await Z(n,o.collection);default:throw new Error(`Unsupported operation: ${e}`)}},M=(e,n)=>e==="find_one"?{ok:1,document:n}:e==="find"?{ok:1,documents:n}:{ok:1,...n},N=e=>!["find","find_one","get_indexes"].includes(e),O=(e,n)=>{if(!N(e))return;h().queue_replication(e,n.collection,n),setImmediate(()=>x())},P=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t,l=M(n,s),u=f(l).length;_(e,l),p(e.id,n,o.collection,i,"success",null,r,u),w(c,e.id,n,o.collection,i,"success",r,u),O(n,o)},Q=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t;p(e.id,n,o.collection,i,"error",s.message,r,0),w(c,e.id,n,o.collection,i,"error",r,0,s.message);const l={ok:0,error:s.message};_(e,l)},me=async(e,n,o,s,t=0,r=null,c=null)=>{const i=Date.now();if(!s(e)){const a=H();_(e,a),p(e.id,n,null,0,"error","Authentication required",t,0);return}const l=o.database||"default";if(!G(l)){const a=J();_(e,a),p(e.id,n,o.collection,0,"error","Invalid database name",t,0);return}if(!await g().forward_operation(e,n,o))try{const a=await K(n,l,o);P(e,n,o,a,i,t)}catch(a){Q(e,n,o,a,i,t)}},R=()=>({ok:!1,error:"Authentication required"}),S=(e,n)=>e?{ok:1,...n}:{ok:!0,...n},T=e=>({ok:0,error:`Admin operation failed: ${e}`}),pe=async(e,n,o,s=null,t=null)=>{if(!o(e)){const r=R();_(e,r);return}try{const r=n?.admin_action,i=await j(r,n||{},s,t),l=S(r,i);_(e,l)}catch(r){const 
c=T(r.message);_(e,c)}},ue=e=>{const n=Date.now(),o={ok:1,response_time_ms:Date.now()-n};_(e,o)};export{pe as handle_admin_operation,me as handle_database_operation,ue as handle_ping_operation};
+ import{encode_message as f}from"./tcp_protocol.js";import{get_write_forwarder as g}from"./write_forwarder.js";import{get_replication_manager as h}from"./replication_manager.js";import{check_and_grow_map_size as x}from"./query_engine.js";import{performance_monitor as v}from"./performance_monitor.js";import D from"./logger.js";import q from"./operations/insert_one.js";import A from"./operations/update_one.js";import I from"./operations/delete_one.js";import $ from"./operations/delete_many.js";import b from"./operations/bulk_write.js";import k from"./operations/find_one.js";import C from"./operations/find.js";import E from"./operations/count_documents.js";import L from"./operations/create_index.js";import U from"./operations/drop_index.js";import Z from"./operations/get_indexes.js";import j from"./operations/admin.js";const{create_context_logger:d}=D("operation_dispatcher"),B=e=>e.length>64,F=e=>["admin","config","local"].includes(e.toLowerCase()),G=e=>/^[a-zA-Z0-9_-]+$/.test(e),H=e=>!e||typeof e!="string"||B(e)||F(e)?!1:G(e),J=()=>({ok:0,error:"Authentication required"}),K=()=>({ok:0,error:"Invalid database name. Database names must be alphanumeric with underscores/hyphens, max 64 characters, and cannot be reserved names (admin, config, local)."}),_=(e,n)=>{const o=f(n);e.write(o)},u=(e,n,o,s,t,r,c,i)=>{v.log_structured_operation(e,n,o,s,t,r,c,i)},w=(e,n,o,s,t,r,c,i,l=null)=>{const m={client_id:n,op:o,collection:s,duration_ms:t,status:r,request_size:c};r==="success"?(m.response_size=i,e.info("Database operation completed",m)):(m.error=l,e.error("Database operation failed",m))},M=async(e,n,o)=>{switch(e){case"insert_one":return await q(n,o.collection,o.document,o.options);case"update_one":return await A(n,o.collection,o.filter,o.update,o.options);case"delete_one":return await I(n,o.collection,o.filter,o.options);case"delete_many":return await $(n,o.collection,o.filter,o.options);case"bulk_write":return await b(n,o.collection,o.operations,o.options);case"find_one":return await k(n,o.collection,o.filter,o.options);case"find":return await C(n,o.collection,o.filter,o.options);case"count_documents":return await E(n,o.collection,o.filter,o.options);case"create_index":return await L(n,o.collection,o.field,o.options);case"drop_index":return await U(n,o.collection,o.field);case"get_indexes":return await Z(n,o.collection);default:throw new Error(`Unsupported operation: ${e}`)}},N=(e,n)=>e==="find_one"?{ok:1,document:n}:e==="find"?{ok:1,documents:n}:e==="count_documents"?{ok:1,...n}:{ok:1,...n},O=e=>!["find","find_one","count_documents","get_indexes"].includes(e),P=(e,n)=>{if(!O(e))return;h().queue_replication(e,n.collection,n),setImmediate(()=>x())},Q=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t,l=N(n,s),p=f(l).length;_(e,l),u(e.id,n,o.collection,i,"success",null,r,p),w(c,e.id,n,o.collection,i,"success",r,p),P(n,o)},R=(e,n,o,s,t,r)=>{const c=d(),i=Date.now()-t;u(e.id,n,o.collection,i,"error",s.message,r,0),w(c,e.id,n,o.collection,i,"error",r,0,s.message);const l={ok:0,error:s.message};_(e,l)},pe=async(e,n,o,s,t=0,r=null,c=null)=>{const i=Date.now();if(!s(e)){const a=J();_(e,a),u(e.id,n,null,0,"error","Authentication required",t,0);return}const l=o.database||"default";if(!H(l)){const a=K();_(e,a),u(e.id,n,o.collection,0,"error","Invalid database name",t,0);return}if(!await g().forward_operation(e,n,o))try{const a=await M(n,l,o);Q(e,n,o,a,i,t)}catch(a){R(e,n,o,a,i,t)}},S=()=>({ok:!1,error:"Authentication required"}),T=(e,n)=>e?{ok:1,...n}:{ok:!0,...n},V=e=>({ok:0,error:`Admin operation failed: 
${e}`}),fe=async(e,n,o,s=null,t=null)=>{if(!o(e)){const r=S();_(e,r);return}try{const r=n?.admin_action,i=await j(r,n||{},s,t),l=T(r,i);_(e,l)}catch(r){const c=V(r.message);_(e,c)}},de=e=>{const n=Date.now(),o={ok:1,response_time_ms:Date.now()-n};_(e,o)};export{fe as handle_admin_operation,pe as handle_database_operation,de as handle_ping_operation};
@@ -0,0 +1 @@
+ import{get_database as x,build_collection_key as y}from"../query_engine.js";import{can_use_index as w,find_documents_by_index as g}from"../index_manager.js";import{record_query as h,record_index_usage as q}from"../auto_index_manager.js";import{matches_filter as m}from"./find.js";import k from"../logger.js";const{create_context_logger:D}=k("count_documents"),F=(r,e)=>{if(!r)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required")},S=(r,e,n,s,i,c)=>{const{field:o,operators:_}=i,t=s[o];let u=0;q(e,n,o);let d=null;if(typeof t=="object"&&t!==null&&!Array.isArray(t)){for(const f of _)if(t[f]!==void 0){d=g(e,n,o,f,t[f]);break}}else _.includes("eq")&&(d=g(e,n,o,"eq",t));if(d)for(const f of d){if(c&&u>=c)break;const l=y(e,n,f),a=r.get(l);if(a){const p=JSON.parse(a);m(p,s)&&u++}}return u},b=(r,e,n,s,i)=>{let c=0;const o=`${e}:${n}:`,_=r.getRange({start:o,end:o+"\xFF"});for(const{key:t,value:u}of _){if(i&&c>=i)break;const d=JSON.parse(u);m(d,s)&&c++}return c},O=(r,e,n,s,i,c)=>{try{h(e,n,s,i,c)}catch(o){r.warn("Failed to record query for auto-indexing",{error:o.message})}},v=(r,e,n,s,i,c,o,_)=>{r.info("Count operation completed",{database:e,collection:n,count:s,used_index:i,indexed_field:c,execution_time_ms:o,limit_reached:_})},A=async(r,e,n={},s={})=>{const i=D();F(r,e);const c=x(),{limit:o}=s,_=Date.now();try{let t=0,u=!1,d=null,f=!1;const l=w(r,e,n);l&&(d=l.field,t=S(c,r,e,n,l,o),u=t>0,f=o&&t>=o),u||(t=b(c,r,e,n,o),f=o&&t>=o);const a=Date.now()-_;return O(i,e,n,a,u,d),v(i,r,e,t,u,d,a,f),{count:t,operation_time:new Date().toISOString(),limit_reached:f||!1}}catch(t){throw i.error("Failed to count documents",{database:r,collection:e,error:t.message}),t}};var j=A;export{j as default};
@@ -1 +1 @@
- import{get_database as O,build_collection_key as F}from"../query_engine.js";import{can_use_index as E,find_documents_by_index as x}from"../index_manager.js";import{record_query as D,record_index_usage as J}from"../auto_index_manager.js";import N from"../logger.js";const{create_context_logger:R}=N("find"),m=r=>r.split("."),b=r=>r==null,S=(r,e)=>{const t=[];for(let n=0;n<r.length;n++){const s=r[n];if(typeof s=="object"&&s!==null){const o=j(s,e);o!==void 0&&(Array.isArray(o)?t.push(...o):t.push(o))}}return t.length>0?t:void 0},j=(r,e)=>{const t=m(e);let n=r;for(let s=0;s<t.length;s++){const o=t[s];if(b(n))return;if(n=n[o],Array.isArray(n)&&s<t.length-1){const i=t.slice(s+1).join(".");return S(n,i)}}return n},C=(r,e)=>r.hasOwnProperty(e),P=(r,e)=>{const t=m(e);let n=r;for(let s=0;s<t.length;s++){if(b(n)||typeof n!="object")return!1;if(s===t.length-1)return C(n,t[s]);n=n[t[s]]}return!1},k=(r,e)=>r.includes(e),p=(r,e,t)=>{for(let n=0;n<r.length;n++)if(t(r[n],e))return!0;return!1},$=(r,e)=>Array.isArray(r)?k(r,e):r===e,U=(r,e)=>Array.isArray(r)?!k(r,e):r!==e,z=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t>n):r>e,B=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t>=n):r>=e,G=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t<n):r<e,H=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t<=n):r<=e,I=(r,e)=>Array.isArray(e)?Array.isArray(r)?p(r,e,(t,n)=>n.includes(t)):e.includes(r):!1,K=(r,e)=>{if(!Array.isArray(e))return!1;if(Array.isArray(r)){for(let t=0;t<r.length;t++)if(e.includes(r[t]))return!1;return!0}return!e.includes(r)},L=(r,e,t)=>{const n=P(r,e);return t?n:!n},M=(r,e,t="")=>{const n=new RegExp(e,t);return Array.isArray(r)?p(r,n,(s,o)=>typeof s=="string"&&o.test(s)):typeof r!="string"?!1:n.test(r)},Q=(r,e,t,n)=>{for(const[s,o]of Object.entries(n))switch(s){case"$eq":if(!$(t,o))return!1;break;case"$ne":if(!U(t,o))return!1;break;case"$gt":if(!z(t,o))return!1;break;case"$gte":if(!B(t,o))return!1;break;case"$lt":if(!G(t,o))return!1;break;case"$lte":if(!H(t,o))return!1;break;case"$in":if(!I(t,o))return!1;break;case"$nin":if(!K(t,o))return!1;break;case"$exists":if(!L(r,e,o))return!1;break;case"$regex":const i=n.$options||"";if(!M(t,o,i))return!1;break;case"$options":break;default:throw new Error(`Unsupported query operator: ${s}`)}return!0},T=(r,e)=>{for(let t=0;t<e.length;t++)if(y(r,e[t]))return!0;return!1},y=(r,e)=>{if(!e||Object.keys(e).length===0)return!0;if(e.$or&&Array.isArray(e.$or)){if(!T(r,e.$or))return!1;const t={...e};return delete t.$or,Object.keys(t).length>0?y(r,t):!0}for(const[t,n]of Object.entries(e)){const s=j(r,t);if(typeof n=="object"&&n!==null&&!Array.isArray(n)){if(!Q(r,t,s,n))return!1}else if(!$(s,n))return!1}return!0},V=r=>Object.values(r).some(e=>e===1||e===!0),W=(r,e)=>{const t={_id:r._id};for(const[n,s]of Object.entries(e))n==="_id"&&(s===0||s===!1)?delete t._id:(s===1||s===!0)&&(t[n]=r[n]);return t},X=(r,e)=>{const t={...r};for(const[n,s]of Object.entries(e))(s===0||s===!1)&&delete t[n];return t},Y=(r,e)=>!e||Object.keys(e).length===0?r:V(e)?W(r,e):X(r,e),Z=(r,e,t)=>{if(r===e)return 0;if(r===void 0)return 1;if(e===void 0)return-1;const n=r<e?-1:r>e?1:0;return t===-1?-n:n},v=(r,e)=>!e||Object.keys(e).length===0?r:r.sort((t,n)=>{for(const[s,o]of Object.entries(e)){const i=Z(t[s],n[s],o);if(i!==0)return i}return 0}),rr=(r,e)=>{if(!r)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required")},er=(r,e,t,n,s)=>{const{field:o,operators:i}=s,c=n[o],_=[];J(e,t,o);let a=null;if(typeof c=="object"&&c!==null&&!Array.isArray(c)){for(const f of i)if(c[f]!==void 
0){a=x(e,t,o,f,c[f]);break}}else i.includes("eq")&&(a=x(e,t,o,"eq",c));if(a)for(const f of a){const u=F(e,t,f),l=r.get(u);if(l){const d=JSON.parse(l);y(d,n)&&_.push(d)}}return _},tr=(r,e,t,n)=>{const s=[],o=`${e}:${t}:`,i=r.getRange({start:o,end:o+"\xFF"});for(const{key:c,value:_}of i){const a=JSON.parse(_);y(a,n)&&s.push(a)}return s},nr=(r,e,t)=>{let n=r;return e>0&&(n=n.slice(e)),t&&t>0&&(n=n.slice(0,t)),n},sr=(r,e,t,n,s,o)=>{try{D(e,t,n,s,o)}catch(i){r.warn("Failed to record query for auto-indexing",{error:i.message})}},or=(r,e,t,n,s,o,i,c)=>{r.info("Find operation completed",{database:e,collection:t,documents_found:n,total_matching:s,used_index:o,indexed_field:i,execution_time_ms:c})},ir=async(r,e,t={},n={})=>{const s=R();rr(r,e);const o=O(),{projection:i,sort:c,limit:_,skip:a=0}=n,f=Date.now();try{let u=[],l=!1,d=null;const g=E(r,e,t);g&&(d=g.field,u=er(o,r,e,t,g),l=u.length>0),l||(u=tr(o,r,e,t));const q=v(u,c),h=nr(q,a,_).map(w=>Y(w,i)),A=Date.now()-f;return sr(s,e,t,A,l,d),or(s,r,e,h.length,u.length,l,d,A),h}catch(u){throw s.error("Failed to find documents",{database:r,collection:e,error:u.message}),u}};var _r=ir;export{_r as default};
+ import{get_database as O,build_collection_key as F}from"../query_engine.js";import{can_use_index as E,find_documents_by_index as x}from"../index_manager.js";import{record_query as D,record_index_usage as J}from"../auto_index_manager.js";import N from"../logger.js";const{create_context_logger:R}=N("find"),m=r=>r.split("."),b=r=>r==null,S=(r,e)=>{const t=[];for(let n=0;n<r.length;n++){const s=r[n];if(typeof s=="object"&&s!==null){const o=j(s,e);o!==void 0&&(Array.isArray(o)?t.push(...o):t.push(o))}}return t.length>0?t:void 0},j=(r,e)=>{const t=m(e);let n=r;for(let s=0;s<t.length;s++){const o=t[s];if(b(n))return;if(n=n[o],Array.isArray(n)&&s<t.length-1){const i=t.slice(s+1).join(".");return S(n,i)}}return n},C=(r,e)=>r.hasOwnProperty(e),P=(r,e)=>{const t=m(e);let n=r;for(let s=0;s<t.length;s++){if(b(n)||typeof n!="object")return!1;if(s===t.length-1)return C(n,t[s]);n=n[t[s]]}return!1},k=(r,e)=>r.includes(e),p=(r,e,t)=>{for(let n=0;n<r.length;n++)if(t(r[n],e))return!0;return!1},$=(r,e)=>Array.isArray(r)?k(r,e):r===e,U=(r,e)=>Array.isArray(r)?!k(r,e):r!==e,z=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t>n):r>e,B=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t>=n):r>=e,G=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t<n):r<e,H=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t<=n):r<=e,I=(r,e)=>Array.isArray(e)?Array.isArray(r)?p(r,e,(t,n)=>n.includes(t)):e.includes(r):!1,K=(r,e)=>{if(!Array.isArray(e))return!1;if(Array.isArray(r)){for(let t=0;t<r.length;t++)if(e.includes(r[t]))return!1;return!0}return!e.includes(r)},L=(r,e,t)=>{const n=P(r,e);return t?n:!n},M=(r,e,t="")=>{const n=new RegExp(e,t);return Array.isArray(r)?p(r,n,(s,o)=>typeof s=="string"&&o.test(s)):typeof r!="string"?!1:n.test(r)},Q=(r,e,t,n)=>{for(const[s,o]of Object.entries(n))switch(s){case"$eq":if(!$(t,o))return!1;break;case"$ne":if(!U(t,o))return!1;break;case"$gt":if(!z(t,o))return!1;break;case"$gte":if(!B(t,o))return!1;break;case"$lt":if(!G(t,o))return!1;break;case"$lte":if(!H(t,o))return!1;break;case"$in":if(!I(t,o))return!1;break;case"$nin":if(!K(t,o))return!1;break;case"$exists":if(!L(r,e,o))return!1;break;case"$regex":const i=n.$options||"";if(!M(t,o,i))return!1;break;case"$options":break;default:throw new Error(`Unsupported query operator: ${s}`)}return!0},T=(r,e)=>{for(let t=0;t<e.length;t++)if(y(r,e[t]))return!0;return!1},y=(r,e)=>{if(!e||Object.keys(e).length===0)return!0;if(e.$or&&Array.isArray(e.$or)){if(!T(r,e.$or))return!1;const t={...e};return delete t.$or,Object.keys(t).length>0?y(r,t):!0}for(const[t,n]of Object.entries(e)){const s=j(r,t);if(typeof n=="object"&&n!==null&&!Array.isArray(n)){if(!Q(r,t,s,n))return!1}else if(!$(s,n))return!1}return!0},V=r=>Object.values(r).some(e=>e===1||e===!0),W=(r,e)=>{const t={_id:r._id};for(const[n,s]of Object.entries(e))n==="_id"&&(s===0||s===!1)?delete t._id:(s===1||s===!0)&&(t[n]=r[n]);return t},X=(r,e)=>{const t={...r};for(const[n,s]of Object.entries(e))(s===0||s===!1)&&delete t[n];return t},Y=(r,e)=>!e||Object.keys(e).length===0?r:V(e)?W(r,e):X(r,e),Z=(r,e,t)=>{if(r===e)return 0;if(r===void 0)return 1;if(e===void 0)return-1;const n=r<e?-1:r>e?1:0;return t===-1?-n:n},v=(r,e)=>!e||Object.keys(e).length===0?r:r.sort((t,n)=>{for(const[s,o]of Object.entries(e)){const i=Z(t[s],n[s],o);if(i!==0)return i}return 0}),rr=(r,e)=>{if(!r)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required")},er=(r,e,t,n,s)=>{const{field:o,operators:i}=s,c=n[o],_=[];J(e,t,o);let a=null;if(typeof c=="object"&&c!==null&&!Array.isArray(c)){for(const f of i)if(c[f]!==void 
0){a=x(e,t,o,f,c[f]);break}}else i.includes("eq")&&(a=x(e,t,o,"eq",c));if(a)for(const f of a){const u=F(e,t,f),l=r.get(u);if(l){const d=JSON.parse(l);y(d,n)&&_.push(d)}}return _},tr=(r,e,t,n)=>{const s=[],o=`${e}:${t}:`,i=r.getRange({start:o,end:o+"\xFF"});for(const{key:c,value:_}of i){const a=JSON.parse(_);y(a,n)&&s.push(a)}return s},nr=(r,e,t)=>{let n=r;return e>0&&(n=n.slice(e)),t&&t>0&&(n=n.slice(0,t)),n},sr=(r,e,t,n,s,o)=>{try{D(e,t,n,s,o)}catch(i){r.warn("Failed to record query for auto-indexing",{error:i.message})}},or=(r,e,t,n,s,o,i,c)=>{r.info("Find operation completed",{database:e,collection:t,documents_found:n,total_matching:s,used_index:o,indexed_field:i,execution_time_ms:c})},ir=async(r,e,t={},n={})=>{const s=R();rr(r,e);const o=O(),{projection:i,sort:c,limit:_,skip:a=0}=n,f=Date.now();try{let u=[],l=!1,d=null;const g=E(r,e,t);g&&(d=g.field,u=er(o,r,e,t,g),l=u.length>0),l||(u=tr(o,r,e,t));const q=v(u,c),h=nr(q,a,_).map(w=>Y(w,i)),A=Date.now()-f;return sr(s,e,t,A,l,d),or(s,r,e,h.length,u.length,l,d,A),h}catch(u){throw s.error("Failed to find documents",{database:r,collection:e,error:u.message}),u}};var _r=ir;export{_r as default,y as matches_filter};
package/package.json CHANGED
@@ -1,8 +1,8 @@
 {
   "name": "@joystick.js/db-canary",
   "type": "module",
-  "version": "0.0.0-canary.2262",
-  "canary_version": "0.0.0-canary.2261",
+  "version": "0.0.0-canary.2264",
+  "canary_version": "0.0.0-canary.2263",
   "description": "JoystickDB - A minimalist database server for the Joystick framework",
   "main": "./dist/server/index.js",
   "scripts": {
@@ -649,6 +649,16 @@ class Collection {
     return result.documents || [];
   }
 
+  async count_documents(filter = {}, options = {}) {
+    const result = await this.client.send_request('count_documents', {
+      database: this.database_name,
+      collection: this.collection_name,
+      filter,
+      options
+    });
+    return result.count;
+  }
+
   async update_one(filter, update, options = {}) {
     return this.client.send_request('update_one', {
       database: this.database_name,
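
The new Collection method above mirrors the existing find/find_one pattern: it sends a count_documents request over the client's length-prefixed msgpack protocol and resolves with the server's count value. A minimal usage sketch follows, assuming a db(name).collection(name) helper on the database wrapper (database.js is not shown in this diff); the connection options and filter values are illustrative:

import joystickdb from '@joystick.js/db-canary';

// Defaults from the client in this diff: host "localhost", port 1983, optional password.
const client = joystickdb.client({ password: process.env.JOYSTICKDB_PASSWORD });

// Assumes database.js exposes a collection(name) helper that returns a Collection instance.
const posts = client.db('app').collection('posts');

// count_documents(filter, options) resolves with the server's count field.
const published_count = await posts.count_documents({ status: 'published' }, { limit: 1000 });
console.log(published_count);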
@@ -495,6 +495,7 @@ const route_operation_to_handler = async (socket, op_type, parsed_data, raw_data
     case 'bulk_write':
     case 'find_one':
     case 'find':
+    case 'count_documents':
     case 'create_index':
     case 'drop_index':
     case 'get_indexes':
@@ -14,6 +14,7 @@ const op_types = [
   "setup",
   "find_one",
   "find",
+  "count_documents",
   "insert_one",
   "update_one",
   "delete_one",
@@ -19,6 +19,7 @@ import delete_many from './operations/delete_many.js';
 import bulk_write from './operations/bulk_write.js';
 import find_one from './operations/find_one.js';
 import find from './operations/find.js';
+import count_documents from './operations/count_documents.js';
 import create_index_operation from './operations/create_index.js';
 import drop_index_operation from './operations/drop_index.js';
 import get_indexes_operation from './operations/get_indexes.js';
@@ -189,6 +190,9 @@ const execute_database_operation = async (op_type, database_name, data) => {
     case 'find':
       return await find(database_name, data.collection, data.filter, data.options);
 
+    case 'count_documents':
+      return await count_documents(database_name, data.collection, data.filter, data.options);
+
     case 'create_index':
       return await create_index_operation(database_name, data.collection, data.field, data.options);
 
@@ -214,6 +218,8 @@ const format_operation_response = (op_type, result) => {
     return { ok: 1, document: result };
   } else if (op_type === 'find') {
     return { ok: 1, documents: result };
+  } else if (op_type === 'count_documents') {
+    return { ok: 1, ...result };
   } else {
     return { ok: 1, ...result };
   }
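
In this branch a count_documents result is spread into the standard response envelope, so the fields returned by the new operation (count, operation_time, limit_reached) flow straight back to the client. A rough sketch of the round trip implied by this diff; the database, collection, filter, and concrete values are illustrative:

// Client request, msgpack-encoded with a 4-byte length prefix by the client protocol shown above.
const request = {
  op: 'count_documents',
  data: {
    database: 'app',
    collection: 'posts',
    filter: { status: 'published' },
    options: { limit: 1000 }
  }
};

// Server response after format_operation_response() spreads the operation result.
const response = {
  ok: 1,
  count: 42,
  operation_time: '2026-01-01T00:00:00.000Z',
  limit_reached: false
};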
@@ -225,7 +231,7 @@ const format_operation_response = (op_type, result) => {
  * @returns {boolean} True if write operation
  */
  const is_write_operation = (op_type) => {
- const read_operations = ['find', 'find_one', 'get_indexes'];
+ const read_operations = ['find', 'find_one', 'count_documents', 'get_indexes'];
  return !read_operations.includes(op_type);
  };

@@ -0,0 +1,219 @@
+ /**
+ * @fileoverview Count documents operation implementation for JoystickDB.
+ * Efficiently counts documents matching filter criteria without retrieving full documents.
+ * Supports complex filtering, index optimization, and safety limits.
+ */
+
+ import { get_database, build_collection_key } from '../query_engine.js';
+ import { can_use_index, find_documents_by_index } from '../index_manager.js';
+ import { record_query, record_index_usage } from '../auto_index_manager.js';
+ import { matches_filter } from './find.js';
+ import create_logger from '../logger.js';
+
+ const { create_context_logger } = create_logger('count_documents');
+
+ /**
+ * Validates required parameters for count operation.
+ * @param {string} database_name - Database name
+ * @param {string} collection_name - Collection name
+ * @throws {Error} When parameters are missing
+ */
+ const validate_count_parameters = (database_name, collection_name) => {
+ if (!database_name) {
+ throw new Error('Database name is required');
+ }
+
+ if (!collection_name) {
+ throw new Error('Collection name is required');
+ }
+ };
+
+ /**
+ * Counts documents using index optimization.
+ * @param {Object} db - Database instance
+ * @param {string} database_name - Database name
+ * @param {string} collection_name - Collection name
+ * @param {Object} filter - Query filter
+ * @param {Object} index_info - Index information
+ * @param {number} limit - Maximum count limit
+ * @returns {number} Count of matching documents
+ */
+ const count_using_index = (db, database_name, collection_name, filter, index_info, limit) => {
+ const { field, operators } = index_info;
+ const field_filter = filter[field];
+ let count = 0;
+
+ record_index_usage(database_name, collection_name, field);
+
+ let document_ids = null;
+
+ if (typeof field_filter === 'object' && field_filter !== null && !Array.isArray(field_filter)) {
+ for (const operator of operators) {
+ if (field_filter[operator] !== undefined) {
+ document_ids = find_documents_by_index(database_name, collection_name, field, operator, field_filter[operator]);
+ break;
+ }
+ }
+ } else if (operators.includes('eq')) {
+ document_ids = find_documents_by_index(database_name, collection_name, field, 'eq', field_filter);
+ }
+
+ if (document_ids) {
+ for (const document_id of document_ids) {
+ if (limit && count >= limit) {
+ break;
+ }
+
+ const collection_key = build_collection_key(database_name, collection_name, document_id);
+ const document_data = db.get(collection_key);
+
+ if (document_data) {
+ const document = JSON.parse(document_data);
+ if (matches_filter(document, filter)) {
+ count++;
+ }
+ }
+ }
+ }
+
+ return count;
+ };
+
+ /**
+ * Counts documents using full collection scan.
+ * @param {Object} db - Database instance
+ * @param {string} database_name - Database name
+ * @param {string} collection_name - Collection name
+ * @param {Object} filter - Query filter
+ * @param {number} limit - Maximum count limit
+ * @returns {number} Count of matching documents
+ */
+ const count_using_full_scan = (db, database_name, collection_name, filter, limit) => {
+ let count = 0;
+ const collection_prefix = `${database_name}:${collection_name}:`;
+ const range = db.getRange({ start: collection_prefix, end: collection_prefix + '\xFF' });
+
+ for (const { key, value: document_data } of range) {
+ if (limit && count >= limit) {
+ break;
+ }
+
+ const document = JSON.parse(document_data);
+ if (matches_filter(document, filter)) {
+ count++;
+ }
+ }
+
+ return count;
+ };
+
+ /**
+ * Records query for auto-indexing analysis.
+ * @param {Function} log - Logger function
+ * @param {string} collection_name - Collection name
+ * @param {Object} filter - Query filter
+ * @param {number} execution_time - Execution time in milliseconds
+ * @param {boolean} used_index - Whether index was used
+ * @param {string} indexed_field - Field that was indexed
+ */
+ const record_query_for_auto_indexing = (log, collection_name, filter, execution_time, used_index, indexed_field) => {
+ try {
+ record_query(collection_name, filter, execution_time, used_index, indexed_field);
+ } catch (auto_index_error) {
+ log.warn('Failed to record query for auto-indexing', {
+ error: auto_index_error.message
+ });
+ }
+ };
+
+ /**
+ * Logs count operation completion.
+ * @param {Function} log - Logger function
+ * @param {string} database_name - Database name
+ * @param {string} collection_name - Collection name
+ * @param {number} count - Number of documents counted
+ * @param {boolean} used_index - Whether index was used
+ * @param {string} indexed_field - Field that was indexed
+ * @param {number} execution_time - Execution time in milliseconds
+ * @param {boolean} limit_reached - Whether count limit was reached
+ */
+ const log_count_completion = (log, database_name, collection_name, count, used_index, indexed_field, execution_time, limit_reached) => {
+ log.info('Count operation completed', {
+ database: database_name,
+ collection: collection_name,
+ count,
+ used_index,
+ indexed_field,
+ execution_time_ms: execution_time,
+ limit_reached
+ });
+ };
+
+ /**
+ * Counts documents in a collection matching the specified filter criteria.
+ * @param {string} database_name - Name of the database
+ * @param {string} collection_name - Name of the collection to count in
+ * @param {Object} filter - Filter criteria for matching documents
+ * @param {Object} options - Count options
+ * @returns {Promise<Object>} Count result object
+ */
+ const count_documents = async (database_name, collection_name, filter = {}, options = {}) => {
+ const log = create_context_logger();
+
+ validate_count_parameters(database_name, collection_name);
+
+ const db = get_database();
+ const { limit } = options;
+ const start_time = Date.now();
+
+ try {
+ let count = 0;
+ let used_index = false;
+ let indexed_field = null;
+ let limit_reached = false;
+
+ const index_info = can_use_index(database_name, collection_name, filter);
+
+ if (index_info) {
+ indexed_field = index_info.field;
+ count = count_using_index(db, database_name, collection_name, filter, index_info, limit);
+ used_index = count > 0;
+ limit_reached = limit && count >= limit;
+ }
+
+ if (!used_index) {
+ count = count_using_full_scan(db, database_name, collection_name, filter, limit);
+ limit_reached = limit && count >= limit;
+ }
+
+ const execution_time = Date.now() - start_time;
+
+ record_query_for_auto_indexing(log, collection_name, filter, execution_time, used_index, indexed_field);
+
+ log_count_completion(
+ log,
+ database_name,
+ collection_name,
+ count,
+ used_index,
+ indexed_field,
+ execution_time,
+ limit_reached
+ );
+
+ return {
+ count,
+ operation_time: new Date().toISOString(),
+ limit_reached: limit_reached || false
+ };
+ } catch (error) {
+ log.error('Failed to count documents', {
+ database: database_name,
+ collection: collection_name,
+ error: error.message
+ });
+ throw error;
+ }
+ };
+
+ export default count_documents;
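
On the server, count_documents tries an index fast path first and falls back to a full prefix scan when the index yields no matches; options.limit short-circuits either loop and sets limit_reached. A hedged sketch of calling the operation directly, mirroring the unit tests further down (the 'app'/'users' names and limit are illustrative, and the query engine must already be initialized as in the test setup):

// Sketch only: direct server-side invocation, as exercised by the unit tests below.
// Assumes initialize_database(...) has already been called (see the ava beforeEach hook).
import count_documents from './src/server/lib/operations/count_documents.js';

const result = await count_documents('app', 'users', { status: 'active' }, { limit: 500 });
// result => { count: <number>, operation_time: <ISO string>, limit_reached: <boolean> }
console.log(result.count, result.limit_reached);
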
@@ -371,7 +371,7 @@ const handle_or_operator = (document, or_conditions) => {
  * @param {Object} filter - Filter criteria with field names and values/operators
  * @returns {boolean} True if document matches filter
  */
- const matches_filter = (document, filter) => {
+ export const matches_filter = (document, filter) => {
  if (!filter || Object.keys(filter).length === 0) {
  return true;
  }
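
matches_filter is now exported so count_documents can reuse find's filter semantics on individual documents. A hedged sketch of how the exported helper behaves, based on the operators exercised in the tests below (the sample document and expected results are illustrative):

// Illustrative only: matches_filter applies find's filter semantics to a single document.
import { matches_filter } from './src/server/lib/operations/find.js';

const document = { name: 'Alice', age: 25, profile: { country: 'USA' } };

matches_filter(document, {});                                        // true: an empty filter matches everything
matches_filter(document, { age: { $gte: 21, $lt: 30 } });            // true: comparison operators
matches_filter(document, { 'profile.country': 'USA' });              // true: dot notation for nested fields
matches_filter(document, { $or: [{ age: 40 }, { name: 'Alice' }] }); // true: $or matches on the second branch
matches_filter(document, { email: { $exists: true } });              // false: no email field on this document
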
@@ -0,0 +1,81 @@
+ /**
+ * Simple integration test for count_documents operation
+ * This demonstrates the count_documents functionality working end-to-end
+ */
+
+ import joystickdb from './src/client/index.js';
+
+ const test_count_documents = async () => {
+ console.log('🚀 Testing count_documents integration...\n');
+
+ // Create client (in development mode, no password needed)
+ const client = joystickdb.client({ host: 'localhost', port: 1983 });
+
+ try {
+ // Wait for connection
+ await new Promise(resolve => {
+ client.on('authenticated', resolve);
+ client.on('connect', resolve); // In dev mode, no auth needed
+ });
+
+ console.log('✅ Connected to JoystickDB');
+
+ // Get collection reference
+ const users = client.db('test').collection('users');
+
+ // Clean up any existing data
+ await users.delete_many({});
+ console.log('🧹 Cleaned up existing data');
+
+ // Insert test documents
+ await users.insert_one({ name: 'Alice', age: 25, active: true });
+ await users.insert_one({ name: 'Bob', age: 30, active: true });
+ await users.insert_one({ name: 'Carol', age: 35, active: false });
+ await users.insert_one({ name: 'Dave', age: 25, active: true });
+ console.log('📝 Inserted 4 test documents');
+
+ // Test 1: Count all documents
+ const total_count = await users.count_documents();
+ console.log(`📊 Total documents: ${total_count}`);
+ console.assert(total_count === 4, 'Expected 4 total documents');
+
+ // Test 2: Count with filter
+ const active_count = await users.count_documents({ active: true });
+ console.log(`📊 Active users: ${active_count}`);
+ console.assert(active_count === 3, 'Expected 3 active users');
+
+ // Test 3: Count with complex filter
+ const young_active_count = await users.count_documents({
+ age: { $lte: 30 },
+ active: true
+ });
+ console.log(`📊 Young active users (age <= 30): ${young_active_count}`);
+ console.assert(young_active_count === 2, 'Expected 2 young active users');
+
+ // Test 4: Count with limit
+ const limited_count = await users.count_documents({}, { limit: 2 });
+ console.log(`📊 Limited count (max 2): ${limited_count}`);
+ console.assert(limited_count === 2, 'Expected count to be limited to 2');
+
+ // Test 5: Count with no matches
+ const no_match_count = await users.count_documents({ age: 100 });
+ console.log(`📊 Users aged 100: ${no_match_count}`);
+ console.assert(no_match_count === 0, 'Expected 0 users aged 100');
+
+ // Clean up
+ await users.delete_many({});
+ console.log('🧹 Cleaned up test data');
+
+ console.log('\n✅ All count_documents tests passed!');
+ console.log('🎉 Integration test completed successfully');
+
+ } catch (error) {
+ console.error('❌ Test failed:', error.message);
+ process.exit(1);
+ } finally {
+ client.disconnect();
+ }
+ };
+
+ // Run the test
+ test_count_documents().catch(console.error);
@@ -0,0 +1,186 @@
+ import test from 'ava';
+ import sinon from 'sinon';
+ import count_documents from '../../../../src/server/lib/operations/count_documents.js';
+ import insert_one from '../../../../src/server/lib/operations/insert_one.js';
+ import { initialize_database, get_database, cleanup_database } from '../../../../src/server/lib/query_engine.js';
+
+ test.beforeEach(() => {
+ initialize_database('./test_data');
+ const db = get_database();
+ db.clearSync();
+ });
+
+ test.afterEach(async (t) => {
+ await cleanup_database();
+ });
+
+ test('count_documents - should count all documents when no filter provided', async (t) => {
+ await insert_one('default', 'users', { name: 'Alice', age: 25 });
+ await insert_one('default', 'users', { name: 'Bob', age: 30 });
+ await insert_one('default', 'users', { name: 'Carol', age: 35 });
+
+ const result = await count_documents('default', 'users');
+ t.is(result.count, 3);
+ t.truthy(result.operation_time);
+ t.is(result.limit_reached, false);
+ });
+
+ test('count_documents - should count documents matching filter', async (t) => {
+ await insert_one('default', 'users', { name: 'Alice', age: 25 });
+ await insert_one('default', 'users', { name: 'Bob', age: 25 });
+ await insert_one('default', 'users', { name: 'Carol', age: 30 });
+
+ const result = await count_documents('default', 'users', { age: 25 });
+ t.is(result.count, 2);
+ t.truthy(result.operation_time);
+ t.is(result.limit_reached, false);
+ });
+
+ test('count_documents - should return zero for no matches', async (t) => {
+ await insert_one('default', 'users', { name: 'Alice', age: 25 });
+ await insert_one('default', 'users', { name: 'Bob', age: 30 });
+
+ const result = await count_documents('default', 'users', { age: 40 });
+ t.is(result.count, 0);
+ t.truthy(result.operation_time);
+ t.is(result.limit_reached, false);
+ });
+
+ test('count_documents - should support all query operators', async (t) => {
+ await insert_one('default', 'users', { name: 'Eve', age: 20, tags: ['a'], email: 'eve@example.com' });
+ await insert_one('default', 'users', { name: 'Frank', age: 30, tags: ['b'], email: 'frank@example.com' });
+ await insert_one('default', 'users', { name: 'Grace', age: 40, tags: ['c'] });
+
+ t.is((await count_documents('default', 'users', { age: { $eq: 20 } })).count, 1);
+ t.is((await count_documents('default', 'users', { age: { $ne: 20 } })).count, 2);
+ t.is((await count_documents('default', 'users', { age: { $gt: 25 } })).count, 2);
+ t.is((await count_documents('default', 'users', { age: { $gte: 30 } })).count, 2);
+ t.is((await count_documents('default', 'users', { age: { $lt: 25 } })).count, 1);
+ t.is((await count_documents('default', 'users', { age: { $lte: 20 } })).count, 1);
+ t.is((await count_documents('default', 'users', { name: { $in: ['Eve', 'X'] } })).count, 1);
+ t.is((await count_documents('default', 'users', { name: { $nin: ['Frank'] } })).count, 2);
+ t.is((await count_documents('default', 'users', { email: { $exists: true } })).count, 2);
+ t.is((await count_documents('default', 'users', { phone: { $exists: false } })).count, 3);
+ t.is((await count_documents('default', 'users', { email: { $regex: '^eve@' } })).count, 1);
+ });
+
+ test('count_documents - should support complex filters with $or operator', async (t) => {
+ await insert_one('default', 'users', { name: 'Alice', age: 25, status: 'active' });
+ await insert_one('default', 'users', { name: 'Bob', age: 30, status: 'inactive' });
+ await insert_one('default', 'users', { name: 'Carol', age: 35, status: 'active' });
+ await insert_one('default', 'users', { name: 'Dave', age: 25, status: 'pending' });
+
+ const result = await count_documents('default', 'users', {
+ $or: [
+ { age: 25 },
+ { status: 'active' }
+ ]
+ });
+
+ t.is(result.count, 3); // Alice, Carol (active), Dave (age 25)
+ });
+
+ test('count_documents - should respect limit option', async (t) => {
+ await insert_one('default', 'users', { name: 'Alice', age: 25 });
+ await insert_one('default', 'users', { name: 'Bob', age: 25 });
+ await insert_one('default', 'users', { name: 'Carol', age: 25 });
+ await insert_one('default', 'users', { name: 'Dave', age: 25 });
+
+ const result = await count_documents('default', 'users', { age: 25 }, { limit: 2 });
+ t.is(result.count, 2);
+ t.is(result.limit_reached, true);
+ });
+
+ test('count_documents - should handle array field queries', async (t) => {
+ await insert_one('default', 'users', { name: 'Alice', tags: ['admin', 'user'] });
+ await insert_one('default', 'users', { name: 'Bob', tags: ['user'] });
+ await insert_one('default', 'users', { name: 'Carol', tags: ['guest'] });
+
+ const admin_count = await count_documents('default', 'users', { tags: 'admin' });
+ t.is(admin_count.count, 1);
+
+ const user_count = await count_documents('default', 'users', { tags: 'user' });
+ t.is(user_count.count, 2);
+ });
+
+ test('count_documents - should handle nested object queries', async (t) => {
+ await insert_one('default', 'users', { name: 'Alice', profile: { city: 'NYC', country: 'USA' } });
+ await insert_one('default', 'users', { name: 'Bob', profile: { city: 'LA', country: 'USA' } });
+ await insert_one('default', 'users', { name: 'Carol', profile: { city: 'London', country: 'UK' } });
+
+ const usa_count = await count_documents('default', 'users', { 'profile.country': 'USA' });
+ t.is(usa_count.count, 2);
+
+ const nyc_count = await count_documents('default', 'users', { 'profile.city': 'NYC' });
+ t.is(nyc_count.count, 1);
+ });
+
+ test('count_documents - should handle empty collection', async (t) => {
+ const result = await count_documents('default', 'empty_collection');
+ t.is(result.count, 0);
+ t.truthy(result.operation_time);
+ t.is(result.limit_reached, false);
+ });
+
+ test('count_documents - should throw error for missing database name', async (t) => {
+ await t.throwsAsync(
+ () => count_documents('', 'users'),
+ { message: 'Database name is required' }
+ );
+ });
+
+ test('count_documents - should throw error for missing collection name', async (t) => {
+ await t.throwsAsync(
+ () => count_documents('default', ''),
+ { message: 'Collection name is required' }
+ );
+ });
+
+ test('count_documents - should handle large datasets efficiently', async (t) => {
+ // Insert 100 documents
+ for (let i = 0; i < 100; i++) {
+ await insert_one('default', 'large_collection', {
+ id: i,
+ category: i % 5 === 0 ? 'special' : 'normal',
+ value: i * 10
+ });
+ }
+
+ const total_count = await count_documents('default', 'large_collection');
+ t.is(total_count.count, 100);
+
+ const special_count = await count_documents('default', 'large_collection', { category: 'special' });
+ t.is(special_count.count, 20); // Every 5th document
+
+ const range_count = await count_documents('default', 'large_collection', {
+ value: { $gte: 500, $lt: 800 }
+ });
+ t.is(range_count.count, 30); // Values 500-790
+ });
+
+ test('count_documents - should work with different data types', async (t) => {
+ await insert_one('default', 'mixed_types', { name: 'Alice', active: true, score: 95.5 });
+ await insert_one('default', 'mixed_types', { name: 'Bob', active: false, score: 87.2 });
+ await insert_one('default', 'mixed_types', { name: 'Carol', active: true, score: 92.8 });
+
+ const boolean_count = await count_documents('default', 'mixed_types', { active: true });
+ t.is(boolean_count.count, 2);
+
+ const float_count = await count_documents('default', 'mixed_types', { score: { $gt: 90 } });
+ t.is(float_count.count, 2);
+
+ const string_count = await count_documents('default', 'mixed_types', { name: { $regex: '^A' } });
+ t.is(string_count.count, 1);
+ });
+
+ test('count_documents - should handle null and undefined values', async (t) => {
+ await insert_one('default', 'nullable', { name: 'Alice', email: 'alice@example.com' });
+ await insert_one('default', 'nullable', { name: 'Bob', email: null });
+ await insert_one('default', 'nullable', { name: 'Carol' }); // email is undefined
+
+ const has_email_count = await count_documents('default', 'nullable', { email: { $exists: true } });
+ t.is(has_email_count.count, 2); // Alice and Bob (null is considered existing)
+
+ const null_email_count = await count_documents('default', 'nullable', { email: null });
+ t.is(null_email_count.count, 1); // Only Bob
+ });