@joystick.js/db-canary 0.0.0-canary.2250 → 0.0.0-canary.2252
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/database.js +1 -1
- package/dist/client/index.js +1 -1
- package/dist/server/cluster/master.js +4 -4
- package/dist/server/cluster/worker.js +1 -1
- package/dist/server/index.js +1 -1
- package/dist/server/lib/auto_index_manager.js +1 -1
- package/dist/server/lib/backup_manager.js +1 -1
- package/dist/server/lib/index_manager.js +1 -1
- package/dist/server/lib/operation_dispatcher.js +1 -1
- package/dist/server/lib/operations/admin.js +1 -1
- package/dist/server/lib/operations/bulk_write.js +1 -1
- package/dist/server/lib/operations/create_index.js +1 -1
- package/dist/server/lib/operations/delete_many.js +1 -1
- package/dist/server/lib/operations/delete_one.js +1 -1
- package/dist/server/lib/operations/find.js +1 -1
- package/dist/server/lib/operations/find_one.js +1 -1
- package/dist/server/lib/operations/insert_one.js +1 -1
- package/dist/server/lib/operations/update_one.js +1 -1
- package/dist/server/lib/send_response.js +1 -1
- package/dist/server/lib/tcp_protocol.js +1 -1
- package/package.json +2 -2
- package/src/client/database.js +92 -119
- package/src/client/index.js +279 -345
- package/src/server/cluster/master.js +265 -156
- package/src/server/cluster/worker.js +26 -18
- package/src/server/index.js +553 -330
- package/src/server/lib/auto_index_manager.js +85 -23
- package/src/server/lib/backup_manager.js +117 -70
- package/src/server/lib/index_manager.js +63 -25
- package/src/server/lib/operation_dispatcher.js +339 -168
- package/src/server/lib/operations/admin.js +343 -205
- package/src/server/lib/operations/bulk_write.js +458 -194
- package/src/server/lib/operations/create_index.js +127 -34
- package/src/server/lib/operations/delete_many.js +204 -67
- package/src/server/lib/operations/delete_one.js +164 -52
- package/src/server/lib/operations/find.js +563 -201
- package/src/server/lib/operations/find_one.js +544 -188
- package/src/server/lib/operations/insert_one.js +147 -52
- package/src/server/lib/operations/update_one.js +334 -93
- package/src/server/lib/send_response.js +37 -17
- package/src/server/lib/tcp_protocol.js +158 -53
- package/tests/server/cluster/master_read_write_operations.test.js +5 -14
- package/tests/server/integration/authentication_integration.test.js +18 -10
- package/tests/server/integration/backup_integration.test.js +35 -27
- package/tests/server/lib/api_key_manager.test.js +88 -32
- package/tests/server/lib/development_mode.test.js +2 -2
- package/tests/server/lib/operations/admin.test.js +20 -12
- package/tests/server/lib/operations/delete_one.test.js +10 -4
- package/tests/server/lib/operations/find_array_queries.test.js +261 -0

package/dist/server/lib/operations/create_index.js
CHANGED
@@ -1 +1 @@
-
import{create_index as
+
import{create_index as d}from"../index_manager.js";import _ from"../logger.js";const{create_context_logger:u}=_("create_index"),l=e=>{if(!e)throw new Error("Database name is required")},p=e=>{if(!e)throw new Error("Collection name is required")},x=e=>{if(!e)throw new Error("Field name is required")},f=(e,r,t)=>{l(e),p(r),x(t)},g=e=>{switch(e){case"created":return"created";case"updated":return"updated";default:return"already exists"}},w=(e,r,t,o)=>`Index ${e} on ${r}.${t}.${o}`,h=(e,r,t,o,c,s,a)=>{e.info(`Index ${r} successfully`,{database:t,collection:o,field:c,options:s,operation_type:a})},v=(e,r,t,o,c)=>{e.error("Failed to create/upsert index",{database:r,collection:t,field:o,error:c.message})},y=(e,r)=>({...e,message:r}),$=async(e,r,t,o={})=>{const c=u();f(e,r,t);try{const s=await d(e,r,t,o),a=s.operation_type||"created",n=g(a),i=w(n,e,r,t);return h(c,n,e,r,t,o,a),y(s,i)}catch(s){throw v(c,e,r,t,s),s}};var b=$;export{b as default};
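Note: the rebuilt create_index bundle validates its arguments, delegates to create_index from ../index_manager.js, and decorates the result with a human-readable message. A hedged sketch of calling the module's default export (the argument values are invented; fields on the index_manager result other than operation_type are not visible in this diff):

```js
// Illustrative call into the operation's default export.
const result = await create_index('app', 'users', 'email');
// Throws "Database name is required" / "Collection name is required" /
// "Field name is required" when an argument is missing.
// Resolves to { ...index_manager_result, message: 'Index created on app.users.email' },
// where the status word is 'created', 'updated', or 'already exists',
// chosen from result.operation_type.
```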

package/dist/server/lib/operations/delete_many.js
CHANGED
@@ -1 +1 @@
-
import{get_database as
+
import{get_database as l}from"../query_engine.js";import{update_indexes_on_delete as m}from"../index_manager.js";import{get_write_queue as f}from"../write_queue.js";import p from"../logger.js";const{create_context_logger:y}=p("delete_many"),w=e=>{if(!e)throw new Error("Database name is required")},g=e=>{if(!e)throw new Error("Collection name is required")},b=e=>{if(!e||typeof e!="object")throw new Error("Filter must be a valid object")},h=e=>{if(e!==void 0&&(typeof e!="number"||e<0))throw new Error("Limit must be a non-negative number")},v=(e,t,n,r)=>{w(e),g(t),b(n),h(r.limit)},k=(e,t,n)=>e[t]===n,q=(e,t)=>{if(!t||Object.keys(t).length===0)return!0;for(const[n,r]of Object.entries(t))if(!k(e,n,r))return!1;return!0},x=e=>{try{return JSON.parse(e)}catch{return null}},j=(e,t)=>t!==void 0&&e>=t,O=(e,t,n,r,a)=>{let o=0;const c=[],s=`${t}:${n}:`,i=e.getRange({start:s,end:s+"\xFF"});for(const{key:d,value:u}of i){if(j(o,a))break;const _=x(u);_&&q(_,r)&&(e.remove(d),c.push(_),o++)}return{deleted_count:o,deleted_documents:c}},E=async(e,t,n)=>{for(const r of n)await m(e,t,r)},D=(e,t,n,r,a)=>{e.info("Delete many operation completed",{database:t,collection:n,deleted_count:r,limit:a||"none"})},F=()=>new Date().toISOString(),S=e=>({acknowledged:!0,deleted_count:e,operation_time:F()}),$=(e,t,n,r)=>({operation:"delete_many",database:e,collection:t,filter_keys:Object.keys(n||{}),limit:r}),C=async(e,t,n,r={})=>{const a=y();v(e,t,n,r);const{limit:o}=r,c=l(),s=await c.transaction(()=>O(c,e,t,n,o)),{deleted_count:i,deleted_documents:d}=s;return await E(e,t,d),D(a,e,t,i,o),S(i)},I=async(e,t,n,r={})=>{const a=f(),o=$(e,t,n,r.limit);return await a.enqueue_write_operation(()=>C(e,t,n,r),o)};var z=I;export{z as default};
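Note: the rebuilt delete_many bundle matches documents by exact top-level equality, honors an optional limit, scans the `${database}:${collection}:` key range inside a transaction, updates indexes for each removed document, and runs through the write queue. A hedged sketch (collection and filter values are invented; the option and result field names come from the minified source above):

```js
// Illustrative call into the operation's default export.
const result = await delete_many('app', 'sessions', { user_id: 'abc123' }, { limit: 100 });
// A negative or non-numeric limit throws "Limit must be a non-negative number".
// Resolves to { acknowledged: true, deleted_count: <n>, operation_time: '<ISO timestamp>' }.
```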

package/dist/server/lib/operations/delete_one.js
CHANGED
@@ -1 +1 @@
-
import{get_database as _}from"../query_engine.js";import{update_indexes_on_delete as
+
import{get_database as _}from"../query_engine.js";import{update_indexes_on_delete as u}from"../index_manager.js";import{get_write_queue as f}from"../write_queue.js";import p from"../logger.js";const{create_context_logger:m}=p("delete_one"),w=e=>{if(!e)throw new Error("Database name is required")},g=e=>{if(!e)throw new Error("Collection name is required")},y=e=>{if(!e||typeof e!="object")throw new Error("Filter must be a valid object")},v=(e,t,o)=>{w(e),g(t),y(o)},b=(e,t,o)=>e[t]===o,h=(e,t)=>{if(!t||Object.keys(t).length===0)return!0;for(const[o,r]of Object.entries(t))if(!b(e,o,r))return!1;return!0},k=e=>{try{return JSON.parse(e)}catch{return null}},q=(e,t,o,r)=>{let a=0,n=null;const c=`${t}:${o}:`,s=e.getRange({start:c,end:c+"\xFF"});for(const{key:i,value:d}of s){const l=k(d);if(l&&h(l,r)){e.remove(i),n=l,a=1;break}}return{deleted_count:a,deleted_document:n}},x=async(e,t,o)=>{o&&await u(e,t,o)},j=(e,t,o,r)=>{e.info("Delete operation completed",{database:t,collection:o,deleted_count:r})},O=e=>({acknowledged:!0,deleted_count:e}),E=(e,t,o)=>({operation:"delete_one",database:e,collection:t,filter_keys:Object.keys(o||{})}),F=async(e,t,o,r={})=>{const a=m();v(e,t,o);const n=_(),c=await n.transaction(()=>q(n,e,t,o)),{deleted_count:s,deleted_document:i}=c;return await x(e,t,i),j(a,e,t,s),O(s)},D=async(e,t,o,r={})=>{const a=f(),n=E(e,t,o);return await a.enqueue_write_operation(()=>F(e,t,o,r),n)};var R=D;export{R as default};

package/dist/server/lib/operations/find.js
CHANGED
@@ -1 +1 @@
-
import{get_database as
+
import{get_database as O,build_collection_key as F}from"../query_engine.js";import{can_use_index as E,find_documents_by_index as x}from"../index_manager.js";import{record_query as D,record_index_usage as J}from"../auto_index_manager.js";import N from"../logger.js";const{create_context_logger:R}=N("find"),m=r=>r.split("."),b=r=>r==null,S=(r,e)=>{const t=[];for(let n=0;n<r.length;n++){const s=r[n];if(typeof s=="object"&&s!==null){const o=j(s,e);o!==void 0&&(Array.isArray(o)?t.push(...o):t.push(o))}}return t.length>0?t:void 0},j=(r,e)=>{const t=m(e);let n=r;for(let s=0;s<t.length;s++){const o=t[s];if(b(n))return;if(n=n[o],Array.isArray(n)&&s<t.length-1){const i=t.slice(s+1).join(".");return S(n,i)}}return n},C=(r,e)=>r.hasOwnProperty(e),P=(r,e)=>{const t=m(e);let n=r;for(let s=0;s<t.length;s++){if(b(n)||typeof n!="object")return!1;if(s===t.length-1)return C(n,t[s]);n=n[t[s]]}return!1},k=(r,e)=>r.includes(e),p=(r,e,t)=>{for(let n=0;n<r.length;n++)if(t(r[n],e))return!0;return!1},$=(r,e)=>Array.isArray(r)?k(r,e):r===e,U=(r,e)=>Array.isArray(r)?!k(r,e):r!==e,z=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t>n):r>e,B=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t>=n):r>=e,G=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t<n):r<e,H=(r,e)=>Array.isArray(r)?p(r,e,(t,n)=>t<=n):r<=e,I=(r,e)=>Array.isArray(e)?Array.isArray(r)?p(r,e,(t,n)=>n.includes(t)):e.includes(r):!1,K=(r,e)=>{if(!Array.isArray(e))return!1;if(Array.isArray(r)){for(let t=0;t<r.length;t++)if(e.includes(r[t]))return!1;return!0}return!e.includes(r)},L=(r,e,t)=>{const n=P(r,e);return t?n:!n},M=(r,e,t="")=>{const n=new RegExp(e,t);return Array.isArray(r)?p(r,n,(s,o)=>typeof s=="string"&&o.test(s)):n.test(r)},Q=(r,e,t,n)=>{for(const[s,o]of Object.entries(n))switch(s){case"$eq":if(!$(t,o))return!1;break;case"$ne":if(!U(t,o))return!1;break;case"$gt":if(!z(t,o))return!1;break;case"$gte":if(!B(t,o))return!1;break;case"$lt":if(!G(t,o))return!1;break;case"$lte":if(!H(t,o))return!1;break;case"$in":if(!I(t,o))return!1;break;case"$nin":if(!K(t,o))return!1;break;case"$exists":if(!L(r,e,o))return!1;break;case"$regex":const i=n.$options||"";if(!M(t,o,i))return!1;break;case"$options":break;default:throw new Error(`Unsupported query operator: ${s}`)}return!0},T=(r,e)=>{for(let t=0;t<e.length;t++)if(y(r,e[t]))return!0;return!1},y=(r,e)=>{if(!e||Object.keys(e).length===0)return!0;if(e.$or&&Array.isArray(e.$or)){if(!T(r,e.$or))return!1;const t={...e};return delete t.$or,Object.keys(t).length>0?y(r,t):!0}for(const[t,n]of Object.entries(e)){const s=j(r,t);if(typeof n=="object"&&n!==null&&!Array.isArray(n)){if(!Q(r,t,s,n))return!1}else if(!$(s,n))return!1}return!0},V=r=>Object.values(r).some(e=>e===1||e===!0),W=(r,e)=>{const t={_id:r._id};for(const[n,s]of Object.entries(e))n==="_id"&&(s===0||s===!1)?delete t._id:(s===1||s===!0)&&(t[n]=r[n]);return t},X=(r,e)=>{const t={...r};for(const[n,s]of Object.entries(e))(s===0||s===!1)&&delete t[n];return t},Y=(r,e)=>!e||Object.keys(e).length===0?r:V(e)?W(r,e):X(r,e),Z=(r,e,t)=>{if(r===e)return 0;if(r===void 0)return 1;if(e===void 0)return-1;const n=r<e?-1:r>e?1:0;return t===-1?-n:n},v=(r,e)=>!e||Object.keys(e).length===0?r:r.sort((t,n)=>{for(const[s,o]of Object.entries(e)){const i=Z(t[s],n[s],o);if(i!==0)return i}return 0}),rr=(r,e)=>{if(!r)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required")},er=(r,e,t,n,s)=>{const{field:o,operators:i}=s,c=n[o],_=[];J(e,t,o);let a=null;if(typeof c=="object"&&c!==null&&!Array.isArray(c)){for(const f of i)if(c[f]!==void 0){a=x(e,t,o,f,c[f]);break}}else 
i.includes("eq")&&(a=x(e,t,o,"eq",c));if(a)for(const f of a){const u=F(e,t,f),l=r.get(u);if(l){const d=JSON.parse(l);y(d,n)&&_.push(d)}}return _},tr=(r,e,t,n)=>{const s=[],o=`${e}:${t}:`,i=r.getRange({start:o,end:o+"\xFF"});for(const{key:c,value:_}of i){const a=JSON.parse(_);y(a,n)&&s.push(a)}return s},nr=(r,e,t)=>{let n=r;return e>0&&(n=n.slice(e)),t&&t>0&&(n=n.slice(0,t)),n},sr=(r,e,t,n,s,o)=>{try{D(e,t,n,s,o)}catch(i){r.warn("Failed to record query for auto-indexing",{error:i.message})}},or=(r,e,t,n,s,o,i,c)=>{r.info("Find operation completed",{database:e,collection:t,documents_found:n,total_matching:s,used_index:o,indexed_field:i,execution_time_ms:c})},ir=async(r,e,t={},n={})=>{const s=R();rr(r,e);const o=O(),{projection:i,sort:c,limit:_,skip:a=0}=n,f=Date.now();try{let u=[],l=!1,d=null;const g=E(r,e,t);g&&(d=g.field,u=er(o,r,e,t,g),l=u.length>0),l||(u=tr(o,r,e,t));const q=v(u,c),h=nr(q,a,_).map(w=>Y(w,i)),A=Date.now()-f;return sr(s,e,t,A,l,d),or(s,r,e,h.length,u.length,l,d,A),h}catch(u){throw s.error("Failed to find documents",{database:r,collection:e,error:u.message}),u}};var _r=ir;export{_r as default};
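Note: the rebuilt find bundle implements the query matcher, projection, sort, skip/limit, and index-assisted lookups inline. A hedged illustration of a filter exercising what the matcher above handles (field names and values are invented; the operator set and option names come from the minified source):

```js
// Illustrative call into the operation's default export.
const documents = await find('app', 'posts', {
  status: { $in: ['published', 'archived'] },
  'author.name': { $regex: '^jo', $options: 'i' },   // dot paths traverse nested objects/arrays
  tags: 'databases',                                  // array fields match scalars by inclusion
  $or: [{ views: { $gte: 100 } }, { pinned: { $exists: true } }]
}, {
  projection: { title: 1, views: 1 },                 // inclusive projection keeps _id unless _id: 0
  sort: { views: -1 },
  skip: 0,
  limit: 10
});
// Unknown operators throw "Unsupported query operator: ...".
```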

package/dist/server/lib/operations/find_one.js
CHANGED
@@ -1 +1 @@
-
import{get_database as
+
import{get_database as k,build_collection_key as m}from"../query_engine.js";import{can_use_index as q,find_documents_by_index as p}from"../index_manager.js";import{record_query as w,record_index_usage as O}from"../auto_index_manager.js";import D from"../logger.js";const{create_context_logger:E}=D("find_one"),g=r=>r.split("."),h=r=>r==null,F=(r,e)=>r.slice(e+1).join("."),N=(r,e)=>{const t=[];for(let n=0;n<r.length;n++){const o=r[n];if(typeof o=="object"&&o!==null){const s=A(o,e);s!==void 0&&(Array.isArray(s)?t.push(...s):t.push(s))}}return t.length>0?t:void 0},A=(r,e)=>{const t=g(e);let n=r;for(let o=0;o<t.length;o++){const s=t[o];if(h(n))return;if(n=n[s],Array.isArray(n)&&o<t.length-1){const c=F(t,o);return N(n,c)}}return n},R=(r,e)=>r.hasOwnProperty(e),C=(r,e)=>{const t=g(e);let n=r;for(let o=0;o<t.length;o++){if(h(n)||typeof n!="object")return!1;if(o===t.length-1)return R(n,t[o]);n=n[t[o]]}return!1},x=(r,e)=>r.includes(e),y=(r,e,t)=>{for(let n=0;n<r.length;n++)if(t(r[n],e))return!0;return!1},b=(r,e)=>Array.isArray(r)?x(r,e):r===e,J=(r,e)=>Array.isArray(r)?!x(r,e):r!==e,P=(r,e)=>Array.isArray(r)?y(r,e,(t,n)=>t>n):r>e,S=(r,e)=>Array.isArray(r)?y(r,e,(t,n)=>t>=n):r>=e,U=(r,e)=>Array.isArray(r)?y(r,e,(t,n)=>t<n):r<e,z=(r,e)=>Array.isArray(r)?y(r,e,(t,n)=>t<=n):r<=e,B=(r,e)=>Array.isArray(e)?Array.isArray(r)?y(r,e,(t,n)=>n.includes(t)):e.includes(r):!1,G=(r,e)=>{if(!Array.isArray(e))return!1;if(Array.isArray(r)){for(let t=0;t<r.length;t++)if(e.includes(r[t]))return!1;return!0}return!e.includes(r)},H=(r,e,t)=>{const n=C(r,e);return t?n:!n},I=(r,e,t="")=>{const n=new RegExp(e,t);return Array.isArray(r)?y(r,n,(o,s)=>typeof o=="string"&&s.test(o)):n.test(r)},K=(r,e,t,n)=>{for(const[o,s]of Object.entries(n))switch(o){case"$eq":if(!b(t,s))return!1;break;case"$ne":if(!J(t,s))return!1;break;case"$gt":if(!P(t,s))return!1;break;case"$gte":if(!S(t,s))return!1;break;case"$lt":if(!U(t,s))return!1;break;case"$lte":if(!z(t,s))return!1;break;case"$in":if(!B(t,s))return!1;break;case"$nin":if(!G(t,s))return!1;break;case"$exists":if(!H(r,e,s))return!1;break;case"$regex":const c=n.$options||"";if(!I(t,s,c))return!1;break;case"$options":break;default:throw new Error(`Unsupported query operator: ${o}`)}return!0},L=(r,e)=>{for(let t=0;t<e.length;t++)if(d(r,e[t]))return!0;return!1},d=(r,e)=>{if(!e||Object.keys(e).length===0)return!0;if(e.$or&&Array.isArray(e.$or)){if(!L(r,e.$or))return!1;const t={...e};return delete t.$or,Object.keys(t).length>0?d(r,t):!0}for(const[t,n]of Object.entries(e)){const o=A(r,t);if(typeof n=="object"&&n!==null&&!Array.isArray(n)){if(!K(r,t,o,n))return!1}else if(!b(o,n))return!1}return!0},M=r=>Object.values(r).some(e=>e===1||e===!0),Q=(r,e)=>{const t={_id:r._id};for(const[n,o]of Object.entries(e))n==="_id"&&(o===0||o===!1)?delete t._id:(o===1||o===!0)&&(t[n]=r[n]);return t},T=(r,e)=>{const t={...r};for(const[n,o]of Object.entries(e))(o===0||o===!1)&&delete t[n];return t},V=(r,e)=>!e||Object.keys(e).length===0?r:M(e)?Q(r,e):T(r,e),W=(r,e)=>{if(!r)throw new Error("Database name is required");if(!e)throw new Error("Collection name is required")},$=r=>{try{return JSON.parse(r)}catch{return null}},X=(r,e,t,n,o)=>{const{field:s,operators:c}=o,a=n[s];O(e,t,s);let i=null;if(typeof a=="object"&&a!==null&&!Array.isArray(a)){for(const u of c)if(a[u]!==void 0){i=p(e,t,s,u,a[u]);break}}else c.includes("eq")&&(i=p(e,t,s,"eq",a));if(i&&i.length>0)for(const u of i){const l=m(e,t,u),_=r.get(l);if(_){const f=$(_);if(f&&d(f,n))return f}}return null},Y=(r,e,t,n)=>{const 
o=`${e}:${t}:`,s=r.getRange({start:o,end:o+"\xFF"});for(const{key:c,value:a}of s){const i=$(a);if(i&&d(i,n))return i}return null},Z=(r,e,t,n,o,s)=>{try{w(e,t,n,o,s)}catch(c){r.warn("Failed to record query for auto-indexing",{error:c.message})}},v=(r,e,t,n,o,s,c)=>{r.info("Document found",{database:e,collection:t,document_id:n,used_index:o,indexed_field:s,execution_time_ms:c})},rr=(r,e,t,n,o,s)=>{r.info("No document found",{database:e,collection:t,used_index:n,indexed_field:o,execution_time_ms:s})},er=async(r,e,t={},n={})=>{const o=E();W(r,e);const s=k(),{projection:c}=n,a=Date.now();try{let i=null,u=!1,l=null;const _=q(r,e,t);_&&(l=_.field,i=X(s,r,e,t,_),u=i!==null),u||(i=Y(s,r,e,t));const f=Date.now()-a;if(Z(o,e,t,f,u,l),i){const j=V(i,c);return v(o,r,e,i._id,u,l,f),j}return rr(o,r,e,u,l,f),null}catch(i){throw o.error("Failed to find document",{database:r,collection:e,error:i.message}),i}};var ir=er;export{ir as default};

package/dist/server/lib/operations/insert_one.js
CHANGED
@@ -1 +1 @@
-
import{get_database as
+
import{get_database as u,generate_document_id as p,build_collection_key as m}from"../query_engine.js";import{update_indexes_on_insert as w}from"../index_manager.js";import{get_write_queue as g}from"../write_queue.js";import l from"../logger.js";const{create_context_logger:f}=l("insert_one"),x=t=>{if(!t)throw new Error("Database name is required")},h=t=>{if(!t)throw new Error("Collection name is required")},q=t=>{if(!t||typeof t!="object")throw new Error("Document must be a valid object")},_=(t,e,r)=>{x(t),h(e),q(r)},y=()=>new Date().toISOString(),v=t=>{const e=t._id||p(),r=y();return{...t,_id:e,_created_at:r,_updated_at:r}},c=(t,e,r)=>{if(t.get(e))throw new Error(`Document with _id ${r} already exists`)},D=(t,e,r)=>(c(t,e,r._id),t.transaction(()=>{c(t,e,r._id),t.put(e,JSON.stringify(r))}),r),E=(t,e,r,n)=>{t.info("Document inserted successfully",{database:e,collection:r,document_id:n})},S=t=>({acknowledged:!0,inserted_id:t}),b=(t,e,r)=>({operation:"insert_one",database:t,collection:e,document_id:r._id||"auto-generated"}),j=async(t,e,r,n={})=>{const i=f();_(t,e,r);const s=u(),o=v(r),a=m(t,e,o._id),d=D(s,a,o);return await w(t,e,d),E(i,t,e,o._id),S(o._id)},k=async(t,e,r,n={})=>{_(t,e,r);const i=g(),s=b(t,e,r);return await i.enqueue_write_operation(()=>j(t,e,r,n),s)};var N=k;export{N as default};
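Note: the rebuilt insert_one bundle stamps metadata onto the document before writing it through the write queue. A hedged sketch (document contents are invented; field and result names come from the source above):

```js
// Illustrative call into the operation's default export.
const result = await insert_one('app', 'users', { name: 'Ada' });
// Stored document: { name: 'Ada', _id: <existing or generated>, _created_at: <ISO>, _updated_at: <ISO> }
// Resolves to:     { acknowledged: true, inserted_id: <same _id> }
// Re-using an existing _id throws "Document with _id <id> already exists".
```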

package/dist/server/lib/operations/update_one.js
CHANGED
@@ -1 +1 @@
-
import{get_database as
+
import{get_database as O,build_collection_key as v,generate_document_id as j}from"../query_engine.js";import{update_indexes_on_update as x,update_indexes_on_insert as $}from"../index_manager.js";import{get_write_queue as S}from"../write_queue.js";import q from"../logger.js";const{create_context_logger:E}=q("update_one"),J=(e,r)=>({...e,...r}),N=(e,r)=>{const t={...e};for(const o of Object.keys(r))delete t[o];return t},A=(e,r)=>{const t={...e};for(const[o,n]of Object.entries(r))t[o]=(t[o]||0)+n;return t},F=(e,r)=>{const t={...e};for(const[o,n]of Object.entries(r))Array.isArray(t[o])||(t[o]=[]),t[o]=[...t[o],n];return t},U=(e,r)=>{const t={...e};for(const[o,n]of Object.entries(r))Array.isArray(t[o])&&(t[o]=t[o].filter(a=>a!==n));return t},w=(e,r)=>{let t={...e};for(const[o,n]of Object.entries(r))switch(o){case"$set":t=J(t,n);break;case"$unset":t=N(t,n);break;case"$inc":t=A(t,n);break;case"$push":t=F(t,n);break;case"$pull":t=U(t,n);break;default:throw new Error(`Unsupported update operator: ${o}`)}return t},D=(e,r,t)=>e[r]===t,C=(e,r)=>{if(!r||Object.keys(r).length===0)return!0;for(const[t,o]of Object.entries(r))if(!D(e,t,o))return!1;return!0},I=e=>{if(!e)throw new Error("Database name is required")},R=e=>{if(!e)throw new Error("Collection name is required")},z=e=>{if(!e||typeof e!="object")throw new Error("Filter must be a valid object")},B=e=>{if(!e||typeof e!="object")throw new Error("Update must be a valid object")},G=(e,r,t,o)=>{I(e),R(r),z(t),B(o)},H=e=>{try{return JSON.parse(e)}catch{return null}},b=()=>new Date().toISOString(),K=e=>({...e,_updated_at:b()}),L=(e,r)=>JSON.stringify(e)!==JSON.stringify(r),M=(e,r)=>{const t=j(),o=b(),n={...e,_id:t,_created_at:o,_updated_at:o};return w(n,r)},P=(e,r,t,o,n,a)=>{let c=0,u=0,d=null,i=null,_=null,s=null;const l=`${r}:${t}:`;let f=!1;const g=e.getRange({start:l,end:l+"\xFF"});for(const{key:m,value:h}of g){const p=H(h);if(p&&C(p,o)){f=!0,c=1;const k=w(p,n),y=K(k);L(p,y)&&(e.put(m,JSON.stringify(y)),i=p,_=y,u=1);break}}if(!f&&a.upsert){s=M(o,n);const m=v(r,t,s._id);e.put(m,JSON.stringify(s)),d=s._id,c=0,u=0}return{matched_count:c,modified_count:u,upserted_id:d,old_document:i,new_document:_,upserted_document:s}},Q=async(e,r,t,o)=>{t&&o&&await x(e,r,t,o)},T=async(e,r,t)=>{t&&await $(e,r,t)},V=(e,r,t,o,n,a)=>{e.info("Update operation completed",{database:r,collection:t,matched_count:o,modified_count:n,upserted_id:a})},W=(e,r,t)=>{const o={acknowledged:!0,matched_count:e,modified_count:r};return t&&(o.upserted_id=t),o},X=(e,r,t)=>({operation:"update_one",database:e,collection:r,filter_keys:Object.keys(t||{})}),Y=async(e,r,t,o,n={})=>{const a=E();G(e,r,t,o);const c=O(),u=await c.transaction(()=>P(c,e,r,t,o,n)),{matched_count:d,modified_count:i,upserted_id:_,old_document:s,new_document:l,upserted_document:f}=u;return await Q(e,r,s,l),await T(e,r,f),V(a,e,r,d,i,_),W(d,i,_)},Z=async(e,r,t,o,n={})=>{const a=S(),c=X(e,r,t);return await a.enqueue_write_operation(()=>Y(e,r,t,o,n),c)};var nt=Z;export{nt as default};
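Note: the rebuilt update_one bundle supports $set, $unset, $inc, $push, and $pull, refreshes _updated_at on modified documents, and can upsert. A hedged sketch (filter and update values are invented; the operator names, upsert option, and result shape come from the source above):

```js
// Illustrative call into the operation's default export.
const result = await update_one(
  'app', 'users',
  { email: 'ada@example.com' },                                                // exact top-level equality
  { $set: { active: true }, $inc: { logins: 1 }, $push: { roles: 'admin' } },
  { upsert: true }
);
// Matched:  { acknowledged: true, matched_count: 1, modified_count: 1 }
// Upserted: { acknowledged: true, matched_count: 0, modified_count: 0, upserted_id: <generated> }
// Any other $-operator throws "Unsupported update operator: ...".
```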

package/dist/server/lib/send_response.js
CHANGED
@@ -1 +1 @@
-
import{encode_message as
+
import{encode_message as n}from"./tcp_protocol.js";const t=e=>({ok:!0,data:e}),c=e=>({ok:!1,error:e}),r=(e,s)=>{const o=n(s);e.write(o)},_=(e,s={})=>{const o=t(s);r(e,o)},d=(e,s={})=>{const o=c(s);r(e,o)},a=(e,s="")=>{_(e,{message:s})};export{d as send_error,a as send_message,_ as send_success};
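Note: the rebuilt send_response bundle wraps every payload in an ok envelope before framing it with encode_message and writing it to the socket. Sketch of the envelopes (the socket variable is assumed to be any writable TCP socket):

```js
// Envelopes produced by the exported helpers.
send_success(socket, { inserted_id: 'abc' });  // writes { ok: true,  data: { inserted_id: 'abc' } }
send_error(socket, { message: 'nope' });       // writes { ok: false, error: { message: 'nope' } }
send_message(socket, 'pong');                  // writes { ok: true,  data: { message: 'pong' } }
```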

package/dist/server/lib/tcp_protocol.js
CHANGED
@@ -1 +1 @@
-
import{encode as d,decode as m}from"msgpackr";import
+
import{encode as d,decode as m}from"msgpackr";import i from"./logger.js";const{create_context_logger:l}=i("tcp_protocol"),_=()=>({useFloat32:!1,int64AsType:"number",mapsAsObjects:!0}),u=e=>{const s=Buffer.allocUnsafe(4);return s.writeUInt32BE(e,0),s},f=(e,s,t)=>{e.debug("Message encoded",{payload_size:s,total_size:t})},h=(e,s)=>{e.error("Failed to encode message",{error:s.message})},p=e=>{const s=l();try{const t=d(e,_()),r=u(t.length),o=Buffer.concat([r,t]);return f(s,t.length,o.length),o}catch(t){throw h(s,t),t}},b=e=>{if(e.length<4)return{expected_length:null,remaining_buffer:e};const s=e.readUInt32BE(0),t=e.slice(4);return{expected_length:s,remaining_buffer:t}},x=(e,s)=>{e.debug("Length prefix read",{expected_length:s})},B=(e,s)=>{const t=e.slice(0,s),r=e.slice(s);return{message_data:t,remaining_buffer:r}},k=e=>m(e,_()),w=(e,s)=>{e.debug("Message decoded",{message_size:s})},y=(e,s,t)=>{e.error("Failed to decode message",{message_size:s.length,error:t.message,hex_data:s.toString("hex")})},E=(e,s,t)=>{const{message_data:r,remaining_buffer:o}=B(e,s);try{const n=k(r);return w(t,r.length),{decoded_message:n,remaining_buffer:o}}catch(n){throw y(t,r,n),new Error(`Invalid message format: ${n.message}`)}},F=e=>{e.debug("Parser reset")},I=()=>{let e=Buffer.alloc(0),s=null;const t=l();return{parse_messages:n=>{e=Buffer.concat([e,n]);const a=[];for(;e.length>0;){if(s===null){const g=b(e);if(s=g.expected_length,e=g.remaining_buffer,s===null)break;x(t,s)}if(e.length<s)break;const c=E(e,s,t);a.push(c.decoded_message),e=c.remaining_buffer,s=null}return a},reset:()=>{e=Buffer.alloc(0),s=null,F(t)}}};export{I as create_message_parser,p as encode_message};
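Note: the rebuilt tcp_protocol bundle frames each message as a 4-byte big-endian length prefix followed by a msgpackr payload, and create_message_parser reassembles frames from streamed chunks. A minimal framing sketch that mirrors how the bundle calls encode/decode (the payload contents are invented):

```js
import { encode, decode } from 'msgpackr';

// Same encoder options the bundle passes to encode/decode.
const options = { useFloat32: false, int64AsType: 'number', mapsAsObjects: true };

const frame = (message) => {
  const payload = encode(message, options);
  const prefix = Buffer.allocUnsafe(4);
  prefix.writeUInt32BE(payload.length, 0);          // length prefix, big-endian
  return Buffer.concat([prefix, payload]);
};

const unframe = (buffer) => {
  const expected_length = buffer.readUInt32BE(0);   // read prefix, then decode the body
  return decode(buffer.subarray(4, 4 + expected_length), options);
};

unframe(frame({ op: 'find', database: 'app' }));    // => { op: 'find', database: 'app' }
```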
package/package.json
CHANGED
@@ -1,8 +1,8 @@
 {
   "name": "@joystick.js/db-canary",
   "type": "module",
-  "version": "0.0.0-canary.
-  "canary_version": "0.0.0-canary.
+  "version": "0.0.0-canary.2252",
+  "canary_version": "0.0.0-canary.2251",
   "description": "JoystickDB - A minimalist database server for the Joystick framework",
   "main": "./dist/server/index.js",
   "scripts": {
package/src/client/database.js
CHANGED
@@ -1,165 +1,138 @@
+/**
+ * Creates admin request data for database operations.
+ * @param {string} admin_action - The admin action to perform
+ * @param {string} database_name - Name of the database
+ * @param {Object} additional_data - Additional data for the request
+ * @returns {Object} Admin request data object
+ */
+const create_admin_request_data = (admin_action, database_name, additional_data = {}) => ({
+  admin_action,
+  database: database_name,
+  ...additional_data
+});
+
+/**
+ * Creates admin request data for collection operations.
+ * @param {string} admin_action - The admin action to perform
+ * @param {string} database_name - Name of the database
+ * @param {string} collection_name - Name of the collection
+ * @param {Object} additional_data - Additional data for the request
+ * @returns {Object} Admin request data object
+ */
+const create_collection_admin_request_data = (admin_action, database_name, collection_name, additional_data = {}) => ({
+  admin_action,
+  database: database_name,
+  collection: collection_name,
+  ...additional_data
+});
+
+/**
+ * Sends admin request through client.
+ * @param {Object} client - The client instance
+ * @param {Object} request_data - Request data to send
+ * @returns {Promise<Object>} Admin operation result
+ */
+const send_admin_request = async (client, request_data) => {
+  return client.send_request('admin', request_data);
+};
+
 /**
  * Database interface for method chaining operations.
  * Provides a fluent API for database-level operations and collection access.
  */
 class Database {
-  /**
-   * Creates a new Database instance.
-   * @param {JoystickDBClient} client - The client instance
-   * @param {string} database_name - Name of the database
-   */
   constructor(client, database_name) {
     this.client = client;
     this.database_name = database_name;
   }

-  /**
-   * Returns a collection interface for method chaining operations.
-   * @param {string} collection_name - Collection name
-   * @returns {Collection} Collection interface instance
-   */
   collection(collection_name) {
-    // NOTE: Import Collection class dynamically to avoid circular dependency.
     const Collection = this.client.constructor.Collection;
     return new Collection(this.client, this.database_name, collection_name);
   }

-  /**
-   * Lists all collections in the database.
-   * @returns {Promise<Object>} Collections list
-   */
   async list_collections() {
-
-
-      database: this.database_name
-    });
+    const request_data = create_admin_request_data('list_collections', this.database_name);
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Gets database-specific statistics.
-   * @returns {Promise<Object>} Database statistics
-   */
   async get_stats() {
-
-
-      database: this.database_name
-    });
+    const request_data = create_admin_request_data('get_database_stats', this.database_name);
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Drops the entire database (admin operation).
-   * @returns {Promise<Object>} Drop database result
-   */
   async drop_database() {
-
-
-      database: this.database_name
-    });
+    const request_data = create_admin_request_data('drop_database', this.database_name);
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Explicitly creates a collection in the database.
-   * @param {string} collection_name - Name of collection to create
-   * @param {Object} [options={}] - Collection creation options
-   * @returns {Promise<Object>} Create collection result
-   */
   async create_collection(collection_name, options = {}) {
-
-
-
-
-      options
-
+    const request_data = create_collection_admin_request_data(
+      'create_collection',
+      this.database_name,
+      collection_name,
+      { options }
+    );
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Lists all documents in a collection (admin operation).
-   * @param {string} collection_name - Name of collection
-   * @returns {Promise<Object>} List documents result
-   */
   async list_documents(collection_name) {
-
-
-
-
-
+    const request_data = create_collection_admin_request_data(
+      'list_documents',
+      this.database_name,
+      collection_name
+    );
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Gets a specific document by ID (admin operation).
-   * @param {string} collection_name - Name of collection
-   * @param {string} document_id - Document ID
-   * @returns {Promise<Object>} Get document result
-   */
   async get_document(collection_name, document_id) {
-
-
-
-
-      document_id
-
+    const request_data = create_collection_admin_request_data(
+      'get_document',
+      this.database_name,
+      collection_name,
+      { document_id }
+    );
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Queries documents in a collection (admin operation).
-   * @param {string} collection_name - Name of collection
-   * @param {Object} filter - Query filter
-   * @returns {Promise<Object>} Query documents result
-   */
   async query_documents(collection_name, filter) {
-
-
-
-
-      filter
-
+    const request_data = create_collection_admin_request_data(
+      'query_documents',
+      this.database_name,
+      collection_name,
+      { filter }
+    );
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Inserts a document into a collection (admin operation).
-   * @param {string} collection_name - Name of collection
-   * @param {Object} document - Document to insert
-   * @returns {Promise<Object>} Insert document result
-   */
   async insert_document(collection_name, document) {
-
-
-
-
-      document
-
+    const request_data = create_collection_admin_request_data(
+      'insert_document',
+      this.database_name,
+      collection_name,
+      { document }
+    );
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Updates a document in a collection (admin operation).
-   * @param {string} collection_name - Name of collection
-   * @param {string} document_id - Document ID
-   * @param {Object} update - Update operations
-   * @returns {Promise<Object>} Update document result
-   */
   async update_document(collection_name, document_id, update) {
-
-
-
-
-      document_id,
-
-
+    const request_data = create_collection_admin_request_data(
+      'update_document',
+      this.database_name,
+      collection_name,
+      { document_id, update }
+    );
+    return send_admin_request(this.client, request_data);
   }

-  /**
-   * Deletes a document from a collection (admin operation).
-   * @param {string} collection_name - Name of collection
-   * @param {string} document_id - Document ID
-   * @returns {Promise<Object>} Delete document result
-   */
   async delete_document(collection_name, document_id) {
-
-
-
-
-      document_id
-
+    const request_data = create_collection_admin_request_data(
+      'delete_document',
+      this.database_name,
+      collection_name,
+      { document_id }
+    );
+    return send_admin_request(this.client, request_data);
   }
 }
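Note: the refactor above replaces each method's inline request object with the shared helpers, so every Database method now builds a plain admin payload and hands it to client.send_request('admin', request_data). A hedged sketch of those payloads (the database, collection, and id values are invented, and how a Database instance is obtained is outside this excerpt):

```js
// Payloads built by the refactored methods before send_request('admin', ...) is called.
db.list_collections();
// { admin_action: 'list_collections', database: 'app' }

db.create_collection('users', {});
// { admin_action: 'create_collection', database: 'app', collection: 'users', options: {} }

db.update_document('users', 'abc123', { $set: { active: true } });
// { admin_action: 'update_document', database: 'app', collection: 'users',
//   document_id: 'abc123', update: { $set: { active: true } } }
```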