muya 2.5.2 → 2.5.4

This diff shows the changes between publicly released versions of this package as they appear in the public registry it was published to, and is provided for informational purposes only.
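
The substantive change in this diff is a new groupBy API on the SQLite-backed tables: Table, SyncTable, and the createSqliteState wrapper gain groupBy(field, options?), which runs a SELECT ... GROUP BY over json_extract of the given dot path and returns Array<GroupByResult<...>>, with the key type inferred via the new GetFieldType helper. A minimal usage sketch follows, based on the tests added in this diff; the import specifiers are illustrative (the tests import createSqliteState and bunMemoryBackend via relative paths inside the package), everything else mirrors the API shown below.

    // Sketch only: import paths are assumptions, not the package's documented entry points.
    import { createSqliteState } from 'muya'
    import { bunMemoryBackend } from 'muya'

    interface Product {
      id: string
      name: string
      category: string
      price: number
    }

    const products = createSqliteState<Product>({
      backend: bunMemoryBackend(),
      tableName: 'Products',
      key: 'id',
    })

    await products.batchSet([
      { id: '1', name: 'Apple', category: 'fruit', price: 1 },
      { id: '2', name: 'Banana', category: 'fruit', price: 5 },
      { id: '3', name: 'Carrot', category: 'vegetable', price: 1 },
    ])

    // Added between 2.5.2 and 2.5.4: group rows by a (possibly nested) dot path,
    // optionally filtered by the same where syntax used by count() and search().
    const byCategory = await products.groupBy('category')
    // e.g. [{ key: 'fruit', count: 2 }, { key: 'vegetable', count: 1 }]

    const expensive = await products.groupBy('category', { where: { price: { gt: 2 } } })
    // e.g. [{ key: 'fruit', count: 1 }]
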
@@ -1 +1 @@
- import{createSqliteState as o}from"../create-sqlite";import{bunMemoryBackend as s}from"../table/bun-backend";const n=s();describe("create-sqlite-state",()=>{it("should batchSet and update multiple documents",async()=>{const e=o({backend:n,tableName:"State2",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25}]);const a=[];for await(const i of e.search())a.push(i);expect(a).toHaveLength(2),await e.batchSet([{id:"1",name:"Alice2",age:31},{id:"2",name:"Bob2",age:26}]);const t=[];for await(const i of e.search())t.push(i);expect(t).toEqual([{id:"1",name:"Alice2",age:31},{id:"2",name:"Bob2",age:26}])}),it("should deleteBy condition",async()=>{const e=o({backend:n,tableName:"State3",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25},{id:"3",name:"Carol",age:40}]);const a=await e.deleteBy({age:{gt:30}});expect(a.length).toBe(1);const t=[];for await(const i of e.search())t.push(i);expect(t.map(i=>i.id)).toEqual(["1","2"])}),it("should get by key and with selector",async()=>{const e=o({backend:n,tableName:"State4",key:"id"});await e.set({id:"1",name:"Alice",age:30});const a=await e.get("1");expect(a).toEqual({id:"1",name:"Alice",age:30});const t=await e.get("1",c=>c.name);expect(t).toBe("Alice");const i=await e.get("999");expect(i).toBeUndefined()}),it("should count documents with and without where",async()=>{const e=o({backend:n,tableName:"State5",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25},{id:"3",name:"Carol",age:40}]),expect(await e.count()).toBe(3),expect(await e.count({where:{age:{gt:30}}})).toBe(1)}),it("should support search with options",async()=>{const e=o({backend:n,tableName:"State6",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25},{id:"3",name:"Carol",age:40}]);const a=[];for await(const t of e.search({where:{age:{lt:35}}}))a.push(t);expect(a.map(t=>t.id)).toEqual(["1","2"])})});
+ import{createSqliteState as i}from"../create-sqlite";import{bunMemoryBackend as p}from"../table/bun-backend";const c=p();describe("create-sqlite-state",()=>{it("should batchSet and update multiple documents",async()=>{const e=i({backend:c,tableName:"State2",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25}]);const t=[];for await(const r of e.search())t.push(r);expect(t).toHaveLength(2),await e.batchSet([{id:"1",name:"Alice2",age:31},{id:"2",name:"Bob2",age:26}]);const a=[];for await(const r of e.search())a.push(r);expect(a).toEqual([{id:"1",name:"Alice2",age:31},{id:"2",name:"Bob2",age:26}])}),it("should deleteBy condition",async()=>{const e=i({backend:c,tableName:"State3",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25},{id:"3",name:"Carol",age:40}]);const t=await e.deleteBy({age:{gt:30}});expect(t.length).toBe(1);const a=[];for await(const r of e.search())a.push(r);expect(a.map(r=>r.id)).toEqual(["1","2"])}),it("should get by key and with selector",async()=>{const e=i({backend:c,tableName:"State4",key:"id"});await e.set({id:"1",name:"Alice",age:30});const t=await e.get("1");expect(t).toEqual({id:"1",name:"Alice",age:30});const a=await e.get("1",o=>o.name);expect(a).toBe("Alice");const r=await e.get("999");expect(r).toBeUndefined()}),it("should count documents with and without where",async()=>{const e=i({backend:c,tableName:"State5",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25},{id:"3",name:"Carol",age:40}]),expect(await e.count()).toBe(3),expect(await e.count({where:{age:{gt:30}}})).toBe(1)}),it("should support search with options",async()=>{const e=i({backend:c,tableName:"State6",key:"id"});await e.batchSet([{id:"1",name:"Alice",age:30},{id:"2",name:"Bob",age:25},{id:"3",name:"Carol",age:40}]);const t=[];for await(const a of e.search({where:{age:{lt:35}}}))t.push(a);expect(t.map(a=>a.id)).toEqual(["1","2"])})}),describe("groupBy",()=>{it("should group by a simple field and count",async()=>{const e=i({backend:c,tableName:"GroupBy1",key:"id"});await e.batchSet([{id:"1",name:"Apple",category:"fruit",price:1},{id:"2",name:"Banana",category:"fruit",price:2},{id:"3",name:"Carrot",category:"vegetable",price:1},{id:"4",name:"Orange",category:"fruit",price:3},{id:"5",name:"Broccoli",category:"vegetable",price:2}]);const t=await e.groupBy("category");expect(t).toHaveLength(2);const a=t.find(o=>o.key==="fruit"),r=t.find(o=>o.key==="vegetable");expect(a?.count).toBe(3),expect(r?.count).toBe(2)}),it("should group by with where clause filter",async()=>{const e=i({backend:c,tableName:"GroupBy2",key:"id"});await e.batchSet([{id:"1",name:"Apple",category:"fruit",price:1},{id:"2",name:"Banana",category:"fruit",price:5},{id:"3",name:"Carrot",category:"vegetable",price:1},{id:"4",name:"Orange",category:"fruit",price:3},{id:"5",name:"Broccoli",category:"vegetable",price:6}]);const t=await e.groupBy("category",{where:{price:{gt:2}}});expect(t).toHaveLength(2);const a=t.find(o=>o.key==="fruit"),r=t.find(o=>o.key==="vegetable");expect(a?.count).toBe(2),expect(r?.count).toBe(1)}),it("should group by numeric field",async()=>{const e=i({backend:c,tableName:"GroupBy3",key:"id"});await e.batchSet([{id:"1",name:"Apple",category:"fruit",price:1},{id:"2",name:"Banana",category:"fruit",price:2},{id:"3",name:"Carrot",category:"vegetable",price:1},{id:"4",name:"Orange",category:"fruit",price:1}]);const t=await e.groupBy("price");expect(t).toHaveLength(2);const 
a=t.find(o=>o.key===1),r=t.find(o=>o.key===2);expect(a?.count).toBe(3),expect(r?.count).toBe(1)}),it("should return empty array for empty table",async()=>{const t=await i({backend:c,tableName:"GroupBy4",key:"id"}).groupBy("category");expect(t).toEqual([])}),it("should handle null/undefined values in grouped field",async()=>{const e=i({backend:c,tableName:"GroupBy5",key:"id"});await e.batchSet([{id:"1",name:"A",tag:"red"},{id:"2",name:"B",tag:"blue"},{id:"3",name:"C"},{id:"4",name:"D",tag:"red"}]);const t=await e.groupBy("tag");expect(t.length).toBeGreaterThanOrEqual(2);const a=t.find(o=>o.key==="red"),r=t.find(o=>o.key==="blue");expect(a?.count).toBe(2),expect(r?.count).toBe(1)}),it("should verify count matches sum of grouped counts",async()=>{const e=i({backend:c,tableName:"GroupBy6",key:"id"});await e.batchSet([{id:"1",name:"Apple",category:"fruit",price:1},{id:"2",name:"Banana",category:"fruit",price:2},{id:"3",name:"Carrot",category:"vegetable",price:1},{id:"4",name:"Orange",category:"fruit",price:3},{id:"5",name:"Broccoli",category:"vegetable",price:2}]);const t=await e.count(),r=(await e.groupBy("category")).reduce((o,n)=>o+n.count,0);expect(t).toBe(5),expect(r).toBe(t)}),it("should verify count with where matches grouped count with same where",async()=>{const e=i({backend:c,tableName:"GroupBy7",key:"id"});await e.batchSet([{id:"1",name:"Apple",category:"fruit",price:1},{id:"2",name:"Banana",category:"fruit",price:5},{id:"3",name:"Carrot",category:"vegetable",price:1},{id:"4",name:"Orange",category:"fruit",price:3},{id:"5",name:"Broccoli",category:"vegetable",price:6}]);const t={price:{gt:2}},a=await e.count({where:t}),o=(await e.groupBy("category",{where:t})).reduce((n,s)=>n+s.count,0);expect(a).toBe(3),expect(o).toBe(a)}),it("should have proper TypeScript inference for key type",async()=>{const e=i({backend:c,tableName:"GroupBy8",key:"id"});await e.batchSet([{id:"1",name:"Apple",category:"fruit",price:1},{id:"2",name:"Banana",category:"fruit",price:2}]);const a=(await e.groupBy("category"))[0].key;expect(typeof a).toBe("string");const o=(await e.groupBy("price"))[0].key;expect(typeof o).toBe("number")}),it("should infer nested field types correctly",async()=>{const e=i({backend:c,tableName:"GroupBy9",key:"id"});await e.batchSet([{id:"1",details:{category:"A",info:{rating:5}}},{id:"2",details:{category:"A",info:{rating:3}}},{id:"3",details:{category:"B",info:{rating:4}}}]);const t=await e.groupBy("details.category"),a=t[0].key;expect(typeof a).toBe("string"),expect(t).toHaveLength(2);const r=await e.groupBy("details.info.rating"),o=r[0].key;expect(typeof o).toBe("number"),expect(r).toHaveLength(3)})});
@@ -1 +1 @@
- import{STATE_SCHEDULER as u}from"../create";import{getId as m}from"../utils/id";import{createTable as d}from"./table/table";function h(l){let s;async function o(){if(!s){const{backend:e,...n}=l,t=e instanceof Promise?await e:e;s=await d({backend:t,...n})}return s}const i=m();u.add(i,{onScheduleDone(e){if(!e)return;const n=e,t={};for(const c of n)c.removedAll&&(t.removedAll=!0),c.mutations&&(t.mutations||(t.mutations=[]),t.mutations.push(...c.mutations));for(const c of r)c(t)}});function a(e){u.schedule(i,e)}const r=new Set;return{subscribe(e){return r.add(e),()=>r.delete(e)},async clear(){const e=await o();return a({removedAll:!0}),e.clear()},async set(e){const t=await(await o()).set(e);return a({mutations:[t]}),t},async batchSet(e){const t=await(await o()).batchSet(e);return a({mutations:t}),t},async batchDelete(e){const t=await(await o()).batchDelete(e);return a({mutations:t}),t},async delete(e){const t=await(await o()).delete(e);return t&&a({mutations:[t]}),t},async deleteBy(e){const t=await(await o()).deleteBy(e);return a({mutations:t}),t},async get(e,n){return(await o()).get(e,n)},async*search(e={}){const n=await o();for await(const t of n.search(e))yield t},async count(e){return await(await o()).count(e)}}}export{h as createSqliteState};
+ import{STATE_SCHEDULER as u}from"../create";import{getId as m}from"../utils/id";import{createTable as d}from"./table/table";function h(l){let s;async function o(){if(!s){const{backend:e,...n}=l,t=e instanceof Promise?await e:e;s=await d({backend:t,...n})}return s}const i=m();u.add(i,{onScheduleDone(e){if(!e)return;const n=e,t={};for(const c of n)c.removedAll&&(t.removedAll=!0),c.mutations&&(t.mutations||(t.mutations=[]),t.mutations.push(...c.mutations));for(const c of r)c(t)}});function a(e){u.schedule(i,e)}const r=new Set;return{subscribe(e){return r.add(e),()=>r.delete(e)},async clear(){const e=await o();return a({removedAll:!0}),e.clear()},async set(e){const t=await(await o()).set(e);return a({mutations:[t]}),t},async batchSet(e){const t=await(await o()).batchSet(e);return a({mutations:t}),t},async batchDelete(e){const t=await(await o()).batchDelete(e);return a({mutations:t}),t},async delete(e){const t=await(await o()).delete(e);return t&&a({mutations:[t]}),t},async deleteBy(e){const t=await(await o()).deleteBy(e);return a({mutations:t}),t},async get(e,n){return(await o()).get(e,n)},async*search(e={}){const n=await o();for await(const t of n.search(e))yield t},async count(e){return await(await o()).count(e)},async groupBy(e,n){return await(await o()).groupBy(e,n)}}}export{h as createSqliteState};
@@ -1,4 +1,4 @@
- import{unicodeTokenizer as _}from"./tokenizer";import{getWhereQuery as g}from"./where";const x=500,C=100;function R(l){return"$."+l}function M(l,s){if(!(!l||!s))return s.split(".").reduce((t,w)=>{if(typeof t=="object"&&t!==null&&w in t)return t[w]},l)}async function G(l){const{backend:s,tableName:t,indexes:w,key:$,disablePragmaOptimization:L}=l,E=$!==void 0;L||(await s.execute("PRAGMA journal_mode=WAL;"),await s.execute("PRAGMA synchronous=NORMAL;"),await s.execute("PRAGMA temp_store=MEMORY;"),await s.execute("PRAGMA cache_size=-20000;")),E?await s.execute(`
+ import{unicodeTokenizer as _}from"./tokenizer";import{getWhereQuery as h}from"./where";const F=500,C=100;function p(E){return"$."+E}function M(E,s){if(!(!E||!s))return s.split(".").reduce((t,w)=>{if(typeof t=="object"&&t!==null&&w in t)return t[w]},E)}async function B(E){const{backend:s,tableName:t,indexes:w,key:$,disablePragmaOptimization:x}=E,d=$!==void 0;x||(await s.execute("PRAGMA journal_mode=WAL;"),await s.execute("PRAGMA synchronous=NORMAL;"),await s.execute("PRAGMA temp_store=MEMORY;"),await s.execute("PRAGMA cache_size=-20000;")),d?await s.execute(`
  CREATE TABLE IF NOT EXISTS ${t} (
  key TEXT PRIMARY KEY,
  data TEXT NOT NULL
@@ -7,8 +7,8 @@ import{unicodeTokenizer as _}from"./tokenizer";import{getWhereQuery as g}from"./
  CREATE TABLE IF NOT EXISTS ${t} (
  data TEXT NOT NULL
  );
- `);let d;const T=[],f={};for(const e of w??[])if(typeof e=="string"&&e.startsWith("fts:")){const n=e.slice(4),o=n.replaceAll(".","_");T.push(n),f[n]=o}else if(typeof e=="object"&&e.type==="fts"){const n=e.path,o=n.replaceAll(".","_");if(T.push(n),f[n]=o,e.tokenizer){if(!d)d=e.tokenizer;else if(d!==e.tokenizer)throw new Error(`Conflicting FTS tokenizers: already using "${d}", got "${e.tokenizer}"`)}}else{const n=String(e);await s.execute(`CREATE INDEX IF NOT EXISTS idx_${t}_${n.replaceAll(/\W/g,"_")}
- ON ${t} (json_extract(data, '${R(n)}'));`)}if(T.length>0){let e;typeof d=="object"?e=_(d):d===void 0?e='"unicode61", "remove_diacritics=1"':e=d;const n=T.map(r=>f[r]).join(", "),o=`
+ `);let l;const y=[],f={};for(const e of w??[])if(typeof e=="string"&&e.startsWith("fts:")){const n=e.slice(4),o=n.replaceAll(".","_");y.push(n),f[n]=o}else if(typeof e=="object"&&e.type==="fts"){const n=e.path,o=n.replaceAll(".","_");if(y.push(n),f[n]=o,e.tokenizer){if(!l)l=e.tokenizer;else if(l!==e.tokenizer)throw new Error(`Conflicting FTS tokenizers: already using "${l}", got "${e.tokenizer}"`)}}else{const n=String(e);await s.execute(`CREATE INDEX IF NOT EXISTS idx_${t}_${n.replaceAll(/\W/g,"_")}
+ ON ${t} (json_extract(data, '${p(n)}'));`)}if(y.length>0){let e;typeof l=="object"?e=_(l):l===void 0?e='"unicode61", "remove_diacritics=1"':e=l;const n=y.map(r=>f[r]).join(", "),o=`
  CREATE VIRTUAL TABLE IF NOT EXISTS ${t}_fts
  USING fts5(${n}, tokenize=${e});
  `;await s.execute(o),await s.execute(`
@@ -18,7 +18,7 @@ import{unicodeTokenizer as _}from"./tokenizer";import{getWhereQuery as g}from"./
  INSERT INTO ${t}_fts(rowid, ${n})
  VALUES (
  new.rowid,
- ${T.map(r=>`json_extract(new.data, '${R(r)}')`).join(", ")}
+ ${y.map(r=>`json_extract(new.data, '${p(r)}')`).join(", ")}
  );
  END;
  `),await s.execute(`
@@ -32,7 +32,12 @@ import{unicodeTokenizer as _}from"./tokenizer";import{getWhereQuery as g}from"./
  AFTER UPDATE ON ${t}
  BEGIN
  UPDATE ${t}_fts
- SET ${T.map(r=>`${f[r]}=json_extract(new.data, '${R(r)}')`).join(", ")}
+ SET ${y.map(r=>`${f[r]}=json_extract(new.data, '${p(r)}')`).join(", ")}
  WHERE rowid = old.rowid;
  END;
- `)}function h(e){if(E)return M(e,String($))}const A={backend:s,async set(e,n){const o=n??s,r=JSON.stringify(e);if(E){const a=h(e);if(a==null)throw new Error(`Document is missing the configured key "${String($)}".`);return(await o.select(`SELECT key FROM ${t} WHERE key = ?`,[a])).length>0?(await o.execute(`UPDATE ${t} SET data = ? WHERE key = ?`,[r,a]),{key:a,op:"update",document:e}):(await o.execute(`INSERT INTO ${t} (key, data) VALUES (?, ?)`,[a,r]),{key:a,op:"insert",document:e})}await o.execute(`INSERT INTO ${t} (data) VALUES (?)`,[r]);const u=(await o.select("SELECT last_insert_rowid() AS id"))[0]?.id;if(typeof u!="number")throw new Error("Failed to retrieve last_insert_rowid()");return{key:u,op:"insert",document:e}},async get(e,n=o=>o){const o=E?"key = ?":"rowid = ?",r=await s.select(`SELECT rowid, data FROM ${t} WHERE ${o}`,[e]);if(r.length===0)return;const{data:c,rowid:u}=r[0],a=JSON.parse(c),i=E?h(a)??u:u;return n(a,{rowId:u,key:i})},async delete(e,n){const o=n??s,r=E?"key = ?":"rowid = ?";if(await o.execute(`DELETE FROM ${t} WHERE ${r}`,[e]),((await s.select("SELECT changes() AS c"))[0]?.c??0)>0)return{key:e,op:"delete"}},async*search(e={}){const{sortBy:n,order:o="asc",limit:r,offset:c=0,where:u,select:a=y=>y,pageSize:i=C}=e,p=g(u,t),k=`SELECT rowid, data FROM ${t} ${p}`;let S=0,D=c;for(;;){let y=k;n?y+=` ORDER BY json_extract(data, '${R(String(n))}') COLLATE NOCASE ${o.toUpperCase()}`:y+=E?` ORDER BY key COLLATE NOCASE ${o.toUpperCase()}`:` ORDER BY rowid ${o.toUpperCase()}`;const I=r?Math.min(i,r-S):i;y+=` LIMIT ${I} OFFSET ${D}`;const m=await s.select(y);if(m.length===0)break;for(const{rowid:O,data:b}of m){if(r&&S>=r)return;const N=JSON.parse(b),F=E?h(N)??O:O;yield a(N,{rowId:O,key:F}),S++}if(m.length<I||r&&S>=r)break;D+=m.length}},async count(e={}){const n=g(e.where,t),o=`SELECT COUNT(*) as count FROM ${t} ${n}`;return(await s.select(o))[0]?.count??0},async deleteBy(e){const n=g(e,t),o=E?"key":"rowid",r=[];return await s.transaction(async c=>{const u=await c.select(`SELECT ${o} AS k FROM ${t} ${n}`);if(u.length===0)return;const a=u.map(i=>i.k);for(let i=0;i<a.length;i+=x){const p=a.slice(i,i+x),k=p.map(()=>"?").join(",");await c.execute(`DELETE FROM ${t} WHERE ${o} IN (${k})`,p)}for(const i of a)r.push({key:i,op:"delete",document:void 0})}),r},async clear(){await s.execute(`DELETE FROM ${t}`)},async batchSet(e){const n=[];return await s.transaction(async o=>{for(const r of e){const c=await A.set(r,o);n.push(c)}}),n},async batchDelete(e){const n=[];return await s.transaction(async o=>{for(const r of e){const c=await A.delete(r,o);c&&n.push(c)}}),n}};return A}export{C as DEFAULT_PAGE_SIZE,G as createTable,M as getByPath,R as toJsonPath};
+ `)}function A(e){if(d)return M(e,String($))}const O={backend:s,async set(e,n){const o=n??s,r=JSON.stringify(e);if(d){const a=A(e);if(a==null)throw new Error(`Document is missing the configured key "${String($)}".`);return(await o.select(`SELECT key FROM ${t} WHERE key = ?`,[a])).length>0?(await o.execute(`UPDATE ${t} SET data = ? WHERE key = ?`,[r,a]),{key:a,op:"update",document:e}):(await o.execute(`INSERT INTO ${t} (key, data) VALUES (?, ?)`,[a,r]),{key:a,op:"insert",document:e})}await o.execute(`INSERT INTO ${t} (data) VALUES (?)`,[r]);const u=(await o.select("SELECT last_insert_rowid() AS id"))[0]?.id;if(typeof u!="number")throw new Error("Failed to retrieve last_insert_rowid()");return{key:u,op:"insert",document:e}},async get(e,n=o=>o){const o=d?"key = ?":"rowid = ?",r=await s.select(`SELECT rowid, data FROM ${t} WHERE ${o}`,[e]);if(r.length===0)return;const{data:i,rowid:u}=r[0],a=JSON.parse(i),c=d?A(a)??u:u;return n(a,{rowId:u,key:c})},async delete(e,n){const o=n??s,r=d?"key = ?":"rowid = ?";if(await o.execute(`DELETE FROM ${t} WHERE ${r}`,[e]),((await s.select("SELECT changes() AS c"))[0]?.c??0)>0)return{key:e,op:"delete"}},async*search(e={}){const{sortBy:n,order:o="asc",limit:r,offset:i=0,where:u,select:a=T=>T,pageSize:c=C}=e,m=h(u,t),g=`SELECT rowid, data FROM ${t} ${m}`;let S=0,k=i;for(;;){let T=g;n?T+=` ORDER BY json_extract(data, '${p(String(n))}') COLLATE NOCASE ${o.toUpperCase()}`:T+=d?` ORDER BY key COLLATE NOCASE ${o.toUpperCase()}`:` ORDER BY rowid ${o.toUpperCase()}`;const N=r?Math.min(c,r-S):c;T+=` LIMIT ${N} OFFSET ${k}`;const R=await s.select(T);if(R.length===0)break;for(const{rowid:D,data:L}of R){if(r&&S>=r)return;const I=JSON.parse(L),b=d?A(I)??D:D;yield a(I,{rowId:D,key:b}),S++}if(R.length<N||r&&S>=r)break;k+=R.length}},async count(e={}){const n=h(e.where,t),o=`SELECT COUNT(*) as count FROM ${t} ${n}`;return(await s.select(o))[0]?.count??0},async deleteBy(e){const n=h(e,t),o=d?"key":"rowid",r=[];return await s.transaction(async i=>{const u=await i.select(`SELECT ${o} AS k FROM ${t} ${n}`);if(u.length===0)return;const a=u.map(c=>c.k);for(let c=0;c<a.length;c+=F){const m=a.slice(c,c+F),g=m.map(()=>"?").join(",");await i.execute(`DELETE FROM ${t} WHERE ${o} IN (${g})`,m)}for(const c of a)r.push({key:c,op:"delete",document:void 0})}),r},async clear(){await s.execute(`DELETE FROM ${t}`)},async groupBy(e,n={}){const o=h(n.where,t),i=`
+ SELECT json_extract(data, '${p(String(e))}') AS groupKey, COUNT(*) AS count
+ FROM ${t}
+ ${o}
+ GROUP BY groupKey
+ `;return(await s.select(i)).map(a=>({key:a.groupKey,count:a.count}))},async batchSet(e){const n=[];return await s.transaction(async o=>{for(const r of e){const i=await O.set(r,o);n.push(i)}}),n},async batchDelete(e){const n=[];return await s.transaction(async o=>{for(const r of e){const i=await O.delete(r,o);i&&n.push(i)}}),n}};return O}export{C as DEFAULT_PAGE_SIZE,B as createTable,M as getByPath,p as toJsonPath};
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "muya",
- "version": "2.5.2",
+ "version": "2.5.4",
  "author": "samuel.gjabel@gmail.com",
  "repository": "https://github.com/samuelgjabel/muya",
  "main": "cjs/index.js",
@@ -79,3 +79,186 @@ describe('create-sqlite-state', () => {
  expect(results.map((p) => p.id)).toEqual(['1', '2'])
  })
  })
+
+ interface Product {
+ id: string
+ name: string
+ category: string
+ price: number
+ }
+
+ describe('groupBy', () => {
+ it('should group by a simple field and count', async () => {
+ const sql = createSqliteState<Product>({ backend, tableName: 'GroupBy1', key: 'id' })
+ await sql.batchSet([
+ { id: '1', name: 'Apple', category: 'fruit', price: 1 },
+ { id: '2', name: 'Banana', category: 'fruit', price: 2 },
+ { id: '3', name: 'Carrot', category: 'vegetable', price: 1 },
+ { id: '4', name: 'Orange', category: 'fruit', price: 3 },
+ { id: '5', name: 'Broccoli', category: 'vegetable', price: 2 },
+ ])
+
+ const grouped = await sql.groupBy('category')
+
+ expect(grouped).toHaveLength(2)
+ const fruitGroup = grouped.find((g) => g.key === 'fruit')
+ const vegetableGroup = grouped.find((g) => g.key === 'vegetable')
+ expect(fruitGroup?.count).toBe(3)
+ expect(vegetableGroup?.count).toBe(2)
+ })
+
+ it('should group by with where clause filter', async () => {
+ const sql = createSqliteState<Product>({ backend, tableName: 'GroupBy2', key: 'id' })
+ await sql.batchSet([
+ { id: '1', name: 'Apple', category: 'fruit', price: 1 },
+ { id: '2', name: 'Banana', category: 'fruit', price: 5 },
+ { id: '3', name: 'Carrot', category: 'vegetable', price: 1 },
+ { id: '4', name: 'Orange', category: 'fruit', price: 3 },
+ { id: '5', name: 'Broccoli', category: 'vegetable', price: 6 },
+ ])
+
+ // Only group items with price > 2
+ const grouped = await sql.groupBy('category', { where: { price: { gt: 2 } } })
+
+ expect(grouped).toHaveLength(2)
+ const fruitGroup = grouped.find((g) => g.key === 'fruit')
+ const vegetableGroup = grouped.find((g) => g.key === 'vegetable')
+ expect(fruitGroup?.count).toBe(2) // Banana (5), Orange (3)
+ expect(vegetableGroup?.count).toBe(1) // Broccoli (6)
+ })
+
+ it('should group by numeric field', async () => {
+ const sql = createSqliteState<Product>({ backend, tableName: 'GroupBy3', key: 'id' })
+ await sql.batchSet([
+ { id: '1', name: 'Apple', category: 'fruit', price: 1 },
+ { id: '2', name: 'Banana', category: 'fruit', price: 2 },
+ { id: '3', name: 'Carrot', category: 'vegetable', price: 1 },
+ { id: '4', name: 'Orange', category: 'fruit', price: 1 },
+ ])
+
+ const grouped = await sql.groupBy('price')
+
+ expect(grouped).toHaveLength(2)
+ const price1 = grouped.find((g) => g.key === 1)
+ const price2 = grouped.find((g) => g.key === 2)
+ expect(price1?.count).toBe(3)
+ expect(price2?.count).toBe(1)
+ })
+
+ it('should return empty array for empty table', async () => {
+ const sql = createSqliteState<Product>({ backend, tableName: 'GroupBy4', key: 'id' })
+
+ const grouped = await sql.groupBy('category')
+
+ expect(grouped).toEqual([])
+ })
+
+ it('should handle null/undefined values in grouped field', async () => {
+ interface ItemWithOptional {
+ id: string
+ name: string
+ tag?: string
+ }
+ const sql = createSqliteState<ItemWithOptional>({ backend, tableName: 'GroupBy5', key: 'id' })
+ await sql.batchSet([
+ { id: '1', name: 'A', tag: 'red' },
+ { id: '2', name: 'B', tag: 'blue' },
+ { id: '3', name: 'C' }, // no tag
+ { id: '4', name: 'D', tag: 'red' },
+ ])
+
+ const grouped = await sql.groupBy('tag')
+
+ // Should have 3 groups: red, blue, and null/undefined
+ expect(grouped.length).toBeGreaterThanOrEqual(2)
+ const redGroup = grouped.find((g) => g.key === 'red')
+ const blueGroup = grouped.find((g) => g.key === 'blue')
+ expect(redGroup?.count).toBe(2)
+ expect(blueGroup?.count).toBe(1)
+ })
+
+ it('should verify count matches sum of grouped counts', async () => {
+ const sql = createSqliteState<Product>({ backend, tableName: 'GroupBy6', key: 'id' })
+ await sql.batchSet([
+ { id: '1', name: 'Apple', category: 'fruit', price: 1 },
+ { id: '2', name: 'Banana', category: 'fruit', price: 2 },
+ { id: '3', name: 'Carrot', category: 'vegetable', price: 1 },
+ { id: '4', name: 'Orange', category: 'fruit', price: 3 },
+ { id: '5', name: 'Broccoli', category: 'vegetable', price: 2 },
+ ])
+
+ const totalCount = await sql.count()
+ const grouped = await sql.groupBy('category')
+ const sumOfCounts = grouped.reduce((sum, group) => sum + group.count, 0)
+
+ expect(totalCount).toBe(5)
+ expect(sumOfCounts).toBe(totalCount)
+ })
+
+ it('should verify count with where matches grouped count with same where', async () => {
+ const sql = createSqliteState<Product>({ backend, tableName: 'GroupBy7', key: 'id' })
+ await sql.batchSet([
+ { id: '1', name: 'Apple', category: 'fruit', price: 1 },
+ { id: '2', name: 'Banana', category: 'fruit', price: 5 },
+ { id: '3', name: 'Carrot', category: 'vegetable', price: 1 },
+ { id: '4', name: 'Orange', category: 'fruit', price: 3 },
+ { id: '5', name: 'Broccoli', category: 'vegetable', price: 6 },
+ ])
+
+ const whereClause = { price: { gt: 2 } }
+ const filteredCount = await sql.count({ where: whereClause })
+ const grouped = await sql.groupBy('category', { where: whereClause })
+ const sumOfCounts = grouped.reduce((sum, group) => sum + group.count, 0)
+
+ expect(filteredCount).toBe(3) // Banana (5), Orange (3), Broccoli (6)
+ expect(sumOfCounts).toBe(filteredCount)
+ })
+
+ it('should have proper TypeScript inference for key type', async () => {
+ const sql = createSqliteState<Product>({ backend, tableName: 'GroupBy8', key: 'id' })
+ await sql.batchSet([
+ { id: '1', name: 'Apple', category: 'fruit', price: 1 },
+ { id: '2', name: 'Banana', category: 'fruit', price: 2 },
+ ])
+
+ // Group by string field - key should be string
+ const categoryGroups = await sql.groupBy('category')
+ const categoryKey: string = categoryGroups[0].key // TypeScript should infer string
+ expect(typeof categoryKey).toBe('string')
+
+ // Group by number field - key should be number
+ const priceGroups = await sql.groupBy('price')
+ const priceKey: number = priceGroups[0].key // TypeScript should infer number
+ expect(typeof priceKey).toBe('number')
+ })
+
+ it('should infer nested field types correctly', async () => {
+ interface NestedProduct {
+ id: string
+ details: {
+ category: string
+ info: {
+ rating: number
+ }
+ }
+ }
+ const sql = createSqliteState<NestedProduct>({ backend, tableName: 'GroupBy9', key: 'id' })
+ await sql.batchSet([
+ { id: '1', details: { category: 'A', info: { rating: 5 } } },
+ { id: '2', details: { category: 'A', info: { rating: 3 } } },
+ { id: '3', details: { category: 'B', info: { rating: 4 } } },
+ ])
+
+ // Group by nested string field
+ const categoryGroups = await sql.groupBy('details.category')
+ const nestedKey: string = categoryGroups[0].key
+ expect(typeof nestedKey).toBe('string')
+ expect(categoryGroups).toHaveLength(2)
+
+ // Group by deeply nested number field
+ const ratingGroups = await sql.groupBy('details.info.rating')
+ const ratingKey: number = ratingGroups[0].key
+ expect(typeof ratingKey).toBe('number')
+ expect(ratingGroups).toHaveLength(3)
+ })
+ })
@@ -2,7 +2,18 @@ import { STATE_SCHEDULER } from '../create'
  import { getId } from '../utils/id'
  import type { Backend } from './table'
  import { createTable } from './table/table'
- import type { DbOptions, DocType, Key, MutationResult, SearchOptions, Table } from './table/table.types'
+ import type {
+ DbOptions,
+ DocType,
+ DotPath,
+ GetFieldType,
+ GroupByOptions,
+ GroupByResult,
+ Key,
+ MutationResult,
+ SearchOptions,
+ Table,
+ } from './table/table.types'
  import type { Where } from './table/where'

  export interface CreateSqliteOptions<Document extends DocType> extends Omit<DbOptions<Document>, 'backend'> {
@@ -26,6 +37,10 @@ export interface SyncTable<Document extends DocType> {
  readonly count: (options?: { where?: Where<Document> }) => Promise<number>
  readonly deleteBy: (where: Where<Document>) => Promise<MutationResult<Document>[]>
  readonly clear: () => Promise<void>
+ readonly groupBy: <Field extends DotPath<Document>>(
+ field: Field,
+ options?: GroupByOptions<Document>,
+ ) => Promise<Array<GroupByResult<GetFieldType<Document, Field>>>>
  }

  /**
@@ -139,6 +154,10 @@ export function createSqliteState<Document extends DocType>(options: CreateSqlit
  const table = await getTable()
  return await table.count(countOptions)
  },
+ async groupBy(field, groupByOptions) {
+ const table = await getTable()
+ return await table.groupBy(field, groupByOptions)
+ },
  }

  return state
@@ -4,7 +4,18 @@
  /* eslint-disable @typescript-eslint/no-shadow */
  /* eslint-disable no-shadow */
  import type { Backend } from './backend'
- import type { Table, DbOptions, DocType, Key, SearchOptions, MutationResult } from './table.types'
+ import type {
+ Table,
+ DbOptions,
+ DocType,
+ Key,
+ SearchOptions,
+ MutationResult,
+ GroupByResult,
+ GroupByOptions,
+ DotPath,
+ GetFieldType,
+ } from './table.types'
  import { unicodeTokenizer, type FtsTokenizerOptions } from './tokenizer'
  import type { Where } from './where'
  import { getWhereQuery } from './where'
@@ -299,6 +310,23 @@ export async function createTable<Document extends DocType>(options: DbOptions<D
  await backend.execute(`DELETE FROM ${tableName}`)
  },

+ async groupBy<Field extends DotPath<Document>>(
+ field: Field,
+ options: GroupByOptions<Document> = {},
+ ): Promise<Array<GroupByResult<GetFieldType<Document, Field>>>> {
+ const whereSql = getWhereQuery<Document>(options.where, tableName)
+ const jsonPath = toJsonPath(String(field))
+ const query = `
+ SELECT json_extract(data, '${jsonPath}') AS groupKey, COUNT(*) AS count
+ FROM ${tableName}
+ ${whereSql}
+ GROUP BY groupKey
+ `
+ type FieldType = GetFieldType<Document, Field>
+ const results = await backend.select<Array<{ groupKey: FieldType; count: number }>>(query)
+ return results.map((row) => ({ key: row.groupKey, count: row.count }))
+ },
+
  async batchSet(documents: Document[]) {
  const mutations: MutationResult<Document>[] = []
  await backend.transaction(async (tx) => {
@@ -30,6 +30,18 @@ type DotPathRaw<T, D extends number = 5> = [D] extends [never]

  export type DotPath<T> = DotPathRaw<MakeAllFieldAsRequired<T>>

+ /**
+ * Extract the value type at a given dot path
+ * e.g., GetFieldType<{ user: { name: string } }, 'user.name'> = string
+ */
+ export type GetFieldType<T, Path extends string> = Path extends `${infer First}.${infer Rest}`
+ ? First extends keyof T
+ ? GetFieldType<T[First], Rest>
+ : never
+ : Path extends keyof T
+ ? T[Path]
+ : never
+
  // Built-in FTS5 tokenizers
  export type FtsTokenizer =
  | 'porter' // English stemming
@@ -86,6 +98,15 @@ interface MutationResultUpdateInsert<T> extends MutationResultBase<T> {

  export type MutationResult<T> = MutationResultDelete<T> | MutationResultUpdateInsert<T>

+ export interface GroupByResult<K> {
+ readonly key: K
+ readonly count: number
+ }
+
+ export interface GroupByOptions<Document extends DocType> {
+ readonly where?: Where<Document>
+ }
+

  export interface Table<Document extends DocType> extends DbNotGeneric {
  readonly batchSet: (documents: Document[]) => Promise<MutationResult<Document>[]>
@@ -97,6 +118,10 @@ export interface Table<Document extends DocType> extends DbNotGeneric {
  readonly count: (options?: { where?: Where<Document> }) => Promise<number>
  readonly deleteBy: (where: Where<Document>) => Promise<MutationResult<Document>[]>
  readonly clear: () => Promise<void>
+ readonly groupBy: <Field extends DotPath<Document>>(
+ field: Field,
+ options?: GroupByOptions<Document>,
+ ) => Promise<Array<GroupByResult<GetFieldType<Document, Field>>>>
  }

  export type MakeAllFieldAsRequired<T> = {
@@ -1,5 +1,5 @@
  import type { Backend } from './table';
- import type { DbOptions, DocType, Key, MutationResult, SearchOptions } from './table/table.types';
+ import type { DbOptions, DocType, DotPath, GetFieldType, GroupByOptions, GroupByResult, Key, MutationResult, SearchOptions } from './table/table.types';
  import type { Where } from './table/where';
  export interface CreateSqliteOptions<Document extends DocType> extends Omit<DbOptions<Document>, 'backend'> {
  readonly backend: Backend | Promise<Backend>;
@@ -21,6 +21,7 @@ export interface SyncTable<Document extends DocType> {
  }) => Promise<number>;
  readonly deleteBy: (where: Where<Document>) => Promise<MutationResult<Document>[]>;
  readonly clear: () => Promise<void>;
+ readonly groupBy: <Field extends DotPath<Document>>(field: Field, options?: GroupByOptions<Document>) => Promise<Array<GroupByResult<GetFieldType<Document, Field>>>>;
  }
  /**
  * Create a SyncTable that wraps a Table and provides reactive capabilities
@@ -19,6 +19,11 @@ type DotPathRaw<T, D extends number = 5> = [D] extends [never] ? never : T exten
  [K in Extract<keyof T, string>]: T[K] extends object ? K | `${K}.${DotPathRaw<T[K], Previous[D]>}` : K;
  }[Extract<keyof T, string>] : never;
  export type DotPath<T> = DotPathRaw<MakeAllFieldAsRequired<T>>;
+ /**
+ * Extract the value type at a given dot path
+ * e.g., GetFieldType<{ user: { name: string } }, 'user.name'> = string
+ */
+ export type GetFieldType<T, Path extends string> = Path extends `${infer First}.${infer Rest}` ? First extends keyof T ? GetFieldType<T[First], Rest> : never : Path extends keyof T ? T[Path] : never;
  export type FtsTokenizer = 'porter' | 'simple' | 'icu' | 'unicode61' | FtsTokenizerOptions;
  export interface FtsType<Document extends DocType> {
  readonly type: 'fts';
@@ -59,6 +64,13 @@ interface MutationResultUpdateInsert<T> extends MutationResultBase<T> {
  document: T;
  }
  export type MutationResult<T> = MutationResultDelete<T> | MutationResultUpdateInsert<T>;
+ export interface GroupByResult<K> {
+ readonly key: K;
+ readonly count: number;
+ }
+ export interface GroupByOptions<Document extends DocType> {
+ readonly where?: Where<Document>;
+ }
  export interface Table<Document extends DocType> extends DbNotGeneric {
  readonly set: (document: Document, backendOverride?: Backend) => Promise<MutationResult<Document>>;
  readonly batchSet: (documents: Document[]) => Promise<MutationResult<Document>[]>;
@@ -71,6 +83,7 @@ export interface Table<Document extends DocType> extends DbNotGeneric {
  }) => Promise<number>;
  readonly deleteBy: (where: Where<Document>) => Promise<MutationResult<Document>[]>;
  readonly clear: () => Promise<void>;
+ readonly groupBy: <Field extends DotPath<Document>>(field: Field, options?: GroupByOptions<Document>) => Promise<Array<GroupByResult<GetFieldType<Document, Field>>>>;
  }
  export type MakeAllFieldAsRequired<T> = {
  [K in keyof T]-?: T[K] extends object ? MakeAllFieldAsRequired<T[K]> : T[K];