muya 2.4.0 → 2.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/sqlite/create-sqlite.js +1 -1
- package/esm/sqlite/table/table.js +5 -5
- package/package.json +1 -1
- package/src/sqlite/__tests__/use-sqlite.test.tsx +49 -0
- package/src/sqlite/create-sqlite.ts +4 -4
- package/src/sqlite/table/table.ts +6 -3
- package/src/sqlite/table/table.types.ts +1 -1
- package/types/sqlite/table/table.types.d.ts +1 -0
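Taken together, the hunks below thread a logical document key through the SQLite layer: the select callback's meta object gains a "key" field next to "rowId", and a key configured as a deep path (e.g. key: 'person.id') is resolved from the document itself, falling back to the rowid when no user key is set. As an illustrative sketch only (not part of the published diff; the import path, backend value and table name are assumptions), the updated API could be consumed like this:

// Illustrative sketch; the import path and backend are assumptions, not taken from the diff.
import { createSqliteState } from 'muya'

declare const backend: any // whatever SQLite backend the application already hands to muya

interface DeepItem {
  person: { id: string; name: string; age: number }
}

const sql = createSqliteState<DeepItem>({ backend, tableName: 'people', key: 'person.id' })

// The select meta now carries the logical key resolved from the document.
for await (const row of sql.search({
  sortBy: 'person.age',
  select: (document, { rowId, key }) => ({ document, rowId, key }),
})) {
  console.log(row.key, row.document.person.name)
}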
package/esm/sqlite/create-sqlite.js
CHANGED
@@ -1 +1 @@
- import{STATE_SCHEDULER as f}from"../create";import{getId as P}from"../utils/id";import{shallow as v}from"../utils/shallow";import{selectSql as O}from"./select-sql";import{createTable as R,DEFAULT_STEP_SIZE as M}from"./table/table";function
+ import{STATE_SCHEDULER as f}from"../create";import{getId as P}from"../utils/id";import{shallow as v}from"../utils/shallow";import{selectSql as O}from"./select-sql";import{createTable as R,DEFAULT_STEP_SIZE as M}from"./table/table";function A(k){const g=P();function m(e){return`state-${g}-search-${e}`}let h;async function o(){if(!h){const{backend:e,...n}=k,t=e instanceof Promise?await e:e;h=await R({backend:t,...n})}return h}const c=new Map,d=new Map,y=new Map;async function p(e,n){const t=y.get(e),{options:a={}}=n,{stepSize:s=M}=a;if(!t)return!1;const r=[];for(let u=0;u<s;u++){const i=await t.next();if(i.done){y.delete(e);break}n.keys.has(String(i.value.key))||(r.push(i.value.document),n.keys.add(String(i.value.key)))}return r.length===0||v(n.items,r)?!1:(n.items=[...n.items,...r],!0)}function b(e){const n=d.get(e);n&&n()}async function x(e){const n=await o(),t=c.get(e);if(!t)return;const{options:a}=t,s=n.search({...a,select:(r,{rowId:u,key:i})=>({document:r,rowId:u,key:i})});y.set(e,s),t.keys=new Set,t.items=[],await p(e,t)}async function S(e){await x(e),b(e)}function T(e){const{key:n,op:t}=e,a=new Set;for(const[s,{keys:r}]of c)switch(t){case"delete":case"update":{r.has(String(n))&&a.add(s);break}case"insert":{a.add(s);break}}return a}async function l(e){const n=new Set;for(const t of e){const a=T(t);for(const s of a)n.add(s)}for(const t of n){const a=m(t);f.schedule(a,{searchId:t})}}const D=new Set;function w(e,n){c.has(e)||(c.set(e,{items:[],options:n,keys:new Set}),n&&S(e));const t=c.get(e);return n&&(t.options=n),t}const I={clear(e){c.delete(e)},async set(e){const t=await(await o()).set(e);return await l([t]),t},async batchSet(e){const t=await(await o()).batchSet(e);return await l(t),t},async delete(e){const t=await(await o()).delete(e);return t&&await l([t]),t},async deleteBy(e){const t=await(await o()).deleteBy(e);return await l(t),t},async get(e,n){return(await o()).get(e,n)},async*search(e={}){const n=await o();for await(const t of n.search(e))yield t},async count(e){return await(await o()).count(e)},updateSearchOptions(e,n){const t=w(e,n);t.options=n;const a=m(e);f.schedule(a,{searchId:e})},subscribe(e,n){const t=m(e),a=f.add(t,{onScheduleDone(){S(e)}});return D.add(a),d.has(e)||d.set(e,n),()=>{d.delete(e),a()}},getSnapshot(e){return w(e).items},refresh:S,destroy(){for(const e of D)e();c.clear(),d.clear()},async next(e){const n=c.get(e);if(n){const t=await p(e,n);return t&&b(e),t}return!1},select(e){return O(I,e)}};return I}export{A as createSqliteState};
package/esm/sqlite/table/table.js
CHANGED
@@ -1,4 +1,4 @@
- import{unicodeTokenizer as
+ import{unicodeTokenizer as C}from"./tokenizer";import{getWhereQuery as k}from"./where";const L=500,M=100;function R(l){return"$."+l}function U(l,a){if(!(!l||!a))return a.split(".").reduce((t,y)=>{if(typeof t=="object"&&t!==null&&y in t)return t[y]},l)}async function z(l){const{backend:a,tableName:t,indexes:y,key:$,disablePragmaOptimization:x}=l,u=$!==void 0;x||(await a.execute("PRAGMA journal_mode=WAL;"),await a.execute("PRAGMA synchronous=NORMAL;"),await a.execute("PRAGMA temp_store=MEMORY;"),await a.execute("PRAGMA cache_size=-20000;")),u?await a.execute(`
  CREATE TABLE IF NOT EXISTS ${t} (
  key TEXT PRIMARY KEY,
  data TEXT NOT NULL
@@ -7,8 +7,8 @@ import{unicodeTokenizer as _}from"./tokenizer";import{getWhereQuery as h}from"./
  CREATE TABLE IF NOT EXISTS ${t} (
  data TEXT NOT NULL
  );
- `);let
- ON ${t} (json_extract(data, '${R(n)}'));`)}if(T.length>0){let e;typeof
+ `);let d;const T=[],f={};for(const e of y??[])if(typeof e=="string"&&e.startsWith("fts:")){const n=e.slice(4),r=n.replaceAll(".","_");T.push(n),f[n]=r}else if(typeof e=="object"&&e.type==="fts"){const n=e.path,r=n.replaceAll(".","_");if(T.push(n),f[n]=r,e.tokenizer){if(!d)d=e.tokenizer;else if(d!==e.tokenizer)throw new Error(`Conflicting FTS tokenizers: already using "${d}", got "${e.tokenizer}"`)}}else{const n=String(e);await a.execute(`CREATE INDEX IF NOT EXISTS idx_${t}_${n.replaceAll(/\W/g,"_")}
+ ON ${t} (json_extract(data, '${R(n)}'));`)}if(T.length>0){let e;typeof d=="object"?e=C(d):d===void 0?e='"unicode61", "remove_diacritics=1"':e=d;const n=T.map(o=>f[o]).join(", "),r=`
  CREATE VIRTUAL TABLE IF NOT EXISTS ${t}_fts
  USING fts5(${n}, tokenize=${e});
  `;await a.execute(r),await a.execute(`
@@ -32,7 +32,7 @@ import{unicodeTokenizer as _}from"./tokenizer";import{getWhereQuery as h}from"./
  AFTER UPDATE ON ${t}
  BEGIN
  UPDATE ${t}_fts
- SET ${T.map(o=>`${
+ SET ${T.map(o=>`${f[o]}=json_extract(new.data, '${R(o)}')`).join(", ")}
  WHERE rowid = old.rowid;
  END;
- `)}function
+ `)}function A(e){if(u)return U(e,String($))}async function b(e){return(await e.select("SELECT changes() AS c"))[0]?.c??0}const g={backend:a,async set(e,n){const r=n??a,o=JSON.stringify(e);if(u){const s=A(e);if(s==null)throw new Error(`Document is missing the configured key "${String($)}".`);if(await r.execute(`UPDATE ${t} SET data = ? WHERE key = ?`,[o,s]),await b(r)===1)return{key:s,op:"update"};try{return await r.execute(`INSERT INTO ${t} (key, data) VALUES (?, ?)`,[s,o]),{key:s,op:"insert"}}catch{return await r.execute(`UPDATE ${t} SET data = ? WHERE key = ?`,[o,s]),{key:s,op:"update"}}}await r.execute(`INSERT INTO ${t} (data) VALUES (?)`,[o]);const c=(await r.select("SELECT last_insert_rowid() AS id"))[0]?.id;if(typeof c!="number")throw new Error("Failed to retrieve last_insert_rowid()");return{key:c,op:"insert"}},async get(e,n=r=>r){const r=u?"key = ?":"rowid = ?",o=await a.select(`SELECT rowid, data FROM ${t} WHERE ${r}`,[e]);if(o.length===0)return;const{data:E,rowid:c}=o[0],s=JSON.parse(E),i=u?A(s)??c:c;return n(s,{rowId:c,key:i})},async delete(e){const n=u?"key = ?":"rowid = ?";if(await a.execute(`DELETE FROM ${t} WHERE ${n}`,[e]),((await a.select("SELECT changes() AS c"))[0]?.c??0)>0)return{key:e,op:"delete"}},async*search(e={}){const{sortBy:n,order:r="asc",limit:o,offset:E=0,where:c,select:s=w=>w,stepSize:i=M}=e,p=k(c,t),h=`SELECT rowid, data FROM ${t} ${p}`;let S=0,D=E;for(;;){let w=h;n?w+=` ORDER BY json_extract(data, '${R(String(n))}') COLLATE NOCASE ${r.toUpperCase()}`:w+=u?` ORDER BY key COLLATE NOCASE ${r.toUpperCase()}`:` ORDER BY rowid ${r.toUpperCase()}`;const I=o?Math.min(i,o-S):i;w+=` LIMIT ${I} OFFSET ${D}`;const m=await a.select(w);if(m.length===0)break;for(const{rowid:O,data:F}of m){if(o&&S>=o)return;const N=JSON.parse(F),_=u?A(N)??O:O;yield s(N,{rowId:O,key:_}),S++}if(m.length<I||o&&S>=o)break;D+=m.length}},async count(e={}){const n=k(e.where,t),r=`SELECT COUNT(*) as count FROM ${t} ${n}`;return(await a.select(r))[0]?.count??0},async deleteBy(e){const n=k(e,t),r=u?"key":"rowid",o=[];return await a.transaction(async E=>{const c=await E.select(`SELECT ${r} AS k FROM ${t} ${n}`);if(c.length===0)return;const s=c.map(i=>i.k);for(let i=0;i<s.length;i+=L){const p=s.slice(i,i+L),h=p.map(()=>"?").join(",");await E.execute(`DELETE FROM ${t} WHERE ${r} IN (${h})`,p)}for(const i of s)o.push({key:i,op:"delete"})}),o},async clear(){await a.execute(`DELETE FROM ${t}`)},async batchSet(e){const n=[];return await a.transaction(async r=>{for(const o of e){const E=await g.set(o,r);n.push(E)}}),n}};return g}export{M as DEFAULT_STEP_SIZE,z as createTable,U as getByPath,R as toJsonPath};
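The rebuilt table.js is easier to read in outline than in minified form: FTS indexes can now be declared either as 'fts:<path>' strings or as { type: 'fts', path, tokenizer } objects, conflicting tokenizers across FTS entries throw an error, index and sort paths become json_extract paths via toJsonPath, and a deep key is read off the document with getByPath. A hedged sketch of creating a table against this build (the import path, backend and table name are assumptions):

// createTable, getByPath and toJsonPath are real exports of the table module; the path below is assumed.
import { createTable, getByPath, toJsonPath } from 'muya/esm/sqlite/table/table'

declare const backend: any // assumed SQLite backend

const table = await createTable({
  backend,
  tableName: 'people',
  key: 'person.id', // deep key paths are resolved via getByPath(document, 'person.id')
  indexes: [
    'person.age',                        // plain index: CREATE INDEX ... json_extract(data, '$.person.age')
    'fts:person.name',                   // FTS column declared as a string
    { type: 'fts', path: 'person.bio' }, // FTS column declared as an object; an optional tokenizer must agree across entries
  ],
})

console.log(toJsonPath('person.age'))                        // "$.person.age"
console.log(getByPath({ person: { id: 'a' } }, 'person.id')) // "a"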
package/package.json
CHANGED
package/src/sqlite/__tests__/use-sqlite.test.tsx
CHANGED
@@ -324,4 +324,53 @@ describe('use-sqlite-state', () => {
      expect(result2.current[0].length).toBe(50)
    })
  })
+
+  it('should handle update of deep fields with deep id', async () => {
+    interface DeepItem {
+      person: {
+        id: string
+        name: string
+        age: number
+      }
+    }
+    const sql = createSqliteState<DeepItem>({ backend, tableName: 'State10', key: 'person.id' })
+    let reRenders = 0
+    const { result } = renderHook(() => {
+      reRenders++
+      return useSqliteValue(sql, { sortBy: 'person.age' }, [])
+    })
+
+    await waitFor(() => {
+      expect(reRenders).toBe(2)
+      expect(result.current[0].length).toBe(0)
+    })
+
+    act(() => {
+      sql.set({ person: { id: 'some_id', name: 'Alice', age: 30 } })
+    })
+    await waitFor(() => {
+      expect(reRenders).toBe(3)
+      expect(result.current[0]).toEqual([{ person: { id: 'some_id', name: 'Alice', age: 30 } }])
+    })
+
+    // update deep field
+    act(() => {
+      sql.set({ person: { id: 'some_id', name: 'Alice', age: 31 } })
+    })
+    await waitFor(() => {
+      // expect(reRenders).toBe(4)
+      expect(result.current[0]).toEqual([{ person: { id: 'some_id', name: 'Alice', age: 31 } }])
+    })
+
+    // update same field
+    act(() => {
+      sql.set({ person: { id: 'some_id', name: 'Alice', age: 31 } })
+    })
+    // should not re-render
+    await waitFor(() => {
+      expect(result.current[0]).toEqual([{ person: { id: 'some_id', name: 'Alice', age: 31 } }])
+    })
+
+    // add another item
+  })
  })
package/src/sqlite/create-sqlite.ts
CHANGED
@@ -77,7 +77,7 @@ export function createSqliteState<Document extends DocType>(options: CreateSqlit

    interface NextResult {
      document: Document
-
+     key: Key
    }
    // const emitter = createEmitter<Table<Document>>()
    const cachedData = new Map<SearchId, DataItems<Document>>()
@@ -104,9 +104,9 @@ export function createSqliteState<Document extends DocType>(options: CreateSqlit
        break
      }

-     if (!data.keys.has(String(result.value.
+     if (!data.keys.has(String(result.value.key))) {
        newItems.push(result.value.document)
-       data.keys.add(String(result.value.
+       data.keys.add(String(result.value.key))
      }
    }

@@ -136,7 +136,7 @@ export function createSqliteState<Document extends DocType>(options: CreateSqlit
    const data = cachedData.get(searchId)
    if (!data) return
    const { options: refreshOptions } = data
-   const iterator = table.search({ ...refreshOptions, select: (document, { rowId }) => ({ document, rowId }) })
+   const iterator = table.search({ ...refreshOptions, select: (document, { rowId, key }) => ({ document, rowId, key }) })
    iterators.set(searchId, iterator)
    data.keys = new Set()
    data.items = []
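The create-sqlite.ts hunks switch the pagination cache from tracking rows to tracking logical keys, so a document that is updated in place is not appended a second time when further pages are pulled in. Reduced to a standalone sketch (names here are illustrative, not the package's internals):

// De-duplicate streamed search results by their logical key, mirroring the keys Set in the diff above.
async function collectUnique<Document>(
  rows: AsyncIterable<{ document: Document; rowId: number; key: string | number }>,
): Promise<Document[]> {
  const seenKeys = new Set<string>() // keyed by the logical key, not the rowid
  const items: Document[] = []
  for await (const { document, key } of rows) {
    if (seenKeys.has(String(key))) continue // an update to an already-seen key is not re-appended
    seenKeys.add(String(key))
    items.push(document)
  }
  return items
}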
package/src/sqlite/table/table.ts
CHANGED
@@ -209,7 +209,7 @@ export async function createTable<Document extends DocType>(options: DbOptions<D

    async get<Selected = Document>(
      keyValue: Key,
-     selector: (document: Document, meta: { rowId: number }) => Selected = (d) => d as unknown as Selected,
+     selector: (document: Document, meta: { rowId: number; key: Key }) => Selected = (d) => d as unknown as Selected,
    ) {
      const whereKey = hasUserKey ? `key = ?` : `rowid = ?`
      const result = await backend.select<Array<{ data: string; rowid: number }>>(
@@ -219,7 +219,8 @@ export async function createTable<Document extends DocType>(options: DbOptions<D
      if (result.length === 0) return
      const { data, rowid } = result[0]
      const document = JSON.parse(data) as Document
-
+     const logicalKey = hasUserKey ? (getKeyFromDocument(document) ?? rowid) : rowid
+     return selector(document, { rowId: rowid, key: logicalKey }) as Selected
    },

    async delete(keyValue: Key) {
@@ -264,7 +265,9 @@ export async function createTable<Document extends DocType>(options: DbOptions<D
      for (const { rowid, data } of results) {
        if (limit && yielded >= limit) return
        const document = JSON.parse(data) as Document
-
+       const logicalKey = hasUserKey ? (getKeyFromDocument(document) ?? rowid) : rowid
+       // Pass both rowId and logicalKey
+       yield select(document, { rowId: rowid, key: logicalKey }) as Selected
        yielded++
      }

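On the table API itself, get() and search() now hand the selector a logical key that falls back to the rowid when no user key is configured. A usage sketch against the table created in the earlier sketch (the option names are taken from the diff; everything else is assumed):

// Selector meta carries both the physical rowid and the logical key.
const row = await table.get('some_id', (document, { rowId, key }) => ({ rowId, key, document }))
console.log(row)

for await (const hit of table.search({
  sortBy: 'person.age',
  limit: 10,
  select: (document, { rowId, key }) => ({ key, rowId, document }),
})) {
  console.log(hit.key, hit.rowId)
}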
package/src/sqlite/table/table.types.ts
CHANGED
@@ -47,7 +47,7 @@ export interface DbOptions<Document extends DocType> {
  }

  export interface SearchOptions<Document extends DocType, Selected = Document> extends SqlSeachOptions<Document> {
-   readonly select?: (document: Document, meta: { rowId: number }) => Selected
+   readonly select?: (document: Document, meta: { rowId: number; key: Key }) => Selected
  }

  interface DbNotGeneric {
package/types/sqlite/table/table.types.d.ts
CHANGED
@@ -28,6 +28,7 @@ export interface DbOptions<Document extends DocType> {
  export interface SearchOptions<Document extends DocType, Selected = Document> extends SqlSeachOptions<Document> {
      readonly select?: (document: Document, meta: {
          rowId: number;
+         key: Key;
      }) => Selected;
  }
  interface DbNotGeneric {