gs-idb-pro 0.1.0 → 0.1.2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their respective public registries.
- package/README.md +5 -2
- package/lib/index.cjs +1247 -1
- package/lib/index.d.ts +3 -3
- package/lib/index.js +1281 -1
- package/lib/index.web.js +1555 -1
- package/package.json +10 -2
package/lib/index.web.js
CHANGED
|
@@ -1 +1,1555 @@
|
|
|
1
|
-
const e=Symbol("save"),t=Symbol("delete"),r=Object.freeze({key:({primaryKey:e})=>({value:e}),value:({value:e})=>({value:e}),keyValue:({primaryKey:e,value:t})=>({value:[e,t]})});function n(e){if(Array.isArray(e))return{key:e[1],value:e[0]};if("value"in e){const{key:t,value:r}=e;return{key:t,value:r}}throw new Error(`not include value in invalid DBRecord:${JSON.stringify(e)}`)}const a=Symbol("break"),i=Symbol("finished"),s=Symbol("continue"),o=Symbol("continue key"),c=Symbol("next key"),u=Symbol("continue primary key"),h=Symbol("next primary key");function d(e){return e instanceof IDBObjectStore||e instanceof IDBIndex}const f=Object.freeze({keyPath:"id",autoIncrement:!0,addedTimeField:!0,updatedTimeField:!0}),l=Object.freeze({addedTimeField:"added_at",softDeletedField:"deleted",updatedCountField:"updated_count",updatedTimeField:"updated_at"});function y(e){return e instanceof Function||"function"==typeof e}function m(e){return!0===e||!1===e||e instanceof Boolean||"boolean"==typeof e}function p(e){return"number"==typeof e||e instanceof Number}function g(e){return"string"==typeof e||e instanceof String}function w(e){return e&&(t=e,!(Array.isArray(t)||t instanceof Array||t?.constructor===Array))&&!y(e)&&(e instanceof Object||"object"==typeof e);var t}function b(e){return null!=e&&y(e[Symbol.iterator])}function S(e){return null!=e&&y(e[Symbol.asyncIterator])}function x(e){return null!=e&&"function"==typeof e.next}async function v(e,t){for(let r=0;r<e.length;r++)await t(e[r])}async function k(e,t){const r=await e,n=[];for(let e=0;e<r.length;e++)n.push(await t(r[e],e,r));return n}function D(e){if("object"!=typeof e||null===e||e instanceof Date||e instanceof Function||e instanceof RegExp||typeof Element<"u"&&e instanceof Element)return e;if(e instanceof Set){const t=new Set;for(const r of e)t.add(D(r));return t}if(e instanceof Map){const t=new Map;for(const[r,n]of e)t.set(D(r),D(n));return t}if(Array.isArray(e)){const t=[];for(const r of e)t.push(D(r));return t}const 
t={};for(const[r,n]of Object.entries(e))t[r]=D(n);return t}function P(e){return Object.freeze(e),Object.getOwnPropertyNames(e).forEach(t=>{const r=e[t];null!==r&&("object"==typeof r||"function"==typeof r)&&!Object.isFrozen(r)&&P(r)}),e}class ${rootData$;space;showFn;spaceEffectiveLength;out;type;ignoreCircularRef;constructor(e){let t=0;w(e[t])&&e[t].rootData$&&Object.assign(this,e[t++]),t<1&&(this.rootData$=e[t++]);const r=e.slice(t),n=r.filter(e=>p(e));void 0===this.out&&(this.out=r.find(e=>y(e))||null),void 0===this.showFn&&(this.showFn=!!r.find(e=>m(e))),void 0===this.spaceEffectiveLength&&(this.spaceEffectiveLength=n[0]||100),void 0===this.space&&(this.space=n[1]||2),void 0===this.type&&(this.type=r.find(e=>g(e))||"js"),void 0===this.ignoreCircularRef&&(this.ignoreCircularRef=!0)}destroy(){this.out=null,this.rootData$=null}}async function O(e,t,r){if("string"==typeof(e=await e)||e instanceof Date)return JSON.stringify(e);if(e instanceof Function)return t.showFn?e.toString():"undefined";if("object"!=typeof e||null===e)return e+"";if(e instanceof RegExp)return e.toString();if(typeof Element<"u"&&e instanceof Element)return"undefined";if(r.has(e)){if(t.ignoreCircularRef)return;throw new Error("circular reference")}r.add(e);try{return"json"===t.type?await async function(e,t,r){const n=[];if(Array.isArray(e)||e instanceof Set||b(e)&&!(e instanceof Map)){for(const a of e)if(void 0!==a&&(!y(a)||t.showFn)){const e=await O(a,t,r);void 0!==e&&n.push(e)}const{spaceStart:a,spaceEnd:i,arrJoin:s}=E(n,t);return n.length?`[${a}${s()}${i}]`:"[]"}if(S(e)||x(e)){for await(const a of e)if(void 0!==a&&(!y(a)||t.showFn)){const e=await O(a,t,r);void 0!==e&&n.push(e)}const{spaceStart:a,spaceEnd:i,arrJoin:s}=E(n,t);return n.length?`[${a}${s()}${i}]`:"[]"}let a=e instanceof Map?e.entries():Object.entries(e);for(const[e,i]of a)if(void 0!==i&&(!y(i)||t.showFn)){const a=await O(i,t,r);void 0!==a&&n.push(`"${e}": ${a}`)}const{spaceStart:i,spaceEnd:s,arrJoin:o}=E(n,t);return 
n.length?`[${i}${o()}${s}]`:"[]"}(e,t,r):await async function(e,t,r){const n=[];if(e instanceof Set){for(const a of e)(!y(a)||t.showFn)&&n.push(await O(a,t,r));const{spaceStart:a,spaceEnd:i,arrJoin:s}=E(n,t);return n.length?`new Set([${a}${s()}${i}])`:"[]"}if(e instanceof Map){for(const[a,i]of e)(!y(i)||t.showFn)&&n.push(`${I(await O(a,t,r))}: ${await O(i,t,r)}`);const{spaceStart:a,spaceEnd:i,arrJoin:s}=E(n,t);return n.length?`new Map(Object.entries({${a}${s()}${i}})`:"new Map()"}if(Array.isArray(e)||b(e)){for(const a of e)(!y(a)||t.showFn)&&n.push(await O(a,t,r));const{spaceStart:a,spaceEnd:i,arrJoin:s}=E(n,t);return n.length?`[${a}${s()}${i}]`:"[]"}if(x(e)||S(e)){for await(const a of e)(!y(a)||t.showFn)&&n.push(await O(a,t,r));const{spaceStart:a,spaceEnd:i,arrJoin:s}=E(n,t);return n.length?`[${a}${s()}${i}]`:"[]"}for(const[a,i]of Object.entries(e))(!y(i)||t.showFn)&&n.push(`${I(a)}: ${await O(i,t,r)}`);const{spaceStart:a,spaceEnd:i,arrJoin:s}=E(n,t);return n.length?`{${a}${s()}${i}}`:"{}"}(e,t,r)}finally{r.delete(e)}}function E(e,t){let r=t.space;e.reduce((e,t)=>e+t.length,0)<t.spaceEffectiveLength&&(r=0);const n=r?"\n"+"".padStart(r):"",a=r?"\n":"",i=r?"\n".padEnd(r+1):",";return{spaceStart:n,spaceEnd:a,arrJoin:r?()=>e.join(",\n").replace(/\n/g,i):()=>e.join(",")}}function I(e){return/^[a-z_]+\w*$/i.test(e)?e:`"${e}"`}async function j(...e){const t=new $(e),r=new Set;try{const e=await O(t.rootData$,t,r);if(!t.out)return e;t.out(e)}finally{r.clear(),t.destroy()}}function F(e){return e instanceof Function||"function"==typeof e}function M(e){return"string"==typeof e||e instanceof String}var A,q;function R(e){return"string"==typeof(t=e)||"number"==typeof t||t instanceof String||t instanceof Number||e instanceof Date||e instanceof ArrayBuffer||Array.isArray(e)&&e.every(R);var t}function B(e){return null==e||function(e){return e instanceof IDBKeyRange||R(e)}(e)}async function C(e){return(await indexedDB.databases()).find(t=>t.name===e)}async function N(e,t){const 
r=M(e)?{name:e}:e;F(t)&&(r.onsuccess=t);const{name:n,onsuccess:a,onupgradeneeded:i,version:s}=r;return function(e,t,r){return new Promise(async(n,a)=>{let i=!1;e.onsuccess=async()=>{if(!i)if(t)try{n(await t(e.result))}catch(e){a(e)}finally{e.result?.close?.()}else n(e.result)},e.onblocked=e.onerror=e=>a(e.target.error),r&&(e.onupgradeneeded=async t=>{try{await r(e.result,t,e)}catch(e){i=!0,a(e)}})})}(indexedDB.open(n,s),a,i)}function K(e,t,r){return function(e,t,...r){return new Promise(async(n,a)=>{let i=0;const s=F(r[i])?r[i++]:void 0,o=r[i]?"readwrite":"readonly",c=e instanceof IDBDatabase?e:await N(e),u=Array.isArray(t)?t:[t],h=c.transaction(u.map(e=>M(e)?e:e.store),o),d=u.map(e=>{if(M(e))return h.objectStore(e);const{store:t,index:r}=e;return M(r)?h.objectStore(t).index(r):h.objectStore(t)});if(s)try{n(await s(...d)),o?h.commit():h.abort()}catch(e){h.abort(),a(e)}finally{c.close()}else n(d)})}(e,t,r,!1)}function T(e){return new Promise(async(t,r)=>{e.onsuccess=()=>{try{t(e.result)}catch(e){r(e)}},e.onerror=e=>r(e.target.error)})}(q=A||(A={}))[q.True=1]="True",q[q.False=0]="False";const V=e=>e instanceof Date?e.getTime():e;function W(e){if(B(e))return e;const t=e;if("lt"in t&&"gt"in t){if(V(t.gt)>V(t.lt))throw new Error(`Invalid IDBRange: gt (${t.gt}) cannot be greater than lt (${t.lt})`);return IDBKeyRange.bound(t.gt,t.lt,!0,!0)}if("lt"in t&&"gte"in t){if(V(t.gte)>V(t.lt))throw new Error(`Invalid IDBRange: gte (${t.gte}) cannot be greater than lt (${t.lt})`);return IDBKeyRange.bound(t.gte,t.lt,!1,!0)}if("lte"in t&&"gt"in t){if(V(t.gt)>V(t.lte))throw new Error(`Invalid IDBRange: gt (${t.gt}) cannot be greater than lte (${t.lte})`);return IDBKeyRange.bound(t.gt,t.lte,!0,!1)}if("lte"in t&&"gte"in t){if(V(t.gte)>V(t.lte))throw new Error(`Invalid IDBRange: gte (${t.gte}) cannot be greater than lte (${t.lte})`);return IDBKeyRange.bound(t.gte,t.lte,!1,!1)}return"lt"in t?IDBKeyRange.upperBound(t.lt,!0):"lte"in t?IDBKeyRange.upperBound(t.lte,!1):"gt"in 
t?IDBKeyRange.lowerBound(t.gt,!0):"gte"in t?IDBKeyRange.lowerBound(t.gte,!1):void 0}function z(e){return null!=W(e)}async function _(e,t,r){const{query:n,direction:a="prev",preSkip:i,startKey:s,startPrimaryKey:o}=t,c=e.openCursor(W(n),a);return i&&(await T(c)).advance(i),s&&(o?(await T(c)).continuePrimaryKey(s,o):(await T(c)).continue(s)),c}function J(e,t,r){return Array.isArray(e)?t=>function(e,t,r){if(r?.length)for(const n of r)t.hasOwnProperty(n)&&(e[n]=t[n]);else Object.assign(e,t);return e}({},t,e):y(e)?e:t?e=>e:r?(e,t)=>[e,t]:(e,t)=>({key:e,value:t})}class U{idbPro;target;#e;constructor(e,t){this.idbPro=t,this.#e=e.storeSchema,this.target=e.target,d(e.target)&&(this.tx=this.#t)}get storeName(){return this.nativeStore?.name||this.target.store}get storeSchema(){if(this.#e)return this.#e;const e=this.idbPro.getStoreSchema(this.storeName);return Object.isFrozen(e)&&(this.#e=e),e}get factory(){return this.idbPro.factory}get nativeStore(){const{target:e}=this;return e instanceof IDBObjectStore?e:e instanceof IDBIndex?e.objectStore:void 0}get keyPath(){const{target:e}=this;if(d(e))return e.keyPath;const{storeSchema:t}=this,{index:r}=e;return r?t.indexSchemas.find(e=>e.name===r)?.keyPath:t.keyPath}async forEach(e,t){return e=y(e)?{fn:e}:e,t?this.cursorResult(e,!1):this.cursorVoid(e,!1)}async tx(e,t,r){let{target:n}=this;const{store:a,index:i}=n,s=await this.idbPro.openNativeDb();let o,c;try{o=s.transaction(a,!0===e?"readwrite":"readonly"),n=c=o.objectStore(a),i&&(n=n.index(i))}catch(e){throw s.close(),e}if(!t)return Object.freeze({db:s,tx:o,nativeStore:c,target:n});try{if(!0===e){const e=await t(n,c);return o.commit(),e}return t(n)}catch(e){throw!1!==r&&o.abort(),e}finally{s.close()}}openCursor(e,t){return this.tx(t,t=>new Promise(async(r,n)=>{const{fn:a}=e,i=await _(t,e);i.onsuccess=async()=>{i.result?!1===await a(i.result)&&r():r()},i.onerror=()=>n(i.error)}))}cursorVoid({query:r,direction:n,preSkip:i,startKey:s,startPrimaryKey:c,fn:h},d){let f=0;return 
this.openCursor({query:r,direction:n,preSkip:i,startKey:s,startPrimaryKey:c,fn:async r=>{const{value:n,primaryKey:i}=r,s=await h(n,i,f++),{control:c,key:l,primaryKey:y,modify:m,value:p}=w(s)?s:{control:s};switch(d&&(m===e?r.update(p||n):m===t&&r.delete()),c){case a:return!1;case o:r.continue(l);break;case u:r.continuePrimaryKey(l,y);break;default:r.continue()}}},!0)}async cursorResult({query:r,direction:n,preSkip:s,startKey:d,startPrimaryKey:f,fn:l,mapper:y},m){const{keyPath:p,defaultGetMapper:g}=this.storeSchema,w=J(y||g,p),b=[];let S=0;return await this.openCursor({query:r,direction:n,preSkip:s,startKey:d,startPrimaryKey:f,fn:async r=>{const{value:n,primaryKey:s}=r,{control:d,value:f,key:y,primaryKey:p,modify:g}=await(l?.(n,s,S,b))||{};switch(m&&(g===e?r.update(f||n):g===t&&r.delete()),(!d||d===i||d===c||d===h)&&b.push(w(f||n,p||s,S)),d){case a:case i:return!1;case c:case o:r.continue(y);break;case h:case u:r.continuePrimaryKey(y,p);break;default:r.continue()}S++}},m),b}#t(e,t){const{target:r}=this,n=r instanceof IDBObjectStore?r:r.objectStore;return t?t(r,n):Object.freeze({nativeStore:n,target:r})}}class G{idbPro;schemas;#r;constructor(e,t){this.idbPro=e,this.schemas=t}get storeNames(){return this.#r||(this.#r=Array.from(new Set(this.schemas.map(e=>e.target.store))))}read(e){return this.tx("newReader",e)}write(e,t=!0){return this.tx("newWriter",e,!0,t)}export(){return this.tx("newReader",async(...e)=>{const t={};for(const r of e)t[r.storeName]=await r.export();return t})}import(e,t,r){return r||(r="addOrChangeMany"),this.tx("newWriter",async(...n)=>{const a={};n=Array.from(new Map(n.map(e=>[e.storeName,e.asStore(!0)])).values());for(const i of n){const{storeName:n}=i,s=e[n];s&&(a[n]=await i[r](s,t))}if(t)return a},!0)}async tx(e,t,r,n){const{idbPro:a,schemas:i}=this,{factory:s}=a.schema,o=await a.openNativeDb();try{const c=o.transaction(this.storeNames,r?"readwrite":"readonly");try{const n=i.map(({storeSchema:t,target:r})=>{let n=c.objectStore(r.store);return 
r.index&&(n=n.index(r.index)),s[e]({storeSchema:t,target:n},a)});if(r){const e=await t(...n);return c.commit(),e}return await t(...n)}catch(e){throw!1!==n&&c.abort(),e}}finally{o.close()}}}class L extends U{direction;query;writable;parser;endsWithNull;preSkip;startKey;startPrimaryKey;constructor(e,t,n){if(super(e,t),!n)return;const{parser:a}=n;this.direction=n.direction,this.query=n.query,this.writable=!!n.writable,this.endsWithNull=!!n.endsWithNull,this.preSkip=n.preSkip,this.startKey=n.startKey,this.startPrimaryKey=n.startPrimaryKey,a&&(this.parser=y(a)?a:r[a])}async*[Symbol.asyncIterator](){const{parser:e,writable:t,endsWithNull:r}=this,{db:n,tx:i,target:s}=await this.tx(t);try{const n=await _(s,this);let o;if(e)for(;o=await T(n);){const{control:t,value:r}=await e(o);if(t||(yield r),t===a)break;o.continue()}else{let e=!1;const t=()=>{e=!0};for(;!e&&(o=await T(n));)yield{cursor:o,end:t}}t&&i?.commit(),r&&!o&&(yield null)}finally{n?.close()}}}function X(e,t,r,n){const a=w(e)?e:{};return y(e)?a.fn=e:z(e)&&(a.query=e),y(t)?a.fn=t:t&&(a.direction=t),r&&(a.direction=r),n?a.limit=n:a.limit||(a.limit=1e3),a.maxEmptyChecks||(a.maxEmptyChecks=2e4),a}async function H(e,t,r){t.size||(t.size=100),t.nextSkip||(t.nextSkip=0);const{query:n,direction:a,total:i,maxEmptyChecks:s,fn:o}=t;return await e.batchRead(async e=>(i||(t.total=await e.count({query:n,direction:a,maxEmptyChecks:s,fn:o}),t.pages=Math.ceil(t.total/t.size)),t.total<1?{info:P(t),rows:[]}:o?await async function(e,t,r){t.maxEmptyChecks||(t.maxEmptyChecks=2e4);const{page:n,query:a,direction:i,nextSkip:s}=t,{keyPath:o}=e.storeSchema;return 1===n?await Q(e,await _(e.target,{query:a,direction:i}),t,o):r&&s&&n-r.page===1?await Q(e,await _(e.target,{query:a,direction:i,preSkip:s}),t,o):await Q(e,await _(e.target,{query:a,direction:i}),t,o,!0)}(e,t,r):await async function(e,t){const{keyPath:r}=e.storeSchema,{page:n,query:a,direction:i,size:s,mapper:o}=t,c=(n-1)*s,u=await 
e.filter({query:a,preSkip:c,direction:i,limit:s,mapper:J(o,r,!0)});return t.nextSkip=c+u.length,{info:P(t),rows:u}}(e,t)))}async function Q({storeSchema:{defaultGetMapper:e}},t,r,n,a){const{page:i,size:s,total:o,maxEmptyChecks:c,fn:u}=r,h=J(r.mapper||e,n,!0),d=[],f=(i-1)*r.size;if(f>=o)return{info:P(r),rows:[]};let l=0;return await new Promise(async(e,r)=>{let n=0,i=0;const o=async()=>{const{result:r}=t;if(!r)return e();let{value:a,primaryKey:i}=r;if(await u(a,i,l)?(n=0,d.push(h(a,i,l))):n++,d.length>=s||n>=c)return e();l++,r.continue()};t.onerror=()=>r(t.error),t.onsuccess=a?async()=>{const{result:r}=t;if(!r)return e();let{value:a,primaryKey:s}=r;if(await u(a,s,l)?(n=0,i++):n++,i>=f||n>=c)return l=n=0,t.onsuccess=o,void r.continue();r.continue(),l++}:o}),l&&(r.nextSkip+=l+1),{info:P(r),rows:d}}class Y extends U{all(e,t){const r={};return p(t)&&(r.limit=t),z(e)?r.query=e:w(e)&&Object.assign(r,e),this.tx(!1,e=>T(e.getAll(W(r.query),r.limit||1e3)))}async count(e,t,r){const n=X(e,t,r),{query:a,direction:i,fn:s}=n;if(!s)return await this.tx(!1,e=>T(e.count(W(a))));const{maxEmptyChecks:o}=n,c=s;let u=0,h=0,d=0;return await this.openCursor({query:a,direction:i,fn:e=>{if(c(e.value,e.primaryKey,d++))u++,h=0;else if(++h>=o)return!1;e.continue()}}),u}get(e){return this.tx(!1,t=>T(t.get(W(e))))}getMany(e,t){return this.batchRead(async r=>{const n=await k(e,e=>r.get(e));return t?n.filter(e=>e):n})}getRange(e,t){return this.forEach({query:e,direction:t},!0)}async getRangeMany(e,t,r){return(await this.batchRead(r=>k(e,e=>r.getRange(e,t)))).flat()}index(e,t){let{target:r}=this;return r instanceof IDBIndex&&(r=r.objectStore),r instanceof IDBObjectStore?(r=r.index(e),this.createOperator(r,t)):this.idbPro.store(r.store,e)}asStore(e){let{target:t}=this;if(t instanceof IDBObjectStore)return this;if(t instanceof IDBIndex)t=t.objectStore;else{if(!("index"in t))return this;t={store:t.store}}return this.createOperator(t,e)}batchRead(e){return 
d(this.target)?e(this):this.tx(!1,t=>e(this.idbPro.schema.factory.newReader({storeSchema:this.storeSchema,target:t},this.idbPro)))}iterator(e,t){const r={};return g(t)&&(r.direction=t),z(e)?r.query=e:w(e)&&Object.assign(r,e),new L({storeSchema:this.storeSchema,target:this.target},this.idbPro,{...r,parser:"value"})}async filter(e,t,r,n){const o=X(e,t,r,n),{maxEmptyChecks:c,limit:u,fn:h}=o,{keyPath:d,defaultGetMapper:f}=this.storeSchema,l=J(o.mapper||f,d,!0);if(!h)return await this.forEach({...o,mapper:l,fn:(e,t,r)=>{if(r>=u-1)return{control:i}}},!0);let y=0;return this.forEach({...o,mapper:l,fn:(e,t,r,n)=>(e=l(e,t,r),h(e,t,r)?(y=0,n.length>=u-1?{control:i}:void 0):++y>=c?{control:a}:{control:s})},!0)}async find(e,t,r){const[n]=await this.filter(e,t,r,1);return n}async page(e,t){const r={...e};return r.page=t||e?.page||1,H(this,r,e)}nextPage(e){return this.page(e,e.page+1)}export(e,t){const r=X(e,t),{keyPath:n,exportMapper:a,name:i,defaultGetMapper:s}=this.storeSchema;if(!n&&Array.isArray(a))throw new Error(`When store [ ${i} ] keyPath does not exist, exportMapper does not support string[].`);return r.direction="next",r.mapper=J(a||s,n,!0),r.limit=r.maxEmptyChecks=Number.MAX_SAFE_INTEGER,this.filter(r)}asMap(){let{target:e}=this;return e instanceof IDBIndex?e=e.objectStore:e instanceof IDBObjectStore||(e={store:e.store}),this.factory.newDbMap({target:e},this.idbPro)}createOperator(e,t){const{idbPro:r}=this,n=this.storeSchema,a={storeSchema:Object.isFrozen(n)?n:void 0,target:e};return t?this.factory.newWriter(a,r):this.factory.newReader(a,r)}}function Z(e,t){if(!Array.isArray(e))return{[e]:t};const r={},n=e;for(let e=0;e<n.length;e++)r[n[e]]=t[e];return r}function ee(e,t){if(!Array.isArray(e))return t[e];const r=[];for(const n of e){if(!t[n])return;r.push(t[n])}return r}function te(e,t){if(!t||!(t instanceof Object)||Array.isArray(t))return t;const{addedTimeField:r,updatedTimeField:n,updatedCountField:a,softDeletedField:i}=e;return 
t={...t},r?.name&&!p(t[r.name])&&(t[r.name]=Date.now()),n?.name&&!p(t[n.name])&&(t[n.name]=Date.now()),i?.name&&!p(t[i.name])&&(t[i.name]=0),a?.name&&(t[a.name]=0),t}function re(e,t,r){if(!t||!(t instanceof Object)||Array.isArray(t))return t;const{updatedTimeField:n,updatedCountField:a}=e;return t={...r,...t},n.name&&(t[n.name]=Date.now()),a.name&&(t[a.name]=(t[a.name]||0)+1),t}class ne extends Y{add(e){return this.changeByPk({record:e,fn:async(e,t,r,n,a)=>{const{storeSchema:i}=this;return r=te(i,r),a?[{...r,...Z(a,await T(e.add(r)))},t]:[r,await T(e.add(r,t))]}})}addMany(e,t){return this.batchWrite(r=>t?k(e,e=>r.add(e)):v(e,e=>r.add(e)),!0)}addOrSkip(e){return this.batchWrite(async t=>{const{keyPath:r,defaultGetMapper:a}=t.storeSchema,{key:i,value:s}=r?{key:ee(r,e),value:e}:n(e);if(i){const e=await T(t.nativeStore.get(i));if(e)return J(a,r)?.(e,i)}if(t.target instanceof IDBIndex){const{keyPath:r}=t,n=ee(r,s);if(!n)return t.add(e);let a=await t.find(n);if(a)return a}return t.add(e)})}addOrSkipMany(e,t){return this.batchWrite(r=>t?k(e,e=>r.addOrSkip(e)):v(e,e=>r.addOrSkip(e)),!0)}replace(e){return this.changeByPk({record:e,getOld:!0,fn:async(e,t,r,n,a)=>{const{storeSchema:i}=this,{updatedTimeField:s,updatedCountField:o,addedTimeField:c}=i;return r=n?re(i,r,{[s.name]:n[s.name],[o.name]:n[o.name],[c.name]:n[c.name]}):te(i,r),a?[{...r,...Z(a,await T(e.put(r)))},t]:[r,await T(e.put(r,t))]}})}replaceMany(e,t){return this.batchWrite(r=>t?k(e,e=>r.replace(e)):v(e,e=>r.replace(e)),!0)}change(e,t){return this.changeByPk({record:e,getOld:!0,requiredOld:t,requiredPk:t,fn:async(e,t,r,n,a)=>{const{storeSchema:i}=this;if(n)return r=re(i,r,n),a?[{...r,...Z(a,await T(e.put(r)))},t]:[r,await T(e.put(r,t))]}})}changeMany(e,t){const{returns:r,throwIfMissing:n}=m(t)?{returns:t}:t||{};return this.batchWrite(t=>r?k(e,e=>t.change(e,n)):v(e,e=>t.change(e,n)),!0)}addOrChange(e){return this.changeByPk({record:e,getOld:!0,fn:async(e,t,r,n,a)=>{const{storeSchema:i}=this;return 
n?(r=re(i,r,n),a?[{...r,...Z(a,await T(e.put(r)))},t]:[r,await T(e.put(r,t))]):(r=te(i,r),a?[{...r,...Z(a,await T(e.add(r)))},t]:[r,await T(e.add(r,t))])}})}addOrChangeMany(e,t){return this.batchWrite(r=>t?k(e,e=>r.addOrChange(e)):v(e,e=>r.addOrChange(e)),!0)}delete(e,t){return this.changeByPk({pk:e,getOld:t,fn:(e,r,n,a)=>{if(e.delete(r),t)return[a,r]}})}deleteMany(e,t){return this.batchWrite(r=>t?k(e,e=>r.delete(e,!0)):v(e,e=>r.delete(e)),!0)}deleteRange(r,n){const{returns:a,physical:i,direction:s}=m(n)?{returns:n}:n||{},{name:o}=this.storeSchema.softDeletedField||{};return this.cursor({query:r,direction:s,fn:r=>i||!o?{modify:t}:(r[o]=1,{modify:e})},a)}deleteRangeMany(e,t){const r=m(t)?{returns:t}:t;return this.batchWrite(t=>r?.returns?k(e,e=>t.deleteRange(e,r)):v(e,e=>t.deleteRange(e,r)),!0)}changeRange(t,r){let{direction:n="next",query:a,newValue:i}="newValue"in t?t:{newValue:t};if(a||(a=ee(this.keyPath,i)),!a)throw new Error(`query is required:${JSON.stringify(t)}`);return this.cursor({query:a,direction:n,fn:t=>t instanceof Object?{modify:e,value:{...t,...i}}:{modify:e,value:i}},r)}changeRangeMany(e,t){return this.batchWrite(r=>t?k(e,e=>r.changeRange(e,t)):v(e,e=>r.changeRange(e,t)),!0)}cursor(e,t){return(e=y(e)?{fn:e}:e||{}).fn?t?this.cursorResult(e,!0):this.cursorVoid(e,!0):new L(this,this.idbPro)}batchWrite(e,t){const{target:r}=this;if(d(r))try{return e(this)}catch(e){throw!1!==t&&(r instanceof IDBIndex?r.objectStore:r).transaction.abort(),e}return this.tx(!0,t=>e(this.idbPro.schema.factory.newWriter({storeSchema:this.storeSchema,target:t},this.idbPro)),t)}changeByPk({pk:e,record:t,fn:r,requiredPk:a,getOld:i,requiredOld:s,saveMapper:o,getMapper:c}){const{storeSchema:u}=this,{keyPath:h,defaultSaveMapper:d,defaultGetMapper:f}=u;return t&&(o||d)&&(t=J(o||d,h)?.(t)),this.batchWrite(async o=>{let u,d=t;if(e)u=W(e);else if(h)u=ee(h,t);else{const{key:e,value:r}=n(t);u=e,d=r}if(a&&!u)throw new Error(`key is required: ${JSON.stringify(t)}`);const l=u&&(i||s)?await 
T(o.nativeStore.get(u)):void 0;if(s&&!l)throw new Error(`record not found: ${JSON.stringify(t)}`);const y=await r(o.nativeStore,u,d,l,h);if(y)return J(c||f,h)?.(y[0],y[1])})}}class ae extends U{get size(){return this.tx(!1,e=>T(e.count()))}delete(e){return this.tx(!0,async(t,r)=>{r.delete(e)})}batch(e){const{idbPro:t,storeSchema:r,factory:n}=this;return this.tx(!0,async(a,i)=>await e(n.newDbMap({storeSchema:r,target:i},t)))}asStore(e){const{factory:t}=this.idbPro.schema;return e?t.newWriter(this,this.idbPro):t.newReader({target:this.target},this.idbPro)}entries(){return new L({target:this.target},this.idbPro,{parser:"keyValue"})}async get(e,t){return await this.tx(!1,t=>T(t.get(e)))||t}getMany(e){return this.tx(!1,async t=>{const r=[];for(const n of e)r.push(await T(t.get(n)));return r})}async has(e){return!!await this.get(e)}keys(){return new L({storeSchema:this.storeSchema,target:this.target},this.idbPro,{parser:"key"})}set(e,t){return this.tx(!0,async(r,n)=>{await T(n.put(t,e))})}setMany(e){return this.tx(!0,async(t,r)=>{for(const[t,n]of e)r.put(n,t)})}values(){return new L({storeSchema:this.storeSchema,target:this.target},this.idbPro,{parser:"value"})}}function ie(e,t){if(e==t)return!0;if(typeof e!=typeof t)return!1;if(Array.isArray(e)&&Array.isArray(t)){if(e.length!==t.length)return!1;for(let r=0;r<e.length;r++)if(e[r]!==t[r])return!1;return!0}return!1}const se=({stores:e,schema:t})=>{const r=t.storeSchemas;let n="";for(const t of e){const e=r.find(e=>e.name===t.name);if(e){if(!ie(t.keyPath,e.keyPath)){n=`store [ ${t.name} ] keyPath not equal,schema.keyPath:${e.keyPath},store.keyPath:${t.keyPath}[]`;break}if(!t.autoIncrement!=!e.autoIncrement){n=`store [ ${t.name} ] autoIncrement not equal`;break}}}return!n||`The existing database is inconsistent with the definition and cannot be corrected: ${n}`},oe=async e=>{let t=se(e);return g(t)||(t=ce(e)),t},ce=({stores:e,schema:t})=>{const r=t.storeSchemas,n=e.map(e=>e.name);let a="";const 
i=r.map(e=>e.name).filter(e=>!n.includes(e));if(i.length)a=`store [ ${i.join(",")} ] not exist`;else for(const t of e){const e=r.find(e=>e.name===t.name);if(e&&(a=ue(t,Array.from(t.indexNames),e.indexSchemas),a))break}return!a||`The existing database Store index is inconsistent with the definition and requires a database version upgrade to be fixed: ${a}`};function ue(e,t,r){if(t.length!==r.length)return`store [ ${e.name} ] index count not equal`;for(const n of t){const t=r.find(e=>e.name===n);if(!t)return`store [ ${e.name} ] index [ ${n} ] not exist`;const a=e.index(n);if(!t.unique!=!a.unique)return`store [ ${e.name} ] index [ ${n} ] unique not equal`;if(!t.multiEntry!=!a.multiEntry)return`store [ ${e.name} ] index [ ${n} ] multiEntry not equal`;if(!ie(t.keyPath,a.keyPath))return`store [ ${e.name} ] index [ ${n} ] keyPath not equal`}return""}class he{upgradeContext;storeSchema;nativeStore;#n;constructor(e,t,r){this.upgradeContext=e,this.storeSchema=t,this.nativeStore=r}get writer(){return this.#n||(this.#n=this.upgradeContext.dbSchema.factory?.newWriter({target:this.nativeStore,storeSchema:this.storeSchema}))}add(e,t,r){return this.upgradeContext.versionIn(e)?this.writer.addMany(t,r):Promise.resolve()}addOrChange(e,t,r){return this.upgradeContext.versionIn(e)?this.writer.addOrChangeMany(t,r):Promise.resolve()}async call(e,t){if(this.upgradeContext.versionIn(e))return await t(this.writer,this.upgradeContext)}replace(e,t,r){return this.upgradeContext.versionIn(e)?this.writer.replaceMany(t,r):Promise.resolve()}}const de=Object.freeze({newDataOperators:(e,t)=>new G(t,e),newDbMap:(e,t)=>new ae(e,t),newReader:(e,t)=>new Y(e,t),newStoreUpgradeable:(e,t,r)=>new he(r,t,e),newWriter:(e,t)=>new ne(e,t)});function fe(e,t,r,n){const a=function(e,t,r){if(!1===e)return!1;if(g(e))return{name:e};if(w(e)){const r=e;return m(r.name)&&(r.name=t),e}if(!0===e||r)return{name:t};return!1}(t,r,n);if(!a)return a;const 
i=a;if(!1!==i.isIndexed){i.isIndexed||(i.isIndexed=!0);const{name:t}=i,r=e.indexSchemas;r.some(e=>e===t||e.name===t)||r.push(t)}return i}function le(e,t){let r=g(e)?{name:e}:e;return t&&(r={...t,...r}),r.indexSchemas||(r.indexSchemas=[]),function(e){Object.isFrozen(e)||(e.addedTimeField=fe(e,e.addedTimeField,"added_at",!0),e.updatedTimeField=fe(e,e.updatedTimeField,"updated_at",!0),e.updatedCountField=fe(e,e.updatedCountField,"updated_count",!1),e.softDeletedField=fe(e,e.softDeletedField,"deleted",!1))}(r),Object.isFrozen(r)||(r.indexSchemas=r.indexSchemas.map(ye)),function(e){if(!e.keyPath&&e.defaultData?.length)for(const t of e.defaultData)if(!Array.isArray(t)&&!("value"in t))throw new Error(`When \`defaultData\` must contain \`value\` fields or be an array:${JSON.stringify(t)}`)}(r),r}function ye(e){const t=g(e)?{name:e}:e;return t.keyPath||(t.keyPath=t.name),t}const me=e=>{const{versionDiffValidate:t,versionSameValidate:r,factory:n}=e;return e.storeSchemas||(e.storeSchemas=[]),e.storeTemplate||(e.storeTemplate={...f}),!t&&!1!==t&&(e.versionDiffValidate=se),!r&&!1!==r&&(e.versionSameValidate=oe),n?n!==de&&(e.factory={...de,...n}):e.factory=de,e.storeSchemas=e.storeSchemas.map(t=>le(t,e.storeTemplate)),e};function pe(e,t){(function(e,t){return e.unique===t.unique&&e.multiEntry===t.multiEntry&&ie(t.keyPath,e.keyPath)})(e.index(t.name),t)||(e.deleteIndex(t.name),ge(e,t))}function ge(e,t){try{e.createIndex(t.name,t.keyPath,{unique:t.unique,multiEntry:t.multiEntry})}catch{throw new Error(`store [ ${e.name} ] index [ ${t.name} ] create error: ${JSON.stringify(t)}`)}}function we(e,t){return e.database.objectStoreNames.contains(t.name)?function(e,t){const{indexSchemas:r}=e,n=t.indexNames,a=r.map(e=>e.name);for(const e of Array.from(n))a.includes(e)||t.deleteIndex(e);for(const e of r)n.contains(e.name)?pe(t,e):ge(t,e);return t}(t,e.transaction?.objectStore(t.name)):function(e,t){const 
r=t.createObjectStore(e.name,{keyPath:e.keyPath,autoIncrement:e.autoIncrement});for(const t of e.indexSchemas)ge(r,t);return r}(t,e.database)}class be{database;newVersion;oldVersion;dbSchema;transaction;#a={};constructor(e){this.database=e.database,this.newVersion=e.newVersion,this.oldVersion=e.oldVersion,this.dbSchema=e.dbSchema,this.transaction=e.transaction}deleteStoreIfExists(e){const t=this.database;t.objectStoreNames.contains(e)&&t.deleteObjectStore(e)}destroy(){try{!function(...e){for(const t of e)if(t)for(const e of Object.keys(t))try{t[e]?.destroy?.()}catch{}finally{delete t[e]}}(this.#a)}finally{for(const e of Object.keys(this.#a))delete this.#a[e]}}store(e){if(e in this.#a)return this.#a[e];const{factory:t}=this.dbSchema,{storeSchemas:r}=this.dbSchema,n=r.find(t=>t.name===e),a=we(this,n);return this.#a[e]=t.newStoreUpgradeable(a,n,this)}versionIn({oldMin:e,oldMax:t,newMax:r,newMin:n}){if(void 0===t&&void 0===r&&void 0===e&&void 0===n)throw new Error(`versionIn bounds must not be empty ${JSON.stringify({oldMax:t,newMax:r,oldMin:e,newMin:n})}`);if(t<e)throw new Error(`oldMax (${t}) cannot be less than oldMin (${e})`);if(r<n)throw new Error(`newMax (${r}) cannot be less than newMin (${n})`);const{oldVersion:a,newVersion:i}=this;return!(void 0!==e&&a<e||void 0!==t&&a>t||void 0!==n&&i<n)&&!(void 0!==r&&i>r)}}function Se(e,t){let{store:r,index:n}=e;const{storeTemplate:a}=t,{storeSchemas:i=[]}=t,s=g(r)?r:r.name,o=i.findIndex(e=>e===s||e.name===s),c=o>-1&&i[o];let u;u=g(r)?c||s:!c||g(c)||r===c?r:{...c,...r},(n||c?.indexSchemas?.length||r?.indexSchemas?.length)&&(g(u)&&(u={name:u}),u.indexSchemas=function(e,t,r){r&&t.push(r);for(const r of t){const t=g(r)?r:r.name,n=e.findIndex(e=>e===t||e.name===t);if(n>-1){const t=e[n];g(t)?e[n]=r:g(r)||(e[n]=Object.assign(t,r))}else e.push(r)}return e}(c.indexSchemas||[],r.indexSchemas||[],n));const h=le(u,a);o>-1?i[o]=h:i.push(h),t.storeSchemas=i;const d={store:s};return n&&(d.index=g(n)?n:n.name),{target:d}}const 
xe=Object.assign({name:"",addedTimeField:!1,autoIncrement:!1,indexSchemas:[],keyPath:void 0,softDeletedField:!1,updatedCountField:!1,updatedTimeField:!1});class ve{static#i;#s;#o;#c={};constructor(e,t){this.#s=e=g(e)?{name:e}:e,Array.isArray(e.storeSchemas)||(e.storeSchemas=[]),w(e.storeTemplate)||(e.storeTemplate=f),t&&(this.#o=!0)}static get defaultDb(){return this.#i||(this.#i=new ve("gs-idb"))}get initialized(){return Object.isFrozen(this.#s)}get schema(){return this.#s}get storeNames(){return Array.from(new Set(this.#s.storeSchemas.map(e=>g(e)?e:e.name)))}get factory(){return this.#s.factory||(this.#s.factory=de)}static releaseDefaultDB(){this.#i=void 0}static async openExistDb(e){const{generateDbSchema:t}=await Promise.resolve().then(function(){return je});return new ve(await t(e))}static store(e,t){return ve.defaultDb.store(e,t)}static stores(e){return ve.defaultDb.stores(e)}static map(e,t){return ve.defaultDb.map(e,t)}async openNativeDb(){const e=this.#s=P(this.initSchema());await this.#u();const{name:t,version:r}=e;return await N({name:t,version:r,onupgradeneeded:(t,r,n)=>async function(e,t,r,n){const{storeSchemas:a,beforeUpgrade:i,afterUpgrade:s,version:o}=e,{newVersion:c=o,oldVersion:u}=r,{transaction:h}=n,d=new be({database:t,newVersion:c,oldVersion:u,dbSchema:e,transaction:h});try{const e=[];if(y(i))try{await i(d)}catch(t){e.push(t)}for(const t of a.map(e=>e.name))try{d.store(t)}catch(t){e.push(t)}for(const{name:t,defaultData:r}of a)if(r)try{await d.store(t).add({oldMax:0},r)}catch(t){e.push(t)}for(const{name:t,versionData:r}of a)if(r)for(const{version:n,data:a,use:i="addOrChange"}of r)try{await d.store(t)[i](n,a)}catch(t){e.push(t)}for(const{name:t,storeDefined:r}of a)try{await(r?.(d.store(t)))}catch(t){e.push(t)}if(y(s))try{await s(d)}catch(t){e.push(t)}if(!e.length)return;throw 1===e.length?e[0]:new AggregateError(e,"Database upgrade error")}finally{!function(e){try{e?.destroy?.()}catch{}}(d)}}(e,t,r,n)})}store(e,t){const 
r=e.store?e:{store:e};return r.store||(r.store=e),r.index||(r.index=t),this.factory.newWriter(Se(r,this.schema),this)}stores(e){const{schema:t}=this,r=e.map(e=>Se(g(e)?{store:e}:e,t));return this.factory.newDataOperators(r,this)}initSchema(){if(this.initialized)return this.#s;const{validateSchemaWithDefaults:e=me}=this.#s;return this.#s=e(this.#s)}async traceSchema(e){await async function(...e){const t=globalThis.top?.console?.log||console.log;e.push(t),await j(...e)}(this.schema,e)}map(e,t){const r=g(e)?e:g(t)?t:"gs-storage-map";Array.isArray(e)&&(t=e);const{storeSchemas:n}=this.schema;return n.find(e=>e.name===r||e===r)||n.push({...xe,name:r,defaultData:t}),this.factory.newDbMap({target:{store:r}},this)}export(){return this.stores(this.storeNames).export()}import(e,t,r){return this.stores(this.storeNames).import(e,t,r)}getStoreSchema(e){if(e in this.#c)return this.#c[e];const t=this.schema.storeSchemas.findIndex(t=>t===e||t.name===e);let r=this.schema.storeSchemas[t];return this.initialized?this.#c[e]=r:g(r)&&(this.schema.storeSchemas[t]=r={name:r}),r}async#u(){let e=this.#o;if(void 0===e&&(e=this.#o=await async function(e){const{name:t,version:r}=e,n=await C(t);if(!n)return!0;const{versionDiffValidate:a,versionSameValidate:i}=e,s=await N(t);try{if(e.version<s.version)return"The existing database version is greater than the current version";const o=void 0===e.version||s.version===e.version?i:a;if(!o)return!0;const c=Array.from(s.objectStoreNames);if(c.length<1)return`The existing database [ ${t} ] is empty`;const u=await K(s,c),h=await o({schema:e,db:s,stores:u});if(g(h)||r!==n.version)return h}finally{s?.close()}return!0}(this.#s)),!0===e)return!0;if(g(e))throw new Error(e)}}function ke(e,t){return ve.defaultDb.store(e,t)}function De(e,t){return ve.defaultDb.map(e,t)}function Pe(e){return ve.defaultDb.stores(e)}function $e(){ve.releaseDefaultDB()}function Oe(e,t){const 
r=Array.from(e.indexNames).map(t=>e.index(t)),n=[],a=[];for(const{name:e,keyPath:t,unique:i,multiEntry:s}of r)Array.isArray(t)?a.push(...t):a.push(t),n.push({name:e,keyPath:t,unique:i,multiEntry:s});const i=new Set(a);return{indexSchemas:n,addedTimeField:Ee(t.addedTimeField,i),updatedTimeField:Ee(t.updatedTimeField,i),updatedCountField:Ee(t.updatedCountField,i),softDeletedField:Ee(t.softDeletedField,i)}}function Ee(e,t){return!!t.has(e)&&{name:e,isIndexed:!1}}async function Ie(e,t){if(!await C(e))throw new Error(`db [ ${e} ] not exist`);let{asString:r,specialFields:n=l,dataExportTarget:a}=t||{};!0===r&&(r=160),isNaN(r)||r<1&&(r=1);let i=await function(e,t){return N(e,async function(e){const r=Array.from(e.objectStoreNames),n=e.transaction(r,"readonly");try{return{name:e.name,version:e.version,storeSchemas:r.map(e=>function(e,t){const{name:r,keyPath:n,autoIncrement:a}=e;return{name:r,keyPath:n,autoIncrement:a,...Oe(e,t)}}(n.objectStore(e),t))}}finally{n.abort()}})}(e,n);return a&&await async function(e,t){const r=await new ve(D(e),!0).export();for(const n of e.storeSchemas){const a=r[n.name];a?.length&&("defaultData"===t?n.defaultData=a:"versionData"===t&&(n.versionData||(n.versionData=[]),n.versionData.push({version:{oldMax:e.version},data:a})))}return e}(i,a),r?await j({rootData$:i,spaceEffectiveLength:r}):i}var je=Object.freeze({__proto__:null,generateDbSchema:Ie});export{a as Break,s as Continue,o as ContinueKey,u as ContinuePrimaryKey,U as DataOperationBase,G as DataOperators,Y as DataReader,ne as DataWriter,L as DbIterator,r as DbIteratorParsers,ae as DbMap,t as Delete,i as Finished,ve as IDbPro,c as NextKey,h as NextPrimaryKey,e as Save,he as StoreUpgradeable,be as UpgradeContext,De as dbMap,ke as dbStore,Pe as dbStores,l as defaultSpecialFields,f as defaultStoreSchemaTemplate,Ie as generateDbSchema,z as isIDbQuery,d as isNativeTarget,n as parseDbNoneKeyPathRecord,W as parseIDbQuery,$e as releaseDefaultDB,me as validateSchemaWithDefaults,se as 
versionDiffValidate,oe as versionSameValidate};
|
|
1
|
+
/** Marker symbol: cursor callbacks return this via `modify` to persist the current record. */
const Save = Symbol("save");
/** Marker symbol: cursor callbacks return this via `modify` to delete the current record. */
const Delete = Symbol("delete");
/** Named cursor parsers selectable by string in DbIterator's `parser` option. */
const DbIteratorParsers = Object.freeze({
  /** Yield only the record's primary key. */
  key(cursor) {
    return { value: cursor.primaryKey };
  },
  /** Yield only the record's value. */
  value(cursor) {
    return { value: cursor.value };
  },
  /** Yield a `[primaryKey, value]` pair. */
  keyValue(cursor) {
    return { value: [cursor.primaryKey, cursor.value] };
  }
});
|
|
6
|
+
/**
 * Normalize a record for stores without a keyPath into `{ key, value }` form.
 * Accepts either a `[value, key]` tuple or any object carrying a `value`
 * property (an optional `key` rides along).
 * @throws {Error} when the input has no usable value.
 */
function parseDbNoneKeyPathRecord(record) {
  if (Array.isArray(record)) {
    const [value, key] = record;
    return { key, value };
  }
  if ("value" in record)
    return { key: record.key, value: record.value };
  throw new Error(`not include value in invalid DBRecord：${JSON.stringify(record)}`);
}
|
|
15
|
+
// Cursor-control marker symbols returned from iteration callbacks.
// Break/Finished stop the walk; Continue advances normally; the Key/PrimaryKey
// variants jump the cursor to a given position ("Next*" records the current
// row's result first, "Continue*" does not — see cursorResult's switch).
const Break = Symbol("break"), Finished = Symbol("finished"), Continue = Symbol("continue"), ContinueKey = Symbol("continue key"), NextKey = Symbol("next key"), ContinuePrimaryKey = Symbol("continue primary key"), NextPrimaryKey = Symbol("next primary key");
|
|
16
|
+
/**
 * Whether `target` is a live IndexedDB data source (object store or index)
 * rather than a plain `{ store, index }` descriptor object.
 */
function isNativeTarget(target) {
  if (target instanceof IDBObjectStore) return true;
  return target instanceof IDBIndex;
}
|
|
19
|
+
/**
 * Defaults merged into every store schema: auto-incrementing "id" keyPath
 * plus created/updated timestamp bookkeeping fields enabled.
 */
const defaultStoreSchemaTemplate = Object.freeze({
  keyPath: "id",
  autoIncrement: true,
  addedTimeField: true,
  updatedTimeField: true
});

/** Default property names used for the special bookkeeping fields. */
const defaultSpecialFields = Object.freeze({
  addedTimeField: "added_at",
  softDeletedField: "deleted",
  updatedCountField: "updated_count",
  updatedTimeField: "updated_at"
});
|
|
30
|
+
/** True when `t2` is callable (typeof check plus cross-realm `instanceof`). */
function t$1(t2) {
  return typeof t2 == "function" || t2 instanceof Function;
}
|
|
33
|
+
/** True when `t2` is an array (redundant checks survive cross-realm values). */
function n(t2) {
  if (Array.isArray(t2)) return true;
  return t2 instanceof Array || t2?.constructor === Array;
}
|
|
36
|
+
/** True when `t2` is a boolean primitive or a boxed Boolean. */
function r(t2) {
  return typeof t2 == "boolean" || t2 === true || t2 === false || t2 instanceof Boolean;
}
|
|
39
|
+
/** True when `t2` is a number primitive or a boxed Number (NaN included). */
function i(t2) {
  return t2 instanceof Number || typeof t2 == "number";
}
|
|
42
|
+
/** True when `t2` is a string primitive or a boxed String. */
function o$2(t2) {
  return t2 instanceof String || typeof t2 == "string";
}
|
|
45
|
+
/**
 * True-ish when `e` is a non-array, non-function object ("plain-ish" object
 * suitable for option bags). Falsy inputs are returned as-is, mirroring the
 * short-circuit semantics callers rely on only for truthiness.
 */
function c(e) {
  return e && !t$1(e) && !n(e) && (typeof e == "object" || e instanceof Object);
}
|
|
48
|
+
/** True when `e` is synchronously iterable (callable `Symbol.iterator`). */
function f(e) {
  if (e == null) return false;
  return t$1(e[Symbol.iterator]);
}
|
|
51
|
+
/** True when `e` is asynchronously iterable (callable `Symbol.asyncIterator`). */
function l$1(e) {
  if (e == null) return false;
  return t$1(e[Symbol.asyncIterator]);
}
|
|
54
|
+
/** True when `t2` looks like an iterator: non-null with a callable `next`. */
function h(t2) {
  if (t2 == null) return false;
  return typeof t2.next == "function";
}
|
|
57
|
+
/**
 * Sequentially await `e(item)` for every element of array-like `t2`,
 * preserving order (deliberately no parallelism).
 */
async function v(t2, e) {
  for (let idx = 0; idx < t2.length; idx++) {
    await e(t2[idx]);
  }
}
|
|
60
|
+
/**
 * Awaited, strictly-sequential map: resolves `t2` itself, then maps each
 * element through async `e(item, index, all)`.
 * @returns the array of resolved results.
 */
async function O(t2, e) {
  const all = await t2;
  const mapped = [];
  for (let idx = 0; idx < all.length; idx++) {
    mapped.push(await e(all[idx], idx, all));
  }
  return mapped;
}
|
|
65
|
+
/** Best-effort teardown: invoke `destroy()` on `t2` when present, swallowing errors. */
function x(t2) {
  try {
    t2?.destroy?.();
  } catch {
    // destruction failures are intentionally ignored
  }
}
|
|
71
|
+
/**
 * Destroy-and-clear every own property of each given container: calls
 * `prop.destroy()` when available (errors swallowed) and always deletes
 * the property, leaving each container empty. Falsy containers are skipped.
 */
function C(...containers) {
  for (const bag of containers) {
    if (!bag) continue;
    for (const key of Object.keys(bag)) {
      try {
        bag[key]?.destroy?.();
      } catch {
        // ignore individual destroy failures
      } finally {
        delete bag[key];
      }
    }
  }
}
|
|
79
|
+
/**
 * Deep-clone plain data: objects, arrays, Sets and Maps are copied
 * recursively; primitives, null, Dates, functions, RegExps and DOM Elements
 * are returned as-is (by reference for objects). Circular structures are
 * NOT handled and will recurse indefinitely.
 */
function L(t2) {
  const passthrough =
    typeof t2 != "object" ||
    t2 === null ||
    t2 instanceof Date ||
    t2 instanceof Function ||
    t2 instanceof RegExp ||
    (typeof Element < "u" && t2 instanceof Element);
  if (passthrough) return t2;
  if (t2 instanceof Set) {
    const copy = new Set();
    t2.forEach((item) => copy.add(L(item)));
    return copy;
  }
  if (t2 instanceof Map) {
    const copy = new Map();
    t2.forEach((val, key) => copy.set(L(key), L(val)));
    return copy;
  }
  if (Array.isArray(t2)) {
    const copy = [];
    for (const item of t2) copy.push(L(item));
    return copy;
  }
  const copy = {};
  for (const [key, val] of Object.entries(t2)) copy[key] = L(val);
  return copy;
}
|
|
100
|
+
/**
 * Copy properties of `src` onto `dst` and return `dst` (mutated).
 * With a non-empty `keys` list, only those own properties of `src` are
 * copied; otherwise all enumerable own properties are assigned
 * (Object.assign semantics).
 */
function P$1(dst, src, keys) {
  if (keys?.length) {
    for (const key of keys) {
      // FIX: call hasOwnProperty through Object.prototype so the check also
      // works for null-prototype sources and objects that shadow the method
      // (the original `src.hasOwnProperty(key)` throws on Object.create(null)).
      if (Object.prototype.hasOwnProperty.call(src, key)) dst[key] = src[key];
    }
  } else {
    Object.assign(dst, src);
  }
  return dst;
}
|
|
105
|
+
/**
 * Recursively freeze `t2` and every reachable own property that is an
 * unfrozen object or function. Freezing the parent before descending also
 * acts as the cycle guard (already-frozen nodes are skipped).
 * @returns the same (now frozen) `t2`.
 */
function R(t2) {
  Object.freeze(t2);
  for (const name of Object.getOwnPropertyNames(t2)) {
    const child = t2[name];
    const freezable = child !== null && (typeof child == "object" || typeof child == "function");
    if (freezable && !Object.isFrozen(child)) R(child);
  }
  return t2;
}
|
|
111
|
+
/**
 * Options holder for the async pretty-printer (`ot`/`st`).
 * Parses a loose positional argument list: an option bag carrying
 * `rootData$` may come first and seeds every field; remaining args are
 * matched by type — numbers → spaceEffectiveLength then space,
 * boolean → showFn, function → out sink, string → output type.
 */
class et {
  rootData$;             // the value to serialize
  space;                 // indent width (default 2)
  showFn;                // when true, function sources are rendered
  spaceEffectiveLength;  // total-length threshold below which output stays on one line (default 100)
  out;                   // output sink callback, or null to return the string
  type;                  // "js" (default) or "json" rendering mode
  ignoreCircularRef;     // when true, repeated circular refs serialize as undefined instead of throwing
  constructor(e) {
    let n2 = 0;
    // Option-bag first argument (must carry rootData$) seeds all fields;
    // otherwise the first positional argument is the root data itself.
    c(e[n2]) && e[n2].rootData$ && Object.assign(this, e[n2++]), n2 < 1 && (this.rootData$ = e[n2++]);
    const s2 = e.slice(n2), a = s2.filter(((t2) => i(t2)));
    // Each field is only defaulted when the option bag did not set it.
    // NOTE(review): showFn uses `!!s2.find(r)` — an explicit `false` argument
    // yields false here, which appears intended but is easy to miss.
    this.out === void 0 && (this.out = s2.find(((e2) => t$1(e2))) || null), this.showFn === void 0 && (this.showFn = !!s2.find(((t2) => r(t2)))), this.spaceEffectiveLength === void 0 && (this.spaceEffectiveLength = a[0] || 100), this.space === void 0 && (this.space = a[1] || 2), this.type === void 0 && (this.type = s2.find(((t2) => o$2(t2))) || "js"), this.ignoreCircularRef === void 0 && (this.ignoreCircularRef = !0);
  }
  /** Drop held references so the instance (and its data) can be GC'd after use. */
  destroy() {
    this.out = null, this.rootData$ = null;
  }
}
|
|
129
|
+
/**
 * Recursively serialize `e` (awaited first) into source-like text.
 * `n2` is an `et` options instance; `r2` is the Set of ancestor objects used
 * for circular-reference detection (added on entry, removed in `finally`).
 * Returns `undefined` for skipped values (e.g. repeated circular refs when
 * `ignoreCircularRef` is set); the "json" branch and the "js" branch are the
 * two inner async helpers, chosen by `n2.type`.
 */
async function nt(e, n2, r2) {
  // Scalar / leaf cases handled up-front.
  if (typeof (e = await e) == "string" || e instanceof Date) return JSON.stringify(e);
  if (e instanceof Function) return n2.showFn ? e.toString() : "undefined";
  if (typeof e != "object" || e === null) return e + "";
  if (e instanceof RegExp) return e.toString();
  if (typeof Element < "u" && e instanceof Element) return "undefined";
  if (r2.has(e)) {
    if (n2.ignoreCircularRef) return;
    throw new Error("circular reference");
  }
  r2.add(e);
  try {
    return n2.type === "json" ? await (async function(e2, n3, r3) {
      const i2 = [];
      // JSON mode: array-like / iterable values (except Map) render as arrays.
      if (Array.isArray(e2) || e2 instanceof Set || f(e2) && !(e2 instanceof Map)) {
        for (const o4 of e2) if (o4 !== void 0 && (!t$1(o4) || n3.showFn)) {
          const t2 = await nt(o4, n3, r3);
          t2 !== void 0 && i2.push(t2);
        }
        const { spaceStart: o3, spaceEnd: s3, arrJoin: a2 } = rt(i2, n3);
        return i2.length ? `[${o3}${a2()}${s3}]` : "[]";
      }
      // (A)sync iterators are drained the same way.
      if (l$1(e2) || h(e2)) {
        for await (const o4 of e2) if (o4 !== void 0 && (!t$1(o4) || n3.showFn)) {
          const t2 = await nt(o4, n3, r3);
          t2 !== void 0 && i2.push(t2);
        }
        const { spaceStart: o3, spaceEnd: s3, arrJoin: a2 } = rt(i2, n3);
        return i2.length ? `[${o3}${a2()}${s3}]` : "[]";
      }
      // Maps and plain objects render key/value pairs.
      let o2 = e2 instanceof Map ? e2.entries() : Object.entries(e2);
      for (const [e3, s3] of o2) if (s3 !== void 0 && (!t$1(s3) || n3.showFn)) {
        const t2 = await nt(s3, n3, r3);
        t2 !== void 0 && i2.push(`"${e3}": ${t2}`);
      }
      const { spaceStart: s2, spaceEnd: a, arrJoin: c2 } = rt(i2, n3);
      // NOTE(review): key/value pairs are wrapped in "[" "]" here — "{" "}"
      // looks intended for JSON objects; confirm before relying on the output.
      return i2.length ? `[${s2}${c2()}${a}]` : "[]";
    })(e, n2, r2) : await (async function(e2, n3, r3) {
      const i2 = [];
      // "js" mode: Sets render as `new Set([...])`.
      if (e2 instanceof Set) {
        for (const o4 of e2) (!t$1(o4) || n3.showFn) && i2.push(await nt(o4, n3, r3));
        const { spaceStart: o3, spaceEnd: s3, arrJoin: a2 } = rt(i2, n3);
        return i2.length ? `new Set([${o3}${a2()}${s3}])` : "[]";
      }
      if (e2 instanceof Map) {
        for (const [o4, s4] of e2) (!t$1(s4) || n3.showFn) && i2.push(`${it(await nt(o4, n3, r3))}: ${await nt(s4, n3, r3)}`);
        const { spaceStart: o3, spaceEnd: s3, arrJoin: a2 } = rt(i2, n3);
        // NOTE(review): this template appears to be missing the closing "})"
        // — the emitted text is unbalanced; confirm intended output shape.
        return i2.length ? `new Map(Object.entries({${o3}${a2()}${s3}})` : "new Map()";
      }
      if (Array.isArray(e2) || f(e2)) {
        for (const o4 of e2) (!t$1(o4) || n3.showFn) && i2.push(await nt(o4, n3, r3));
        const { spaceStart: o3, spaceEnd: s3, arrJoin: a2 } = rt(i2, n3);
        return i2.length ? `[${o3}${a2()}${s3}]` : "[]";
      }
      if (h(e2) || l$1(e2)) {
        for await (const o4 of e2) (!t$1(o4) || n3.showFn) && i2.push(await nt(o4, n3, r3));
        const { spaceStart: o3, spaceEnd: s3, arrJoin: a2 } = rt(i2, n3);
        return i2.length ? `[${o3}${a2()}${s3}]` : "[]";
      }
      // Plain objects: keys quoted only when not valid identifiers (see `it`).
      for (const [o3, s3] of Object.entries(e2)) (!t$1(s3) || n3.showFn) && i2.push(`${it(o3)}: ${await nt(s3, n3, r3)}`);
      const { spaceStart: o2, spaceEnd: s2, arrJoin: a } = rt(i2, n3);
      return i2.length ? `{${o2}${a()}${s2}}` : "{}";
    })(e, n2, r2);
  } finally {
    // Allow the same object to appear again on sibling branches.
    r2.delete(e);
  }
}
|
|
196
|
+
/**
 * Layout helper for the serializer: decides whether the collected fragments
 * `t2` fit on one line (combined length below `e.spaceEffectiveLength` →
 * indent width forced to 0) and returns the opening/closing whitespace plus
 * an `arrJoin` function producing the separator-joined body.
 * The template literals below intentionally contain literal newlines.
 */
function rt(t2, e) {
  let n2 = e.space;
  t2.reduce(((t3, e2) => t3 + e2.length), 0) < e.spaceEffectiveLength && (n2 = 0);
  const r2 = n2 ? `
` + "".padStart(n2) : "", i2 = n2 ? `
` : "", o2 = n2 ? `
`.padEnd(n2 + 1) : ",";
  return { spaceStart: r2, spaceEnd: i2, arrJoin: n2 ? () => t2.join(`,
`).replace(/\n/g, o2) : () => t2.join(",") };
}
|
|
206
|
+
/**
 * Quote an object key for "js" output unless it is already a safe
 * identifier (letter/underscore start, word characters after).
 */
function it(t2) {
  const identifier = /^[a-z_]+\w*$/i;
  return identifier.test(t2) ? t2 : `"${t2}"`;
}
|
|
209
|
+
/**
 * Serialize arbitrary data to text (see `et` for the accepted loose
 * argument forms). Returns the text when no `out` sink is configured;
 * otherwise feeds the sink and resolves with undefined. Always clears
 * the circular-reference tracker and destroys the options holder.
 */
async function ot(...t2) {
  const options = new et(t2);
  const ancestors = new Set();
  try {
    const text = await nt(options.rootData$, options, ancestors);
    if (!options.out) return text;
    options.out(text);
  } finally {
    ancestors.clear();
    options.destroy();
  }
}
|
|
219
|
+
/**
 * Trace helper: serialize the given data via `ot` and send it to the top
 * window's console.log, falling back to the local console.
 */
async function st(...t2) {
  const log = globalThis.top?.console?.log || console.log;
  t2.push(log);
  await ot(...t2);
}
|
|
223
|
+
/** Bundler-kept duplicate of `t$1`: true when `t2` is callable. */
function t(t2) {
  return typeof t2 == "function" || t2 instanceof Function;
}
|
|
226
|
+
/** Bundler-kept duplicate string check: primitive or boxed String. */
function o$1(t2) {
  return t2 instanceof String || typeof t2 == "string";
}
|
|
229
|
+
/** True when `t2` is a string or number (primitive or boxed) — a scalar key. */
function s$1(t2) {
  if (typeof t2 == "string" || typeof t2 == "number") return true;
  return t2 instanceof String || t2 instanceof Number;
}
|
|
232
|
+
// Backing variables for the numeric True/False enum initialized further
// below (`o` receives the enum object, `s` is the assignment shorthand).
var o, s;
|
|
233
|
+
/**
 * True when `n2` is a valid IndexedDB key: string/number, Date,
 * ArrayBuffer, or an array composed entirely of valid keys.
 */
function d(n2) {
  if (s$1(n2) || n2 instanceof Date || n2 instanceof ArrayBuffer) return true;
  return Array.isArray(n2) && n2.every(d);
}
|
|
236
|
+
/** True when `e` is directly usable as a query: an IDBKeyRange or a valid key. */
function y(e) {
  if (e instanceof IDBKeyRange) return true;
  return d(e);
}
|
|
239
|
+
/** Like `y`, but also accepts null/undefined (meaning "no restriction"). */
function l(e) {
  if (e == null) return true;
  return y(e);
}
|
|
242
|
+
/** Find a database's info entry by name via `indexedDB.databases()`; undefined when absent. */
async function b(e) {
  const infos = await indexedDB.databases();
  return infos.find((info) => info.name === e);
}
|
|
245
|
+
/**
 * Promisify an IDBOpenDBRequest `e`.
 * Optional `n2` post-processes the opened database on success — when given,
 * the db handle is closed right after it runs and the promise resolves with
 * its result; without it, the promise resolves with the raw result.
 * Optional `t2` handles "upgradeneeded"; an error thrown there rejects the
 * promise and suppresses the subsequent success callback via the `s2` flag.
 * Blocked and error events both reject with the event target's error.
 * NOTE(review): the executor is async — a synchronous throw inside it would
 * be swallowed rather than rejecting the promise; confirm callers cannot
 * trigger that path.
 */
function g(e, n2, t2) {
  return new Promise((async (r2, o2) => {
    let s2 = !1;
    e.onsuccess = async () => {
      if (!s2) if (n2) try {
        r2(await n2(e.result));
      } catch (e2) {
        o2(e2);
      } finally {
        // Close the handle whether the post-processor succeeded or failed.
        e.result?.close?.();
      }
      else r2(e.result);
    }, e.onblocked = e.onerror = (e2) => o2(e2.target.error), t2 && (e.onupgradeneeded = async (n3) => {
      try {
        await t2(e.result, n3, e);
      } catch (e2) {
        // Mark failed so the success handler fired after upgrade is ignored.
        s2 = !0, o2(e2);
      }
    });
  }));
}
|
|
266
|
+
/**
 * Open an IndexedDB database. `e` is either a name or an option object
 * ({ name, version, onsuccess, onupgradeneeded }); passing a function as
 * `r2` installs it as the `onsuccess` post-processor (see `g`).
 */
async function B(e, r2) {
  const options = o$1(e) ? { name: e } : e;
  if (t(r2)) options.onsuccess = r2;
  const { name, onsuccess, onupgradeneeded, version } = options;
  return g(indexedDB.open(name, version), onsuccess, onupgradeneeded);
}
|
|
272
|
+
/**
 * Run a callback against one or more object stores / indexes in a single
 * transaction.
 * @param e  db name/options for `B`, or an already-open IDBDatabase.
 * @param r2 store name(s) or `{ store, index }` descriptor(s).
 * @param o2 optional trailing args: a callback receiving the resolved
 *           targets, then a truthy flag selecting "readwrite" mode.
 * Without a callback, resolves with the raw targets and leaves the
 * transaction open for the caller.
 */
function m(e, r2, ...o2) {
  return new Promise(async (s2, a) => {
    try {
      let c2 = 0;
      const i2 = t(o2[c2]) ? o2[c2++] : void 0, u = o2[c2] ? "readwrite" : "readonly", f2 = e instanceof IDBDatabase ? e : await B(e), d2 = Array.isArray(r2) ? r2 : [r2], y2 = f2.transaction(d2.map(((e2) => o$1(e2) ? e2 : e2.store)), u), l2 = d2.map(((e2) => {
        if (o$1(e2)) return y2.objectStore(e2);
        const { store: t2, index: r3 } = e2;
        return o$1(r3) ? y2.objectStore(t2).index(r3) : y2.objectStore(t2);
      }));
      if (i2) try {
        // FIX: `u` is always a truthy string ("readwrite"/"readonly"), so the
        // original `u ? commit : abort` could never abort; compare the mode
        // explicitly so read-only transactions are released via abort.
        s2(await i2(...l2)), u === "readwrite" ? y2.commit() : y2.abort();
      } catch (e2) {
        y2.abort(), a(e2);
      } finally {
        f2.close();
      }
      else s2(l2);
    } catch (err) {
      // FIX: without this, a failure while opening the db or building the
      // transaction left the promise pending forever (async executor).
      a(err);
    }
  });
}
|
|
290
|
+
/**
 * Read-only convenience wrapper around `m`: runs `t2` against store(s)
 * `n2` of database `e` in a "readonly" transaction.
 */
function p(e, n2, t2) {
  return m(e, n2, t2, false);
}
|
|
293
|
+
/**
 * Promisify a one-shot IDBRequest: resolves with `request.result` on
 * success, rejects with the event target's error on failure.
 */
function D(e) {
  return new Promise((resolve, reject) => {
    e.onsuccess = () => {
      try {
        resolve(e.result);
      } catch (err) {
        reject(err);
      }
    };
    e.onerror = (evt) => reject(evt.target.error);
  });
}
|
|
304
|
+
// Populate the True/False enum declared above (TypeScript-style emit):
// o.True === 1, o.False === 0, with reverse lookups o[1] === "True" and
// o[0] === "False".
(s = o || (o = {}))[s.True = 1] = "True", s[s.False = 0] = "False";
|
|
305
|
+
// Default database name, default map-store name, and a Date→milliseconds
// normalizer used when comparing range bounds.
const P = "gs-idb";
const W = "gs-storage-map";
const toNum = (v2) => v2 instanceof Date ? v2.getTime() : v2;
|
|
306
|
+
/**
 * Convert a loose query description into something IndexedDB accepts:
 * - null/undefined, a valid key, or an existing IDBKeyRange pass through;
 * - an object with any combination of lt/lte/gt/gte bounds becomes an
 *   IDBKeyRange (Dates compared by timestamp via `toNum`).
 * Returns undefined when the object carries none of the four bound keys
 * (which is how `isIDbQuery` detects "not a query").
 * @throws {Error} when a lower bound exceeds the corresponding upper bound.
 */
function parseIDbQuery(query) {
  // Already usable as-is (nullish, plain key, or IDBKeyRange).
  if (l(query))
    return query;
  const range = query;
  // Two-sided ranges: bound exclusivity follows the lt/lte & gt/gte spelling.
  if ("lt" in range && "gt" in range) {
    if (toNum(range.gt) > toNum(range.lt))
      throw new Error(`Invalid IDBRange: gt (${range.gt}) cannot be greater than lt (${range.lt})`);
    return IDBKeyRange.bound(range.gt, range.lt, !0, !0);
  }
  if ("lt" in range && "gte" in range) {
    if (toNum(range.gte) > toNum(range.lt))
      throw new Error(`Invalid IDBRange: gte (${range.gte}) cannot be greater than lt (${range.lt})`);
    return IDBKeyRange.bound(range.gte, range.lt, !1, !0);
  }
  if ("lte" in range && "gt" in range) {
    if (toNum(range.gt) > toNum(range.lte))
      throw new Error(`Invalid IDBRange: gt (${range.gt}) cannot be greater than lte (${range.lte})`);
    return IDBKeyRange.bound(range.gt, range.lte, !0, !1);
  }
  if ("lte" in range && "gte" in range) {
    if (toNum(range.gte) > toNum(range.lte))
      throw new Error(`Invalid IDBRange: gte (${range.gte}) cannot be greater than lte (${range.lte})`);
    return IDBKeyRange.bound(range.gte, range.lte, !1, !1);
  }
  // One-sided ranges.
  if ("lt" in range)
    return IDBKeyRange.upperBound(range.lt, !0);
  if ("lte" in range)
    return IDBKeyRange.upperBound(range.lte, !1);
  if ("gt" in range)
    return IDBKeyRange.lowerBound(range.gt, !0);
  if ("gte" in range)
    return IDBKeyRange.lowerBound(range.gte, !1);
}
|
|
339
|
+
/** True when `query` converts to a usable key/range via `parseIDbQuery`. */
function isIDbQuery(query) {
  const parsed = parseIDbQuery(query);
  return parsed != null;
}
|
|
342
|
+
/**
 * Open a cursor over `target` (store or index) honoring the iteration
 * options in `arg`: `query`, `direction` (default "prev"), `preSkip` rows
 * to advance past, and `startKey`/`startPrimaryKey` to jump to a position.
 * Resolves with the live IDBRequest after any positioning step fired.
 * NOTE(review): the `fn` parameter is unused here (some call sites pass only
 * two args); and each positioning step awaits `D(request)` which installs a
 * fresh onsuccess handler — confirm combined preSkip+startKey positioning
 * behaves as intended.
 */
async function openCursor(target, arg, fn) {
  const { query, direction = "prev", preSkip, startKey, startPrimaryKey } = arg, request = target.openCursor(parseIDbQuery(query), direction);
  return preSkip && (await D(request)).advance(preSkip), startKey && (startPrimaryKey ? (await D(request)).continuePrimaryKey(startKey, startPrimaryKey) : (await D(request)).continue(startKey)), request;
}
|
|
346
|
+
/**
 * Resolve a row-mapper: an array of keys → pick those fields via `P$1`;
 * a function → used as-is; otherwise identity for keyPath stores, and for
 * keyless stores either `[value, key]` tuples (useArrayRecord) or
 * `{ key, value }` records.
 */
function getMapperFn(mapper, keyPath, useArrayRecord) {
  if (Array.isArray(mapper)) return (v2) => P$1({}, v2, mapper);
  if (t$1(mapper)) return mapper;
  if (keyPath) return (v2) => v2;
  if (useArrayRecord) return (v2, k) => [v2, k];
  return (key, value) => ({ key, value });
}
|
|
349
|
+
/**
 * Base class for store/index operators: owns the `{ store, index }` target
 * (or a native IDBObjectStore/IDBIndex), lazily resolves the store schema,
 * and provides transaction + cursor plumbing shared by readers/writers.
 * When constructed around a native target, `tx` is swapped for
 * `#nativeOperation` so no new transaction/db handle is opened.
 */
class DataOperationBase {
  idbPro;        // owning IDbPro instance
  target;        // { store, index } descriptor or native store/index
  #storeSchema;  // cached frozen schema, resolved on demand
  constructor(schema, db) {
    this.idbPro = db, this.#storeSchema = schema.storeSchema, this.target = schema.target, isNativeTarget(schema.target) && (this.tx = this.#nativeOperation);
  }
  /** Store name from the native handle when available, else the descriptor. */
  get storeName() {
    return this.nativeStore?.name || this.target.store;
  }
  /** Lazily fetch the schema; cache it only once the db has frozen it. */
  get storeSchema() {
    if (this.#storeSchema)
      return this.#storeSchema;
    const storeSchema = this.idbPro.getStoreSchema(this.storeName);
    return Object.isFrozen(storeSchema) && (this.#storeSchema = storeSchema), storeSchema;
  }
  get factory() {
    return this.idbPro.factory;
  }
  /** The underlying IDBObjectStore when the target is native; else undefined. */
  get nativeStore() {
    const { target } = this;
    if (target instanceof IDBObjectStore)
      return target;
    if (target instanceof IDBIndex)
      return target.objectStore;
  }
  /** keyPath of the effective target (index keyPath when an index is set). */
  get keyPath() {
    const { target } = this;
    if (isNativeTarget(target))
      return target.keyPath;
    const { storeSchema } = this, { index } = target;
    return index ? storeSchema.indexSchemas.find((i2) => i2.name === index)?.keyPath : storeSchema.keyPath;
  }
  /**
   * Iterate rows with `arg.fn` (or `arg` itself when it is a function).
   * `returns` selects result-collecting (`cursorResult`) vs fire-and-forget
   * (`cursorVoid`) mode; both are invoked read-only here.
   */
  async forEach(arg, returns) {
    return arg = t$1(arg) ? { fn: arg } : arg, returns ? this.cursorResult(arg, !1) : this.cursorVoid(arg, !1);
  }
  /**
   * Open a db handle + single-store transaction around `fn(target, store)`.
   * `writable === true` selects "readwrite" and commits on success; any
   * error aborts unless `rollbackOnError === false`. Without `fn`, returns
   * the frozen `{ db, tx, nativeStore, target }` handles and leaves the
   * transaction open (caller must close `db`).
   */
  async tx(writable, fn, rollbackOnError) {
    let { target } = this;
    const { store, index } = target, db = await this.idbPro.openNativeDb();
    let tx, nativeStore;
    try {
      tx = db.transaction(store, writable === !0 ? "readwrite" : "readonly"), target = nativeStore = tx.objectStore(store), index && (target = target.index(index));
    } catch (e) {
      // Close the freshly-opened handle before propagating setup failures.
      throw db.close(), e;
    }
    if (!fn)
      return Object.freeze({ db, tx, nativeStore, target });
    try {
      if (writable === !0) {
        const result = await fn(target, nativeStore);
        return tx.commit(), result;
      }
      return fn(target);
    } catch (e) {
      throw rollbackOnError !== !1 && tx.abort(), e;
    } finally {
      db.close();
    }
  }
  /**
   * Drive a cursor, invoking `arg.fn(cursor)` per row; the walk stops when
   * `fn` returns false or the cursor is exhausted.
   */
  openCursor(arg, writable) {
    return this.tx(writable, (store) => new Promise(async (resolve, reject) => {
      const { fn } = arg, request = await openCursor(store, arg);
      request.onsuccess = async () => {
        request.result ? await fn(request.result) === !1 && resolve() : resolve();
      }, request.onerror = () => reject(request.error);
    }));
  }
  /**
   * Cursor walk without collecting results. `fn(value, primaryKey, index)`
   * may return a control symbol (or `{ control, key, primaryKey, modify,
   * value }`): `modify` of Save/Delete updates/deletes the row (only when
   * `writable`), Break stops, Continue*-symbols reposition the cursor.
   */
  cursorVoid({ query, direction, preSkip, startKey, startPrimaryKey, fn }, writable) {
    let i2 = 0;
    return this.openCursor({
      query,
      direction,
      preSkip,
      startKey,
      startPrimaryKey,
      fn: async (cursor) => {
        const { value: ov, primaryKey: op } = cursor, result = await fn(ov, op, i2++), { control, key, primaryKey, modify, value } = c(result) ? result : { control: result };
        switch (writable && (modify === Save ? cursor.update(value || ov) : modify === Delete && cursor.delete()), control) {
          case Break:
            return !1;
          case ContinueKey:
            cursor.continue(key);
            break;
          case ContinuePrimaryKey:
            cursor.continuePrimaryKey(key, primaryKey);
            break;
          default:
            cursor.continue();
        }
      }
      // NOTE(review): this hard-codes `!0` (readwrite) instead of forwarding
      // `writable`, unlike cursorResult below — read-only forEach walks thus
      // open a readwrite transaction; confirm whether this is intentional.
    }, !0);
  }
  /**
   * Cursor walk collecting mapped rows. `fn` (optional) returns the same
   * control shape as in cursorVoid; rows are pushed for no-control,
   * Finished, NextKey and NextPrimaryKey results, then the switch applies
   * Break/Finished (stop) or the repositioning symbols.
   * @returns the array of mapped results.
   */
  async cursorResult({ query, direction, preSkip, startKey, startPrimaryKey, fn, mapper: mpr }, writable) {
    const { keyPath, defaultGetMapper } = this.storeSchema, mapper = getMapperFn(mpr || defaultGetMapper, keyPath), results = [];
    let i2 = 0;
    return await this.openCursor({
      query,
      direction,
      preSkip,
      startKey,
      startPrimaryKey,
      fn: async (cursor) => {
        const { value: ov, primaryKey: op } = cursor, { control, value, key, primaryKey, modify } = await fn?.(ov, op, i2, results) || {};
        switch (writable && (modify === Save ? cursor.update(value || ov) : modify === Delete && cursor.delete()), (!control || control === Finished || control === NextKey || control === NextPrimaryKey) && results.push(mapper(value || ov, primaryKey || op, i2)), control) {
          case Break:
          case Finished:
            return !1;
          case NextKey:
          case ContinueKey:
            cursor.continue(key);
            break;
          case NextPrimaryKey:
          case ContinuePrimaryKey:
            cursor.continuePrimaryKey(key, primaryKey);
            break;
          default:
            cursor.continue();
        }
        i2++;
      }
    }, writable), results;
  }
  // noinspection JSUnusedLocalSymbols
  /**
   * Replacement for `tx` when the target is already a native store/index:
   * reuses the caller's transaction instead of opening a database handle.
   */
  #nativeOperation(writable, fn) {
    const { target } = this, nativeStore = target instanceof IDBObjectStore ? target : target.objectStore;
    return fn ? fn(target, nativeStore) : Object.freeze({ nativeStore, target });
  }
}
|
|
477
|
+
/**
 * Multi-store facade: runs a caller-supplied function against several
 * reader/writer operators that all share ONE transaction (unlike
 * operating on stores individually). Also powers whole-db export/import.
 */
class DataOperators {
  idbPro;        // owning IDbPro instance
  schemas;       // per-store { storeSchema, target } descriptors
  #_storeNames;  // lazily-computed unique store-name list
  constructor(idbPro, schemas) {
    this.idbPro = idbPro, this.schemas = schemas;
  }
  /** Unique store names across all schemas (computed once). */
  get storeNames() {
    return this.#_storeNames || (this.#_storeNames = Array.from(new Set(this.schemas.map((s2) => s2.target.store))));
  }
  /** Run `fn(...readers)` in a shared readonly transaction. */
  read(fn) {
    return this.tx("newReader", fn);
  }
  /** Run `fn(...writers)` in a shared readwrite transaction (abort on error by default). */
  write(fn, rollbackOnError = !0) {
    return this.tx("newWriter", fn, !0, rollbackOnError);
  }
  /** Dump every store's rows keyed by store name. */
  export() {
    return this.tx("newReader", async (...stores) => {
      const result = {};
      for (const store of stores)
        result[store.storeName] = await store.export();
      return result;
    });
  }
  /**
   * Bulk-load `data` (rows keyed by store name) through the writer method
   * named by `use` (default "addOrChangeMany"); duplicated store operators
   * are collapsed to one per store first. Returns per-store results only
   * when `returns` is truthy.
   */
  import(data, returns, use) {
    return use || (use = "addOrChangeMany"), this.tx("newWriter", async (...stores) => {
      const result = {};
      stores = Array.from(new Map(stores.map((s2) => [s2.storeName, s2.asStore(!0)])).values());
      for (const store of stores) {
        const { storeName } = store, rows = data[storeName];
        rows && (result[storeName] = await store[use](rows, returns));
      }
      if (returns)
        return result;
    }, !0);
  }
  /**
   * Core plumbing: open the db, start one transaction over all storeNames,
   * build an operator per schema via `factory[method]`, and run `fn` with
   * them. Errors abort the transaction unless `rollbackOnError === false`;
   * the db handle is always closed.
   */
  async tx(method, fn, writable, rollbackOnError) {
    const { idbPro, schemas } = this, { factory } = idbPro.schema, db = await idbPro.openNativeDb();
    try {
      const tx = db.transaction(this.storeNames, writable ? "readwrite" : "readonly");
      try {
        const stores = schemas.map(({ storeSchema, target: info }) => {
          let target = tx.objectStore(info.store);
          return info.index && (target = target.index(info.index)), factory[method]({ storeSchema, target }, idbPro);
        });
        return await fn(...stores);
      } catch (e) {
        throw rollbackOnError !== !1 && tx.abort(), e;
      }
    } finally {
      db.close();
    }
  }
}
|
|
531
|
+
/**
 * Async-iterable cursor over a store/index.
 * Options: `direction`, `query`, `writable` (iterate in a readwrite
 * transaction), `endsWithNull` (yield a trailing null when the cursor is
 * exhausted), `preSkip`/`startKey`/`startPrimaryKey` positioning, and
 * `parser` — either a function or the name of a DbIteratorParsers entry
 * mapping each cursor to `{ control, value }`.
 */
class DbIterator extends DataOperationBase {
  direction;        // cursor direction forwarded to openCursor
  query;            // key / range / bounds-object restriction
  writable;         // iterate inside a readwrite transaction
  parser;           // cursor → { control, value } mapper, when configured
  endsWithNull;     // yield null once after the cursor is exhausted
  preSkip;          // rows to advance past before the first yield
  startKey;         // jump-to key before the first yield
  startPrimaryKey;  // jump-to primary key (combined with startKey)
  constructor(schema, db, option) {
    if (super(schema, db), !option)
      return;
    const { parser } = option;
    this.direction = option.direction, this.query = option.query, this.writable = !!option.writable, this.endsWithNull = !!option.endsWithNull, this.preSkip = option.preSkip, this.startKey = option.startKey, this.startPrimaryKey = option.startPrimaryKey, parser && (this.parser = t$1(parser) ? parser : DbIteratorParsers[parser]);
  }
  /**
   * With a parser: yields each parsed `value` (skipped when `control` is
   * set) until Break or cursor end. Without one: yields `{ cursor, end }`
   * pairs, where calling `end()` stops the loop before the next step.
   * Commits the transaction when writable; always closes the db handle.
   */
  async *[Symbol.asyncIterator]() {
    const { parser, writable, endsWithNull } = this, { db, tx, target } = await this.tx(writable);
    try {
      const request = await openCursor(target, this);
      let cursor;
      if (parser)
        for (; cursor = await D(request); ) {
          const { control, value } = await parser(cursor);
          if (control || (yield value), control === Break)
            break;
          cursor.continue();
        }
      else {
        let ended = !1;
        const end = () => {
          ended = !0;
        };
        for (; !ended && (cursor = await D(request)); )
          yield { cursor, end };
      }
      // `cursor` is null/undefined only when the walk ran to exhaustion.
      writable && tx?.commit(), endsWithNull && !cursor && (yield null);
    } finally {
      db?.close();
    }
  }
}
|
|
572
|
+
/**
 * Normalize the flexible (options | fn | query, fn | direction, direction)
 * argument forms accepted by filter/count APIs into one options object,
 * applying defaults: limit 1000, maxEmptyChecks 20000. When `arg1` is an
 * options object it is mutated and returned directly.
 */
function parseFilterArg(arg1, arg2, arg3, limit) {
  const args = c(arg1) ? arg1 : {};
  if (t$1(arg1)) args.fn = arg1;
  else if (isIDbQuery(arg1)) args.query = arg1;
  if (t$1(arg2)) args.fn = arg2;
  else if (arg2) args.direction = arg2;
  if (arg3) args.direction = arg3;
  if (limit) args.limit = limit;
  else if (!args.limit) args.limit = 1e3;
  if (!args.maxEmptyChecks) args.maxEmptyChecks = 2e4;
  return args;
}
|
|
576
|
+
/**
 * Shared pagination entry point: fills defaults (size 100, nextSkip 0),
 * computes `total`/`pages` on first use via a count, then delegates — all
 * inside one `batchRead` transaction — to the fn-filtered page reader or
 * the plain cursor page reader. `arg` is the previous page's frozen info,
 * used by queryFnPage to resume a sequential walk.
 */
async function queryPage(anyReader, info, arg) {
  info.size || (info.size = 100), info.nextSkip || (info.nextSkip = 0);
  const { query, direction, total, maxEmptyChecks, fn } = info;
  return await anyReader.batchRead(async (nativeReader) => (total || (info.total = await nativeReader.count({ query, direction, maxEmptyChecks, fn }), info.pages = Math.ceil(info.total / info.size)), info.total < 1 ? { info: R(info), rows: [] } : fn ? await queryFnPage(nativeReader, info, arg) : await queryNoneFnPage(nativeReader, info)));
}
|
|
581
|
+
/**
 * Page query without a row filter: skip `(page-1)*size` rows on the cursor,
 * read up to `size` rows through the resolved mapper, record where the next
 * page starts in `info.nextSkip`, and return frozen info + rows.
 */
async function queryNoneFnPage(nativeReader, info) {
  const { keyPath } = nativeReader.storeSchema;
  const { page, query, direction, size, mapper } = info;
  const preSkip = (page - 1) * size;
  const rows = await nativeReader.filter({
    query,
    preSkip,
    direction,
    limit: size,
    mapper: getMapperFn(mapper, keyPath, true)
  });
  info.nextSkip = preSkip + rows.length;
  return { info: R(info), rows };
}
|
|
594
|
+
/**
 * Filtered pagination dispatcher: page 1 starts a fresh cursor; a request
 * for exactly the next page (arg.page + 1, with a recorded nextSkip)
 * resumes via cursor preSkip; any other page replays the cursor from the
 * start and skips matching rows first (`hasSkip` mode in queryFnRow).
 */
async function queryFnPage(reader, info, arg) {
  info.maxEmptyChecks || (info.maxEmptyChecks = 2e4);
  const { page, query, direction, nextSkip } = info, { keyPath } = reader.storeSchema;
  return page === 1 ? await queryFnRow(reader, await openCursor(reader.target, { query, direction }), info, keyPath) : arg && nextSkip && page - arg.page === 1 ? await queryFnRow(reader, await openCursor(reader.target, {
    query,
    direction,
    preSkip: nextSkip
  }), info, keyPath) : await queryFnRow(reader, await openCursor(reader.target, { query, direction }), info, keyPath, !0);
}
|
|
603
|
+
/**
 * Walk an open cursor applying the row filter `info.fn` to collect one page
 * of mapped rows. In `hasSkip` mode the first phase only counts matches
 * until `preSkip` matching rows were skipped (or `maxEmptyChecks`
 * consecutive misses), then swaps the request's onsuccess handler to the
 * collecting phase (`rowFn`). Collection stops when the page is full, the
 * cursor ends, or `maxEmptyChecks` consecutive rows fail the filter.
 * NOTE(review): `info.nextSkip += i2 + 1` after the walk may over-count by
 * one when the page ends exactly on a matching row — confirm against a
 * resumed next-page read. The async Promise executor here relies on the
 * cursor callbacks never throwing synchronously.
 */
async function queryFnRow({ storeSchema: { defaultGetMapper } }, request, info, keyPath, hasSkip) {
  const { page, size, total, maxEmptyChecks, fn } = info, mapper = getMapperFn(info.mapper || defaultGetMapper, keyPath, !0), rows = [], preSkip = (page - 1) * info.size;
  // Page start beyond the known total: nothing to read.
  if (preSkip >= total)
    return { info: R(info), rows: [] };
  let i2 = 0;
  return await new Promise(async (resolve, reject) => {
    let ept = 0, skipped = 0;
    // Collecting phase: push mapped matches until the page is full or the
    // consecutive-miss budget (maxEmptyChecks) is exhausted.
    const rowFn = async () => {
      const { result: cursor } = request;
      if (!cursor)
        return resolve();
      let { value, primaryKey } = cursor;
      if (await fn(value, primaryKey, i2) ? (ept = 0, rows.push(mapper(value, primaryKey, i2))) : ept++, rows.length >= size || ept >= maxEmptyChecks)
        return resolve();
      i2++, cursor.continue();
    };
    // Skipping phase (hasSkip): count matches until preSkip rows are
    // skipped, then hand control to rowFn with counters reset.
    request.onerror = () => reject(request.error), hasSkip ? request.onsuccess = async () => {
      const { result: cursor } = request;
      if (!cursor)
        return resolve();
      let { value, primaryKey } = cursor;
      if (await fn(value, primaryKey, i2) ? (ept = 0, skipped++) : ept++, skipped >= preSkip || ept >= maxEmptyChecks) {
        i2 = ept = 0, request.onsuccess = rowFn, cursor.continue();
        return;
      }
      cursor.continue(), i2++;
    } : request.onsuccess = rowFn;
  }), i2 && (info.nextSkip += i2 + 1), {
    info: R(info),
    rows
  };
}
|
|
635
|
+
class DataReader extends DataOperationBase {
|
|
636
|
+
all(query, limit) {
|
|
637
|
+
const param = {};
|
|
638
|
+
return i(limit) && (param.limit = limit), isIDbQuery(query) ? param.query = query : c(query) && Object.assign(param, query), this.tx(!1, (store) => D(store.getAll(parseIDbQuery(param.query), param.limit || 1e3)));
|
|
639
|
+
}
|
|
640
|
+
async count(arg1, arg2, arg3) {
|
|
641
|
+
const args = parseFilterArg(arg1, arg2, arg3), { query, direction, fn } = args;
|
|
642
|
+
if (!fn)
|
|
643
|
+
return await this.tx(!1, (store) => D(store.count(parseIDbQuery(query))));
|
|
644
|
+
const { maxEmptyChecks } = args, findFn = fn;
|
|
645
|
+
let count = 0, ept = 0, i2 = 0;
|
|
646
|
+
return await this.openCursor({
|
|
647
|
+
query,
|
|
648
|
+
direction,
|
|
649
|
+
fn: (cursor) => {
|
|
650
|
+
if (findFn(cursor.value, cursor.primaryKey, i2++))
|
|
651
|
+
count++, ept = 0;
|
|
652
|
+
else if (++ept >= maxEmptyChecks)
|
|
653
|
+
return !1;
|
|
654
|
+
cursor.continue();
|
|
655
|
+
}
|
|
656
|
+
}), count;
|
|
657
|
+
}
|
|
658
|
+
get(key) {
|
|
659
|
+
return this.tx(!1, (store) => D(store.get(parseIDbQuery(key))));
|
|
660
|
+
}
|
|
661
|
+
getMany(keys, excludeEmpty) {
|
|
662
|
+
return this.batchRead(async (reader) => {
|
|
663
|
+
const rows = await O(keys, (k) => reader.get(k));
|
|
664
|
+
return excludeEmpty ? rows.filter((v2) => v2) : rows;
|
|
665
|
+
});
|
|
666
|
+
}
|
|
667
|
+
getRange(query, direction) {
|
|
668
|
+
return this.forEach({ query, direction }, !0);
|
|
669
|
+
}
|
|
670
|
+
async getRangeMany(keys, direction, excludeEmpty) {
|
|
671
|
+
return (await this.batchRead((w) => O(keys, (v2) => w.getRange(v2, direction)))).flat();
|
|
672
|
+
}
|
|
673
|
+
index(name, writable) {
|
|
674
|
+
let { target } = this;
|
|
675
|
+
return target instanceof IDBIndex && (target = target.objectStore), target instanceof IDBObjectStore ? (target = target.index(name), this.createOperator(target, writable)) : this.idbPro.store(target.store, name);
|
|
676
|
+
}
|
|
677
|
+
asStore(writable) {
|
|
678
|
+
let { target } = this;
|
|
679
|
+
if (target instanceof IDBObjectStore)
|
|
680
|
+
return this;
|
|
681
|
+
if (target instanceof IDBIndex)
|
|
682
|
+
target = target.objectStore;
|
|
683
|
+
else {
|
|
684
|
+
if (!("index" in target))
|
|
685
|
+
return this;
|
|
686
|
+
target = { store: target.store };
|
|
687
|
+
}
|
|
688
|
+
return this.createOperator(target, writable);
|
|
689
|
+
}
|
|
690
|
+
batchRead(fn) {
|
|
691
|
+
return isNativeTarget(this.target) ? fn(this) : this.tx(!1, (store) => fn(this.idbPro.schema.factory.newReader({
|
|
692
|
+
storeSchema: this.storeSchema,
|
|
693
|
+
target: store
|
|
694
|
+
}, this.idbPro)));
|
|
695
|
+
}
|
|
696
|
+
iterator(query, direction) {
|
|
697
|
+
const arg = {};
|
|
698
|
+
return o$2(direction) && (arg.direction = direction), isIDbQuery(query) ? arg.query = query : c(query) && Object.assign(arg, query), new DbIterator({ storeSchema: this.storeSchema, target: this.target }, this.idbPro, {
|
|
699
|
+
...arg,
|
|
700
|
+
parser: "value"
|
|
701
|
+
});
|
|
702
|
+
}
|
|
703
|
+
async filter(arg1, arg2, arg3, arg4) {
|
|
704
|
+
const args = parseFilterArg(arg1, arg2, arg3, arg4), { maxEmptyChecks, limit, fn } = args, { keyPath, defaultGetMapper } = this.storeSchema, mapper = getMapperFn(args.mapper || defaultGetMapper, keyPath, !0);
|
|
705
|
+
if (!fn)
|
|
706
|
+
return await this.forEach({
|
|
707
|
+
...args,
|
|
708
|
+
mapper,
|
|
709
|
+
fn: (value, k, i2) => {
|
|
710
|
+
if (i2 >= limit - 1)
|
|
711
|
+
return { control: Finished };
|
|
712
|
+
}
|
|
713
|
+
}, !0);
|
|
714
|
+
let ept = 0;
|
|
715
|
+
return this.forEach({
|
|
716
|
+
...args,
|
|
717
|
+
mapper,
|
|
718
|
+
fn: (value, k, i2, results) => {
|
|
719
|
+
if (fn(value, k, i2))
|
|
720
|
+
ept = 0;
|
|
721
|
+
else
|
|
722
|
+
return ++ept >= maxEmptyChecks ? { control: Break } : { control: Continue };
|
|
723
|
+
if (results.length >= limit - 1)
|
|
724
|
+
return { control: Finished };
|
|
725
|
+
}
|
|
726
|
+
}, !0);
|
|
727
|
+
}
|
|
728
|
+
async find(query, fn, direction) {
|
|
729
|
+
const [value] = await this.filter(query, fn, direction, 1);
|
|
730
|
+
return value;
|
|
731
|
+
}
|
|
732
|
+
async page(arg, page) {
|
|
733
|
+
const info = { ...arg };
|
|
734
|
+
return info.page = page || arg?.page || 1, queryPage(this, info, arg);
|
|
735
|
+
}
|
|
736
|
+
nextPage(info) {
|
|
737
|
+
return this.page(info, info.page + 1);
|
|
738
|
+
}
|
|
739
|
+
export(arg1, arg2) {
|
|
740
|
+
const args = parseFilterArg(arg1, arg2), { keyPath, exportMapper, name, defaultGetMapper } = this.storeSchema;
|
|
741
|
+
if (!keyPath && Array.isArray(exportMapper))
|
|
742
|
+
throw new Error(`When store [ ${name} ] keyPath does not exist, exportMapper does not support string[].`);
|
|
743
|
+
return args.direction = "next", args.mapper = getMapperFn(exportMapper || defaultGetMapper, keyPath, !0), args.limit = args.maxEmptyChecks = Number.MAX_SAFE_INTEGER, this.filter(args);
|
|
744
|
+
}
|
|
745
|
+
asMap() {
|
|
746
|
+
let { target } = this;
|
|
747
|
+
return target instanceof IDBIndex ? target = target.objectStore : target instanceof IDBObjectStore || (target = { store: target.store }), this.factory.newDbMap({ target }, this.idbPro);
|
|
748
|
+
}
|
|
749
|
+
createOperator(target, writable) {
|
|
750
|
+
const { idbPro } = this, tmp = this.storeSchema, schema = { storeSchema: Object.isFrozen(tmp) ? tmp : void 0, target };
|
|
751
|
+
return writable ? this.factory.newWriter(schema, idbPro) : this.factory.newReader(schema, idbPro);
|
|
752
|
+
}
|
|
753
|
+
}
|
|
754
|
+
function getKeyValueToObject(keyPath, value) {
|
|
755
|
+
if (!Array.isArray(keyPath))
|
|
756
|
+
return { [keyPath]: value };
|
|
757
|
+
const rv = {}, arrayKeyPath = keyPath;
|
|
758
|
+
for (let i2 = 0; i2 < arrayKeyPath.length; i2++)
|
|
759
|
+
rv[arrayKeyPath[i2]] = value[i2];
|
|
760
|
+
return rv;
|
|
761
|
+
}
|
|
762
|
+
function getValidKeyValue(keyPath, value) {
|
|
763
|
+
if (!Array.isArray(keyPath))
|
|
764
|
+
return value[keyPath];
|
|
765
|
+
const rv = [];
|
|
766
|
+
for (const k of keyPath) {
|
|
767
|
+
if (!value[k])
|
|
768
|
+
return;
|
|
769
|
+
rv.push(value[k]);
|
|
770
|
+
}
|
|
771
|
+
return rv;
|
|
772
|
+
}
|
|
773
|
+
function checkAddValue(storeSchema, value) {
|
|
774
|
+
if (!value || !(value instanceof Object) || Array.isArray(value))
|
|
775
|
+
return value;
|
|
776
|
+
const { addedTimeField, updatedTimeField, updatedCountField, softDeletedField } = storeSchema;
|
|
777
|
+
return value = { ...value }, addedTimeField?.name && !i(value[addedTimeField.name]) && (value[addedTimeField.name] = Date.now()), updatedTimeField?.name && !i(value[updatedTimeField.name]) && (value[updatedTimeField.name] = Date.now()), softDeletedField?.name && !i(value[softDeletedField.name]) && (value[softDeletedField.name] = 0), updatedCountField?.name && (value[updatedCountField.name] = 0), value;
|
|
778
|
+
}
|
|
779
|
+
function checkUpdateValue(storeSchema, newValue, oldValue) {
|
|
780
|
+
if (!newValue || !(newValue instanceof Object) || Array.isArray(newValue))
|
|
781
|
+
return newValue;
|
|
782
|
+
const { updatedTimeField, updatedCountField } = storeSchema;
|
|
783
|
+
return newValue = { ...oldValue, ...newValue }, updatedTimeField.name && (newValue[updatedTimeField.name] = Date.now()), updatedCountField.name && (newValue[updatedCountField.name] = (newValue[updatedCountField.name] || 0) + 1), newValue;
|
|
784
|
+
}
|
|
785
|
+
class DataWriter extends DataReader {
|
|
786
|
+
add(record) {
|
|
787
|
+
return this.changeByPk({
|
|
788
|
+
record,
|
|
789
|
+
fn: async (store, pk, newValue, oldValue, keyPath) => {
|
|
790
|
+
const { storeSchema } = this;
|
|
791
|
+
return newValue = checkAddValue(storeSchema, newValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await D(store.add(newValue))) }, pk] : [newValue, await D(store.add(newValue, pk))];
|
|
792
|
+
}
|
|
793
|
+
});
|
|
794
|
+
}
|
|
795
|
+
addMany(records, returns) {
|
|
796
|
+
return this.batchWrite((w) => returns ? O(records, (v2) => w.add(v2)) : v(records, (v2) => w.add(v2)), !0);
|
|
797
|
+
}
|
|
798
|
+
addOrSkip(record) {
|
|
799
|
+
return this.batchWrite(async (w) => {
|
|
800
|
+
const { keyPath: storeKeyPath, defaultGetMapper } = w.storeSchema, { key: pk, value } = storeKeyPath ? {
|
|
801
|
+
key: getValidKeyValue(storeKeyPath, record),
|
|
802
|
+
value: record
|
|
803
|
+
} : parseDbNoneKeyPathRecord(record);
|
|
804
|
+
if (pk) {
|
|
805
|
+
const item = await D(w.nativeStore.get(pk));
|
|
806
|
+
if (item)
|
|
807
|
+
return getMapperFn(defaultGetMapper, storeKeyPath)?.(item, pk);
|
|
808
|
+
}
|
|
809
|
+
if (w.target instanceof IDBIndex) {
|
|
810
|
+
const { keyPath } = w, keyValue = getValidKeyValue(keyPath, value);
|
|
811
|
+
if (!keyValue)
|
|
812
|
+
return w.add(record);
|
|
813
|
+
let oldValue = await w.find(keyValue);
|
|
814
|
+
if (oldValue)
|
|
815
|
+
return oldValue;
|
|
816
|
+
}
|
|
817
|
+
return w.add(record);
|
|
818
|
+
});
|
|
819
|
+
}
|
|
820
|
+
addOrSkipMany(records, returns) {
|
|
821
|
+
return this.batchWrite((w) => returns ? O(records, (v2) => w.addOrSkip(v2)) : v(records, (v2) => w.addOrSkip(v2)), !0);
|
|
822
|
+
}
|
|
823
|
+
replace(record) {
|
|
824
|
+
return this.changeByPk({
|
|
825
|
+
record,
|
|
826
|
+
getOld: !0,
|
|
827
|
+
fn: async (store, pk, newValue, oldValue, keyPath) => {
|
|
828
|
+
const { storeSchema } = this, { updatedTimeField, updatedCountField, addedTimeField } = storeSchema;
|
|
829
|
+
return oldValue ? newValue = checkUpdateValue(storeSchema, newValue, {
|
|
830
|
+
[updatedTimeField.name]: oldValue[updatedTimeField.name],
|
|
831
|
+
[updatedCountField.name]: oldValue[updatedCountField.name],
|
|
832
|
+
[addedTimeField.name]: oldValue[addedTimeField.name]
|
|
833
|
+
}) : newValue = checkAddValue(storeSchema, newValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await D(store.put(newValue))) }, pk] : [newValue, await D(store.put(newValue, pk))];
|
|
834
|
+
}
|
|
835
|
+
});
|
|
836
|
+
}
|
|
837
|
+
replaceMany(records, returns) {
|
|
838
|
+
return this.batchWrite((w) => returns ? O(records, (v2) => w.replace(v2)) : v(records, (v2) => w.replace(v2)), !0);
|
|
839
|
+
}
|
|
840
|
+
change(record, throwIfMissing) {
|
|
841
|
+
return this.changeByPk({
|
|
842
|
+
record,
|
|
843
|
+
getOld: !0,
|
|
844
|
+
requiredOld: throwIfMissing,
|
|
845
|
+
requiredPk: throwIfMissing,
|
|
846
|
+
fn: async (store, pk, newValue, oldValue, keyPath) => {
|
|
847
|
+
const { storeSchema } = this;
|
|
848
|
+
if (oldValue)
|
|
849
|
+
return newValue = checkUpdateValue(storeSchema, newValue, oldValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await D(store.put(newValue))) }, pk] : [newValue, await D(store.put(newValue, pk))];
|
|
850
|
+
}
|
|
851
|
+
});
|
|
852
|
+
}
|
|
853
|
+
changeMany(records, option) {
|
|
854
|
+
const { returns, throwIfMissing } = r(option) ? { returns: option } : option || {};
|
|
855
|
+
return this.batchWrite((w) => returns ? O(records, (v2) => w.change(v2, throwIfMissing)) : v(records, (v2) => w.change(v2, throwIfMissing)), !0);
|
|
856
|
+
}
|
|
857
|
+
addOrChange(record) {
|
|
858
|
+
return this.changeByPk({
|
|
859
|
+
record,
|
|
860
|
+
getOld: !0,
|
|
861
|
+
fn: async (store, pk, newValue, oldValue, keyPath) => {
|
|
862
|
+
const { storeSchema } = this;
|
|
863
|
+
return oldValue ? (newValue = checkUpdateValue(storeSchema, newValue, oldValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await D(store.put(newValue))) }, pk] : [newValue, await D(store.put(newValue, pk))]) : (newValue = checkAddValue(storeSchema, newValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await D(store.add(newValue))) }, pk] : [newValue, await D(store.add(newValue, pk))]);
|
|
864
|
+
}
|
|
865
|
+
});
|
|
866
|
+
}
|
|
867
|
+
addOrChangeMany(records, returns) {
|
|
868
|
+
return this.batchWrite((w) => returns ? O(records, (v2) => w.addOrChange(v2)) : v(records, (v2) => w.addOrChange(v2)), !0);
|
|
869
|
+
}
|
|
870
|
+
delete(pk, returns) {
|
|
871
|
+
return this.changeByPk({
|
|
872
|
+
pk,
|
|
873
|
+
getOld: returns,
|
|
874
|
+
fn: (store, pk2, newValue, oldValue) => {
|
|
875
|
+
if (store.delete(pk2), !!returns)
|
|
876
|
+
return [oldValue, pk2];
|
|
877
|
+
}
|
|
878
|
+
});
|
|
879
|
+
}
|
|
880
|
+
deleteMany(keys, returns) {
|
|
881
|
+
return this.batchWrite((w) => returns ? O(keys, (v2) => w.delete(v2, !0)) : v(keys, (v2) => w.delete(v2)), !0);
|
|
882
|
+
}
|
|
883
|
+
deleteRange(query, returns) {
|
|
884
|
+
const { returns: hasRtn, physical, direction } = r(returns) ? { returns } : returns || {}, { name } = this.storeSchema.softDeletedField || {};
|
|
885
|
+
return this.cursor({
|
|
886
|
+
query,
|
|
887
|
+
direction,
|
|
888
|
+
fn: (value) => physical || !name ? { modify: Delete } : (value[name] = 1, { modify: Save })
|
|
889
|
+
}, hasRtn);
|
|
890
|
+
}
|
|
891
|
+
deleteRangeMany(keys, returns) {
|
|
892
|
+
const option = r(returns) ? { returns } : returns;
|
|
893
|
+
return this.batchWrite((w) => option?.returns ? O(keys, (v2) => w.deleteRange(v2, option)) : v(keys, (v2) => w.deleteRange(v2, option)), !0);
|
|
894
|
+
}
|
|
895
|
+
changeRange(arg, returns) {
|
|
896
|
+
let { direction = "next", query, newValue } = "newValue" in arg ? arg : { newValue: arg };
|
|
897
|
+
if (query || (query = getValidKeyValue(this.keyPath, newValue)), !query)
|
|
898
|
+
throw new Error(`query is required\uFF1A${JSON.stringify(arg)}`);
|
|
899
|
+
return this.cursor({
|
|
900
|
+
query,
|
|
901
|
+
direction,
|
|
902
|
+
fn: (value) => value instanceof Object ? { modify: Save, value: { ...value, ...newValue } } : { modify: Save, value: newValue }
|
|
903
|
+
}, returns);
|
|
904
|
+
}
|
|
905
|
+
changeRangeMany(args, returns) {
|
|
906
|
+
return this.batchWrite((w) => returns ? O(args, (v2) => w.changeRange(v2, returns)) : v(args, (v2) => w.changeRange(v2, returns)), !0);
|
|
907
|
+
}
|
|
908
|
+
cursor(arg, returns) {
|
|
909
|
+
return arg = t$1(arg) ? { fn: arg } : arg || {}, arg.fn ? returns ? this.cursorResult(arg, !0) : this.cursorVoid(arg, !0) : new DbIterator(this, this.idbPro);
|
|
910
|
+
}
|
|
911
|
+
batchWrite(fn, rollbackOnError) {
|
|
912
|
+
const { target } = this;
|
|
913
|
+
if (isNativeTarget(target))
|
|
914
|
+
try {
|
|
915
|
+
return fn(this);
|
|
916
|
+
} catch (e) {
|
|
917
|
+
throw rollbackOnError !== !1 && (target instanceof IDBIndex ? target.objectStore : target).transaction.abort(), e;
|
|
918
|
+
}
|
|
919
|
+
return this.tx(!0, (store) => fn(this.idbPro.schema.factory.newWriter({
|
|
920
|
+
storeSchema: this.storeSchema,
|
|
921
|
+
target: store
|
|
922
|
+
}, this.idbPro)), rollbackOnError);
|
|
923
|
+
}
|
|
924
|
+
changeByPk({ pk, record, fn, requiredPk, getOld, requiredOld, saveMapper, getMapper }) {
|
|
925
|
+
const { storeSchema } = this, { keyPath, defaultSaveMapper, defaultGetMapper } = storeSchema;
|
|
926
|
+
return record && (saveMapper || defaultSaveMapper) && (record = getMapperFn(saveMapper || defaultSaveMapper, keyPath)?.(record)), this.batchWrite(async (w) => {
|
|
927
|
+
let query, newValue = record;
|
|
928
|
+
if (pk)
|
|
929
|
+
query = parseIDbQuery(pk);
|
|
930
|
+
else if (keyPath)
|
|
931
|
+
query = getValidKeyValue(keyPath, record);
|
|
932
|
+
else {
|
|
933
|
+
const { key, value } = parseDbNoneKeyPathRecord(record);
|
|
934
|
+
query = key, newValue = value;
|
|
935
|
+
}
|
|
936
|
+
if (requiredPk && !query)
|
|
937
|
+
throw new Error(`key is required: ${JSON.stringify(record)}`);
|
|
938
|
+
const oldValue = query && (getOld || requiredOld) ? await D(w.nativeStore.get(query)) : void 0;
|
|
939
|
+
if (requiredOld && !oldValue)
|
|
940
|
+
throw new Error(`record not found: ${JSON.stringify(record)}`);
|
|
941
|
+
const result = await fn(w.nativeStore, query, newValue, oldValue, keyPath);
|
|
942
|
+
if (result)
|
|
943
|
+
return getMapperFn(getMapper || defaultGetMapper, keyPath)?.(result[0], result[1]);
|
|
944
|
+
});
|
|
945
|
+
}
|
|
946
|
+
}
|
|
947
|
+
class DbMap extends DataOperationBase {
|
|
948
|
+
get size() {
|
|
949
|
+
return this.tx(!1, (store) => D(store.count()));
|
|
950
|
+
}
|
|
951
|
+
delete(key) {
|
|
952
|
+
return this.tx(!0, async (t2, store) => {
|
|
953
|
+
store.delete(key);
|
|
954
|
+
});
|
|
955
|
+
}
|
|
956
|
+
batch(fn) {
|
|
957
|
+
const { idbPro, storeSchema, factory } = this;
|
|
958
|
+
return this.tx(!0, async (t2, store) => await fn(factory.newDbMap({ storeSchema, target: store }, idbPro)));
|
|
959
|
+
}
|
|
960
|
+
asStore(writable) {
|
|
961
|
+
const { factory } = this.idbPro.schema;
|
|
962
|
+
return writable ? factory.newWriter(this, this.idbPro) : factory.newReader({ target: this.target }, this.idbPro);
|
|
963
|
+
}
|
|
964
|
+
entries() {
|
|
965
|
+
return new DbIterator({
|
|
966
|
+
target: this.target
|
|
967
|
+
}, this.idbPro, { parser: "keyValue" });
|
|
968
|
+
}
|
|
969
|
+
async get(key, defaultValue) {
|
|
970
|
+
return await this.tx(!1, (store) => D(store.get(key))) || defaultValue;
|
|
971
|
+
}
|
|
972
|
+
getMany(keys) {
|
|
973
|
+
return this.tx(!1, async (store) => {
|
|
974
|
+
const result = [];
|
|
975
|
+
for (const k of keys)
|
|
976
|
+
result.push(await D(store.get(k)));
|
|
977
|
+
return result;
|
|
978
|
+
});
|
|
979
|
+
}
|
|
980
|
+
async has(key) {
|
|
981
|
+
return !!await this.get(key);
|
|
982
|
+
}
|
|
983
|
+
keys() {
|
|
984
|
+
return new DbIterator({ storeSchema: this.storeSchema, target: this.target }, this.idbPro, { parser: "key" });
|
|
985
|
+
}
|
|
986
|
+
set(key, value) {
|
|
987
|
+
return this.tx(!0, async (t2, store) => {
|
|
988
|
+
await D(store.put(value, key));
|
|
989
|
+
});
|
|
990
|
+
}
|
|
991
|
+
setMany(values) {
|
|
992
|
+
return this.tx(!0, async (t2, store) => {
|
|
993
|
+
for (const [k, v2] of values)
|
|
994
|
+
store.put(v2, k);
|
|
995
|
+
});
|
|
996
|
+
}
|
|
997
|
+
values() {
|
|
998
|
+
return new DbIterator({ storeSchema: this.storeSchema, target: this.target }, this.idbPro, { parser: "value" });
|
|
999
|
+
}
|
|
1000
|
+
}
|
|
1001
|
+
function equalKeyPath(p1, p2) {
|
|
1002
|
+
if (p1 == p2)
|
|
1003
|
+
return !0;
|
|
1004
|
+
if (typeof p1 != typeof p2)
|
|
1005
|
+
return !1;
|
|
1006
|
+
if (Array.isArray(p1) && Array.isArray(p2)) {
|
|
1007
|
+
if (p1.length !== p2.length)
|
|
1008
|
+
return !1;
|
|
1009
|
+
for (let i2 = 0; i2 < p1.length; i2++)
|
|
1010
|
+
if (p1[i2] !== p2[i2])
|
|
1011
|
+
return !1;
|
|
1012
|
+
return !0;
|
|
1013
|
+
}
|
|
1014
|
+
return !1;
|
|
1015
|
+
}
|
|
1016
|
+
const versionDiffValidate = ({ stores, schema }) => {
|
|
1017
|
+
const storeSchemas = schema.storeSchemas;
|
|
1018
|
+
let info = "";
|
|
1019
|
+
for (const store of stores) {
|
|
1020
|
+
const ss = storeSchemas.find((s2) => s2.name === store.name);
|
|
1021
|
+
if (ss) {
|
|
1022
|
+
if (!equalKeyPath(store.keyPath, ss.keyPath)) {
|
|
1023
|
+
info = `store [ ${store.name} ] keyPath not equal,schema.keyPath:${ss.keyPath},store.keyPath:${store.keyPath}[]`;
|
|
1024
|
+
break;
|
|
1025
|
+
}
|
|
1026
|
+
if (!store.autoIncrement != !ss.autoIncrement) {
|
|
1027
|
+
info = `store [ ${store.name} ] autoIncrement not equal`;
|
|
1028
|
+
break;
|
|
1029
|
+
}
|
|
1030
|
+
}
|
|
1031
|
+
}
|
|
1032
|
+
return info ? `The existing database is inconsistent with the definition and cannot be corrected\uFF1A ${info}` : !0;
|
|
1033
|
+
}, versionSameValidate = async (context) => {
|
|
1034
|
+
let result = versionDiffValidate(context);
|
|
1035
|
+
return o$2(result) || (result = validateStoreAndIndexes(context)), result;
|
|
1036
|
+
}, validateStoreAndIndexes = ({ stores, schema }) => {
|
|
1037
|
+
const storeSchemas = schema.storeSchemas, dbStoreNames = stores.map((s2) => s2.name);
|
|
1038
|
+
let info = "";
|
|
1039
|
+
const missingStoreNames = storeSchemas.map((s2) => s2.name).filter((s2) => !dbStoreNames.includes(s2));
|
|
1040
|
+
if (missingStoreNames.length)
|
|
1041
|
+
info = `store [ ${missingStoreNames.join(",")} ] not exist`;
|
|
1042
|
+
else
|
|
1043
|
+
for (const store of stores) {
|
|
1044
|
+
const ss = storeSchemas.find((s2) => s2.name === store.name);
|
|
1045
|
+
if (ss && (info = validateIndexes$1(store, Array.from(store.indexNames), ss.indexSchemas), info))
|
|
1046
|
+
break;
|
|
1047
|
+
}
|
|
1048
|
+
return info ? `The existing database Store index is inconsistent with the definition and requires a database version upgrade to be fixed\uFF1A ${info}` : !0;
|
|
1049
|
+
};
|
|
1050
|
+
function validateIndexes$1(store, indexNames, schemas) {
|
|
1051
|
+
if (indexNames.length !== schemas.length)
|
|
1052
|
+
return `store [ ${store.name} ] index count not equal`;
|
|
1053
|
+
for (const name of indexNames) {
|
|
1054
|
+
const schema = schemas.find((s2) => s2.name === name);
|
|
1055
|
+
if (!schema)
|
|
1056
|
+
return `store [ ${store.name} ] index [ ${name} ] not exist`;
|
|
1057
|
+
const index = store.index(name);
|
|
1058
|
+
if (!schema.unique != !index.unique)
|
|
1059
|
+
return `store [ ${store.name} ] index [ ${name} ] unique not equal`;
|
|
1060
|
+
if (!schema.multiEntry != !index.multiEntry)
|
|
1061
|
+
return `store [ ${store.name} ] index [ ${name} ] multiEntry not equal`;
|
|
1062
|
+
if (!equalKeyPath(schema.keyPath, index.keyPath))
|
|
1063
|
+
return `store [ ${store.name} ] index [ ${name} ] keyPath not equal`;
|
|
1064
|
+
}
|
|
1065
|
+
return "";
|
|
1066
|
+
}
|
|
1067
|
+
class StoreUpgradeable {
|
|
1068
|
+
upgradeContext;
|
|
1069
|
+
storeSchema;
|
|
1070
|
+
nativeStore;
|
|
1071
|
+
#writer;
|
|
1072
|
+
constructor(upgradeContext, storeSchema, nativeStore) {
|
|
1073
|
+
this.upgradeContext = upgradeContext, this.storeSchema = storeSchema, this.nativeStore = nativeStore;
|
|
1074
|
+
}
|
|
1075
|
+
get writer() {
|
|
1076
|
+
return this.#writer || (this.#writer = this.upgradeContext.dbSchema.factory?.newWriter({
|
|
1077
|
+
target: this.nativeStore,
|
|
1078
|
+
storeSchema: this.storeSchema
|
|
1079
|
+
}));
|
|
1080
|
+
}
|
|
1081
|
+
add(versionBounds, values, returns) {
|
|
1082
|
+
return this.upgradeContext.versionIn(versionBounds) ? this.writer.addMany(values, returns) : Promise.resolve();
|
|
1083
|
+
}
|
|
1084
|
+
addOrChange(versionBounds, values, returns) {
|
|
1085
|
+
return this.upgradeContext.versionIn(versionBounds) ? this.writer.addOrChangeMany(values, returns) : Promise.resolve();
|
|
1086
|
+
}
|
|
1087
|
+
async call(versionBounds, fn) {
|
|
1088
|
+
if (this.upgradeContext.versionIn(versionBounds))
|
|
1089
|
+
return await fn(this.writer, this.upgradeContext);
|
|
1090
|
+
}
|
|
1091
|
+
replace(versionBounds, values, returns) {
|
|
1092
|
+
return this.upgradeContext.versionIn(versionBounds) ? this.writer.replaceMany(values, returns) : Promise.resolve();
|
|
1093
|
+
}
|
|
1094
|
+
}
|
|
1095
|
+
const DataOperatorFactory = Object.freeze({
|
|
1096
|
+
newDataOperators(schemas, db) {
|
|
1097
|
+
return new DataOperators(db, schemas);
|
|
1098
|
+
},
|
|
1099
|
+
newDbMap(schema, db) {
|
|
1100
|
+
return new DbMap(schema, db);
|
|
1101
|
+
},
|
|
1102
|
+
newReader(schema, db) {
|
|
1103
|
+
return new DataReader(schema, db);
|
|
1104
|
+
},
|
|
1105
|
+
newStoreUpgradeable(nativeStore, storeSchema, upgradeContext) {
|
|
1106
|
+
return new StoreUpgradeable(upgradeContext, storeSchema, nativeStore);
|
|
1107
|
+
},
|
|
1108
|
+
newWriter(schema, db) {
|
|
1109
|
+
return new DataWriter(schema, db);
|
|
1110
|
+
}
|
|
1111
|
+
});
|
|
1112
|
+
function validateSpecialFields(schema) {
|
|
1113
|
+
Object.isFrozen(schema) || (schema.addedTimeField = validateSpecialField(schema, schema.addedTimeField, "added_at", !0), schema.updatedTimeField = validateSpecialField(schema, schema.updatedTimeField, "updated_at", !0), schema.updatedCountField = validateSpecialField(schema, schema.updatedCountField, "updated_count", !1), schema.softDeletedField = validateSpecialField(schema, schema.softDeletedField, "deleted", !1));
|
|
1114
|
+
}
|
|
1115
|
+
function validateSpecialField(schema, field, defaultName, useDefault) {
|
|
1116
|
+
const validField = validateSpecialFieldBase(field, defaultName, useDefault);
|
|
1117
|
+
if (!validField)
|
|
1118
|
+
return validField;
|
|
1119
|
+
const schemaField = validField;
|
|
1120
|
+
if (schemaField.isIndexed !== !1) {
|
|
1121
|
+
schemaField.isIndexed || (schemaField.isIndexed = !0);
|
|
1122
|
+
const { name } = schemaField, indexSchemas = schema.indexSchemas;
|
|
1123
|
+
indexSchemas.some((i2) => i2 === name || i2.name === name) || indexSchemas.push(name);
|
|
1124
|
+
}
|
|
1125
|
+
return schemaField;
|
|
1126
|
+
}
|
|
1127
|
+
function validateSpecialFieldBase(field, defaultName, useDefault) {
|
|
1128
|
+
if (field === !1)
|
|
1129
|
+
return !1;
|
|
1130
|
+
if (o$2(field))
|
|
1131
|
+
return { name: field };
|
|
1132
|
+
if (c(field)) {
|
|
1133
|
+
const r$1 = field;
|
|
1134
|
+
return r(r$1.name) && (r$1.name = defaultName), field;
|
|
1135
|
+
} else if (field === !0 || useDefault)
|
|
1136
|
+
return { name: defaultName };
|
|
1137
|
+
return !1;
|
|
1138
|
+
}
|
|
1139
|
+
function validateStoreSchema(schema, storeTemplate) {
|
|
1140
|
+
let validSchema = o$2(schema) ? { name: schema } : schema;
|
|
1141
|
+
return storeTemplate && (validSchema = { ...storeTemplate, ...validSchema }), validSchema.indexSchemas || (validSchema.indexSchemas = []), validateSpecialFields(validSchema), Object.isFrozen(validSchema) || (validSchema.indexSchemas = validSchema.indexSchemas.map(indexMapper)), validateDefaultData(validSchema), validSchema;
|
|
1142
|
+
}
|
|
1143
|
+
function indexMapper(index) {
|
|
1144
|
+
const schema = o$2(index) ? { name: index } : index;
|
|
1145
|
+
return schema.keyPath || (schema.keyPath = schema.name), schema;
|
|
1146
|
+
}
|
|
1147
|
+
function validateDefaultData(schema) {
|
|
1148
|
+
if (!(schema.keyPath || !schema.defaultData?.length)) {
|
|
1149
|
+
for (const row of schema.defaultData)
|
|
1150
|
+
if (!Array.isArray(row) && !("value" in row))
|
|
1151
|
+
throw new Error(`When \`defaultData\` must contain \`value\` fields or be an array\uFF1A${JSON.stringify(row)}`);
|
|
1152
|
+
}
|
|
1153
|
+
}
|
|
1154
|
+
const validateSchemaWithDefaults = (schema) => {
|
|
1155
|
+
const { versionDiffValidate: vdf, versionSameValidate: vsf, factory: dof } = schema;
|
|
1156
|
+
return schema.storeSchemas || (schema.storeSchemas = []), schema.storeTemplate || (schema.storeTemplate = { ...defaultStoreSchemaTemplate }), !vdf && vdf !== !1 && (schema.versionDiffValidate = versionDiffValidate), !vsf && vsf !== !1 && (schema.versionSameValidate = versionSameValidate), dof ? dof !== DataOperatorFactory && (schema.factory = { ...DataOperatorFactory, ...dof }) : schema.factory = DataOperatorFactory, schema.storeSchemas = schema.storeSchemas.map((s2) => validateStoreSchema(s2, schema.storeTemplate)), schema;
|
|
1157
|
+
};
|
|
1158
|
+
async function validateBeforeOpen(schema) {
|
|
1159
|
+
const { name, version } = schema, existDb = await b(name);
|
|
1160
|
+
if (!existDb)
|
|
1161
|
+
return !0;
|
|
1162
|
+
const { versionDiffValidate: versionDiffValidate2, versionSameValidate: versionSameValidate2 } = schema, db = await B(name);
|
|
1163
|
+
try {
|
|
1164
|
+
if (schema.version < db.version)
|
|
1165
|
+
return "The existing database version is greater than the current version";
|
|
1166
|
+
const validate = schema.version === void 0 || db.version === schema.version ? versionSameValidate2 : versionDiffValidate2;
|
|
1167
|
+
if (!validate)
|
|
1168
|
+
return !0;
|
|
1169
|
+
const storeNames = Array.from(db.objectStoreNames);
|
|
1170
|
+
if (storeNames.length < 1)
|
|
1171
|
+
return `The existing database [ ${name} ] is empty`;
|
|
1172
|
+
const stores = await p(db, storeNames), diffResult = await validate({ schema, db, stores });
|
|
1173
|
+
if (o$2(diffResult) || version !== existDb.version)
|
|
1174
|
+
return diffResult;
|
|
1175
|
+
} finally {
|
|
1176
|
+
db?.close();
|
|
1177
|
+
}
|
|
1178
|
+
return !0;
|
|
1179
|
+
}
|
|
1180
|
+
function validateIndexes(storeSchema, store) {
|
|
1181
|
+
const { indexSchemas } = storeSchema, existNames = store.indexNames, schemaNames = indexSchemas.map((option) => option.name);
|
|
1182
|
+
for (const name of Array.from(existNames))
|
|
1183
|
+
schemaNames.includes(name) || store.deleteIndex(name);
|
|
1184
|
+
for (const indexSchema of indexSchemas)
|
|
1185
|
+
existNames.contains(indexSchema.name) ? validateIndex(store, indexSchema) : createIndex(store, indexSchema);
|
|
1186
|
+
return store;
|
|
1187
|
+
}
|
|
1188
|
+
function validateIndex(store, indexOption) {
|
|
1189
|
+
const index = store.index(indexOption.name);
|
|
1190
|
+
checkIndex(index, indexOption) || (store.deleteIndex(indexOption.name), createIndex(store, indexOption));
|
|
1191
|
+
}
|
|
1192
|
+
function checkIndex(index, indexOption) {
|
|
1193
|
+
return index.unique !== indexOption.unique || index.multiEntry !== indexOption.multiEntry ? !1 : equalKeyPath(indexOption.keyPath, index.keyPath);
|
|
1194
|
+
}
|
|
1195
|
+
function createIndex(store, schema) {
|
|
1196
|
+
try {
|
|
1197
|
+
store.createIndex(schema.name, schema.keyPath, {
|
|
1198
|
+
unique: schema.unique,
|
|
1199
|
+
multiEntry: schema.multiEntry
|
|
1200
|
+
});
|
|
1201
|
+
} catch {
|
|
1202
|
+
throw new Error(`store [ ${store.name} ] index [ ${schema.name} ] create error: ${JSON.stringify(schema)}`);
|
|
1203
|
+
}
|
|
1204
|
+
}
|
|
1205
|
+
function validateStoreDefine(context, schema) {
|
|
1206
|
+
return context.database.objectStoreNames.contains(schema.name) ? validateIndexes(schema, context.transaction?.objectStore(schema.name)) : defineStore(schema, context.database);
|
|
1207
|
+
}
|
|
1208
|
+
function defineStore(schema, db) {
|
|
1209
|
+
const store = db.createObjectStore(schema.name, {
|
|
1210
|
+
keyPath: schema.keyPath,
|
|
1211
|
+
autoIncrement: schema.autoIncrement
|
|
1212
|
+
});
|
|
1213
|
+
for (const option of schema.indexSchemas)
|
|
1214
|
+
createIndex(store, option);
|
|
1215
|
+
return store;
|
|
1216
|
+
}
|
|
1217
|
+
/**
 * Context object handed to user hooks during an `onupgradeneeded` cycle.
 * Caches upgradeable store wrappers, exposes the old/new versions, and
 * offers version-window checks for conditional migrations.
 */
class UpgradeContext {
  // Native IDBDatabase being upgraded.
  database;
  // Version the database is upgrading to.
  newVersion;
  // Version the database is upgrading from (0 on first creation).
  oldVersion;
  // Full database schema driving this upgrade.
  dbSchema;
  // The version-change transaction of the upgrade event.
  transaction;
  // Lazy cache of upgradeable store wrappers, keyed by store name.
  #stores = {};
  constructor(args) {
    this.database = args.database, this.newVersion = args.newVersion, this.oldVersion = args.oldVersion, this.dbSchema = args.dbSchema, this.transaction = args.transaction;
  }
  /**
   * Delete an object store, tolerating its absence (no-op when the
   * store does not exist).
   */
  deleteStoreIfExists(storeName) {
    const db = this.database;
    db.objectStoreNames.contains(storeName) && db.deleteObjectStore(storeName);
  }
  /**
   * Tear down the context: run `C` over the cached wrappers (presumably a
   * bundle-level disposer — defined elsewhere; TODO confirm), then always
   * empty the cache even if disposal throws.
   */
  destroy() {
    try {
      C(this.#stores);
    } finally {
      for (const k of Object.keys(this.#stores))
        delete this.#stores[k];
    }
  }
  /**
   * Get (or lazily create and cache) the upgradeable wrapper for a store.
   * On first access this also ensures the native store exists and matches
   * its schema via `validateStoreDefine`.
   */
  store(storeName) {
    if (storeName in this.#stores)
      return this.#stores[storeName];
    const { factory } = this.dbSchema, { storeSchemas } = this.dbSchema, storeSchema = storeSchemas.find((s2) => s2.name === storeName), nativeStore = validateStoreDefine(this, storeSchema);
    return this.#stores[storeName] = factory.newStoreUpgradeable(nativeStore, storeSchema, this);
  }
  /**
   * Test whether the current upgrade falls inside an old/new version window.
   * Any bound left `undefined` is treated as unbounded (the `undefined`
   * comparisons below evaluate to false, so those checks are skipped).
   *
   * @throws {Error} If all four bounds are undefined, or a max is below
   *   its matching min (note: the cross-checks only fire when both bounds
   *   of a pair are defined, since `undefined < x` is always false).
   * @returns {boolean} True when oldVersion/newVersion satisfy every
   *   defined bound.
   */
  versionIn({ oldMin, oldMax, newMax, newMin }) {
    if (oldMax === void 0 && newMax === void 0 && oldMin === void 0 && newMin === void 0)
      throw new Error(`versionIn bounds must not be empty ${JSON.stringify({ oldMax, newMax, oldMin, newMin })}`);
    if (oldMax < oldMin)
      throw new Error(`oldMax (${oldMax}) cannot be less than oldMin (${oldMin})`);
    if (newMax < newMin)
      throw new Error(`newMax (${newMax}) cannot be less than newMin (${newMin})`);
    const { oldVersion, newVersion } = this;
    return oldMin !== void 0 && oldVersion < oldMin || oldMax !== void 0 && oldVersion > oldMax || newMin !== void 0 && newVersion < newMin ? !1 : !(newMax !== void 0 && newVersion > newMax);
  }
}
|
|
1256
|
+
/**
 * Drive a full database upgrade inside `onupgradeneeded`.
 *
 * Runs the phases strictly in order — `beforeUpgrade` hook, store
 * definition/validation, `defaultData` seeding, per-version `versionData`
 * application, per-store `storeDefined` hooks, then the `afterUpgrade`
 * hook — collecting every error instead of aborting at the first one.
 *
 * @param {object} dbSchema - Database schema (stores, hooks, target version).
 * @param {IDBDatabase} database - Database being upgraded.
 * @param {IDBVersionChangeEvent} e - Upgrade event (old/new versions).
 * @param {IDBOpenDBRequest} request - Open request carrying the
 *   version-change transaction.
 * @throws The single collected error when exactly one phase failed, or an
 *   AggregateError wrapping all of them.
 */
async function upgradeDb(dbSchema, database, e, request) {
  // `newVersion` falls back to the schema's declared version when the
  // event does not carry one.
  const { storeSchemas, beforeUpgrade, afterUpgrade, version } = dbSchema, { newVersion = version, oldVersion } = e, { transaction } = request, context = new UpgradeContext({ database, newVersion, oldVersion, dbSchema, transaction });
  try {
    const errors = [];
    // t$1 looks like a "is callable" predicate from the bundle — TODO confirm.
    if (t$1(beforeUpgrade))
      try {
        await beforeUpgrade(context);
      } catch (e2) {
        errors.push(e2);
      }
    // Phase 1: force every store to be defined/validated via the lazy cache.
    for (const storeName of storeSchemas.map((s2) => s2.name))
      try {
        context.store(storeName);
      } catch (e2) {
        errors.push(e2);
      }
    // Phase 2: seed defaultData; `{ oldMax: 0 }` presumably restricts the
    // seed to a fresh database (oldVersion 0) — verify against store.add.
    for (const { name, defaultData } of storeSchemas)
      if (defaultData)
        try {
          await context.store(name).add({ oldMax: 0 }, defaultData);
        } catch (e2) {
          errors.push(e2);
        }
    // Phase 3: apply version-gated data sets via the named method
    // ("addOrChange" by default) on the store wrapper.
    for (const { name, versionData } of storeSchemas)
      if (versionData)
        for (const { version: version2, data, use = "addOrChange" } of versionData)
          try {
            await context.store(name)[use](version2, data);
          } catch (e2) {
            errors.push(e2);
          }
    // Phase 4: optional per-store callback with the upgradeable wrapper.
    for (const { name, storeDefined } of storeSchemas)
      try {
        await storeDefined?.(context.store(name));
      } catch (e2) {
        errors.push(e2);
      }
    if (t$1(afterUpgrade))
      try {
        await afterUpgrade(context);
      } catch (e2) {
        errors.push(e2);
      }
    if (!errors.length)
      return;
    // Single failure rethrows as-is; multiple failures are aggregated.
    throw errors.length === 1 ? errors[0] : new AggregateError(errors, "Database upgrade error");
  } finally {
    // x presumably destroys/disposes the context — defined elsewhere in the
    // bundle; TODO confirm it maps to UpgradeContext cleanup.
    x(context);
  }
}
|
|
1306
|
+
/**
 * Normalize a data-operation request (`{ store, index }`, where each may be
 * a name string or a schema object) against the database schema, merging it
 * into `dbSchema.storeSchemas` in place.
 *
 * Side effect: the matched store schema slot is replaced (or a new one is
 * pushed) with the validated, merged schema — this function MUTATES
 * `dbSchema`.
 *
 * @returns {{target: {store: string, index?: string}}} Resolved target
 *   names only; the enriched schema lives in `dbSchema.storeSchemas`.
 */
function validateDataOperationSchema(schema, dbSchema) {
  let { store, index } = schema;
  // o$2 looks like the bundle's "is string" predicate — TODO confirm.
  // A store schema entry may itself be a bare name string or an object.
  const { storeTemplate } = dbSchema, { storeSchemas = [] } = dbSchema, storeName = o$2(store) ? store : store.name, storeIndex = storeSchemas.findIndex((s2) => s2 === storeName || s2.name === storeName), existStore = storeIndex > -1 && storeSchemas[storeIndex];
  let tmp;
  // Pick/merge the candidate schema:
  //  - request by name: reuse the existing schema (or keep the bare name);
  //  - request by object: take it as-is unless a distinct existing object
  //    must be merged ({...existStore, ...store}, request wins).
  // Then, if any index info is involved, promote a bare name to an object
  // and merge the index schemas (existing <- new <- single `index`).
  o$2(store) ? tmp = existStore || storeName : !existStore || o$2(existStore) || store === existStore ? tmp = store : tmp = { ...existStore, ...store }, (index || existStore?.indexSchemas?.length || store?.indexSchemas?.length) && (o$2(tmp) && (tmp = { name: tmp }), tmp.indexSchemas = mergeIndexes(existStore.indexSchemas || [], store.indexSchemas || [], index));
  const validSchema = validateStoreSchema(tmp, storeTemplate);
  // Write the validated schema back into the shared schema list.
  storeIndex > -1 ? storeSchemas[storeIndex] = validSchema : storeSchemas.push(validSchema), dbSchema.storeSchemas = storeSchemas;
  const target = { store: storeName };
  return index && (target.index = o$2(index) ? index : index.name), {
    target
  };
}
|
|
1318
|
+
/**
 * Merge incoming index definitions into an existing list, in place.
 * Entries may be bare name strings or schema objects; matching is by name.
 * An object always replaces a bare string; two objects are merged with
 * the incoming one winning; a bare incoming string never downgrades an
 * existing object.
 *
 * Note: mutates BOTH arguments — `index` (when given) is appended to
 * `newIndexes`, and `existIndexes` is edited and returned.
 *
 * @returns {Array} The mutated `existIndexes` list.
 */
function mergeIndexes(existIndexes, newIndexes, index) {
  if (index)
    newIndexes.push(index);
  for (const incoming of newIndexes) {
    const name = o$2(incoming) ? incoming : incoming.name;
    const pos = existIndexes.findIndex((existing) => existing === name || existing.name === name);
    if (pos === -1) {
      existIndexes.push(incoming);
      continue;
    }
    const current = existIndexes[pos];
    if (o$2(current)) {
      // A bare name is upgraded to whatever came in (string or object).
      existIndexes[pos] = incoming;
    } else if (!o$2(incoming)) {
      // Object-on-object: merge in place, incoming fields win.
      existIndexes[pos] = Object.assign(current, incoming);
    }
  }
  return existIndexes;
}
|
|
1330
|
+
// Template schema for stores created by the `map()` convenience API:
// no key path, no auto-increment, no indexes, and all bookkeeping
// fields (timestamps, soft-delete, update counter) disabled.
// (A single-object `Object.assign` wrapper was a no-op and is dropped.)
const mapStoreSchema = {
  name: "",
  addedTimeField: false,
  autoIncrement: false,
  indexSchemas: [],
  keyPath: undefined,
  softDeletedField: false,
  updatedCountField: false,
  updatedTimeField: false
};
|
|
1340
|
+
/**
 * Main entry class: wraps a database schema, lazily opens the native
 * IndexedDB database, and hands out data operators (writers, multi-store
 * operators, key-value maps) built by the schema's factory.
 */
class IDbPro {
  // Shared lazily-created default instance (see `defaultDb`).
  static #_db;
  // The (possibly not-yet-validated) database schema.
  #schema;
  // Result of the pre-open validation: true, an error string, or undefined
  // when validation has not run yet.
  #beforeUseValid;
  // Cache of resolved store schemas, keyed by store name.
  #storeSchemaRecord = {};
  /**
   * @param schema - Database schema, or a bare database name string.
   * @param skipPreOpenValidation - When truthy, pre-open schema validation
   *   is skipped (marked as already valid).
   */
  constructor(schema, skipPreOpenValidation) {
    // o$2 looks like an "is string" predicate and c a null/undefined check —
    // both defined elsewhere in the bundle; TODO confirm.
    this.#schema = schema = o$2(schema) ? { name: schema } : schema, Array.isArray(schema.storeSchemas) || (schema.storeSchemas = []), c(schema.storeTemplate) || (schema.storeTemplate = defaultStoreSchemaTemplate), skipPreOpenValidation && (this.#beforeUseValid = !0);
  }
  /**
   * Default instance (created on first access with the bundle-level
   * default schema `P`).
   */
  static get defaultDb() {
    return this.#_db || (this.#_db = new IDbPro(P));
  }
  // The schema is frozen once initialized, so frozenness doubles as the
  // "initialized" flag.
  get initialized() {
    return Object.isFrozen(this.#schema);
  }
  get schema() {
    return this.#schema;
  }
  // Unique store names; entries may be bare strings or schema objects.
  get storeNames() {
    return Array.from(new Set(this.#schema.storeSchemas.map((s2) => o$2(s2) ? s2 : s2.name)));
  }
  // Operator factory, defaulting (and memoizing) to DataOperatorFactory.
  get factory() {
    return this.#schema.factory || (this.#schema.factory = DataOperatorFactory);
  }
  /**
   * Release the default database instance (if it exists).
   */
  static releaseDefaultDB() {
    this.#_db = void 0;
  }
  /**
   * Open from an already-existing database `name`.
   * @warning Only for scenarios that do not need to modify the existing
   *   database structure.
   * - If modification is needed, use `generateDbSchema()` to generate a
   *   schema from the existing database, adjust it appropriately, then
   *   reopen.
   * @param name
   */
  static async openExistDb(name) {
    // Lazy "dynamic import" of the schema generator via the frozen
    // namespace shim generateDbSchema$1.
    const { generateDbSchema: generateDbSchema2 } = await Promise.resolve().then(function() {
      return generateDbSchema$1;
    });
    return new IDbPro(await generateDbSchema2(name));
  }
  // Static conveniences delegating to the default instance.
  static store(arg1, index) {
    return IDbPro.defaultDb.store(arg1, index);
  }
  static stores(schemas) {
    return IDbPro.defaultDb.stores(schemas);
  }
  static map(storeName, defaultData) {
    return IDbPro.defaultDb.map(storeName, defaultData);
  }
  /**
   * Validate, freeze, and open the underlying native database.
   * R presumably freezes the schema and B opens the IDB connection —
   * both bundle-level helpers; TODO confirm.
   */
  async openNativeDb() {
    const schema = this.#schema = R(this.initSchema());
    await this.#isBeforeUseValidate();
    const { name, version } = schema;
    return await B({
      name,
      version,
      onupgradeneeded: (db, e, request) => upgradeDb(schema, db, e, request)
    });
  }
  /**
   * Build a writer for one store. `arg1` may be a name string, a store
   * schema, or a full operation object carrying its own `store`/`index`.
   */
  store(arg1, index) {
    const operation = arg1.store ? arg1 : { store: arg1 };
    return operation.store || (operation.store = arg1), operation.index || (operation.index = index), this.factory.newWriter(validateDataOperationSchema(operation, this.schema), this);
  }
  /** Build a multi-store operator over the given store names/schemas. */
  stores(schemas) {
    const { schema } = this, results = schemas.map((store) => validateDataOperationSchema(o$2(store) ? { store } : store, schema));
    return this.factory.newDataOperators(results, this);
  }
  /**
   * Validate the schema exactly once; a custom validator may be supplied
   * on the schema itself, otherwise the module default is used.
   */
  initSchema() {
    if (this.initialized)
      return this.#schema;
    const { validateSchemaWithDefaults: validate = validateSchemaWithDefaults } = this.#schema;
    return this.#schema = validate(this.#schema);
  }
  /** Pretty-print / trace the schema via the bundle helper `st`. */
  async traceSchema(showFn) {
    await st(this.schema, showFn);
  }
  /**
   * Get a key-value map facade over a store, registering a map-flavored
   * store schema (see `mapStoreSchema`) on first use. When only data is
   * passed, the bundle-level default map store name `W` is used.
   */
  map(storeName, defaultData) {
    const name = o$2(storeName) ? storeName : o$2(defaultData) ? defaultData : W;
    Array.isArray(storeName) && (defaultData = storeName);
    const { storeSchemas } = this.schema;
    return storeSchemas.find((s2) => s2.name === name || s2 === name) || storeSchemas.push({ ...mapStoreSchema, name, defaultData }), this.factory.newDbMap({ target: { store: name } }, this);
  }
  /** Export all rows from every store. */
  export() {
    return this.stores(this.storeNames).export();
  }
  /** Import rows into every store via the multi-store operator. */
  import(data, returns, use) {
    return this.stores(this.storeNames).import(data, returns, use);
  }
  /**
   * Resolve a store schema by name. Once initialized the lookup is cached;
   * before initialization, bare string entries are promoted to objects
   * in place but not cached (the schema may still change).
   */
  getStoreSchema(name) {
    if (name in this.#storeSchemaRecord)
      return this.#storeSchemaRecord[name];
    const index = this.schema.storeSchemas.findIndex((s2) => s2 === name || s2.name === name);
    let storeSchema = this.schema.storeSchemas[index];
    return this.initialized ? this.#storeSchemaRecord[name] = storeSchema : o$2(storeSchema) && (this.schema.storeSchemas[index] = storeSchema = { name: storeSchema }), storeSchema;
  }
  /**
   * Run (and memoize) the pre-open validation. Returns true when valid;
   * throws when the validator produced an error string.
   */
  async #isBeforeUseValidate() {
    let valid = this.#beforeUseValid;
    if (valid === void 0 && (valid = this.#beforeUseValid = await validateBeforeOpen(this.#schema)), valid === !0)
      return !0;
    if (o$2(valid))
      throw new Error(valid);
  }
}
|
|
1447
|
+
/**
 * Module-level convenience: open a writer for a store on the shared
 * default database instance.
 */
function dbStore(arg1, index) {
  const defaultDb = IDbPro.defaultDb;
  return defaultDb.store(arg1, index);
}
|
|
1450
|
+
/**
 * Module-level convenience: get a key-value map facade over a store on
 * the shared default database instance.
 */
function dbMap(storeName, defaultData) {
  const defaultDb = IDbPro.defaultDb;
  return defaultDb.map(storeName, defaultData);
}
|
|
1453
|
+
/**
 * Module-level convenience: build a multi-store operator on the shared
 * default database instance.
 */
function dbStores(schemas) {
  const defaultDb = IDbPro.defaultDb;
  return defaultDb.stores(schemas);
}
|
|
1456
|
+
/**
 * Drop the cached default database instance so the next access to
 * `IDbPro.defaultDb` recreates it lazily.
 */
function releaseDefaultDB() {
  return IDbPro.releaseDefaultDB();
}
|
|
1459
|
+
/**
 * Build a store schema object from a live IDBObjectStore: base identity
 * fields plus whatever index/special-field info `parseIndexes` derives.
 *
 * @param {IDBObjectStore} store - Open store (within a transaction).
 * @param {object} fields - Special-field names to probe for.
 * @returns {object} Serializable store schema.
 */
function generateStoreSchema(store, fields) {
  const { name, keyPath, autoIncrement } = store;
  const derived = parseIndexes(store, fields);
  return Object.assign({ name, keyPath, autoIncrement }, derived);
}
|
|
1468
|
+
/**
 * Extract the index schemas of a live store and classify the configured
 * special fields (timestamps, update counter, soft-delete) by whether
 * their key path is covered by an existing index.
 *
 * @param {IDBObjectStore} store - Open store (within a transaction).
 * @param {object} fields - Candidate special-field names.
 * @returns {{indexSchemas: Array, addedTimeField, updatedTimeField, updatedCountField, softDeletedField}}
 */
function parseIndexes(store, fields) {
  const indexSchemas = [];
  const indexedPaths = new Set();
  for (const indexName of Array.from(store.indexNames)) {
    const { name, keyPath, unique, multiEntry } = store.index(indexName);
    // A compound index contributes every component path.
    if (Array.isArray(keyPath)) {
      for (const path of keyPath)
        indexedPaths.add(path);
    } else {
      indexedPaths.add(keyPath);
    }
    indexSchemas.push({ name, keyPath, unique, multiEntry });
  }
  return {
    indexSchemas,
    addedTimeField: convertSpecialField(fields.addedTimeField, indexedPaths),
    updatedTimeField: convertSpecialField(fields.updatedTimeField, indexedPaths),
    updatedCountField: convertSpecialField(fields.updatedCountField, indexedPaths),
    softDeletedField: convertSpecialField(fields.softDeletedField, indexedPaths)
  };
}
|
|
1481
|
+
/**
 * Turn a candidate special-field name into its schema form: `false` when
 * the field's key path is not covered by any index, otherwise a marker
 * object with `isIndexed: false`.
 *
 * @param {string|undefined} field - Candidate field name.
 * @param {Set<string>} keySet - All key paths covered by the store's indexes.
 * @returns {false | {name: string, isIndexed: false}}
 */
function convertSpecialField(field, keySet) {
  if (!keySet.has(field)) {
    return false;
  }
  return { name: field, isIndexed: false };
}
|
|
1484
|
+
/**
 * Reverse-engineer a database schema from an existing IndexedDB database.
 *
 * @param db - Database name (or handle accepted by the bundle helpers).
 * @param option - Optional settings:
 *   - asString: `true` (uses width 160) or a number → return the schema
 *     rendered as a string via `ot`; falsy → return the schema object.
 *   - specialFields: special-field names to probe for.
 *   - dataExportTarget: "defaultData" | "versionData" → also embed the
 *     current rows into the generated schema.
 * @throws {Error} When the database does not exist (checked via `b`).
 */
async function generateDbSchema(db, option) {
  if (!await b(db))
    throw new Error(`db [ ${db} ] not exist`);
  let { asString, specialFields = defaultSpecialFields, dataExportTarget } = option || {};
  // Clamp numeric widths to >= 1. NOTE(review): because isNaN(false) and
  // isNaN(0) are false, passing asString: false or 0 gets coerced to 1 and
  // produces STRING output — looks unintended; confirm intended contract.
  asString === !0 && (asString = 160), isNaN(asString) || asString < 1 && (asString = 1);
  let dbSchema = await generateRoot(db, specialFields);
  return dataExportTarget && await generateData(dbSchema, dataExportTarget), asString ? await ot({
    rootData$: dbSchema,
    spaceEffectiveLength: asString
  }) : dbSchema;
}
|
|
1495
|
+
/**
 * Export every store's rows from the live database and embed them into the
 * generated schema, either as `defaultData` or as a version-gated
 * `versionData` entry capped at the current database version.
 *
 * @param {object} dbSchema - Generated schema, mutated in place.
 * @param {"defaultData"|"versionData"} dataExportTarget - Where to embed rows.
 * @returns {Promise<object>} The same (mutated) `dbSchema`.
 */
async function generateData(dbSchema, dataExportTarget) {
  // Open with pre-open validation skipped (second arg) — the schema was
  // just generated from the live database.
  const exported = await new IDbPro(L(dbSchema), true).export();
  for (const storeSchema of dbSchema.storeSchemas) {
    const rows = exported[storeSchema.name];
    if (!rows?.length)
      continue;
    if (dataExportTarget === "defaultData") {
      storeSchema.defaultData = rows;
    } else if (dataExportTarget === "versionData") {
      if (!storeSchema.versionData)
        storeSchema.versionData = [];
      storeSchema.versionData.push({
        version: { oldMax: dbSchema.version },
        data: rows
      });
    }
  }
  return dbSchema;
}
|
|
1506
|
+
/**
 * Read a database's top-level schema (name, version, per-store schemas)
 * through a throwaway readonly transaction opened via the bundle helper
 * `B`. The transaction is aborted in `finally` so no work is committed
 * and the connection is released promptly.
 */
function generateRoot(db, specialFields) {
  return B(db, async (existDb) => {
    const storeNames = Array.from(existDb.objectStoreNames);
    const tx = existDb.transaction(storeNames, "readonly");
    try {
      const storeSchemas = storeNames.map(
        (storeName) => generateStoreSchema(tx.objectStore(storeName), specialFields)
      );
      return { name: existDb.name, version: existDb.version, storeSchemas };
    } finally {
      tx.abort();
    }
  });
}
|
|
1520
|
+
// Frozen namespace object emulating a lazily-imported module for
// `generateDbSchema` (consumed by IDbPro.openExistDb via
// Promise.resolve().then(...)).
var generateDbSchema$1 = /* @__PURE__ */ Object.freeze({ __proto__: null, generateDbSchema });
|
|
1521
|
+
// Public API surface of the bundle.
export {
  Break,
  Continue,
  ContinueKey,
  ContinuePrimaryKey,
  DataOperationBase,
  DataOperators,
  DataReader,
  DataWriter,
  DbIterator,
  DbIteratorParsers,
  DbMap,
  Delete,
  Finished,
  IDbPro,
  NextKey,
  NextPrimaryKey,
  Save,
  StoreUpgradeable,
  UpgradeContext,
  dbMap,
  dbStore,
  dbStores,
  defaultSpecialFields,
  defaultStoreSchemaTemplate,
  generateDbSchema,
  isIDbQuery,
  isNativeTarget,
  parseDbNoneKeyPathRecord,
  parseIDbQuery,
  releaseDefaultDB,
  validateSchemaWithDefaults,
  versionDiffValidate,
  versionSameValidate
};
|