gs-idb-pro 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/lib/index.cjs +1247 -1
- package/lib/index.d.ts +3 -3
- package/lib/index.js +1281 -1
- package/lib/index.web.js +1555 -1
- package/package.json +1 -1
package/lib/index.cjs
CHANGED
|
@@ -1 +1,1247 @@
|
|
|
1
|
-
"use strict";var e=require("gs-base"),t=require("gs-idb-basic");const r=Symbol("save"),a=Symbol("delete"),n=Object.freeze({key:({primaryKey:e})=>({value:e}),value:({value:e})=>({value:e}),keyValue:({primaryKey:e,value:t})=>({value:[e,t]})});function s(e){if(Array.isArray(e))return{key:e[1],value:e[0]};if("value"in e){const{key:t,value:r}=e;return{key:t,value:r}}throw new Error(`not include value in invalid DBRecord:${JSON.stringify(e)}`)}const i=Symbol("break"),o=Symbol("finished"),c=Symbol("continue"),u=Symbol("continue key"),d=Symbol("next key"),h=Symbol("continue primary key"),l=Symbol("next primary key");function m(e){return e instanceof IDBObjectStore||e instanceof IDBIndex}const f=Object.freeze({keyPath:"id",autoIncrement:!0,addedTimeField:!0,updatedTimeField:!0}),y=Object.freeze({addedTimeField:"added_at",softDeletedField:"deleted",updatedCountField:"updated_count",updatedTimeField:"updated_at"}),p=e=>e instanceof Date?e.getTime():e;function g(e){if(t.isDbQueryOrNull(e))return e;const r=e;if("lt"in r&&"gt"in r){if(p(r.gt)>p(r.lt))throw new Error(`Invalid IDBRange: gt (${r.gt}) cannot be greater than lt (${r.lt})`);return IDBKeyRange.bound(r.gt,r.lt,!0,!0)}if("lt"in r&&"gte"in r){if(p(r.gte)>p(r.lt))throw new Error(`Invalid IDBRange: gte (${r.gte}) cannot be greater than lt (${r.lt})`);return IDBKeyRange.bound(r.gte,r.lt,!1,!0)}if("lte"in r&&"gt"in r){if(p(r.gt)>p(r.lte))throw new Error(`Invalid IDBRange: gt (${r.gt}) cannot be greater than lte (${r.lte})`);return IDBKeyRange.bound(r.gt,r.lte,!0,!1)}if("lte"in r&&"gte"in r){if(p(r.gte)>p(r.lte))throw new Error(`Invalid IDBRange: gte (${r.gte}) cannot be greater than lte (${r.lte})`);return IDBKeyRange.bound(r.gte,r.lte,!1,!1)}return"lt"in r?IDBKeyRange.upperBound(r.lt,!0):"lte"in r?IDBKeyRange.upperBound(r.lte,!1):"gt"in r?IDBKeyRange.lowerBound(r.gt,!0):"gte"in r?IDBKeyRange.lowerBound(r.gte,!1):void 0}function b(e){return null!=g(e)}async function 
S(e,r,a){const{query:n,direction:s="prev",preSkip:i,startKey:o,startPrimaryKey:c}=r,u=e.openCursor(g(n),s);return i&&(await t.requestDbResult(u)).advance(i),o&&(c?(await t.requestDbResult(u)).continuePrimaryKey(o,c):(await t.requestDbResult(u)).continue(o)),u}function w(t,r,a){return Array.isArray(t)?r=>e.copyFields({},r,t):e.isFunction(t)?t:r?e=>e:a?(e,t)=>[e,t]:(e,t)=>({key:e,value:t})}class x{idbPro;target;#e;constructor(e,t){this.idbPro=t,this.#e=e.storeSchema,this.target=e.target,m(e.target)&&(this.tx=this.#t)}get storeName(){return this.nativeStore?.name||this.target.store}get storeSchema(){if(this.#e)return this.#e;const e=this.idbPro.getStoreSchema(this.storeName);return Object.isFrozen(e)&&(this.#e=e),e}get factory(){return this.idbPro.factory}get nativeStore(){const{target:e}=this;return e instanceof IDBObjectStore?e:e instanceof IDBIndex?e.objectStore:void 0}get keyPath(){const{target:e}=this;if(m(e))return e.keyPath;const{storeSchema:t}=this,{index:r}=e;return r?t.indexSchemas.find(e=>e.name===r)?.keyPath:t.keyPath}async forEach(t,r){return t=e.isFunction(t)?{fn:t}:t,r?this.cursorResult(t,!1):this.cursorVoid(t,!1)}async tx(e,t,r){let{target:a}=this;const{store:n,index:s}=a,i=await this.idbPro.openNativeDb();let o,c;try{o=i.transaction(n,!0===e?"readwrite":"readonly"),a=c=o.objectStore(n),s&&(a=a.index(s))}catch(e){throw i.close(),e}if(!t)return Object.freeze({db:i,tx:o,nativeStore:c,target:a});try{if(!0===e){const e=await t(a,c);return o.commit(),e}return t(a)}catch(e){throw!1!==r&&o.abort(),e}finally{i.close()}}openCursor(e,t){return this.tx(t,t=>new Promise(async(r,a)=>{const{fn:n}=e,s=await S(t,e);s.onsuccess=async()=>{s.result?!1===await n(s.result)&&r():r()},s.onerror=()=>a(s.error)}))}cursorVoid({query:t,direction:n,preSkip:s,startKey:o,startPrimaryKey:c,fn:d},l){let m=0;return this.openCursor({query:t,direction:n,preSkip:s,startKey:o,startPrimaryKey:c,fn:async t=>{const{value:n,primaryKey:s}=t,o=await 
d(n,s,m++),{control:c,key:f,primaryKey:y,modify:p,value:g}=e.isObject(o)?o:{control:o};switch(l&&(p===r?t.update(g||n):p===a&&t.delete()),c){case i:return!1;case u:t.continue(f);break;case h:t.continuePrimaryKey(f,y);break;default:t.continue()}}},!0)}async cursorResult({query:e,direction:t,preSkip:n,startKey:s,startPrimaryKey:c,fn:m,mapper:f},y){const{keyPath:p,defaultGetMapper:g}=this.storeSchema,b=w(f||g,p),S=[];let x=0;return await this.openCursor({query:e,direction:t,preSkip:n,startKey:s,startPrimaryKey:c,fn:async e=>{const{value:t,primaryKey:n}=e,{control:s,value:c,key:f,primaryKey:p,modify:g}=await(m?.(t,n,x,S))||{};switch(y&&(g===r?e.update(c||t):g===a&&e.delete()),(!s||s===o||s===d||s===l)&&S.push(b(c||t,p||n,x)),s){case i:case o:return!1;case d:case u:e.continue(f);break;case l:case h:e.continuePrimaryKey(f,p);break;default:e.continue()}x++}},y),S}#t(e,t){const{target:r}=this,a=r instanceof IDBObjectStore?r:r.objectStore;return t?t(r,a):Object.freeze({nativeStore:a,target:r})}}class D{idbPro;schemas;#r;constructor(e,t){this.idbPro=e,this.schemas=t}get storeNames(){return this.#r||(this.#r=Array.from(new Set(this.schemas.map(e=>e.target.store))))}read(e){return this.tx("newReader",e)}write(e,t=!0){return this.tx("newWriter",e,!0,t)}export(){return this.tx("newReader",async(...e)=>{const t={};for(const r of e)t[r.storeName]=await r.export();return t})}import(e,t,r){return r||(r="addOrChangeMany"),this.tx("newWriter",async(...a)=>{const n={};a=Array.from(new Map(a.map(e=>[e.storeName,e.asStore(!0)])).values());for(const s of a){const{storeName:a}=s,i=e[a];i&&(n[a]=await s[r](i,t))}if(t)return n},!0)}async tx(e,t,r,a){const{idbPro:n,schemas:s}=this,{factory:i}=n.schema,o=await n.openNativeDb();try{const c=o.transaction(this.storeNames,r?"readwrite":"readonly");try{const a=s.map(({storeSchema:t,target:r})=>{let a=c.objectStore(r.store);return r.index&&(a=a.index(r.index)),i[e]({storeSchema:t,target:a},n)});if(r){const e=await t(...a);return c.commit(),e}return 
await t(...a)}catch(e){throw!1!==a&&c.abort(),e}}finally{o.close()}}}class v extends x{direction;query;writable;parser;endsWithNull;preSkip;startKey;startPrimaryKey;constructor(t,r,a){if(super(t,r),!a)return;const{parser:s}=a;this.direction=a.direction,this.query=a.query,this.writable=!!a.writable,this.endsWithNull=!!a.endsWithNull,this.preSkip=a.preSkip,this.startKey=a.startKey,this.startPrimaryKey=a.startPrimaryKey,s&&(this.parser=e.isFunction(s)?s:n[s])}async*[Symbol.asyncIterator](){const{parser:e,writable:r,endsWithNull:a}=this,{db:n,tx:s,target:o}=await this.tx(r);try{const n=await S(o,this);let c;if(e)for(;c=await t.requestDbResult(n);){const{control:t,value:r}=await e(c);if(t||(yield r),t===i)break;c.continue()}else{let e=!1;const r=()=>{e=!0};for(;!e&&(c=await t.requestDbResult(n));)yield{cursor:c,end:r}}r&&s?.commit(),a&&!c&&(yield null)}finally{n?.close()}}}function k(t,r,a,n){const s=e.isObject(t)?t:{};return e.isFunction(t)?s.fn=t:b(t)&&(s.query=t),e.isFunction(r)?s.fn=r:r&&(s.direction=r),a&&(s.direction=a),n?s.limit=n:s.limit||(s.limit=1e3),s.maxEmptyChecks||(s.maxEmptyChecks=2e4),s}async function P(t,r,a){r.size||(r.size=100),r.nextSkip||(r.nextSkip=0);const{query:n,direction:s,total:i,maxEmptyChecks:o,fn:c}=r;return await t.batchRead(async t=>(i||(r.total=await t.count({query:n,direction:s,maxEmptyChecks:o,fn:c}),r.pages=Math.ceil(r.total/r.size)),r.total<1?{info:e.deepFreeze(r),rows:[]}:c?await async function(e,t,r){t.maxEmptyChecks||(t.maxEmptyChecks=2e4);const{page:a,query:n,direction:s,nextSkip:i}=t,{keyPath:o}=e.storeSchema;return 1===a?await q(e,await S(e.target,{query:n,direction:s}),t,o):r&&i&&a-r.page===1?await q(e,await S(e.target,{query:n,direction:s,preSkip:i}),t,o):await q(e,await S(e.target,{query:n,direction:s}),t,o,!0)}(t,r,a):await async function(t,r){const{keyPath:a}=t.storeSchema,{page:n,query:s,direction:i,size:o,mapper:c}=r,u=(n-1)*o,d=await t.filter({query:s,preSkip:u,direction:i,limit:o,mapper:w(c,a,!0)});return 
r.nextSkip=u+d.length,{info:e.deepFreeze(r),rows:d}}(t,r)))}async function q({storeSchema:{defaultGetMapper:t}},r,a,n,s){const{page:i,size:o,total:c,maxEmptyChecks:u,fn:d}=a,h=w(a.mapper||t,n,!0),l=[],m=(i-1)*a.size;if(m>=c)return{info:e.deepFreeze(a),rows:[]};let f=0;return await new Promise(async(e,t)=>{let a=0,n=0;const i=async()=>{const{result:t}=r;if(!t)return e();let{value:n,primaryKey:s}=t;if(await d(n,s,f)?(a=0,l.push(h(n,s,f))):a++,l.length>=o||a>=u)return e();f++,t.continue()};r.onerror=()=>t(r.error),r.onsuccess=s?async()=>{const{result:t}=r;if(!t)return e();let{value:s,primaryKey:o}=t;if(await d(s,o,f)?(a=0,n++):a++,n>=m||a>=u)return f=a=0,r.onsuccess=i,void t.continue();t.continue(),f++}:i}),f&&(a.nextSkip+=f+1),{info:e.deepFreeze(a),rows:l}}class O extends x{all(r,a){const n={};return e.isNumber(a)&&(n.limit=a),b(r)?n.query=r:e.isObject(r)&&Object.assign(n,r),this.tx(!1,e=>t.requestDbResult(e.getAll(g(n.query),n.limit||1e3)))}async count(e,r,a){const n=k(e,r,a),{query:s,direction:i,fn:o}=n;if(!o)return await this.tx(!1,e=>t.requestDbResult(e.count(g(s))));const{maxEmptyChecks:c}=n,u=o;let d=0,h=0,l=0;return await this.openCursor({query:s,direction:i,fn:e=>{if(u(e.value,e.primaryKey,l++))d++,h=0;else if(++h>=c)return!1;e.continue()}}),d}get(e){return this.tx(!1,r=>t.requestDbResult(r.get(g(e))))}getMany(t,r){return this.batchRead(async a=>{const n=await e.asyncMap(t,e=>a.get(e));return r?n.filter(e=>e):n})}getRange(e,t){return this.forEach({query:e,direction:t},!0)}async getRangeMany(t,r,a){return(await this.batchRead(a=>e.asyncMap(t,e=>a.getRange(e,r)))).flat()}index(e,t){let{target:r}=this;return r instanceof IDBIndex&&(r=r.objectStore),r instanceof IDBObjectStore?(r=r.index(e),this.createOperator(r,t)):this.idbPro.store(r.store,e)}asStore(e){let{target:t}=this;if(t instanceof IDBObjectStore)return this;if(t instanceof IDBIndex)t=t.objectStore;else{if(!("index"in t))return this;t={store:t.store}}return this.createOperator(t,e)}batchRead(e){return 
m(this.target)?e(this):this.tx(!1,t=>e(this.idbPro.schema.factory.newReader({storeSchema:this.storeSchema,target:t},this.idbPro)))}iterator(t,r){const a={};return e.isString(r)&&(a.direction=r),b(t)?a.query=t:e.isObject(t)&&Object.assign(a,t),new v({storeSchema:this.storeSchema,target:this.target},this.idbPro,{...a,parser:"value"})}async filter(e,t,r,a){const n=k(e,t,r,a),{maxEmptyChecks:s,limit:u,fn:d}=n,{keyPath:h,defaultGetMapper:l}=this.storeSchema,m=w(n.mapper||l,h,!0);if(!d)return await this.forEach({...n,mapper:m,fn:(e,t,r)=>{if(r>=u-1)return{control:o}}},!0);let f=0;return this.forEach({...n,mapper:m,fn:(e,t,r,a)=>d(e,t,r)?(f=0,a.length>=u-1?{control:o}:void 0):++f>=s?{control:i}:{control:c}},!0)}async find(e,t,r){const[a]=await this.filter(e,t,r,1);return a}async page(e,t){const r={...e};return r.page=t||e?.page||1,P(this,r,e)}nextPage(e){return this.page(e,e.page+1)}export(e,t){const r=k(e,t),{keyPath:a,exportMapper:n,name:s,defaultGetMapper:i}=this.storeSchema;if(!a&&Array.isArray(n))throw new Error(`When store [ ${s} ] keyPath does not exist, exportMapper does not support string[].`);return r.direction="next",r.mapper=w(n||i,a,!0),r.limit=r.maxEmptyChecks=Number.MAX_SAFE_INTEGER,this.filter(r)}asMap(){let{target:e}=this;return e instanceof IDBIndex?e=e.objectStore:e instanceof IDBObjectStore||(e={store:e.store}),this.factory.newDbMap({target:e},this.idbPro)}createOperator(e,t){const{idbPro:r}=this,a=this.storeSchema,n={storeSchema:Object.isFrozen(a)?a:void 0,target:e};return t?this.factory.newWriter(n,r):this.factory.newReader(n,r)}}function I(e,t){if(!Array.isArray(e))return{[e]:t};const r={},a=e;for(let e=0;e<a.length;e++)r[a[e]]=t[e];return r}function R(e,t){if(!Array.isArray(e))return t[e];const r=[];for(const a of e){if(!t[a])return;r.push(t[a])}return r}function F(t,r){if(!r||!(r instanceof Object)||Array.isArray(r))return r;const{addedTimeField:a,updatedTimeField:n,updatedCountField:s,softDeletedField:i}=t;return 
r={...r},a?.name&&!e.isNumber(r[a.name])&&(r[a.name]=Date.now()),n?.name&&!e.isNumber(r[n.name])&&(r[n.name]=Date.now()),i?.name&&!e.isNumber(r[i.name])&&(r[i.name]=0),s?.name&&(r[s.name]=0),r}function M(e,t,r){if(!t||!(t instanceof Object)||Array.isArray(t))return t;const{updatedTimeField:a,updatedCountField:n}=e;return t={...r,...t},a.name&&(t[a.name]=Date.now()),n.name&&(t[n.name]=(t[n.name]||0)+1),t}class E extends O{add(e){return this.changeByPk({record:e,fn:async(e,r,a,n,s)=>{const{storeSchema:i}=this;return a=F(i,a),s?[{...a,...I(s,await t.requestDbResult(e.add(a)))},r]:[a,await t.requestDbResult(e.add(a,r))]}})}addMany(t,r){return this.batchWrite(a=>r?e.asyncMap(t,e=>a.add(e)):e.asyncForEach(t,e=>a.add(e)),!0)}addOrSkip(e){return this.batchWrite(async r=>{const{keyPath:a,defaultGetMapper:n}=r.storeSchema,{key:i,value:o}=a?{key:R(a,e),value:e}:s(e);if(i){const e=await t.requestDbResult(r.nativeStore.get(i));if(e)return w(n,a)?.(e,i)}if(r.target instanceof IDBIndex){const{keyPath:t}=r,a=R(t,o);if(!a)return r.add(e);let n=await r.find(a);if(n)return n}return r.add(e)})}addOrSkipMany(t,r){return this.batchWrite(a=>r?e.asyncMap(t,e=>a.addOrSkip(e)):e.asyncForEach(t,e=>a.addOrSkip(e)),!0)}replace(e){return this.changeByPk({record:e,getOld:!0,fn:async(e,r,a,n,s)=>{const{storeSchema:i}=this,{updatedTimeField:o,updatedCountField:c,addedTimeField:u}=i;return a=n?M(i,a,{[o.name]:n[o.name],[c.name]:n[c.name],[u.name]:n[u.name]}):F(i,a),s?[{...a,...I(s,await t.requestDbResult(e.put(a)))},r]:[a,await t.requestDbResult(e.put(a,r))]}})}replaceMany(t,r){return this.batchWrite(a=>r?e.asyncMap(t,e=>a.replace(e)):e.asyncForEach(t,e=>a.replace(e)),!0)}change(e,r){return this.changeByPk({record:e,getOld:!0,requiredOld:r,requiredPk:r,fn:async(e,r,a,n,s)=>{const{storeSchema:i}=this;if(n)return a=M(i,a,n),s?[{...a,...I(s,await t.requestDbResult(e.put(a)))},r]:[a,await 
t.requestDbResult(e.put(a,r))]}})}changeMany(t,r){const{returns:a,throwIfMissing:n}=e.isBoolean(r)?{returns:r}:r||{};return this.batchWrite(r=>a?e.asyncMap(t,e=>r.change(e,n)):e.asyncForEach(t,e=>r.change(e,n)),!0)}addOrChange(e){return this.changeByPk({record:e,getOld:!0,fn:async(e,r,a,n,s)=>{const{storeSchema:i}=this;return n?(a=M(i,a,n),s?[{...a,...I(s,await t.requestDbResult(e.put(a)))},r]:[a,await t.requestDbResult(e.put(a,r))]):(a=F(i,a),s?[{...a,...I(s,await t.requestDbResult(e.add(a)))},r]:[a,await t.requestDbResult(e.add(a,r))])}})}addOrChangeMany(t,r){return this.batchWrite(a=>r?e.asyncMap(t,e=>a.addOrChange(e)):e.asyncForEach(t,e=>a.addOrChange(e)),!0)}delete(e,t){return this.changeByPk({pk:e,getOld:t,fn:(e,r,a,n)=>{if(e.delete(r),t)return[n,r]}})}deleteMany(t,r){return this.batchWrite(a=>r?e.asyncMap(t,e=>a.delete(e,!0)):e.asyncForEach(t,e=>a.delete(e)),!0)}deleteRange(t,n){const{returns:s,physical:i,direction:o}=e.isBoolean(n)?{returns:n}:n||{},{name:c}=this.storeSchema.softDeletedField||{};return this.cursor({query:t,direction:o,fn:e=>i||!c?{modify:a}:(e[c]=1,{modify:r})},s)}deleteRangeMany(t,r){const a=e.isBoolean(r)?{returns:r}:r;return this.batchWrite(r=>a?.returns?e.asyncMap(t,e=>r.deleteRange(e,a)):e.asyncForEach(t,e=>r.deleteRange(e,a)),!0)}changeRange(e,t){let{direction:a="next",query:n,newValue:s}="newValue"in e?e:{newValue:e};if(n||(n=R(this.keyPath,s)),!n)throw new Error(`query is required:${JSON.stringify(e)}`);return this.cursor({query:n,direction:a,fn:e=>e instanceof Object?{modify:r,value:{...e,...s}}:{modify:r,value:s}},t)}changeRangeMany(t,r){return this.batchWrite(a=>r?e.asyncMap(t,e=>a.changeRange(e,r)):e.asyncForEach(t,e=>a.changeRange(e,r)),!0)}cursor(t,r){return(t=e.isFunction(t)?{fn:t}:t||{}).fn?r?this.cursorResult(t,!0):this.cursorVoid(t,!0):new v(this,this.idbPro)}batchWrite(e,t){const{target:r}=this;if(m(r))try{return e(this)}catch(e){throw!1!==t&&(r instanceof IDBIndex?r.objectStore:r).transaction.abort(),e}return 
this.tx(!0,t=>e(this.idbPro.schema.factory.newWriter({storeSchema:this.storeSchema,target:t},this.idbPro)),t)}changeByPk({pk:e,record:r,fn:a,requiredPk:n,getOld:i,requiredOld:o,saveMapper:c,getMapper:u}){const{storeSchema:d}=this,{keyPath:h,defaultSaveMapper:l,defaultGetMapper:m}=d;return r&&(c||l)&&(r=w(c||l,h)?.(r)),this.batchWrite(async c=>{let d,l=r;if(e)d=g(e);else if(h)d=R(h,r);else{const{key:e,value:t}=s(r);d=e,l=t}if(n&&!d)throw new Error(`key is required: ${JSON.stringify(r)}`);const f=d&&(i||o)?await t.requestDbResult(c.nativeStore.get(d)):void 0;if(o&&!f)throw new Error(`record not found: ${JSON.stringify(r)}`);const y=await a(c.nativeStore,d,l,f,h);if(y)return w(u||m,h)?.(y[0],y[1])})}}class j extends x{get size(){return this.tx(!1,e=>t.requestDbResult(e.count()))}delete(e){return this.tx(!0,async(t,r)=>{r.delete(e)})}batch(e){const{idbPro:t,storeSchema:r,factory:a}=this;return this.tx(!0,async(n,s)=>await e(a.newDbMap({storeSchema:r,target:s},t)))}asStore(e){const{factory:t}=this.idbPro.schema;return e?t.newWriter(this,this.idbPro):t.newReader({target:this.target},this.idbPro)}entries(){return new v({target:this.target},this.idbPro,{parser:"keyValue"})}async get(e,r){return await this.tx(!1,r=>t.requestDbResult(r.get(e)))||r}getMany(e){return this.tx(!1,async r=>{const a=[];for(const n of e)a.push(await t.requestDbResult(r.get(n)));return a})}async has(e){return!!await this.get(e)}keys(){return new v({storeSchema:this.storeSchema,target:this.target},this.idbPro,{parser:"key"})}set(e,r){return this.tx(!0,async(a,n)=>{await t.requestDbResult(n.put(r,e))})}setMany(e){return this.tx(!0,async(t,r)=>{for(const[t,a]of e)r.put(a,t)})}values(){return new v({storeSchema:this.storeSchema,target:this.target},this.idbPro,{parser:"value"})}}function N(e,t){if(e==t)return!0;if(typeof e!=typeof t)return!1;if(Array.isArray(e)&&Array.isArray(t)){if(e.length!==t.length)return!1;for(let r=0;r<e.length;r++)if(e[r]!==t[r])return!1;return!0}return!1}const 
B=({stores:e,schema:t})=>{const r=t.storeSchemas;let a="";for(const t of e){const e=r.find(e=>e.name===t.name);if(e){if(!N(t.keyPath,e.keyPath)){a=`store [ ${t.name} ] keyPath not equal,schema.keyPath:${e.keyPath},store.keyPath:${t.keyPath}[]`;break}if(!t.autoIncrement!=!e.autoIncrement){a=`store [ ${t.name} ] autoIncrement not equal`;break}}}return!a||`The existing database is inconsistent with the definition and cannot be corrected: ${a}`},C=async t=>{let r=B(t);return e.isString(r)||(r=K(t)),r},K=({stores:e,schema:t})=>{const r=t.storeSchemas,a=e.map(e=>e.name);let n="";const s=r.map(e=>e.name).filter(e=>!a.includes(e));if(s.length)n=`store [ ${s.join(",")} ] not exist`;else for(const t of e){const e=r.find(e=>e.name===t.name);if(e&&(n=$(t,Array.from(t.indexNames),e.indexSchemas),n))break}return!n||`The existing database Store index is inconsistent with the definition and requires a database version upgrade to be fixed: ${n}`};function $(e,t,r){if(t.length!==r.length)return`store [ ${e.name} ] index count not equal`;for(const a of t){const t=r.find(e=>e.name===a);if(!t)return`store [ ${e.name} ] index [ ${a} ] not exist`;const n=e.index(a);if(!t.unique!=!n.unique)return`store [ ${e.name} ] index [ ${a} ] unique not equal`;if(!t.multiEntry!=!n.multiEntry)return`store [ ${e.name} ] index [ ${a} ] multiEntry not equal`;if(!N(t.keyPath,n.keyPath))return`store [ ${e.name} ] index [ ${a} ] keyPath not equal`}return""}class A{upgradeContext;storeSchema;nativeStore;#a;constructor(e,t,r){this.upgradeContext=e,this.storeSchema=t,this.nativeStore=r}get writer(){return this.#a||(this.#a=this.upgradeContext.dbSchema.factory?.newWriter({target:this.nativeStore,storeSchema:this.storeSchema}))}add(e,t,r){return this.upgradeContext.versionIn(e)?this.writer.addMany(t,r):Promise.resolve()}addOrChange(e,t,r){return this.upgradeContext.versionIn(e)?this.writer.addOrChangeMany(t,r):Promise.resolve()}async call(e,t){if(this.upgradeContext.versionIn(e))return await 
t(this.writer,this.upgradeContext)}replace(e,t,r){return this.upgradeContext.versionIn(e)?this.writer.replaceMany(t,r):Promise.resolve()}}const T=Object.freeze({newDataOperators:(e,t)=>new D(t,e),newDbMap:(e,t)=>new j(e,t),newReader:(e,t)=>new O(e,t),newStoreUpgradeable:(e,t,r)=>new A(r,t,e),newWriter:(e,t)=>new E(e,t)});function V(t,r,a,n){const s=function(t,r,a){if(!1===t)return!1;if(e.isString(t))return{name:t};if(e.isObject(t)){const a=t;return e.isBoolean(a.name)&&(a.name=r),t}if(!0===t||a)return{name:r};return!1}(r,a,n);if(!s)return s;const i=s;if(!1!==i.isIndexed){i.isIndexed||(i.isIndexed=!0);const{name:e}=i,r=t.indexSchemas;r.some(t=>t===e||t.name===e)||r.push(e)}return i}function W(t,r){let a=e.isString(t)?{name:t}:t;return r&&(a={...r,...a}),a.indexSchemas||(a.indexSchemas=[]),function(e){Object.isFrozen(e)||(e.addedTimeField=V(e,e.addedTimeField,"added_at",!0),e.updatedTimeField=V(e,e.updatedTimeField,"updated_at",!0),e.updatedCountField=V(e,e.updatedCountField,"updated_count",!1),e.softDeletedField=V(e,e.softDeletedField,"deleted",!1))}(a),Object.isFrozen(a)||(a.indexSchemas=a.indexSchemas.map(z)),function(e){if(!e.keyPath&&e.defaultData?.length)for(const t of e.defaultData)if(!Array.isArray(t)&&!("value"in t))throw new Error(`When \`defaultData\` must contain \`value\` fields or be an array:${JSON.stringify(t)}`)}(a),a}function z(t){const r=e.isString(t)?{name:t}:t;return r.keyPath||(r.keyPath=r.name),r}const _=e=>{const{versionDiffValidate:t,versionSameValidate:r,factory:a}=e;return e.storeSchemas||(e.storeSchemas=[]),e.storeTemplate||(e.storeTemplate={...f}),!t&&!1!==t&&(e.versionDiffValidate=B),!r&&!1!==r&&(e.versionSameValidate=C),a?a!==T&&(e.factory={...T,...a}):e.factory=T,e.storeSchemas=e.storeSchemas.map(t=>W(t,e.storeTemplate)),e};function U(e,t){(function(e,t){return e.unique===t.unique&&e.multiEntry===t.multiEntry&&N(t.keyPath,e.keyPath)})(e.index(t.name),t)||(e.deleteIndex(t.name),J(e,t))}function 
J(e,t){try{e.createIndex(t.name,t.keyPath,{unique:t.unique,multiEntry:t.multiEntry})}catch{throw new Error(`store [ ${e.name} ] index [ ${t.name} ] create error: ${JSON.stringify(t)}`)}}function G(e,t){return e.database.objectStoreNames.contains(t.name)?function(e,t){const{indexSchemas:r}=e,a=t.indexNames,n=r.map(e=>e.name);for(const e of Array.from(a))n.includes(e)||t.deleteIndex(e);for(const e of r)a.contains(e.name)?U(t,e):J(t,e);return t}(t,e.transaction?.objectStore(t.name)):function(e,t){const r=t.createObjectStore(e.name,{keyPath:e.keyPath,autoIncrement:e.autoIncrement});for(const t of e.indexSchemas)J(r,t);return r}(t,e.database)}class Q{database;newVersion;oldVersion;dbSchema;transaction;#n={};constructor(e){this.database=e.database,this.newVersion=e.newVersion,this.oldVersion=e.oldVersion,this.dbSchema=e.dbSchema,this.transaction=e.transaction}deleteStoreIfExists(e){const t=this.database;t.objectStoreNames.contains(e)&&t.deleteObjectStore(e)}destroy(){try{e.destroyRecords(this.#n)}finally{for(const e of Object.keys(this.#n))delete this.#n[e]}}store(e){if(e in this.#n)return this.#n[e];const{factory:t}=this.dbSchema,{storeSchemas:r}=this.dbSchema,a=r.find(t=>t.name===e),n=G(this,a);return this.#n[e]=t.newStoreUpgradeable(n,a,this)}versionIn({oldMin:e,oldMax:t,newMax:r,newMin:a}){if(void 0===t&&void 0===r&&void 0===e&&void 0===a)throw new Error(`versionIn bounds must not be empty ${JSON.stringify({oldMax:t,newMax:r,oldMin:e,newMin:a})}`);if(t<e)throw new Error(`oldMax (${t}) cannot be less than oldMin (${e})`);if(r<a)throw new Error(`newMax (${r}) cannot be less than newMin (${a})`);const{oldVersion:n,newVersion:s}=this;return!(void 0!==e&&n<e||void 0!==t&&n>t||void 0!==a&&s<a)&&!(void 0!==r&&s>r)}}function L(t,r){let{store:a,index:n}=t;const{storeTemplate:s}=r,{storeSchemas:i=[]}=r,o=e.isString(a)?a:a.name,c=i.findIndex(e=>e===o||e.name===o),u=c>-1&&i[c];let 
d;d=e.isString(a)?u||o:!u||e.isString(u)||a===u?a:{...u,...a},(n||u?.indexSchemas?.length||a?.indexSchemas?.length)&&(e.isString(d)&&(d={name:d}),d.indexSchemas=function(t,r,a){a&&r.push(a);for(const a of r){const r=e.isString(a)?a:a.name,n=t.findIndex(e=>e===r||e.name===r);if(n>-1){const r=t[n];e.isString(r)?t[n]=a:e.isString(a)||(t[n]=Object.assign(r,a))}else t.push(a)}return t}(u.indexSchemas||[],a.indexSchemas||[],n));const h=W(d,s);c>-1?i[c]=h:i.push(h),r.storeSchemas=i;const l={store:o};return n&&(l.index=e.isString(n)?n:n.name),{target:l}}const X=Object.assign({name:"",addedTimeField:!1,autoIncrement:!1,indexSchemas:[],keyPath:void 0,softDeletedField:!1,updatedCountField:!1,updatedTimeField:!1});class H{static#s;#i;#o;#c={};constructor(t,r){this.#i=t=e.isString(t)?{name:t}:t,Array.isArray(t.storeSchemas)||(t.storeSchemas=[]),e.isObject(t.storeTemplate)||(t.storeTemplate=f),r&&(this.#o=!0)}static get defaultDb(){return this.#s||(this.#s=new H(t.DefaultDbName))}get initialized(){return Object.isFrozen(this.#i)}get schema(){return this.#i}get storeNames(){return Array.from(new Set(this.#i.storeSchemas.map(t=>e.isString(t)?t:t.name)))}get factory(){return this.#i.factory||(this.#i.factory=T)}static releaseDefaultDB(){this.#s=void 0}static async openExistDb(e){const{generateDbSchema:t}=await Promise.resolve().then(function(){return te});return new H(await t(e))}static store(e,t){return H.defaultDb.store(e,t)}static stores(e){return H.defaultDb.stores(e)}static map(e,t){return H.defaultDb.map(e,t)}async openNativeDb(){const r=this.#i=e.deepFreeze(this.initSchema());await this.#u();const{name:a,version:n}=r;return await t.openDb({name:a,version:n,onupgradeneeded:(t,a,n)=>async function(t,r,a,n){const{storeSchemas:s,beforeUpgrade:i,afterUpgrade:o,version:c}=t,{newVersion:u=c,oldVersion:d}=a,{transaction:h}=n,l=new Q({database:r,newVersion:u,oldVersion:d,dbSchema:t,transaction:h});try{const t=[];if(e.isFunction(i))try{await i(l)}catch(e){t.push(e)}for(const e of 
s.map(e=>e.name))try{l.store(e)}catch(e){t.push(e)}for(const{name:e,defaultData:r}of s)if(r)try{await l.store(e).add({oldMax:0},r)}catch(e){t.push(e)}for(const{name:e,versionData:r}of s)if(r)for(const{version:a,data:n,use:s="addOrChange"}of r)try{await l.store(e)[s](a,n)}catch(e){t.push(e)}for(const{name:e,storeDefined:r}of s)try{await(r?.(l.store(e)))}catch(e){t.push(e)}if(e.isFunction(o))try{await o(l)}catch(e){t.push(e)}if(!t.length)return;throw 1===t.length?t[0]:new AggregateError(t,"Database upgrade error")}finally{e.destroy(l)}}(r,t,a,n)})}store(e,t){const r=e.store?e:{store:e};return r.store||(r.store=e),r.index||(r.index=t),this.factory.newWriter(L(r,this.schema),this)}stores(t){const{schema:r}=this,a=t.map(t=>L(e.isString(t)?{store:t}:t,r));return this.factory.newDataOperators(a,this)}initSchema(){if(this.initialized)return this.#i;const{validateSchemaWithDefaults:e=_}=this.#i;return this.#i=e(this.#i)}async traceSchema(t){await e.logJson(this.schema,t)}map(r,a){const n=e.isString(r)?r:e.isString(a)?a:t.DefaultStorageStoreName;Array.isArray(r)&&(a=r);const{storeSchemas:s}=this.schema;return s.find(e=>e.name===n||e===n)||s.push({...X,name:n,defaultData:a}),this.factory.newDbMap({target:{store:n}},this)}export(){return this.stores(this.storeNames).export()}import(e,t,r){return this.stores(this.storeNames).import(e,t,r)}getStoreSchema(t){if(t in this.#c)return this.#c[t];const r=this.schema.storeSchemas.findIndex(e=>e===t||e.name===t);let a=this.schema.storeSchemas[r];return this.initialized?this.#c[t]=a:e.isString(a)&&(this.schema.storeSchemas[r]=a={name:a}),a}async#u(){let r=this.#o;if(void 0===r&&(r=this.#o=await async function(r){const{name:a,version:n}=r,s=await t.findExistDb(a);if(!s)return!0;const{versionDiffValidate:i,versionSameValidate:o}=r,c=await t.openDb(a);try{if(r.version<c.version)return"The existing database version is greater than the current version";const u=void 0===r.version||c.version===r.version?o:i;if(!u)return!0;const 
d=Array.from(c.objectStoreNames);if(d.length<1)return`The existing database [ ${a} ] is empty`;const h=await t.readTx(c,d),l=await u({schema:r,db:c,stores:h});if(e.isString(l)||n!==s.version)return l}finally{c?.close()}return!0}(this.#i)),!0===r)return!0;if(e.isString(r))throw new Error(r)}}function Y(e,t){const r=Array.from(e.indexNames).map(t=>e.index(t)),a=[],n=[];for(const{name:e,keyPath:t,unique:s,multiEntry:i}of r)Array.isArray(t)?n.push(...t):n.push(t),a.push({name:e,keyPath:t,unique:s,multiEntry:i});const s=new Set(n);return{indexSchemas:a,addedTimeField:Z(t.addedTimeField,s),updatedTimeField:Z(t.updatedTimeField,s),updatedCountField:Z(t.updatedCountField,s),softDeletedField:Z(t.softDeletedField,s)}}function Z(e,t){return!!t.has(e)&&{name:e,isIndexed:!1}}async function ee(r,a){if(!await t.findExistDb(r))throw new Error(`db [ ${r} ] not exist`);let{asString:n,specialFields:s=y,dataExportTarget:i}=a||{};!0===n&&(n=160),isNaN(n)||n<1&&(n=1);let o=await function(e,r){return t.openDb(e,async function(e){const t=Array.from(e.objectStoreNames),a=e.transaction(t,"readonly");try{return{name:e.name,version:e.version,storeSchemas:t.map(e=>function(e,t){const{name:r,keyPath:a,autoIncrement:n}=e;return{name:r,keyPath:a,autoIncrement:n,...Y(e,t)}}(a.objectStore(e),r))}}finally{a.abort()}})}(r,s);return i&&await async function(t,r){const a=await new H(e.copyObject(t),!0).export();for(const e of t.storeSchemas){const n=a[e.name];n?.length&&("defaultData"===r?e.defaultData=n:"versionData"===r&&(e.versionData||(e.versionData=[]),e.versionData.push({version:{oldMax:t.version},data:n})))}return t}(o,i),n?await e.toJson({rootData$:o,spaceEffectiveLength:n}):o}var 
te=Object.freeze({__proto__:null,generateDbSchema:ee});exports.Break=i,exports.Continue=c,exports.ContinueKey=u,exports.ContinuePrimaryKey=h,exports.DataOperationBase=x,exports.DataOperators=D,exports.DataReader=O,exports.DataWriter=E,exports.DbIterator=v,exports.DbIteratorParsers=n,exports.DbMap=j,exports.Delete=a,exports.Finished=o,exports.IDbPro=H,exports.NextKey=d,exports.NextPrimaryKey=l,exports.Save=r,exports.StoreUpgradeable=A,exports.UpgradeContext=Q,exports.dbMap=function(e,t){return H.defaultDb.map(e,t)},exports.dbStore=function(e,t){return H.defaultDb.store(e,t)},exports.dbStores=function(e){return H.defaultDb.stores(e)},exports.defaultSpecialFields=y,exports.defaultStoreSchemaTemplate=f,exports.generateDbSchema=ee,exports.isIDbQuery=b,exports.isNativeTarget=m,exports.parseDbNoneKeyPathRecord=s,exports.parseIDbQuery=g,exports.releaseDefaultDB=function(){H.releaseDefaultDB()},exports.validateSchemaWithDefaults=_,exports.versionDiffValidate=B,exports.versionSameValidate=C;
|
|
1
|
+
"use strict";
|
|
2
|
+
var gsBase = require("gs-base"), gsIdbBasic = require("gs-idb-basic");
|
|
3
|
+
// Sentinel tokens a cursor callback returns to request a write-back or a delete.
const Save = Symbol("save");
const Delete = Symbol("delete");

// Built-in result parsers for DbIterator: each maps a cursor to the shape
// the iterator should yield (primary key only, stored value only, or both).
const DbIteratorParsers = Object.freeze({
  key: (cursor) => ({ value: cursor.primaryKey }),
  value: (cursor) => ({ value: cursor.value }),
  keyValue: (cursor) => ({ value: [cursor.primaryKey, cursor.value] })
});
|
|
8
|
+
/**
 * Normalizes a record destined for a store without a keyPath into
 * `{ key, value }` form.
 *
 * Two input shapes are accepted:
 *  - a `[value, key]` tuple (value first, key second);
 *  - an object that carries a `value` field and, optionally, a `key` field.
 *
 * @param {Array|Object} record - tuple or `{ key?, value }` object.
 * @returns {{key: *, value: *}} the normalized key/value pair (key may be undefined).
 * @throws {Error} when the object form carries no `value` field.
 */
function parseDbNoneKeyPathRecord(record) {
  // Tuple form: element 0 is the stored value, element 1 is its key.
  if (Array.isArray(record)) {
    const [value, key] = record;
    return { key, value };
  }
  if ("value" in record)
    return { key: record.key, value: record.value };
  throw new Error(`not include value in invalid DBRecord\uFF1A${JSON.stringify(record)}`);
}
|
|
17
|
+
// Flow-control sentinels a cursor-iteration callback may return:
// stop iterating, stop but keep the accumulated result, advance normally,
// or jump to an explicit key / primary key (with or without collecting).
const Break = Symbol("break");
const Finished = Symbol("finished");
const Continue = Symbol("continue");
const ContinueKey = Symbol("continue key");
const NextKey = Symbol("next key");
const ContinuePrimaryKey = Symbol("continue primary key");
const NextPrimaryKey = Symbol("next primary key");
|
|
18
|
+
/**
 * Reports whether `target` is a live IndexedDB handle (an IDBObjectStore or
 * an IDBIndex) as opposed to a plain descriptor object naming a store/index.
 *
 * @param {*} target - candidate store or index reference.
 * @returns {boolean} true only for native IndexedDB instances.
 */
function isNativeTarget(target) {
  if (target instanceof IDBObjectStore)
    return true;
  return target instanceof IDBIndex;
}
|
|
21
|
+
// Baseline store schema applied when a store declares no explicit options:
// auto-incrementing "id" primary key plus automatic added/updated timestamps.
const defaultStoreSchemaTemplate = Object.freeze({
  keyPath: "id",
  autoIncrement: true,
  addedTimeField: true,
  updatedTimeField: true
});
// Default column names used for the "special" bookkeeping fields.
const defaultSpecialFields = Object.freeze({
  addedTimeField: "added_at",
  softDeletedField: "deleted",
  updatedCountField: "updated_count",
  updatedTimeField: "updated_at"
});
// Coerces Date bounds to epoch millis so range endpoints can be compared
// numerically; every other value passes through unchanged.
const toNum = (v) => {
  return v instanceof Date ? v.getTime() : v;
};
|
|
32
|
+
/**
 * Converts a `{lt|lte|gt|gte}` bounds object into an IDBKeyRange.
 * Values that are already valid IDB queries (per gsIdbBasic.isDbQueryOrNull)
 * pass through untouched. Returns undefined when no bound key is present.
 * Date bounds are compared via toNum (epoch millis) for validation only;
 * the original values are what get handed to IDBKeyRange.
 * @throws {Error} when the lower bound compares greater than the upper bound.
 */
function parseIDbQuery(query) {
  if (gsIdbBasic.isDbQueryOrNull(query))
    return query;
  const range = query;
  // The four two-bound combinations are checked first; exclusivity flags
  // (third/fourth args) follow IDBKeyRange.bound(lower, upper, lowerOpen, upperOpen).
  if ("lt" in range && "gt" in range) {
    if (toNum(range.gt) > toNum(range.lt))
      throw new Error(`Invalid IDBRange: gt (${range.gt}) cannot be greater than lt (${range.lt})`);
    return IDBKeyRange.bound(range.gt, range.lt, !0, !0);
  }
  if ("lt" in range && "gte" in range) {
    if (toNum(range.gte) > toNum(range.lt))
      throw new Error(`Invalid IDBRange: gte (${range.gte}) cannot be greater than lt (${range.lt})`);
    return IDBKeyRange.bound(range.gte, range.lt, !1, !0);
  }
  if ("lte" in range && "gt" in range) {
    if (toNum(range.gt) > toNum(range.lte))
      throw new Error(`Invalid IDBRange: gt (${range.gt}) cannot be greater than lte (${range.lte})`);
    return IDBKeyRange.bound(range.gt, range.lte, !0, !1);
  }
  if ("lte" in range && "gte" in range) {
    if (toNum(range.gte) > toNum(range.lte))
      throw new Error(`Invalid IDBRange: gte (${range.gte}) cannot be greater than lte (${range.lte})`);
    return IDBKeyRange.bound(range.gte, range.lte, !1, !1);
  }
  // Single-bound cases: open/half-open rays.
  if ("lt" in range)
    return IDBKeyRange.upperBound(range.lt, !0);
  if ("lte" in range)
    return IDBKeyRange.upperBound(range.lte, !1);
  if ("gt" in range)
    return IDBKeyRange.lowerBound(range.gt, !0);
  if ("gte" in range)
    return IDBKeyRange.lowerBound(range.gte, !1);
  // NOTE(review): array (compound) bounds fall through toNum unchanged, so the
  // `>` validation coerces them to strings — presumably only scalar/Date
  // bounds are intended here; verify against callers.
}
|
|
65
|
+
/**
 * True when `query` can be turned into a usable IDB query / key range by
 * parseIDbQuery (i.e. the parse result is neither null nor undefined).
 */
function isIDbQuery(query) {
  const parsed = parseIDbQuery(query);
  return parsed !== null && parsed !== undefined;
}
|
|
68
|
+
/**
 * Opens a cursor over `target` (store or index) honoring positioning options:
 * `preSkip` advances past the first N rows, `startKey`/`startPrimaryKey`
 * reposition the cursor via continue()/continuePrimaryKey(). Resolves to the
 * underlying IDBRequest so callers can attach their own onsuccess handlers.
 * Default direction is "prev" (newest-first).
 * NOTE(review): the trailing `fn` parameter is never read here — presumably a
 * leftover from an earlier signature; confirm before removing.
 * NOTE(review): when both preSkip and startKey are set, both repositioning
 * awaits run against the same request in sequence — verify that combination
 * is intended.
 */
async function openCursor(target, arg, fn) {
  const { query, direction = "prev", preSkip, startKey, startPrimaryKey } = arg, request = target.openCursor(parseIDbQuery(query), direction);
  return preSkip && (await gsIdbBasic.requestDbResult(request)).advance(preSkip), startKey && (startPrimaryKey ? (await gsIdbBasic.requestDbResult(request)).continuePrimaryKey(startKey, startPrimaryKey) : (await gsIdbBasic.requestDbResult(request)).continue(startKey)), request;
}
|
|
72
|
+
/**
 * Resolves a row-mapper into a callable. Callers always invoke the result as
 * `(value, primaryKey, index)`.
 * - string[]  -> copies just those fields off the value (gsBase.copyFields);
 * - function  -> used as-is;
 * - otherwise -> identity when the store has a keyPath, a `[value, key]`
 *   tuple when `useArrayRecord` is set, else a `{ key, value }` DBRecord.
 */
function getMapperFn(mapper, keyPath, useArrayRecord) {
  if (Array.isArray(mapper))
    return (v) => gsBase.copyFields({}, v, mapper);
  if (gsBase.isFunction(mapper))
    return mapper;
  if (keyPath)
    return (v) => v;
  if (useArrayRecord)
    return (v, k) => [v, k];
  // Fix: callers pass (value, primaryKey, ...) — the previous fallback bound
  // the first argument to `key`, producing a swapped {key: value, value: pk}
  // pair. The DBRecord object shape (see parseDbNoneKeyPathRecord) is
  // {key: primaryKey, value: payload}.
  return (value, key) => ({ key, value });
}
|
|
75
|
+
/**
 * Shared base for readers/writers/iterators over a single store or index.
 * `target` is either a live IDBObjectStore/IDBIndex or a plain
 * `{ store, index? }` descriptor; when native, `tx` is rebound to
 * #nativeOperation so operations reuse the existing transaction instead of
 * opening a new connection per call.
 */
class DataOperationBase {
  idbPro;      // owning IDbPro instance
  target;      // native handle or { store, index? } descriptor
  #storeSchema; // cached schema (only cached once frozen)
  constructor(schema, db) {
    this.idbPro = db, this.#storeSchema = schema.storeSchema, this.target = schema.target, isNativeTarget(schema.target) && (this.tx = this.#nativeOperation);
  }
  /** Store name, from the native handle when available, else the descriptor. */
  get storeName() {
    return this.nativeStore?.name || this.target.store;
  }
  /** Lazily resolved store schema; cached only after the db freezes it. */
  get storeSchema() {
    if (this.#storeSchema)
      return this.#storeSchema;
    const storeSchema = this.idbPro.getStoreSchema(this.storeName);
    return Object.isFrozen(storeSchema) && (this.#storeSchema = storeSchema), storeSchema;
  }
  get factory() {
    return this.idbPro.factory;
  }
  /** Native IDBObjectStore behind `target`, or undefined for descriptors. */
  get nativeStore() {
    const { target } = this;
    if (target instanceof IDBObjectStore)
      return target;
    if (target instanceof IDBIndex)
      return target.objectStore;
  }
  /** keyPath of the targeted store/index, resolved via schema when non-native. */
  get keyPath() {
    const { target } = this;
    if (isNativeTarget(target))
      return target.keyPath;
    const { storeSchema } = this, { index } = target;
    return index ? storeSchema.indexSchemas.find((i) => i.name === index)?.keyPath : storeSchema.keyPath;
  }
  /**
   * Iterates rows with a callback (or `{fn, query, direction, ...}` options).
   * When `returns` is truthy, collects mapped rows via cursorResult; otherwise
   * runs cursorVoid for side effects only.
   */
  async forEach(arg, returns) {
    return arg = gsBase.isFunction(arg) ? { fn: arg } : arg, returns ? this.cursorResult(arg, !1) : this.cursorVoid(arg, !1);
  }
  /**
   * Opens a fresh connection + single-store transaction. Without `fn`,
   * returns the frozen { db, tx, nativeStore, target } bundle and the CALLER
   * owns closing `db`. With `fn`, runs it inside the tx (committing on
   * success when writable, aborting on error unless rollbackOnError === false)
   * and always closes the connection.
   */
  async tx(writable, fn, rollbackOnError) {
    let { target } = this;
    const { store, index } = target, db = await this.idbPro.openNativeDb();
    let tx, nativeStore;
    try {
      tx = db.transaction(store, writable === !0 ? "readwrite" : "readonly"), target = nativeStore = tx.objectStore(store), index && (target = target.index(index));
    } catch (e) {
      // Opening the transaction failed: release the connection before rethrowing.
      throw db.close(), e;
    }
    if (!fn)
      return Object.freeze({ db, tx, nativeStore, target });
    try {
      if (writable === !0) {
        const result = await fn(target, nativeStore);
        return tx.commit(), result;
      }
      return fn(target);
    } catch (e) {
      throw rollbackOnError !== !1 && tx.abort(), e;
    } finally {
      db.close();
    }
  }
  /**
   * Pumps a cursor, invoking `arg.fn(cursor)` per row; the promise resolves
   * when the callback returns false or the cursor is exhausted.
   * NOTE(review): `new Promise(async ...)` means callback rejections other
   * than request.onerror are not surfaced — confirm acceptable.
   */
  openCursor(arg, writable) {
    return this.tx(writable, (store) => new Promise(async (resolve, reject) => {
      const { fn } = arg, request = await openCursor(store, arg);
      request.onsuccess = async () => {
        request.result ? await fn(request.result) === !1 && resolve() : resolve();
      }, request.onerror = () => reject(request.error);
    }));
  }
  /**
   * Cursor loop with no collected result. `fn(value, primaryKey, i)` may
   * return a control symbol or `{ control, key, primaryKey, modify, value }`;
   * Save/Delete modifications are applied only when `writable` is set.
   * NOTE(review): the transaction itself is always opened writable here
   * (`!0` passed to openCursor) even for read-only iteration — confirm intended.
   */
  cursorVoid({ query, direction, preSkip, startKey, startPrimaryKey, fn }, writable) {
    let i = 0;
    return this.openCursor({
      query,
      direction,
      preSkip,
      startKey,
      startPrimaryKey,
      fn: async (cursor) => {
        const { value: ov, primaryKey: op } = cursor, result = await fn(ov, op, i++), { control, key, primaryKey, modify, value } = gsBase.isObject(result) ? result : { control: result };
        switch (writable && (modify === Save ? cursor.update(value || ov) : modify === Delete && cursor.delete()), control) {
          case Break:
            return !1;
          case ContinueKey:
            cursor.continue(key);
            break;
          case ContinuePrimaryKey:
            cursor.continuePrimaryKey(key, primaryKey);
            break;
          default:
            cursor.continue();
        }
      }
    }, !0);
  }
  /**
   * Cursor loop that collects mapped rows. A row is pushed when the callback
   * returns no control, Finished, NextKey or NextPrimaryKey; Break/Finished
   * stop the loop; *Key controls jump the cursor. Rows are mapped via the
   * explicit mapper or the schema's defaultGetMapper.
   */
  async cursorResult({ query, direction, preSkip, startKey, startPrimaryKey, fn, mapper: mpr }, writable) {
    const { keyPath, defaultGetMapper } = this.storeSchema, mapper = getMapperFn(mpr || defaultGetMapper, keyPath), results = [];
    let i = 0;
    return await this.openCursor({
      query,
      direction,
      preSkip,
      startKey,
      startPrimaryKey,
      fn: async (cursor) => {
        const { value: ov, primaryKey: op } = cursor, { control, value, key, primaryKey, modify } = await fn?.(ov, op, i, results) || {};
        switch (writable && (modify === Save ? cursor.update(value || ov) : modify === Delete && cursor.delete()), (!control || control === Finished || control === NextKey || control === NextPrimaryKey) && results.push(mapper(value || ov, primaryKey || op, i)), control) {
          case Break:
          case Finished:
            return !1;
          case NextKey:
          case ContinueKey:
            cursor.continue(key);
            break;
          case NextPrimaryKey:
          case ContinuePrimaryKey:
            cursor.continuePrimaryKey(key, primaryKey);
            break;
          default:
            cursor.continue();
        }
        i++;
      }
    }, writable), results;
  }
  // noinspection JSUnusedLocalSymbols
  // Replacement for tx() when `target` is already a live handle: reuse the
  // existing transaction instead of opening a new connection (the `writable`
  // argument is ignored because the ambient transaction's mode applies).
  #nativeOperation(writable, fn) {
    const { target } = this, nativeStore = target instanceof IDBObjectStore ? target : target.objectStore;
    return fn ? fn(target, nativeStore) : Object.freeze({ nativeStore, target });
  }
}
|
|
203
|
+
/**
 * Runs one transaction that spans several stores/indexes at once: each entry
 * in `schemas` becomes a reader or writer bound to the shared transaction
 * and is passed positionally to the user callback.
 */
class DataOperators {
  idbPro;
  schemas;       // [{ storeSchema, target: { store, index? } }, ...]
  #_storeNames;  // memoized unique store-name list for db.transaction()
  constructor(idbPro, schemas) {
    this.idbPro = idbPro, this.schemas = schemas;
  }
  /** Distinct store names covered by `schemas` (computed once). */
  get storeNames() {
    return this.#_storeNames || (this.#_storeNames = Array.from(new Set(this.schemas.map((s) => s.target.store))));
  }
  /** Read-only multi-store transaction; fn receives one DataReader per schema. */
  read(fn) {
    return this.tx("newReader", fn);
  }
  /** Readwrite multi-store transaction; fn receives one DataWriter per schema. */
  write(fn, rollbackOnError = !0) {
    return this.tx("newWriter", fn, !0, rollbackOnError);
  }
  /** Dumps every covered store via its reader's export(), keyed by store name. */
  export() {
    return this.tx("newReader", async (...stores) => {
      const result = {};
      for (const store of stores)
        result[store.storeName] = await store.export();
      return result;
    });
  }
  /**
   * Bulk-imports `data` (rows keyed by store name) inside one readwrite tx.
   * `use` names the writer method applied per store (default addOrChangeMany).
   * Index-targeting writers are first collapsed to store-level writers.
   */
  import(data, returns, use) {
    return use || (use = "addOrChangeMany"), this.tx("newWriter", async (...stores) => {
      const result = {};
      // De-dup by store name so each store is written through exactly once.
      stores = Array.from(new Map(stores.map((s) => [s.storeName, s.asStore(!0)])).values());
      for (const store of stores) {
        const { storeName } = store, rows = data[storeName];
        rows && (result[storeName] = await store[use](rows, returns));
      }
      if (returns)
        return result;
    }, !0);
  }
  /**
   * Core plumbing: open connection, open one transaction over all storeNames,
   * build a factory operator (`method` = "newReader"/"newWriter") per schema
   * bound to that transaction, run fn, abort on error unless
   * rollbackOnError === false, and always close the connection.
   */
  async tx(method, fn, writable, rollbackOnError) {
    const { idbPro, schemas } = this, { factory } = idbPro.schema, db = await idbPro.openNativeDb();
    try {
      const tx = db.transaction(this.storeNames, writable ? "readwrite" : "readonly");
      try {
        const stores = schemas.map(({ storeSchema, target: info }) => {
          let target = tx.objectStore(info.store);
          return info.index && (target = target.index(info.index)), factory[method]({ storeSchema, target }, idbPro);
        });
        return await fn(...stores);
      } catch (e) {
        throw rollbackOnError !== !1 && tx.abort(), e;
      }
    } finally {
      db.close();
    }
  }
}
|
|
257
|
+
/**
 * Async-iterable cursor over a store/index. With a `parser` option (function
 * or a DbIteratorParsers name) each iteration yields the parsed value; without
 * one it yields raw `{ cursor, end }` pairs where calling `end()` stops the
 * loop. With `endsWithNull` a trailing `null` is yielded when the cursor runs
 * to exhaustion.
 */
class DbIterator extends DataOperationBase {
  direction;
  query;
  writable;        // open the tx readwrite so cursor.update/delete work
  parser;          // (cursor) => { control?, value } or undefined for raw mode
  endsWithNull;    // yield null after natural exhaustion
  preSkip;         // rows to advance() past before the first yield
  startKey;        // reposition via continue()/continuePrimaryKey()
  startPrimaryKey;
  constructor(schema, db, option) {
    if (super(schema, db), !option)
      return;
    const { parser } = option;
    this.direction = option.direction, this.query = option.query, this.writable = !!option.writable, this.endsWithNull = !!option.endsWithNull, this.preSkip = option.preSkip, this.startKey = option.startKey, this.startPrimaryKey = option.startPrimaryKey, parser && (this.parser = gsBase.isFunction(parser) ? parser : DbIteratorParsers[parser]);
  }
  async *[Symbol.asyncIterator]() {
    // tx(writable) without fn returns the raw bundle; the generator owns the
    // connection and closes it in finally even on early consumer break.
    const { parser, writable, endsWithNull } = this, { db, tx, target } = await this.tx(writable);
    try {
      const request = await openCursor(target, this);
      let cursor;
      if (parser)
        // Parsed mode: truthy control suppresses the yield; Break stops.
        for (; cursor = await gsIdbBasic.requestDbResult(request); ) {
          const { control, value } = await parser(cursor);
          if (control || (yield value), control === Break)
            break;
          cursor.continue();
        }
      else {
        // Raw mode: hand the cursor to the consumer, who advances it and may
        // call end() to terminate the loop.
        let ended = !1;
        const end = () => {
          ended = !0;
        };
        for (; !ended && (cursor = await gsIdbBasic.requestDbResult(request)); )
          yield { cursor, end };
      }
      // `cursor` is null only on natural exhaustion.
      writable && tx?.commit(), endsWithNull && !cursor && (yield null);
    } finally {
      db?.close();
    }
  }
}
|
|
298
|
+
/**
 * Normalizes the overloaded (query?, fn?, direction?, limit?) argument styles
 * of count/filter/export into one options object. arg1 may be an options
 * object, a predicate, or a query; arg2 a predicate or direction; arg3 a
 * direction. Defaults: limit 1000, maxEmptyChecks 20000 (cap on consecutive
 * predicate misses before giving up).
 * NOTE(review): when arg1 is an options object it is mutated in place —
 * callers that reuse the object see the defaults written back.
 */
function parseFilterArg(arg1, arg2, arg3, limit) {
  const args = gsBase.isObject(arg1) ? arg1 : {};
  return gsBase.isFunction(arg1) ? args.fn = arg1 : isIDbQuery(arg1) && (args.query = arg1), gsBase.isFunction(arg2) ? args.fn = arg2 : arg2 && (args.direction = arg2), arg3 && (args.direction = arg3), limit ? args.limit = limit : args.limit || (args.limit = 1e3), args.maxEmptyChecks || (args.maxEmptyChecks = 2e4), args;
}
|
|
302
|
+
/**
 * Entry point for paged queries. Fills in defaults (size 100, nextSkip 0),
 * lazily computes total/pages via count() on first use, then delegates to the
 * predicate-based (queryFnPage) or plain-skip (queryNoneFnPage) strategy.
 * All work runs inside one batchRead so count and page share a transaction
 * when possible. Returns { info: frozenPageInfo, rows }.
 */
async function queryPage(anyReader, info, arg) {
  info.size || (info.size = 100), info.nextSkip || (info.nextSkip = 0);
  const { query, direction, total, maxEmptyChecks, fn } = info;
  return await anyReader.batchRead(async (nativeReader) => (total || (info.total = await nativeReader.count({ query, direction, maxEmptyChecks, fn }), info.pages = Math.ceil(info.total / info.size)), info.total < 1 ? { info: gsBase.deepFreeze(info), rows: [] } : fn ? await queryFnPage(nativeReader, info, arg) : await queryNoneFnPage(nativeReader, info)));
}
|
|
307
|
+
/**
 * Page fetch without a row predicate: skip (page-1)*size rows via cursor
 * preSkip, read `size` rows with filter(), record nextSkip for the following
 * page, and return { info: frozen, rows } mapped as [value, key] tuples
 * (or through info.mapper when provided).
 */
async function queryNoneFnPage(nativeReader, info) {
  const { keyPath } = nativeReader.storeSchema, { page, query, direction, size, mapper } = info, preSkip = (page - 1) * size, rows = await nativeReader.filter({
    query,
    preSkip,
    direction,
    limit: size,
    mapper: getMapperFn(mapper, keyPath, !0)
  });
  return info.nextSkip = preSkip + rows.length, {
    info: gsBase.deepFreeze(info),
    rows
  };
}
|
|
320
|
+
/**
 * Page fetch WITH a row predicate. Three cases:
 * - page 1: walk from the start, collecting matches;
 * - sequential next page (arg.page + 1 with a recorded nextSkip): resume the
 *   cursor at nextSkip, no re-scan needed;
 * - random page jump: re-scan from the start, skipping (page-1)*size MATCHES
 *   first (hasSkip = true in queryFnRow).
 */
async function queryFnPage(reader, info, arg) {
  info.maxEmptyChecks || (info.maxEmptyChecks = 2e4);
  const { page, query, direction, nextSkip } = info, { keyPath } = reader.storeSchema;
  return page === 1 ? await queryFnRow(reader, await openCursor(reader.target, { query, direction }), info, keyPath) : arg && nextSkip && page - arg.page === 1 ? await queryFnRow(reader, await openCursor(reader.target, {
    query,
    direction,
    preSkip: nextSkip
  }), info, keyPath) : await queryFnRow(reader, await openCursor(reader.target, { query, direction }), info, keyPath, !0);
}
|
|
329
|
+
/**
 * Drives one cursor through a predicate-filtered page. Two-phase state
 * machine on request.onsuccess:
 * - skip phase (hasSkip only): count predicate MATCHES until (page-1)*size
 *   have been skipped, then swap the handler to the collect phase;
 * - collect phase (rowFn): push mapped matches until `size` rows are
 *   gathered, the cursor is exhausted, or maxEmptyChecks consecutive misses
 *   occur (runaway-scan guard, `ept`).
 * Afterwards nextSkip is advanced by the rows examined so a sequential
 * nextPage() can resume without re-scanning.
 * NOTE(review): `new Promise(async ...)` — predicate rejections are not
 * routed to reject(); only request.onerror is. Confirm acceptable.
 */
async function queryFnRow({ storeSchema: { defaultGetMapper } }, request, info, keyPath, hasSkip) {
  const { page, size, total, maxEmptyChecks, fn } = info, mapper = getMapperFn(info.mapper || defaultGetMapper, keyPath, !0), rows = [], preSkip = (page - 1) * info.size;
  if (preSkip >= total)
    return { info: gsBase.deepFreeze(info), rows: [] };
  let i = 0;
  return await new Promise(async (resolve, reject) => {
    let ept = 0, skipped = 0;
    // Collect phase: gather up to `size` predicate matches.
    const rowFn = async () => {
      const { result: cursor } = request;
      if (!cursor)
        return resolve();
      let { value, primaryKey } = cursor;
      if (await fn(value, primaryKey, i) ? (ept = 0, rows.push(mapper(value, primaryKey, i))) : ept++, rows.length >= size || ept >= maxEmptyChecks)
        return resolve();
      i++, cursor.continue();
    };
    request.onerror = () => reject(request.error), hasSkip ? request.onsuccess = async () => {
      // Skip phase: count matches until the target page's offset is reached,
      // then hand over to rowFn with counters reset.
      const { result: cursor } = request;
      if (!cursor)
        return resolve();
      let { value, primaryKey } = cursor;
      if (await fn(value, primaryKey, i) ? (ept = 0, skipped++) : ept++, skipped >= preSkip || ept >= maxEmptyChecks) {
        i = ept = 0, request.onsuccess = rowFn, cursor.continue();
        return;
      }
      cursor.continue(), i++;
    } : request.onsuccess = rowFn;
  }), i && (info.nextSkip += i + 1), {
    info: gsBase.deepFreeze(info),
    rows
  };
}
|
|
361
|
+
/**
 * Read-side API over one store or index: bulk fetch, key lookup, counting,
 * filtered scans, paging, iteration and export. All transactional plumbing
 * comes from DataOperationBase.tx/openCursor.
 */
class DataReader extends DataOperationBase {
  /**
   * getAll() over an optional query/limit. `query` may be a raw IDB query, a
   * `{lt|lte|gt|gte}` bounds object, or a `{ query, limit }` options object;
   * a numeric second argument overrides the limit (default 1000).
   */
  all(query, limit) {
    const param = {};
    return gsBase.isNumber(limit) && (param.limit = limit), isIDbQuery(query) ? param.query = query : gsBase.isObject(query) && Object.assign(param, query), this.tx(!1, (store) => gsIdbBasic.requestDbResult(store.getAll(parseIDbQuery(param.query), param.limit || 1e3)));
  }
  /**
   * Counts rows. Without a predicate this is a native store.count(); with one
   * it walks a cursor, counting matches and bailing after maxEmptyChecks
   * consecutive misses.
   */
  async count(arg1, arg2, arg3) {
    const args = parseFilterArg(arg1, arg2, arg3), { query, direction, fn } = args;
    if (!fn)
      return await this.tx(!1, (store) => gsIdbBasic.requestDbResult(store.count(parseIDbQuery(query))));
    const { maxEmptyChecks } = args, findFn = fn;
    let count = 0, ept = 0, i = 0;
    return await this.openCursor({
      query,
      direction,
      fn: (cursor) => {
        if (findFn(cursor.value, cursor.primaryKey, i++))
          count++, ept = 0;
        else if (++ept >= maxEmptyChecks)
          return !1;
        cursor.continue();
      }
    }), count;
  }
  /** Single-record fetch by key or bounds object. */
  get(key) {
    return this.tx(!1, (store) => gsIdbBasic.requestDbResult(store.get(parseIDbQuery(key))));
  }
  /** Fetches each key within one shared transaction; optionally drops misses. */
  getMany(keys, excludeEmpty) {
    return this.batchRead(async (reader) => {
      const rows = await gsBase.asyncMap(keys, (k) => reader.get(k));
      return excludeEmpty ? rows.filter((v) => v) : rows;
    });
  }
  /** All records matching one key/range, collected via cursorResult. */
  getRange(query, direction) {
    return this.forEach({ query, direction }, !0);
  }
  /**
   * getRange for each key in one transaction, flattened.
   * NOTE(review): `excludeEmpty` is accepted but never used here — confirm
   * whether empty ranges were meant to be filtered out.
   */
  async getRangeMany(keys, direction, excludeEmpty) {
    return (await this.batchRead((w) => gsBase.asyncMap(keys, (v) => w.getRange(v, direction)))).flat();
  }
  /**
   * Returns an operator over the named index: native targets switch index on
   * the same store/transaction; descriptor targets delegate to idbPro.store().
   */
  index(name, writable) {
    let { target } = this;
    return target instanceof IDBIndex && (target = target.objectStore), target instanceof IDBObjectStore ? (target = target.index(name), this.createOperator(target, writable)) : this.idbPro.store(target.store, name);
  }
  /**
   * Collapses an index-targeting operator to its underlying store; returns
   * `this` unchanged when already store-targeted.
   */
  asStore(writable) {
    let { target } = this;
    if (target instanceof IDBObjectStore)
      return this;
    if (target instanceof IDBIndex)
      target = target.objectStore;
    else {
      if (!("index" in target))
        return this;
      target = { store: target.store };
    }
    return this.createOperator(target, writable);
  }
  /**
   * Runs fn with a transaction-bound reader so multiple reads share one tx;
   * a no-op passthrough when this operator is already native (tx-bound).
   */
  batchRead(fn) {
    return isNativeTarget(this.target) ? fn(this) : this.tx(!1, (store) => fn(this.idbPro.schema.factory.newReader({
      storeSchema: this.storeSchema,
      target: store
    }, this.idbPro)));
  }
  /** Async iterator over record values (DbIterator with the "value" parser). */
  iterator(query, direction) {
    const arg = {};
    return gsBase.isString(direction) && (arg.direction = direction), isIDbQuery(query) ? arg.query = query : gsBase.isObject(query) && Object.assign(arg, query), new DbIterator({ storeSchema: this.storeSchema, target: this.target }, this.idbPro, {
      ...arg,
      parser: "value"
    });
  }
  /**
   * Predicate scan collecting up to `limit` rows (overloads resolved by
   * parseFilterArg). Without a predicate, only the limit cutoff applies;
   * with one, non-matching rows are skipped via Continue and the scan aborts
   * after maxEmptyChecks consecutive misses.
   */
  async filter(arg1, arg2, arg3, arg4) {
    const args = parseFilterArg(arg1, arg2, arg3, arg4), { maxEmptyChecks, limit, fn } = args, { keyPath, defaultGetMapper } = this.storeSchema, mapper = getMapperFn(args.mapper || defaultGetMapper, keyPath, !0);
    if (!fn)
      return await this.forEach({
        ...args,
        mapper,
        fn: (value, k, i) => {
          if (i >= limit - 1)
            return { control: Finished };
        }
      }, !0);
    let ept = 0;
    return this.forEach({
      ...args,
      mapper,
      fn: (value, k, i, results) => {
        if (fn(value, k, i))
          ept = 0;
        else
          return ++ept >= maxEmptyChecks ? { control: Break } : { control: Continue };
        if (results.length >= limit - 1)
          return { control: Finished };
      }
    }, !0);
  }
  /** First row matching the predicate/query (filter with limit 1). */
  async find(query, fn, direction) {
    const [value] = await this.filter(query, fn, direction, 1);
    return value;
  }
  /**
   * Paged query; `arg` carries page options (query, size, fn, mapper, ...),
   * the optional second argument overrides the page number. Works on a COPY
   * of arg; the previous (frozen) info is passed along so sequential paging
   * can resume via nextSkip.
   */
  async page(arg, page) {
    const info = { ...arg };
    return info.page = page || arg?.page || 1, queryPage(this, info, arg);
  }
  /** Fetches the page after a previously returned page info. */
  nextPage(info) {
    return this.page(info, info.page + 1);
  }
  /**
   * Full forward dump of the store/index for backup: unlimited filter() in
   * "next" order using exportMapper (or the default). string[] exportMapper
   * is rejected for keyPath-less stores because the tuple key would be lost.
   */
  export(arg1, arg2) {
    const args = parseFilterArg(arg1, arg2), { keyPath, exportMapper, name, defaultGetMapper } = this.storeSchema;
    if (!keyPath && Array.isArray(exportMapper))
      throw new Error(`When store [ ${name} ] keyPath does not exist, exportMapper does not support string[].`);
    return args.direction = "next", args.mapper = getMapperFn(exportMapper || defaultGetMapper, keyPath, !0), args.limit = args.maxEmptyChecks = Number.MAX_SAFE_INTEGER, this.filter(args);
  }
  /** Map-style view over the underlying store (indexes collapse to the store). */
  asMap() {
    let { target } = this;
    return target instanceof IDBIndex ? target = target.objectStore : target instanceof IDBObjectStore || (target = { store: target.store }), this.factory.newDbMap({ target }, this.idbPro);
  }
  /**
   * Builds a sibling reader/writer for `target`, forwarding the cached schema
   * only when frozen (mirrors the #storeSchema caching rule).
   */
  createOperator(target, writable) {
    const { idbPro } = this, tmp = this.storeSchema, schema = { storeSchema: Object.isFrozen(tmp) ? tmp : void 0, target };
    return writable ? this.factory.newWriter(schema, idbPro) : this.factory.newReader(schema, idbPro);
  }
}
|
|
480
|
+
/**
 * Expands a primary-key value into an object keyed by the store's keyPath.
 * A string keyPath yields `{ [keyPath]: value }`; a compound (array) keyPath
 * pairs each path component with the matching element of `value`.
 */
function getKeyValueToObject(keyPath, value) {
  if (Array.isArray(keyPath)) {
    const out = {};
    keyPath.forEach((name, idx) => {
      out[name] = value[idx];
    });
    return out;
  }
  return { [keyPath]: value };
}
|
|
488
|
+
/**
 * Extracts the primary-key value of `value` according to `keyPath`: the
 * single field for a string keyPath, or an array of components for a
 * compound keyPath. Returns undefined when any compound component is missing.
 */
function getValidKeyValue(keyPath, value) {
  if (!Array.isArray(keyPath))
    return value[keyPath];
  const rv = [];
  for (const k of keyPath) {
    // Fix: treat only null/undefined as missing — 0 and "" are valid
    // IndexedDB key components, but the previous truthiness check (!value[k])
    // rejected them and made the whole compound key unusable.
    if (value[k] == null)
      return;
    rv.push(value[k]);
  }
  return rv;
}
|
|
499
|
+
/**
 * Prepares a value for insertion: on a shallow copy, stamps added/updated
 * time (Date.now) when the schema declares those fields and the value does
 * not already carry numeric ones, initializes the soft-delete flag to 0 and
 * the update counter to 0. Non-objects and arrays pass through untouched.
 */
function checkAddValue(storeSchema, value) {
  if (!value || !(value instanceof Object) || Array.isArray(value))
    return value;
  const { addedTimeField, updatedTimeField, updatedCountField, softDeletedField } = storeSchema;
  return value = { ...value }, addedTimeField?.name && !gsBase.isNumber(value[addedTimeField.name]) && (value[addedTimeField.name] = Date.now()), updatedTimeField?.name && !gsBase.isNumber(value[updatedTimeField.name]) && (value[updatedTimeField.name] = Date.now()), softDeletedField?.name && !gsBase.isNumber(value[softDeletedField.name]) && (value[softDeletedField.name] = 0), updatedCountField?.name && (value[updatedCountField.name] = 0), value;
}
|
|
505
|
+
/**
 * Prepares a value for an update: merges `newValue` over `oldValue`, then
 * refreshes the updated-time field and increments the updated-count field
 * when the schema declares them. Non-objects and arrays pass through
 * untouched.
 */
function checkUpdateValue(storeSchema, newValue, oldValue) {
  if (!newValue || !(newValue instanceof Object) || Array.isArray(newValue))
    return newValue;
  const { updatedTimeField, updatedCountField } = storeSchema;
  newValue = { ...oldValue, ...newValue };
  // Fix: use optional chaining like the sibling checkAddValue does — the
  // previous direct `.name` access threw a TypeError on schemas that do not
  // declare these special fields.
  if (updatedTimeField?.name)
    newValue[updatedTimeField.name] = Date.now();
  if (updatedCountField?.name)
    newValue[updatedCountField.name] = (newValue[updatedCountField.name] || 0) + 1;
  return newValue;
}
|
|
511
|
+
/**
 * Write-side API layered on DataReader. Single-record operations route
 * through changeByPk (key resolution + old-value fetch + mapper pipeline);
 * *Many variants fan out inside one shared readwrite transaction via
 * batchWrite.
 */
class DataWriter extends DataReader {
  /** Inserts one record (fails on key conflict); returns the mapped result. */
  add(record) {
    return this.changeByPk({
      record,
      fn: async (store, pk, newValue, oldValue, keyPath) => {
        const { storeSchema } = this;
        // With a keyPath the generated key is folded back into the value;
        // without one the explicit key is passed to add(value, key).
        return newValue = checkAddValue(storeSchema, newValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await gsIdbBasic.requestDbResult(store.add(newValue))) }, pk] : [newValue, await gsIdbBasic.requestDbResult(store.add(newValue, pk))];
      }
    });
  }
  addMany(records, returns) {
    return this.batchWrite((w) => returns ? gsBase.asyncMap(records, (v) => w.add(v)) : gsBase.asyncForEach(records, (v) => w.add(v)), !0);
  }
  /**
   * Inserts only when no record with the same primary key exists; for
   * index-targeting writers, also skips when a record with the same index key
   * already exists (returning the existing record).
   */
  addOrSkip(record) {
    return this.batchWrite(async (w) => {
      const { keyPath: storeKeyPath, defaultGetMapper } = w.storeSchema, { key: pk, value } = storeKeyPath ? {
        key: getValidKeyValue(storeKeyPath, record),
        value: record
      } : parseDbNoneKeyPathRecord(record);
      if (pk) {
        const item = await gsIdbBasic.requestDbResult(w.nativeStore.get(pk));
        if (item)
          return getMapperFn(defaultGetMapper, storeKeyPath)?.(item, pk);
      }
      if (w.target instanceof IDBIndex) {
        const { keyPath } = w, keyValue = getValidKeyValue(keyPath, value);
        if (!keyValue)
          return w.add(record);
        let oldValue = await w.find(keyValue);
        if (oldValue)
          return oldValue;
      }
      return w.add(record);
    });
  }
  addOrSkipMany(records, returns) {
    return this.batchWrite((w) => returns ? gsBase.asyncMap(records, (v) => w.addOrSkip(v)) : gsBase.asyncForEach(records, (v) => w.addOrSkip(v)), !0);
  }
  /**
   * put() that OVERWRITES the record wholesale but preserves the old row's
   * bookkeeping fields (added time, updated time/count baseline) when a
   * previous row exists; otherwise behaves like add-field stamping.
   */
  replace(record) {
    return this.changeByPk({
      record,
      getOld: !0,
      fn: async (store, pk, newValue, oldValue, keyPath) => {
        const { storeSchema } = this, { updatedTimeField, updatedCountField, addedTimeField } = storeSchema;
        return oldValue ? newValue = checkUpdateValue(storeSchema, newValue, {
          [updatedTimeField.name]: oldValue[updatedTimeField.name],
          [updatedCountField.name]: oldValue[updatedCountField.name],
          [addedTimeField.name]: oldValue[addedTimeField.name]
        }) : newValue = checkAddValue(storeSchema, newValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await gsIdbBasic.requestDbResult(store.put(newValue))) }, pk] : [newValue, await gsIdbBasic.requestDbResult(store.put(newValue, pk))];
      }
    });
  }
  replaceMany(records, returns) {
    return this.batchWrite((w) => returns ? gsBase.asyncMap(records, (v) => w.replace(v)) : gsBase.asyncForEach(records, (v) => w.replace(v)), !0);
  }
  /**
   * Partial update merged over the existing row; a no-op (returns undefined)
   * when the row is missing unless `throwIfMissing` is set, which also
   * requires a resolvable key.
   */
  change(record, throwIfMissing) {
    return this.changeByPk({
      record,
      getOld: !0,
      requiredOld: throwIfMissing,
      requiredPk: throwIfMissing,
      fn: async (store, pk, newValue, oldValue, keyPath) => {
        const { storeSchema } = this;
        if (oldValue)
          return newValue = checkUpdateValue(storeSchema, newValue, oldValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await gsIdbBasic.requestDbResult(store.put(newValue))) }, pk] : [newValue, await gsIdbBasic.requestDbResult(store.put(newValue, pk))];
      }
    });
  }
  changeMany(records, option) {
    // `option` may be a boolean (returns) or { returns, throwIfMissing }.
    const { returns, throwIfMissing } = gsBase.isBoolean(option) ? { returns: option } : option || {};
    return this.batchWrite((w) => returns ? gsBase.asyncMap(records, (v) => w.change(v, throwIfMissing)) : gsBase.asyncForEach(records, (v) => w.change(v, throwIfMissing)), !0);
  }
  /** Upsert: merge-update when the row exists, insert (add-stamped) otherwise. */
  addOrChange(record) {
    return this.changeByPk({
      record,
      getOld: !0,
      fn: async (store, pk, newValue, oldValue, keyPath) => {
        const { storeSchema } = this;
        return oldValue ? (newValue = checkUpdateValue(storeSchema, newValue, oldValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await gsIdbBasic.requestDbResult(store.put(newValue))) }, pk] : [newValue, await gsIdbBasic.requestDbResult(store.put(newValue, pk))]) : (newValue = checkAddValue(storeSchema, newValue), keyPath ? [{ ...newValue, ...getKeyValueToObject(keyPath, await gsIdbBasic.requestDbResult(store.add(newValue))) }, pk] : [newValue, await gsIdbBasic.requestDbResult(store.add(newValue, pk))]);
      }
    });
  }
  addOrChangeMany(records, returns) {
    return this.batchWrite((w) => returns ? gsBase.asyncMap(records, (v) => w.addOrChange(v)) : gsBase.asyncForEach(records, (v) => w.addOrChange(v)), !0);
  }
  /** Physical delete by primary key; with `returns`, yields the removed row. */
  delete(pk, returns) {
    return this.changeByPk({
      pk,
      getOld: returns,
      fn: (store, pk2, newValue, oldValue) => {
        if (store.delete(pk2), !!returns)
          return [oldValue, pk2];
      }
    });
  }
  deleteMany(keys, returns) {
    return this.batchWrite((w) => returns ? gsBase.asyncMap(keys, (v) => w.delete(v, !0)) : gsBase.asyncForEach(keys, (v) => w.delete(v)), !0);
  }
  /**
   * Deletes every row in a key range. Soft-deletes (sets the soft-delete
   * field to 1 and saves) when the schema declares one, unless
   * `returns.physical` forces a hard delete.
   */
  deleteRange(query, returns) {
    const { returns: hasRtn, physical, direction } = gsBase.isBoolean(returns) ? { returns } : returns || {}, { name } = this.storeSchema.softDeletedField || {};
    return this.cursor({
      query,
      direction,
      fn: (value) => physical || !name ? { modify: Delete } : (value[name] = 1, { modify: Save })
    }, hasRtn);
  }
  deleteRangeMany(keys, returns) {
    const option = gsBase.isBoolean(returns) ? { returns } : returns;
    return this.batchWrite((w) => option?.returns ? gsBase.asyncMap(keys, (v) => w.deleteRange(v, option)) : gsBase.asyncForEach(keys, (v) => w.deleteRange(v, option)), !0);
  }
  /**
   * Merges `newValue` into every row in a range; when no query is supplied
   * the row's own key (derived from newValue) is used. Non-object rows are
   * replaced outright.
   */
  changeRange(arg, returns) {
    let { direction = "next", query, newValue } = "newValue" in arg ? arg : { newValue: arg };
    if (query || (query = getValidKeyValue(this.keyPath, newValue)), !query)
      throw new Error(`query is required\uFF1A${JSON.stringify(arg)}`);
    return this.cursor({
      query,
      direction,
      fn: (value) => value instanceof Object ? { modify: Save, value: { ...value, ...newValue } } : { modify: Save, value: newValue }
    }, returns);
  }
  changeRangeMany(args, returns) {
    return this.batchWrite((w) => returns ? gsBase.asyncMap(args, (v) => w.changeRange(v, returns)) : gsBase.asyncForEach(args, (v) => w.changeRange(v, returns)), !0);
  }
  /**
   * Writable cursor entry point: with a callback runs cursorResult/cursorVoid
   * in readwrite mode; without one returns a writable DbIterator.
   */
  cursor(arg, returns) {
    return arg = gsBase.isFunction(arg) ? { fn: arg } : arg || {}, arg.fn ? returns ? this.cursorResult(arg, !0) : this.cursorVoid(arg, !0) : new DbIterator(this, this.idbPro);
  }
  /**
   * Runs fn against a transaction-bound writer so multiple writes share one
   * readwrite tx; native targets reuse the ambient transaction (aborting it
   * on synchronous failure unless rollbackOnError === false).
   */
  batchWrite(fn, rollbackOnError) {
    const { target } = this;
    if (isNativeTarget(target))
      try {
        return fn(this);
      } catch (e) {
        throw rollbackOnError !== !1 && (target instanceof IDBIndex ? target.objectStore : target).transaction.abort(), e;
      }
    return this.tx(!0, (store) => fn(this.idbPro.schema.factory.newWriter({
      storeSchema: this.storeSchema,
      target: store
    }, this.idbPro)), rollbackOnError);
  }
  /**
   * Shared single-record pipeline: optionally pre-maps the record through
   * saveMapper, resolves the primary key (explicit pk > keyPath extraction >
   * no-keyPath DBRecord parsing), optionally fetches/requires the existing
   * row, invokes `fn(nativeStore, key, newValue, oldValue, keyPath)` and maps
   * its `[value, key]` result through getMapper/defaultGetMapper.
   */
  changeByPk({ pk, record, fn, requiredPk, getOld, requiredOld, saveMapper, getMapper }) {
    const { storeSchema } = this, { keyPath, defaultSaveMapper, defaultGetMapper } = storeSchema;
    return record && (saveMapper || defaultSaveMapper) && (record = getMapperFn(saveMapper || defaultSaveMapper, keyPath)?.(record)), this.batchWrite(async (w) => {
      let query, newValue = record;
      if (pk)
        query = parseIDbQuery(pk);
      else if (keyPath)
        query = getValidKeyValue(keyPath, record);
      else {
        const { key, value } = parseDbNoneKeyPathRecord(record);
        query = key, newValue = value;
      }
      if (requiredPk && !query)
        throw new Error(`key is required: ${JSON.stringify(record)}`);
      const oldValue = query && (getOld || requiredOld) ? await gsIdbBasic.requestDbResult(w.nativeStore.get(query)) : void 0;
      if (requiredOld && !oldValue)
        throw new Error(`record not found: ${JSON.stringify(record)}`);
      const result = await fn(w.nativeStore, query, newValue, oldValue, keyPath);
      if (result)
        return getMapperFn(getMapper || defaultGetMapper, keyPath)?.(result[0], result[1]);
    });
  }
}
|
|
673
|
+
class DbMap extends DataOperationBase {
  /** Number of records in the store (resolves to a number). */
  get size() {
    return this.tx(false, (store) => gsIdbBasic.requestDbResult(store.count()));
  }
  delete(key) {
    return this.tx(true, async (t, store) => {
      store.delete(key);
    });
  }
  /** Run `fn` with a DbMap bound to one write transaction. */
  batch(fn) {
    const { idbPro, storeSchema, factory } = this;
    return this.tx(true, async (t, store) => {
      const scoped = factory.newDbMap({ storeSchema, target: store }, idbPro);
      return await fn(scoped);
    });
  }
  /** View this map as a writer (writable=true) or a reader. */
  asStore(writable) {
    const { factory } = this.idbPro.schema;
    if (writable) return factory.newWriter(this, this.idbPro);
    return factory.newReader({ target: this.target }, this.idbPro);
  }
  entries() {
    return new DbIterator({
      target: this.target
    }, this.idbPro, { parser: "keyValue" });
  }
  /** Read one value; falsy stored values fall back to `defaultValue`. */
  async get(key, defaultValue) {
    const value = await this.tx(false, (store) => gsIdbBasic.requestDbResult(store.get(key)));
    return value || defaultValue;
  }
  /** Read several keys sequentially inside a single read transaction. */
  getMany(keys) {
    return this.tx(false, async (store) => {
      const values = [];
      for (const key of keys) values.push(await gsIdbBasic.requestDbResult(store.get(key)));
      return values;
    });
  }
  async has(key) {
    return !!await this.get(key);
  }
  keys() {
    return new DbIterator({ storeSchema: this.storeSchema, target: this.target }, this.idbPro, { parser: "key" });
  }
  set(key, value) {
    return this.tx(true, async (t, store) => {
      await gsIdbBasic.requestDbResult(store.put(value, key));
    });
  }
  /** Write several [key, value] pairs inside one transaction. */
  setMany(values) {
    return this.tx(true, async (t, store) => {
      for (const [key, value] of values) store.put(value, key);
    });
  }
  values() {
    return new DbIterator({ storeSchema: this.storeSchema, target: this.target }, this.idbPro, { parser: "value" });
  }
}
|
|
727
|
+
function equalKeyPath(p1, p2) {
|
|
728
|
+
if (p1 == p2)
|
|
729
|
+
return !0;
|
|
730
|
+
if (typeof p1 != typeof p2)
|
|
731
|
+
return !1;
|
|
732
|
+
if (Array.isArray(p1) && Array.isArray(p2)) {
|
|
733
|
+
if (p1.length !== p2.length)
|
|
734
|
+
return !1;
|
|
735
|
+
for (let i = 0; i < p1.length; i++)
|
|
736
|
+
if (p1[i] !== p2[i])
|
|
737
|
+
return !1;
|
|
738
|
+
return !0;
|
|
739
|
+
}
|
|
740
|
+
return !1;
|
|
741
|
+
}
|
|
742
|
+
const versionDiffValidate = ({ stores, schema }) => {
  // Only keyPath/autoIncrement are checked: those cannot be fixed without
  // recreating the store, so a mismatch is fatal.
  const { storeSchemas } = schema;
  let info = "";
  for (const store of stores) {
    const ss = storeSchemas.find((s) => s.name === store.name);
    if (!ss) continue;
    if (!equalKeyPath(store.keyPath, ss.keyPath)) {
      info = `store [ ${store.name} ] keyPath not equal,schema.keyPath:${ss.keyPath},store.keyPath:${store.keyPath}[]`;
      break;
    }
    if (!store.autoIncrement != !ss.autoIncrement) {
      info = `store [ ${store.name} ] autoIncrement not equal`;
      break;
    }
  }
  return info ? `The existing database is inconsistent with the definition and cannot be corrected\uFF1A ${info}` : true;
};
const versionSameValidate = async (context) => {
  // Structural check first; if it passes, also verify stores and indexes.
  let result = versionDiffValidate(context);
  if (!gsBase.isString(result)) result = validateStoreAndIndexes(context);
  return result;
};
const validateStoreAndIndexes = ({ stores, schema }) => {
  const { storeSchemas } = schema;
  const dbStoreNames = stores.map((s) => s.name);
  let info = "";
  // Every schema-declared store must exist in the database.
  const missingStoreNames = storeSchemas.map((s) => s.name).filter((n) => !dbStoreNames.includes(n));
  if (missingStoreNames.length) {
    info = `store [ ${missingStoreNames.join(",")} ] not exist`;
  } else {
    for (const store of stores) {
      const ss = storeSchemas.find((s) => s.name === store.name);
      if (!ss) continue;
      info = validateIndexes$1(store, Array.from(store.indexNames), ss.indexSchemas);
      if (info) break;
    }
  }
  return info ? `The existing database Store index is inconsistent with the definition and requires a database version upgrade to be fixed\uFF1A ${info}` : true;
};
|
|
776
|
+
function validateIndexes$1(store, indexNames, schemas) {
  // Returns "" when the live indexes match the schema, otherwise a message
  // describing the first mismatch found.
  const mismatch = (indexName, what) => `store [ ${store.name} ] index [ ${indexName} ] ${what}`;
  if (indexNames.length !== schemas.length) return `store [ ${store.name} ] index count not equal`;
  for (const indexName of indexNames) {
    const indexSchema = schemas.find((s) => s.name === indexName);
    if (!indexSchema) return mismatch(indexName, "not exist");
    const liveIndex = store.index(indexName);
    if (!indexSchema.unique != !liveIndex.unique) return mismatch(indexName, "unique not equal");
    if (!indexSchema.multiEntry != !liveIndex.multiEntry) return mismatch(indexName, "multiEntry not equal");
    if (!equalKeyPath(indexSchema.keyPath, liveIndex.keyPath)) return mismatch(indexName, "keyPath not equal");
  }
  return "";
}
|
|
793
|
+
class StoreUpgradeable {
  upgradeContext;
  storeSchema;
  nativeStore;
  // Lazily-created writer bound to the upgrade transaction's store.
  #writer;
  constructor(upgradeContext, storeSchema, nativeStore) {
    this.upgradeContext = upgradeContext;
    this.storeSchema = storeSchema;
    this.nativeStore = nativeStore;
  }
  get writer() {
    if (!this.#writer) {
      this.#writer = this.upgradeContext.dbSchema.factory?.newWriter({
        target: this.nativeStore,
        storeSchema: this.storeSchema
      });
    }
    return this.#writer;
  }
  /** addMany, but only when the upgrade versions fall inside `versionBounds`. */
  add(versionBounds, values, returns) {
    if (!this.upgradeContext.versionIn(versionBounds)) return Promise.resolve();
    return this.writer.addMany(values, returns);
  }
  addOrChange(versionBounds, values, returns) {
    if (!this.upgradeContext.versionIn(versionBounds)) return Promise.resolve();
    return this.writer.addOrChangeMany(values, returns);
  }
  /** Run an arbitrary callback against the writer when versions match. */
  async call(versionBounds, fn) {
    if (this.upgradeContext.versionIn(versionBounds)) return await fn(this.writer, this.upgradeContext);
  }
  replace(versionBounds, values, returns) {
    if (!this.upgradeContext.versionIn(versionBounds)) return Promise.resolve();
    return this.writer.replaceMany(values, returns);
  }
}
|
|
821
|
+
// Default constructors for every operator type; a schema's own `factory`
// entries may override any of these.
const DataOperatorFactory = Object.freeze({
  newDataOperators: (schemas, db) => new DataOperators(db, schemas),
  newDbMap: (schema, db) => new DbMap(schema, db),
  newReader: (schema, db) => new DataReader(schema, db),
  newStoreUpgradeable: (nativeStore, storeSchema, upgradeContext) => new StoreUpgradeable(upgradeContext, storeSchema, nativeStore),
  newWriter: (schema, db) => new DataWriter(schema, db)
});
|
|
838
|
+
function validateSpecialFields(schema) {
  // Frozen schemas were already normalized; leave them untouched.
  if (Object.isFrozen(schema)) return;
  // Time fields default on; count/soft-delete fields default off.
  schema.addedTimeField = validateSpecialField(schema, schema.addedTimeField, "added_at", true);
  schema.updatedTimeField = validateSpecialField(schema, schema.updatedTimeField, "updated_at", true);
  schema.updatedCountField = validateSpecialField(schema, schema.updatedCountField, "updated_count", false);
  schema.softDeletedField = validateSpecialField(schema, schema.softDeletedField, "deleted", false);
}
|
|
841
|
+
function validateSpecialField(schema, field, defaultName, useDefault) {
  const normalized = validateSpecialFieldBase(field, defaultName, useDefault);
  // false means the field is disabled entirely.
  if (!normalized) return normalized;
  // Unless indexing is explicitly opted out, make sure an index schema with
  // the field's name exists.
  if (normalized.isIndexed !== false) {
    if (!normalized.isIndexed) normalized.isIndexed = true;
    const { name } = normalized;
    const { indexSchemas } = schema;
    const exists = indexSchemas.some((i) => i === name || i.name === name);
    if (!exists) indexSchemas.push(name);
  }
  return normalized;
}
|
|
853
|
+
function validateSpecialFieldBase(field, defaultName, useDefault) {
  // false disables the field.
  if (field === false) return false;
  // A string is shorthand for { name }.
  if (gsBase.isString(field)) return { name: field };
  if (gsBase.isObject(field)) {
    // A boolean `name` means "use the default field name".
    if (gsBase.isBoolean(field.name)) field.name = defaultName;
    return field;
  }
  if (field === true || useDefault) return { name: defaultName };
  return false;
}
|
|
865
|
+
function validateStoreSchema(schema, storeTemplate) {
  // A bare string is shorthand for { name }.
  let validSchema = gsBase.isString(schema) ? { name: schema } : schema;
  // Template values act as defaults; explicit schema entries win.
  if (storeTemplate) validSchema = { ...storeTemplate, ...validSchema };
  if (!validSchema.indexSchemas) validSchema.indexSchemas = [];
  validateSpecialFields(validSchema);
  if (!Object.isFrozen(validSchema)) validSchema.indexSchemas = validSchema.indexSchemas.map(indexMapper);
  validateDefaultData(validSchema);
  return validSchema;
}
|
|
869
|
+
function indexMapper(index) {
  // String shorthand: the index name doubles as its keyPath.
  if (gsBase.isString(index)) return { name: index, keyPath: index };
  if (!index.keyPath) index.keyPath = index.name;
  return index;
}
|
|
873
|
+
function validateDefaultData(schema) {
  // Only stores without a keyPath need checking: their rows must carry an
  // explicit key, either as an array tuple or a record with a `value` field.
  if (schema.keyPath || !schema.defaultData?.length) return;
  for (const row of schema.defaultData) {
    if (Array.isArray(row) || "value" in row) continue;
    throw new Error(`When \`defaultData\` must contain \`value\` fields or be an array\uFF1A${JSON.stringify(row)}`);
  }
}
|
|
880
|
+
const validateSchemaWithDefaults = (schema) => {
  const { versionDiffValidate: vdf, versionSameValidate: vsf, factory: dof } = schema;
  if (!schema.storeSchemas) schema.storeSchemas = [];
  if (!schema.storeTemplate) schema.storeTemplate = { ...defaultStoreSchemaTemplate };
  // `false` explicitly disables a validator; anything else falsy gets the default.
  if (!vdf && vdf !== false) schema.versionDiffValidate = versionDiffValidate;
  if (!vsf && vsf !== false) schema.versionSameValidate = versionSameValidate;
  // Custom factories are merged over the default one.
  if (!dof) schema.factory = DataOperatorFactory;
  else if (dof !== DataOperatorFactory) schema.factory = { ...DataOperatorFactory, ...dof };
  schema.storeSchemas = schema.storeSchemas.map((s) => validateStoreSchema(s, schema.storeTemplate));
  return schema;
};
|
|
884
|
+
async function validateBeforeOpen(schema) {
  const { name, version } = schema;
  const existDb = await gsIdbBasic.findExistDb(name);
  // Nothing on disk yet: nothing to validate against.
  if (!existDb) return true;
  const { versionDiffValidate: diffValidate, versionSameValidate: sameValidate } = schema;
  const db = await gsIdbBasic.openDb(name);
  try {
    if (schema.version < db.version) return "The existing database version is greater than the current version";
    // Same version (or none declared) gets the strict validator; a version
    // bump gets the lenient one.
    const validate = schema.version === void 0 || db.version === schema.version ? sameValidate : diffValidate;
    if (!validate) return true;
    const storeNames = Array.from(db.objectStoreNames);
    if (storeNames.length < 1) return `The existing database [ ${name} ] is empty`;
    const stores = await gsIdbBasic.readTx(db, storeNames);
    const diffResult = await validate({ schema, db, stores });
    if (gsBase.isString(diffResult) || version !== existDb.version) return diffResult;
  } finally {
    db?.close();
  }
  return true;
}
|
|
906
|
+
function validateIndexes(storeSchema, store) {
  const { indexSchemas } = storeSchema;
  const existNames = store.indexNames;
  const wantedNames = indexSchemas.map((schema) => schema.name);
  // Drop indexes the schema no longer declares.
  for (const name of Array.from(existNames)) {
    if (!wantedNames.includes(name)) store.deleteIndex(name);
  }
  // Verify the surviving declared indexes and create the missing ones.
  for (const indexSchema of indexSchemas) {
    if (existNames.contains(indexSchema.name)) validateIndex(store, indexSchema);
    else createIndex(store, indexSchema);
  }
  return store;
}
|
|
914
|
+
function validateIndex(store, indexOption) {
  // Indexes cannot be altered in place: mismatches are fixed by drop-and-recreate.
  const index = store.index(indexOption.name);
  if (!checkIndex(index, indexOption)) {
    store.deleteIndex(indexOption.name);
    createIndex(store, indexOption);
  }
}
|
|
918
|
+
function checkIndex(index, indexOption) {
  // Flags must match exactly; keyPath comparison tolerates array forms.
  if (index.unique !== indexOption.unique) return false;
  if (index.multiEntry !== indexOption.multiEntry) return false;
  return equalKeyPath(indexOption.keyPath, index.keyPath);
}
|
|
921
|
+
function createIndex(store, schema) {
  try {
    const { name, keyPath, unique, multiEntry } = schema;
    store.createIndex(name, keyPath, { unique, multiEntry });
  } catch {
    // Surface a readable message (with the offending schema) instead of the native exception.
    throw new Error(`store [ ${store.name} ] index [ ${schema.name} ] create error: ${JSON.stringify(schema)}`);
  }
}
|
|
931
|
+
function validateStoreDefine(context, schema) {
  // Existing stores get their indexes reconciled; missing ones are created.
  const { database, transaction } = context;
  if (database.objectStoreNames.contains(schema.name)) return validateIndexes(schema, transaction?.objectStore(schema.name));
  return defineStore(schema, database);
}
|
|
934
|
+
function defineStore(schema, db) {
  // Create the object store, then all of its declared indexes.
  const { name, keyPath, autoIncrement, indexSchemas } = schema;
  const store = db.createObjectStore(name, { keyPath, autoIncrement });
  for (const indexSchema of indexSchemas) createIndex(store, indexSchema);
  return store;
}
|
|
943
|
+
class UpgradeContext {
  database;
  newVersion;
  oldVersion;
  dbSchema;
  transaction;
  // Cache of StoreUpgradeable wrappers, keyed by store name.
  #stores = {};
  constructor(args) {
    this.database = args.database;
    this.newVersion = args.newVersion;
    this.oldVersion = args.oldVersion;
    this.dbSchema = args.dbSchema;
    this.transaction = args.transaction;
  }
  deleteStoreIfExists(storeName) {
    const db = this.database;
    if (db.objectStoreNames.contains(storeName)) db.deleteObjectStore(storeName);
  }
  destroy() {
    try {
      gsBase.destroyRecords(this.#stores);
    } finally {
      // Drop the cache even when destroyRecords throws.
      for (const key of Object.keys(this.#stores)) delete this.#stores[key];
    }
  }
  /** Get (creating and caching on first use) the upgrade wrapper for a store. */
  store(storeName) {
    if (storeName in this.#stores) return this.#stores[storeName];
    const { factory, storeSchemas } = this.dbSchema;
    const storeSchema = storeSchemas.find((s) => s.name === storeName);
    const nativeStore = validateStoreDefine(this, storeSchema);
    return this.#stores[storeName] = factory.newStoreUpgradeable(nativeStore, storeSchema, this);
  }
  /**
   * True when oldVersion/newVersion both fall inside the given bounds.
   * At least one bound is required; an inverted range throws.
   */
  versionIn({ oldMin, oldMax, newMax, newMin }) {
    if (oldMax === void 0 && newMax === void 0 && oldMin === void 0 && newMin === void 0)
      throw new Error(`versionIn bounds must not be empty ${JSON.stringify({ oldMax, newMax, oldMin, newMin })}`);
    if (oldMax < oldMin) throw new Error(`oldMax (${oldMax}) cannot be less than oldMin (${oldMin})`);
    if (newMax < newMin) throw new Error(`newMax (${newMax}) cannot be less than newMin (${newMin})`);
    const { oldVersion, newVersion } = this;
    if (oldMin !== void 0 && oldVersion < oldMin) return false;
    if (oldMax !== void 0 && oldVersion > oldMax) return false;
    if (newMin !== void 0 && newVersion < newMin) return false;
    return !(newMax !== void 0 && newVersion > newMax);
  }
}
|
|
982
|
+
async function upgradeDb(dbSchema, database, e, request) {
  const { storeSchemas, beforeUpgrade, afterUpgrade, version } = dbSchema;
  const { newVersion = version, oldVersion } = e;
  const { transaction } = request;
  const context = new UpgradeContext({ database, newVersion, oldVersion, dbSchema, transaction });
  try {
    // Every step runs even if a previous one failed; failures are collected
    // and reported together at the end.
    const errors = [];
    const attempt = async (step) => {
      try {
        await step();
      } catch (err) {
        errors.push(err);
      }
    };
    if (gsBase.isFunction(beforeUpgrade)) await attempt(() => beforeUpgrade(context));
    // 1. Ensure every declared store exists with the right indexes.
    for (const { name } of storeSchemas) await attempt(() => context.store(name));
    // 2. Seed defaultData only on first creation (oldVersion 0).
    for (const { name, defaultData } of storeSchemas) {
      if (defaultData) await attempt(() => context.store(name).add({ oldMax: 0 }, defaultData));
    }
    // 3. Apply per-version data migrations.
    for (const { name, versionData } of storeSchemas) {
      if (!versionData) continue;
      for (const { version: bounds, data, use = "addOrChange" } of versionData)
        await attempt(() => context.store(name)[use](bounds, data));
    }
    // 4. Run custom per-store hooks.
    for (const { name, storeDefined } of storeSchemas) await attempt(() => storeDefined?.(context.store(name)));
    if (gsBase.isFunction(afterUpgrade)) await attempt(() => afterUpgrade(context));
    if (errors.length === 0) return;
    if (errors.length === 1) throw errors[0];
    throw new AggregateError(errors, "Database upgrade error");
  } finally {
    gsBase.destroy(context);
  }
}
|
|
1032
|
+
function validateDataOperationSchema(schema, dbSchema) {
  const { store, index } = schema;
  const { storeTemplate, storeSchemas = [] } = dbSchema;
  const storeName = gsBase.isString(store) ? store : store.name;
  const storeIndex = storeSchemas.findIndex((s) => s === storeName || s.name === storeName);
  const existStore = storeIndex > -1 && storeSchemas[storeIndex];
  // Merge the requested store definition with any schema already registered
  // under the same name.
  let tmp;
  if (gsBase.isString(store)) tmp = existStore || storeName;
  else if (!existStore || gsBase.isString(existStore) || store === existStore) tmp = store;
  else tmp = { ...existStore, ...store };
  if (index || existStore?.indexSchemas?.length || store?.indexSchemas?.length) {
    if (gsBase.isString(tmp)) tmp = { name: tmp };
    tmp.indexSchemas = mergeIndexes(existStore.indexSchemas || [], store.indexSchemas || [], index);
  }
  const validSchema = validateStoreSchema(tmp, storeTemplate);
  // Register (or replace) the normalized schema in the database schema.
  if (storeIndex > -1) storeSchemas[storeIndex] = validSchema;
  else storeSchemas.push(validSchema);
  dbSchema.storeSchemas = storeSchemas;
  const target = { store: storeName };
  if (index) target.index = gsBase.isString(index) ? index : index.name;
  return {
    target
  };
}
|
|
1044
|
+
function mergeIndexes(existIndexes, newIndexes, index) {
  // NOTE: mutates both input arrays in place; the merged `existIndexes` is returned.
  if (index) newIndexes.push(index);
  for (const candidate of newIndexes) {
    const candidateName = gsBase.isString(candidate) ? candidate : candidate.name;
    const pos = existIndexes.findIndex((i) => i === candidateName || i.name === candidateName);
    if (pos === -1) {
      existIndexes.push(candidate);
      continue;
    }
    // Same name: an object replaces a string entry; two objects are merged.
    const old = existIndexes[pos];
    if (gsBase.isString(old)) existIndexes[pos] = candidate;
    else if (!gsBase.isString(candidate)) existIndexes[pos] = Object.assign(old, candidate);
  }
  return existIndexes;
}
|
|
1056
|
+
// Template for key-value (DbMap) stores: out-of-line keys and every special
// field disabled.
const mapStoreSchema = Object.assign({
  name: "",
  addedTimeField: false,
  autoIncrement: false,
  indexSchemas: [],
  keyPath: void 0,
  softDeletedField: false,
  updatedCountField: false,
  updatedTimeField: false
});
|
|
1066
|
+
class IDbPro {
// Lazily-created singleton backing `defaultDb`.
static #_db;
// The database schema; deep-frozen once the native db has been opened.
#schema;
// Pre-open validation result: true, an error string, or undefined (not yet run).
#beforeUseValid;
// Cache for getStoreSchema lookups, keyed by store name.
#storeSchemaRecord = {};
// `schema` may be just the database name; `skipPreOpenValidation` marks the
// pre-open check as already passed.
constructor(schema, skipPreOpenValidation) {
this.#schema = schema = gsBase.isString(schema) ? { name: schema } : schema, Array.isArray(schema.storeSchemas) || (schema.storeSchemas = []), gsBase.isObject(schema.storeTemplate) || (schema.storeTemplate = defaultStoreSchemaTemplate), skipPreOpenValidation && (this.#beforeUseValid = !0);
}
/**
 * Default instance
 */
static get defaultDb() {
return this.#_db || (this.#_db = new IDbPro(gsIdbBasic.DefaultDbName));
}
// The schema is deep-frozen by openNativeDb(); frozen implies initialized.
get initialized() {
return Object.isFrozen(this.#schema);
}
get schema() {
return this.#schema;
}
// Unique store names declared in the schema (string or object entries).
get storeNames() {
return Array.from(new Set(this.#schema.storeSchemas.map((s) => gsBase.isString(s) ? s : s.name)));
}
get factory() {
return this.#schema.factory || (this.#schema.factory = DataOperatorFactory);
}
/**
 * Release the default database instance (if it exists)
 */
static releaseDefaultDB() {
this.#_db = void 0;
}
/**
 * Open from the already-existing database named `name`.
 * @warning This method is only for scenarios that do not need to modify the existing database structure
 * - If changes are needed, use `generateDbSchema()` to generate a schema from the existing database, adjust it appropriately, and reopen
 * @param name
 */
static async openExistDb(name) {
const { generateDbSchema: generateDbSchema2 } = await Promise.resolve().then(function() {
return generateDbSchema$1;
});
return new IDbPro(await generateDbSchema2(name));
}
// Static convenience wrappers over the default database instance.
static store(arg1, index) {
return IDbPro.defaultDb.store(arg1, index);
}
static stores(schemas) {
return IDbPro.defaultDb.stores(schemas);
}
static map(storeName, defaultData) {
return IDbPro.defaultDb.map(storeName, defaultData);
}
// Finalize (freeze) the schema, run the pre-open validation, then open the
// native IndexedDB connection with the schema-driven upgrade handler.
async openNativeDb() {
const schema = this.#schema = gsBase.deepFreeze(this.initSchema());
await this.#isBeforeUseValidate();
const { name, version } = schema;
return await gsIdbBasic.openDb({
name,
version,
onupgradeneeded: (db, e, request) => upgradeDb(schema, db, e, request)
});
}
// Build a writer for one store; `arg1` may be a store name/schema or a full
// operation object ({ store, index }).
store(arg1, index) {
const operation = arg1.store ? arg1 : { store: arg1 };
return operation.store || (operation.store = arg1), operation.index || (operation.index = index), this.factory.newWriter(validateDataOperationSchema(operation, this.schema), this);
}
stores(schemas) {
const { schema } = this, results = schemas.map((store) => validateDataOperationSchema(gsBase.isString(store) ? { store } : store, schema));
return this.factory.newDataOperators(results, this);
}
// Normalize the schema once; a custom `validateSchemaWithDefaults` hook on
// the schema takes precedence over the module default.
initSchema() {
if (this.initialized)
return this.#schema;
const { validateSchemaWithDefaults: validate = validateSchemaWithDefaults } = this.#schema;
return this.#schema = validate(this.#schema);
}
async traceSchema(showFn) {
await gsBase.logJson(this.schema, showFn);
}
// Key-value style access; registers a map-store schema on first use.
// The arguments may shift: when `storeName` is an array it is the defaultData.
map(storeName, defaultData) {
const name = gsBase.isString(storeName) ? storeName : gsBase.isString(defaultData) ? defaultData : gsIdbBasic.DefaultStorageStoreName;
Array.isArray(storeName) && (defaultData = storeName);
const { storeSchemas } = this.schema;
return storeSchemas.find((s) => s.name === name || s === name) || storeSchemas.push({ ...mapStoreSchema, name, defaultData }), this.factory.newDbMap({ target: { store: name } }, this);
}
// Export / import data for every declared store.
export() {
return this.stores(this.storeNames).export();
}
import(data, returns, use) {
return this.stores(this.storeNames).import(data, returns, use);
}
// Look up a store schema by name; cached once the schema is frozen, while
// string entries are upgraded to objects as long as the schema is mutable.
getStoreSchema(name) {
if (name in this.#storeSchemaRecord)
return this.#storeSchemaRecord[name];
const index = this.schema.storeSchemas.findIndex((s) => s === name || s.name === name);
let storeSchema = this.schema.storeSchemas[index];
return this.initialized ? this.#storeSchemaRecord[name] = storeSchema : gsBase.isString(storeSchema) && (this.schema.storeSchemas[index] = storeSchema = { name: storeSchema }), storeSchema;
}
// Runs validateBeforeOpen at most once; a string result is an error message.
async #isBeforeUseValidate() {
let valid = this.#beforeUseValid;
if (valid === void 0 && (valid = this.#beforeUseValid = await validateBeforeOpen(this.#schema)), valid === !0)
return !0;
if (gsBase.isString(valid))
throw new Error(valid);
}
}
|
|
1173
|
+
// Convenience wrapper over the default database instance.
function dbStore(arg1, index) {
  const db = IDbPro.defaultDb;
  return db.store(arg1, index);
}
|
|
1176
|
+
// Convenience wrapper over the default database instance.
function dbMap(storeName, defaultData) {
  const db = IDbPro.defaultDb;
  return db.map(storeName, defaultData);
}
|
|
1179
|
+
// Convenience wrapper over the default database instance.
function dbStores(schemas) {
  const db = IDbPro.defaultDb;
  return db.stores(schemas);
}
|
|
1182
|
+
// Drop the shared default IDbPro instance.
function releaseDefaultDB() {
  IDbPro.releaseDefaultDB();
}
|
|
1185
|
+
function generateStoreSchema(store, fields) {
  // Rebuild a schema definition from a live object store.
  const { name, keyPath, autoIncrement } = store;
  const indexInfo = parseIndexes(store, fields);
  return { name, keyPath, autoIncrement, ...indexInfo };
}
|
|
1194
|
+
function parseIndexes(store, fields) {
  const indexes = Array.from(store.indexNames).map((n) => store.index(n));
  const indexSchemas = [];
  const paths = [];
  for (const { name, keyPath, unique, multiEntry } of indexes) {
    if (Array.isArray(keyPath)) paths.push(...keyPath);
    else paths.push(keyPath);
    indexSchemas.push({ name, keyPath, unique, multiEntry });
  }
  // A special field is kept only when one of the index key paths backs it.
  const keySet = new Set(paths);
  return {
    indexSchemas,
    addedTimeField: convertSpecialField(fields.addedTimeField, keySet),
    updatedTimeField: convertSpecialField(fields.updatedTimeField, keySet),
    updatedCountField: convertSpecialField(fields.updatedCountField, keySet),
    softDeletedField: convertSpecialField(fields.softDeletedField, keySet)
  };
}
|
|
1207
|
+
function convertSpecialField(field, keySet) {
  // Keep the field (marked non-indexed, since an index for it already exists)
  // only when its name appears among the store's index key paths.
  if (!keySet.has(field)) return false;
  return { name: field, isIndexed: false };
}
|
|
1210
|
+
async function generateDbSchema(db, option) {
  if (!await gsIdbBasic.findExistDb(db)) throw new Error(`db [ ${db} ] not exist`);
  let { asString, specialFields = defaultSpecialFields, dataExportTarget } = option || {};
  // `asString: true` means "stringify with the default line width";
  // numeric widths are clamped to at least 1.
  if (asString === true) asString = 160;
  if (!isNaN(asString) && asString < 1) asString = 1;
  let dbSchema = await generateRoot(db, specialFields);
  if (dataExportTarget) await generateData(dbSchema, dataExportTarget);
  if (!asString) return dbSchema;
  return await gsBase.toJson({
    rootData$: dbSchema,
    spaceEffectiveLength: asString
  });
}
|
|
1221
|
+
async function generateData(dbSchema, dataExportTarget) {
  // Export current rows through a throwaway IDbPro (pre-open validation skipped).
  const exported = await new IDbPro(gsBase.copyObject(dbSchema), true).export();
  for (const schema of dbSchema.storeSchemas) {
    const rows = exported[schema.name];
    if (!rows?.length) continue;
    if (dataExportTarget === "defaultData") {
      schema.defaultData = rows;
    } else if (dataExportTarget === "versionData") {
      // Attach the rows as a migration bounded by the current version.
      if (!schema.versionData) schema.versionData = [];
      schema.versionData.push({
        version: { oldMax: dbSchema.version },
        data: rows
      });
    }
  }
  return dbSchema;
}
|
|
1232
|
+
function generateRoot(db, specialFields) {
  return gsIdbBasic.openDb(db, async function(existDb) {
    const names = Array.from(existDb.objectStoreNames);
    // A read-only transaction gives access to store/index metadata.
    const tx = existDb.transaction(names, "readonly");
    try {
      return {
        name: existDb.name,
        version: existDb.version,
        storeSchemas: names.map((name) => generateStoreSchema(tx.objectStore(name), specialFields))
      };
    } finally {
      // Metadata only — nothing to commit.
      tx.abort();
    }
  });
}
|
|
1246
|
+
// Namespace object backing the dynamic import in IDbPro.openExistDb.
var generateDbSchema$1 = /* @__PURE__ */ Object.freeze({ __proto__: null, generateDbSchema });
// CommonJS public surface of the package.
exports.Break = Break, exports.Continue = Continue, exports.ContinueKey = ContinueKey, exports.ContinuePrimaryKey = ContinuePrimaryKey, exports.DataOperationBase = DataOperationBase, exports.DataOperators = DataOperators, exports.DataReader = DataReader, exports.DataWriter = DataWriter, exports.DbIterator = DbIterator, exports.DbIteratorParsers = DbIteratorParsers, exports.DbMap = DbMap, exports.Delete = Delete, exports.Finished = Finished, exports.IDbPro = IDbPro, exports.NextKey = NextKey, exports.NextPrimaryKey = NextPrimaryKey, exports.Save = Save, exports.StoreUpgradeable = StoreUpgradeable, exports.UpgradeContext = UpgradeContext, exports.dbMap = dbMap, exports.dbStore = dbStore, exports.dbStores = dbStores, exports.defaultSpecialFields = defaultSpecialFields, exports.defaultStoreSchemaTemplate = defaultStoreSchemaTemplate, exports.generateDbSchema = generateDbSchema, exports.isIDbQuery = isIDbQuery, exports.isNativeTarget = isNativeTarget, exports.parseDbNoneKeyPathRecord = parseDbNoneKeyPathRecord, exports.parseIDbQuery = parseIDbQuery, exports.releaseDefaultDB = releaseDefaultDB, exports.validateSchemaWithDefaults = validateSchemaWithDefaults, exports.versionDiffValidate = versionDiffValidate, exports.versionSameValidate = versionSameValidate;
|