@xyo-network/archivist-indexeddb 3.6.1 → 3.6.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -22,6 +22,7 @@ export declare class IndexedDbArchivist<TParams extends IndexedDbArchivistParams
22
22
  static readonly schemaIndexName: string;
23
23
  static readonly sequenceIndexName: string;
24
24
  private _dbName?;
25
+ private _dbVersion?;
25
26
  private _storeName?;
26
27
  /**
27
28
  * The database name. If not supplied via config, it defaults
@@ -62,6 +63,8 @@ export declare class IndexedDbArchivist<TParams extends IndexedDbArchivistParams
62
63
  protected insertHandler(payloads: WithStorageMeta<Payload>[]): Promise<WithStorageMeta<Payload>[]>;
63
64
  protected nextHandler(options?: ArchivistNextOptions): Promise<WithStorageMeta<Payload>[]>;
64
65
  protected startHandler(): Promise<boolean>;
66
+ private checkIndexes;
67
+ private checkObjectStore;
65
68
  /**
66
69
  * Returns that the desired DB/Store initialized to the correct version
67
70
  * @returns The initialized DB
@@ -1 +1 @@
1
- {"version":3,"file":"Archivist.d.ts","sourceRoot":"","sources":["../../src/Archivist.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,MAAM,aAAa,CAAA;AACvC,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,EAKL,wBAAwB,EACxB,oBAAoB,EAIrB,MAAM,8BAA8B,CAAA;AAGrC,OAAO,EACL,OAAO,EAAE,MAAM,EAAqB,eAAe,EACpD,MAAM,4BAA4B,CAAA;AACnC,OAAO,EACgB,YAAY,EAClC,MAAM,KAAK,CAAA;AAGZ,OAAO,EAAE,wBAAwB,EAAE,MAAM,aAAa,CAAA;AAEtD,MAAM,WAAW,YAAY;IAC3B,CAAC,CAAC,EAAE,MAAM,GAAG,eAAe,CAAA;CAC7B;AAED,qBACa,kBAAkB,CAC7B,OAAO,SAAS,wBAAwB,GAAG,wBAAwB,EACnE,UAAU,SAAS,wBAAwB,GAAG,wBAAwB,CACtE,SAAQ,iBAAiB,CAAC,OAAO,EAAE,UAAU,CAAC;IAC9C,gBAAyB,aAAa,EAAE,MAAM,EAAE,CAA2D;IAC3G,gBAAyB,mBAAmB,EAAE,MAAM,CAAiC;IACrF,MAAM,CAAC,QAAQ,CAAC,aAAa,eAAc;IAC3C,MAAM,CAAC,QAAQ,CAAC,gBAAgB,KAAI;IACpC,MAAM,CAAC,QAAQ,CAAC,gBAAgB,cAAa;IAC7C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEpC;IAED,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAEhC;IAED,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,CAElC;IAED,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEpC;IAGD,MAAM,CAAC,QAAQ,CAAC,aAAa,SAAuD;IAEpF,MAAM,CAAC,QAAQ,CAAC,iBAAiB,SAA2D;IAE5F,MAAM,CAAC,QAAQ,CAAC,eAAe,SAAyD;IAExF,MAAM,CAAC,QAAQ,CAAC,iBAAiB,SAA2D;IAE5F,OAAO,CAAC,OAAO,CAAC,CAAQ;IACxB,OAAO,CAAC,UAAU,CAAC,CAAQ;IAE3B;;;;;;OAMG;IACH,IAAI,MAAM,WAeT;IAED;;OAEG;IACH,IAAI,SAAS,WAEZ;IAED,IAAa,OAAO,aASnB;IAED;;;OAGG;IACH,IAAI,SAAS,WAUZ;IAED;;OAEG;IACH,OAAO,KAAK,OAAO,GAQlB;cAEwB,UAAU,IAAI,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAOjD,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC;cAI7B,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;cA+BvD,aAAa,CAC3B,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAC9B,SAAS,EAAE,MAAM,EACf,KAAK,GAAE,KAAK,GAAG,MAAc,EAC7B,KAAK,GAAE,MAAW,EAClB,MAAM,CAAC,EAAE,GAAG,GACb,OAAO,CAAC,eAAe,EAAE,CAAC;IAwC7B;;;;;;;OAOG;cACa,0BAA0B,CACxC,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAC9B,SAAS,EAAE,MAAM,EACjB,SAAS,EAAE,MAAM,EACjB,GAAG,EAAE,WAAW,GACf,OAAO,CAAC,CAAC,MAAM,EAAE,eAAe,CAAC,GAAG,SAAS,CAAC;cAgBxB,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,eAAe,EAAE,CAAC;cAmCxD,aAAa,CAAC,QAAQ,EAAE,eAAe,CAAC,OAAO,CAAC,EAAE,GAA
G,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cA+BxF,WAAW,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAShF,YAAY;IAQrC;;;OAGG;YACW,gBAAgB;IAoD9B;;;;OAIG;YACW,KAAK;CAWpB"}
1
+ {"version":3,"file":"Archivist.d.ts","sourceRoot":"","sources":["../../src/Archivist.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,MAAM,aAAa,CAAA;AACvC,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,EAKL,wBAAwB,EACxB,oBAAoB,EAIrB,MAAM,8BAA8B,CAAA;AAGrC,OAAO,EACL,OAAO,EAAE,MAAM,EAAqB,eAAe,EACpD,MAAM,4BAA4B,CAAA;AACnC,OAAO,EACgB,YAAY,EAClC,MAAM,KAAK,CAAA;AAQZ,OAAO,EAAE,wBAAwB,EAAE,MAAM,aAAa,CAAA;AAEtD,MAAM,WAAW,YAAY;IAC3B,CAAC,CAAC,EAAE,MAAM,GAAG,eAAe,CAAA;CAC7B;AAED,qBACa,kBAAkB,CAC7B,OAAO,SAAS,wBAAwB,GAAG,wBAAwB,EACnE,UAAU,SAAS,wBAAwB,GAAG,wBAAwB,CACtE,SAAQ,iBAAiB,CAAC,OAAO,EAAE,UAAU,CAAC;IAC9C,gBAAyB,aAAa,EAAE,MAAM,EAAE,CAA2D;IAC3G,gBAAyB,mBAAmB,EAAE,MAAM,CAAiC;IACrF,MAAM,CAAC,QAAQ,CAAC,aAAa,eAAc;IAC3C,MAAM,CAAC,QAAQ,CAAC,gBAAgB,KAAI;IACpC,MAAM,CAAC,QAAQ,CAAC,gBAAgB,cAAa;IAC7C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEpC;IAED,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAEhC;IAED,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,CAElC;IAED,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEpC;IAGD,MAAM,CAAC,QAAQ,CAAC,aAAa,SAAuD;IAEpF,MAAM,CAAC,QAAQ,CAAC,iBAAiB,SAA2D;IAE5F,MAAM,CAAC,QAAQ,CAAC,eAAe,SAAyD;IAExF,MAAM,CAAC,QAAQ,CAAC,iBAAiB,SAA2D;IAE5F,OAAO,CAAC,OAAO,CAAC,CAAQ;IACxB,OAAO,CAAC,UAAU,CAAC,CAAQ;IAC3B,OAAO,CAAC,UAAU,CAAC,CAAQ;IAE3B;;;;;;OAMG;IACH,IAAI,MAAM,WAeT;IAED;;OAEG;IACH,IAAI,SAAS,WAGZ;IAED,IAAa,OAAO,aASnB;IAED;;;OAGG;IACH,IAAI,SAAS,WAUZ;IAED;;OAEG;IACH,OAAO,KAAK,OAAO,GAQlB;cAEwB,UAAU,IAAI,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAOjD,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC;cAI7B,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;cA+BvD,aAAa,CAC3B,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAC9B,SAAS,EAAE,MAAM,EACf,KAAK,GAAE,KAAK,GAAG,MAAc,EAC7B,KAAK,GAAE,MAAW,EAClB,MAAM,CAAC,EAAE,GAAG,GACb,OAAO,CAAC,eAAe,EAAE,CAAC;IA0C7B;;;;;;;OAOG;cACa,0BAA0B,CACxC,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAC9B,SAAS,EAAE,MAAM,EACjB,SAAS,EAAE,MAAM,EACjB,GAAG,EAAE,WAAW,GACf,OAAO,CAAC,CAAC,MAAM,EAAE,eAAe,CAAC,GAAG,SAAS,CAAC;cAgBxB,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,eAAe,EAAE,CAAC;cAmCxD,aAAa,CAAC,QAAQ,EA
AE,eAAe,CAAC,OAAO,CAAC,EAAE,GAAG,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAyBxF,WAAW,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAShF,YAAY;YAQvB,YAAY;YAkBZ,gBAAgB;IAgB9B;;;OAGG;YACW,gBAAgB;IAiD9B;;;;OAIG;YACW,KAAK;CAWpB"}
@@ -0,0 +1,10 @@
1
+ import type { Logger } from '@xylabs/logger';
2
+ import type { IndexDescription } from '@xyo-network/archivist-model';
3
+ import type { IDBPDatabase, IDBPObjectStore } from 'idb';
4
+ import type { PayloadStore } from './Archivist.ts';
5
+ export declare function createStore(db: IDBPDatabase<PayloadStore>, storeName: string, indexes: IndexDescription[], logger?: Logger): void;
6
+ export declare function getExistingIndexes(db: IDBPDatabase<PayloadStore>, storeName: string): Promise<IndexDescription[]>;
7
+ export declare function useDb<T>(dbName: string, callback: (db: IDBPDatabase<PayloadStore>) => Promise<T> | T): Promise<T>;
8
+ export declare function useReadOnlyStore<T>(db: IDBPDatabase<PayloadStore>, storeName: string, callback: (store: IDBPObjectStore<PayloadStore, [string], string, 'readonly'>) => Promise<T> | T): Promise<T>;
9
+ export declare function useReadWriteStore<T>(db: IDBPDatabase<PayloadStore>, storeName: string, callback: (store: IDBPObjectStore<PayloadStore, [string], string, 'readwrite'>) => Promise<T> | T): Promise<T>;
10
+ //# sourceMappingURL=IndexedDbHelpers.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"IndexedDbHelpers.d.ts","sourceRoot":"","sources":["../../src/IndexedDbHelpers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAA;AAC5C,OAAO,KAAK,EAAE,gBAAgB,EAAkB,MAAM,8BAA8B,CAAA;AAEpF,OAAO,KAAK,EAAE,YAAY,EAAE,eAAe,EAAE,MAAM,KAAK,CAAA;AAGxD,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AAElD,wBAAgB,WAAW,CAAC,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,EAAE,MAAM,CAAC,EAAE,MAAM,QAmB1H;AAED,wBAAsB,kBAAkB,CAAC,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,gBAAgB,EAAE,CAAC,CAqBvH;AAED,wBAAsB,KAAK,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,KAAK,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAOvH;AAED,wBAAsB,gBAAgB,CAAC,CAAC,EACtC,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAC9B,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,CAAC,KAAK,EAAE,eAAe,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,UAAU,CAAC,KAAK,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,GAC/F,OAAO,CAAC,CAAC,CAAC,CAQZ;AAED,wBAAsB,iBAAiB,CAAC,CAAC,EACvC,EAAE,EAAE,YAAY,CAAC,YAAY,CAAC,EAC9B,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,CAAC,KAAK,EAAE,eAAe,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,WAAW,CAAC,KAAK,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,GAChG,OAAO,CAAC,CAAC,CAAC,CAQZ"}
@@ -6,11 +6,11 @@ import { uniq } from "@xylabs/array";
6
6
  import { assertEx } from "@xylabs/assert";
7
7
  import { exists } from "@xylabs/exists";
8
8
  import { AbstractArchivist } from "@xyo-network/archivist-abstract";
9
- import { ArchivistAllQuerySchema, ArchivistClearQuerySchema, ArchivistDeleteQuerySchema, ArchivistInsertQuerySchema, ArchivistNextQuerySchema, buildStandardIndexName } from "@xyo-network/archivist-model";
9
+ import { ArchivistAllQuerySchema, ArchivistClearQuerySchema, ArchivistDeleteQuerySchema, ArchivistInsertQuerySchema, ArchivistNextQuerySchema, buildStandardIndexName as buildStandardIndexName2 } from "@xyo-network/archivist-model";
10
10
  import { creatableModule } from "@xyo-network/module-model";
11
11
  import { PayloadBuilder } from "@xyo-network/payload-builder";
12
12
  import { SequenceConstants } from "@xyo-network/payload-model";
13
- import { openDB } from "idb";
13
+ import { openDB as openDB2 } from "idb";
14
14
 
15
15
  // src/Schema.ts
16
16
  var IndexedDbArchivistSchema = "network.xyo.archivist.indexeddb";
@@ -18,6 +18,89 @@ var IndexedDbArchivistSchema = "network.xyo.archivist.indexeddb";
18
18
  // src/Config.ts
19
19
  var IndexedDbArchivistConfigSchema = `${IndexedDbArchivistSchema}.config`;
20
20
 
21
+ // src/IndexedDbHelpers.ts
22
+ import { buildStandardIndexName } from "@xyo-network/archivist-model";
23
+ import { openDB } from "idb";
24
+ function createStore(db, storeName, indexes, logger) {
25
+ logger?.log(`Creating store ${storeName}`);
26
+ const store = db.createObjectStore(storeName, {
27
+ // If it isn't explicitly set, create a value by auto incrementing.
28
+ autoIncrement: true
29
+ });
30
+ store.name = storeName;
31
+ for (const { key, multiEntry, unique } of indexes) {
32
+ const indexKeys = Object.keys(key);
33
+ const keys = indexKeys.length === 1 ? indexKeys[0] : indexKeys;
34
+ const indexName = buildStandardIndexName({
35
+ key,
36
+ unique
37
+ });
38
+ console.log("createIndex", indexName, keys, {
39
+ multiEntry,
40
+ unique
41
+ });
42
+ store.createIndex(indexName, keys, {
43
+ multiEntry,
44
+ unique
45
+ });
46
+ }
47
+ }
48
+ __name(createStore, "createStore");
49
+ async function getExistingIndexes(db, storeName) {
50
+ return await useReadOnlyStore(db, storeName, (store) => {
51
+ return [
52
+ ...store.indexNames
53
+ ].map((indexName) => {
54
+ const index = store.index(indexName);
55
+ const key = {};
56
+ if (Array.isArray(index.keyPath)) {
57
+ for (const keyPath of index.keyPath) {
58
+ key[keyPath] = 1;
59
+ }
60
+ } else {
61
+ key[index.keyPath] = 1;
62
+ }
63
+ const desc = {
64
+ name: indexName,
65
+ key,
66
+ unique: index.unique,
67
+ multiEntry: index.multiEntry
68
+ };
69
+ return desc;
70
+ });
71
+ });
72
+ }
73
+ __name(getExistingIndexes, "getExistingIndexes");
74
+ async function useDb(dbName, callback) {
75
+ const db = await openDB(dbName);
76
+ try {
77
+ return await callback(db);
78
+ } finally {
79
+ db.close();
80
+ }
81
+ }
82
+ __name(useDb, "useDb");
83
+ async function useReadOnlyStore(db, storeName, callback) {
84
+ const transaction = db.transaction(storeName, "readonly");
85
+ const store = transaction.objectStore(storeName);
86
+ try {
87
+ return await callback(store);
88
+ } finally {
89
+ await transaction.done;
90
+ }
91
+ }
92
+ __name(useReadOnlyStore, "useReadOnlyStore");
93
+ async function useReadWriteStore(db, storeName, callback) {
94
+ const transaction = db.transaction(storeName, "readwrite");
95
+ const store = transaction.objectStore(storeName);
96
+ try {
97
+ return await callback(store);
98
+ } finally {
99
+ await transaction.done;
100
+ }
101
+ }
102
+ __name(useReadWriteStore, "useReadWriteStore");
103
+
21
104
  // src/Archivist.ts
22
105
  function _ts_decorate(decorators, target, key, desc) {
23
106
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
@@ -67,14 +150,15 @@ var IndexedDbArchivist = class _IndexedDbArchivist extends AbstractArchivist {
67
150
  unique: true
68
151
  };
69
152
  // eslint-disable-next-line @typescript-eslint/member-ordering
70
- static hashIndexName = buildStandardIndexName(_IndexedDbArchivist.hashIndex);
153
+ static hashIndexName = buildStandardIndexName2(_IndexedDbArchivist.hashIndex);
71
154
  // eslint-disable-next-line @typescript-eslint/member-ordering
72
- static dataHashIndexName = buildStandardIndexName(_IndexedDbArchivist.dataHashIndex);
155
+ static dataHashIndexName = buildStandardIndexName2(_IndexedDbArchivist.dataHashIndex);
73
156
  // eslint-disable-next-line @typescript-eslint/member-ordering
74
- static schemaIndexName = buildStandardIndexName(_IndexedDbArchivist.schemaIndex);
157
+ static schemaIndexName = buildStandardIndexName2(_IndexedDbArchivist.schemaIndex);
75
158
  // eslint-disable-next-line @typescript-eslint/member-ordering
76
- static sequenceIndexName = buildStandardIndexName(_IndexedDbArchivist.sequenceIndex);
159
+ static sequenceIndexName = buildStandardIndexName2(_IndexedDbArchivist.sequenceIndex);
77
160
  _dbName;
161
+ _dbVersion;
78
162
  _storeName;
79
163
  /**
80
164
  * The database name. If not supplied via config, it defaults
@@ -103,7 +187,8 @@ var IndexedDbArchivist = class _IndexedDbArchivist extends AbstractArchivist {
103
187
  * The database version. If not supplied via config, it defaults to 1.
104
188
  */
105
189
  get dbVersion() {
106
- return this.config?.dbVersion ?? _IndexedDbArchivist.defaultDbVersion;
190
+ this._dbVersion = this._dbVersion ?? this.config?.dbVersion ?? _IndexedDbArchivist.defaultDbVersion;
191
+ return this._dbVersion;
107
192
  }
108
193
  get queries() {
109
194
  return [
@@ -176,36 +261,36 @@ var IndexedDbArchivist = class _IndexedDbArchivist extends AbstractArchivist {
176
261
  });
177
262
  }
178
263
  async getFromCursor(db, storeName, order = "asc", limit = 10, cursor) {
179
- const transaction = db.transaction(storeName, "readonly");
180
- const store = transaction.objectStore(storeName);
181
- const sequenceIndex = assertEx(store.index(_IndexedDbArchivist.sequenceIndexName), () => "Failed to get sequence index");
182
- let sequenceCursor = void 0;
183
- const parsedCursor = cursor === SequenceConstants.minLocalSequence ? null : cursor;
184
- sequenceCursor = assertEx(await sequenceIndex.openCursor(null, order === "desc" ? "prev" : "next"), () => `Failed to get cursor [${parsedCursor}, ${cursor}]`);
185
- if (!sequenceCursor?.value) return [];
186
- try {
187
- sequenceCursor = parsedCursor ? sequenceCursor.value._sequence === parsedCursor ? await sequenceCursor?.advance(1) : await (await sequenceCursor?.continue(parsedCursor))?.advance(1) : sequenceCursor;
188
- } catch {
189
- return [];
190
- }
191
- let remaining = limit;
192
- const result = [];
193
- while (remaining) {
194
- const value = sequenceCursor?.value;
195
- if (value) {
196
- result.push(value);
197
- try {
198
- sequenceCursor = await sequenceCursor?.advance(1);
199
- } catch {
200
- break;
201
- }
202
- if (sequenceCursor === null) {
203
- break;
264
+ return await useReadOnlyStore(db, storeName, async (store) => {
265
+ const sequenceIndex = assertEx(store.index(_IndexedDbArchivist.sequenceIndexName), () => "Failed to get sequence index");
266
+ let sequenceCursor;
267
+ const parsedCursor = cursor === SequenceConstants.minLocalSequence ? null : cursor;
268
+ sequenceCursor = assertEx(await sequenceIndex.openCursor(null, order === "desc" ? "prev" : "next"), () => `Failed to get cursor [${parsedCursor}, ${cursor}]`);
269
+ if (!sequenceCursor?.value) return [];
270
+ try {
271
+ sequenceCursor = parsedCursor ? sequenceCursor.value._sequence === parsedCursor ? await sequenceCursor?.advance(1) : await (await sequenceCursor?.continue(parsedCursor))?.advance(1) : sequenceCursor;
272
+ } catch {
273
+ return [];
274
+ }
275
+ let remaining = limit;
276
+ const result = [];
277
+ while (remaining) {
278
+ const value = sequenceCursor?.value;
279
+ if (value) {
280
+ result.push(value);
281
+ try {
282
+ sequenceCursor = await sequenceCursor?.advance(1);
283
+ } catch {
284
+ break;
285
+ }
286
+ if (sequenceCursor === null) {
287
+ break;
288
+ }
204
289
  }
290
+ remaining--;
205
291
  }
206
- remaining--;
207
- }
208
- return result;
292
+ return result;
293
+ });
209
294
  }
210
295
  /**
211
296
  * Uses an index to get a payload by the index value, but returns the value with the primary key (from the root store)
@@ -216,20 +301,20 @@ var IndexedDbArchivist = class _IndexedDbArchivist extends AbstractArchivist {
216
301
  * @returns The primary key and the payload, or undefined if not found
217
302
  */
218
303
  async getFromIndexWithPrimaryKey(db, storeName, indexName, key) {
219
- const transaction = db.transaction(storeName, "readonly");
220
- const store = transaction.objectStore(storeName);
221
- const index = store.index(indexName);
222
- const cursor = await index.openCursor(key);
223
- if (cursor) {
224
- const singleValue = cursor.value;
225
- if (typeof cursor.primaryKey !== "number") {
226
- throw new TypeError("primaryKey must be a number");
304
+ return await useReadOnlyStore(db, storeName, async (store) => {
305
+ const index = store.index(indexName);
306
+ const cursor = await index.openCursor(key);
307
+ if (cursor) {
308
+ const singleValue = cursor.value;
309
+ if (typeof cursor.primaryKey !== "number") {
310
+ throw new TypeError("primaryKey must be a number");
311
+ }
312
+ return [
313
+ cursor.primaryKey,
314
+ singleValue
315
+ ];
227
316
  }
228
- return [
229
- cursor.primaryKey,
230
- singleValue
231
- ];
232
- }
317
+ });
233
318
  }
234
319
  async getHandler(hashes) {
235
320
  const payloads = await this.useDb((db) => Promise.all(
@@ -252,20 +337,16 @@ var IndexedDbArchivist = class _IndexedDbArchivist extends AbstractArchivist {
252
337
  }
253
338
  async insertHandler(payloads) {
254
339
  return await this.useDb(async (db) => {
255
- const tx = db.transaction(this.storeName, "readwrite");
256
- const store = tx.objectStore(this.storeName);
257
- const inserted = [];
258
- try {
340
+ return await useReadWriteStore(db, this.storeName, async (store) => {
341
+ const inserted = [];
259
342
  await Promise.all(payloads.map(async (payload) => {
260
343
  if (!await store.index(_IndexedDbArchivist.hashIndexName).get(payload._hash)) {
261
344
  await store.put(payload);
262
345
  inserted.push(payload);
263
346
  }
264
347
  }));
265
- } finally {
266
- await tx.done;
267
- }
268
- return inserted;
348
+ return inserted;
349
+ });
269
350
  });
270
351
  }
271
352
  async nextHandler(options) {
@@ -280,13 +361,47 @@ var IndexedDbArchivist = class _IndexedDbArchivist extends AbstractArchivist {
280
361
  });
281
362
  return true;
282
363
  }
364
+ async checkIndexes(db) {
365
+ const { indexes, storeName } = this;
366
+ if (db.objectStoreNames.contains(storeName)) {
367
+ const existingIndexes = await getExistingIndexes(db, storeName);
368
+ const existingIndexNames = new Set(existingIndexes.map(({ name }) => name).filter(exists));
369
+ for (const { key, unique } of indexes) {
370
+ const indexName = buildStandardIndexName2({
371
+ key,
372
+ unique
373
+ });
374
+ if (!existingIndexNames.has(indexName)) {
375
+ this._dbVersion = this._dbVersion === void 0 ? 0 : this._dbVersion + 1;
376
+ break;
377
+ }
378
+ }
379
+ return existingIndexes;
380
+ }
381
+ return [];
382
+ }
383
+ async checkObjectStore() {
384
+ const { dbName, storeName } = this;
385
+ return await useDb(dbName, (db) => {
386
+ if (db.version >= (this._dbVersion ?? 0)) {
387
+ this._dbVersion = db.version;
388
+ }
389
+ if (db.objectStoreNames.contains(storeName)) {
390
+ return this.checkIndexes(db);
391
+ } else {
392
+ this._dbVersion = (this._dbVersion ?? 0) + 1;
393
+ return [];
394
+ }
395
+ });
396
+ }
283
397
  /**
284
398
  * Returns that the desired DB/Store initialized to the correct version
285
399
  * @returns The initialized DB
286
400
  */
287
401
  async getInitializedDb() {
402
+ const existingIndexes = await this.checkObjectStore();
288
403
  const { dbName, dbVersion, indexes, storeName, logger } = this;
289
- return await openDB(dbName, dbVersion, {
404
+ return await openDB2(dbName, dbVersion, {
290
405
  blocked(currentVersion, blockedVersion, event) {
291
406
  logger.warn(`IndexedDbArchivist: Blocked from upgrading from ${currentVersion} to ${blockedVersion}`, event);
292
407
  },
@@ -308,23 +423,17 @@ var IndexedDbArchivist = class _IndexedDbArchivist extends AbstractArchivist {
308
423
  }
309
424
  }
310
425
  }
311
- const store = database.createObjectStore(storeName, {
312
- // If it isn't explicitly set, create a value by auto incrementing.
313
- autoIncrement: true
314
- });
315
- store.name = storeName;
316
- for (const { key, multiEntry, unique } of indexes) {
317
- const indexKeys = Object.keys(key);
318
- const keys = indexKeys.length === 1 ? indexKeys[0] : indexKeys;
319
- const indexName = buildStandardIndexName({
320
- key,
321
- unique
322
- });
323
- store.createIndex(indexName, keys, {
324
- multiEntry,
325
- unique
326
- });
327
- }
426
+ const existingIndexesToKeep = existingIndexes.filter(({ name: existingName }) => !indexes.some(({ name }) => name === existingName));
427
+ console.log("existingIndexes", existingIndexes);
428
+ console.log("existingIndexesToKeep", existingIndexesToKeep);
429
+ console.log("indexes", indexes);
430
+ const indexesToCreate = indexes.map((idx) => ({
431
+ ...idx,
432
+ name: buildStandardIndexName2(idx)
433
+ })).reduce((acc, idx) => acc.set(idx.name, idx), /* @__PURE__ */ new Map()).values();
434
+ createStore(database, storeName, [
435
+ ...indexesToCreate
436
+ ], logger);
328
437
  }
329
438
  });
330
439
  }
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/Archivist.ts","../../src/Schema.ts","../../src/Config.ts"],"sourcesContent":["/* eslint-disable complexity */\nimport { uniq } from '@xylabs/array'\nimport { assertEx } from '@xylabs/assert'\nimport { exists } from '@xylabs/exists'\nimport { Hash, Hex } from '@xylabs/hex'\nimport { AbstractArchivist } from '@xyo-network/archivist-abstract'\nimport {\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n ArchivistModuleEventData,\n ArchivistNextOptions,\n ArchivistNextQuerySchema,\n buildStandardIndexName,\n IndexDescription,\n} from '@xyo-network/archivist-model'\nimport { creatableModule } from '@xyo-network/module-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport {\n Payload, Schema, SequenceConstants, WithStorageMeta,\n} from '@xyo-network/payload-model'\nimport {\n IDBPCursorWithValue, IDBPDatabase, openDB,\n} from 'idb'\n\nimport { IndexedDbArchivistConfigSchema } from './Config.ts'\nimport { IndexedDbArchivistParams } from './Params.ts'\n\nexport interface PayloadStore {\n [s: string]: WithStorageMeta\n}\n\n@creatableModule()\nexport class IndexedDbArchivist<\n TParams extends IndexedDbArchivistParams = IndexedDbArchivistParams,\n TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,\n> extends AbstractArchivist<TParams, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, IndexedDbArchivistConfigSchema]\n static override readonly defaultConfigSchema: Schema = IndexedDbArchivistConfigSchema\n static readonly defaultDbName = 'archivist'\n static readonly defaultDbVersion = 1\n static readonly defaultStoreName = 'payloads'\n private static readonly dataHashIndex: IndexDescription = {\n key: { _dataHash: 1 }, multiEntry: false, unique: false,\n }\n\n private static readonly hashIndex: IndexDescription = {\n key: { _hash: 1 }, multiEntry: false, unique: true,\n }\n\n private static 
readonly schemaIndex: IndexDescription = {\n key: { schema: 1 }, multiEntry: false, unique: false,\n }\n\n private static readonly sequenceIndex: IndexDescription = {\n key: { _sequence: 1 }, multiEntry: false, unique: true,\n }\n\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly hashIndexName = buildStandardIndexName(IndexedDbArchivist.hashIndex)\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly dataHashIndexName = buildStandardIndexName(IndexedDbArchivist.dataHashIndex)\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly schemaIndexName = buildStandardIndexName(IndexedDbArchivist.schemaIndex)\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly sequenceIndexName = buildStandardIndexName(IndexedDbArchivist.sequenceIndex)\n\n private _dbName?: string\n private _storeName?: string\n\n /**\n * The database name. If not supplied via config, it defaults\n * to the module name (not guaranteed to be unique) and if module\n * name is not supplied, it defaults to `archivist`. This behavior\n * biases towards a single, isolated DB per archivist which seems to\n * make the most sense for 99% of use cases.\n */\n get dbName() {\n if (!this._dbName) {\n if (this.config?.dbName) {\n this._dbName = this.config?.dbName\n } else {\n if (this.config?.name) {\n this.logger.warn('No dbName provided, using module name: ', this.config?.name)\n this._dbName = this.config?.name\n } else {\n this.logger.warn('No dbName provided, using default name: ', IndexedDbArchivist.defaultDbName)\n this._dbName = IndexedDbArchivist.defaultDbName\n }\n }\n }\n return assertEx(this._dbName)\n }\n\n /**\n * The database version. If not supplied via config, it defaults to 1.\n */\n get dbVersion() {\n return this.config?.dbVersion ?? 
IndexedDbArchivist.defaultDbVersion\n }\n\n override get queries() {\n return [\n ArchivistNextQuerySchema,\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n ...super.queries,\n ]\n }\n\n /**\n * The name of the object store. If not supplied via config, it defaults\n * to `payloads`.\n */\n get storeName() {\n if (!this._storeName) {\n if (this.config?.storeName) {\n this._storeName = this.config?.storeName\n } else {\n this.logger.warn('No storeName provided, using default name: ', IndexedDbArchivist.defaultStoreName)\n this._storeName = IndexedDbArchivist.defaultStoreName\n }\n }\n return assertEx(this._storeName)\n }\n\n /**\n * The indexes to create on the store\n */\n private get indexes() {\n return [\n IndexedDbArchivist.dataHashIndex,\n IndexedDbArchivist.hashIndex,\n IndexedDbArchivist.schemaIndex,\n IndexedDbArchivist.sequenceIndex,\n ...(this.config?.storage?.indexes ?? []),\n ]\n }\n\n protected override async allHandler(): Promise<WithStorageMeta<Payload>[]> {\n // Get all payloads from the store\n const payloads = await this.useDb(db => db.getAll(this.storeName))\n // Remove any metadata before returning to the client\n return payloads\n }\n\n protected override async clearHandler(): Promise<void> {\n await this.useDb(db => db.clear(this.storeName))\n }\n\n protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {\n // Filter duplicates to prevent unnecessary DB queries\n const uniqueHashes = [...new Set(hashes)]\n const pairs = await PayloadBuilder.hashPairs(await this.getHandler(uniqueHashes))\n const hashesToDelete = (await Promise.all(pairs.map(async (pair) => {\n const dataHash0 = await PayloadBuilder.dataHash(pair[0])\n return [dataHash0, pair[1]]\n }))).flat()\n // Remove any duplicates\n const distinctHashes = [...new Set(hashesToDelete)]\n return await this.useDb(async (db) => {\n // Only return hashes that were successfully deleted\n const found = await 
Promise.all(\n distinctHashes.map(async (hash) => {\n // Check if the hash exists\n const existing\n = (await db.getKeyFromIndex(this.storeName, IndexedDbArchivist.hashIndexName, hash))\n ?? (await db.getKeyFromIndex(this.storeName, IndexedDbArchivist.dataHashIndexName, hash))\n // If it does exist\n if (existing) {\n // Delete it\n await db.delete(this.storeName, existing)\n // Return the hash so it gets added to the list of deleted hashes\n return hash\n }\n }),\n )\n return found.filter(exists).filter(hash => uniqueHashes.includes(hash))\n })\n }\n\n protected async getFromCursor(\n db: IDBPDatabase<PayloadStore>,\n storeName: string,\n order: 'asc' | 'desc' = 'asc',\n limit: number = 10,\n cursor?: Hex,\n ): Promise<WithStorageMeta[]> {\n // TODO: We have to handle the case where the cursor is not found, and then find the correct cursor to start with (thunked cursor)\n const transaction = db.transaction(storeName, 'readonly')\n const store = transaction.objectStore(storeName)\n const sequenceIndex = assertEx(store.index(IndexedDbArchivist.sequenceIndexName), () => 'Failed to get sequence index')\n let sequenceCursor: IDBPCursorWithValue<PayloadStore, [string]> | null | undefined = undefined\n const parsedCursor = cursor === SequenceConstants.minLocalSequence ? null : cursor\n sequenceCursor = assertEx(await sequenceIndex.openCursor(\n null,\n order === 'desc' ? 'prev' : 'next',\n ), () => `Failed to get cursor [${parsedCursor}, ${cursor}]`)\n if (!sequenceCursor?.value) return []\n try {\n sequenceCursor = parsedCursor\n ? sequenceCursor.value._sequence === parsedCursor ? 
await sequenceCursor?.advance(1) : await (await sequenceCursor?.continue(parsedCursor))?.advance(1)\n : sequenceCursor // advance to skip the initial value\n } catch {\n return []\n }\n\n let remaining = limit\n const result: WithStorageMeta[] = []\n while (remaining) {\n const value = sequenceCursor?.value\n if (value) {\n result.push(value)\n try {\n sequenceCursor = await sequenceCursor?.advance(1)\n } catch {\n break\n }\n if (sequenceCursor === null) {\n break\n }\n }\n remaining--\n }\n return result\n }\n\n /**\n * Uses an index to get a payload by the index value, but returns the value with the primary key (from the root store)\n * @param db The db instance to use\n * @param storeName The name of the store to use\n * @param indexName The index to use\n * @param key The key to get from the index\n * @returns The primary key and the payload, or undefined if not found\n */\n protected async getFromIndexWithPrimaryKey(\n db: IDBPDatabase<PayloadStore>,\n storeName: string,\n indexName: string,\n key: IDBValidKey,\n ): Promise<[number, WithStorageMeta] | undefined> {\n const transaction = db.transaction(storeName, 'readonly')\n const store = transaction.objectStore(storeName)\n const index = store.index(indexName)\n const cursor = await index.openCursor(key)\n if (cursor) {\n const singleValue = cursor.value\n // NOTE: It's known to be a number because we are using IndexedDB supplied auto-incrementing keys\n if (typeof cursor.primaryKey !== 'number') {\n throw new TypeError('primaryKey must be a number')\n }\n\n return [cursor.primaryKey, singleValue]\n }\n }\n\n protected override async getHandler(hashes: string[]): Promise<WithStorageMeta[]> {\n const payloads = await this.useDb(db =>\n Promise.all(\n // Filter duplicates to prevent unnecessary DB queries\n uniq(hashes).map(async (hash) => {\n // Find by hash\n const payload = await this.getFromIndexWithPrimaryKey(db, this.storeName, IndexedDbArchivist.hashIndexName, hash)\n // If found, return\n if (payload) 
return payload\n // Otherwise, find by data hash\n return this.getFromIndexWithPrimaryKey(db, this.storeName, IndexedDbArchivist.dataHashIndexName, hash)\n }),\n ))\n\n const found = new Set<string>()\n return (\n payloads\n // Filter out not found\n .filter(exists)\n // Sort by primary key\n .sort((a, b) => a![0] - b![0])\n // Filter out duplicates by hash\n .filter(([_key, payload]) => {\n if (found.has(payload._hash)) {\n return false\n } else {\n found.add(payload._hash)\n return true\n }\n })\n // Return just the payloads\n .map(([_key, payload]) => payload)\n )\n }\n\n protected override async insertHandler(payloads: WithStorageMeta<Payload>[]): Promise<WithStorageMeta<Payload>[]> {\n return await this.useDb(async (db) => {\n // Perform all inserts via a single transaction to ensure atomicity\n // with respect to checking for the pre-existence of the hash.\n // This is done to prevent duplicate root hashes due to race\n // conditions between checking vs insertion.\n const tx = db.transaction(this.storeName, 'readwrite')\n // Get the object store\n const store = tx.objectStore(this.storeName)\n // Return only the payloads that were successfully inserted\n const inserted: WithStorageMeta<Payload>[] = []\n try {\n await Promise.all(\n payloads.map(async (payload) => {\n // only insert if hash does not already exist\n if (!await store.index(IndexedDbArchivist.hashIndexName).get(payload._hash)) {\n // Insert the payload\n await store.put(payload)\n // Add it to the inserted list\n inserted.push(payload)\n }\n }),\n )\n } finally {\n // Ensure the transaction is closed\n await tx.done\n }\n return inserted\n })\n }\n\n protected override async nextHandler(options?: ArchivistNextOptions): Promise<WithStorageMeta<Payload>[]> {\n const {\n limit, cursor, order,\n } = options ?? {}\n return await this.useDb(async (db) => {\n return await this.getFromCursor(db, this.storeName, order, limit ?? 
10, cursor)\n })\n }\n\n protected override async startHandler() {\n await super.startHandler()\n // NOTE: We could defer this creation to first access but we\n // want to fail fast here in case something is wrong\n await this.useDb(() => {})\n return true\n }\n\n /**\n * Returns that the desired DB/Store initialized to the correct version\n * @returns The initialized DB\n */\n private async getInitializedDb(): Promise<IDBPDatabase<PayloadStore>> {\n const {\n dbName, dbVersion, indexes, storeName, logger,\n } = this\n return await openDB<PayloadStore>(dbName, dbVersion, {\n blocked(currentVersion, blockedVersion, event) {\n logger.warn(`IndexedDbArchivist: Blocked from upgrading from ${currentVersion} to ${blockedVersion}`, event)\n },\n blocking(currentVersion, blockedVersion, event) {\n logger.warn(`IndexedDbArchivist: Blocking upgrade from ${currentVersion} to ${blockedVersion}`, event)\n },\n terminated() {\n logger.log('IndexedDbArchivist: Terminated')\n },\n upgrade(database, oldVersion, newVersion, transaction) {\n // NOTE: This is called whenever the DB is created/updated. We could simply ensure the desired end\n // state but, out of an abundance of caution, we will just delete (so we know where we are starting\n // from a known good point) and recreate the desired state. This prioritizes resilience over data\n // retention but we can revisit that tradeoff when it becomes limiting. 
Because distributed browser\n // state is extremely hard to debug, this seems like fair tradeoff for now.\n if (oldVersion !== newVersion) {\n logger.log(`IndexedDbArchivist: Upgrading from ${oldVersion} to ${newVersion}`)\n // Delete any existing databases that are not the current version\n const objectStores = transaction.objectStoreNames\n for (const name of objectStores) {\n try {\n database.deleteObjectStore(name)\n } catch {\n logger.log(`IndexedDbArchivist: Failed to delete existing object store ${name}`)\n }\n }\n }\n // Create the store\n const store = database.createObjectStore(storeName, {\n // If it isn't explicitly set, create a value by auto incrementing.\n autoIncrement: true,\n })\n // Name the store\n store.name = storeName\n // Create an index on the hash\n for (const {\n key, multiEntry, unique,\n } of indexes) {\n const indexKeys = Object.keys(key)\n const keys = indexKeys.length === 1 ? indexKeys[0] : indexKeys\n const indexName = buildStandardIndexName({ key, unique })\n store.createIndex(indexName, keys, { multiEntry, unique })\n }\n },\n })\n }\n\n /**\n * Executes a callback with the initialized DB and then closes the db\n * @param callback The method to execute with the initialized DB\n * @returns\n */\n private async useDb<T>(callback: (db: IDBPDatabase<PayloadStore>) => Promise<T> | T): Promise<T> {\n // Get the initialized DB\n const db = await this.getInitializedDb()\n try {\n // Perform the callback\n return await callback(db)\n } finally {\n // Close the DB\n db.close()\n }\n }\n}\n","export type IndexedDbArchivistSchema = 'network.xyo.archivist.indexeddb'\nexport const IndexedDbArchivistSchema: IndexedDbArchivistSchema = 'network.xyo.archivist.indexeddb'\n","import type { ArchivistConfig } from '@xyo-network/archivist-model'\n\nimport { IndexedDbArchivistSchema } from './Schema.ts'\n\nexport type IndexedDbArchivistConfigSchema = `${IndexedDbArchivistSchema}.config`\nexport const IndexedDbArchivistConfigSchema: 
IndexedDbArchivistConfigSchema = `${IndexedDbArchivistSchema}.config`\n\nexport type IndexedDbArchivistConfig = ArchivistConfig<{\n /**\n * The database name\n */\n dbName?: string\n /**\n * The version of the DB, defaults to 1\n */\n dbVersion?: number\n schema: IndexedDbArchivistConfigSchema\n /**\n * The name of the object store\n */\n storeName?: string\n}>\n"],"mappings":";;;;AACA,SAASA,YAAY;AACrB,SAASC,gBAAgB;AACzB,SAASC,cAAc;AAEvB,SAASC,yBAAyB;AAClC,SACEC,yBACAC,2BACAC,4BACAC,4BAGAC,0BACAC,8BAEK;AACP,SAASC,uBAAuB;AAChC,SAASC,sBAAsB;AAC/B,SACmBC,yBACZ;AACP,SACqCC,cAC9B;;;ACvBA,IAAMC,2BAAqD;;;ACI3D,IAAMC,iCAAiE,GAAGC,wBAAAA;;;AFLpD,SAAA,aAAA,YAAA,QAAA,KAAA,MAAA;;;;;;AAAA;AAkCtB,IAAMC,qBAAN,MAAMA,4BAGHC,kBAAAA;EArCV,OAqCUA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EACvD,OAAgBE,gBAAgB;EAChC,OAAgBC,mBAAmB;EACnC,OAAgBC,mBAAmB;EACnC,OAAwBC,gBAAkC;IACxDC,KAAK;MAAEC,WAAW;IAAE;IAAGC,YAAY;IAAOC,QAAQ;EACpD;EAEA,OAAwBC,YAA8B;IACpDJ,KAAK;MAAEK,OAAO;IAAE;IAAGH,YAAY;IAAOC,QAAQ;EAChD;EAEA,OAAwBG,cAAgC;IACtDN,KAAK;MAAEO,QAAQ;IAAE;IAAGL,YAAY;IAAOC,QAAQ;EACjD;EAEA,OAAwBK,gBAAkC;IACxDR,KAAK;MAAES,WAAW;IAAE;IAAGP,YAAY;IAAOC,QAAQ;EACpD;;EAGA,OAAgBO,gBAAgBC,uBAAuBpB,oBAAmBa,SAAS;;EAEnF,OAAgBQ,oBAAoBD,uBAAuBpB,oBAAmBQ,aAAa;;EAE3F,OAAgBc,kBAAkBF,uBAAuBpB,oBAAmBe,WAAW;;EAEvF,OAAgBQ,oBAAoBH,uBAAuBpB,oBAAmBiB,aAAa;EAEnFO;EACAC;;;;;;;;EASR,IAAIC,SAAS;AACX,QAAI,CAAC,KAAKF,SAAS;AACjB,UAAI,KAAKG,QAAQD,QAAQ;AACvB,aAAKF,UAAU,KAAKG,QAAQD;MAC9B,OAAO;AACL,YAAI,KAAKC,QAAQC,MAAM;AACrB,eAAKC,OAAOC,KAAK,2CAA2C,KAAKH,QAAQC,IAAAA;AACzE,eAAKJ,UAAU,KAAKG,QAAQC;QAC9B,OAAO;AACL,eAAKC,OAAOC,KAAK,4CAA4C9B,oBAAmBK,aAAa;AAC7F,eAAKmB,UAAUxB,oBAAmBK;QACpC;MACF;IACF;AACA,WAAO0B,SAAS,KAAKP,OAAO;EAC9B;;;;EAKA,IAAIQ,YAAY;AACd,WAAO,KAAKL,QAAQK,aAAahC,oBAAmBM;EACtD;EAEA,IAAa2B,UAAU;AACrB,WAAO;MACLC;MACAC;MACAC;MACAC;MACAC;SACG,MAAML;;EAEb;;;;;EAMA,IAAIM,YAAY;AACd,QAAI,CAAC,KAAKd,YAAY;AACpB,UAAI,KAAKE,QAAQY,WAAW;AAC1B,aAAKd,aAAa,KAAKE,QAAQY;MACjC,OAAO;AACL,aAAKV,OAAOC,KAAK,+CAA+C9B,oBAAmBO,gBAAgB;AACn
G,aAAKkB,aAAazB,oBAAmBO;MACvC;IACF;AACA,WAAOwB,SAAS,KAAKN,UAAU;EACjC;;;;EAKA,IAAYe,UAAU;AACpB,WAAO;MACLxC,oBAAmBQ;MACnBR,oBAAmBa;MACnBb,oBAAmBe;MACnBf,oBAAmBiB;SACf,KAAKU,QAAQc,SAASD,WAAW,CAAA;;EAEzC;EAEA,MAAyBE,aAAkD;AAEzE,UAAMC,WAAW,MAAM,KAAKC,MAAMC,CAAAA,OAAMA,GAAGC,OAAO,KAAKP,SAAS,CAAA;AAEhE,WAAOI;EACT;EAEA,MAAyBI,eAA8B;AACrD,UAAM,KAAKH,MAAMC,CAAAA,OAAMA,GAAGG,MAAM,KAAKT,SAAS,CAAA;EAChD;EAEA,MAAyBU,cAAcC,QAAiC;AAEtE,UAAMC,eAAe;SAAI,IAAIC,IAAIF,MAAAA;;AACjC,UAAMG,QAAQ,MAAMC,eAAeC,UAAU,MAAM,KAAKC,WAAWL,YAAAA,CAAAA;AACnE,UAAMM,kBAAkB,MAAMC,QAAQC,IAAIN,MAAMO,IAAI,OAAOC,SAAAA;AACzD,YAAMC,YAAY,MAAMR,eAAeS,SAASF,KAAK,CAAA,CAAE;AACvD,aAAO;QAACC;QAAWD,KAAK,CAAA;;IAC1B,CAAA,CAAA,GAAKG,KAAI;AAET,UAAMC,iBAAiB;SAAI,IAAIb,IAAIK,cAAAA;;AACnC,WAAO,MAAM,KAAKb,MAAM,OAAOC,OAAAA;AAE7B,YAAMqB,QAAQ,MAAMR,QAAQC,IAC1BM,eAAeL,IAAI,OAAOO,SAAAA;AAExB,cAAMC,WACD,MAAMvB,GAAGwB,gBAAgB,KAAK9B,WAAWvC,oBAAmBmB,eAAegD,IAAAA,KAC1E,MAAMtB,GAAGwB,gBAAgB,KAAK9B,WAAWvC,oBAAmBqB,mBAAmB8C,IAAAA;AAErF,YAAIC,UAAU;AAEZ,gBAAMvB,GAAGyB,OAAO,KAAK/B,WAAW6B,QAAAA;AAEhC,iBAAOD;QACT;MACF,CAAA,CAAA;AAEF,aAAOD,MAAMK,OAAOC,MAAAA,EAAQD,OAAOJ,CAAAA,SAAQhB,aAAasB,SAASN,IAAAA,CAAAA;IACnE,CAAA;EACF;EAEA,MAAgBO,cACd7B,IACAN,WACEoC,QAAwB,OACxBC,QAAgB,IAChBC,QAC0B;AAE5B,UAAMC,cAAcjC,GAAGiC,YAAYvC,WAAW,UAAA;AAC9C,UAAMwC,QAAQD,YAAYE,YAAYzC,SAAAA;AACtC,UAAMtB,gBAAgBc,SAASgD,MAAME,MAAMjF,oBAAmBuB,iBAAiB,GAAG,MAAM,8BAAA;AACxF,QAAI2D,iBAAiFC;AACrF,UAAMC,eAAeP,WAAWQ,kBAAkBC,mBAAmB,OAAOT;AAC5EK,qBAAiBnD,SAAS,MAAMd,cAAcsE,WAC5C,MACAZ,UAAU,SAAS,SAAS,MAAA,GAC3B,MAAM,yBAAyBS,YAAAA,KAAiBP,MAAAA,GAAS;AAC5D,QAAI,CAACK,gBAAgBM,MAAO,QAAO,CAAA;AACnC,QAAI;AACFN,uBAAiBE,eACbF,eAAeM,MAAMtE,cAAckE,eAAe,MAAMF,gBAAgBO,QAAQ,CAAA,IAAK,OAAO,MAAMP,gBAAgBQ,SAASN,YAAAA,IAAgBK,QAAQ,CAAA,IACnJP;IACN,QAAQ;AACN,aAAO,CAAA;IACT;AAEA,QAAIS,YAAYf;AAChB,UAAMgB,SAA4B,CAAA;AAClC,WAAOD,WAAW;AAChB,YAAMH,QAAQN,gBAAgBM;AAC9B,UAAIA,OAAO;AACTI,eAAOC,KAAKL,KAAAA;AACZ,YAAI;AACFN,2BAAiB,MAAMA,gBAAgBO,QAAQ,CAAA;QACjD,QAAQ;AACN;QACF;AACA,YAAIP,mBAAmB,MAAM;AAC3B;QACF;MACF;AA
CAS;IACF;AACA,WAAOC;EACT;;;;;;;;;EAUA,MAAgBE,2BACdjD,IACAN,WACAwD,WACAtF,KACgD;AAChD,UAAMqE,cAAcjC,GAAGiC,YAAYvC,WAAW,UAAA;AAC9C,UAAMwC,QAAQD,YAAYE,YAAYzC,SAAAA;AACtC,UAAM0C,QAAQF,MAAME,MAAMc,SAAAA;AAC1B,UAAMlB,SAAS,MAAMI,MAAMM,WAAW9E,GAAAA;AACtC,QAAIoE,QAAQ;AACV,YAAMmB,cAAcnB,OAAOW;AAE3B,UAAI,OAAOX,OAAOoB,eAAe,UAAU;AACzC,cAAM,IAAIC,UAAU,6BAAA;MACtB;AAEA,aAAO;QAACrB,OAAOoB;QAAYD;;IAC7B;EACF;EAEA,MAAyBxC,WAAWN,QAA8C;AAChF,UAAMP,WAAW,MAAM,KAAKC,MAAMC,CAAAA,OAChCa,QAAQC;;MAENwC,KAAKjD,MAAAA,EAAQU,IAAI,OAAOO,SAAAA;AAEtB,cAAMiC,UAAU,MAAM,KAAKN,2BAA2BjD,IAAI,KAAKN,WAAWvC,oBAAmBmB,eAAegD,IAAAA;AAE5G,YAAIiC,QAAS,QAAOA;AAEpB,eAAO,KAAKN,2BAA2BjD,IAAI,KAAKN,WAAWvC,oBAAmBqB,mBAAmB8C,IAAAA;MACnG,CAAA;IAAA,CAAA;AAGJ,UAAMD,QAAQ,oBAAId,IAAAA;AAClB,WACET,SAEG4B,OAAOC,MAAAA,EAEP6B,KAAK,CAACC,GAAGC,MAAMD,EAAG,CAAA,IAAKC,EAAG,CAAA,CAAE,EAE5BhC,OAAO,CAAC,CAACiC,MAAMJ,OAAAA,MAAQ;AACtB,UAAIlC,MAAMuC,IAAIL,QAAQtF,KAAK,GAAG;AAC5B,eAAO;MACT,OAAO;AACLoD,cAAMwC,IAAIN,QAAQtF,KAAK;AACvB,eAAO;MACT;IACF,CAAA,EAEC8C,IAAI,CAAC,CAAC4C,MAAMJ,OAAAA,MAAaA,OAAAA;EAEhC;EAEA,MAAyBO,cAAchE,UAA2E;AAChH,WAAO,MAAM,KAAKC,MAAM,OAAOC,OAAAA;AAK7B,YAAM+D,KAAK/D,GAAGiC,YAAY,KAAKvC,WAAW,WAAA;AAE1C,YAAMwC,QAAQ6B,GAAG5B,YAAY,KAAKzC,SAAS;AAE3C,YAAMsE,WAAuC,CAAA;AAC7C,UAAI;AACF,cAAMnD,QAAQC,IACZhB,SAASiB,IAAI,OAAOwC,YAAAA;AAElB,cAAI,CAAC,MAAMrB,MAAME,MAAMjF,oBAAmBmB,aAAa,EAAE2F,IAAIV,QAAQtF,KAAK,GAAG;AAE3E,kBAAMiE,MAAMgC,IAAIX,OAAAA;AAEhBS,qBAAShB,KAAKO,OAAAA;UAChB;QACF,CAAA,CAAA;MAEJ,UAAA;AAEE,cAAMQ,GAAGI;MACX;AACA,aAAOH;IACT,CAAA;EACF;EAEA,MAAyBI,YAAYC,SAAqE;AACxG,UAAM,EACJtC,OAAOC,QAAQF,MAAK,IAClBuC,WAAW,CAAC;AAChB,WAAO,MAAM,KAAKtE,MAAM,OAAOC,OAAAA;AAC7B,aAAO,MAAM,KAAK6B,cAAc7B,IAAI,KAAKN,WAAWoC,OAAOC,SAAS,IAAIC,MAAAA;IAC1E,CAAA;EACF;EAEA,MAAyBsC,eAAe;AACtC,UAAM,MAAMA,aAAAA;AAGZ,UAAM,KAAKvE,MAAM,MAAA;IAAO,CAAA;AACxB,WAAO;EACT;;;;;EAMA,MAAcwE,mBAAwD;AACpE,UAAM,EACJ1F,QAAQM,WAAWQ,SAASD,WAAWV,OAAM,IAC3C;AACJ,WAAO,MAAMwF,OAAqB3F,QAAQM,WAAW;MACnDsF,QAAQC,gBAAgBC,gBAAgBC,OAAK;AAC3C5F,eAAOC,KAAK,mDAAmDyF,cAAAA,OAAqBC,cAAAA,I
AAkBC,KAAAA;MACxG;MACAC,SAASH,gBAAgBC,gBAAgBC,OAAK;AAC5C5F,eAAOC,KAAK,6CAA6CyF,cAAAA,OAAqBC,cAAAA,IAAkBC,KAAAA;MAClG;MACAE,aAAAA;AACE9F,eAAO+F,IAAI,gCAAA;MACb;MACAC,QAAQC,UAAUC,YAAYC,YAAYlD,aAAW;AAMnD,YAAIiD,eAAeC,YAAY;AAC7BnG,iBAAO+F,IAAI,sCAAsCG,UAAAA,OAAiBC,UAAAA,EAAY;AAE9E,gBAAMC,eAAenD,YAAYoD;AACjC,qBAAWtG,QAAQqG,cAAc;AAC/B,gBAAI;AACFH,uBAASK,kBAAkBvG,IAAAA;YAC7B,QAAQ;AACNC,qBAAO+F,IAAI,8DAA8DhG,IAAAA,EAAM;YACjF;UACF;QACF;AAEA,cAAMmD,QAAQ+C,SAASM,kBAAkB7F,WAAW;;UAElD8F,eAAe;QACjB,CAAA;AAEAtD,cAAMnD,OAAOW;AAEb,mBAAW,EACT9B,KAAKE,YAAYC,OAAM,KACpB4B,SAAS;AACZ,gBAAM8F,YAAYC,OAAOC,KAAK/H,GAAAA;AAC9B,gBAAM+H,OAAOF,UAAUG,WAAW,IAAIH,UAAU,CAAA,IAAKA;AACrD,gBAAMvC,YAAY3E,uBAAuB;YAAEX;YAAKG;UAAO,CAAA;AACvDmE,gBAAM2D,YAAY3C,WAAWyC,MAAM;YAAE7H;YAAYC;UAAO,CAAA;QAC1D;MACF;IACF,CAAA;EACF;;;;;;EAOA,MAAcgC,MAAS+F,UAA0E;AAE/F,UAAM9F,KAAK,MAAM,KAAKuE,iBAAgB;AACtC,QAAI;AAEF,aAAO,MAAMuB,SAAS9F,EAAAA;IACxB,UAAA;AAEEA,SAAG+F,MAAK;IACV;EACF;AACF;;;;","names":["uniq","assertEx","exists","AbstractArchivist","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","ArchivistNextQuerySchema","buildStandardIndexName","creatableModule","PayloadBuilder","SequenceConstants","openDB","IndexedDbArchivistSchema","IndexedDbArchivistConfigSchema","IndexedDbArchivistSchema","IndexedDbArchivist","AbstractArchivist","configSchemas","IndexedDbArchivistConfigSchema","defaultConfigSchema","defaultDbName","defaultDbVersion","defaultStoreName","dataHashIndex","key","_dataHash","multiEntry","unique","hashIndex","_hash","schemaIndex","schema","sequenceIndex","_sequence","hashIndexName","buildStandardIndexName","dataHashIndexName","schemaIndexName","sequenceIndexName","_dbName","_storeName","dbName","config","name","logger","warn","assertEx","dbVersion","queries","ArchivistNextQuerySchema","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","storeName","indexes","storage","allHandler","payloads",
"useDb","db","getAll","clearHandler","clear","deleteHandler","hashes","uniqueHashes","Set","pairs","PayloadBuilder","hashPairs","getHandler","hashesToDelete","Promise","all","map","pair","dataHash0","dataHash","flat","distinctHashes","found","hash","existing","getKeyFromIndex","delete","filter","exists","includes","getFromCursor","order","limit","cursor","transaction","store","objectStore","index","sequenceCursor","undefined","parsedCursor","SequenceConstants","minLocalSequence","openCursor","value","advance","continue","remaining","result","push","getFromIndexWithPrimaryKey","indexName","singleValue","primaryKey","TypeError","uniq","payload","sort","a","b","_key","has","add","insertHandler","tx","inserted","get","put","done","nextHandler","options","startHandler","getInitializedDb","openDB","blocked","currentVersion","blockedVersion","event","blocking","terminated","log","upgrade","database","oldVersion","newVersion","objectStores","objectStoreNames","deleteObjectStore","createObjectStore","autoIncrement","indexKeys","Object","keys","length","createIndex","callback","close"]}
1
+ {"version":3,"sources":["../../src/Archivist.ts","../../src/Schema.ts","../../src/Config.ts","../../src/IndexedDbHelpers.ts"],"sourcesContent":["import { uniq } from '@xylabs/array'\nimport { assertEx } from '@xylabs/assert'\nimport { exists } from '@xylabs/exists'\nimport { Hash, Hex } from '@xylabs/hex'\nimport { AbstractArchivist } from '@xyo-network/archivist-abstract'\nimport {\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n ArchivistModuleEventData,\n ArchivistNextOptions,\n ArchivistNextQuerySchema,\n buildStandardIndexName,\n IndexDescription,\n} from '@xyo-network/archivist-model'\nimport { creatableModule } from '@xyo-network/module-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport {\n Payload, Schema, SequenceConstants, WithStorageMeta,\n} from '@xyo-network/payload-model'\nimport {\n IDBPCursorWithValue, IDBPDatabase, openDB,\n} from 'idb'\n\nimport { IndexedDbArchivistConfigSchema } from './Config.ts'\nimport {\n createStore,\n getExistingIndexes,\n useDb, useReadOnlyStore, useReadWriteStore,\n} from './IndexedDbHelpers.ts'\nimport { IndexedDbArchivistParams } from './Params.ts'\n\nexport interface PayloadStore {\n [s: string]: WithStorageMeta\n}\n\n@creatableModule()\nexport class IndexedDbArchivist<\n TParams extends IndexedDbArchivistParams = IndexedDbArchivistParams,\n TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,\n> extends AbstractArchivist<TParams, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, IndexedDbArchivistConfigSchema]\n static override readonly defaultConfigSchema: Schema = IndexedDbArchivistConfigSchema\n static readonly defaultDbName = 'archivist'\n static readonly defaultDbVersion = 1\n static readonly defaultStoreName = 'payloads'\n private static readonly dataHashIndex: IndexDescription = {\n key: { _dataHash: 1 }, multiEntry: false, unique: false,\n }\n\n private 
static readonly hashIndex: IndexDescription = {\n key: { _hash: 1 }, multiEntry: false, unique: true,\n }\n\n private static readonly schemaIndex: IndexDescription = {\n key: { schema: 1 }, multiEntry: false, unique: false,\n }\n\n private static readonly sequenceIndex: IndexDescription = {\n key: { _sequence: 1 }, multiEntry: false, unique: true,\n }\n\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly hashIndexName = buildStandardIndexName(IndexedDbArchivist.hashIndex)\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly dataHashIndexName = buildStandardIndexName(IndexedDbArchivist.dataHashIndex)\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly schemaIndexName = buildStandardIndexName(IndexedDbArchivist.schemaIndex)\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly sequenceIndexName = buildStandardIndexName(IndexedDbArchivist.sequenceIndex)\n\n private _dbName?: string\n private _dbVersion?: number\n private _storeName?: string\n\n /**\n * The database name. If not supplied via config, it defaults\n * to the module name (not guaranteed to be unique) and if module\n * name is not supplied, it defaults to `archivist`. This behavior\n * biases towards a single, isolated DB per archivist which seems to\n * make the most sense for 99% of use cases.\n */\n get dbName() {\n if (!this._dbName) {\n if (this.config?.dbName) {\n this._dbName = this.config?.dbName\n } else {\n if (this.config?.name) {\n this.logger.warn('No dbName provided, using module name: ', this.config?.name)\n this._dbName = this.config?.name\n } else {\n this.logger.warn('No dbName provided, using default name: ', IndexedDbArchivist.defaultDbName)\n this._dbName = IndexedDbArchivist.defaultDbName\n }\n }\n }\n return assertEx(this._dbName)\n }\n\n /**\n * The database version. 
If not supplied via config, it defaults to 1.\n */\n get dbVersion() {\n this._dbVersion = this._dbVersion ?? this.config?.dbVersion ?? IndexedDbArchivist.defaultDbVersion\n return this._dbVersion\n }\n\n override get queries() {\n return [\n ArchivistNextQuerySchema,\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n ...super.queries,\n ]\n }\n\n /**\n * The name of the object store. If not supplied via config, it defaults\n * to `payloads`.\n */\n get storeName() {\n if (!this._storeName) {\n if (this.config?.storeName) {\n this._storeName = this.config?.storeName\n } else {\n this.logger.warn('No storeName provided, using default name: ', IndexedDbArchivist.defaultStoreName)\n this._storeName = IndexedDbArchivist.defaultStoreName\n }\n }\n return assertEx(this._storeName)\n }\n\n /**\n * The indexes to create on the store\n */\n private get indexes() {\n return [\n IndexedDbArchivist.dataHashIndex,\n IndexedDbArchivist.hashIndex,\n IndexedDbArchivist.schemaIndex,\n IndexedDbArchivist.sequenceIndex,\n ...(this.config?.storage?.indexes ?? 
[]),\n ]\n }\n\n protected override async allHandler(): Promise<WithStorageMeta<Payload>[]> {\n // Get all payloads from the store\n const payloads = await this.useDb(db => db.getAll(this.storeName))\n // Remove any metadata before returning to the client\n return payloads\n }\n\n protected override async clearHandler(): Promise<void> {\n await this.useDb(db => db.clear(this.storeName))\n }\n\n protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {\n // Filter duplicates to prevent unnecessary DB queries\n const uniqueHashes = [...new Set(hashes)]\n const pairs = await PayloadBuilder.hashPairs(await this.getHandler(uniqueHashes))\n const hashesToDelete = (await Promise.all(pairs.map(async (pair) => {\n const dataHash0 = await PayloadBuilder.dataHash(pair[0])\n return [dataHash0, pair[1]]\n }))).flat()\n // Remove any duplicates\n const distinctHashes = [...new Set(hashesToDelete)]\n return await this.useDb(async (db) => {\n // Only return hashes that were successfully deleted\n const found = await Promise.all(\n distinctHashes.map(async (hash) => {\n // Check if the hash exists\n const existing\n = (await db.getKeyFromIndex(this.storeName, IndexedDbArchivist.hashIndexName, hash))\n ?? 
(await db.getKeyFromIndex(this.storeName, IndexedDbArchivist.dataHashIndexName, hash))\n // If it does exist\n if (existing) {\n // Delete it\n await db.delete(this.storeName, existing)\n // Return the hash so it gets added to the list of deleted hashes\n return hash\n }\n }),\n )\n return found.filter(exists).filter(hash => uniqueHashes.includes(hash))\n })\n }\n\n protected async getFromCursor(\n db: IDBPDatabase<PayloadStore>,\n storeName: string,\n order: 'asc' | 'desc' = 'asc',\n limit: number = 10,\n cursor?: Hex,\n ): Promise<WithStorageMeta[]> {\n // TODO: We have to handle the case where the cursor is not found, and then find the correct cursor to start with (thunked cursor)\n return await useReadOnlyStore(db, storeName, async (store) => {\n const sequenceIndex = assertEx(store.index(IndexedDbArchivist.sequenceIndexName), () => 'Failed to get sequence index')\n let sequenceCursor: IDBPCursorWithValue<PayloadStore, [string]> | null | undefined\n const parsedCursor = cursor === SequenceConstants.minLocalSequence ? null : cursor\n sequenceCursor = assertEx(await sequenceIndex.openCursor(\n null,\n order === 'desc' ? 'prev' : 'next',\n ), () => `Failed to get cursor [${parsedCursor}, ${cursor}]`)\n if (!sequenceCursor?.value) return []\n try {\n sequenceCursor = parsedCursor\n ? sequenceCursor.value._sequence === parsedCursor\n ? 
await sequenceCursor?.advance(1)\n : await (await sequenceCursor?.continue(parsedCursor))?.advance(1)\n : sequenceCursor // advance to skip the initial value\n } catch {\n return []\n }\n\n let remaining = limit\n const result: WithStorageMeta[] = []\n while (remaining) {\n const value = sequenceCursor?.value\n if (value) {\n result.push(value)\n try {\n sequenceCursor = await sequenceCursor?.advance(1)\n } catch {\n break\n }\n if (sequenceCursor === null) {\n break\n }\n }\n remaining--\n }\n return result\n })\n }\n\n /**\n * Uses an index to get a payload by the index value, but returns the value with the primary key (from the root store)\n * @param db The db instance to use\n * @param storeName The name of the store to use\n * @param indexName The index to use\n * @param key The key to get from the index\n * @returns The primary key and the payload, or undefined if not found\n */\n protected async getFromIndexWithPrimaryKey(\n db: IDBPDatabase<PayloadStore>,\n storeName: string,\n indexName: string,\n key: IDBValidKey,\n ): Promise<[number, WithStorageMeta] | undefined> {\n return await useReadOnlyStore(db, storeName, async (store) => {\n const index = store.index(indexName)\n const cursor = await index.openCursor(key)\n if (cursor) {\n const singleValue = cursor.value\n // NOTE: It's known to be a number because we are using IndexedDB supplied auto-incrementing keys\n if (typeof cursor.primaryKey !== 'number') {\n throw new TypeError('primaryKey must be a number')\n }\n\n return [cursor.primaryKey, singleValue]\n }\n })\n }\n\n protected override async getHandler(hashes: string[]): Promise<WithStorageMeta[]> {\n const payloads = await this.useDb(db =>\n Promise.all(\n // Filter duplicates to prevent unnecessary DB queries\n uniq(hashes).map(async (hash) => {\n // Find by hash\n const payload = await this.getFromIndexWithPrimaryKey(db, this.storeName, IndexedDbArchivist.hashIndexName, hash)\n // If found, return\n if (payload) return payload\n // Otherwise, 
find by data hash\n return this.getFromIndexWithPrimaryKey(db, this.storeName, IndexedDbArchivist.dataHashIndexName, hash)\n }),\n ))\n\n const found = new Set<string>()\n return (\n payloads\n // Filter out not found\n .filter(exists)\n // Sort by primary key\n .sort((a, b) => a![0] - b![0])\n // Filter out duplicates by hash\n .filter(([_key, payload]) => {\n if (found.has(payload._hash)) {\n return false\n } else {\n found.add(payload._hash)\n return true\n }\n })\n // Return just the payloads\n .map(([_key, payload]) => payload)\n )\n }\n\n protected override async insertHandler(payloads: WithStorageMeta<Payload>[]): Promise<WithStorageMeta<Payload>[]> {\n return await this.useDb(async (db) => {\n // Perform all inserts via a single transaction to ensure atomicity\n // with respect to checking for the pre-existence of the hash.\n // This is done to prevent duplicate root hashes due to race\n // conditions between checking vs insertion.\n return await useReadWriteStore(db, this.storeName, async (store) => {\n // Return only the payloads that were successfully inserted\n const inserted: WithStorageMeta<Payload>[] = []\n await Promise.all(\n payloads.map(async (payload) => {\n // only insert if hash does not already exist\n if (!await store.index(IndexedDbArchivist.hashIndexName).get(payload._hash)) {\n // Insert the payload\n await store.put(payload)\n // Add it to the inserted list\n inserted.push(payload)\n }\n }),\n )\n return inserted\n })\n })\n }\n\n protected override async nextHandler(options?: ArchivistNextOptions): Promise<WithStorageMeta<Payload>[]> {\n const {\n limit, cursor, order,\n } = options ?? {}\n return await this.useDb(async (db) => {\n return await this.getFromCursor(db, this.storeName, order, limit ?? 
10, cursor)\n })\n }\n\n protected override async startHandler() {\n await super.startHandler()\n // NOTE: We could defer this creation to first access but we\n // want to fail fast here in case something is wrong\n await this.useDb(() => {})\n return true\n }\n\n private async checkIndexes(db: IDBPDatabase<PayloadStore>): Promise<IndexDescription[]> {\n const { indexes, storeName } = this\n if (db.objectStoreNames.contains(storeName)) {\n const existingIndexes = await getExistingIndexes(db, storeName)\n const existingIndexNames = new Set(existingIndexes.map(({ name }) => name).filter(exists))\n for (const { key, unique } of indexes) {\n const indexName = buildStandardIndexName({ key, unique })\n if (!existingIndexNames.has(indexName)) {\n // the index is missing, so trigger an upgrade\n this._dbVersion = this._dbVersion === undefined ? 0 : this._dbVersion + 1\n break\n }\n }\n return existingIndexes\n }\n return []\n }\n\n private async checkObjectStore(): Promise<IndexDescription[]> {\n const { dbName, storeName } = this\n return await useDb(dbName, (db) => {\n // we check the version here to see if someone else upgraded it past where we think we are\n if (db.version >= (this._dbVersion ?? 0)) {\n this._dbVersion = db.version\n }\n if (db.objectStoreNames.contains(storeName)) {\n return this.checkIndexes(db)\n } else {\n this._dbVersion = (this._dbVersion ?? 
0) + 1\n return []\n }\n })\n }\n\n /**\n * Returns that the desired DB/Store initialized to the correct version\n * @returns The initialized DB\n */\n private async getInitializedDb(): Promise<IDBPDatabase<PayloadStore>> {\n const existingIndexes = await this.checkObjectStore()\n const {\n dbName, dbVersion, indexes, storeName, logger,\n } = this\n return await openDB<PayloadStore>(dbName, dbVersion, {\n blocked(currentVersion, blockedVersion, event) {\n logger.warn(`IndexedDbArchivist: Blocked from upgrading from ${currentVersion} to ${blockedVersion}`, event)\n },\n blocking(currentVersion, blockedVersion, event) {\n logger.warn(`IndexedDbArchivist: Blocking upgrade from ${currentVersion} to ${blockedVersion}`, event)\n },\n terminated() {\n logger.log('IndexedDbArchivist: Terminated')\n },\n upgrade(database, oldVersion, newVersion, transaction) {\n // NOTE: This is called whenever the DB is created/updated. We could simply ensure the desired end\n // state but, out of an abundance of caution, we will just delete (so we know where we are starting\n // from a known good point) and recreate the desired state. This prioritizes resilience over data\n // retention but we can revisit that tradeoff when it becomes limiting. 
Because distributed browser\n // state is extremely hard to debug, this seems like fair tradeoff for now.\n if (oldVersion !== newVersion) {\n logger.log(`IndexedDbArchivist: Upgrading from ${oldVersion} to ${newVersion}`)\n // Delete any existing databases that are not the current version\n const objectStores = transaction.objectStoreNames\n for (const name of objectStores) {\n try {\n database.deleteObjectStore(name)\n } catch {\n logger.log(`IndexedDbArchivist: Failed to delete existing object store ${name}`)\n }\n }\n }\n // keep any indexes that were there before but are not required by this config\n // we do this incase there are two or more configs trying to use the db and they have mismatched indexes, so they do not erase each other's indexes\n const existingIndexesToKeep = existingIndexes.filter(({ name: existingName }) => !indexes.some(({ name }) => name === existingName))\n console.log('existingIndexes', existingIndexes)\n console.log('existingIndexesToKeep', existingIndexesToKeep)\n console.log('indexes', indexes)\n const indexesToCreate = indexes.map(idx => ({\n ...idx,\n name: buildStandardIndexName(idx),\n // eslint-disable-next-line unicorn/no-array-reduce\n })).reduce((acc, idx) => acc.set(idx.name, idx), new Map<string, IndexDescription>()).values()\n createStore(database, storeName, [...indexesToCreate], logger)\n },\n })\n }\n\n /**\n * Executes a callback with the initialized DB and then closes the db\n * @param callback The method to execute with the initialized DB\n * @returns\n */\n private async useDb<T>(callback: (db: IDBPDatabase<PayloadStore>) => Promise<T> | T): Promise<T> {\n // Get the initialized DB\n const db = await this.getInitializedDb()\n try {\n // Perform the callback\n return await callback(db)\n } finally {\n // Close the DB\n db.close()\n }\n }\n}\n","export type IndexedDbArchivistSchema = 'network.xyo.archivist.indexeddb'\nexport const IndexedDbArchivistSchema: IndexedDbArchivistSchema = 
'network.xyo.archivist.indexeddb'\n","import type { ArchivistConfig } from '@xyo-network/archivist-model'\n\nimport { IndexedDbArchivistSchema } from './Schema.ts'\n\nexport type IndexedDbArchivistConfigSchema = `${IndexedDbArchivistSchema}.config`\nexport const IndexedDbArchivistConfigSchema: IndexedDbArchivistConfigSchema = `${IndexedDbArchivistSchema}.config`\n\nexport type IndexedDbArchivistConfig = ArchivistConfig<{\n /**\n * The database name\n */\n dbName?: string\n /**\n * The version of the DB, defaults to 1\n */\n dbVersion?: number\n schema: IndexedDbArchivistConfigSchema\n /**\n * The name of the object store\n */\n storeName?: string\n}>\n","import type { Logger } from '@xylabs/logger'\nimport type { IndexDescription, IndexDirection } from '@xyo-network/archivist-model'\nimport { buildStandardIndexName } from '@xyo-network/archivist-model'\nimport type { IDBPDatabase, IDBPObjectStore } from 'idb'\nimport { openDB } from 'idb'\n\nimport type { PayloadStore } from './Archivist.ts'\n\nexport function createStore(db: IDBPDatabase<PayloadStore>, storeName: string, indexes: IndexDescription[], logger?: Logger) {\n logger?.log(`Creating store ${storeName}`)\n // Create the store\n const store = db.createObjectStore(storeName, {\n // If it isn't explicitly set, create a value by auto incrementing.\n autoIncrement: true,\n })\n // Name the store\n store.name = storeName\n // Create an index on the hash\n for (const {\n key, multiEntry, unique,\n } of indexes) {\n const indexKeys = Object.keys(key)\n const keys = indexKeys.length === 1 ? 
indexKeys[0] : indexKeys\n const indexName = buildStandardIndexName({ key, unique })\n console.log('createIndex', indexName, keys, { multiEntry, unique })\n store.createIndex(indexName, keys, { multiEntry, unique })\n }\n}\n\nexport async function getExistingIndexes(db: IDBPDatabase<PayloadStore>, storeName: string): Promise<IndexDescription[]> {\n return await useReadOnlyStore(db, storeName, (store) => {\n return [...store.indexNames].map((indexName) => {\n const index = store.index(indexName)\n const key: Record<string, IndexDirection> = {}\n if (Array.isArray(index.keyPath)) {\n for (const keyPath of index.keyPath) {\n key[keyPath] = 1\n }\n } else {\n key[index.keyPath] = 1\n }\n const desc: IndexDescription = {\n name: indexName,\n key,\n unique: index.unique,\n multiEntry: index.multiEntry,\n }\n return desc\n })\n })\n}\n\nexport async function useDb<T>(dbName: string, callback: (db: IDBPDatabase<PayloadStore>) => Promise<T> | T): Promise<T> {\n const db = await openDB<PayloadStore>(dbName)\n try {\n return await callback(db)\n } finally {\n db.close()\n }\n}\n\nexport async function useReadOnlyStore<T>(\n db: IDBPDatabase<PayloadStore>,\n storeName: string,\n callback: (store: IDBPObjectStore<PayloadStore, [string], string, 'readonly'>) => Promise<T> | T,\n): Promise<T> {\n const transaction = db.transaction(storeName, 'readonly')\n const store = transaction.objectStore(storeName)\n try {\n return await callback(store)\n } finally {\n await transaction.done\n }\n}\n\nexport async function useReadWriteStore<T>(\n db: IDBPDatabase<PayloadStore>,\n storeName: string,\n callback: (store: IDBPObjectStore<PayloadStore, [string], string, 'readwrite'>) => Promise<T> | T,\n): Promise<T> {\n const transaction = db.transaction(storeName, 'readwrite')\n const store = transaction.objectStore(storeName)\n try {\n return await callback(store)\n } finally {\n await transaction.done\n 
}\n}\n"],"mappings":";;;;AAAA,SAASA,YAAY;AACrB,SAASC,gBAAgB;AACzB,SAASC,cAAc;AAEvB,SAASC,yBAAyB;AAClC,SACEC,yBACAC,2BACAC,4BACAC,4BAGAC,0BACAC,0BAAAA,+BAEK;AACP,SAASC,uBAAuB;AAChC,SAASC,sBAAsB;AAC/B,SACmBC,yBACZ;AACP,SACqCC,UAAAA,eAC9B;;;ACtBA,IAAMC,2BAAqD;;;ACI3D,IAAMC,iCAAiE,GAAGC,wBAAAA;;;ACHjF,SAASC,8BAA8B;AAEvC,SAASC,cAAc;AAIhB,SAASC,YAAYC,IAAgCC,WAAmBC,SAA6BC,QAAe;AACzHA,UAAQC,IAAI,kBAAkBH,SAAAA,EAAW;AAEzC,QAAMI,QAAQL,GAAGM,kBAAkBL,WAAW;;IAE5CM,eAAe;EACjB,CAAA;AAEAF,QAAMG,OAAOP;AAEb,aAAW,EACTQ,KAAKC,YAAYC,OAAM,KACpBT,SAAS;AACZ,UAAMU,YAAYC,OAAOC,KAAKL,GAAAA;AAC9B,UAAMK,OAAOF,UAAUG,WAAW,IAAIH,UAAU,CAAA,IAAKA;AACrD,UAAMI,YAAYC,uBAAuB;MAAER;MAAKE;IAAO,CAAA;AACvDO,YAAQd,IAAI,eAAeY,WAAWF,MAAM;MAAEJ;MAAYC;IAAO,CAAA;AACjEN,UAAMc,YAAYH,WAAWF,MAAM;MAAEJ;MAAYC;IAAO,CAAA;EAC1D;AACF;AAnBgBZ;AAqBhB,eAAsBqB,mBAAmBpB,IAAgCC,WAAiB;AACxF,SAAO,MAAMoB,iBAAiBrB,IAAIC,WAAW,CAACI,UAAAA;AAC5C,WAAO;SAAIA,MAAMiB;MAAYC,IAAI,CAACP,cAAAA;AAChC,YAAMQ,QAAQnB,MAAMmB,MAAMR,SAAAA;AAC1B,YAAMP,MAAsC,CAAC;AAC7C,UAAIgB,MAAMC,QAAQF,MAAMG,OAAO,GAAG;AAChC,mBAAWA,WAAWH,MAAMG,SAAS;AACnClB,cAAIkB,OAAAA,IAAW;QACjB;MACF,OAAO;AACLlB,YAAIe,MAAMG,OAAO,IAAI;MACvB;AACA,YAAMC,OAAyB;QAC7BpB,MAAMQ;QACNP;QACAE,QAAQa,MAAMb;QACdD,YAAYc,MAAMd;MACpB;AACA,aAAOkB;IACT,CAAA;EACF,CAAA;AACF;AArBsBR;AAuBtB,eAAsBS,MAASC,QAAgBC,UAA4D;AACzG,QAAM/B,KAAK,MAAMgC,OAAqBF,MAAAA;AACtC,MAAI;AACF,WAAO,MAAMC,SAAS/B,EAAAA;EACxB,UAAA;AACEA,OAAGiC,MAAK;EACV;AACF;AAPsBJ;AAStB,eAAsBR,iBACpBrB,IACAC,WACA8B,UAAgG;AAEhG,QAAMG,cAAclC,GAAGkC,YAAYjC,WAAW,UAAA;AAC9C,QAAMI,QAAQ6B,YAAYC,YAAYlC,SAAAA;AACtC,MAAI;AACF,WAAO,MAAM8B,SAAS1B,KAAAA;EACxB,UAAA;AACE,UAAM6B,YAAYE;EACpB;AACF;AAZsBf;AActB,eAAsBgB,kBACpBrC,IACAC,WACA8B,UAAiG;AAEjG,QAAMG,cAAclC,GAAGkC,YAAYjC,WAAW,WAAA;AAC9C,QAAMI,QAAQ6B,YAAYC,YAAYlC,SAAAA;AACtC,MAAI;AACF,WAAO,MAAM8B,SAAS1B,KAAAA;EACxB,UAAA;AACE,UAAM6B,YAAYE;EACpB;AACF;AAZsBC;;;;;;;;;;AHrCf,IAAMC,qBAAN,MAAMA,4BAGHC,kBAAAA;SAAAA;;;EACR,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeC;;EAC5E,OAAyBC,sBAA8BD;EACvD,OAAgBE,gBAAgB;EAChC,OAAgBC,mBAAmB;EACnC,O
AAgBC,mBAAmB;EACnC,OAAwBC,gBAAkC;IACxDC,KAAK;MAAEC,WAAW;IAAE;IAAGC,YAAY;IAAOC,QAAQ;EACpD;EAEA,OAAwBC,YAA8B;IACpDJ,KAAK;MAAEK,OAAO;IAAE;IAAGH,YAAY;IAAOC,QAAQ;EAChD;EAEA,OAAwBG,cAAgC;IACtDN,KAAK;MAAEO,QAAQ;IAAE;IAAGL,YAAY;IAAOC,QAAQ;EACjD;EAEA,OAAwBK,gBAAkC;IACxDR,KAAK;MAAES,WAAW;IAAE;IAAGP,YAAY;IAAOC,QAAQ;EACpD;;EAGA,OAAgBO,gBAAgBC,wBAAuBpB,oBAAmBa,SAAS;;EAEnF,OAAgBQ,oBAAoBD,wBAAuBpB,oBAAmBQ,aAAa;;EAE3F,OAAgBc,kBAAkBF,wBAAuBpB,oBAAmBe,WAAW;;EAEvF,OAAgBQ,oBAAoBH,wBAAuBpB,oBAAmBiB,aAAa;EAEnFO;EACAC;EACAC;;;;;;;;EASR,IAAIC,SAAS;AACX,QAAI,CAAC,KAAKH,SAAS;AACjB,UAAI,KAAKI,QAAQD,QAAQ;AACvB,aAAKH,UAAU,KAAKI,QAAQD;MAC9B,OAAO;AACL,YAAI,KAAKC,QAAQC,MAAM;AACrB,eAAKC,OAAOC,KAAK,2CAA2C,KAAKH,QAAQC,IAAAA;AACzE,eAAKL,UAAU,KAAKI,QAAQC;QAC9B,OAAO;AACL,eAAKC,OAAOC,KAAK,4CAA4C/B,oBAAmBK,aAAa;AAC7F,eAAKmB,UAAUxB,oBAAmBK;QACpC;MACF;IACF;AACA,WAAO2B,SAAS,KAAKR,OAAO;EAC9B;;;;EAKA,IAAIS,YAAY;AACd,SAAKR,aAAa,KAAKA,cAAc,KAAKG,QAAQK,aAAajC,oBAAmBM;AAClF,WAAO,KAAKmB;EACd;EAEA,IAAaS,UAAU;AACrB,WAAO;MACLC;MACAC;MACAC;MACAC;MACAC;SACG,MAAML;;EAEb;;;;;EAMA,IAAIM,YAAY;AACd,QAAI,CAAC,KAAKd,YAAY;AACpB,UAAI,KAAKE,QAAQY,WAAW;AAC1B,aAAKd,aAAa,KAAKE,QAAQY;MACjC,OAAO;AACL,aAAKV,OAAOC,KAAK,+CAA+C/B,oBAAmBO,gBAAgB;AACnG,aAAKmB,aAAa1B,oBAAmBO;MACvC;IACF;AACA,WAAOyB,SAAS,KAAKN,UAAU;EACjC;;;;EAKA,IAAYe,UAAU;AACpB,WAAO;MACLzC,oBAAmBQ;MACnBR,oBAAmBa;MACnBb,oBAAmBe;MACnBf,oBAAmBiB;SACf,KAAKW,QAAQc,SAASD,WAAW,CAAA;;EAEzC;EAEA,MAAyBE,aAAkD;AAEzE,UAAMC,WAAW,MAAM,KAAKC,MAAMC,CAAAA,OAAMA,GAAGC,OAAO,KAAKP,SAAS,CAAA;AAEhE,WAAOI;EACT;EAEA,MAAyBI,eAA8B;AACrD,UAAM,KAAKH,MAAMC,CAAAA,OAAMA,GAAGG,MAAM,KAAKT,SAAS,CAAA;EAChD;EAEA,MAAyBU,cAAcC,QAAiC;AAEtE,UAAMC,eAAe;SAAI,IAAIC,IAAIF,MAAAA;;AACjC,UAAMG,QAAQ,MAAMC,eAAeC,UAAU,MAAM,KAAKC,WAAWL,YAAAA,CAAAA;AACnE,UAAMM,kBAAkB,MAAMC,QAAQC,IAAIN,MAAMO,IAAI,OAAOC,SAAAA;AACzD,YAAMC,YAAY,MAAMR,eAAeS,SAASF,KAAK,CAAA,CAAE;AACvD,aAAO;QAACC;QAAWD,KAAK,CAAA;;IAC1B,CAAA,CAAA,GAAKG,KAAI;AAET,UAAMC,iBAAiB;SAAI,IAAIb,IAAIK,cAAAA;;AACnC,WAAO,MAAM,KAAKb,MAAM,OAAOC,OAAAA;AAE7B,YAAMqB,QAAQ,MAAMR,QAAQC,I
AC1BM,eAAeL,IAAI,OAAOO,SAAAA;AAExB,cAAMC,WACD,MAAMvB,GAAGwB,gBAAgB,KAAK9B,WAAWxC,oBAAmBmB,eAAeiD,IAAAA,KAC1E,MAAMtB,GAAGwB,gBAAgB,KAAK9B,WAAWxC,oBAAmBqB,mBAAmB+C,IAAAA;AAErF,YAAIC,UAAU;AAEZ,gBAAMvB,GAAGyB,OAAO,KAAK/B,WAAW6B,QAAAA;AAEhC,iBAAOD;QACT;MACF,CAAA,CAAA;AAEF,aAAOD,MAAMK,OAAOC,MAAAA,EAAQD,OAAOJ,CAAAA,SAAQhB,aAAasB,SAASN,IAAAA,CAAAA;IACnE,CAAA;EACF;EAEA,MAAgBO,cACd7B,IACAN,WACEoC,QAAwB,OACxBC,QAAgB,IAChBC,QAC0B;AAE5B,WAAO,MAAMC,iBAAiBjC,IAAIN,WAAW,OAAOwC,UAAAA;AAClD,YAAM/D,gBAAgBe,SAASgD,MAAMC,MAAMjF,oBAAmBuB,iBAAiB,GAAG,MAAM,8BAAA;AACxF,UAAI2D;AACJ,YAAMC,eAAeL,WAAWM,kBAAkBC,mBAAmB,OAAOP;AAC5EI,uBAAiBlD,SAAS,MAAMf,cAAcqE,WAC5C,MACAV,UAAU,SAAS,SAAS,MAAA,GAC3B,MAAM,yBAAyBO,YAAAA,KAAiBL,MAAAA,GAAS;AAC5D,UAAI,CAACI,gBAAgBK,MAAO,QAAO,CAAA;AACnC,UAAI;AACFL,yBAAiBC,eACbD,eAAeK,MAAMrE,cAAciE,eACjC,MAAMD,gBAAgBM,QAAQ,CAAA,IAC9B,OAAO,MAAMN,gBAAgBO,SAASN,YAAAA,IAAgBK,QAAQ,CAAA,IAChEN;MACN,QAAQ;AACN,eAAO,CAAA;MACT;AAEA,UAAIQ,YAAYb;AAChB,YAAMc,SAA4B,CAAA;AAClC,aAAOD,WAAW;AAChB,cAAMH,QAAQL,gBAAgBK;AAC9B,YAAIA,OAAO;AACTI,iBAAOC,KAAKL,KAAAA;AACZ,cAAI;AACFL,6BAAiB,MAAMA,gBAAgBM,QAAQ,CAAA;UACjD,QAAQ;AACN;UACF;AACA,cAAIN,mBAAmB,MAAM;AAC3B;UACF;QACF;AACAQ;MACF;AACA,aAAOC;IACT,CAAA;EACF;;;;;;;;;EAUA,MAAgBE,2BACd/C,IACAN,WACAsD,WACArF,KACgD;AAChD,WAAO,MAAMsE,iBAAiBjC,IAAIN,WAAW,OAAOwC,UAAAA;AAClD,YAAMC,QAAQD,MAAMC,MAAMa,SAAAA;AAC1B,YAAMhB,SAAS,MAAMG,MAAMK,WAAW7E,GAAAA;AACtC,UAAIqE,QAAQ;AACV,cAAMiB,cAAcjB,OAAOS;AAE3B,YAAI,OAAOT,OAAOkB,eAAe,UAAU;AACzC,gBAAM,IAAIC,UAAU,6BAAA;QACtB;AAEA,eAAO;UAACnB,OAAOkB;UAAYD;;MAC7B;IACF,CAAA;EACF;EAEA,MAAyBtC,WAAWN,QAA8C;AAChF,UAAMP,WAAW,MAAM,KAAKC,MAAMC,CAAAA,OAChCa,QAAQC;;MAENsC,KAAK/C,MAAAA,EAAQU,IAAI,OAAOO,SAAAA;AAEtB,cAAM+B,UAAU,MAAM,KAAKN,2BAA2B/C,IAAI,KAAKN,WAAWxC,oBAAmBmB,eAAeiD,IAAAA;AAE5G,YAAI+B,QAAS,QAAOA;AAEpB,eAAO,KAAKN,2BAA2B/C,IAAI,KAAKN,WAAWxC,oBAAmBqB,mBAAmB+C,IAAAA;MACnG,CAAA;IAAA,CAAA;AAGJ,UAAMD,QAAQ,oBAAId,IAAAA;AAClB,WACET,SAEG4B,OAAOC,MAAAA,EAEP2B,KAAK,CAACC,GAAGC,MAAMD,EAAG,CAAA,IAAKC,EAAG,CAAA,CAAE,EAE5B9B,OAAO,CAAC,CAAC+B,M
AAMJ,OAAAA,MAAQ;AACtB,UAAIhC,MAAMqC,IAAIL,QAAQrF,KAAK,GAAG;AAC5B,eAAO;MACT,OAAO;AACLqD,cAAMsC,IAAIN,QAAQrF,KAAK;AACvB,eAAO;MACT;IACF,CAAA,EAEC+C,IAAI,CAAC,CAAC0C,MAAMJ,OAAAA,MAAaA,OAAAA;EAEhC;EAEA,MAAyBO,cAAc9D,UAA2E;AAChH,WAAO,MAAM,KAAKC,MAAM,OAAOC,OAAAA;AAK7B,aAAO,MAAM6D,kBAAkB7D,IAAI,KAAKN,WAAW,OAAOwC,UAAAA;AAExD,cAAM4B,WAAuC,CAAA;AAC7C,cAAMjD,QAAQC,IACZhB,SAASiB,IAAI,OAAOsC,YAAAA;AAElB,cAAI,CAAC,MAAMnB,MAAMC,MAAMjF,oBAAmBmB,aAAa,EAAE0F,IAAIV,QAAQrF,KAAK,GAAG;AAE3E,kBAAMkE,MAAM8B,IAAIX,OAAAA;AAEhBS,qBAAShB,KAAKO,OAAAA;UAChB;QACF,CAAA,CAAA;AAEF,eAAOS;MACT,CAAA;IACF,CAAA;EACF;EAEA,MAAyBG,YAAYC,SAAqE;AACxG,UAAM,EACJnC,OAAOC,QAAQF,MAAK,IAClBoC,WAAW,CAAC;AAChB,WAAO,MAAM,KAAKnE,MAAM,OAAOC,OAAAA;AAC7B,aAAO,MAAM,KAAK6B,cAAc7B,IAAI,KAAKN,WAAWoC,OAAOC,SAAS,IAAIC,MAAAA;IAC1E,CAAA;EACF;EAEA,MAAyBmC,eAAe;AACtC,UAAM,MAAMA,aAAAA;AAGZ,UAAM,KAAKpE,MAAM,MAAA;IAAO,CAAA;AACxB,WAAO;EACT;EAEA,MAAcqE,aAAapE,IAA6D;AACtF,UAAM,EAAEL,SAASD,UAAS,IAAK;AAC/B,QAAIM,GAAGqE,iBAAiBC,SAAS5E,SAAAA,GAAY;AAC3C,YAAM6E,kBAAkB,MAAMC,mBAAmBxE,IAAIN,SAAAA;AACrD,YAAM+E,qBAAqB,IAAIlE,IAAIgE,gBAAgBxD,IAAI,CAAC,EAAEhC,KAAI,MAAOA,IAAAA,EAAM2C,OAAOC,MAAAA,CAAAA;AAClF,iBAAW,EAAEhE,KAAKG,OAAM,KAAM6B,SAAS;AACrC,cAAMqD,YAAY1E,wBAAuB;UAAEX;UAAKG;QAAO,CAAA;AACvD,YAAI,CAAC2G,mBAAmBf,IAAIV,SAAAA,GAAY;AAEtC,eAAKrE,aAAa,KAAKA,eAAe+F,SAAY,IAAI,KAAK/F,aAAa;AACxE;QACF;MACF;AACA,aAAO4F;IACT;AACA,WAAO,CAAA;EACT;EAEA,MAAcI,mBAAgD;AAC5D,UAAM,EAAE9F,QAAQa,UAAS,IAAK;AAC9B,WAAO,MAAMK,MAAMlB,QAAQ,CAACmB,OAAAA;AAE1B,UAAIA,GAAG4E,YAAY,KAAKjG,cAAc,IAAI;AACxC,aAAKA,aAAaqB,GAAG4E;MACvB;AACA,UAAI5E,GAAGqE,iBAAiBC,SAAS5E,SAAAA,GAAY;AAC3C,eAAO,KAAK0E,aAAapE,EAAAA;MAC3B,OAAO;AACL,aAAKrB,cAAc,KAAKA,cAAc,KAAK;AAC3C,eAAO,CAAA;MACT;IACF,CAAA;EACF;;;;;EAMA,MAAckG,mBAAwD;AACpE,UAAMN,kBAAkB,MAAM,KAAKI,iBAAgB;AACnD,UAAM,EACJ9F,QAAQM,WAAWQ,SAASD,WAAWV,OAAM,IAC3C;AACJ,WAAO,MAAM8F,QAAqBjG,QAAQM,WAAW;MACnD4F,QAAQC,gBAAgBC,gBAAgBC,OAAK;AAC3ClG,eAAOC,KAAK,mDAAmD+F,cAAAA,OAAqBC,cAAAA,IAAkBC,KAAAA;MACxG;MACAC,SAASH,gBAAgBC,gBAAgBC,OAAK;AAC5ClG,eAAOC,KAAK,6
CAA6C+F,cAAAA,OAAqBC,cAAAA,IAAkBC,KAAAA;MAClG;MACAE,aAAAA;AACEpG,eAAOqG,IAAI,gCAAA;MACb;MACAC,QAAQC,UAAUC,YAAYC,YAAYC,aAAW;AAMnD,YAAIF,eAAeC,YAAY;AAC7BzG,iBAAOqG,IAAI,sCAAsCG,UAAAA,OAAiBC,UAAAA,EAAY;AAE9E,gBAAME,eAAeD,YAAYrB;AACjC,qBAAWtF,QAAQ4G,cAAc;AAC/B,gBAAI;AACFJ,uBAASK,kBAAkB7G,IAAAA;YAC7B,QAAQ;AACNC,qBAAOqG,IAAI,8DAA8DtG,IAAAA,EAAM;YACjF;UACF;QACF;AAGA,cAAM8G,wBAAwBtB,gBAAgB7C,OAAO,CAAC,EAAE3C,MAAM+G,aAAY,MAAO,CAACnG,QAAQoG,KAAK,CAAC,EAAEhH,KAAI,MAAOA,SAAS+G,YAAAA,CAAAA;AACtHE,gBAAQX,IAAI,mBAAmBd,eAAAA;AAC/ByB,gBAAQX,IAAI,yBAAyBQ,qBAAAA;AACrCG,gBAAQX,IAAI,WAAW1F,OAAAA;AACvB,cAAMsG,kBAAkBtG,QAAQoB,IAAImF,CAAAA,SAAQ;UAC1C,GAAGA;UACHnH,MAAMT,wBAAuB4H,GAAAA;QAE/B,EAAA,EAAIC,OAAO,CAACC,KAAKF,QAAQE,IAAIC,IAAIH,IAAInH,MAAMmH,GAAAA,GAAM,oBAAII,IAAAA,CAAAA,EAAiCC,OAAM;AAC5FC,oBAAYjB,UAAU7F,WAAW;aAAIuG;WAAkBjH,MAAAA;MACzD;IACF,CAAA;EACF;;;;;;EAOA,MAAce,MAAS0G,UAA0E;AAE/F,UAAMzG,KAAK,MAAM,KAAK6E,iBAAgB;AACtC,QAAI;AAEF,aAAO,MAAM4B,SAASzG,EAAAA;IACxB,UAAA;AAEEA,SAAG0G,MAAK;IACV;EACF;AACF;;;;","names":["uniq","assertEx","exists","AbstractArchivist","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","ArchivistNextQuerySchema","buildStandardIndexName","creatableModule","PayloadBuilder","SequenceConstants","openDB","IndexedDbArchivistSchema","IndexedDbArchivistConfigSchema","IndexedDbArchivistSchema","buildStandardIndexName","openDB","createStore","db","storeName","indexes","logger","log","store","createObjectStore","autoIncrement","name","key","multiEntry","unique","indexKeys","Object","keys","length","indexName","buildStandardIndexName","console","createIndex","getExistingIndexes","useReadOnlyStore","indexNames","map","index","Array","isArray","keyPath","desc","useDb","dbName","callback","openDB","close","transaction","objectStore","done","useReadWriteStore","IndexedDbArchivist","AbstractArchivist","configSchemas","IndexedDbArchivistConfigSchema","defaultConfigSchema","defaultDbName","defaultDbVersion","defaultSto
reName","dataHashIndex","key","_dataHash","multiEntry","unique","hashIndex","_hash","schemaIndex","schema","sequenceIndex","_sequence","hashIndexName","buildStandardIndexName","dataHashIndexName","schemaIndexName","sequenceIndexName","_dbName","_dbVersion","_storeName","dbName","config","name","logger","warn","assertEx","dbVersion","queries","ArchivistNextQuerySchema","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","storeName","indexes","storage","allHandler","payloads","useDb","db","getAll","clearHandler","clear","deleteHandler","hashes","uniqueHashes","Set","pairs","PayloadBuilder","hashPairs","getHandler","hashesToDelete","Promise","all","map","pair","dataHash0","dataHash","flat","distinctHashes","found","hash","existing","getKeyFromIndex","delete","filter","exists","includes","getFromCursor","order","limit","cursor","useReadOnlyStore","store","index","sequenceCursor","parsedCursor","SequenceConstants","minLocalSequence","openCursor","value","advance","continue","remaining","result","push","getFromIndexWithPrimaryKey","indexName","singleValue","primaryKey","TypeError","uniq","payload","sort","a","b","_key","has","add","insertHandler","useReadWriteStore","inserted","get","put","nextHandler","options","startHandler","checkIndexes","objectStoreNames","contains","existingIndexes","getExistingIndexes","existingIndexNames","undefined","checkObjectStore","version","getInitializedDb","openDB","blocked","currentVersion","blockedVersion","event","blocking","terminated","log","upgrade","database","oldVersion","newVersion","transaction","objectStores","deleteObjectStore","existingIndexesToKeep","existingName","some","console","indexesToCreate","idx","reduce","acc","set","Map","values","createStore","callback","close"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@xyo-network/archivist-indexeddb",
3
- "version": "3.6.1",
3
+ "version": "3.6.3",
4
4
  "description": "Primary SDK for using XYO Protocol 2.0",
5
5
  "homepage": "https://xyo.network",
6
6
  "bugs": {
@@ -33,11 +33,12 @@
33
33
  "@xylabs/assert": "^4.4.26",
34
34
  "@xylabs/exists": "^4.4.26",
35
35
  "@xylabs/hex": "^4.4.26",
36
- "@xyo-network/archivist-abstract": "^3.6.1",
37
- "@xyo-network/archivist-model": "^3.6.1",
38
- "@xyo-network/module-model": "^3.6.1",
39
- "@xyo-network/payload-builder": "^3.6.1",
40
- "@xyo-network/payload-model": "^3.6.1",
36
+ "@xylabs/logger": "^4.4.26",
37
+ "@xyo-network/archivist-abstract": "^3.6.3",
38
+ "@xyo-network/archivist-model": "^3.6.3",
39
+ "@xyo-network/module-model": "^3.6.3",
40
+ "@xyo-network/payload-builder": "^3.6.3",
41
+ "@xyo-network/payload-model": "^3.6.3",
41
42
  "idb": "^8.0.1"
42
43
  },
43
44
  "devDependencies": {
@@ -45,9 +46,9 @@
45
46
  "@xylabs/object": "^4.4.26",
46
47
  "@xylabs/ts-scripts-yarn3": "^4.2.6",
47
48
  "@xylabs/tsconfig": "^4.2.6",
48
- "@xyo-network/account": "^3.6.1",
49
- "@xyo-network/id-payload-plugin": "^3.6.1",
50
- "@xyo-network/payload-wrapper": "^3.6.1",
49
+ "@xyo-network/account": "^3.6.3",
50
+ "@xyo-network/id-payload-plugin": "^3.6.3",
51
+ "@xyo-network/payload-wrapper": "^3.6.3",
51
52
  "fake-indexeddb": "^6.0.0",
52
53
  "typescript": "^5.7.2",
53
54
  "vitest": "^2.1.8"
package/src/Archivist.ts CHANGED
@@ -1,4 +1,3 @@
1
- /* eslint-disable complexity */
2
1
  import { uniq } from '@xylabs/array'
3
2
  import { assertEx } from '@xylabs/assert'
4
3
  import { exists } from '@xylabs/exists'
@@ -25,6 +24,11 @@ import {
25
24
  } from 'idb'
26
25
 
27
26
  import { IndexedDbArchivistConfigSchema } from './Config.ts'
27
+ import {
28
+ createStore,
29
+ getExistingIndexes,
30
+ useDb, useReadOnlyStore, useReadWriteStore,
31
+ } from './IndexedDbHelpers.ts'
28
32
  import { IndexedDbArchivistParams } from './Params.ts'
29
33
 
30
34
  export interface PayloadStore {
@@ -67,6 +71,7 @@ export class IndexedDbArchivist<
67
71
  static readonly sequenceIndexName = buildStandardIndexName(IndexedDbArchivist.sequenceIndex)
68
72
 
69
73
  private _dbName?: string
74
+ private _dbVersion?: number
70
75
  private _storeName?: string
71
76
 
72
77
  /**
@@ -97,7 +102,8 @@ export class IndexedDbArchivist<
97
102
  * The database version. If not supplied via config, it defaults to 1.
98
103
  */
99
104
  get dbVersion() {
100
- return this.config?.dbVersion ?? IndexedDbArchivist.defaultDbVersion
105
+ this._dbVersion = this._dbVersion ?? this.config?.dbVersion ?? IndexedDbArchivist.defaultDbVersion
106
+ return this._dbVersion
101
107
  }
102
108
 
103
109
  override get queries() {
@@ -190,42 +196,44 @@ export class IndexedDbArchivist<
190
196
  cursor?: Hex,
191
197
  ): Promise<WithStorageMeta[]> {
192
198
  // TODO: We have to handle the case where the cursor is not found, and then find the correct cursor to start with (thunked cursor)
193
- const transaction = db.transaction(storeName, 'readonly')
194
- const store = transaction.objectStore(storeName)
195
- const sequenceIndex = assertEx(store.index(IndexedDbArchivist.sequenceIndexName), () => 'Failed to get sequence index')
196
- let sequenceCursor: IDBPCursorWithValue<PayloadStore, [string]> | null | undefined = undefined
197
- const parsedCursor = cursor === SequenceConstants.minLocalSequence ? null : cursor
198
- sequenceCursor = assertEx(await sequenceIndex.openCursor(
199
- null,
200
- order === 'desc' ? 'prev' : 'next',
201
- ), () => `Failed to get cursor [${parsedCursor}, ${cursor}]`)
202
- if (!sequenceCursor?.value) return []
203
- try {
204
- sequenceCursor = parsedCursor
205
- ? sequenceCursor.value._sequence === parsedCursor ? await sequenceCursor?.advance(1) : await (await sequenceCursor?.continue(parsedCursor))?.advance(1)
206
- : sequenceCursor // advance to skip the initial value
207
- } catch {
208
- return []
209
- }
199
+ return await useReadOnlyStore(db, storeName, async (store) => {
200
+ const sequenceIndex = assertEx(store.index(IndexedDbArchivist.sequenceIndexName), () => 'Failed to get sequence index')
201
+ let sequenceCursor: IDBPCursorWithValue<PayloadStore, [string]> | null | undefined
202
+ const parsedCursor = cursor === SequenceConstants.minLocalSequence ? null : cursor
203
+ sequenceCursor = assertEx(await sequenceIndex.openCursor(
204
+ null,
205
+ order === 'desc' ? 'prev' : 'next',
206
+ ), () => `Failed to get cursor [${parsedCursor}, ${cursor}]`)
207
+ if (!sequenceCursor?.value) return []
208
+ try {
209
+ sequenceCursor = parsedCursor
210
+ ? sequenceCursor.value._sequence === parsedCursor
211
+ ? await sequenceCursor?.advance(1)
212
+ : await (await sequenceCursor?.continue(parsedCursor))?.advance(1)
213
+ : sequenceCursor // advance to skip the initial value
214
+ } catch {
215
+ return []
216
+ }
210
217
 
211
- let remaining = limit
212
- const result: WithStorageMeta[] = []
213
- while (remaining) {
214
- const value = sequenceCursor?.value
215
- if (value) {
216
- result.push(value)
217
- try {
218
- sequenceCursor = await sequenceCursor?.advance(1)
219
- } catch {
220
- break
221
- }
222
- if (sequenceCursor === null) {
223
- break
218
+ let remaining = limit
219
+ const result: WithStorageMeta[] = []
220
+ while (remaining) {
221
+ const value = sequenceCursor?.value
222
+ if (value) {
223
+ result.push(value)
224
+ try {
225
+ sequenceCursor = await sequenceCursor?.advance(1)
226
+ } catch {
227
+ break
228
+ }
229
+ if (sequenceCursor === null) {
230
+ break
231
+ }
224
232
  }
233
+ remaining--
225
234
  }
226
- remaining--
227
- }
228
- return result
235
+ return result
236
+ })
229
237
  }
230
238
 
231
239
  /**
@@ -242,19 +250,19 @@ export class IndexedDbArchivist<
242
250
  indexName: string,
243
251
  key: IDBValidKey,
244
252
  ): Promise<[number, WithStorageMeta] | undefined> {
245
- const transaction = db.transaction(storeName, 'readonly')
246
- const store = transaction.objectStore(storeName)
247
- const index = store.index(indexName)
248
- const cursor = await index.openCursor(key)
249
- if (cursor) {
250
- const singleValue = cursor.value
251
- // NOTE: It's known to be a number because we are using IndexedDB supplied auto-incrementing keys
252
- if (typeof cursor.primaryKey !== 'number') {
253
- throw new TypeError('primaryKey must be a number')
254
- }
253
+ return await useReadOnlyStore(db, storeName, async (store) => {
254
+ const index = store.index(indexName)
255
+ const cursor = await index.openCursor(key)
256
+ if (cursor) {
257
+ const singleValue = cursor.value
258
+ // NOTE: It's known to be a number because we are using IndexedDB supplied auto-incrementing keys
259
+ if (typeof cursor.primaryKey !== 'number') {
260
+ throw new TypeError('primaryKey must be a number')
261
+ }
255
262
 
256
- return [cursor.primaryKey, singleValue]
257
- }
263
+ return [cursor.primaryKey, singleValue]
264
+ }
265
+ })
258
266
  }
259
267
 
260
268
  protected override async getHandler(hashes: string[]): Promise<WithStorageMeta[]> {
@@ -298,12 +306,9 @@ export class IndexedDbArchivist<
298
306
  // with respect to checking for the pre-existence of the hash.
299
307
  // This is done to prevent duplicate root hashes due to race
300
308
  // conditions between checking vs insertion.
301
- const tx = db.transaction(this.storeName, 'readwrite')
302
- // Get the object store
303
- const store = tx.objectStore(this.storeName)
304
- // Return only the payloads that were successfully inserted
305
- const inserted: WithStorageMeta<Payload>[] = []
306
- try {
309
+ return await useReadWriteStore(db, this.storeName, async (store) => {
310
+ // Return only the payloads that were successfully inserted
311
+ const inserted: WithStorageMeta<Payload>[] = []
307
312
  await Promise.all(
308
313
  payloads.map(async (payload) => {
309
314
  // only insert if hash does not already exist
@@ -315,11 +320,8 @@ export class IndexedDbArchivist<
315
320
  }
316
321
  }),
317
322
  )
318
- } finally {
319
- // Ensure the transaction is closed
320
- await tx.done
321
- }
322
- return inserted
323
+ return inserted
324
+ })
323
325
  })
324
326
  }
325
327
 
@@ -340,11 +342,46 @@ export class IndexedDbArchivist<
340
342
  return true
341
343
  }
342
344
 
345
  /**
   * Verifies that every index required by this config exists on the store.
   * If any required index is missing, bumps the cached `_dbVersion` so the
   * next `openDB` call runs an upgrade that (re)creates the indexes.
   * @param db An already-open handle to the database
   * @returns The indexes currently present on the store, or an empty array
   *          when the store itself does not exist yet
   */
  private async checkIndexes(db: IDBPDatabase<PayloadStore>): Promise<IndexDescription[]> {
    const { indexes, storeName } = this
    if (db.objectStoreNames.contains(storeName)) {
      const existingIndexes = await getExistingIndexes(db, storeName)
      // Compare by the standardized index name, dropping any unnamed indexes
      const existingIndexNames = new Set(existingIndexes.map(({ name }) => name).filter(exists))
      for (const { key, unique } of indexes) {
        const indexName = buildStandardIndexName({ key, unique })
        if (!existingIndexNames.has(indexName)) {
          // the index is missing, so trigger an upgrade
          // NOTE(review): when `_dbVersion` is undefined this sets it to 0, which is not a
          // valid version to request from IndexedDB — presumably this branch is unreachable
          // because checkObjectStore assigns `_dbVersion` before calling this method; confirm.
          this._dbVersion = this._dbVersion === undefined ? 0 : this._dbVersion + 1
          // One missing index is enough to force the upgrade; no need to keep scanning
          break
        }
      }
      return existingIndexes
    }
    return []
  }
362
+
363
  /**
   * Opens the DB at its current on-disk version to inspect the object store and
   * its indexes, reconciling the cached `_dbVersion` with reality:
   * - adopts the on-disk version if another client/config upgraded it beyond ours
   * - bumps `_dbVersion` to force an upgrade when the store is missing entirely
   * The connection is closed by `useDb` before this resolves.
   * @returns The indexes currently present on the store, or an empty array when
   *          the store does not exist yet
   */
  private async checkObjectStore(): Promise<IndexDescription[]> {
    const { dbName, storeName } = this
    return await useDb(dbName, (db) => {
      // we check the version here to see if someone else upgraded it past where we think we are
      // NOTE(review): `>=` also fires when the versions are equal (and always fires when
      // `_dbVersion` is still undefined) — confirm `>` was not the intent here.
      if (db.version >= (this._dbVersion ?? 0)) {
        this._dbVersion = db.version
      }
      if (db.objectStoreNames.contains(storeName)) {
        // Store exists: only the indexes may need upgrading
        return this.checkIndexes(db)
      } else {
        // Store missing: request one version higher so openDB triggers an upgrade that creates it
        this._dbVersion = (this._dbVersion ?? 0) + 1
        return []
      }
    })
  }
378
+
343
379
  /**
344
380
  * Returns that the desired DB/Store initialized to the correct version
345
381
  * @returns The initialized DB
346
382
  */
347
383
  private async getInitializedDb(): Promise<IDBPDatabase<PayloadStore>> {
384
+ const existingIndexes = await this.checkObjectStore()
348
385
  const {
349
386
  dbName, dbVersion, indexes, storeName, logger,
350
387
  } = this
@@ -376,22 +413,18 @@ export class IndexedDbArchivist<
376
413
  }
377
414
  }
378
415
  }
379
- // Create the store
380
- const store = database.createObjectStore(storeName, {
381
- // If it isn't explicitly set, create a value by auto incrementing.
382
- autoIncrement: true,
383
- })
384
- // Name the store
385
- store.name = storeName
386
- // Create an index on the hash
387
- for (const {
388
- key, multiEntry, unique,
389
- } of indexes) {
390
- const indexKeys = Object.keys(key)
391
- const keys = indexKeys.length === 1 ? indexKeys[0] : indexKeys
392
- const indexName = buildStandardIndexName({ key, unique })
393
- store.createIndex(indexName, keys, { multiEntry, unique })
394
- }
416
+ // keep any indexes that were there before but are not required by this config
417
+ // we do this incase there are two or more configs trying to use the db and they have mismatched indexes, so they do not erase each other's indexes
418
+ const existingIndexesToKeep = existingIndexes.filter(({ name: existingName }) => !indexes.some(({ name }) => name === existingName))
419
+ console.log('existingIndexes', existingIndexes)
420
+ console.log('existingIndexesToKeep', existingIndexesToKeep)
421
+ console.log('indexes', indexes)
422
+ const indexesToCreate = indexes.map(idx => ({
423
+ ...idx,
424
+ name: buildStandardIndexName(idx),
425
+ // eslint-disable-next-line unicorn/no-array-reduce
426
+ })).reduce((acc, idx) => acc.set(idx.name, idx), new Map<string, IndexDescription>()).values()
427
+ createStore(database, storeName, [...indexesToCreate], logger)
395
428
  },
396
429
  })
397
430
  }
@@ -0,0 +1,88 @@
1
+ import type { Logger } from '@xylabs/logger'
2
+ import type { IndexDescription, IndexDirection } from '@xyo-network/archivist-model'
3
+ import { buildStandardIndexName } from '@xyo-network/archivist-model'
4
+ import type { IDBPDatabase, IDBPObjectStore } from 'idb'
5
+ import { openDB } from 'idb'
6
+
7
+ import type { PayloadStore } from './Archivist.ts'
8
+
9
+ export function createStore(db: IDBPDatabase<PayloadStore>, storeName: string, indexes: IndexDescription[], logger?: Logger) {
10
+ logger?.log(`Creating store ${storeName}`)
11
+ // Create the store
12
+ const store = db.createObjectStore(storeName, {
13
+ // If it isn't explicitly set, create a value by auto incrementing.
14
+ autoIncrement: true,
15
+ })
16
+ // Name the store
17
+ store.name = storeName
18
+ // Create an index on the hash
19
+ for (const {
20
+ key, multiEntry, unique,
21
+ } of indexes) {
22
+ const indexKeys = Object.keys(key)
23
+ const keys = indexKeys.length === 1 ? indexKeys[0] : indexKeys
24
+ const indexName = buildStandardIndexName({ key, unique })
25
+ console.log('createIndex', indexName, keys, { multiEntry, unique })
26
+ store.createIndex(indexName, keys, { multiEntry, unique })
27
+ }
28
+ }
29
+
30
+ export async function getExistingIndexes(db: IDBPDatabase<PayloadStore>, storeName: string): Promise<IndexDescription[]> {
31
+ return await useReadOnlyStore(db, storeName, (store) => {
32
+ return [...store.indexNames].map((indexName) => {
33
+ const index = store.index(indexName)
34
+ const key: Record<string, IndexDirection> = {}
35
+ if (Array.isArray(index.keyPath)) {
36
+ for (const keyPath of index.keyPath) {
37
+ key[keyPath] = 1
38
+ }
39
+ } else {
40
+ key[index.keyPath] = 1
41
+ }
42
+ const desc: IndexDescription = {
43
+ name: indexName,
44
+ key,
45
+ unique: index.unique,
46
+ multiEntry: index.multiEntry,
47
+ }
48
+ return desc
49
+ })
50
+ })
51
+ }
52
+
53
+ export async function useDb<T>(dbName: string, callback: (db: IDBPDatabase<PayloadStore>) => Promise<T> | T): Promise<T> {
54
+ const db = await openDB<PayloadStore>(dbName)
55
+ try {
56
+ return await callback(db)
57
+ } finally {
58
+ db.close()
59
+ }
60
+ }
61
+
62
+ export async function useReadOnlyStore<T>(
63
+ db: IDBPDatabase<PayloadStore>,
64
+ storeName: string,
65
+ callback: (store: IDBPObjectStore<PayloadStore, [string], string, 'readonly'>) => Promise<T> | T,
66
+ ): Promise<T> {
67
+ const transaction = db.transaction(storeName, 'readonly')
68
+ const store = transaction.objectStore(storeName)
69
+ try {
70
+ return await callback(store)
71
+ } finally {
72
+ await transaction.done
73
+ }
74
+ }
75
+
76
+ export async function useReadWriteStore<T>(
77
+ db: IDBPDatabase<PayloadStore>,
78
+ storeName: string,
79
+ callback: (store: IDBPObjectStore<PayloadStore, [string], string, 'readwrite'>) => Promise<T> | T,
80
+ ): Promise<T> {
81
+ const transaction = db.transaction(storeName, 'readwrite')
82
+ const store = transaction.objectStore(storeName)
83
+ try {
84
+ return await callback(store)
85
+ } finally {
86
+ await transaction.done
87
+ }
88
+ }