@xyo-network/archivist-leveldb 4.2.1 → 5.0.0

This diff compares the contents of publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only and reflects the changes between versions exactly as they appear in that registry.
@@ -4,7 +4,7 @@ import { AbstractArchivist } from '@xyo-network/archivist-abstract';
  import { ArchivistModuleEventData, ArchivistNextOptions } from '@xyo-network/archivist-model';
  import { BoundWitness } from '@xyo-network/boundwitness-model';
  import { Payload, Schema, WithStorageMeta } from '@xyo-network/payload-model';
- import { AbstractLevel, AbstractSublevel } from 'abstract-level-2';
+ import { AbstractLevel, AbstractSublevel } from 'abstract-level';
  import { LevelDbArchivistParams } from './Params.ts';
  /** Note: We have indexes as top level sublevels since making them a sublevel of a store, getting all the values of that store includes the sublevels */
  export interface PayloadStore {
@@ -1 +1 @@
- {"version":3,"file":"Archivist.d.ts","sourceRoot":"","sources":["../../src/Archivist.ts"], …}
+ {"version":3,"file":"Archivist.d.ts","sourceRoot":"","sources":["../../src/Archivist.ts"], …}
  (Archivist.d.ts.map: the single-line minified VLQ mappings are elided here; the old and new maps differ only where the embedded import specifier changed from 'abstract-level-2' to 'abstract-level'.)
@@ -1 +1 @@
- {"version":3,"sources":["../../src/Archivist.ts","../../src/Schema.ts","../../src/Config.ts"], …}
+ {"version":3,"sources":["../../src/Archivist.ts","../../src/Schema.ts","../../src/Config.ts"], …}
  (Bundle source map with embedded sourcesContent, elided here: the old and new maps differ only in the embedded Archivist.ts source, where the AbstractBatchOperation/AbstractLevel/AbstractSublevel import moved from 'abstract-level-2' to 'abstract-level'. The same change appears in readable form in the package/src/Archivist.ts hunk below.)
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@xyo-network/archivist-leveldb",
-   "version": "4.2.1",
+   "version": "5.0.0",
    "description": "Primary SDK for using XYO Protocol 2.0",
    "homepage": "https://xyo.network",
    "bugs": {
@@ -28,34 +28,38 @@
    },
    "module": "dist/browser/index.mjs",
    "types": "dist/browser/index.d.ts",
+   "files": [
+     "dist",
+     "src"
+   ],
    "dependencies": {
-     "@xylabs/assert": "^4.14.1",
-     "@xylabs/exists": "^4.14.1",
-     "@xylabs/hex": "^4.14.1",
-     "@xylabs/promise": "^4.14.1",
-     "@xylabs/typeof": "^4.14.1",
-     "@xyo-network/archivist-abstract": "^4.2.1",
-     "@xyo-network/archivist-model": "^4.2.1",
-     "@xyo-network/boundwitness-model": "^4.2.1",
-     "@xyo-network/module-model": "^4.2.1",
-     "@xyo-network/payload-builder": "^4.2.1",
-     "@xyo-network/payload-model": "^4.2.1",
-     "abstract-level-2": "npm:abstract-level@^2",
+     "@xylabs/assert": "^5.0.0",
+     "@xylabs/exists": "^5.0.0",
+     "@xylabs/hex": "^5.0.0",
+     "@xylabs/promise": "^5.0.0",
+     "@xylabs/typeof": "^5.0.0",
+     "@xyo-network/archivist-abstract": "^5.0.0",
+     "@xyo-network/archivist-model": "^5.0.0",
+     "@xyo-network/boundwitness-model": "^5.0.0",
+     "@xyo-network/module-model": "^5.0.0",
+     "@xyo-network/payload-builder": "^5.0.0",
+     "@xyo-network/payload-model": "^5.0.0",
+     "abstract-level": "^3.1.0",
      "async-mutex": "^0.5.0",
-     "level": "^9.0.0"
+     "level": "^10.0.0"
    },
    "devDependencies": {
-     "@xylabs/delay": "^4.14.1",
-     "@xylabs/object": "^4.14.1",
-     "@xylabs/ts-scripts-yarn3": "^7.0.1",
-     "@xylabs/tsconfig": "^7.0.1",
-     "@xylabs/vitest-extended": "^4.14.1",
-     "@xyo-network/account": "^4.2.1",
-     "@xyo-network/account-model": "^4.2.1",
-     "@xyo-network/archivist-acceptance-tests": "^4.2.1",
-     "@xyo-network/id-payload-plugin": "^4.2.1",
-     "@xyo-network/payload-wrapper": "^4.2.1",
-     "@xyo-network/wallet": "^4.2.1",
+     "@xylabs/delay": "^5.0.0",
+     "@xylabs/object": "^5.0.0",
+     "@xylabs/ts-scripts-yarn3": "^7.0.2",
+     "@xylabs/tsconfig": "^7.0.2",
+     "@xylabs/vitest-extended": "^5.0.0",
+     "@xyo-network/account": "^5.0.0",
+     "@xyo-network/account-model": "^5.0.0",
+     "@xyo-network/archivist-acceptance-tests": "^5.0.0",
+     "@xyo-network/id-payload-plugin": "^5.0.0",
+     "@xyo-network/payload-wrapper": "^5.0.0",
+     "@xyo-network/wallet": "^5.0.0",
      "typescript": "^5.8.3",
      "uuid": "^11.1.0",
      "vitest": "^3.2.4"
package/src/Archivist.ts CHANGED
@@ -25,7 +25,7 @@ import {
  } from '@xyo-network/payload-model'
  import {
    AbstractBatchOperation, AbstractLevel, AbstractSublevel,
- } from 'abstract-level-2'
+ } from 'abstract-level'
  import { Mutex } from 'async-mutex'
  import { Level } from 'level'
 
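For code that extends AbstractLevelDbArchivist or uses its level-backed type aliases, the import specifier above is the only change in this file; the class surface and type shapes are unchanged. A brief before/after sketch using the real exports from this module (the alias re-declaration is copied from the source for illustration):

// 4.x, via the npm alias:
// import { AbstractBatchOperation, AbstractLevel, AbstractSublevel } from 'abstract-level-2'

// 5.0.0, direct dependency:
import type { AbstractLevel } from 'abstract-level'

import type { Hash } from '@xylabs/hex'
import type { Payload, WithStorageMeta } from '@xyo-network/payload-model'

// Same shape as the AbstractPayloadLevel alias exported by Archivist.ts:
export type AbstractPayloadLevel = AbstractLevel<string | Buffer | Uint8Array, Hash, WithStorageMeta<Payload>>
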
@@ -0,0 +1,427 @@
+ /* eslint-disable complexity */
+ /* eslint-disable max-statements */
+
+ import { tmpdir } from 'node:os'
+
+ import { delay } from '@xylabs/delay'
+ import type { Hash } from '@xylabs/hex'
+ import type { AnyObject } from '@xylabs/object'
+ import { toSafeJsonString } from '@xylabs/object'
+ import { Account } from '@xyo-network/account'
+ import type { AccountInstance } from '@xyo-network/account-model'
+ import { generateArchivistNextTests } from '@xyo-network/archivist-acceptance-tests'
+ import type { ArchivistInstance } from '@xyo-network/archivist-model'
+ import { isArchivistInstance, isArchivistModule } from '@xyo-network/archivist-model'
+ import { IdSchema } from '@xyo-network/id-payload-plugin'
+ import { PayloadBuilder } from '@xyo-network/payload-builder'
+ import type { Payload, WithStorageMeta } from '@xyo-network/payload-model'
+ import { PayloadWrapper } from '@xyo-network/payload-wrapper'
+ import { v4 } from 'uuid'
+ import {
+   beforeAll, describe, expect, it,
+ } from 'vitest'
+
+ import { LevelDbArchivist } from '../Archivist.ts'
+ import { LevelDbArchivistConfigSchema } from '../Config.ts'
+
+ /**
+  * @group module
+  * @group archivist
+  */
+ describe('LevelDbArchivist [full]', () => {
+   type TestPayload = Payload<{ salt: string; schema: string }>
+
+   const fillDb = async (db: ArchivistInstance, count: number = 10): Promise<TestPayload[]> => {
+     const sources = Array.from({ length: count }).map((_, i) => {
+       return { salt: `${i}`, schema: IdSchema }
+     })
+     await db.insert(sources)
+     return sources
+   }
+
+   const shuffleArray = <T>(original: Array<T>) => {
+     const shuffled = [...original]
+     for (let i = shuffled.length - 1; i > 0; i--) {
+       // Generate a random index between 0 and i
+       const j = Math.floor(Math.random() * (i + 1))
+       // Swap elements at indices i and j
+       ;[shuffled[i], shuffled[j]] = [shuffled[j], shuffled[i]]
+     }
+     return shuffled
+   }
+   let account: AccountInstance
+   beforeAll(async () => {
+     account = await Account.random()
+   })
+   describe('config', () => {
+     describe('dbName', () => {
+       it('supplied via config uses config value', async () => {
+         const dbName = 'testDbName'
+         const archivist = await LevelDbArchivist.create({
+           account,
+           config: {
+             dbName, schema: LevelDbArchivistConfigSchema, storeName: 'payloads', location: tmpdir(), clearStoreOnStart: true,
+           },
+         })
+         expect(archivist.dbName).toBe(dbName)
+       })
+     })
+     describe('dbStore', () => {
+       it('supplied via config uses config value', async () => {
+         const dbName = 'testDbName'
+         const storeName = 'testStoreName'
+         const archivist = await LevelDbArchivist.create({
+           account,
+           config: {
+             dbName, schema: LevelDbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
+           },
+         })
+         expect(archivist.storeName).toBe(storeName)
+       })
+       it('allows for multiple dbStores within the same dbName', async () => {
+         const dbName = 'testDbName'
+         const storeName1 = 'testStoreName1'
+         const storeName2 = 'testStoreName2'
+         const archivist1 = await LevelDbArchivist.create({
+           account,
+           config: {
+             dbName, schema: LevelDbArchivistConfigSchema, storeName: storeName1, location: tmpdir(), clearStoreOnStart: true,
+           },
+         })
+         const archivist2 = await LevelDbArchivist.create({
+           account,
+           config: {
+             dbName, schema: LevelDbArchivistConfigSchema, storeName: storeName2, location: tmpdir(), clearStoreOnStart: true,
+           },
+         })
+
+         expect(isArchivistInstance(archivist1)).toBeTruthy()
+         expect(isArchivistModule(archivist1)).toBeTruthy()
+
+         // TODO: This test is not testing the end state of indexedDB, but rather the
+         // state of the Archivist instance and therefore isn't valid. We'd want to actually
+         // open indexedDB and check the state of the stores matches what we want (which it doesn't).
+         expect(archivist1.storeName).toBe(storeName1)
+         expect(archivist2.storeName).toBe(storeName2)
+       })
+     })
+   })
+   describe('all', () => {
+     const dbName = 'e926a178-9c6a-4604-b65c-d1fccd97f1de'
+     const storeName = '27fcea19-c30f-415a-a7f9-0b0514705cb1'
+     let sources: Payload[] = []
+     let archivistModule: ArchivistInstance
+     beforeAll(async () => {
+       archivistModule = await LevelDbArchivist.create({
+         account,
+         config: {
+           dbName, schema: LevelDbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
+         },
+       })
+       sources = await fillDb(archivistModule)
+     })
+     it('returns all data', async () => {
+       const getResult = await archivistModule.all?.()
+       expect(getResult).toBeDefined()
+       expect(getResult?.length).toBe(sources.length)
+       expect(PayloadBuilder.omitStorageMeta(getResult)).toEqual(sources)
+     })
+   })
+
+   describe('delete', () => {
+     const dbName = '6e3fcd65-f24f-4ebc-b314-f597b385fb8e'
+     const storeName = 'c0872f52-32b9-415e-8ca9-af78713cee28'
+     let sources: Payload[] = []
+     let archivistModule: ArchivistInstance
+     beforeAll(async () => {
+       archivistModule = await LevelDbArchivist.create({
+         account,
+         config: {
+           dbName, schema: LevelDbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
+         },
+       })
+       sources = await fillDb(archivistModule)
+     })
+     it('deletes data', async () => {
+       const getResult = (await archivistModule.all?.()) ?? []
+       expect(getResult).toBeDefined()
+       expect(getResult?.length).toBe(sources.length)
+       const dataHashes = (await PayloadBuilder.dataHashes(getResult)) ?? []
+       const deleteResult = await archivistModule.delete?.(dataHashes)
+       expect(deleteResult.length).toBe(dataHashes.length)
+       expect((await archivistModule.all?.()).length).toBe(0)
+     })
+   })
+   describe('get', () => {
+     const dbName = 'b4379714-73d1-42c6-88e7-1a363b7ed86f'
+     const storeName = '3dbdb153-79d0-45d0-b2f7-9f06cdd74b1e'
+     let sources: TestPayload[] = []
+     let archivistModule: ArchivistInstance
+     beforeAll(async () => {
+       archivistModule = await LevelDbArchivist.create({
+         account,
+         config: {
+           dbName, schema: LevelDbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
+         },
+       })
+       sources = await fillDb(archivistModule)
+     })
+     it('gets existing data', async () => {
+       for (const source of sources) {
+         const sourceHash = await PayloadBuilder.dataHash(source)
+         const getResult = await archivistModule.get([sourceHash])
+         expect(getResult).toBeDefined()
+         expect(getResult.length).toBe(1)
+         const resultHash = await PayloadWrapper.wrap(getResult[0]).dataHash()
+         expect(resultHash).toBe(sourceHash)
+       }
+     })
+     it('returned by order of insertion', async () => {
+       const shuffled = shuffleArray(sources)
+       const sourceHashes = await Promise.all(shuffled.map(source => PayloadBuilder.dataHash(source)))
+       const getResult = (await archivistModule.get(sourceHashes)) as WithStorageMeta<TestPayload>[]
+       expect(getResult).toBeDefined()
+       expect(getResult.length).toBe(sourceHashes.length)
+       const salts = sources.map(source => source.salt)
+       const resultSalts = getResult.map(result => result?.salt)
+       expect(resultSalts).toEqual(salts)
+     })
+     it('returns nothing for non-existing hashes', async () => {
+       const hashThatDoesNotExist = '0000000000000000000000000000000000000000000000000000000000000000' as Hash
+       const getResult = await archivistModule.get([hashThatDoesNotExist])
+       expect(getResult).toBeDefined()
+       expect(getResult.length).toBe(0)
+     })
+     describe('by hash', () => {
+       let payload1: Payload<AnyObject>
+       let payload2: Payload<AnyObject>
+       let dataHash1: Hash
+       let dataHash2: Hash
+       let rootHash1: Hash
+       let rootHash2: Hash
+       beforeAll(async () => {
+         const salt = '650123f6-191e-4cc4-a813-f7a29dcbfb0e'
+         payload1 = {
+           $some: [
+             '12bed6aa884f5b7ffc08e19790b5db0da724b8b7471138dcbec090a0798861db0da8255f0d9297ba981b2cbbea65d9eadabac6632124f10f22c709d333a1f285',
+           ],
+           salt,
+           schema: IdSchema,
+         }
+         payload2 = {
+           $some: [
+             '22bed6aa884f5b7ffc08e19790b5db0da724b8b7471138dcbec090a0798861db0da8255f0d9297ba981b2cbbea65d9eadabac6632124f10f22c709d333a1f285',
+           ],
+           salt,
+           schema: IdSchema,
+         }
+         dataHash1 = await PayloadBuilder.dataHash(payload1)
+         dataHash2 = await PayloadBuilder.dataHash(payload2)
+         rootHash1 = await PayloadBuilder.hash(payload1)
+         rootHash2 = await PayloadBuilder.hash(payload2)
+         expect(dataHash1).toBe(dataHash2)
+         expect(rootHash1).not.toBe(rootHash2)
+         await archivistModule.insert([payload1])
+         await archivistModule.insert([payload2])
+       })
+       describe('data hash', () => {
+         it('returns value using hash', async () => {
+           const result = await archivistModule.get([dataHash1])
+           expect(result).toBeDefined()
+           expect(result.length).toBe(1)
+         })
+         it('deduplicates multiple hashes', async () => {
+           const result = await archivistModule.get([dataHash1, dataHash2])
+           expect(result).toBeDefined()
+           expect(result.length).toBe(1)
+         })
+         it('returns the first occurrence of the hash', async () => {
+           // Same data hash contained by multiple root hashes
+           const result = await archivistModule.get([dataHash2])
+           expect(result).toBeDefined()
+           expect(result.length).toBe(1)
+           // Returns the first occurrence of the data hash
+           // expect(PayloadBuilder.omitStorageMeta(result[0])).toEqual(payload1)
+         })
+       })
+       describe('root hash', () => {
+         it('returns value using hash', async () => {
+           const result = await archivistModule.get([rootHash1])
+           expect(result).toBeDefined()
+           expect(result.length).toBe(1)
+         })
+         it('deduplicates multiple hashes', async () => {
+           const result = await archivistModule.get([rootHash1, rootHash1])
+           expect(result).toBeDefined()
+           expect(result.length).toBe(1)
+         })
+       })
+     })
+   })
+   describe('insert', () => {
+     describe('with unique data', () => {
+       const dbName = 'bd86d2dd-dc48-4621-8c1f-105ba2e90287'
+       const storeName = 'f8d14049-2966-4198-a2ab-1c096a949315'
+       let sources: Payload[] = []
+       let archivistModule: ArchivistInstance
+       beforeAll(async () => {
+         archivistModule = await LevelDbArchivist.create({
+           account,
+           config: {
+             dbName, schema: LevelDbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
+           },
+         })
+         sources = await fillDb(archivistModule)
+       })
+       it('can round trip data using data hash', async () => {
+         await Promise.all(
+           sources.map(async (source) => {
+             const sourceHash = await PayloadBuilder.dataHash(source)
+             const getResult = await archivistModule.get([sourceHash])
+             expect(getResult).toBeDefined()
+             expect(getResult.length).toBe(1)
+             const [result] = getResult
+             expect(PayloadBuilder.omitStorageMeta(result)).toEqual(PayloadBuilder.omitStorageMeta(source))
+             const resultHash = await PayloadBuilder.dataHash(result)
+             expect(resultHash).toBe(sourceHash)
+           }),
+         )
+       })
+       it('can round trip data using root hash', async () => {
+         await Promise.all(
+           sources.map(async (source) => {
+             const sourceHash = await PayloadBuilder.hash(source)
+             const getResult = await archivistModule.get([sourceHash])
+             expect(getResult).toBeDefined()
+             expect(getResult.length).toBe(1)
+             const [result] = getResult
+             expect(PayloadBuilder.omitStorageMeta(result)).toEqual(PayloadBuilder.omitStorageMeta(source))
+             const resultHash = await PayloadBuilder.hash(result)
+             expect(resultHash).toBe(sourceHash)
+           }),
+         )
+       })
+     })
+     describe('with duplicate data', () => {
+       const dbName = 'bb43b6fe-2f9e-4bda-8177-f94336353f98'
+       const storeName = '91c6b87d-3ac8-4cfd-8aee-d509f3de0299'
+       let archivistModule: ArchivistInstance
+       beforeAll(async () => {
+         archivistModule = await LevelDbArchivist.create({
+           account,
+           config: {
+             dbName, schema: LevelDbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
+           },
+         })
+       })
+       it('handles duplicate insertions', async () => {
+         // Insert same payload twice
+         const source = { salt: '2d515e1d-d82c-4545-9903-3eded7fefa7c', schema: IdSchema }
+         // First insertion should succeed and return the inserted payload
+         expect((await archivistModule.insert([source]))[0]._hash).toEqual(await PayloadBuilder.hash(source))
+         // Second insertion should succeed but return empty array since no new data was inserted
+         expect(await archivistModule.insert([source])).toEqual([])
+         // Ensure we can get the inserted payload
+         const sourceHash = await PayloadBuilder.dataHash(source)
+         const getResult = await archivistModule.get([sourceHash])
+         expect(getResult).toBeDefined()
+         expect(getResult.length).toBe(1)
+         const resultHash = await PayloadBuilder.dataHash(getResult[0])
+         expect(resultHash).toBe(sourceHash)
+         // Ensure the DB has only one instance of the payload written to it
+         const allResult = await archivistModule.all?.()
+         expect(allResult).toBeDefined()
+         expect(allResult.length).toBe(1)
+       })
+     })
+   })
+
+   describe('next', () => {
+     const dbName = 'bd86d2dd-dc48-4621-8c1f-105ba2e90288'
+     const storeName = 'f8d14049-2966-4198-a2ab-1c096a949316'
+     it('next', async () => {
+       const archivist = await LevelDbArchivist.create({
+         account: 'random',
+         config: {
+           dbName, schema: LevelDbArchivistConfigSchema, storeName, location: tmpdir(), clearStoreOnStart: true,
+         },
+       })
+       const account = await Account.random()
+
+       const payloads1 = [
+         { schema: 'network.xyo.test', value: 1 },
+       ]
+
+       const payloads2 = [
+         { schema: 'network.xyo.test', value: 2 },
+       ]
+
+       const payloads3 = [
+         { schema: 'network.xyo.test', value: 3 },
361
+ ]
362
+
363
+ const payloads4 = [
364
+ { schema: 'network.xyo.test', value: 4 },
365
+ ]
366
+
367
+ await archivist.insert(payloads1)
368
+ await delay(1)
369
+ console.log(toSafeJsonString(payloads1, 10))
370
+ const [bw, payloads, errors] = await archivist.insertQuery(payloads2, account)
371
+ await delay(1)
372
+ await archivist.insert(payloads3)
373
+ await delay(1)
374
+ await archivist.insert(payloads4)
375
+ await delay(1)
376
+ expect(bw).toBeDefined()
377
+ expect(payloads).toBeDefined()
378
+ expect(errors).toBeDefined()
379
+
380
+ const batch1 = await archivist.next?.({ limit: 2 })
381
+ expect(batch1.length).toBe(2)
382
+ expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
383
+
384
+ const batch2 = await archivist.next?.({ limit: 2, cursor: batch1?.[1]._sequence })
385
+ expect(batch2.length).toBe(2)
386
+ expect(await PayloadBuilder.dataHash(batch2?.[1])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
387
+
388
+ const batch3 = await archivist.next?.({ limit: 20 })
389
+ expect(batch3.length).toBe(4)
390
+ expect(await PayloadBuilder.dataHash(batch3?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
391
+
392
+ const batch4 = await archivist.next?.({ limit: 20, cursor: batch1?.[0]._sequence })
393
+ expect(batch4.length).toBe(3)
394
+ expect(await PayloadBuilder.dataHash(batch4?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
395
+
396
+ // desc
397
+ const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' })
398
+ expect(batch1Desc.length).toBe(2)
399
+ expect(await PayloadBuilder.dataHash(batch1Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
400
+
401
+ const batch2Desc = await archivist.next?.({
402
+ limit: 2, cursor: batch1Desc?.[1]._sequence, order: 'desc',
403
+ })
404
+ expect(batch2Desc.length).toBe(2)
405
+ expect(await PayloadBuilder.dataHash(batch2Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
406
+
407
+ const batch3Desc = await archivist.next?.({
408
+ limit: 20, cursor: batch1Desc?.[1]._sequence, order: 'desc',
409
+ })
410
+ expect(batch3Desc.length).toBe(2)
411
+ expect(await PayloadBuilder.dataHash(batch3Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
412
+ })
413
+ })
414
+ generateArchivistNextTests(async () => {
415
+ const dbName = v4()
416
+ const storeName = v4()
417
+ const location = tmpdir()
418
+ const clearStoreOnStart = true
419
+ const schema = LevelDbArchivistConfigSchema
420
+ return await LevelDbArchivist.create({
421
+ account: 'random',
422
+ config: {
423
+ dbName, schema, storeName, location, clearStoreOnStart,
424
+ },
425
+ })
426
+ })
427
+ })
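The 'with duplicate data' case above pins down the insert contract: the first insert of a payload persists it and returns it with storage meta (_hash, _dataHash, _sequence) attached, a repeat insert of identical data returns an empty array, and the store keeps a single copy. A minimal consumer-side sketch of that contract, assuming LevelDbArchivist and LevelDbArchivistConfigSchema are exported from the package root (the tests import them from relative source paths), with a hypothetical dbName:

import { tmpdir } from 'node:os'

import { LevelDbArchivist, LevelDbArchivistConfigSchema } from '@xyo-network/archivist-leveldb'

const archivist = await LevelDbArchivist.create({
  account: 'random',
  config: {
    schema: LevelDbArchivistConfigSchema,
    location: tmpdir(),
    dbName: 'duplicate-demo.db', // hypothetical name, for illustration only
    storeName: 'payloads',
    clearStoreOnStart: true,
  },
})

const payload = { schema: 'network.xyo.test', value: 1 }

// First insertion persists the payload and returns it with storage meta attached
const first = await archivist.insert([payload])
console.assert(first.length === 1 && first[0]._sequence !== undefined)

// Re-inserting identical data is a no-op that returns an empty array
const second = await archivist.insert([payload])
console.assert(second.length === 0)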
@@ -0,0 +1,163 @@
+ /* eslint-disable max-statements */
+ import '@xylabs/vitest-extended'
+
+ import { tmpdir } from 'node:os'
+
+ import { delay } from '@xylabs/delay'
+ import { toSafeJsonString } from '@xylabs/object'
+ import { isArchivistInstance, isArchivistModule } from '@xyo-network/archivist-model'
+ import type { Id } from '@xyo-network/id-payload-plugin'
+ import {
+ asId,
+ IdSchema, isId,
+ } from '@xyo-network/id-payload-plugin'
+ import { PayloadBuilder } from '@xyo-network/payload-builder'
+ import { HDWallet } from '@xyo-network/wallet'
+ import {
+ describe, expect, it,
+ } from 'vitest'
+
+ import { LevelDbArchivist } from '../Archivist.ts'
+ import { LevelDbArchivistConfigSchema } from '../Config.ts'
+
+ /**
+ * @group module
+ * @group archivist
+ */
+ describe('LevelArchivist', () => {
+ it('should listen to cleared events', async () => {
+ const archivist = await LevelDbArchivist.create({
+ account: 'random',
+ config: {
+ schema: LevelDbArchivistConfigSchema, location: tmpdir(), dbName: 'test1.db', storeName: 'payloads', clearStoreOnStart: true,
+ },
+ })
+
+ expect(isArchivistInstance(archivist)).toBe(true)
+ expect(isArchivistModule(archivist)).toBe(true)
+
+ // Create a new promise and resolve it when the event fires
+ const eventPromise = new Promise<void>((resolve) => {
+ archivist.on('cleared', () => {
+ expect(true).toBe(true) // Confirm event fired
+ resolve() // Resolve the promise
+ })
+ })
+ await archivist.clear()
+ return eventPromise
+ })
+
+ it('should return items inserted in the order they were provided in', async () => {
+ const archivist = await LevelDbArchivist.create({
+ account: 'random',
+ config: {
+ schema: LevelDbArchivistConfigSchema, location: tmpdir(), dbName: 'test2.db', storeName: 'payloads', clearStoreOnStart: true,
+ },
+ })
+ const payloads: Id[] = Array.from({ length: 100 }, (_, i) => new PayloadBuilder<Id>({ schema: IdSchema }).fields({ salt: `${i}` }).build())
+ // Ensure payloads were created in the order provided
+ for (const [index, id] of payloads.entries()) {
+ expect(id?.salt).toBe(`${index}`)
+ }
+
+ const withStorageMeta = await PayloadBuilder.addStorageMeta(payloads)
+
+ // Ensure payloads were returned in the order provided
+ for (const [index, result] of withStorageMeta.entries()) {
+ expect(isId(result)).toBe(true)
+ const id = asId(result)
+ expect(id).toBeDefined()
+ expect(id?.salt).toBe(`${index}`)
+ expect(await PayloadBuilder.dataHash(result)).toEqual(await PayloadBuilder.dataHash(payloads[index]))
+ }
+
+ const results = await archivist.insert(payloads)
+ expect(results.length).toBe(payloads.length)
+
+ // Ensure payloads were inserted in the order provided
+ for (const [index, result] of results.entries()) {
+ expect(isId(result)).toBe(true)
+ const id = asId(result)
+ expect(id).toBeDefined()
+ if (index > 0) {
+ expect(result._sequence > results[index - 1]._sequence).toBeTrue()
+ }
+ if (index < 99) {
+ expect(result._sequence < results[index + 1]._sequence).toBeTrue()
+ }
+ if (id?.salt !== `${index}`) {
+ console.warn('result-', results[index - 1])
+ console.warn('result', result)
+ console.warn('result+', results[index + 1])
+ }
+ expect(id?.salt).toBe(`${index}`)
+ expect(await PayloadBuilder.dataHash(result)).toEqual(await PayloadBuilder.dataHash(payloads[index]))
+ }
+ })
+
+ it('next', async () => {
+ const archivist = await LevelDbArchivist.create({
+ account: await HDWallet.random(),
+ config: {
+ schema: LevelDbArchivistConfigSchema, location: tmpdir(), dbName: 'test3.db', storeName: 'payloads', clearStoreOnStart: true,
+ },
+ })
+ const account = await HDWallet.random()
+
+ const payloads1 = [
+ { schema: 'network.xyo.test', value: 1 },
+ ]
+
+ const payloads2 = [
+ { schema: 'network.xyo.test', value: 2 },
+ ]
+
+ const payloads3 = [
+ { schema: 'network.xyo.test', value: 3 },
+ ]
+
+ const payloads4 = [
+ { schema: 'network.xyo.test', value: 4 },
+ ]
+
+ const insertedPayloads1 = await archivist.insert(payloads1)
+ expect(insertedPayloads1[0]._hash).toBe(await PayloadBuilder.hash(payloads1[0]))
+ expect(insertedPayloads1[0]._dataHash).toBe(await PayloadBuilder.dataHash(payloads1[0]))
+ expect(insertedPayloads1[0]._sequence).toBeDefined()
+ await delay(1)
+ console.log(toSafeJsonString(payloads1, 10))
+ const [bw, payloads, errors] = await archivist.insertQuery(payloads2, account)
+ expect(bw).toBeDefined()
+ expect(payloads).toBeDefined()
+ expect(errors).toBeDefined()
+ await delay(1)
+ await archivist.insert(payloads3)
+ await delay(1)
+ await archivist.insert(payloads4)
+
+ console.log('bw', toSafeJsonString([bw, payloads, errors], 10))
+
+ const batch1 = await archivist.next?.({ limit: 2 })
+ expect(batch1).toBeArrayOfSize(2)
+ expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
+ expect(await PayloadBuilder.dataHash(batch1?.[0])).toEqual(await PayloadBuilder.dataHash(insertedPayloads1[0]))
+
+ const batch2 = await archivist.next?.({ limit: 2, cursor: batch1?.[0]._sequence })
+ expect(batch2).toBeArrayOfSize(2)
+ expect(await PayloadBuilder.dataHash(batch2?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
+ expect(await PayloadBuilder.dataHash(batch2?.[1])).toEqual(await PayloadBuilder.dataHash(payloads3[0]))
+
+ // desc
+ const batch1Desc = await archivist.next?.({ limit: 2, order: 'desc' })
+ expect(batch1Desc).toBeArrayOfSize(2)
+ expect(await PayloadBuilder.dataHash(batch1Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads4[0]))
+ expect(await PayloadBuilder.dataHash(batch1Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads3[0]))
+
+ const batch2Desc = await archivist.next?.({
+ limit: 2, cursor: batch1Desc[1]._sequence, order: 'desc',
+ })
+ expect(batch2Desc).toBeArrayOfSize(2)
+ expect(await PayloadBuilder.dataHash(batch2Desc?.[0])).toEqual(await PayloadBuilder.dataHash(payloads2[0]))
+ expect(await PayloadBuilder.dataHash(batch2Desc?.[1])).toEqual(await PayloadBuilder.dataHash(payloads1[0]))
+ })
+ })
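The ordering test above leans on a single invariant: every inserted payload is assigned a strictly increasing _sequence, and next treats that value as an opaque cursor, so resuming from the last _sequence seen neither skips nor repeats payloads. A sketch of that invariant, under the same assumption about package-root exports (IdSchema comes from @xyo-network/id-payload-plugin, as in the tests; the dbName is hypothetical):

import { tmpdir } from 'node:os'

import { IdSchema } from '@xyo-network/id-payload-plugin'
import { LevelDbArchivist, LevelDbArchivistConfigSchema } from '@xyo-network/archivist-leveldb'

const archivist = await LevelDbArchivist.create({
  account: 'random',
  config: {
    schema: LevelDbArchivistConfigSchema,
    location: tmpdir(),
    dbName: 'sequence-demo.db', // hypothetical name
    storeName: 'payloads',
    clearStoreOnStart: true,
  },
})

const results = await archivist.insert(Array.from({ length: 5 }, (_, i) => ({ salt: `${i}`, schema: IdSchema })))

// _sequence increases strictly in input order
for (let i = 1; i < results.length; i++) {
  console.assert(results[i - 1]._sequence < results[i]._sequence)
}

// Paging by the last cursor seen resumes exactly where the previous batch ended
const page1 = await archivist.next?.({ limit: 3 })
const page2 = await archivist.next?.({ limit: 3, cursor: page1?.[2]?._sequence })
console.assert((page1?.length ?? 0) === 3 && (page2?.length ?? 0) === 2)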
@@ -0,0 +1,66 @@
+ import '@xylabs/vitest-extended'
+
+ import { tmpdir } from 'node:os'
+
+ import type { Id } from '@xyo-network/id-payload-plugin'
+ import { IdSchema } from '@xyo-network/id-payload-plugin'
+ import { PayloadBuilder } from '@xyo-network/payload-builder'
+ import {
+ describe, expect, it,
+ } from 'vitest'
+
+ import { LevelDbArchivist } from '../Archivist.ts'
+ import { LevelDbArchivistConfigSchema } from '../Config.ts'
+
+ /**
+ * @group module
+ * @group archivist
+ */
+ describe('LevelArchivist', () => {
+ it('should isolate stores across locations, dbNames, and storeNames', async () => {
+ const archivistLevel1Test1Payloads1 = await LevelDbArchivist.create({
+ account: 'random',
+ config: {
+ schema: LevelDbArchivistConfigSchema, location: tmpdir() + '/level1', dbName: 'test1.db', storeName: 'payloads1', clearStoreOnStart: true,
+ },
+ })
+ const archivistLevel1Test1Payloads2 = await LevelDbArchivist.create({
+ account: 'random',
+ config: {
+ schema: LevelDbArchivistConfigSchema, location: tmpdir() + '/level1', dbName: 'test1.db', storeName: 'payloads2', clearStoreOnStart: true,
+ },
+ })
+ const archivistLevel1Test2Payloads1 = await LevelDbArchivist.create({
+ account: 'random',
+ config: {
+ schema: LevelDbArchivistConfigSchema, location: tmpdir() + '/level1', dbName: 'test2.db', storeName: 'payloads1', clearStoreOnStart: true,
+ },
+ })
+ const archivistLevel2Test1Payloads1 = await LevelDbArchivist.create({
+ account: 'random',
+ config: {
+ schema: LevelDbArchivistConfigSchema, location: tmpdir() + '/level2', dbName: 'test1.db', storeName: 'payloads1', clearStoreOnStart: true,
+ },
+ })
+
+ const p111Original = await PayloadBuilder.addStorageMeta(new PayloadBuilder<Id>({ schema: IdSchema }).fields({ salt: '111' }).build())
+ const p112Original = await PayloadBuilder.addStorageMeta(new PayloadBuilder<Id>({ schema: IdSchema }).fields({ salt: '112' }).build())
+ const p121Original = await PayloadBuilder.addStorageMeta(new PayloadBuilder<Id>({ schema: IdSchema }).fields({ salt: '121' }).build())
+ const p211Original = await PayloadBuilder.addStorageMeta(new PayloadBuilder<Id>({ schema: IdSchema }).fields({ salt: '211' }).build())
+
+ await archivistLevel1Test1Payloads1.insert([p111Original])
+ await archivistLevel1Test1Payloads2.insert([p112Original])
+ await archivistLevel1Test2Payloads1.insert([p121Original])
+ await archivistLevel2Test1Payloads1.insert([p211Original])
+
+ const [p111] = await archivistLevel1Test1Payloads1.next()
+ const [p112] = await archivistLevel1Test1Payloads2.next()
+ const [p121] = await archivistLevel1Test2Payloads1.next()
+ const [p211] = await archivistLevel2Test1Payloads1.next()
+
+ expect(p111._hash).toBe(p111Original._hash)
+ expect(p112._hash).toBe(p112Original._hash)
+ expect(p121._hash).toBe(p121Original._hash)
+ expect(p211._hash).toBe(p211Original._hash)
+ })
+ })
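The four-archivist test above varies exactly one of location, dbName, and storeName at a time and verifies that each instance reads back only its own payload, implying that two LevelDbArchivist instances share data only when all three values match. A compact sketch of that rule, again assuming package-root exports; the path, dbName, and storeNames are hypothetical, and the final emptiness check is an inference from the test rather than an assertion it makes:

import { tmpdir } from 'node:os'

import { IdSchema } from '@xyo-network/id-payload-plugin'
import { LevelDbArchivist, LevelDbArchivistConfigSchema } from '@xyo-network/archivist-leveldb'

// Shared location and dbName; only storeName differs
const base = {
  schema: LevelDbArchivistConfigSchema,
  location: tmpdir() + '/isolation-demo',
  dbName: 'shared.db',
  clearStoreOnStart: true,
}
const storeA = await LevelDbArchivist.create({ account: 'random', config: { ...base, storeName: 'a' } })
const storeB = await LevelDbArchivist.create({ account: 'random', config: { ...base, storeName: 'b' } })

await storeA.insert([{ salt: 'only-in-a', schema: IdSchema }])

// Different storeName means an independent store: storeB should see nothing
console.assert((await storeB.next?.({ limit: 10 }))?.length === 0)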
package/xy.config.ts DELETED
@@ -1,10 +0,0 @@
- import type { XyTsupConfig } from '@xylabs/ts-scripts-yarn3'
- const config: XyTsupConfig = {
- compile: {
- browser: { src: true },
- neutral: {},
- node: {},
- },
- }
-
- export default config