@xyo-network/archivist-storage 3.6.0-rc.1 → 3.6.0-rc.3
This diff compares the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,11 +1,10 @@
-import type { Hash } from '@xylabs/hex';
+import type { Hash, Hex } from '@xylabs/hex';
 import type { Promisable, PromisableArray } from '@xylabs/promise';
-import type { WithStorageMeta } from '@xyo-network/archivist-abstract';
 import { AbstractArchivist } from '@xyo-network/archivist-abstract';
 import type { ArchivistConfig, ArchivistInstance, ArchivistModuleEventData, ArchivistNextOptions, ArchivistParams } from '@xyo-network/archivist-model';
 import type { BoundWitness } from '@xyo-network/boundwitness-model';
 import type { AnyConfigSchema } from '@xyo-network/module-model';
-import type { Payload, Schema } from '@xyo-network/payload-model';
+import type { Payload, Schema, WithStorageMeta } from '@xyo-network/payload-model';
 export type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config';
 export declare const StorageArchivistConfigSchema: StorageArchivistConfigSchema;
 export type StorageArchivistConfig = ArchivistConfig<{
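The hunk above ends at the opening brace of StorageArchivistConfig, so the config fields themselves are not visible in this diff. For reference, the full type as embedded in the package's own source map is reproduced below (it is unchanged between rc.1 and rc.3); the comments about defaults come from the class getters in that same embedded source.

```ts
import type { ArchivistConfig } from '@xyo-network/archivist-model'

export type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'

// Reproduced from the source embedded in the package's source map.
export type StorageArchivistConfig = ArchivistConfig<{
  maxEntries?: number // the class defaults this to 1000
  maxEntrySize?: number // the class defaults this to 16_000; enforced against JSON length in insertHandler
  namespace?: string // store2 namespace, defaults to 'xyo-archivist'
  schema: StorageArchivistConfigSchema
  type?: 'local' | 'session' | 'page' // which store2 backend to use, defaults to 'local'
}>
```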
@@ -28,14 +27,14 @@ export declare class StorageArchivist<TParams extends StorageArchivistParams = S
     get type(): "local" | "session" | "page";
     private get privateStorage();
     private get storage();
-    protected allHandler(): PromisableArray<Payload>;
+    protected allHandler(): PromisableArray<WithStorageMeta<Payload>>;
     protected clearHandler(): void | Promise<void>;
     protected commitHandler(): Promise<BoundWitness[]>;
     protected deleteHandler(hashes: Hash[]): Promise<Hash[]>;
-    protected getFromOffset(order?: 'asc' | 'desc', limit?: number, offset?: Hash): WithStorageMeta[];
-    protected getHandler(hashes: string[]): Promisable<Payload[]>;
-    protected insertHandler(payloads: Payload[]): Promise<Payload[]>;
-    protected nextHandler(options?: ArchivistNextOptions): Promisable<Payload[]>;
+    protected getFromCursor(order?: 'asc' | 'desc', limit?: number, cursor?: Hex): WithStorageMeta[];
+    protected getHandler(hashes: string[]): Promisable<WithStorageMeta<Payload>[]>;
+    protected insertHandler(payloads: Payload[]): Promise<WithStorageMeta<Payload>[]>;
+    protected nextHandler(options?: ArchivistNextOptions): Promisable<WithStorageMeta<Payload>[]>;
     protected startHandler(): Promise<boolean>;
 }
 //# sourceMappingURL=StorageArchivist.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"StorageArchivist.d.ts","sourceRoot":"","sources":["../../src/StorageArchivist.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;
+
{"version":3,"file":"StorageArchivist.d.ts","sourceRoot":"","sources":["../../src/StorageArchivist.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,GAAG,EAAE,MAAM,aAAa,CAAA;AAC5C,OAAO,KAAK,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAA;AAElE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,KAAK,EACV,eAAe,EAEf,iBAAiB,EACjB,wBAAwB,EACxB,oBAAoB,EACpB,eAAe,EAChB,MAAM,8BAA8B,CAAA;AAQrC,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAA;AAEhE,OAAO,KAAK,EACV,OAAO,EAAE,MAAM,EAAE,eAAe,EACjC,MAAM,4BAA4B,CAAA;AAMnC,MAAM,MAAM,4BAA4B,GAAG,sCAAsC,CAAA;AACjF,eAAO,MAAM,4BAA4B,EAAE,4BAAqE,CAAA;AAEhH,MAAM,MAAM,sBAAsB,GAAG,eAAe,CAAC;IACnD,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,MAAM,EAAE,4BAA4B,CAAA;IACpC,IAAI,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,CAAA;CACpC,CAAC,CAAA;AAEF,MAAM,MAAM,sBAAsB,GAAG,eAAe,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAA;AAC7F,qBAAa,gBAAgB,CAC3B,OAAO,SAAS,sBAAsB,GAAG,sBAAsB,EAC/D,UAAU,SAAS,wBAAwB,GAAG,wBAAwB,CAEtE,SAAQ,iBAAiB,CAAC,OAAO,EAAE,UAAU,CAC7C,YAAW,iBAAiB;IAC5B,gBAAyB,aAAa,EAAE,MAAM,EAAE,CAAyD;IACzG,gBAAyB,mBAAmB,EAAE,MAAM,CAA+B;IAEnF,OAAO,CAAC,eAAe,CAAuB;IAC9C,OAAO,CAAC,QAAQ,CAAuB;IAEvC,IAAI,UAAU,WAEb;IAED,IAAI,YAAY,WAEf;IAED,IAAI,SAAS,WAEZ;IAED,IAAa,OAAO,IAAI,MAAM,EAAE,CAS/B;IAED,IAAI,IAAI,iCAEP;IAGD,OAAO,KAAK,cAAc,GAGzB;IAGD,OAAO,KAAK,OAAO,GAGlB;cAEkB,UAAU,IAAI,eAAe,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC;cAiBvD,YAAY,IAAI,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;cAM9B,aAAa,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;cAgBxC,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;IAWvE,SAAS,CAAC,aAAa,CACrB,KAAK,GAAE,KAAK,GAAG,MAAc,EAC7B,KAAK,GAAE,MAAW,EAClB,MAAM,CAAC,EAAE,GAAG,GACX,eAAe,EAAE;cAaD,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,UAAU,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAiB9D,aAAa,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAY7E,WAAW,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,UAAU,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAO7E,YAAY;CAItC"}
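The declaration changes above replace Hash-offset paging with a Hex sequence cursor. The sketch below shows how a consumer might page through results under that model; the public next() shape and the _sequence field name are assumptions inferred from the protected handler signatures, not APIs confirmed by this diff.

```ts
import type { Hex } from '@xylabs/hex'

// Assumed shapes: a public next() mirroring nextHandler's options, and payloads
// that carry the Hex `_sequence` storage-meta field used as the cursor above.
type Sequenced = { _sequence: Hex }
type NextFn = (options?: { cursor?: Hex; limit?: number; order?: 'asc' | 'desc' }) => Promise<Sequenced[]>

// Page through an archivist until it returns an empty batch.
async function readAll(next: NextFn, limit = 10): Promise<Sequenced[]> {
  const all: Sequenced[] = []
  let cursor: Hex | undefined
  for (;;) {
    const batch = await next({ cursor, limit, order: 'asc' })
    if (batch.length === 0) break
    all.push(...batch)
    cursor = batch[batch.length - 1]._sequence // resume just after the last item returned
  }
  return all
}
```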
package/dist/neutral/index.mjs CHANGED
@@ -11,7 +11,7 @@ import { PayloadBuilder } from "@xyo-network/payload-builder";
 import store from "store2";
 var storeTypes = store;
 var StorageArchivistConfigSchema = "network.xyo.archivist.storage.config";
-var StorageArchivist = class _StorageArchivist extends AbstractArchivist {
+var StorageArchivist = class extends AbstractArchivist {
   static {
     __name(this, "StorageArchivist");
   }
@@ -93,24 +93,13 @@ var StorageArchivist = class _StorageArchivist extends AbstractArchivist {
       return hash;
     }))).filter(exists);
   }
-  getFromOffset(order = "asc", limit = 10, offset) {
-    const offsetHash = offset ? this.storage.get(offset)?._dataHash : undefined;
-    const found = /* @__PURE__ */ new Set();
-    const payloads = Object.entries(this.storage.getAll()).map(([, value]) => value).filter((payload) => {
-      if (found.has(payload._dataHash)) {
-        return false;
-      } else {
-        found.add(payload._dataHash);
-        return true;
-      }
-    }).sort((a, b) => {
-      return order === "asc" ? a._timestamp - b._timestamp : b._timestamp - a._timestamp;
+  getFromCursor(order = "asc", limit = 10, cursor) {
+    const payloads = Object.entries(this.storage.getAll()).map(([, value]) => value).sort((a, b) => {
+      return order === "asc" ? a._sequence - b._sequence : b._sequence - a._sequence;
     });
-    if (offsetHash) {
-      const index = payloads.findIndex((payload) => payload._dataHash === offsetHash);
-      if (index !== -1) {
-        return payloads.slice(index + 1, index + 1 + limit);
-      }
+    const index = payloads.findIndex((payload) => payload._sequence === cursor);
+    if (index !== -1) {
+      return payloads.slice(index + 1, index + 1 + limit);
     }
     return payloads.slice(0, limit);
   }
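The new getFromCursor above sorts by the _sequence storage-meta value and slices the window that follows the cursor. Below is a standalone restatement of that selection step, with illustrative names and a numeric sequence for brevity (the package types the sequence as Hex); it is a sketch, not an export of this package.

```ts
// Order by a monotonic sequence, then return the window that starts just after
// the cursor, falling back to the first `limit` items when no cursor matches.
function pageBySequence<T extends { _sequence: number }>(
  items: T[],
  order: 'asc' | 'desc' = 'asc',
  limit = 10,
  cursor?: number,
): T[] {
  const sorted = [...items].sort((a, b) => (order === 'asc' ? a._sequence - b._sequence : b._sequence - a._sequence))
  const index = sorted.findIndex(item => item._sequence === cursor)
  return index === -1 ? sorted.slice(0, limit) : sorted.slice(index + 1, index + 1 + limit)
}
```

Note that an unknown or absent cursor falls through to the first page, matching the behavior of the compiled code above.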
@@ -128,19 +117,19 @@ var StorageArchivist = class _StorageArchivist extends AbstractArchivist {
     }).map((payload) => PayloadBuilder.omitStorageMeta(payload));
   }
   async insertHandler(payloads) {
-    return await Promise.all(payloads.map(async (payload, index) => {
-      const storagePayload = await _StorageArchivist.addSequencedStorageMeta(payload, index);
+    return await Promise.all(payloads.map(async (payload) => {
+      const storagePayload = await PayloadBuilder.addSequencedStorageMeta(payload);
       const value = JSON.stringify(storagePayload);
       console.log("insert.storagePayloads:", storagePayload);
       assertEx(value.length < this.maxEntrySize, () => `Payload too large [${storagePayload._hash}, ${value.length}]`);
       this.storage.set(storagePayload._hash, storagePayload);
       this.storage.set(storagePayload._dataHash, storagePayload);
-      return payload;
+      return storagePayload;
     }));
   }
   nextHandler(options) {
-    const { limit, offset, order } = options ?? {};
-    return this.getFromOffset(order, limit ?? 10, offset);
+    const { limit, cursor, order } = options ?? {};
+    return this.getFromCursor(order, limit ?? 10, cursor);
   }
   async startHandler() {
     await super.startHandler();
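The handlers above lean on several underscore-prefixed storage-meta fields. Their authoritative definition (WithStorageMeta in @xyo-network/payload-model) is not part of this diff; the sketch below only records the fields this file actually touches, as an inference from usage.

```ts
// Inferred from usage in this file only, not from the payload-model definition.
type StorageMetaFieldsUsedHere = {
  _dataHash: string // secondary storage key; also the dedup key in allHandler and getHandler
  _hash: string // primary storage key; quoted in the "Payload too large" assertion message
  _sequence: string // monotonic insert order (typed Hex upstream); the paging cursor in getFromCursor
  _timestamp: number // still used by allHandler's sort
}
```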
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/StorageArchivist.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { exists } from '@xylabs/exists'\nimport type { Hash } from '@xylabs/hex'\nimport type { Promisable, PromisableArray } from '@xylabs/promise'\nimport { fulfilled } from '@xylabs/promise'\nimport type { WithStorageMeta } from '@xyo-network/archivist-abstract'\nimport { AbstractArchivist } from '@xyo-network/archivist-abstract'\nimport type {\n ArchivistConfig,\n ArchivistInsertQuery,\n ArchivistInstance,\n ArchivistModuleEventData,\n ArchivistNextOptions,\n ArchivistParams,\n} from '@xyo-network/archivist-model'\nimport {\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistCommitQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n} from '@xyo-network/archivist-model'\nimport type { BoundWitness } from '@xyo-network/boundwitness-model'\nimport type { AnyConfigSchema } from '@xyo-network/module-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport type { Payload, Schema } from '@xyo-network/payload-model'\nimport type { StoreBase, StoreType } from 'store2'\nimport store from 'store2'\n\nconst storeTypes = store as unknown as StoreType\n\nexport type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\nexport const StorageArchivistConfigSchema: StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\n\nexport type StorageArchivistConfig = ArchivistConfig<{\n maxEntries?: number\n maxEntrySize?: number\n namespace?: string\n schema: StorageArchivistConfigSchema\n type?: 'local' | 'session' | 'page'\n}>\n\nexport type StorageArchivistParams = ArchivistParams<AnyConfigSchema<StorageArchivistConfig>>\nexport class StorageArchivist<\n TParams extends StorageArchivistParams = StorageArchivistParams,\n TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,\n>\n extends AbstractArchivist<TParams, TEventData>\n implements ArchivistInstance {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, StorageArchivistConfigSchema]\n static override readonly defaultConfigSchema: Schema = StorageArchivistConfigSchema\n\n private _privateStorage: StoreBase | undefined\n private _storage: StoreBase | undefined\n\n get maxEntries() {\n return this.config?.maxEntries ?? 1000\n }\n\n get maxEntrySize() {\n return this.config?.maxEntrySize ?? 16_000\n }\n\n get namespace() {\n return this.config?.namespace ?? 'xyo-archivist'\n }\n\n override get queries(): string[] {\n return [\n ArchivistAllQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistInsertQuerySchema,\n ArchivistCommitQuerySchema,\n ...super.queries,\n ]\n }\n\n get type() {\n return this.config?.type ?? 'local'\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get privateStorage(): StoreBase {\n this._privateStorage = this._storage ?? storeTypes[this.type].namespace(`${this.namespace}|private`)\n return this._privateStorage\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get storage(): StoreBase {\n this._storage = this._storage ?? 
storeTypes[this.type].namespace(this.namespace)\n return this._storage\n }\n\n protected override allHandler(): PromisableArray<Payload> {\n const found = new Set<string>()\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n return Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n })\n .sort((a, b) => a._timestamp - b._timestamp)\n .map(payload => PayloadBuilder.omitStorageMeta(payload))\n }\n\n protected override clearHandler(): void | Promise<void> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n this.storage.clear()\n return this.emit('cleared', { mod: this })\n }\n\n protected override async commitHandler(): Promise<BoundWitness[]> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n const payloads = await this.all()\n assertEx(payloads.length > 0, () => 'Nothing to commit')\n const settled = (await Promise.allSettled(\n Object.values((await this.parentArchivists()).commit ?? [])?.map(async (parent) => {\n const queryPayload: ArchivistInsertQuery = { schema: ArchivistInsertQuerySchema }\n const query = await this.bindQuery(queryPayload, payloads)\n return (await parent?.query(query[0], query[1]))?.[0]\n }),\n )).filter(exists)\n // TODO - rather than clear, delete the payloads that come back as successfully inserted\n await this.clear()\n return (settled.filter(fulfilled).map(result => result.value)).filter(exists)\n }\n\n protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {\n return (\n await Promise.all(\n hashes.map((hash) => {\n this.storage.remove(hash)\n return hash\n }),\n )\n ).filter(exists)\n }\n\n protected getFromOffset(\n order: 'asc' | 'desc' = 'asc',\n limit: number = 10,\n offset?: Hash,\n ): WithStorageMeta[] {\n const offsetHash = offset ? (this.storage.get(offset) as WithStorageMeta | undefined)?._dataHash : undefined\n const found = new Set<string>()\n const payloads: WithStorageMeta[] = Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n })\n .sort((a, b) => {\n return order === 'asc' ? 
a._timestamp - b._timestamp : b._timestamp - a._timestamp\n })\n if (offsetHash) {\n const index = payloads.findIndex(payload => payload._dataHash === offsetHash)\n if (index !== -1) {\n return payloads.slice(index + 1, index + 1 + limit)\n }\n }\n return payloads.slice(0, limit)\n }\n\n protected override getHandler(hashes: string[]): Promisable<Payload[]> {\n const found = new Set<string>()\n return (\n hashes.map((hash) => {\n return this.storage.get(hash)\n })\n ).filter(exists)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n }).map(payload => PayloadBuilder.omitStorageMeta(payload))\n }\n\n protected override async insertHandler(payloads: Payload[]): Promise<Payload[]> {\n return await Promise.all(payloads.map(async (payload, index) => {\n const storagePayload = await StorageArchivist.addSequencedStorageMeta(payload, index)\n const value = JSON.stringify(storagePayload)\n console.log('insert.storagePayloads:', storagePayload)\n assertEx(value.length < this.maxEntrySize, () => `Payload too large [${storagePayload._hash}, ${value.length}]`)\n this.storage.set(storagePayload._hash, storagePayload)\n this.storage.set(storagePayload._dataHash, storagePayload)\n return payload\n }))\n }\n\n protected override nextHandler(options?: ArchivistNextOptions): Promisable<Payload[]> {\n const {\n limit, offset, order,\n } = options ?? {}\n return this.getFromOffset(order, limit ?? 10, offset)\n }\n\n protected override async startHandler() {\n await super.startHandler()\n return true\n }\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,cAAc;AAGvB,SAASC,iBAAiB;AAE1B,SAASC,yBAAyB;AASlC,SACEC,yBACAC,2BACAC,4BACAC,4BACAC,kCACK;AAGP,SAASC,sBAAsB;AAG/B,OAAOC,WAAW;AAElB,IAAMC,aAAaC;AAGZ,IAAMC,+BAA6D;AAWnE,IAAMC,mBAAN,MAAMA,0BAIHC,kBAAAA;EA/CV,OA+CUA;;;EAER,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeH;;EAC5E,OAAyBI,sBAA8BJ;EAE/CK;EACAC;EAER,IAAIC,aAAa;AACf,WAAO,KAAKC,QAAQD,cAAc;EACpC;EAEA,IAAIE,eAAe;AACjB,WAAO,KAAKD,QAAQC,gBAAgB;EACtC;EAEA,IAAIC,YAAY;AACd,WAAO,KAAKF,QAAQE,aAAa;EACnC;EAEA,IAAaC,UAAoB;AAC/B,WAAO;MACLC;MACAC;MACAC;MACAC;MACAC;SACG,MAAML;;EAEb;EAEA,IAAIM,OAAO;AACT,WAAO,KAAKT,QAAQS,QAAQ;EAC9B;;EAGA,IAAYC,iBAA4B;AACtC,SAAKb,kBAAkB,KAAKC,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,GAAG,KAAKA,SAAS,UAAU;AACnG,WAAO,KAAKL;EACd;;EAGA,IAAYc,UAAqB;AAC/B,SAAKb,WAAW,KAAKA,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,KAAKA,SAAS;AAC/E,WAAO,KAAKJ;EACd;EAEmBc,aAAuC;AACxD,UAAMC,QAAQ,oBAAIC,IAAAA;AAClB,SAAKC,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,WAAOC,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACtCC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBC,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EACCE,KAAK,CAACC,GAAGC,MAAMD,EAAEE,aAAaD,EAAEC,UAAU,EAC1CV,IAAIG,CAAAA,YAAWQ,eAAeC,gBAAgBT,OAAAA,CAAAA;EACnD;EAEmBU,eAAqC;AACtD,SAAKnB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,SAAKN,QAAQwB,MAAK;AAClB,WAAO,KAAKC,KAAK,WAAW;MAAEC,KAAK;IAAK,CAAA;EAC1C;EAEA,MAAyBC,gBAAyC;AAChE,SAAKvB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,UAAMsB,WAAW,MAAM,KAAKC,IAAG;AAC/BC,aAASF,SAAStB,SAAS,GAAG,MAAM,mBAAA;AACpC,UAAMyB,WAAW,MAAMC,QAAQC,WAC7B1B,OAAO2B,QAAQ,MAAM,KAAKC,iBAAgB,GAAIC,UAAU,CAAA,CAAE,GAAG1B,IAAI,OAAO2B,WAAAA;AACtE,YAAMC,eAAqC;QAAEC,QAAQ3C;MAA2B;AAChF,YAAM4C,QAAQ,MAAM,KAAKC,UAAUH,cAAcV,QAAAA;AACjD,cAAQ,MAAMS,QAAQG,MAAMA,MAAM,CAAA,GAAIA,MAAM,CAAA,CAAE,KAAK,CAAA;IACrD,CAAA,CAAA,GACC5B,OAAO8B,MAAAA;AAEV,UAAM,KAAKlB,MAAK;AAChB,WAAQO,QAAQnB,OAAO+B,SAAAA,EAAWjC,IAAIkC,CAAAA,WAAUA,OAAOjC,KAA
K,EAAGC,OAAO8B,MAAAA;EACxE;EAEA,MAAyBG,cAAcC,QAAiC;AACtE,YACE,MAAMd,QAAQH,IACZiB,OAAOpC,IAAI,CAACqC,SAAAA;AACV,WAAK/C,QAAQgD,OAAOD,IAAAA;AACpB,aAAOA;IACT,CAAA,CAAA,GAEFnC,OAAO8B,MAAAA;EACX;EAEUO,cACRC,QAAwB,OACxBC,QAAgB,IAChBC,QACmB;AACnB,UAAMC,aAAaD,SAAU,KAAKpD,QAAQsD,IAAIF,MAAAA,GAAyCrC,YAAYwC;AACnG,UAAMrD,QAAQ,oBAAIC,IAAAA;AAClB,UAAMyB,WAA8BrB,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACnEC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBC,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EACCE,KAAK,CAACC,GAAGC,MAAAA;AACR,aAAO+B,UAAU,QAAQhC,EAAEE,aAAaD,EAAEC,aAAaD,EAAEC,aAAaF,EAAEE;IAC1E,CAAA;AACF,QAAIiC,YAAY;AACd,YAAMG,QAAQ5B,SAAS6B,UAAU5C,CAAAA,YAAWA,QAAQE,cAAcsC,UAAAA;AAClE,UAAIG,UAAU,IAAI;AAChB,eAAO5B,SAAS8B,MAAMF,QAAQ,GAAGA,QAAQ,IAAIL,KAAAA;MAC/C;IACF;AACA,WAAOvB,SAAS8B,MAAM,GAAGP,KAAAA;EAC3B;EAEmBQ,WAAWb,QAAyC;AACrE,UAAM5C,QAAQ,oBAAIC,IAAAA;AAClB,WACE2C,OAAOpC,IAAI,CAACqC,SAAAA;AACV,aAAO,KAAK/C,QAAQsD,IAAIP,IAAAA;IAC1B,CAAA,EACAnC,OAAO8B,MAAAA,EACN9B,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EAAGL,IAAIG,CAAAA,YAAWQ,eAAeC,gBAAgBT,OAAAA,CAAAA;EACrD;EAEA,MAAyB+C,cAAchC,UAAyC;AAC9E,WAAO,MAAMI,QAAQH,IAAID,SAASlB,IAAI,OAAOG,SAAS2C,UAAAA;AACpD,YAAMK,iBAAiB,MAAM/E,kBAAiBgF,wBAAwBjD,SAAS2C,KAAAA;AAC/E,YAAM7C,QAAQoD,KAAKC,UAAUH,cAAAA;AAC7BI,cAAQ5D,IAAI,2BAA2BwD,cAAAA;AACvC/B,eAASnB,MAAML,SAAS,KAAKhB,cAAc,MAAM,sBAAsBuE,eAAeK,KAAK,KAAKvD,MAAML,MAAM,GAAG;AAC/G,WAAKN,QAAQmE,IAAIN,eAAeK,OAAOL,cAAAA;AACvC,WAAK7D,QAAQmE,IAAIN,eAAe9C,WAAW8C,cAAAA;AAC3C,aAAOhD;IACT,CAAA,CAAA;EACF;EAEmBuD,YAAYC,SAAuD;AACpF,UAAM,EACJlB,OAAOC,QAAQF,MAAK,IAClBmB,WAAW,CAAC;AAChB,WAAO,KAAKpB,cAAcC,OAAOC,SAAS,IAAIC,MAAAA;EAChD;EAEA,MAAyBkB,eAAe;AACtC,UAAM,MAAMA,aAAAA;AACZ,WAAO;EACT;AACF;","names":["assertEx","exists","fulfilled","AbstractArchivist","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistCommitQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","PayloadBuilder","store","storeTypes","store","StorageArchivistConfigSchema","StorageArchivist","AbstractArchivist","configSchemas","defaultConfigSchema","_privateStorage","_storage","maxEntries","config","maxEntrySize","namespace","queries","ArchivistAllQuerySchema","ArchivistDeleteQuerySchema","ArchivistClearQuerySchema","ArchivistInsertQuerySchema","ArchivistCommitQuerySchema","type","privateStorage","storage","allHandler","found","Set","logger","log","length","Object","entries","getAll","map","value","filter","payload","has","_dataHash","add","sort","a","b","_timestamp","PayloadBuilder","omitStorageMeta","clearHandler","clear","emit","mod","commitHandler","payloads","all","assertEx","settled","Promise","allSettled","values","parentArchivists","commit","parent","queryPayload","schema","query","bindQuery","exists","fulfilled","result","deleteHandler","hashes","hash","remove","getFromOffset","order","limit","offset","offsetHash","get","undefined","index","findIndex","slice","getHandler","insertHandler","storagePayload","addSequencedStorageMeta","JSON","stringify","console","_hash","set","nextHandler","options","startHandler"]}
+
{"version":3,"sources":["../../src/StorageArchivist.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { exists } from '@xylabs/exists'\nimport type { Hash, Hex } from '@xylabs/hex'\nimport type { Promisable, PromisableArray } from '@xylabs/promise'\nimport { fulfilled } from '@xylabs/promise'\nimport { AbstractArchivist } from '@xyo-network/archivist-abstract'\nimport type {\n ArchivistConfig,\n ArchivistInsertQuery,\n ArchivistInstance,\n ArchivistModuleEventData,\n ArchivistNextOptions,\n ArchivistParams,\n} from '@xyo-network/archivist-model'\nimport {\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistCommitQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n} from '@xyo-network/archivist-model'\nimport type { BoundWitness } from '@xyo-network/boundwitness-model'\nimport type { AnyConfigSchema } from '@xyo-network/module-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport type {\n Payload, Schema, WithStorageMeta,\n} from '@xyo-network/payload-model'\nimport type { StoreBase, StoreType } from 'store2'\nimport store from 'store2'\n\nconst storeTypes = store as unknown as StoreType\n\nexport type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\nexport const StorageArchivistConfigSchema: StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\n\nexport type StorageArchivistConfig = ArchivistConfig<{\n maxEntries?: number\n maxEntrySize?: number\n namespace?: string\n schema: StorageArchivistConfigSchema\n type?: 'local' | 'session' | 'page'\n}>\n\nexport type StorageArchivistParams = ArchivistParams<AnyConfigSchema<StorageArchivistConfig>>\nexport class StorageArchivist<\n TParams extends StorageArchivistParams = StorageArchivistParams,\n TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,\n>\n extends AbstractArchivist<TParams, TEventData>\n implements ArchivistInstance {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, StorageArchivistConfigSchema]\n static override readonly defaultConfigSchema: Schema = StorageArchivistConfigSchema\n\n private _privateStorage: StoreBase | undefined\n private _storage: StoreBase | undefined\n\n get maxEntries() {\n return this.config?.maxEntries ?? 1000\n }\n\n get maxEntrySize() {\n return this.config?.maxEntrySize ?? 16_000\n }\n\n get namespace() {\n return this.config?.namespace ?? 'xyo-archivist'\n }\n\n override get queries(): string[] {\n return [\n ArchivistAllQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistInsertQuerySchema,\n ArchivistCommitQuerySchema,\n ...super.queries,\n ]\n }\n\n get type() {\n return this.config?.type ?? 'local'\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get privateStorage(): StoreBase {\n this._privateStorage = this._storage ?? storeTypes[this.type].namespace(`${this.namespace}|private`)\n return this._privateStorage\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get storage(): StoreBase {\n this._storage = this._storage ?? 
storeTypes[this.type].namespace(this.namespace)\n return this._storage\n }\n\n protected override allHandler(): PromisableArray<WithStorageMeta<Payload>> {\n const found = new Set<string>()\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n return Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n })\n .sort((a, b) => a._timestamp - b._timestamp)\n .map(payload => PayloadBuilder.omitStorageMeta(payload))\n }\n\n protected override clearHandler(): void | Promise<void> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n this.storage.clear()\n return this.emit('cleared', { mod: this })\n }\n\n protected override async commitHandler(): Promise<BoundWitness[]> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n const payloads = await this.all()\n assertEx(payloads.length > 0, () => 'Nothing to commit')\n const settled = (await Promise.allSettled(\n Object.values((await this.parentArchivists()).commit ?? [])?.map(async (parent) => {\n const queryPayload: ArchivistInsertQuery = { schema: ArchivistInsertQuerySchema }\n const query = await this.bindQuery(queryPayload, payloads)\n return (await parent?.query(query[0], query[1]))?.[0]\n }),\n )).filter(exists)\n // TODO - rather than clear, delete the payloads that come back as successfully inserted\n await this.clear()\n return (settled.filter(fulfilled).map(result => result.value)).filter(exists)\n }\n\n protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {\n return (\n await Promise.all(\n hashes.map((hash) => {\n this.storage.remove(hash)\n return hash\n }),\n )\n ).filter(exists)\n }\n\n protected getFromCursor(\n order: 'asc' | 'desc' = 'asc',\n limit: number = 10,\n cursor?: Hex,\n ): WithStorageMeta[] {\n const payloads: WithStorageMeta[] = Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .sort((a, b) => {\n return order === 'asc' ? a._sequence - b._sequence : b._sequence - a._sequence\n })\n const index = payloads.findIndex(payload => payload._sequence === cursor)\n if (index !== -1) {\n return payloads.slice(index + 1, index + 1 + limit)\n }\n return payloads.slice(0, limit)\n }\n\n protected override getHandler(hashes: string[]): Promisable<WithStorageMeta<Payload>[]> {\n const found = new Set<string>()\n return (\n hashes.map((hash) => {\n return this.storage.get(hash)\n })\n ).filter(exists)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n }).map(payload => PayloadBuilder.omitStorageMeta(payload))\n }\n\n protected override async insertHandler(payloads: Payload[]): Promise<WithStorageMeta<Payload>[]> {\n return await Promise.all(payloads.map(async (payload) => {\n const storagePayload = await PayloadBuilder.addSequencedStorageMeta(payload)\n const value = JSON.stringify(storagePayload)\n console.log('insert.storagePayloads:', storagePayload)\n assertEx(value.length < this.maxEntrySize, () => `Payload too large [${storagePayload._hash}, ${value.length}]`)\n this.storage.set(storagePayload._hash, storagePayload)\n this.storage.set(storagePayload._dataHash, storagePayload)\n return storagePayload\n }))\n }\n\n protected override nextHandler(options?: ArchivistNextOptions): Promisable<WithStorageMeta<Payload>[]> {\n const {\n limit, cursor, order,\n } = options ?? 
{}\n return this.getFromCursor(order, limit ?? 10, cursor)\n }\n\n protected override async startHandler() {\n await super.startHandler()\n return true\n }\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,cAAc;AAGvB,SAASC,iBAAiB;AAC1B,SAASC,yBAAyB;AASlC,SACEC,yBACAC,2BACAC,4BACAC,4BACAC,kCACK;AAGP,SAASC,sBAAsB;AAK/B,OAAOC,WAAW;AAElB,IAAMC,aAAaC;AAGZ,IAAMC,+BAA6D;AAWnE,IAAMC,mBAAN,cAIGC,kBAAAA;EAhDV,OAgDUA;;;EAER,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeH;;EAC5E,OAAyBI,sBAA8BJ;EAE/CK;EACAC;EAER,IAAIC,aAAa;AACf,WAAO,KAAKC,QAAQD,cAAc;EACpC;EAEA,IAAIE,eAAe;AACjB,WAAO,KAAKD,QAAQC,gBAAgB;EACtC;EAEA,IAAIC,YAAY;AACd,WAAO,KAAKF,QAAQE,aAAa;EACnC;EAEA,IAAaC,UAAoB;AAC/B,WAAO;MACLC;MACAC;MACAC;MACAC;MACAC;SACG,MAAML;;EAEb;EAEA,IAAIM,OAAO;AACT,WAAO,KAAKT,QAAQS,QAAQ;EAC9B;;EAGA,IAAYC,iBAA4B;AACtC,SAAKb,kBAAkB,KAAKC,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,GAAG,KAAKA,SAAS,UAAU;AACnG,WAAO,KAAKL;EACd;;EAGA,IAAYc,UAAqB;AAC/B,SAAKb,WAAW,KAAKA,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,KAAKA,SAAS;AAC/E,WAAO,KAAKJ;EACd;EAEmBc,aAAwD;AACzE,UAAMC,QAAQ,oBAAIC,IAAAA;AAClB,SAAKC,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,WAAOC,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACtCC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBC,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EACCE,KAAK,CAACC,GAAGC,MAAMD,EAAEE,aAAaD,EAAEC,UAAU,EAC1CV,IAAIG,CAAAA,YAAWQ,eAAeC,gBAAgBT,OAAAA,CAAAA;EACnD;EAEmBU,eAAqC;AACtD,SAAKnB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,SAAKN,QAAQwB,MAAK;AAClB,WAAO,KAAKC,KAAK,WAAW;MAAEC,KAAK;IAAK,CAAA;EAC1C;EAEA,MAAyBC,gBAAyC;AAChE,SAAKvB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,UAAMsB,WAAW,MAAM,KAAKC,IAAG;AAC/BC,aAASF,SAAStB,SAAS,GAAG,MAAM,mBAAA;AACpC,UAAMyB,WAAW,MAAMC,QAAQC,WAC7B1B,OAAO2B,QAAQ,MAAM,KAAKC,iBAAgB,GAAIC,UAAU,CAAA,CAAE,GAAG1B,IAAI,OAAO2B,WAAAA;AACtE,YAAMC,eAAqC;QAAEC,QAAQ3C;MAA2B;AAChF,YAAM4C,QAAQ,MAAM,KAAKC,UAAUH,cAAcV,QAAAA;AACjD,cAAQ,MAAMS,QAAQG,MAAMA,MAAM,CAAA,GAAIA,MAAM,CAAA,CAAE,KAAK,CAAA;IACrD,CAAA,CAAA,GACC5B,OAAO8B,MAAAA;AAEV,UAAM,KAAKlB,MAAK;AAChB,WAAQO,QAAQnB,OAAO+B,SAAAA,EAAWjC,IAAIkC,CAAAA,WAAUA,OAAOjC,KAAK,EAAGC,OAAO8B,MAAAA;EACxE;EAEA,MAAyBG,cAAcC,QAAiC;AACtE,YACE,MAAMd,QAAQH,IACZiB,OAAOpC,IAAI,CAACqC,SAAAA;AACV,WAAK/C,QAAQgD,OAAOD,IAAAA;AACpB,aAAOA;IACT,CAAA,CAAA,GAEFnC,OAAO8B,MAAAA;EACX;EAEUO,cACRC,QAAwB,OACxBC,QAAgB,IAChBC,QACmB;AACnB,UAAMxB,WAA8BrB,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACnEC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBM,KAAK,CAACC,GAAGC,MAAAA;AACR,aAAO+B,UAAU,QAAQhC,EAAEmC,YAAYlC,EAAEkC,YAAYlC,EAAEkC,YAAYnC,EAAEmC;IACvE,CAAA;AACF,UAAMC,QAAQ1B,SAAS2B,UAAU1C,CAAAA,YAAWA,QAAQwC,cAAcD,MAAAA;AAClE,QAAIE,UAAU,IAAI;AAChB,aAAO1B,SAAS4B,MAAMF,QAAQ,GAAGA,QAAQ,IAAIH,KAAAA;IAC/C;AACA,WAAOvB,SAAS4B,MAAM,GAAGL,KAAAA;EAC3B;EAEmBM,WAAWX,QAA0D;AACtF,UAAM5C,QAAQ,oBAAIC,IAAAA;AAClB,WACE2C,OAAOpC,IAAI,CAACqC,SAAAA;AACV,aAAO,KAAK/C,QAAQ0D,IAAIX,IAAAA;IAC1B,CAAA,EACAnC,OAAO8B,MAAAA,EACN9B,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EAAGL,IAAIG,CAAAA,YAAWQ,eAAeC,gBAAgBT,OAAAA,CAAAA;EACrD;EAEA,MAAyB8C,cAAc/B,UAA0D;AAC/F,WAAO,MAAMI,QAAQH,IAAID,SAASlB,IAAI,OAAOG,YAAAA;AAC3C,YAAM+C,iBAAiB,MAAMvC,eAAewC,wBAAwBhD,OAAAA;AACpE,YAAMF,QAAQmD,KAAKC,UAAUH,cAAAA;AAC7BI,cAAQ3D,IAAI,2BAA2BuD,cAAAA;AACvC9B,eAASnB,MAAML,SAAS,KAAKhB,cAAc,MAAM,sBAAsBsE,eAAeK,KAAK,KAAKtD,MAAML,MAAM,GAAG;AAC/G,WAAKN,QAAQkE,IAAIN,eAAeK,OAAOL,cAAAA;AACvC,WAAK5D,QAAQkE,IAAIN,eAAe7C,WAAW6C,cAAAA;AAC3C,aAAOA;IACT,CAAA,CAAA;EACF;EAEmBO,YAAYC,SAAwE;AACrG,UAAM,EACJjB,OAAOC,QAAQF,MAAK,IAC
lBkB,WAAW,CAAC;AAChB,WAAO,KAAKnB,cAAcC,OAAOC,SAAS,IAAIC,MAAAA;EAChD;EAEA,MAAyBiB,eAAe;AACtC,UAAM,MAAMA,aAAAA;AACZ,WAAO;EACT;AACF;","names":["assertEx","exists","fulfilled","AbstractArchivist","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistCommitQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","PayloadBuilder","store","storeTypes","store","StorageArchivistConfigSchema","StorageArchivist","AbstractArchivist","configSchemas","defaultConfigSchema","_privateStorage","_storage","maxEntries","config","maxEntrySize","namespace","queries","ArchivistAllQuerySchema","ArchivistDeleteQuerySchema","ArchivistClearQuerySchema","ArchivistInsertQuerySchema","ArchivistCommitQuerySchema","type","privateStorage","storage","allHandler","found","Set","logger","log","length","Object","entries","getAll","map","value","filter","payload","has","_dataHash","add","sort","a","b","_timestamp","PayloadBuilder","omitStorageMeta","clearHandler","clear","emit","mod","commitHandler","payloads","all","assertEx","settled","Promise","allSettled","values","parentArchivists","commit","parent","queryPayload","schema","query","bindQuery","exists","fulfilled","result","deleteHandler","hashes","hash","remove","getFromCursor","order","limit","cursor","_sequence","index","findIndex","slice","getHandler","get","insertHandler","storagePayload","addSequencedStorageMeta","JSON","stringify","console","_hash","set","nextHandler","options","startHandler"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@xyo-network/archivist-storage",
-  "version": "3.6.0-rc.1",
+  "version": "3.6.0-rc.3",
   "description": "Primary SDK for using XYO Protocol 2.0",
   "homepage": "https://xyo.network",
   "bugs": {
@@ -32,14 +32,13 @@
     "@xylabs/assert": "^4.4.12",
     "@xylabs/exists": "^4.4.12",
     "@xylabs/hex": "^4.4.12",
-    "@xylabs/object": "^4.4.12",
     "@xylabs/promise": "^4.4.12",
-    "@xyo-network/archivist-abstract": "^3.6.0-rc.
-    "@xyo-network/archivist-model": "^3.6.0-rc.
-    "@xyo-network/boundwitness-model": "^3.6.0-rc.
-    "@xyo-network/module-model": "^3.6.0-rc.
-    "@xyo-network/payload-builder": "^3.6.0-rc.
-    "@xyo-network/payload-model": "^3.6.0-rc.
+    "@xyo-network/archivist-abstract": "^3.6.0-rc.3",
+    "@xyo-network/archivist-model": "^3.6.0-rc.3",
+    "@xyo-network/boundwitness-model": "^3.6.0-rc.3",
+    "@xyo-network/module-model": "^3.6.0-rc.3",
+    "@xyo-network/payload-builder": "^3.6.0-rc.3",
+    "@xyo-network/payload-model": "^3.6.0-rc.3",
     "store2": "^2.14.3"
   },
   "devDependencies": {
@@ -47,12 +46,12 @@
     "@xylabs/ts-scripts-yarn3": "^4.2.4",
     "@xylabs/tsconfig": "^4.2.4",
     "@xylabs/vitest-extended": "^4.4.12",
-    "@xyo-network/account": "^3.6.0-rc.
-    "@xyo-network/archivist-memory": "^3.6.0-rc.
-    "@xyo-network/boundwitness-wrapper": "^3.6.0-rc.
-    "@xyo-network/id-payload-plugin": "^3.6.0-rc.
-    "@xyo-network/node-memory": "^3.6.0-rc.
-    "@xyo-network/payload-wrapper": "^3.6.0-rc.
+    "@xyo-network/account": "^3.6.0-rc.3",
+    "@xyo-network/archivist-memory": "^3.6.0-rc.3",
+    "@xyo-network/boundwitness-wrapper": "^3.6.0-rc.3",
+    "@xyo-network/id-payload-plugin": "^3.6.0-rc.3",
+    "@xyo-network/node-memory": "^3.6.0-rc.3",
+    "@xyo-network/payload-wrapper": "^3.6.0-rc.3",
     "typescript": "^5.7.2",
     "vitest": "^2.1.8"
   },
package/src/StorageArchivist.ts CHANGED
@@ -1,9 +1,8 @@
 import { assertEx } from '@xylabs/assert'
 import { exists } from '@xylabs/exists'
-import type { Hash } from '@xylabs/hex'
+import type { Hash, Hex } from '@xylabs/hex'
 import type { Promisable, PromisableArray } from '@xylabs/promise'
 import { fulfilled } from '@xylabs/promise'
-import type { WithStorageMeta } from '@xyo-network/archivist-abstract'
 import { AbstractArchivist } from '@xyo-network/archivist-abstract'
 import type {
   ArchivistConfig,
@@ -23,7 +22,9 @@ import {
 import type { BoundWitness } from '@xyo-network/boundwitness-model'
 import type { AnyConfigSchema } from '@xyo-network/module-model'
 import { PayloadBuilder } from '@xyo-network/payload-builder'
-import type { Payload, Schema } from '@xyo-network/payload-model'
+import type {
+  Payload, Schema, WithStorageMeta,
+} from '@xyo-network/payload-model'
 import type { StoreBase, StoreType } from 'store2'
 import store from 'store2'
 
@@ -92,7 +93,7 @@ export class StorageArchivist<
     return this._storage
   }
 
-  protected override allHandler(): PromisableArray<Payload> {
+  protected override allHandler(): PromisableArray<WithStorageMeta<Payload>> {
     const found = new Set<string>()
     this.logger?.log(`this.storage.length: ${this.storage.length}`)
     return Object.entries(this.storage.getAll())
@@ -142,36 +143,24 @@ export class StorageArchivist<
     ).filter(exists)
   }
 
-  protected getFromOffset(
+  protected getFromCursor(
     order: 'asc' | 'desc' = 'asc',
     limit: number = 10,
-    offset?: Hash,
+    cursor?: Hex,
   ): WithStorageMeta[] {
-    const offsetHash = offset ? (this.storage.get(offset) as WithStorageMeta | undefined)?._dataHash : undefined
-    const found = new Set<string>()
     const payloads: WithStorageMeta[] = Object.entries(this.storage.getAll())
       .map(([, value]) => value)
-      .filter((payload) => {
-        if (found.has(payload._dataHash)) {
-          return false
-        } else {
-          found.add(payload._dataHash)
-          return true
-        }
-      })
       .sort((a, b) => {
-        return order === 'asc' ? a._timestamp - b._timestamp : b._timestamp - a._timestamp
+        return order === 'asc' ? a._sequence - b._sequence : b._sequence - a._sequence
       })
-    if (offsetHash) {
-      const index = payloads.findIndex(payload => payload._dataHash === offsetHash)
-      if (index !== -1) {
-        return payloads.slice(index + 1, index + 1 + limit)
-      }
+    const index = payloads.findIndex(payload => payload._sequence === cursor)
+    if (index !== -1) {
+      return payloads.slice(index + 1, index + 1 + limit)
     }
     return payloads.slice(0, limit)
   }
 
-  protected override getHandler(hashes: string[]): Promisable<Payload[]> {
+  protected override getHandler(hashes: string[]): Promisable<WithStorageMeta<Payload>[]> {
     const found = new Set<string>()
     return (
       hashes.map((hash) => {
@@ -188,23 +177,23 @@ export class StorageArchivist<
       }).map(payload => PayloadBuilder.omitStorageMeta(payload))
   }
 
-  protected override async insertHandler(payloads: Payload[]): Promise<Payload[]> {
-    return await Promise.all(payloads.map(async (payload, index) => {
-      const storagePayload = await StorageArchivist.addSequencedStorageMeta(payload, index)
+  protected override async insertHandler(payloads: Payload[]): Promise<WithStorageMeta<Payload>[]> {
+    return await Promise.all(payloads.map(async (payload) => {
+      const storagePayload = await PayloadBuilder.addSequencedStorageMeta(payload)
       const value = JSON.stringify(storagePayload)
       console.log('insert.storagePayloads:', storagePayload)
       assertEx(value.length < this.maxEntrySize, () => `Payload too large [${storagePayload._hash}, ${value.length}]`)
       this.storage.set(storagePayload._hash, storagePayload)
      this.storage.set(storagePayload._dataHash, storagePayload)
-      return payload
+      return storagePayload
     }))
   }
 
-  protected override nextHandler(options?: ArchivistNextOptions): Promisable<Payload[]> {
+  protected override nextHandler(options?: ArchivistNextOptions): Promisable<WithStorageMeta<Payload>[]> {
     const {
-      limit, offset, order,
+      limit, cursor, order,
     } = options ?? {}
-    return this.getFromOffset(order, limit ?? 10, offset)
+    return this.getFromCursor(order, limit ?? 10, cursor)
   }
 
   protected override async startHandler() {