@xyo-network/archivist-storage 3.5.2 → 3.6.0-rc.1
@@ -1,13 +1,11 @@
import type { Hash } from '@xylabs/hex';
import type { Promisable, PromisableArray } from '@xylabs/promise';
+import type { WithStorageMeta } from '@xyo-network/archivist-abstract';
import { AbstractArchivist } from '@xyo-network/archivist-abstract';
import type { ArchivistConfig, ArchivistInstance, ArchivistModuleEventData, ArchivistNextOptions, ArchivistParams } from '@xyo-network/archivist-model';
import type { BoundWitness } from '@xyo-network/boundwitness-model';
import type { AnyConfigSchema } from '@xyo-network/module-model';
-import type { Payload,
-type WithStorageMeta<T extends Payload = Payload> = WithMeta<T> & {
-_timestamp: number;
-};
+import type { Payload, Schema } from '@xyo-network/payload-model';
export type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config';
export declare const StorageArchivistConfigSchema: StorageArchivistConfigSchema;
export type StorageArchivistConfig = ArchivistConfig<{
@@ -30,17 +28,14 @@ export declare class StorageArchivist<TParams extends StorageArchivistParams = S
get type(): "local" | "session" | "page";
private get privateStorage();
private get storage();
-protected allHandler(): PromisableArray<
+protected allHandler(): PromisableArray<Payload>;
protected clearHandler(): void | Promise<void>;
-protected commitHandler(): Promise<
+protected commitHandler(): Promise<BoundWitness[]>;
protected deleteHandler(hashes: Hash[]): Promise<Hash[]>;
protected getFromOffset(order?: 'asc' | 'desc', limit?: number, offset?: Hash): WithStorageMeta[];
-protected getHandler(hashes: string[]): Promisable<
-protected insertHandler(payloads: Payload[]): Promise<
-protected nextHandler(options?: ArchivistNextOptions): Promisable<
+protected getHandler(hashes: string[]): Promisable<Payload[]>;
+protected insertHandler(payloads: Payload[]): Promise<Payload[]>;
+protected nextHandler(options?: ArchivistNextOptions): Promisable<Payload[]>;
protected startHandler(): Promise<boolean>;
-private addStorageMeta;
-private removeStorageMeta;
}
-export {};
//# sourceMappingURL=StorageArchivist.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"StorageArchivist.d.ts","sourceRoot":"","sources":["../../src/StorageArchivist.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AACvC,OAAO,KAAK,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAA;AAElE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,KAAK,EACV,eAAe,EAEf,iBAAiB,EACjB,wBAAwB,EACxB,oBAAoB,EACpB,eAAe,EAChB,MAAM,8BAA8B,CAAA;AAQrC,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAA;AAEhE,OAAO,KAAK,
+
{"version":3,"file":"StorageArchivist.d.ts","sourceRoot":"","sources":["../../src/StorageArchivist.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AACvC,OAAO,KAAK,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAA;AAElE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,iCAAiC,CAAA;AACtE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,KAAK,EACV,eAAe,EAEf,iBAAiB,EACjB,wBAAwB,EACxB,oBAAoB,EACpB,eAAe,EAChB,MAAM,8BAA8B,CAAA;AAQrC,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAA;AAEhE,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,4BAA4B,CAAA;AAMjE,MAAM,MAAM,4BAA4B,GAAG,sCAAsC,CAAA;AACjF,eAAO,MAAM,4BAA4B,EAAE,4BAAqE,CAAA;AAEhH,MAAM,MAAM,sBAAsB,GAAG,eAAe,CAAC;IACnD,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,MAAM,EAAE,4BAA4B,CAAA;IACpC,IAAI,CAAC,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,CAAA;CACpC,CAAC,CAAA;AAEF,MAAM,MAAM,sBAAsB,GAAG,eAAe,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAA;AAC7F,qBAAa,gBAAgB,CAC3B,OAAO,SAAS,sBAAsB,GAAG,sBAAsB,EAC/D,UAAU,SAAS,wBAAwB,GAAG,wBAAwB,CAEtE,SAAQ,iBAAiB,CAAC,OAAO,EAAE,UAAU,CAC7C,YAAW,iBAAiB;IAC5B,gBAAyB,aAAa,EAAE,MAAM,EAAE,CAAyD;IACzG,gBAAyB,mBAAmB,EAAE,MAAM,CAA+B;IAEnF,OAAO,CAAC,eAAe,CAAuB;IAC9C,OAAO,CAAC,QAAQ,CAAuB;IAEvC,IAAI,UAAU,WAEb;IAED,IAAI,YAAY,WAEf;IAED,IAAI,SAAS,WAEZ;IAED,IAAa,OAAO,IAAI,MAAM,EAAE,CAS/B;IAED,IAAI,IAAI,iCAEP;IAGD,OAAO,KAAK,cAAc,GAGzB;IAGD,OAAO,KAAK,OAAO,GAGlB;cAEkB,UAAU,IAAI,eAAe,CAAC,OAAO,CAAC;cAiBtC,YAAY,IAAI,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;cAM9B,aAAa,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;cAgBxC,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;IAWvE,SAAS,CAAC,aAAa,CACrB,KAAK,GAAE,KAAK,GAAG,MAAc,EAC7B,KAAK,GAAE,MAAW,EAClB,MAAM,CAAC,EAAE,IAAI,GACZ,eAAe,EAAE;cAyBD,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,UAAU,CAAC,OAAO,EAAE,CAAC;cAiB7C,aAAa,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;cAY5D,WAAW,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,UAAU,CAAC,OAAO,EAAE,CAAC;cAO5D,YAAY;CAItC"}
package/dist/neutral/index.mjs CHANGED
@@ -11,7 +11,7 @@ import { PayloadBuilder } from "@xyo-network/payload-builder";
import store from "store2";
var storeTypes = store;
var StorageArchivistConfigSchema = "network.xyo.archivist.storage.config";
-var StorageArchivist = class extends AbstractArchivist {
+var StorageArchivist = class _StorageArchivist extends AbstractArchivist {
static {
__name(this, "StorageArchivist");
}
@@ -58,13 +58,13 @@ var StorageArchivist = class extends AbstractArchivist {
const found = /* @__PURE__ */ new Set();
this.logger?.log(`this.storage.length: ${this.storage.length}`);
return Object.entries(this.storage.getAll()).map(([, value]) => value).filter((payload) => {
-if (found.has(payload
+if (found.has(payload._dataHash)) {
return false;
} else {
-found.add(payload
+found.add(payload._dataHash);
return true;
}
-}).sort((a, b) => a._timestamp - b._timestamp).map((payload) =>
+}).sort((a, b) => a._timestamp - b._timestamp).map((payload) => PayloadBuilder.omitStorageMeta(payload));
}
clearHandler() {
this.logger?.log(`this.storage.length: ${this.storage.length}`);
@@ -94,20 +94,20 @@ var StorageArchivist = class extends AbstractArchivist {
}))).filter(exists);
}
getFromOffset(order = "asc", limit = 10, offset) {
-const offsetHash = offset ? this.storage.get(offset)
+const offsetHash = offset ? this.storage.get(offset)?._dataHash : void 0;
const found = /* @__PURE__ */ new Set();
const payloads = Object.entries(this.storage.getAll()).map(([, value]) => value).filter((payload) => {
-if (found.has(payload
+if (found.has(payload._dataHash)) {
return false;
} else {
-found.add(payload
+found.add(payload._dataHash);
return true;
}
}).sort((a, b) => {
return order === "asc" ? a._timestamp - b._timestamp : b._timestamp - a._timestamp;
});
if (offsetHash) {
-const index = payloads.findIndex((payload) => payload
+const index = payloads.findIndex((payload) => payload._dataHash === offsetHash);
if (index !== -1) {
return payloads.slice(index + 1, index + 1 + limit);
}
@@ -119,26 +119,24 @@ var StorageArchivist = class extends AbstractArchivist {
return hashes.map((hash) => {
return this.storage.get(hash);
}).filter(exists).filter((payload) => {
-if (found.has(payload
+if (found.has(payload._dataHash)) {
return false;
} else {
-found.add(payload
+found.add(payload._dataHash);
return true;
}
-}).map((payload) =>
+}).map((payload) => PayloadBuilder.omitStorageMeta(payload));
}
async insertHandler(payloads) {
-
-
-return pairs.map(([payload, hash]) => {
-const storagePayload = this.addStorageMeta(payload, timestamp++);
+return await Promise.all(payloads.map(async (payload, index) => {
+const storagePayload = await _StorageArchivist.addSequencedStorageMeta(payload, index);
const value = JSON.stringify(storagePayload);
console.log("insert.storagePayloads:", storagePayload);
-assertEx(value.length < this.maxEntrySize, () => `Payload too large [${
-this.storage.set(
-this.storage.set(
+assertEx(value.length < this.maxEntrySize, () => `Payload too large [${storagePayload._hash}, ${value.length}]`);
+this.storage.set(storagePayload._hash, storagePayload);
+this.storage.set(storagePayload._dataHash, storagePayload);
return payload;
-});
+}));
}
nextHandler(options) {
const { limit, offset, order } = options ?? {};
@@ -148,16 +146,6 @@ var StorageArchivist = class extends AbstractArchivist {
await super.startHandler();
return true;
}
-addStorageMeta(payload, _timestamp) {
-return {
-...payload,
-_timestamp
-};
-}
-removeStorageMeta(payload) {
-const { _timestamp, ...rest } = payload;
-return rest;
-}
};
export {
StorageArchivist,
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/StorageArchivist.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { exists } from '@xylabs/exists'\nimport type { Hash } from '@xylabs/hex'\nimport type { Promisable, PromisableArray } from '@xylabs/promise'\nimport { fulfilled } from '@xylabs/promise'\nimport { AbstractArchivist } from '@xyo-network/archivist-abstract'\nimport type {\n ArchivistConfig,\n ArchivistInsertQuery,\n ArchivistInstance,\n ArchivistModuleEventData,\n ArchivistNextOptions,\n ArchivistParams,\n} from '@xyo-network/archivist-model'\nimport {\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistCommitQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n} from '@xyo-network/archivist-model'\nimport type { BoundWitness } from '@xyo-network/boundwitness-model'\nimport type { AnyConfigSchema } from '@xyo-network/module-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport type {\n Payload, PayloadWithMeta, Schema, WithMeta,\n} from '@xyo-network/payload-model'\nimport type { StoreBase, StoreType } from 'store2'\nimport store from 'store2'\n\nconst storeTypes = store as unknown as StoreType\n\ntype WithStorageMeta<T extends Payload = Payload> = WithMeta<T> & { _timestamp: number }\n\nexport type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\nexport const StorageArchivistConfigSchema: StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\n\nexport type StorageArchivistConfig = ArchivistConfig<{\n maxEntries?: number\n maxEntrySize?: number\n namespace?: string\n schema: StorageArchivistConfigSchema\n type?: 'local' | 'session' | 'page'\n}>\n\nexport type StorageArchivistParams = ArchivistParams<AnyConfigSchema<StorageArchivistConfig>>\nexport class StorageArchivist<\n TParams extends StorageArchivistParams = StorageArchivistParams,\n TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,\n>\n extends AbstractArchivist<TParams, TEventData>\n implements ArchivistInstance {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, StorageArchivistConfigSchema]\n static override readonly defaultConfigSchema: Schema = StorageArchivistConfigSchema\n\n private _privateStorage: StoreBase | undefined\n private _storage: StoreBase | undefined\n\n get maxEntries() {\n return this.config?.maxEntries ?? 1000\n }\n\n get maxEntrySize() {\n return this.config?.maxEntrySize ?? 16_000\n }\n\n get namespace() {\n return this.config?.namespace ?? 'xyo-archivist'\n }\n\n override get queries(): string[] {\n return [\n ArchivistAllQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistInsertQuerySchema,\n ArchivistCommitQuerySchema,\n ...super.queries,\n ]\n }\n\n get type() {\n return this.config?.type ?? 'local'\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get privateStorage(): StoreBase {\n this._privateStorage = this._storage ?? storeTypes[this.type].namespace(`${this.namespace}|private`)\n return this._privateStorage\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get storage(): StoreBase {\n this._storage = this._storage ?? 
storeTypes[this.type].namespace(this.namespace)\n return this._storage\n }\n\n protected override allHandler(): PromisableArray<PayloadWithMeta> {\n const found = new Set<string>()\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n return Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .filter((payload) => {\n if (found.has(payload.$hash)) {\n return false\n } else {\n found.add(payload.$hash)\n return true\n }\n })\n .sort((a, b) => a._timestamp - b._timestamp)\n .map(payload => this.removeStorageMeta(payload))\n }\n\n protected override clearHandler(): void | Promise<void> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n this.storage.clear()\n return this.emit('cleared', { mod: this })\n }\n\n protected override async commitHandler(): Promise<WithMeta<BoundWitness>[]> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n const payloads = await this.all()\n assertEx(payloads.length > 0, () => 'Nothing to commit')\n const settled = (await Promise.allSettled(\n Object.values((await this.parentArchivists()).commit ?? [])?.map(async (parent) => {\n const queryPayload: ArchivistInsertQuery = { schema: ArchivistInsertQuerySchema }\n const query = await this.bindQuery(queryPayload, payloads)\n return (await parent?.query(query[0], query[1]))?.[0]\n }),\n )).filter(exists)\n // TODO - rather than clear, delete the payloads that come back as successfully inserted\n await this.clear()\n return (settled.filter(fulfilled).map(result => result.value)).filter(exists)\n }\n\n protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {\n return (\n await Promise.all(\n hashes.map((hash) => {\n this.storage.remove(hash)\n return hash\n }),\n )\n ).filter(exists)\n }\n\n protected getFromOffset(\n order: 'asc' | 'desc' = 'asc',\n limit: number = 10,\n offset?: Hash,\n ): WithStorageMeta[] {\n const offsetHash = offset ? (this.storage.get(offset) as PayloadWithMeta | undefined)?.$hash : undefined\n const found = new Set<string>()\n const payloads: WithStorageMeta[] = Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .filter((payload) => {\n if (found.has(payload.$hash)) {\n return false\n } else {\n found.add(payload.$hash)\n return true\n }\n })\n .sort((a, b) => {\n return order === 'asc' ? 
a._timestamp - b._timestamp : b._timestamp - a._timestamp\n })\n if (offsetHash) {\n const index = payloads.findIndex(payload => payload.$hash === offsetHash)\n if (index !== -1) {\n return payloads.slice(index + 1, index + 1 + limit)\n }\n }\n return payloads.slice(0, limit)\n }\n\n protected override getHandler(hashes: string[]): Promisable<PayloadWithMeta[]> {\n const found = new Set<string>()\n return (\n hashes.map((hash) => {\n return this.storage.get(hash)\n })\n ).filter(exists)\n .filter((payload) => {\n if (found.has(payload.$hash)) {\n return false\n } else {\n found.add(payload.$hash)\n return true\n }\n }).map(payload => this.removeStorageMeta(payload))\n }\n\n protected override async insertHandler(payloads: Payload[]): Promise<PayloadWithMeta[]> {\n let timestamp = Date.now()\n const pairs = await PayloadBuilder.hashPairs(payloads)\n return pairs.map(([payload, hash]) => {\n const storagePayload = this.addStorageMeta(payload, timestamp++)\n const value = JSON.stringify(storagePayload)\n console.log('insert.storagePayloads:', storagePayload)\n assertEx(value.length < this.maxEntrySize, () => `Payload too large [${hash}, ${value.length}]`)\n this.storage.set(hash, storagePayload)\n this.storage.set(payload.$hash, storagePayload)\n return payload\n })\n }\n\n protected override nextHandler(options?: ArchivistNextOptions): Promisable<PayloadWithMeta[]> {\n const {\n limit, offset, order,\n } = options ?? {}\n return this.getFromOffset(order, limit ?? 10, offset)\n }\n\n protected override async startHandler() {\n await super.startHandler()\n return true\n }\n\n private addStorageMeta<T extends Payload = Payload>(payload: WithMeta<T>, _timestamp: number): WithStorageMeta<T> {\n return { ...payload, _timestamp }\n }\n\n private removeStorageMeta<T extends Payload = Payload>(payload: WithStorageMeta<T>): WithMeta<T> {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const { _timestamp, ...rest } = payload\n return rest as WithMeta<T>\n 
}\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,cAAc;AAGvB,SAASC,iBAAiB;AAC1B,SAASC,yBAAyB;AASlC,SACEC,yBACAC,2BACAC,4BACAC,4BACAC,kCACK;AAGP,SAASC,sBAAsB;AAK/B,OAAOC,WAAW;AAElB,IAAMC,aAAaC;AAKZ,IAAMC,+BAA6D;AAWnE,IAAMC,mBAAN,cAIGC,kBAAAA;EAlDV,OAkDUA;;;EAER,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeH;;EAC5E,OAAyBI,sBAA8BJ;EAE/CK;EACAC;EAER,IAAIC,aAAa;AACf,WAAO,KAAKC,QAAQD,cAAc;EACpC;EAEA,IAAIE,eAAe;AACjB,WAAO,KAAKD,QAAQC,gBAAgB;EACtC;EAEA,IAAIC,YAAY;AACd,WAAO,KAAKF,QAAQE,aAAa;EACnC;EAEA,IAAaC,UAAoB;AAC/B,WAAO;MACLC;MACAC;MACAC;MACAC;MACAC;SACG,MAAML;;EAEb;EAEA,IAAIM,OAAO;AACT,WAAO,KAAKT,QAAQS,QAAQ;EAC9B;;EAGA,IAAYC,iBAA4B;AACtC,SAAKb,kBAAkB,KAAKC,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,GAAG,KAAKA,SAAS,UAAU;AACnG,WAAO,KAAKL;EACd;;EAGA,IAAYc,UAAqB;AAC/B,SAAKb,WAAW,KAAKA,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,KAAKA,SAAS;AAC/E,WAAO,KAAKJ;EACd;EAEmBc,aAA+C;AAChE,UAAMC,QAAQ,oBAAIC,IAAAA;AAClB,SAAKC,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,WAAOC,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACtCC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBC,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,KAAK,GAAG;AAC5B,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,KAAK;AACvB,eAAO;MACT;IACF,CAAA,EACCE,KAAK,CAACC,GAAGC,MAAMD,EAAEE,aAAaD,EAAEC,UAAU,EAC1CV,IAAIG,CAAAA,YAAW,KAAKQ,kBAAkBR,OAAAA,CAAAA;EAC3C;EAEmBS,eAAqC;AACtD,SAAKlB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,SAAKN,QAAQuB,MAAK;AAClB,WAAO,KAAKC,KAAK,WAAW;MAAEC,KAAK;IAAK,CAAA;EAC1C;EAEA,MAAyBC,gBAAmD;AAC1E,SAAKtB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,UAAMqB,WAAW,MAAM,KAAKC,IAAG;AAC/BC,aAASF,SAASrB,SAAS,GAAG,MAAM,mBAAA;AACpC,UAAMwB,WAAW,MAAMC,QAAQC,WAC7BzB,OAAO0B,QAAQ,MAAM,KAAKC,iBAAgB,GAAIC,UAAU,CAAA,CAAE,GAAGzB,IAAI,OAAO0B,WAAAA;AACtE,YAAMC,eAAqC;QAAEC,QAAQ1C;MAA2B;AAChF,YAAM2C,QAAQ,MAAM,KAAKC,UAAUH,cAAcV,QAAAA;AACjD,cAAQ,MAAMS,QAAQG,MAAMA,MAAM,CAAA,GAAIA,MAAM,CAAA,CAAE,KAAK,CAAA;IACrD,CAAA,CAAA,GACC3B,OAAO6B,MAAAA;AAEV,UAAM,KAAKlB,MAAK;AAChB,WAAQO,QAAQlB,OAAO8B,SAAAA,EAAWhC,IAAIiC,CAAAA,WAAUA,OAAOhC,KAAK,EAAGC,OAAO6B,MAAAA;EACxE;EAEA,MAAyBG,cAAcC,QAAiC;AACtE,YACE,MAAMd,QAAQH,IACZiB,OAAOnC,IAAI,CAACoC,SAAAA;AACV,WAAK9C,QAAQ+C,OAAOD,IAAAA;AACpB,aAAOA;IACT,CAAA,CAAA,GAEFlC,OAAO6B,MAAAA;EACX;EAEUO,cACRC,QAAwB,OACxBC,QAAgB,IAChBC,QACmB;AACnB,UAAMC,aAAaD,SAAU,KAAKnD,QAAQqD,IAAIF,MAAAA,GAAyCpC,QAAQuC;AAC/F,UAAMpD,QAAQ,oBAAIC,IAAAA;AAClB,UAAMwB,WAA8BpB,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACnEC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBC,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,KAAK,GAAG;AAC5B,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,KAAK;AACvB,eAAO;MACT;IACF,CAAA,EACCE,KAAK,CAACC,GAAGC,MAAAA;AACR,aAAO8B,UAAU,QAAQ/B,EAAEE,aAAaD,EAAEC,aAAaD,EAAEC,aAAaF,EAAEE;IAC1E,CAAA;AACF,QAAIgC,YAAY;AACd,YAAMG,QAAQ5B,SAAS6B,UAAU3C,CAAAA,YAAWA,QAAQE,UAAUqC,UAAAA;AAC9D,UAAIG,UAAU,IAAI;AAChB,eAAO5B,SAAS8B,MAAMF,QAAQ,GAAGA,QAAQ,IAAIL,KAAAA;MAC/C;IACF;AACA,WAAOvB,SAAS8B,MAAM,GAAGP,KAAAA;EAC3B;EAEmBQ,WAAWb,QAAiD;AAC7E,UAAM3C,QAAQ,oBAAIC,IAAAA;AAClB,WACE0C,OAAOnC,IAAI,CAACoC,SAAAA;AACV,aAAO,KAAK9C,QAAQqD,IAAIP,IAAAA;IAC1B,CAAA,EACAlC,OAAO6B,MAAAA,EACN7B,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,KAAK,GAAG;AAC5B,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,KAAK;AACvB,eAAO;MACT;IACF,CAAA,EAAGL,IAAIG,CAAAA,YAAW,KAAKQ,kBAAkBR,OAAAA,CAAAA;EAC7C;EAEA,MAAyB8C,cAAchC,UAAiD;AACtF,QAAIiC,YAAYC,KAAKC,IAAG;AACxB,UAAMC,QAAQ,MAAMC,eAAeC,UAAUtC,QAAAA;AAC7C,WAAOoC,MAAMrD,IAAI,CAAC,CAACG,SAASiC,IAAAA,MAAK;AAC/B,YAAMoB,iBAAiB,KAAKC,eAAetD,SAAS+C,WAAAA;AACpD,YAAMjD,QAAQyD,KAAKC,UAAUH,cAAAA;AAC7BI,cAAQjE,IAAI,2BAA2B6D,cAAAA;AACvCrC,eAASlB,MAAML,SAAS,KAAKhB,cAAc,MAAM,sBAAsBwD,IAAAA,KAASnC,MAAML,MAAM,GAAG;AAC/F,WAAKN,QAAQuE,I
AAIzB,MAAMoB,cAAAA;AACvB,WAAKlE,QAAQuE,IAAI1D,QAAQE,OAAOmD,cAAAA;AAChC,aAAOrD;IACT,CAAA;EACF;EAEmB2D,YAAYC,SAA+D;AAC5F,UAAM,EACJvB,OAAOC,QAAQF,MAAK,IAClBwB,WAAW,CAAC;AAChB,WAAO,KAAKzB,cAAcC,OAAOC,SAAS,IAAIC,MAAAA;EAChD;EAEA,MAAyBuB,eAAe;AACtC,UAAM,MAAMA,aAAAA;AACZ,WAAO;EACT;EAEQP,eAA4CtD,SAAsBO,YAAwC;AAChH,WAAO;MAAE,GAAGP;MAASO;IAAW;EAClC;EAEQC,kBAA+CR,SAA0C;AAE/F,UAAM,EAAEO,YAAY,GAAGuD,KAAAA,IAAS9D;AAChC,WAAO8D;EACT;AACF;","names":["assertEx","exists","fulfilled","AbstractArchivist","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistCommitQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","PayloadBuilder","store","storeTypes","store","StorageArchivistConfigSchema","StorageArchivist","AbstractArchivist","configSchemas","defaultConfigSchema","_privateStorage","_storage","maxEntries","config","maxEntrySize","namespace","queries","ArchivistAllQuerySchema","ArchivistDeleteQuerySchema","ArchivistClearQuerySchema","ArchivistInsertQuerySchema","ArchivistCommitQuerySchema","type","privateStorage","storage","allHandler","found","Set","logger","log","length","Object","entries","getAll","map","value","filter","payload","has","$hash","add","sort","a","b","_timestamp","removeStorageMeta","clearHandler","clear","emit","mod","commitHandler","payloads","all","assertEx","settled","Promise","allSettled","values","parentArchivists","commit","parent","queryPayload","schema","query","bindQuery","exists","fulfilled","result","deleteHandler","hashes","hash","remove","getFromOffset","order","limit","offset","offsetHash","get","undefined","index","findIndex","slice","getHandler","insertHandler","timestamp","Date","now","pairs","PayloadBuilder","hashPairs","storagePayload","addStorageMeta","JSON","stringify","console","set","nextHandler","options","startHandler","rest"]}
+
{"version":3,"sources":["../../src/StorageArchivist.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { exists } from '@xylabs/exists'\nimport type { Hash } from '@xylabs/hex'\nimport type { Promisable, PromisableArray } from '@xylabs/promise'\nimport { fulfilled } from '@xylabs/promise'\nimport type { WithStorageMeta } from '@xyo-network/archivist-abstract'\nimport { AbstractArchivist } from '@xyo-network/archivist-abstract'\nimport type {\n ArchivistConfig,\n ArchivistInsertQuery,\n ArchivistInstance,\n ArchivistModuleEventData,\n ArchivistNextOptions,\n ArchivistParams,\n} from '@xyo-network/archivist-model'\nimport {\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistCommitQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuerySchema,\n} from '@xyo-network/archivist-model'\nimport type { BoundWitness } from '@xyo-network/boundwitness-model'\nimport type { AnyConfigSchema } from '@xyo-network/module-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport type { Payload, Schema } from '@xyo-network/payload-model'\nimport type { StoreBase, StoreType } from 'store2'\nimport store from 'store2'\n\nconst storeTypes = store as unknown as StoreType\n\nexport type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\nexport const StorageArchivistConfigSchema: StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'\n\nexport type StorageArchivistConfig = ArchivistConfig<{\n maxEntries?: number\n maxEntrySize?: number\n namespace?: string\n schema: StorageArchivistConfigSchema\n type?: 'local' | 'session' | 'page'\n}>\n\nexport type StorageArchivistParams = ArchivistParams<AnyConfigSchema<StorageArchivistConfig>>\nexport class StorageArchivist<\n TParams extends StorageArchivistParams = StorageArchivistParams,\n TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,\n>\n extends AbstractArchivist<TParams, TEventData>\n implements ArchivistInstance {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, StorageArchivistConfigSchema]\n static override readonly defaultConfigSchema: Schema = StorageArchivistConfigSchema\n\n private _privateStorage: StoreBase | undefined\n private _storage: StoreBase | undefined\n\n get maxEntries() {\n return this.config?.maxEntries ?? 1000\n }\n\n get maxEntrySize() {\n return this.config?.maxEntrySize ?? 16_000\n }\n\n get namespace() {\n return this.config?.namespace ?? 'xyo-archivist'\n }\n\n override get queries(): string[] {\n return [\n ArchivistAllQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistInsertQuerySchema,\n ArchivistCommitQuerySchema,\n ...super.queries,\n ]\n }\n\n get type() {\n return this.config?.type ?? 'local'\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get privateStorage(): StoreBase {\n this._privateStorage = this._storage ?? storeTypes[this.type].namespace(`${this.namespace}|private`)\n return this._privateStorage\n }\n\n /* This has to be a getter so that it can access it during construction */\n private get storage(): StoreBase {\n this._storage = this._storage ?? 
storeTypes[this.type].namespace(this.namespace)\n return this._storage\n }\n\n protected override allHandler(): PromisableArray<Payload> {\n const found = new Set<string>()\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n return Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n })\n .sort((a, b) => a._timestamp - b._timestamp)\n .map(payload => PayloadBuilder.omitStorageMeta(payload))\n }\n\n protected override clearHandler(): void | Promise<void> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n this.storage.clear()\n return this.emit('cleared', { mod: this })\n }\n\n protected override async commitHandler(): Promise<BoundWitness[]> {\n this.logger?.log(`this.storage.length: ${this.storage.length}`)\n const payloads = await this.all()\n assertEx(payloads.length > 0, () => 'Nothing to commit')\n const settled = (await Promise.allSettled(\n Object.values((await this.parentArchivists()).commit ?? [])?.map(async (parent) => {\n const queryPayload: ArchivistInsertQuery = { schema: ArchivistInsertQuerySchema }\n const query = await this.bindQuery(queryPayload, payloads)\n return (await parent?.query(query[0], query[1]))?.[0]\n }),\n )).filter(exists)\n // TODO - rather than clear, delete the payloads that come back as successfully inserted\n await this.clear()\n return (settled.filter(fulfilled).map(result => result.value)).filter(exists)\n }\n\n protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {\n return (\n await Promise.all(\n hashes.map((hash) => {\n this.storage.remove(hash)\n return hash\n }),\n )\n ).filter(exists)\n }\n\n protected getFromOffset(\n order: 'asc' | 'desc' = 'asc',\n limit: number = 10,\n offset?: Hash,\n ): WithStorageMeta[] {\n const offsetHash = offset ? (this.storage.get(offset) as WithStorageMeta | undefined)?._dataHash : undefined\n const found = new Set<string>()\n const payloads: WithStorageMeta[] = Object.entries(this.storage.getAll())\n .map(([, value]) => value)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n })\n .sort((a, b) => {\n return order === 'asc' ? 
a._timestamp - b._timestamp : b._timestamp - a._timestamp\n })\n if (offsetHash) {\n const index = payloads.findIndex(payload => payload._dataHash === offsetHash)\n if (index !== -1) {\n return payloads.slice(index + 1, index + 1 + limit)\n }\n }\n return payloads.slice(0, limit)\n }\n\n protected override getHandler(hashes: string[]): Promisable<Payload[]> {\n const found = new Set<string>()\n return (\n hashes.map((hash) => {\n return this.storage.get(hash)\n })\n ).filter(exists)\n .filter((payload) => {\n if (found.has(payload._dataHash)) {\n return false\n } else {\n found.add(payload._dataHash)\n return true\n }\n }).map(payload => PayloadBuilder.omitStorageMeta(payload))\n }\n\n protected override async insertHandler(payloads: Payload[]): Promise<Payload[]> {\n return await Promise.all(payloads.map(async (payload, index) => {\n const storagePayload = await StorageArchivist.addSequencedStorageMeta(payload, index)\n const value = JSON.stringify(storagePayload)\n console.log('insert.storagePayloads:', storagePayload)\n assertEx(value.length < this.maxEntrySize, () => `Payload too large [${storagePayload._hash}, ${value.length}]`)\n this.storage.set(storagePayload._hash, storagePayload)\n this.storage.set(storagePayload._dataHash, storagePayload)\n return payload\n }))\n }\n\n protected override nextHandler(options?: ArchivistNextOptions): Promisable<Payload[]> {\n const {\n limit, offset, order,\n } = options ?? {}\n return this.getFromOffset(order, limit ?? 10, offset)\n }\n\n protected override async startHandler() {\n await super.startHandler()\n return true\n }\n}\n"],"mappings":";;;;AAAA,SAASA,gBAAgB;AACzB,SAASC,cAAc;AAGvB,SAASC,iBAAiB;AAE1B,SAASC,yBAAyB;AASlC,SACEC,yBACAC,2BACAC,4BACAC,4BACAC,kCACK;AAGP,SAASC,sBAAsB;AAG/B,OAAOC,WAAW;AAElB,IAAMC,aAAaC;AAGZ,IAAMC,+BAA6D;AAWnE,IAAMC,mBAAN,MAAMA,0BAIHC,kBAAAA;EA/CV,OA+CUA;;;EAER,OAAyBC,gBAA0B;OAAI,MAAMA;IAAeH;;EAC5E,OAAyBI,sBAA8BJ;EAE/CK;EACAC;EAER,IAAIC,aAAa;AACf,WAAO,KAAKC,QAAQD,cAAc;EACpC;EAEA,IAAIE,eAAe;AACjB,WAAO,KAAKD,QAAQC,gBAAgB;EACtC;EAEA,IAAIC,YAAY;AACd,WAAO,KAAKF,QAAQE,aAAa;EACnC;EAEA,IAAaC,UAAoB;AAC/B,WAAO;MACLC;MACAC;MACAC;MACAC;MACAC;SACG,MAAML;;EAEb;EAEA,IAAIM,OAAO;AACT,WAAO,KAAKT,QAAQS,QAAQ;EAC9B;;EAGA,IAAYC,iBAA4B;AACtC,SAAKb,kBAAkB,KAAKC,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,GAAG,KAAKA,SAAS,UAAU;AACnG,WAAO,KAAKL;EACd;;EAGA,IAAYc,UAAqB;AAC/B,SAAKb,WAAW,KAAKA,YAAYR,WAAW,KAAKmB,IAAI,EAAEP,UAAU,KAAKA,SAAS;AAC/E,WAAO,KAAKJ;EACd;EAEmBc,aAAuC;AACxD,UAAMC,QAAQ,oBAAIC,IAAAA;AAClB,SAAKC,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,WAAOC,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACtCC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBC,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EACCE,KAAK,CAACC,GAAGC,MAAMD,EAAEE,aAAaD,EAAEC,UAAU,EAC1CV,IAAIG,CAAAA,YAAWQ,eAAeC,gBAAgBT,OAAAA,CAAAA;EACnD;EAEmBU,eAAqC;AACtD,SAAKnB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,SAAKN,QAAQwB,MAAK;AAClB,WAAO,KAAKC,KAAK,WAAW;MAAEC,KAAK;IAAK,CAAA;EAC1C;EAEA,MAAyBC,gBAAyC;AAChE,SAAKvB,QAAQC,IAAI,wBAAwB,KAAKL,QAAQM,MAAM,EAAE;AAC9D,UAAMsB,WAAW,MAAM,KAAKC,IAAG;AAC/BC,aAASF,SAAStB,SAAS,GAAG,MAAM,mBAAA;AACpC,UAAMyB,WAAW,MAAMC,QAAQC,WAC7B1B,OAAO2B,QAAQ,MAAM,KAAKC,iBAAgB,GAAIC,UAAU,CAAA,CAAE,GAAG1B,IAAI,OAAO2B,WAAAA;AACtE,YAAMC,eAAqC;QAAEC,QAAQ3C;MAA2B;AAChF,YAAM4C,QAAQ,MAAM,KAAKC,UAAUH,cAAcV,QAAAA;AACjD,cAAQ,MAAMS,QAAQG,MAAMA,MAAM,CAAA,GAAIA,MAAM,CAAA,CAAE,KAAK,CAAA;IACrD,CAAA,CAAA,GACC5B,OAAO8B,MAAAA;AAEV,UAAM,KAAKlB,MAAK;AAChB,WAAQO,QAAQnB,OAAO+B,SAAAA,EAAWjC,IAAIkC,CAAAA,WAAUA,OAAOjC,KAA
K,EAAGC,OAAO8B,MAAAA;EACxE;EAEA,MAAyBG,cAAcC,QAAiC;AACtE,YACE,MAAMd,QAAQH,IACZiB,OAAOpC,IAAI,CAACqC,SAAAA;AACV,WAAK/C,QAAQgD,OAAOD,IAAAA;AACpB,aAAOA;IACT,CAAA,CAAA,GAEFnC,OAAO8B,MAAAA;EACX;EAEUO,cACRC,QAAwB,OACxBC,QAAgB,IAChBC,QACmB;AACnB,UAAMC,aAAaD,SAAU,KAAKpD,QAAQsD,IAAIF,MAAAA,GAAyCrC,YAAYwC;AACnG,UAAMrD,QAAQ,oBAAIC,IAAAA;AAClB,UAAMyB,WAA8BrB,OAAOC,QAAQ,KAAKR,QAAQS,OAAM,CAAA,EACnEC,IAAI,CAAC,CAAA,EAAGC,KAAAA,MAAWA,KAAAA,EACnBC,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EACCE,KAAK,CAACC,GAAGC,MAAAA;AACR,aAAO+B,UAAU,QAAQhC,EAAEE,aAAaD,EAAEC,aAAaD,EAAEC,aAAaF,EAAEE;IAC1E,CAAA;AACF,QAAIiC,YAAY;AACd,YAAMG,QAAQ5B,SAAS6B,UAAU5C,CAAAA,YAAWA,QAAQE,cAAcsC,UAAAA;AAClE,UAAIG,UAAU,IAAI;AAChB,eAAO5B,SAAS8B,MAAMF,QAAQ,GAAGA,QAAQ,IAAIL,KAAAA;MAC/C;IACF;AACA,WAAOvB,SAAS8B,MAAM,GAAGP,KAAAA;EAC3B;EAEmBQ,WAAWb,QAAyC;AACrE,UAAM5C,QAAQ,oBAAIC,IAAAA;AAClB,WACE2C,OAAOpC,IAAI,CAACqC,SAAAA;AACV,aAAO,KAAK/C,QAAQsD,IAAIP,IAAAA;IAC1B,CAAA,EACAnC,OAAO8B,MAAAA,EACN9B,OAAO,CAACC,YAAAA;AACP,UAAIX,MAAMY,IAAID,QAAQE,SAAS,GAAG;AAChC,eAAO;MACT,OAAO;AACLb,cAAMc,IAAIH,QAAQE,SAAS;AAC3B,eAAO;MACT;IACF,CAAA,EAAGL,IAAIG,CAAAA,YAAWQ,eAAeC,gBAAgBT,OAAAA,CAAAA;EACrD;EAEA,MAAyB+C,cAAchC,UAAyC;AAC9E,WAAO,MAAMI,QAAQH,IAAID,SAASlB,IAAI,OAAOG,SAAS2C,UAAAA;AACpD,YAAMK,iBAAiB,MAAM/E,kBAAiBgF,wBAAwBjD,SAAS2C,KAAAA;AAC/E,YAAM7C,QAAQoD,KAAKC,UAAUH,cAAAA;AAC7BI,cAAQ5D,IAAI,2BAA2BwD,cAAAA;AACvC/B,eAASnB,MAAML,SAAS,KAAKhB,cAAc,MAAM,sBAAsBuE,eAAeK,KAAK,KAAKvD,MAAML,MAAM,GAAG;AAC/G,WAAKN,QAAQmE,IAAIN,eAAeK,OAAOL,cAAAA;AACvC,WAAK7D,QAAQmE,IAAIN,eAAe9C,WAAW8C,cAAAA;AAC3C,aAAOhD;IACT,CAAA,CAAA;EACF;EAEmBuD,YAAYC,SAAuD;AACpF,UAAM,EACJlB,OAAOC,QAAQF,MAAK,IAClBmB,WAAW,CAAC;AAChB,WAAO,KAAKpB,cAAcC,OAAOC,SAAS,IAAIC,MAAAA;EAChD;EAEA,MAAyBkB,eAAe;AACtC,UAAM,MAAMA,aAAAA;AACZ,WAAO;EACT;AACF;","names":["assertEx","exists","fulfilled","AbstractArchivist","ArchivistAllQuerySchema","ArchivistClearQuerySchema","ArchivistCommitQuerySchema","ArchivistDeleteQuerySchema","ArchivistInsertQuerySchema","PayloadBuilder","store","storeTypes","store","StorageArchivistConfigSchema","StorageArchivist","AbstractArchivist","configSchemas","defaultConfigSchema","_privateStorage","_storage","maxEntries","config","maxEntrySize","namespace","queries","ArchivistAllQuerySchema","ArchivistDeleteQuerySchema","ArchivistClearQuerySchema","ArchivistInsertQuerySchema","ArchivistCommitQuerySchema","type","privateStorage","storage","allHandler","found","Set","logger","log","length","Object","entries","getAll","map","value","filter","payload","has","_dataHash","add","sort","a","b","_timestamp","PayloadBuilder","omitStorageMeta","clearHandler","clear","emit","mod","commitHandler","payloads","all","assertEx","settled","Promise","allSettled","values","parentArchivists","commit","parent","queryPayload","schema","query","bindQuery","exists","fulfilled","result","deleteHandler","hashes","hash","remove","getFromOffset","order","limit","offset","offsetHash","get","undefined","index","findIndex","slice","getHandler","insertHandler","storagePayload","addSequencedStorageMeta","JSON","stringify","console","_hash","set","nextHandler","options","startHandler"]}
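In the compiled module above, insertHandler now builds storage meta with StorageArchivist.addSequencedStorageMeta and writes every record under both its _hash and _dataHash keys, so either hash resolves to the same entry. A minimal standalone sketch of that dual-key indexing idea, using a plain Map and a hypothetical put helper rather than the package's implementation:

type Payload = { schema: string } & Record<string, unknown>
type StoredPayload = Payload & { _hash: string; _dataHash: string; _timestamp: number }

const index = new Map<string, StoredPayload>()

// Store one record under two keys, mirroring the two storage.set calls above.
function put(payload: Payload, meta: { _hash: string; _dataHash: string; _timestamp: number }): StoredPayload {
  const stored: StoredPayload = { ...payload, ...meta }
  index.set(stored._hash, stored) // full (meta-inclusive) hash
  index.set(stored._dataHash, stored) // data-only hash
  return stored
}

put({ schema: 'network.xyo.id', salt: '1' }, { _hash: 'hash-full-1', _dataHash: 'hash-data-1', _timestamp: Date.now() })
console.log(index.get('hash-full-1') === index.get('hash-data-1')) // true: either key finds the record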
package/package.json CHANGED
@@ -1,6 +1,6 @@
{
"name": "@xyo-network/archivist-storage",
-"version": "3.
+"version": "3.6.0-rc.1",
"description": "Primary SDK for using XYO Protocol 2.0",
"homepage": "https://xyo.network",
"bugs": {
@@ -29,33 +29,35 @@
"module": "dist/neutral/index.mjs",
"types": "dist/neutral/index.d.ts",
"dependencies": {
-"@xylabs/assert": "^4.4.
-"@xylabs/exists": "^4.4.
-"@xylabs/hex": "^4.4.
-"@xylabs/
-"@
-"@xyo-network/archivist-
-"@xyo-network/
-"@xyo-network/
-"@xyo-network/
-"@xyo-network/payload-
+"@xylabs/assert": "^4.4.12",
+"@xylabs/exists": "^4.4.12",
+"@xylabs/hex": "^4.4.12",
+"@xylabs/object": "^4.4.12",
+"@xylabs/promise": "^4.4.12",
+"@xyo-network/archivist-abstract": "^3.6.0-rc.1",
+"@xyo-network/archivist-model": "^3.6.0-rc.1",
+"@xyo-network/boundwitness-model": "^3.6.0-rc.1",
+"@xyo-network/module-model": "^3.6.0-rc.1",
+"@xyo-network/payload-builder": "^3.6.0-rc.1",
+"@xyo-network/payload-model": "^3.6.0-rc.1",
"store2": "^2.14.3"
},
"devDependencies": {
-"@xylabs/delay": "^4.4.
+"@xylabs/delay": "^4.4.12",
"@xylabs/ts-scripts-yarn3": "^4.2.4",
"@xylabs/tsconfig": "^4.2.4",
-"@xylabs/vitest-extended": "^4.4.
-"@xyo-network/account": "^3.
-"@xyo-network/archivist-memory": "^3.
-"@xyo-network/boundwitness-wrapper": "^3.
-"@xyo-network/id-payload-plugin": "^3.
-"@xyo-network/node-memory": "^3.
-"@xyo-network/payload-wrapper": "^3.
+"@xylabs/vitest-extended": "^4.4.12",
+"@xyo-network/account": "^3.6.0-rc.1",
+"@xyo-network/archivist-memory": "^3.6.0-rc.1",
+"@xyo-network/boundwitness-wrapper": "^3.6.0-rc.1",
+"@xyo-network/id-payload-plugin": "^3.6.0-rc.1",
+"@xyo-network/node-memory": "^3.6.0-rc.1",
+"@xyo-network/payload-wrapper": "^3.6.0-rc.1",
"typescript": "^5.7.2",
-"vitest": "^2.1.
+"vitest": "^2.1.8"
},
"publishConfig": {
"access": "public"
-}
+},
+"stableVersion": "3.5.2"
}
package/src/StorageArchivist.ts CHANGED
@@ -3,6 +3,7 @@ import { exists } from '@xylabs/exists'
import type { Hash } from '@xylabs/hex'
import type { Promisable, PromisableArray } from '@xylabs/promise'
import { fulfilled } from '@xylabs/promise'
+import type { WithStorageMeta } from '@xyo-network/archivist-abstract'
import { AbstractArchivist } from '@xyo-network/archivist-abstract'
import type {
ArchivistConfig,
@@ -22,16 +23,12 @@ import {
import type { BoundWitness } from '@xyo-network/boundwitness-model'
import type { AnyConfigSchema } from '@xyo-network/module-model'
import { PayloadBuilder } from '@xyo-network/payload-builder'
-import type {
-Payload, PayloadWithMeta, Schema, WithMeta,
-} from '@xyo-network/payload-model'
+import type { Payload, Schema } from '@xyo-network/payload-model'
import type { StoreBase, StoreType } from 'store2'
import store from 'store2'

const storeTypes = store as unknown as StoreType

-type WithStorageMeta<T extends Payload = Payload> = WithMeta<T> & { _timestamp: number }
-
export type StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'
export const StorageArchivistConfigSchema: StorageArchivistConfigSchema = 'network.xyo.archivist.storage.config'

@@ -95,21 +92,21 @@ export class StorageArchivist<
return this._storage
}

-protected override allHandler(): PromisableArray<
+protected override allHandler(): PromisableArray<Payload> {
const found = new Set<string>()
this.logger?.log(`this.storage.length: ${this.storage.length}`)
return Object.entries(this.storage.getAll())
.map(([, value]) => value)
.filter((payload) => {
-if (found.has(payload
+if (found.has(payload._dataHash)) {
return false
} else {
-found.add(payload
+found.add(payload._dataHash)
return true
}
})
.sort((a, b) => a._timestamp - b._timestamp)
-.map(payload =>
+.map(payload => PayloadBuilder.omitStorageMeta(payload))
}

protected override clearHandler(): void | Promise<void> {
@@ -118,7 +115,7 @@ export class StorageArchivist<
return this.emit('cleared', { mod: this })
}

-protected override async commitHandler(): Promise<
+protected override async commitHandler(): Promise<BoundWitness[]> {
this.logger?.log(`this.storage.length: ${this.storage.length}`)
const payloads = await this.all()
assertEx(payloads.length > 0, () => 'Nothing to commit')
@@ -150,15 +147,15 @@ export class StorageArchivist<
limit: number = 10,
offset?: Hash,
): WithStorageMeta[] {
-const offsetHash = offset ? (this.storage.get(offset) as
+const offsetHash = offset ? (this.storage.get(offset) as WithStorageMeta | undefined)?._dataHash : undefined
const found = new Set<string>()
const payloads: WithStorageMeta[] = Object.entries(this.storage.getAll())
.map(([, value]) => value)
.filter((payload) => {
-if (found.has(payload
+if (found.has(payload._dataHash)) {
return false
} else {
-found.add(payload
+found.add(payload._dataHash)
return true
}
})
@@ -166,7 +163,7 @@ export class StorageArchivist<
return order === 'asc' ? a._timestamp - b._timestamp : b._timestamp - a._timestamp
})
if (offsetHash) {
-const index = payloads.findIndex(payload => payload
+const index = payloads.findIndex(payload => payload._dataHash === offsetHash)
if (index !== -1) {
return payloads.slice(index + 1, index + 1 + limit)
}
@@ -174,7 +171,7 @@ export class StorageArchivist<
return payloads.slice(0, limit)
}

-protected override getHandler(hashes: string[]): Promisable<
+protected override getHandler(hashes: string[]): Promisable<Payload[]> {
const found = new Set<string>()
return (
hashes.map((hash) => {
@@ -182,30 +179,28 @@ export class StorageArchivist<
})
).filter(exists)
.filter((payload) => {
-if (found.has(payload
+if (found.has(payload._dataHash)) {
return false
} else {
-found.add(payload
+found.add(payload._dataHash)
return true
}
-}).map(payload =>
+}).map(payload => PayloadBuilder.omitStorageMeta(payload))
}

-protected override async insertHandler(payloads: Payload[]): Promise<
-
-
-return pairs.map(([payload, hash]) => {
-const storagePayload = this.addStorageMeta(payload, timestamp++)
+protected override async insertHandler(payloads: Payload[]): Promise<Payload[]> {
+return await Promise.all(payloads.map(async (payload, index) => {
+const storagePayload = await StorageArchivist.addSequencedStorageMeta(payload, index)
const value = JSON.stringify(storagePayload)
console.log('insert.storagePayloads:', storagePayload)
-assertEx(value.length < this.maxEntrySize, () => `Payload too large [${
-this.storage.set(
-this.storage.set(
+assertEx(value.length < this.maxEntrySize, () => `Payload too large [${storagePayload._hash}, ${value.length}]`)
+this.storage.set(storagePayload._hash, storagePayload)
+this.storage.set(storagePayload._dataHash, storagePayload)
return payload
-})
+}))
}

-protected override nextHandler(options?: ArchivistNextOptions): Promisable<
+protected override nextHandler(options?: ArchivistNextOptions): Promisable<Payload[]> {
const {
limit, offset, order,
} = options ?? {}
@@ -216,14 +211,4 @@ export class StorageArchivist<
await super.startHandler()
return true
}
-
-private addStorageMeta<T extends Payload = Payload>(payload: WithMeta<T>, _timestamp: number): WithStorageMeta<T> {
-return { ...payload, _timestamp }
-}
-
-private removeStorageMeta<T extends Payload = Payload>(payload: WithStorageMeta<T>): WithMeta<T> {
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-const { _timestamp, ...rest } = payload
-return rest as WithMeta<T>
-}
}