@xyo-network/archivist-leveldb 3.6.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +165 -0
- package/README.md +13 -0
- package/dist/browser/Archivist.d.ts +46 -0
- package/dist/browser/Archivist.d.ts.map +1 -0
- package/dist/browser/Config.d.ts +24 -0
- package/dist/browser/Config.d.ts.map +1 -0
- package/dist/browser/Params.d.ts +5 -0
- package/dist/browser/Params.d.ts.map +1 -0
- package/dist/browser/Schema.d.ts +3 -0
- package/dist/browser/Schema.d.ts.map +1 -0
- package/dist/browser/index.d.ts +5 -0
- package/dist/browser/index.d.ts.map +1 -0
- package/dist/browser/index.mjs +261 -0
- package/dist/browser/index.mjs.map +1 -0
- package/package.json +61 -0
- package/src/Archivist.ts +290 -0
- package/src/Config.ts +26 -0
- package/src/Params.ts +8 -0
- package/src/Schema.ts +2 -0
- package/src/index.ts +4 -0
- package/typedoc.json +5 -0
- package/xy.config.ts +10 -0
package/LICENSE
ADDED
@@ -0,0 +1,165 @@
                   GNU LESSER GENERAL PUBLIC LICENSE
                       Version 3, 29 June 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.


  This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.

  0. Additional Definitions.

  As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.

  "The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.

  An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.

  A "Combined Work" is a work produced by combining or linking an
Application with the Library.  The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".

  The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.

  The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.

  1. Exception to Section 3 of the GNU GPL.

  You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.

  2. Conveying Modified Versions.

  If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:

   a) under this License, provided that you make a good faith effort to
   ensure that, in the event an Application does not supply the
   function or data, the facility still operates, and performs
   whatever part of its purpose remains meaningful, or

   b) under the GNU GPL, with none of the additional permissions of
   this License applicable to that copy.

  3. Object Code Incorporating Material from Library Header Files.

  The object code form of an Application may incorporate material from
a header file that is part of the Library.  You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:

   a) Give prominent notice with each copy of the object code that the
   Library is used in it and that the Library and its use are
   covered by this License.

   b) Accompany the object code with a copy of the GNU GPL and this license
   document.

  4. Combined Works.

  You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:

   a) Give prominent notice with each copy of the Combined Work that
   the Library is used in it and that the Library and its use are
   covered by this License.

   b) Accompany the Combined Work with a copy of the GNU GPL and this license
   document.

   c) For a Combined Work that displays copyright notices during
   execution, include the copyright notice for the Library among
   these notices, as well as a reference directing the user to the
   copies of the GNU GPL and this license document.

   d) Do one of the following:

       0) Convey the Minimal Corresponding Source under the terms of this
       License, and the Corresponding Application Code in a form
       suitable for, and under terms that permit, the user to
       recombine or relink the Application with a modified version of
       the Linked Version to produce a modified Combined Work, in the
       manner specified by section 6 of the GNU GPL for conveying
       Corresponding Source.

       1) Use a suitable shared library mechanism for linking with the
       Library.  A suitable mechanism is one that (a) uses at run time
       a copy of the Library already present on the user's computer
       system, and (b) will operate properly with a modified version
       of the Library that is interface-compatible with the Linked
       Version.

   e) Provide Installation Information, but only if you would otherwise
   be required to provide such information under section 6 of the
   GNU GPL, and only to the extent that such information is
   necessary to install and execute a modified version of the
   Combined Work produced by recombining or relinking the
   Application with a modified version of the Linked Version.  (If
   you use option 4d0, the Installation Information must accompany
   the Minimal Corresponding Source and Corresponding Application
   Code.  If you use option 4d1, you must provide the Installation
   Information in the manner specified by section 6 of the GNU GPL
   for conveying Corresponding Source.)

  5. Combined Libraries.

  You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:

   a) Accompany the combined library with a copy of the same work based
   on the Library, uncombined with any other library facilities,
   conveyed under the terms of this License.

   b) Give prominent notice with the combined library that part of it
   is a work based on the Library, and explaining where to find the
   accompanying uncombined form of the same work.

  6. Revised Versions of the GNU Lesser General Public License.

  The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.

  Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.

  If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.

package/README.md
ADDED
@@ -0,0 +1,13 @@
[![logo][]](https://xyo.network)

Part of [sdk-xyo-client-js](https://www.npmjs.com/package/@xyo-network/sdk-xyo-client-js)

## License

> See the [LICENSE](LICENSE) file for license details

## Credits

[Made with 🔥 and ❄️ by XYO](https://xyo.network)

[logo]: https://cdn.xy.company/img/brand/XYO_full_colored.png

package/dist/browser/Archivist.d.ts
ADDED
@@ -0,0 +1,46 @@
import { Hash, Hex } from '@xylabs/hex';
import { Promisable } from '@xylabs/promise';
import { AbstractArchivist } from '@xyo-network/archivist-abstract';
import { ArchivistModuleEventData, ArchivistNextOptions } from '@xyo-network/archivist-model';
import { BoundWitness } from '@xyo-network/boundwitness-model';
import { Payload, Schema, WithStorageMeta } from '@xyo-network/payload-model';
import { AbstractLevel, AbstractSublevel } from 'abstract-level';
import { LevelDbArchivistParams } from './Params.ts';
/** Note: We have indexes as top level sublevels since making them a sublevel of a store, getting all the values of that store includes the sublevels */
export interface PayloadStore {
    [s: string]: WithStorageMeta;
}
export type AbstractPayloadLevel = AbstractLevel<string | Buffer | Uint8Array, Hash, WithStorageMeta<Payload>>;
export type AbstractPayloadSubLevel = AbstractSublevel<AbstractPayloadLevel, string | Buffer | Uint8Array, Hash, WithStorageMeta<Payload>>;
export type AbstractIndexSubLevel<T> = AbstractSublevel<AbstractPayloadLevel, string | Buffer | Uint8Array, T, Hash>;
export declare abstract class AbstractLevelDbArchivist<TParams extends LevelDbArchivistParams = LevelDbArchivistParams, TEventData extends ArchivistModuleEventData = ArchivistModuleEventData> extends AbstractArchivist<TParams, TEventData> {
    static readonly configSchemas: Schema[];
    static readonly defaultConfigSchema: Schema;
    private static readonly dataHashIndex;
    private static readonly sequenceIndex;
    static readonly dataHashIndexName: string;
    static readonly sequenceIndexName: string;
    get dbName(): string;
    get folderPath(): string;
    get location(): string;
    get queries(): string[];
    get storeName(): string;
    private static findIndexFromCursor;
    protected allHandler(): Promise<WithStorageMeta<Payload>[]>;
    protected clearHandler(): Promise<void>;
    protected commitHandler(): Promise<BoundWitness[]>;
    protected deleteHandler(hashes: Hash[]): Promise<Hash[]>;
    protected getHandler(hashes: Hash[]): Promise<WithStorageMeta<Payload>[]>;
    protected insertHandler(payloads: WithStorageMeta<Payload>[]): Promise<WithStorageMeta<Payload>[]>;
    protected nextHandler(options?: ArchivistNextOptions): Promise<WithStorageMeta<Payload>[]>;
    protected startHandler(): Promise<boolean>;
    protected withDataHashIndex<T>(func: (index: AbstractIndexSubLevel<string>) => Promisable<T>): Promisable<T>;
    protected withSequenceIndex<T>(func: (index: AbstractIndexSubLevel<Hex>) => Promisable<T>): Promisable<T>;
    protected withStore<T>(func: (store: AbstractPayloadSubLevel) => Promisable<T>): Promise<T>;
    protected abstract withDb<T>(func: (db: AbstractPayloadLevel) => Promisable<T>): Promisable<T>;
}
export declare class LevelDbArchivist extends AbstractLevelDbArchivist {
    private dbMutex;
    protected withDb<T>(func: (db: AbstractPayloadLevel) => Promisable<T>): Promise<T>;
}
//# sourceMappingURL=Archivist.d.ts.map

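For orientation, a minimal usage sketch against the API declared above. This sketch is not part of the package: it assumes the standard XYO creatable-module `create` factory and the public `insert`/`get` wrappers that `AbstractArchivist` provides around the protected handlers, and the account choice, paths, and payload are hypothetical.

```ts
import { LevelDbArchivist, LevelDbArchivistConfigSchema } from '@xyo-network/archivist-leveldb'

const archivist = await LevelDbArchivist.create({
  account: 'random', // assumption: an ephemeral account is acceptable for a local store
  config: {
    schema: LevelDbArchivistConfigSchema,
    location: '/tmp/xyo', // hypothetical parent folder for the db
    dbName: 'archivist.db', // hypothetical db file name
    storeName: 'payloads', // hypothetical sub-level holding the payloads
  },
})

// insert returns payloads with storage meta (_hash, _dataHash, _sequence) attached
const inserted = await archivist.insert([{ salt: 'example', schema: 'network.xyo.id' }])
const fetched = await archivist.get(inserted.map(p => p._hash))
```
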
package/dist/browser/Archivist.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"Archivist.d.ts","sourceRoot":"","sources":["../../src/Archivist.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,IAAI,EAAE,GAAG,EAAE,MAAM,aAAa,CAAA;AACvC,OAAO,EAAa,UAAU,EAAE,MAAM,iBAAiB,CAAA;AACvD,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAA;AACnE,OAAO,EAOL,wBAAwB,EACxB,oBAAoB,EAIrB,MAAM,8BAA8B,CAAA;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,iCAAiC,CAAA;AAG9D,OAAO,EACL,OAAO,EAAE,MAAM,EAAE,eAAe,EACjC,MAAM,4BAA4B,CAAA;AACnC,OAAO,EACmB,aAAa,EAAE,gBAAgB,EACxD,MAAM,gBAAgB,CAAA;AAKvB,OAAO,EAAE,sBAAsB,EAAE,MAAM,aAAa,CAAA;AAEpD,yJAAyJ;AAEzJ,MAAM,WAAW,YAAY;IAC3B,CAAC,CAAC,EAAE,MAAM,GAAG,eAAe,CAAA;CAC7B;AAED,MAAM,MAAM,oBAAoB,GAAG,aAAa,CAAC,MAAM,GAAG,MAAM,GAAG,UAAU,EAAE,IAAI,EAAE,eAAe,CAAC,OAAO,CAAC,CAAC,CAAA;AAC9G,MAAM,MAAM,uBAAuB,GAAG,gBAAgB,CAAC,oBAAoB,EAAE,MAAM,GAAG,MAAM,GAAG,UAAU,EAAE,IAAI,EAAE,eAAe,CAAC,OAAO,CAAC,CAAC,CAAA;AAC1I,MAAM,MAAM,qBAAqB,CAAC,CAAC,IAAI,gBAAgB,CAAC,oBAAoB,EAAE,MAAM,GAAG,MAAM,GAAG,UAAU,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;AAMpH,8BAAsB,wBAAwB,CAC5C,OAAO,SAAS,sBAAsB,GAAG,sBAAsB,EAC/D,UAAU,SAAS,wBAAwB,GAAG,wBAAwB,CACtE,SAAQ,iBAAiB,CAAC,OAAO,EAAE,UAAU,CAAC;IAC9C,gBAAyB,aAAa,EAAE,MAAM,EAAE,CAAyD;IACzG,gBAAyB,mBAAmB,EAAE,MAAM,CAA+B;IAEnF,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEpC;IAED,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEpC;IAGD,MAAM,CAAC,QAAQ,CAAC,iBAAiB,SAAiE;IAElG,MAAM,CAAC,QAAQ,CAAC,iBAAiB,SAAiE;IAElG,IAAI,MAAM,WAET;IAED,IAAI,UAAU,WAEb;IAED,IAAI,QAAQ,WAEX;IAED,IAAa,OAAO,aAUnB;IAED,IAAI,SAAS,WAEZ;IAED,OAAO,CAAC,MAAM,CAAC,mBAAmB;cAQT,UAAU,IAAI,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAOjD,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC;cAa7B,aAAa,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;cAaxC,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;cAoC9C,UAAU,CAAC,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAiB/D,aAAa,CAAC,QAAQ,EAAE,eAAe,CAAC,OAAO,CAAC,EAAE,GAAG,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cA+BxF,WAAW,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,CAAC;cAehF,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC;IAWzD,SAAS,CAAC,iBAAiB,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,KAAK,EAAE,qBAAqB,CAAC,MAAM,CAAC,KAAK,UAAU,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC;IAU5G,SAAS,CAAC,iBAAiB,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,KAAK,EAAE,qBAAqB,CAAC,GAAG,CAAC,KAAK,UAAU,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC;cAUzF,SAAS,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,KAAK,EAAE,uBAAuB,KAAK,UAAU,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;IAOjG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,oBAAoB,KAAK,UAAU,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,CAAC,CAAC;CAC/F;AAED,qBACa,gBAAiB,SAAQ,wBAAwB;IAC5D,OAAO,CAAC,OAAO,CAAc;cACJ,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,oBAAoB,KAAK,UAAU,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;CAUlG"}

package/dist/browser/Config.d.ts
ADDED
@@ -0,0 +1,24 @@
import type { ArchivistConfig } from '@xyo-network/archivist-model';
import { LevelDbArchivistSchema } from './Schema.ts';
export type LevelDbArchivistConfigSchema = `${LevelDbArchivistSchema}.config`;
export declare const LevelDbArchivistConfigSchema: LevelDbArchivistConfigSchema;
export type LevelDbArchivistConfig<TStoreName extends string = string> = ArchivistConfig<{
    /**
     * If true, the store will be cleared on start
     */
    clearStoreOnStart?: boolean;
    /**
     * The database name - also used as the filename for the db
     */
    dbName?: string;
    /**
     * The location where the folder for the db will be created
     */
    location?: string;
    schema: LevelDbArchivistConfigSchema;
    /**
     * The name of the object store - becomes a sub-level
     */
    storeName?: TStoreName;
}>;
//# sourceMappingURL=Config.d.ts.map

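Everything in this config except `schema` is optional at the type level, but the `dbName`, `location`, and `storeName` getters in `Archivist.ts` below assert their presence at runtime, and the database folder is opened at `${location}/${storeName}`. A sketch of a fully populated config; the values are hypothetical:

```ts
import type { LevelDbArchivistConfig } from '@xyo-network/archivist-leveldb'
import { LevelDbArchivistConfigSchema } from '@xyo-network/archivist-leveldb'

const config: LevelDbArchivistConfig = {
  schema: LevelDbArchivistConfigSchema, // 'network.xyo.archivist.leveldb.config'
  clearStoreOnStart: false, // keep existing data across restarts
  dbName: 'archivist.db', // also used as the filename for the db
  location: '/tmp/xyo', // where the folder for the db will be created
  storeName: 'payloads', // the object store; becomes a sub-level
}
```
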
package/dist/browser/Config.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"Config.d.ts","sourceRoot":"","sources":["../../src/Config.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAA;AAEnE,OAAO,EAAE,sBAAsB,EAAE,MAAM,aAAa,CAAA;AAEpD,MAAM,MAAM,4BAA4B,GAAG,GAAG,sBAAsB,SAAS,CAAA;AAC7E,eAAO,MAAM,4BAA4B,EAAE,4BAAiE,CAAA;AAE5G,MAAM,MAAM,sBAAsB,CAAC,UAAU,SAAS,MAAM,GAAG,MAAM,IAAI,eAAe,CAAC;IACvF;;OAEG;IACH,iBAAiB,CAAC,EAAE,OAAO,CAAA;IAC3B;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAA;IACf;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,MAAM,EAAE,4BAA4B,CAAA;IACpC;;OAEG;IACH,SAAS,CAAC,EAAE,UAAU,CAAA;CACvB,CAAC,CAAA"}

package/dist/browser/Params.d.ts
ADDED
@@ -0,0 +1,5 @@
import type { ArchivistParams } from '@xyo-network/archivist-model';
import type { AnyConfigSchema } from '@xyo-network/module-model';
import type { LevelDbArchivistConfig } from './Config.ts';
export type LevelDbArchivistParams = ArchivistParams<AnyConfigSchema<LevelDbArchivistConfig>, {}>;
//# sourceMappingURL=Params.d.ts.map

package/dist/browser/Params.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"Params.d.ts","sourceRoot":"","sources":["../../src/Params.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAA;AACnE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAA;AAEhE,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,aAAa,CAAA;AAEzD,MAAM,MAAM,sBAAsB,GAAG,eAAe,CAAC,eAAe,CAAC,sBAAsB,CAAC,EAAE,EAE7F,CAAC,CAAA"}

package/dist/browser/Schema.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"Schema.d.ts","sourceRoot":"","sources":["../../src/Schema.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,sBAAsB,EAAG,+BAAwC,CAAA;AAC9E,MAAM,MAAM,sBAAsB,GAAG,OAAO,sBAAsB,CAAA"}

package/dist/browser/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAA;AAC9B,cAAc,aAAa,CAAA;AAC3B,cAAc,aAAa,CAAA;AAC3B,cAAc,aAAa,CAAA"}

package/dist/browser/index.mjs
ADDED
@@ -0,0 +1,261 @@
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __decorateClass = (decorators, target, key, kind) => {
  var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
  for (var i = decorators.length - 1, decorator; i >= 0; i--)
    if (decorator = decorators[i])
      result = (kind ? decorator(target, key, result) : decorator(result)) || result;
  if (kind && result) __defProp(target, key, result);
  return result;
};

// src/Archivist.ts
import { assertEx } from "@xylabs/assert";
import { exists } from "@xylabs/exists";
import { fulfilled } from "@xylabs/promise";
import { AbstractArchivist } from "@xyo-network/archivist-abstract";
import {
  ArchivistAllQuerySchema,
  ArchivistClearQuerySchema,
  ArchivistCommitQuerySchema,
  ArchivistDeleteQuerySchema,
  ArchivistInsertQuerySchema,
  ArchivistNextQuerySchema,
  buildStandardIndexName
} from "@xyo-network/archivist-model";
import { creatableModule } from "@xyo-network/module-model";
import { PayloadBuilder } from "@xyo-network/payload-builder";
import { Mutex } from "async-mutex";
import { Level } from "level";

// src/Schema.ts
var LevelDbArchivistSchema = "network.xyo.archivist.leveldb";

// src/Config.ts
var LevelDbArchivistConfigSchema = `${LevelDbArchivistSchema}.config`;

// src/Archivist.ts
var indexSubLevelName = (storeName, indexName) => {
  return `_${storeName}|${indexName}`;
};
var AbstractLevelDbArchivist = class _AbstractLevelDbArchivist extends AbstractArchivist {
  static configSchemas = [...super.configSchemas, LevelDbArchivistConfigSchema];
  static defaultConfigSchema = LevelDbArchivistConfigSchema;
  static dataHashIndex = {
    key: { _dataHash: 1 },
    multiEntry: false,
    unique: false
  };
  static sequenceIndex = {
    key: { _sequence: 1 },
    multiEntry: false,
    unique: true
  };
  // eslint-disable-next-line @typescript-eslint/member-ordering
  static dataHashIndexName = buildStandardIndexName(_AbstractLevelDbArchivist.dataHashIndex);
  // eslint-disable-next-line @typescript-eslint/member-ordering
  static sequenceIndexName = buildStandardIndexName(_AbstractLevelDbArchivist.sequenceIndex);
  get dbName() {
    return assertEx(this.config.dbName, () => "No dbName specified");
  }
  get folderPath() {
    return `${this.location}/${this.storeName}`;
  }
  get location() {
    return assertEx(this.config.location, () => "No location specified");
  }
  get queries() {
    return [
      ArchivistAllQuerySchema,
      ArchivistDeleteQuerySchema,
      ArchivistClearQuerySchema,
      ArchivistInsertQuerySchema,
      ArchivistCommitQuerySchema,
      ArchivistNextQuerySchema,
      ...super.queries
    ];
  }
  get storeName() {
    return assertEx(this.config.storeName, () => "No storeName specified");
  }
  static findIndexFromCursor(payloads, cursor) {
    const index = payloads.findIndex(({ _sequence }) => _sequence === cursor);
    if (index === -1) {
      return Infinity;
    }
    return index;
  }
  async allHandler() {
    return await this.withStore(async (db) => {
      const values = [...await db.values().all()];
      return values.filter(exists).sort(PayloadBuilder.compareStorageMeta);
    });
  }
  async clearHandler() {
    await this.withDb(async (db) => {
      await db.clear();
    });
    await this.withDataHashIndex(async (index) => {
      await index.clear();
    });
    await this.withSequenceIndex(async (index) => {
      await index.clear();
    });
    return this.emit("cleared", { mod: this });
  }
  async commitHandler() {
    const payloads = assertEx(await this.allHandler(), () => "Nothing to commit");
    const settled = await Promise.allSettled(
      Object.values((await this.parentArchivists()).commit ?? [])?.map(async (parent) => {
        const queryPayload = { schema: ArchivistInsertQuerySchema };
        const query = await this.bindQuery(queryPayload, payloads);
        return (await parent?.query(query[0], query[1]))?.[0];
      }).filter(exists)
    );
    await this.clearHandler();
    return settled.filter(fulfilled).map((result) => result.value).filter(exists);
  }
  async deleteHandler(hashes) {
    const payloadsWithMeta = (await this.allHandler()).filter(({ _hash, _dataHash }) => hashes.includes(_hash) || hashes.includes(_dataHash));
    const batchCommands = payloadsWithMeta.map((payload) => ({
      type: "del",
      key: payload._hash
    }));
    await this.withStore(async (store) => {
      await store.batch(batchCommands);
    });
    const batchDataHashIndexCommands = payloadsWithMeta.map((payload) => ({
      type: "del",
      key: payload._dataHash
    }));
    await this.withDataHashIndex(async (index) => {
      await index.batch(batchDataHashIndexCommands);
    });
    const batchSequenceIndexCommands = payloadsWithMeta.map((payload) => ({
      type: "del",
      key: payload._sequence
    }));
    await this.withSequenceIndex(async (index) => {
      await index.batch(batchSequenceIndexCommands);
    });
    return hashes;
  }
  async getHandler(hashes) {
    const foundByHash = await this.withStore(async (store) => {
      return (await store.getMany(hashes)).filter(exists);
    });
    const remainingHashes = hashes.filter((hash) => !foundByHash.some(({ _hash }) => _hash === hash));
    const hashesFromDataHashes = await this.withDataHashIndex(async (index) => {
      return (await index.getMany(remainingHashes)).filter(exists);
    });
    const foundByDataHash = hashesFromDataHashes.length > 0 ? await this.withStore(async (store) => {
      return (await store.getMany(hashesFromDataHashes)).filter(exists);
    }) : [];
    const result = [...foundByHash, ...foundByDataHash].sort(PayloadBuilder.compareStorageMeta);
    return result;
  }
  async insertHandler(payloads) {
    const payloadsWithMeta = payloads.sort(PayloadBuilder.compareStorageMeta);
    const batchCommands = payloadsWithMeta.map((payload) => ({
      type: "put",
      key: payload._hash,
      value: payload,
      keyEncoding: "utf8",
      valueEncoding: "json"
    }));
    await this.withStore(async (store) => {
      await store.batch(batchCommands);
    });
    const batchDataHashIndexCommands = payloadsWithMeta.map((payload) => ({
      type: "put",
      key: payload._dataHash,
      value: payload._hash,
      keyEncoding: "utf8",
      valueEncoding: "utf8"
    }));
    await this.withDataHashIndex(async (index) => {
      await index.batch(batchDataHashIndexCommands);
    });
    const batchSequenceIndexCommands = payloadsWithMeta.map((payload) => ({
      type: "put",
      key: payload._sequence,
      value: payload._hash,
      keyEncoding: "utf8",
      valueEncoding: "utf8"
    }));
    await this.withSequenceIndex(async (index) => {
      await index.batch(batchSequenceIndexCommands);
    });
    return payloadsWithMeta;
  }
  async nextHandler(options) {
    const {
      limit,
      cursor,
      order
    } = options ?? {};
    let all = await this.allHandler();
    if (order === "desc") {
      all = all.reverse();
    }
    const startIndex = cursor ? _AbstractLevelDbArchivist.findIndexFromCursor(all, cursor) + 1 : 0;
    const result = all.slice(startIndex, limit ? startIndex + limit : void 0);
    return result;
  }
  async startHandler() {
    await super.startHandler();
    await this.withStore(() => {
    });
    if (this.config.clearStoreOnStart) {
      await this.clearHandler();
    }
    return true;
  }
  withDataHashIndex(func) {
    return this.withDb(async (db) => {
      const index = db.sublevel(
        indexSubLevelName(this.storeName, _AbstractLevelDbArchivist.dataHashIndexName),
        { keyEncoding: "utf8", valueEncoding: "utf8" }
      );
      return await func(index);
    });
  }
  withSequenceIndex(func) {
    return this.withDb(async (db) => {
      const index = db.sublevel(
        indexSubLevelName(this.storeName, _AbstractLevelDbArchivist.sequenceIndexName),
        { keyEncoding: "utf8", valueEncoding: "utf8" }
      );
      return await func(index);
    });
  }
  async withStore(func) {
    return await this.withDb(async (db) => {
      const subLevel = db.sublevel(this.storeName, { keyEncoding: "utf8", valueEncoding: "json" });
      return await func(subLevel);
    });
  }
};
var LevelDbArchivist = class extends AbstractLevelDbArchivist {
  dbMutex = new Mutex();
  async withDb(func) {
    return await this.dbMutex.runExclusive(async () => {
      const db = new Level(this.folderPath, { keyEncoding: "utf8", valueEncoding: "json" });
      try {
        return await func(db);
      } finally {
        await db.close();
      }
    });
  }
};
LevelDbArchivist = __decorateClass([
  creatableModule()
], LevelDbArchivist);
export {
  AbstractLevelDbArchivist,
  LevelDbArchivist,
  LevelDbArchivistConfigSchema,
  LevelDbArchivistSchema
};
//# sourceMappingURL=index.mjs.map

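As the note in `Archivist.ts` explains, the payload store and its indexes are kept as sibling top-level sublevels (named `_${storeName}|${indexName}`) so that iterating the store's values never sweeps in index entries. A standalone sketch of that layout against the `level`/`abstract-level` API used above; the path, sublevel names, and hashes are hypothetical:

```ts
import { Level } from 'level'

const db = new Level<string, unknown>('/tmp/xyo-example', { valueEncoding: 'json' })

// Sibling sublevels: the payload store and a dataHash -> hash index
const store = db.sublevel<string, Record<string, unknown>>('payloads', { valueEncoding: 'json' })
const dataHashIndex = db.sublevel<string, string>('_payloads|_dataHash', { valueEncoding: 'utf8' })

await store.batch([{ type: 'put', key: 'hash1', value: { schema: 'network.xyo.id' } }])
await dataHashIndex.batch([{ type: 'put', key: 'dataHash1', value: 'hash1' }])

// Iterating the store sees only payloads, never index entries
console.log(await store.values().all())

// Lookup by data hash goes index -> root hash -> payload
const rootHash = await dataHashIndex.get('dataHash1') // undefined when absent (abstract-level 2)
const payload = rootHash === undefined ? undefined : await store.get(rootHash)
console.log(payload)

await db.close()
```
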
package/dist/browser/index.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/Archivist.ts","../../src/Schema.ts","../../src/Config.ts"],"sourcesContent":["import { assertEx } from '@xylabs/assert'\nimport { exists } from '@xylabs/exists'\nimport { Hash, Hex } from '@xylabs/hex'\nimport { fulfilled, Promisable } from '@xylabs/promise'\nimport { AbstractArchivist } from '@xyo-network/archivist-abstract'\nimport {\n ArchivistAllQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistCommitQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistInsertQuery,\n ArchivistInsertQuerySchema,\n ArchivistModuleEventData,\n ArchivistNextOptions,\n ArchivistNextQuerySchema,\n buildStandardIndexName,\n IndexDescription,\n} from '@xyo-network/archivist-model'\nimport { BoundWitness } from '@xyo-network/boundwitness-model'\nimport { creatableModule } from '@xyo-network/module-model'\nimport { PayloadBuilder } from '@xyo-network/payload-builder'\nimport {\n Payload, Schema, WithStorageMeta,\n} from '@xyo-network/payload-model'\nimport {\n AbstractBatchOperation, AbstractLevel, AbstractSublevel,\n} from 'abstract-level'\nimport { Mutex } from 'async-mutex'\nimport { Level } from 'level'\n\nimport { LevelDbArchivistConfigSchema } from './Config.ts'\nimport { LevelDbArchivistParams } from './Params.ts'\n\n/** Note: We have indexes as top level sublevels since making them a sublevel of a store, getting all the values of that store includes the sublevels */\n\nexport interface PayloadStore {\n [s: string]: WithStorageMeta\n}\n\nexport type AbstractPayloadLevel = AbstractLevel<string | Buffer | Uint8Array, Hash, WithStorageMeta<Payload>>\nexport type AbstractPayloadSubLevel = AbstractSublevel<AbstractPayloadLevel, string | Buffer | Uint8Array, Hash, WithStorageMeta<Payload>>\nexport type AbstractIndexSubLevel<T> = AbstractSublevel<AbstractPayloadLevel, string | Buffer | Uint8Array, T, Hash>\n\nconst indexSubLevelName = (storeName: string, indexName: string) => {\n return `_${storeName}|${indexName}`\n}\n\nexport abstract class AbstractLevelDbArchivist<\n TParams extends LevelDbArchivistParams = LevelDbArchivistParams,\n TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,\n> extends AbstractArchivist<TParams, TEventData> {\n static override readonly configSchemas: Schema[] = [...super.configSchemas, LevelDbArchivistConfigSchema]\n static override readonly defaultConfigSchema: Schema = LevelDbArchivistConfigSchema\n\n private static readonly dataHashIndex: IndexDescription = {\n key: { _dataHash: 1 }, multiEntry: false, unique: false,\n }\n\n private static readonly sequenceIndex: IndexDescription = {\n key: { _sequence: 1 }, multiEntry: false, unique: true,\n }\n\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly dataHashIndexName = buildStandardIndexName(AbstractLevelDbArchivist.dataHashIndex)\n // eslint-disable-next-line @typescript-eslint/member-ordering\n static readonly sequenceIndexName = buildStandardIndexName(AbstractLevelDbArchivist.sequenceIndex)\n\n get dbName() {\n return assertEx(this.config.dbName, () => 'No dbName specified')\n }\n\n get folderPath() {\n return `${this.location}/${this.storeName}`\n }\n\n get location() {\n return assertEx(this.config.location, () => 'No location specified')\n }\n\n override get queries() {\n return [\n ArchivistAllQuerySchema,\n ArchivistDeleteQuerySchema,\n ArchivistClearQuerySchema,\n ArchivistInsertQuerySchema,\n ArchivistCommitQuerySchema,\n ArchivistNextQuerySchema,\n ...super.queries,\n ]\n }\n\n get storeName() {\n return 
assertEx(this.config.storeName, () => 'No storeName specified')\n }\n\n private static findIndexFromCursor(payloads: WithStorageMeta[], cursor: Hex) {\n const index = payloads.findIndex(({ _sequence }) => _sequence === cursor)\n if (index === -1) {\n return Infinity // move to the end\n }\n return index\n }\n\n protected override async allHandler(): Promise<WithStorageMeta<Payload>[]> {\n return await this.withStore(async (db) => {\n const values = [...(await db.values().all())]\n return values.filter(exists).sort(PayloadBuilder.compareStorageMeta)\n })\n }\n\n protected override async clearHandler(): Promise<void> {\n await this.withDb(async (db) => {\n await db.clear()\n })\n await this.withDataHashIndex(async (index) => {\n await index.clear()\n })\n await this.withSequenceIndex(async (index) => {\n await index.clear()\n })\n return this.emit('cleared', { mod: this })\n }\n\n protected override async commitHandler(): Promise<BoundWitness[]> {\n const payloads = assertEx(await this.allHandler(), () => 'Nothing to commit')\n const settled = await Promise.allSettled(\n Object.values((await this.parentArchivists()).commit ?? [])?.map(async (parent) => {\n const queryPayload: ArchivistInsertQuery = { schema: ArchivistInsertQuerySchema }\n const query = await this.bindQuery(queryPayload, payloads)\n return (await parent?.query(query[0], query[1]))?.[0]\n }).filter(exists),\n )\n await this.clearHandler()\n return settled.filter(fulfilled).map(result => result.value).filter(exists)\n }\n\n protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {\n // not using the getHandler since duplicate data hashes are not handled\n const payloadsWithMeta = (await this.allHandler()).filter(({ _hash, _dataHash }) => hashes.includes(_hash) || hashes.includes(_dataHash))\n // Delete the payloads\n const batchCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, Hash, WithStorageMeta<Payload>>> = payloadsWithMeta.map(payload => ({\n type: 'del',\n key: payload._hash,\n }))\n\n await this.withStore(async (store) => {\n await store.batch(batchCommands)\n })\n\n // Delete the dataHash indexes\n const batchDataHashIndexCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, string, Hash>> = payloadsWithMeta.map(payload => ({\n type: 'del',\n key: payload._dataHash,\n }))\n\n await this.withDataHashIndex(async (index) => {\n await index.batch(batchDataHashIndexCommands)\n })\n\n // Delete the sequence indexes\n const batchSequenceIndexCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, Hex, Hash>> = payloadsWithMeta.map(payload => ({\n type: 'del',\n key: payload._sequence,\n }))\n\n await this.withSequenceIndex(async (index) => {\n await index.batch(batchSequenceIndexCommands)\n })\n\n return hashes\n }\n\n protected override async getHandler(hashes: Hash[]): Promise<WithStorageMeta<Payload>[]> {\n const foundByHash = await this.withStore(async (store) => {\n return (await store.getMany(hashes)).filter(exists)\n })\n const remainingHashes = hashes.filter(hash => !foundByHash.some(({ _hash }) => _hash === hash))\n const hashesFromDataHashes = await this.withDataHashIndex(async (index) => {\n return (await index.getMany(remainingHashes)).filter(exists)\n })\n const foundByDataHash = hashesFromDataHashes.length > 0\n ? 
await this.withStore(async (store) => {\n return (await store.getMany(hashesFromDataHashes)).filter(exists)\n })\n : []\n const result = [...foundByHash, ...foundByDataHash].sort(PayloadBuilder.compareStorageMeta)\n return result\n }\n\n protected override async insertHandler(payloads: WithStorageMeta<Payload>[]): Promise<WithStorageMeta<Payload>[]> {\n // Insert the payloads\n const payloadsWithMeta = payloads.sort(PayloadBuilder.compareStorageMeta)\n const batchCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, Hash, WithStorageMeta<Payload>>> = payloadsWithMeta.map(payload => ({\n type: 'put', key: payload._hash, value: payload, keyEncoding: 'utf8', valueEncoding: 'json',\n }))\n await this.withStore(async (store) => {\n await store.batch(batchCommands)\n })\n\n // Insert the dataHash indexes\n // Note: We use the dataHash|hash for the key to allow for multiple entries\n const batchDataHashIndexCommands: Array<AbstractBatchOperation<AbstractPayloadLevel, string, Hash>> = payloadsWithMeta.map(payload => ({\n type: 'put', key: payload._dataHash, value: payload._hash, keyEncoding: 'utf8', valueEncoding: 'utf8',\n }))\n await this.withDataHashIndex(async (index) => {\n await index.batch(batchDataHashIndexCommands)\n })\n\n // Insert the sequence indexes\n // Note: We use the dataHash|hash for the key to allow for multiple entries\n const batchSequenceIndexCommands: Array<AbstractBatchOperation<AbstractPayloadLevel, Hex, Hash>> = payloadsWithMeta.map(payload => ({\n type: 'put', key: payload._sequence, value: payload._hash, keyEncoding: 'utf8', valueEncoding: 'utf8',\n }))\n await this.withSequenceIndex(async (index) => {\n await index.batch(batchSequenceIndexCommands)\n })\n\n return payloadsWithMeta\n }\n\n protected override async nextHandler(options?: ArchivistNextOptions): Promise<WithStorageMeta<Payload>[]> {\n const {\n limit, cursor, order,\n } = options ?? {}\n let all = await this.allHandler()\n if (order === 'desc') {\n all = all.reverse()\n }\n const startIndex = cursor\n ? AbstractLevelDbArchivist.findIndexFromCursor(all, cursor) + 1\n : 0\n const result = all.slice(startIndex, limit ? 
startIndex + limit : undefined)\n return result\n }\n\n protected override async startHandler(): Promise<boolean> {\n await super.startHandler()\n // NOTE: We could defer this creation to first access but we\n // want to fail fast here in case something is wrong\n await this.withStore(() => {})\n if (this.config.clearStoreOnStart) {\n await this.clearHandler()\n }\n return true\n }\n\n protected withDataHashIndex<T>(func: (index: AbstractIndexSubLevel<string>) => Promisable<T>): Promisable<T> {\n return this.withDb(async (db) => {\n const index = db.sublevel<string, Hash>(\n indexSubLevelName(this.storeName, AbstractLevelDbArchivist.dataHashIndexName),\n { keyEncoding: 'utf8', valueEncoding: 'utf8' },\n )\n return await func(index)\n })\n }\n\n protected withSequenceIndex<T>(func: (index: AbstractIndexSubLevel<Hex>) => Promisable<T>): Promisable<T> {\n return this.withDb(async (db) => {\n const index = db.sublevel<Hex, Hash>(\n indexSubLevelName(this.storeName, AbstractLevelDbArchivist.sequenceIndexName),\n { keyEncoding: 'utf8', valueEncoding: 'utf8' },\n )\n return await func(index)\n })\n }\n\n protected async withStore<T>(func: (store: AbstractPayloadSubLevel) => Promisable<T>): Promise<T> {\n return await this.withDb(async (db) => {\n const subLevel: AbstractPayloadSubLevel = db.sublevel<Hash, WithStorageMeta<Payload>>(this.storeName, { keyEncoding: 'utf8', valueEncoding: 'json' })\n return await func(subLevel)\n })\n }\n\n protected abstract withDb<T>(func: (db: AbstractPayloadLevel) => Promisable<T>): Promisable<T>\n}\n\n@creatableModule()\nexport class LevelDbArchivist extends AbstractLevelDbArchivist {\n private dbMutex = new Mutex()\n protected override async withDb<T>(func: (db: AbstractPayloadLevel) => Promisable<T>): Promise<T> {\n return await this.dbMutex.runExclusive(async () => {\n const db: AbstractPayloadLevel = new Level<Hash, WithStorageMeta<Payload>>(this.folderPath, { keyEncoding: 'utf8', valueEncoding: 'json' })\n try {\n return await func(db)\n } finally {\n await db.close()\n }\n })\n }\n}\n","export const LevelDbArchivistSchema = 'network.xyo.archivist.leveldb' as const\nexport type LevelDbArchivistSchema = typeof LevelDbArchivistSchema\n","import type { ArchivistConfig } from '@xyo-network/archivist-model'\n\nimport { LevelDbArchivistSchema } from './Schema.ts'\n\nexport type LevelDbArchivistConfigSchema = `${LevelDbArchivistSchema}.config`\nexport const LevelDbArchivistConfigSchema: LevelDbArchivistConfigSchema = `${LevelDbArchivistSchema}.config`\n\nexport type LevelDbArchivistConfig<TStoreName extends string = string> = ArchivistConfig<{\n /**\n * If true, the store will be cleared on start\n */\n clearStoreOnStart?: boolean\n /**\n * The database name - also used as the filename for the db\n */\n dbName?: string\n /**\n * The location where the folder for the db will be created\n */\n location?: string\n schema: LevelDbArchivistConfigSchema\n /**\n * The name of the object store - becomes a sub-level\n */\n storeName?: 
TStoreName\n}>\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,gBAAgB;AACzB,SAAS,cAAc;AAEvB,SAAS,iBAA6B;AACtC,SAAS,yBAAyB;AAClC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EAGA;AAAA,EACA;AAAA,OAEK;AAEP,SAAS,uBAAuB;AAChC,SAAS,sBAAsB;AAO/B,SAAS,aAAa;AACtB,SAAS,aAAa;;;AC5Bf,IAAM,yBAAyB;;;ACK/B,IAAM,+BAA6D,GAAG,sBAAsB;;;AFsCnG,IAAM,oBAAoB,CAAC,WAAmB,cAAsB;AAClE,SAAO,IAAI,SAAS,IAAI,SAAS;AACnC;AAEO,IAAe,2BAAf,MAAe,kCAGZ,kBAAuC;AAAA,EAC/C,OAAyB,gBAA0B,CAAC,GAAG,MAAM,eAAe,4BAA4B;AAAA,EACxG,OAAyB,sBAA8B;AAAA,EAEvD,OAAwB,gBAAkC;AAAA,IACxD,KAAK,EAAE,WAAW,EAAE;AAAA,IAAG,YAAY;AAAA,IAAO,QAAQ;AAAA,EACpD;AAAA,EAEA,OAAwB,gBAAkC;AAAA,IACxD,KAAK,EAAE,WAAW,EAAE;AAAA,IAAG,YAAY;AAAA,IAAO,QAAQ;AAAA,EACpD;AAAA;AAAA,EAGA,OAAgB,oBAAoB,uBAAuB,0BAAyB,aAAa;AAAA;AAAA,EAEjG,OAAgB,oBAAoB,uBAAuB,0BAAyB,aAAa;AAAA,EAEjG,IAAI,SAAS;AACX,WAAO,SAAS,KAAK,OAAO,QAAQ,MAAM,qBAAqB;AAAA,EACjE;AAAA,EAEA,IAAI,aAAa;AACf,WAAO,GAAG,KAAK,QAAQ,IAAI,KAAK,SAAS;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAW;AACb,WAAO,SAAS,KAAK,OAAO,UAAU,MAAM,uBAAuB;AAAA,EACrE;AAAA,EAEA,IAAa,UAAU;AACrB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG,MAAM;AAAA,IACX;AAAA,EACF;AAAA,EAEA,IAAI,YAAY;AACd,WAAO,SAAS,KAAK,OAAO,WAAW,MAAM,wBAAwB;AAAA,EACvE;AAAA,EAEA,OAAe,oBAAoB,UAA6B,QAAa;AAC3E,UAAM,QAAQ,SAAS,UAAU,CAAC,EAAE,UAAU,MAAM,cAAc,MAAM;AACxE,QAAI,UAAU,IAAI;AAChB,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAyB,aAAkD;AACzE,WAAO,MAAM,KAAK,UAAU,OAAO,OAAO;AACxC,YAAM,SAAS,CAAC,GAAI,MAAM,GAAG,OAAO,EAAE,IAAI,CAAE;AAC5C,aAAO,OAAO,OAAO,MAAM,EAAE,KAAK,eAAe,kBAAkB;AAAA,IACrE,CAAC;AAAA,EACH;AAAA,EAEA,MAAyB,eAA8B;AACrD,UAAM,KAAK,OAAO,OAAO,OAAO;AAC9B,YAAM,GAAG,MAAM;AAAA,IACjB,CAAC;AACD,UAAM,KAAK,kBAAkB,OAAO,UAAU;AAC5C,YAAM,MAAM,MAAM;AAAA,IACpB,CAAC;AACD,UAAM,KAAK,kBAAkB,OAAO,UAAU;AAC5C,YAAM,MAAM,MAAM;AAAA,IACpB,CAAC;AACD,WAAO,KAAK,KAAK,WAAW,EAAE,KAAK,KAAK,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAyB,gBAAyC;AAChE,UAAM,WAAW,SAAS,MAAM,KAAK,WAAW,GAAG,MAAM,mBAAmB;AAC5E,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,OAAO,QAAQ,MAAM,KAAK,iBAAiB,GAAG,UAAU,CAAC,CAAC,GAAG,IAAI,OAAO,WAAW;AACjF,cAAM,eAAqC,EAAE,QAAQ,2BAA2B;AAChF,cAAM,QAAQ,MAAM,KAAK,UAAU,cAAc,QAAQ;AACzD,gBAAQ,MAAM,QAAQ,MAAM,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,KAAK,CAAC;AAAA,MACtD,CAAC,EAAE,OAAO,MAAM;AAAA,IAClB;AACA,UAAM,KAAK,aAAa;AACxB,WAAO,QAAQ,OAAO,SAAS,EAAE,IAAI,YAAU,OAAO,KAAK,EAAE,OAAO,MAAM;AAAA,EAC5E;AAAA,EAEA,MAAyB,cAAc,QAAiC;AAEtE,UAAM,oBAAoB,MAAM,KAAK,WAAW,GAAG,OAAO,CAAC,EAAE,OAAO,UAAU,MAAM,OAAO,SAAS,KAAK,KAAK,OAAO,SAAS,SAAS,CAAC;AAExI,UAAM,gBAAwG,iBAAiB,IAAI,cAAY;AAAA,MAC7I,MAAM;AAAA,MACN,KAAK,QAAQ;AAAA,IACf,EAAE;AAEF,UAAM,KAAK,UAAU,OAAO,UAAU;AACpC,YAAM,MAAM,MAAM,aAAa;AAAA,IACjC,CAAC;AAGD,UAAM,6BAAmG,iBAAiB,IAAI,cAAY;AAAA,MACxI,MAAM;AAAA,MACN,KAAK,QAAQ;AAAA,IACf,EAAE;AAEF,UAAM,KAAK,kBAAkB,OAAO,UAAU;AAC5C,YAAM,MAAM,MAAM,0BAA0B;AAAA,IAC9C,CAAC;AAGD,UAAM,6BAAgG,iBAAiB,IAAI,cAAY;AAAA,MACrI,MAAM;AAAA,MACN,KAAK,QAAQ;AAAA,IACf,EAAE;AAEF,UAAM,KAAK,kBAAkB,OAAO,UAAU;AAC5C,YAAM,MAAM,MAAM,0BAA0B;AAAA,IAC9C,CAAC;AAED,WAAO;AAAA,EACT;AAAA,EAEA,MAAyB,WAAW,QAAqD;AACvF,UAAM,cAAc,MAAM,KAAK,UAAU,OAAO,UAAU;AACxD,cAAQ,MAAM,MAAM,QAAQ,MAAM,GAAG,OAAO,MAAM;AAAA,IACpD,CAAC;AACD,UAAM,kBAAkB,OAAO,OAAO,UAAQ,CAAC,YAAY,KAAK,CAAC,EAAE,MAAM,MAAM,UAAU,IAAI,CAAC;AAC9F,UAAM,uBAAuB,MAAM,KAAK,kBAAkB,OAAO,UAAU;AACzE,cAAQ,MAAM,MAAM,QAAQ,eAAe,GAAG,OAAO,MAAM;AAAA,IAC7D,CAAC;AACD,UAAM,kBAAkB,qBAAqB,SAAS,IAClD,MAAM,KAAK,UAAU,OAAO,UAAU;AACtC,cAAQ,MAAM,MAAM,QAAQ,oBAAoB,GAAG,OAAO,MAAM;AAAA,IAClE,CAAC,IACC,CAAC;AACL,UAAM,SAAS,CAAC,GAAG,aAAa,GAAG,eAAe,EAAE,KAAK,eAAe,kBAAkB;AAC1F,WAAO;AAAA,EACT;AAAA,EAEA,MAAyB,cAAc,UAA2E;AAEhH,UAAM,mBAAmB,SAAS,KAAK,eAAe,kBAAkB;AACxE,UAAM,gBAAwG,iBAAiB
,IAAI,cAAY;AAAA,MAC7I,MAAM;AAAA,MAAO,KAAK,QAAQ;AAAA,MAAO,OAAO;AAAA,MAAS,aAAa;AAAA,MAAQ,eAAe;AAAA,IACvF,EAAE;AACF,UAAM,KAAK,UAAU,OAAO,UAAU;AACpC,YAAM,MAAM,MAAM,aAAa;AAAA,IACjC,CAAC;AAID,UAAM,6BAAgG,iBAAiB,IAAI,cAAY;AAAA,MACrI,MAAM;AAAA,MAAO,KAAK,QAAQ;AAAA,MAAW,OAAO,QAAQ;AAAA,MAAO,aAAa;AAAA,MAAQ,eAAe;AAAA,IACjG,EAAE;AACF,UAAM,KAAK,kBAAkB,OAAO,UAAU;AAC5C,YAAM,MAAM,MAAM,0BAA0B;AAAA,IAC9C,CAAC;AAID,UAAM,6BAA6F,iBAAiB,IAAI,cAAY;AAAA,MAClI,MAAM;AAAA,MAAO,KAAK,QAAQ;AAAA,MAAW,OAAO,QAAQ;AAAA,MAAO,aAAa;AAAA,MAAQ,eAAe;AAAA,IACjG,EAAE;AACF,UAAM,KAAK,kBAAkB,OAAO,UAAU;AAC5C,YAAM,MAAM,MAAM,0BAA0B;AAAA,IAC9C,CAAC;AAED,WAAO;AAAA,EACT;AAAA,EAEA,MAAyB,YAAY,SAAqE;AACxG,UAAM;AAAA,MACJ;AAAA,MAAO;AAAA,MAAQ;AAAA,IACjB,IAAI,WAAW,CAAC;AAChB,QAAI,MAAM,MAAM,KAAK,WAAW;AAChC,QAAI,UAAU,QAAQ;AACpB,YAAM,IAAI,QAAQ;AAAA,IACpB;AACA,UAAM,aAAa,SACf,0BAAyB,oBAAoB,KAAK,MAAM,IAAI,IAC5D;AACJ,UAAM,SAAS,IAAI,MAAM,YAAY,QAAQ,aAAa,QAAQ,MAAS;AAC3E,WAAO;AAAA,EACT;AAAA,EAEA,MAAyB,eAAiC;AACxD,UAAM,MAAM,aAAa;AAGzB,UAAM,KAAK,UAAU,MAAM;AAAA,IAAC,CAAC;AAC7B,QAAI,KAAK,OAAO,mBAAmB;AACjC,YAAM,KAAK,aAAa;AAAA,IAC1B;AACA,WAAO;AAAA,EACT;AAAA,EAEU,kBAAqB,MAA8E;AAC3G,WAAO,KAAK,OAAO,OAAO,OAAO;AAC/B,YAAM,QAAQ,GAAG;AAAA,QACf,kBAAkB,KAAK,WAAW,0BAAyB,iBAAiB;AAAA,QAC5E,EAAE,aAAa,QAAQ,eAAe,OAAO;AAAA,MAC/C;AACA,aAAO,MAAM,KAAK,KAAK;AAAA,IACzB,CAAC;AAAA,EACH;AAAA,EAEU,kBAAqB,MAA2E;AACxG,WAAO,KAAK,OAAO,OAAO,OAAO;AAC/B,YAAM,QAAQ,GAAG;AAAA,QACf,kBAAkB,KAAK,WAAW,0BAAyB,iBAAiB;AAAA,QAC5E,EAAE,aAAa,QAAQ,eAAe,OAAO;AAAA,MAC/C;AACA,aAAO,MAAM,KAAK,KAAK;AAAA,IACzB,CAAC;AAAA,EACH;AAAA,EAEA,MAAgB,UAAa,MAAqE;AAChG,WAAO,MAAM,KAAK,OAAO,OAAO,OAAO;AACrC,YAAM,WAAoC,GAAG,SAAyC,KAAK,WAAW,EAAE,aAAa,QAAQ,eAAe,OAAO,CAAC;AACpJ,aAAO,MAAM,KAAK,QAAQ;AAAA,IAC5B,CAAC;AAAA,EACH;AAGF;AAGO,IAAM,mBAAN,cAA+B,yBAAyB;AAAA,EACrD,UAAU,IAAI,MAAM;AAAA,EAC5B,MAAyB,OAAU,MAA+D;AAChG,WAAO,MAAM,KAAK,QAAQ,aAAa,YAAY;AACjD,YAAM,KAA2B,IAAI,MAAsC,KAAK,YAAY,EAAE,aAAa,QAAQ,eAAe,OAAO,CAAC;AAC1I,UAAI;AACF,eAAO,MAAM,KAAK,EAAE;AAAA,MACtB,UAAE;AACA,cAAM,GAAG,MAAM;AAAA,MACjB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAZa,mBAAN;AAAA,EADN,gBAAgB;AAAA,GACJ;","names":[]}

package/package.json
ADDED
@@ -0,0 +1,61 @@
{
  "name": "@xyo-network/archivist-leveldb",
  "version": "3.6.11",
  "description": "Primary SDK for using XYO Protocol 2.0",
  "homepage": "https://xyo.network",
  "bugs": {
    "url": "git+https://github.com/XYOracleNetwork/sdk-xyo-client-js/issues",
    "email": "support@xyo.network"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/XYOracleNetwork/sdk-xyo-client-js.git"
  },
  "license": "LGPL-3.0-only",
  "author": {
    "name": "XYO Development Team",
    "email": "support@xyo.network",
    "url": "https://xyo.network"
  },
  "sideEffects": false,
  "type": "module",
  "exports": {
    ".": {
      "types": "./dist/browser/index.d.ts",
      "default": "./dist/browser/index.mjs"
    },
    "./package.json": "./package.json"
  },
  "module": "dist/browser/index.mjs",
  "types": "dist/browser/index.d.ts",
  "dependencies": {
    "@xylabs/array": "^4.5.1",
    "@xylabs/assert": "^4.5.1",
    "@xylabs/exists": "^4.5.1",
    "@xylabs/hex": "^4.5.1",
    "@xylabs/promise": "^4.5.1",
    "@xyo-network/archivist-abstract": "^3.6.11",
    "@xyo-network/archivist-model": "^3.6.11",
    "@xyo-network/boundwitness-model": "^3.6.11",
    "@xyo-network/module-model": "^3.6.11",
    "@xyo-network/payload-builder": "^3.6.11",
    "@xyo-network/payload-model": "^3.6.11",
    "abstract-level": "^2.0.1",
    "async-mutex": "^0.3.0",
    "level": "^9.0.0"
  },
  "devDependencies": {
    "@xylabs/delay": "^4.5.1",
    "@xylabs/object": "^4.5.1",
    "@xylabs/ts-scripts-yarn3": "^4.2.6",
    "@xylabs/tsconfig": "^4.2.6",
    "@xyo-network/account": "^3.6.11",
    "@xyo-network/id-payload-plugin": "^3.6.11",
    "@xyo-network/payload-wrapper": "^3.6.11",
    "typescript": "^5.7.3",
    "vitest": "^3.0.4"
  },
  "publishConfig": {
    "access": "public"
  }
}

package/src/Archivist.ts
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
import { assertEx } from '@xylabs/assert'
|
|
2
|
+
import { exists } from '@xylabs/exists'
|
|
3
|
+
import { Hash, Hex } from '@xylabs/hex'
|
|
4
|
+
import { fulfilled, Promisable } from '@xylabs/promise'
|
|
5
|
+
import { AbstractArchivist } from '@xyo-network/archivist-abstract'
|
|
6
|
+
import {
|
|
7
|
+
ArchivistAllQuerySchema,
|
|
8
|
+
ArchivistClearQuerySchema,
|
|
9
|
+
ArchivistCommitQuerySchema,
|
|
10
|
+
ArchivistDeleteQuerySchema,
|
|
11
|
+
ArchivistInsertQuery,
|
|
12
|
+
ArchivistInsertQuerySchema,
|
|
13
|
+
ArchivistModuleEventData,
|
|
14
|
+
ArchivistNextOptions,
|
|
15
|
+
ArchivistNextQuerySchema,
|
|
16
|
+
buildStandardIndexName,
|
|
17
|
+
IndexDescription,
|
|
18
|
+
} from '@xyo-network/archivist-model'
|
|
19
|
+
import { BoundWitness } from '@xyo-network/boundwitness-model'
|
|
20
|
+
import { creatableModule } from '@xyo-network/module-model'
|
|
21
|
+
import { PayloadBuilder } from '@xyo-network/payload-builder'
|
|
22
|
+
import {
|
|
23
|
+
Payload, Schema, WithStorageMeta,
|
|
24
|
+
} from '@xyo-network/payload-model'
|
|
25
|
+
import {
|
|
26
|
+
AbstractBatchOperation, AbstractLevel, AbstractSublevel,
|
|
27
|
+
} from 'abstract-level'
|
|
28
|
+
import { Mutex } from 'async-mutex'
|
|
29
|
+
import { Level } from 'level'
|
|
30
|
+
|
|
31
|
+
import { LevelDbArchivistConfigSchema } from './Config.ts'
|
|
32
|
+
import { LevelDbArchivistParams } from './Params.ts'
|
|
33
|
+
|
|
34
|
+
/** Note: We have indexes as top level sublevels since making them a sublevel of a store, getting all the values of that store includes the sublevels */
|
|
35
|
+
|
|
36
|
+
export interface PayloadStore {
|
|
37
|
+
[s: string]: WithStorageMeta
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export type AbstractPayloadLevel = AbstractLevel<string | Buffer | Uint8Array, Hash, WithStorageMeta<Payload>>
|
|
41
|
+
export type AbstractPayloadSubLevel = AbstractSublevel<AbstractPayloadLevel, string | Buffer | Uint8Array, Hash, WithStorageMeta<Payload>>
|
|
42
|
+
export type AbstractIndexSubLevel<T> = AbstractSublevel<AbstractPayloadLevel, string | Buffer | Uint8Array, T, Hash>
|
|
43
|
+
|
|
44
|
+
const indexSubLevelName = (storeName: string, indexName: string) => {
|
|
45
|
+
return `_${storeName}|${indexName}`
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export abstract class AbstractLevelDbArchivist<
|
|
49
|
+
TParams extends LevelDbArchivistParams = LevelDbArchivistParams,
|
|
50
|
+
  TEventData extends ArchivistModuleEventData = ArchivistModuleEventData,
> extends AbstractArchivist<TParams, TEventData> {
  static override readonly configSchemas: Schema[] = [...super.configSchemas, LevelDbArchivistConfigSchema]
  static override readonly defaultConfigSchema: Schema = LevelDbArchivistConfigSchema

  private static readonly dataHashIndex: IndexDescription = {
    key: { _dataHash: 1 }, multiEntry: false, unique: false,
  }

  private static readonly sequenceIndex: IndexDescription = {
    key: { _sequence: 1 }, multiEntry: false, unique: true,
  }

  // eslint-disable-next-line @typescript-eslint/member-ordering
  static readonly dataHashIndexName = buildStandardIndexName(AbstractLevelDbArchivist.dataHashIndex)
  // eslint-disable-next-line @typescript-eslint/member-ordering
  static readonly sequenceIndexName = buildStandardIndexName(AbstractLevelDbArchivist.sequenceIndex)

  get dbName() {
    return assertEx(this.config.dbName, () => 'No dbName specified')
  }

  get folderPath() {
    return `${this.location}/${this.storeName}`
  }

  get location() {
    return assertEx(this.config.location, () => 'No location specified')
  }

  override get queries() {
    return [
      ArchivistAllQuerySchema,
      ArchivistDeleteQuerySchema,
      ArchivistClearQuerySchema,
      ArchivistInsertQuerySchema,
      ArchivistCommitQuerySchema,
      ArchivistNextQuerySchema,
      ...super.queries,
    ]
  }

  get storeName() {
    return assertEx(this.config.storeName, () => 'No storeName specified')
  }

  private static findIndexFromCursor(payloads: WithStorageMeta[], cursor: Hex) {
    const index = payloads.findIndex(({ _sequence }) => _sequence === cursor)
    if (index === -1) {
      return Infinity // move to the end
    }
    return index
  }

  protected override async allHandler(): Promise<WithStorageMeta<Payload>[]> {
    return await this.withStore(async (db) => {
      const values = [...(await db.values().all())]
      return values.filter(exists).sort(PayloadBuilder.compareStorageMeta)
    })
  }

  protected override async clearHandler(): Promise<void> {
    await this.withDb(async (db) => {
      await db.clear()
    })
    await this.withDataHashIndex(async (index) => {
      await index.clear()
    })
    await this.withSequenceIndex(async (index) => {
      await index.clear()
    })
    return this.emit('cleared', { mod: this })
  }

  protected override async commitHandler(): Promise<BoundWitness[]> {
    const payloads = assertEx(await this.allHandler(), () => 'Nothing to commit')
    const settled = await Promise.allSettled(
      Object.values((await this.parentArchivists()).commit ?? [])?.map(async (parent) => {
        const queryPayload: ArchivistInsertQuery = { schema: ArchivistInsertQuerySchema }
        const query = await this.bindQuery(queryPayload, payloads)
        return (await parent?.query(query[0], query[1]))?.[0]
      }).filter(exists),
    )
    await this.clearHandler()
    return settled.filter(fulfilled).map(result => result.value).filter(exists)
  }

  protected override async deleteHandler(hashes: Hash[]): Promise<Hash[]> {
    // not using the getHandler since duplicate data hashes are not handled
    const payloadsWithMeta = (await this.allHandler()).filter(({ _hash, _dataHash }) => hashes.includes(_hash) || hashes.includes(_dataHash))
    // Delete the payloads
    const batchCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, Hash, WithStorageMeta<Payload>>> = payloadsWithMeta.map(payload => ({
      type: 'del',
      key: payload._hash,
    }))

    await this.withStore(async (store) => {
      await store.batch(batchCommands)
    })

    // Delete the dataHash indexes
    const batchDataHashIndexCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, string, Hash>> = payloadsWithMeta.map(payload => ({
      type: 'del',
      key: payload._dataHash,
    }))

    await this.withDataHashIndex(async (index) => {
      await index.batch(batchDataHashIndexCommands)
    })

    // Delete the sequence indexes
    const batchSequenceIndexCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, Hex, Hash>> = payloadsWithMeta.map(payload => ({
      type: 'del',
      key: payload._sequence,
    }))

    await this.withSequenceIndex(async (index) => {
      await index.batch(batchSequenceIndexCommands)
    })

    return hashes
  }

  protected override async getHandler(hashes: Hash[]): Promise<WithStorageMeta<Payload>[]> {
    const foundByHash = await this.withStore(async (store) => {
      return (await store.getMany(hashes)).filter(exists)
    })
    const remainingHashes = hashes.filter(hash => !foundByHash.some(({ _hash }) => _hash === hash))
    const hashesFromDataHashes = await this.withDataHashIndex(async (index) => {
      return (await index.getMany(remainingHashes)).filter(exists)
    })
    const foundByDataHash = hashesFromDataHashes.length > 0
      ? await this.withStore(async (store) => {
        return (await store.getMany(hashesFromDataHashes)).filter(exists)
      })
      : []
    const result = [...foundByHash, ...foundByDataHash].sort(PayloadBuilder.compareStorageMeta)
    return result
  }
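
  // Descriptive note: getHandler resolves hashes in two passes. It first reads
  // the payload store directly by root hash (_hash); any hashes still missing
  // are then looked up in the dataHash index and re-fetched by the root hash
  // that index yields, so callers may pass either kind of hash.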

  protected override async insertHandler(payloads: WithStorageMeta<Payload>[]): Promise<WithStorageMeta<Payload>[]> {
    // Insert the payloads
    const payloadsWithMeta = payloads.sort(PayloadBuilder.compareStorageMeta)
    const batchCommands: Array<AbstractBatchOperation<AbstractPayloadSubLevel, Hash, WithStorageMeta<Payload>>> = payloadsWithMeta.map(payload => ({
      type: 'put', key: payload._hash, value: payload, keyEncoding: 'utf8', valueEncoding: 'json',
    }))
    await this.withStore(async (store) => {
      await store.batch(batchCommands)
    })

    // Insert the dataHash indexes
    // Note: keyed by dataHash alone, so the index keeps only the most recently
    // inserted root hash for a given dataHash
    const batchDataHashIndexCommands: Array<AbstractBatchOperation<AbstractPayloadLevel, string, Hash>> = payloadsWithMeta.map(payload => ({
      type: 'put', key: payload._dataHash, value: payload._hash, keyEncoding: 'utf8', valueEncoding: 'utf8',
    }))
    await this.withDataHashIndex(async (index) => {
      await index.batch(batchDataHashIndexCommands)
    })

    // Insert the sequence indexes
    // Note: _sequence is unique per payload, so it is used directly as the key
    const batchSequenceIndexCommands: Array<AbstractBatchOperation<AbstractPayloadLevel, Hex, Hash>> = payloadsWithMeta.map(payload => ({
      type: 'put', key: payload._sequence, value: payload._hash, keyEncoding: 'utf8', valueEncoding: 'utf8',
    }))
    await this.withSequenceIndex(async (index) => {
      await index.batch(batchSequenceIndexCommands)
    })

    return payloadsWithMeta
  }
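
  // Descriptive note: taken together, insertHandler issues three batched writes
  // per payload: the payload itself keyed by _hash in the store, a
  // dataHash -> hash entry in the dataHash index, and a _sequence -> hash entry
  // in the sequence index that nextHandler relies on for cursor paging.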

  protected override async nextHandler(options?: ArchivistNextOptions): Promise<WithStorageMeta<Payload>[]> {
    const {
      limit, cursor, order,
    } = options ?? {}
    let all = await this.allHandler()
    if (order === 'desc') {
      all = all.reverse()
    }
    const startIndex = cursor
      ? AbstractLevelDbArchivist.findIndexFromCursor(all, cursor) + 1
      : 0
    const result = all.slice(startIndex, limit ? startIndex + limit : undefined)
    return result
  }
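
  // Cursor semantics of nextHandler, with illustrative values (not from this
  // package): given stored sequences ['0a', '0b', '0c'],
  //   nextHandler({ limit: 2 })               -> payloads at '0a' and '0b'
  //   nextHandler({ limit: 2, cursor: '0b' }) -> resumes after '0b', yielding '0c'
  // An unknown cursor makes findIndexFromCursor return Infinity, so the slice
  // (and therefore the returned page) is empty.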

  protected override async startHandler(): Promise<boolean> {
    await super.startHandler()
    // NOTE: We could defer this creation to first access but we
    // want to fail fast here in case something is wrong
    await this.withStore(() => {})
    if (this.config.clearStoreOnStart) {
      await this.clearHandler()
    }
    return true
  }

  protected withDataHashIndex<T>(func: (index: AbstractIndexSubLevel<string>) => Promisable<T>): Promisable<T> {
    return this.withDb(async (db) => {
      const index = db.sublevel<string, Hash>(
        indexSubLevelName(this.storeName, AbstractLevelDbArchivist.dataHashIndexName),
        { keyEncoding: 'utf8', valueEncoding: 'utf8' },
      )
      return await func(index)
    })
  }

  protected withSequenceIndex<T>(func: (index: AbstractIndexSubLevel<Hex>) => Promisable<T>): Promisable<T> {
    return this.withDb(async (db) => {
      const index = db.sublevel<Hex, Hash>(
        indexSubLevelName(this.storeName, AbstractLevelDbArchivist.sequenceIndexName),
        { keyEncoding: 'utf8', valueEncoding: 'utf8' },
      )
      return await func(index)
    })
  }

  protected async withStore<T>(func: (store: AbstractPayloadSubLevel) => Promisable<T>): Promise<T> {
    return await this.withDb(async (db) => {
      const subLevel: AbstractPayloadSubLevel = db.sublevel<Hash, WithStorageMeta<Payload>>(this.storeName, { keyEncoding: 'utf8', valueEncoding: 'json' })
      return await func(subLevel)
    })
  }

  protected abstract withDb<T>(func: (db: AbstractPayloadLevel) => Promisable<T>): Promisable<T>
}

@creatableModule()
export class LevelDbArchivist extends AbstractLevelDbArchivist {
  private dbMutex = new Mutex()
  protected override async withDb<T>(func: (db: AbstractPayloadLevel) => Promisable<T>): Promise<T> {
    return await this.dbMutex.runExclusive(async () => {
      const db: AbstractPayloadLevel = new Level<Hash, WithStorageMeta<Payload>>(this.folderPath, { keyEncoding: 'utf8', valueEncoding: 'json' })
      try {
        return await func(db)
      } finally {
        await db.close()
      }
    })
  }
}
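
Note that the concrete LevelDbArchivist opens a fresh Level handle for every operation and closes it in a finally block, serialized through a Mutex: safe for concurrent callers within one process, at the cost of reopening the store on each call. For orientation, a minimal usage sketch follows. It is not part of the diff: it assumes the async `create` factory and the `insert`/`next` instance methods that XYO archivist modules generally expose, and all values are illustrative.

  import { LevelDbArchivist, LevelDbArchivistConfigSchema } from '@xyo-network/archivist-leveldb'

  // Assumed factory: XYO modules are typically constructed via an async `create`.
  const archivist = await LevelDbArchivist.create({
    config: {
      schema: LevelDbArchivistConfigSchema,
      location: './data', // parent folder; folderPath resolves to `${location}/${storeName}`
      dbName: 'example-db', // required by the dbName accessor
      storeName: 'payloads', // payload sub-level name, also part of folderPath
    },
  })

  // Assumed instance API: insert payloads, then page through them by sequence.
  await archivist.insert([{ schema: 'network.xyo.id', salt: '1' }])
  const firstPage = await archivist.next({ limit: 10 })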
package/src/Config.ts
ADDED
@@ -0,0 +1,26 @@
import type { ArchivistConfig } from '@xyo-network/archivist-model'

import { LevelDbArchivistSchema } from './Schema.ts'

export type LevelDbArchivistConfigSchema = `${LevelDbArchivistSchema}.config`
export const LevelDbArchivistConfigSchema: LevelDbArchivistConfigSchema = `${LevelDbArchivistSchema}.config`

export type LevelDbArchivistConfig<TStoreName extends string = string> = ArchivistConfig<{
  /**
   * If true, the store will be cleared on start
   */
  clearStoreOnStart?: boolean
  /**
   * The database name - also used as the filename for the db
   */
  dbName?: string
  /**
   * The location where the folder for the db will be created
   */
  location?: string
  schema: LevelDbArchivistConfigSchema
  /**
   * The name of the object store - becomes a sub-level
   */
  storeName?: TStoreName
}>
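
A literal satisfying LevelDbArchivistConfig might look like the following sketch (not part of the diff; values are illustrative, and it assumes the type and schema constant are re-exported from the package index):

  import type { LevelDbArchivistConfig } from '@xyo-network/archivist-leveldb'
  import { LevelDbArchivistConfigSchema } from '@xyo-network/archivist-leveldb'

  const config: LevelDbArchivistConfig<'payloads'> = {
    schema: LevelDbArchivistConfigSchema,
    clearStoreOnStart: false, // keep previously stored payloads across restarts
    location: '/tmp/xyo', // parent directory for the store folder
    dbName: 'archivist.db', // db name
    storeName: 'payloads', // sub-level (and index-prefix) name
  }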
package/src/Params.ts
ADDED
@@ -0,0 +1,8 @@
import type { ArchivistParams } from '@xyo-network/archivist-model'
import type { AnyConfigSchema } from '@xyo-network/module-model'

import type { LevelDbArchivistConfig } from './Config.ts'

export type LevelDbArchivistParams = ArchivistParams<AnyConfigSchema<LevelDbArchivistConfig>, {

}>
package/src/Schema.ts
ADDED
package/src/index.ts
ADDED
package/typedoc.json
ADDED