@lodestar/db 1.35.0-dev.c85be4e26c → 1.35.0-dev.c9deb9b59f
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/abstractPrefixedRepository.d.ts.map +1 -0
- package/lib/abstractPrefixedRepository.js +10 -0
- package/lib/abstractPrefixedRepository.js.map +1 -1
- package/lib/abstractRepository.d.ts.map +1 -0
- package/lib/abstractRepository.js +8 -0
- package/lib/abstractRepository.js.map +1 -1
- package/lib/const.d.ts.map +1 -0
- package/lib/controller/index.d.ts +1 -2
- package/lib/controller/index.d.ts.map +1 -0
- package/lib/controller/index.js +1 -1
- package/lib/controller/index.js.map +1 -1
- package/lib/controller/interface.d.ts.map +1 -0
- package/lib/controller/level.d.ts +1 -1
- package/lib/controller/level.d.ts.map +1 -0
- package/lib/controller/level.js +5 -1
- package/lib/controller/level.js.map +1 -1
- package/lib/controller/level_bun.d.ts +35 -0
- package/lib/controller/level_bun.d.ts.map +1 -0
- package/lib/controller/level_bun.js +269 -0
- package/lib/controller/level_bun.js.map +1 -0
- package/lib/controller/metrics.d.ts.map +1 -0
- package/lib/index.d.ts +2 -2
- package/lib/index.d.ts.map +1 -0
- package/lib/index.js +2 -2
- package/lib/index.js.map +1 -1
- package/lib/util.d.ts.map +1 -0
- package/package.json +25 -11
- package/src/abstractPrefixedRepository.ts +244 -0
- package/src/abstractRepository.ts +283 -0
- package/src/const.ts +2 -0
- package/src/controller/index.ts +2 -0
- package/src/controller/interface.ts +62 -0
- package/src/controller/level.ts +246 -0
- package/src/controller/level_bun.ts +288 -0
- package/src/controller/metrics.ts +10 -0
- package/src/index.ts +5 -0
- package/src/util.ts +50 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@lodestar/db",
|
|
3
|
-
"version": "1.35.0-dev.
|
|
3
|
+
"version": "1.35.0-dev.c9deb9b59f",
|
|
4
4
|
"description": "DB modules of Lodestar",
|
|
5
5
|
"author": "ChainSafe Systems",
|
|
6
6
|
"homepage": "https://github.com/ChainSafe/lodestar#readme",
|
|
@@ -12,14 +12,27 @@
|
|
|
12
12
|
"url": "https://github.com/ChainSafe/lodestar/issues"
|
|
13
13
|
},
|
|
14
14
|
"type": "module",
|
|
15
|
-
"exports":
|
|
15
|
+
"exports": {
|
|
16
|
+
".": {
|
|
17
|
+
"bun": "./src/index.ts",
|
|
18
|
+
"import": "./lib/index.js"
|
|
19
|
+
},
|
|
20
|
+
"./controller/level": {
|
|
21
|
+
"bun": "./src/controller/level_bun.ts",
|
|
22
|
+
"import": "./lib/controller/level.js"
|
|
23
|
+
}
|
|
24
|
+
},
|
|
25
|
+
"imports": {
|
|
26
|
+
"#controller/level": {
|
|
27
|
+
"bun": "./src/controller/level_bun.ts",
|
|
28
|
+
"import": "./src/controller/level.js"
|
|
29
|
+
}
|
|
30
|
+
},
|
|
16
31
|
"types": "./lib/index.d.ts",
|
|
17
32
|
"files": [
|
|
18
|
-
"
|
|
19
|
-
"lib
|
|
20
|
-
"
|
|
21
|
-
"*.d.ts",
|
|
22
|
-
"*.js"
|
|
33
|
+
"src",
|
|
34
|
+
"lib",
|
|
35
|
+
"!**/*.tsbuildinfo"
|
|
23
36
|
],
|
|
24
37
|
"scripts": {
|
|
25
38
|
"clean": "rm -rf lib && rm -f *.tsbuildinfo",
|
|
@@ -37,13 +50,14 @@
|
|
|
37
50
|
},
|
|
38
51
|
"dependencies": {
|
|
39
52
|
"@chainsafe/ssz": "^1.2.2",
|
|
40
|
-
"@lodestar/
|
|
41
|
-
"@lodestar/
|
|
53
|
+
"@lodestar/bun": "git+https://github.com/ChainSafe/lodestar-bun.git",
|
|
54
|
+
"@lodestar/config": "1.35.0-dev.c9deb9b59f",
|
|
55
|
+
"@lodestar/utils": "1.35.0-dev.c9deb9b59f",
|
|
42
56
|
"classic-level": "^1.4.1",
|
|
43
57
|
"it-all": "^3.0.4"
|
|
44
58
|
},
|
|
45
59
|
"devDependencies": {
|
|
46
|
-
"@lodestar/logger": "1.35.0-dev.
|
|
60
|
+
"@lodestar/logger": "1.35.0-dev.c9deb9b59f"
|
|
47
61
|
},
|
|
48
|
-
"gitHead": "
|
|
62
|
+
"gitHead": "9f18a0bd68957d2324cc3b05f36910ab08f28672"
|
|
49
63
|
}
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import {Type} from "@chainsafe/ssz";
|
|
2
|
+
import {ChainForkConfig} from "@lodestar/config";
|
|
3
|
+
import {BUCKET_LENGTH} from "./const.js";
|
|
4
|
+
import {KeyValue} from "./controller/index.js";
|
|
5
|
+
import {Db, DbReqOpts, FilterOptions} from "./controller/interface.js";
|
|
6
|
+
import {encodeKey} from "./util.js";
|
|
7
|
+
|
|
8
|
+
type Id = Uint8Array | string | number | bigint;

/**
 * Repository is a high-level kv storage.
 * This abstract repository is designed to store items under different prefixes,
 * especially when the prefix data is not available in the object to be stored.
 *
 * By default, values are SSZ-encoded and ids are derived from the value's hashTreeRoot (see getId).
 */
export abstract class PrefixedRepository<P, I extends Id, T> {
  /** Options forwarded to every db call; carries the bucketId used for metrics labeling. */
  private readonly dbReqOpts: DbReqOpts;
  /** Inclusive range for the minimum key for the bucket */
  private readonly minKey: Uint8Array;
  /** Exclusive range for the maximum key for the bucket */
  private readonly maxKey: Uint8Array;

  protected constructor(
    protected config: ChainForkConfig,
    protected db: Db,
    protected bucket: number,
    protected type: Type<T>,
    private readonly bucketId: string
  ) {
    this.dbReqOpts = {bucketId: this.bucketId};
    // Bucket key space is [encodeKey(bucket, empty), encodeKey(bucket + 1, empty)): min inclusive, max exclusive
    this.minKey = encodeKey(bucket, Buffer.alloc(0));
    this.maxKey = encodeKey(bucket + 1, Buffer.alloc(0));
  }

  /** Encode a (prefix, id) pair into raw key bytes (without the bucket prefix). */
  abstract encodeKeyRaw(prefix: P, id: I): Uint8Array;
  /** Decode raw key bytes (without the bucket prefix) back into a (prefix, id) pair. */
  abstract decodeKeyRaw(raw: Uint8Array): {prefix: P; id: I};
  /**
   * Max key is inclusive
   */
  abstract getMaxKeyRaw(prefix: P): Uint8Array;
  /**
   * Min key is inclusive
   */
  abstract getMinKeyRaw(prefix: P): Uint8Array;

  /** SSZ-serialize a value with the repository's type. */
  protected encodeValue(value: T): Uint8Array {
    return this.type.serialize(value);
  }

  /** SSZ-deserialize a value with the repository's type. */
  protected decodeValue(data: Uint8Array): T {
    return this.type.deserialize(data);
  }

  /** Prepend the bucket prefix to a raw key to form the full db key. */
  protected wrapKey(raw: Uint8Array): Uint8Array {
    return encodeKey(this.bucket, raw);
  }

  /** Strip the bucket prefix from a full db key. */
  protected unwrapKey(key: Uint8Array): Uint8Array {
    return key.slice(BUCKET_LENGTH);
  }

  // The Id can be inferred from the value
  getId(value: T): I {
    return this.type.hashTreeRoot(value) as I;
  }

  /** Get and decode a single value, or null if the key is absent. */
  async get(prefix: P, id: I): Promise<T | null> {
    const key = this.wrapKey(this.encodeKeyRaw(prefix, id));
    const v = await this.db.get(key, this.dbReqOpts);
    return v ? this.decodeValue(v) : null;
  }

  /** Batch get and decode; positions with no stored value come back as undefined. */
  async getMany(prefix: P, ids: I[]): Promise<(T | undefined)[]> {
    const keys = [];
    for (const id of ids) {
      keys.push(this.wrapKey(this.encodeKeyRaw(prefix, id)));
    }
    const values = await this.db.getMany(keys, this.dbReqOpts);

    const result = [];
    for (const value of values) {
      result.push(value ? this.decodeValue(value) : undefined);
    }

    return result;
  }

  /** Batch get without decoding; positions with no stored value come back as undefined. */
  async getManyBinary(prefix: P, ids: I[]): Promise<(Uint8Array | undefined)[]> {
    const keys = [];
    for (const id of ids) {
      keys.push(this.wrapKey(this.encodeKeyRaw(prefix, id)));
    }
    return await this.db.getMany(keys, this.dbReqOpts);
  }

  /** Get the raw serialized bytes for a single key, or null if absent. */
  async getBinary(prefix: P, id: I): Promise<Uint8Array | null> {
    const key = this.wrapKey(this.encodeKeyRaw(prefix, id));
    return await this.db.get(key, this.dbReqOpts);
  }

  /** Store an item; its id is derived from the item via getId(). */
  async put(prefix: P, item: T): Promise<void> {
    const id = this.getId(item);
    const key = this.wrapKey(this.encodeKeyRaw(prefix, id));
    await this.db.put(key, this.encodeValue(item), this.dbReqOpts);
  }

  /** Store several items in one batch write; ids are derived via getId(). */
  async putMany(prefix: P, items: T[]): Promise<void> {
    const batch: KeyValue<Uint8Array, Uint8Array>[] = [];
    for (const item of items) {
      const id = this.getId(item);
      const key = this.wrapKey(this.encodeKeyRaw(prefix, id));
      batch.push({key, value: this.encodeValue(item)});
    }
    await this.db.batchPut(batch, this.dbReqOpts);
  }

  /** Store pre-serialized bytes under an explicit id. */
  async putBinary(prefix: P, id: I, bytes: Uint8Array): Promise<void> {
    const key = this.wrapKey(this.encodeKeyRaw(prefix, id));
    await this.db.put(key, bytes, this.dbReqOpts);
  }

  /** Store several pre-serialized values in one batch write; each item's `key` field is the id. */
  async putManyBinary(prefix: P, items: KeyValue<I, Uint8Array>[]): Promise<void> {
    const batch: KeyValue<Uint8Array, Uint8Array>[] = [];
    for (const {key, value} of items) {
      batch.push({key: this.wrapKey(this.encodeKeyRaw(prefix, key)), value: value});
    }
    await this.db.batchPut(batch, this.dbReqOpts);
  }

  /** Delete a single entry. */
  async delete(prefix: P, id: I): Promise<void> {
    const key = this.wrapKey(this.encodeKeyRaw(prefix, id));
    await this.db.delete(key, this.dbReqOpts);
  }

  /** Delete every entry within the key range of one or more prefixes. */
  async deleteMany(prefix: P | P[]): Promise<void> {
    const keys: Uint8Array[][] = [];

    for (const p of Array.isArray(prefix) ? prefix : [prefix]) {
      // Both bounds inclusive, per the getMinKeyRaw/getMaxKeyRaw contracts
      const prefixedKeys = await this.db.keys({
        gte: this.wrapKey(this.getMinKeyRaw(p)),
        lte: this.wrapKey(this.getMaxKeyRaw(p)),
        bucketId: this.bucketId,
      });
      keys.push(prefixedKeys);
    }

    await this.db.batchDelete(keys.flat(), this.dbReqOpts);
  }

  /** Stream decoded values for one or more prefixes, prefix by prefix. */
  async *valuesStream(prefix: P | P[]): AsyncIterable<T> {
    for (const p of Array.isArray(prefix) ? prefix : [prefix]) {
      for await (const vb of this.db.valuesStream({
        gte: this.wrapKey(this.getMinKeyRaw(p)),
        lte: this.wrapKey(this.getMaxKeyRaw(p)),
        bucketId: this.bucketId,
      })) {
        yield this.decodeValue(vb);
      }
    }
  }

  /** Stream raw (undecoded) values for one or more prefixes, with each entry's decoded (prefix, id). */
  async *valuesStreamBinary(prefix: P | P[]): AsyncIterable<{prefix: P; id: I; value: Uint8Array}> {
    for (const p of Array.isArray(prefix) ? prefix : [prefix]) {
      for await (const {key, value} of this.db.entriesStream({
        gte: this.wrapKey(this.getMinKeyRaw(p)),
        lte: this.wrapKey(this.getMaxKeyRaw(p)),
        bucketId: this.bucketId,
      })) {
        // NOTE: destructured `prefix` shadows the method parameter within this loop body
        const {prefix, id} = this.decodeKeyRaw(this.unwrapKey(key));

        yield {
          prefix,
          id,
          value,
        };
      }
    }
  }

  /** Stream decoded entries for one or more prefixes. */
  async *entriesStream(prefix: P | P[]): AsyncIterable<{prefix: P; id: I; value: T}> {
    for (const v of Array.isArray(prefix) ? prefix : [prefix]) {
      for await (const {key, value} of this.db.entriesStream({
        gte: this.wrapKey(this.getMinKeyRaw(v)),
        lte: this.wrapKey(this.getMaxKeyRaw(v)),
        bucketId: this.bucketId,
      })) {
        // NOTE: destructured `prefix` shadows the method parameter within this loop body
        const {prefix, id} = this.decodeKeyRaw(this.unwrapKey(key));

        yield {
          prefix,
          id,
          value: this.decodeValue(value),
        };
      }
    }
  }

  /** Stream raw (undecoded) entries for one or more prefixes. */
  async *entriesStreamBinary(prefix: P | P[]): AsyncIterable<{prefix: P; id: I; value: Uint8Array}> {
    for (const v of Array.isArray(prefix) ? prefix : [prefix]) {
      for await (const {key, value} of this.db.entriesStream({
        gte: this.wrapKey(this.getMinKeyRaw(v)),
        lte: this.wrapKey(this.getMaxKeyRaw(v)),
        bucketId: this.bucketId,
      })) {
        // NOTE: destructured `prefix` shadows the method parameter within this loop body
        const {prefix, id} = this.decodeKeyRaw(this.unwrapKey(key));

        yield {
          prefix,
          id: id,
          value,
        };
      }
    }
  }

  /** List decoded (prefix, id) keys; unset bounds default to the full bucket range. */
  async keys(opts?: FilterOptions<{prefix: P; id: I}>): Promise<{prefix: P; id: I}[]> {
    const optsBuff: FilterOptions<Uint8Array> = {
      bucketId: this.bucketId,
    };

    // Lower bound: gte wins over gt; default is the bucket's inclusive min key
    if (opts?.gte !== undefined) {
      optsBuff.gte = this.wrapKey(this.encodeKeyRaw(opts.gte.prefix, opts.gte.id));
    } else if (opts?.gt !== undefined) {
      optsBuff.gt = this.wrapKey(this.encodeKeyRaw(opts.gt.prefix, opts.gt.id));
    } else {
      optsBuff.gte = this.minKey;
    }

    // Upper bound: lte wins over lt; default is the bucket's exclusive max key
    if (opts?.lte !== undefined) {
      optsBuff.lte = this.wrapKey(this.encodeKeyRaw(opts.lte.prefix, opts.lte.id));
    } else if (opts?.lt !== undefined) {
      optsBuff.lt = this.wrapKey(this.encodeKeyRaw(opts.lt.prefix, opts.lt.id));
    } else {
      optsBuff.lt = this.maxKey;
    }

    if (opts?.reverse !== undefined) optsBuff.reverse = opts.reverse;
    if (opts?.limit !== undefined) optsBuff.limit = opts.limit;

    const data = await this.db.keys(optsBuff);
    return (data ?? []).map((data) => this.decodeKeyRaw(this.unwrapKey(data)));
  }
}
|
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
import {Type} from "@chainsafe/ssz";
|
|
2
|
+
import {ChainForkConfig} from "@lodestar/config";
|
|
3
|
+
import {BUCKET_LENGTH} from "./const.js";
|
|
4
|
+
import {FilterOptions, KeyValue} from "./controller/index.js";
|
|
5
|
+
import {Db, DbReqOpts} from "./controller/interface.js";
|
|
6
|
+
import {encodeKey as _encodeKey} from "./util.js";
|
|
7
|
+
|
|
8
|
+
export type Id = Uint8Array | string | number | bigint;

/**
 * Repository is a high-level kv storage
 * managing a Uint8Array to Uint8Array kv database.
 * It translates typed keys and values to the Uint8Arrays required by the underlying database.
 *
 * By default, values are SSZ-encoded and
 * indexed by root.
 */
export abstract class Repository<I extends Id, T> {
  /** Options forwarded to every db call; carries the bucketId used for metrics labeling. */
  private readonly dbReqOpts: DbReqOpts;

  /** Inclusive lower bound of this bucket's key space. */
  private readonly minKey: Uint8Array;
  /** Exclusive upper bound of this bucket's key space. */
  private readonly maxKey: Uint8Array;

  protected constructor(
    protected config: ChainForkConfig,
    protected db: Db,
    protected bucket: number,
    protected type: Type<T>,
    protected readonly bucketId: string
  ) {
    this.dbReqOpts = {bucketId: this.bucketId};
    // Bucket key space is [_encodeKey(bucket, empty), _encodeKey(bucket + 1, empty))
    this.minKey = _encodeKey(bucket, Buffer.alloc(0));
    this.maxKey = _encodeKey(bucket + 1, Buffer.alloc(0));
  }

  /** SSZ-serialize a value with the repository's type. */
  encodeValue(value: T): Uint8Array {
    return this.type.serialize(value);
  }

  /** SSZ-deserialize a value with the repository's type. */
  decodeValue(data: Uint8Array): T {
    return this.type.deserialize(data);
  }

  /** Prepend the bucket prefix to an id to form the full db key. */
  encodeKey(id: I): Uint8Array {
    return _encodeKey(this.bucket, id);
  }

  /** Strip the bucket prefix from a full db key to recover the id bytes. */
  decodeKey(key: Uint8Array): I {
    return key.slice(BUCKET_LENGTH) as I;
  }

  /** Get and decode a single value, or null if the key is absent. */
  async get(id: I): Promise<T | null> {
    const value = await this.db.get(this.encodeKey(id), this.dbReqOpts);
    if (!value) return null;
    return this.decodeValue(value);
  }

  /** Get the raw serialized bytes for a single key, or null if absent. */
  async getBinary(id: I): Promise<Uint8Array | null> {
    const value = await this.db.get(this.encodeKey(id), this.dbReqOpts);
    if (!value) return null;
    return value;
  }

  /** True if a value exists for this id. Note: fetches and decodes the full value to decide. */
  async has(id: I): Promise<boolean> {
    return (await this.get(id)) !== null;
  }

  /** Store a value under an explicit id. */
  async put(id: I, value: T): Promise<void> {
    await this.db.put(this.encodeKey(id), this.encodeValue(value), this.dbReqOpts);
  }

  /** Store pre-serialized bytes under an explicit id. */
  async putBinary(id: I, value: Uint8Array): Promise<void> {
    await this.db.put(this.encodeKey(id), value, this.dbReqOpts);
  }

  /** Delete a single entry by id. */
  async delete(id: I): Promise<void> {
    await this.db.delete(this.encodeKey(id), this.dbReqOpts);
  }

  // The Id can be inferred from the value
  getId(value: T): I {
    return this.type.hashTreeRoot(value) as I;
  }

  /** Store a value under its derived id (see getId). */
  async add(value: T): Promise<void> {
    await this.put(this.getId(value), value);
  }

  /** Delete a value by its derived id (see getId). */
  async remove(value: T): Promise<void> {
    await this.delete(this.getId(value));
  }

  /** Store several key/value pairs; a single-item batch takes the non-batch path. */
  async batchPut(items: KeyValue<I, T>[]): Promise<void> {
    if (items.length === 1) {
      return this.put(items[0].key, items[0].value);
    }

    await this.db.batchPut(
      Array.from({length: items.length}, (_, i) => ({
        key: this.encodeKey(items[i].key),
        value: this.encodeValue(items[i].value),
      })),
      this.dbReqOpts
    );
  }

  // Similar to batchPut but we support value as Uint8Array
  async batchPutBinary(items: KeyValue<I, Uint8Array>[]): Promise<void> {
    if (items.length === 1) {
      return this.db.put(this.encodeKey(items[0].key), items[0].value, this.dbReqOpts);
    }

    await this.db.batchPut(
      Array.from({length: items.length}, (_, i) => ({
        key: this.encodeKey(items[i].key),
        value: items[i].value,
      })),
      this.dbReqOpts
    );
  }

  /** Delete several ids; a single-id batch takes the non-batch path. */
  async batchDelete(ids: I[]): Promise<void> {
    if (ids.length === 1) {
      return this.delete(ids[0]);
    }

    await this.db.batchDelete(
      Array.from({length: ids.length}, (_, i) => this.encodeKey(ids[i])),
      this.dbReqOpts
    );
  }

  /** Store several values under their derived ids (see getId). */
  async batchAdd(values: T[]): Promise<void> {
    // handle single value in batchPut
    await this.batchPut(
      Array.from({length: values.length}, (_, i) => ({
        key: this.getId(values[i]),
        value: values[i],
      }))
    );
  }

  /** Delete several values by their derived ids (see getId). */
  async batchRemove(values: T[]): Promise<void> {
    // handle single value in batchDelete
    await this.batchDelete(Array.from({length: values.length}, (_ignored, i) => this.getId(values[i])));
  }

  /** List decoded ids matching the filter; unset bounds default to the whole bucket. */
  async keys(opts?: FilterOptions<I>): Promise<I[]> {
    const data = await this.db.keys(this.dbFilterOptions(opts));
    return (data ?? []).map((data) => this.decodeKey(data));
  }

  /** Stream decoded ids matching the filter. */
  async *keysStream(opts?: FilterOptions<I>): AsyncIterable<I> {
    const keysStream = this.db.keysStream(this.dbFilterOptions(opts));
    // Bind once so the method keeps its `this` when called inside the loop
    const decodeKey = this.decodeKey.bind(this);
    for await (const key of keysStream) {
      yield decodeKey(key);
    }
  }

  /** List decoded values matching the filter. */
  async values(opts?: FilterOptions<I>): Promise<T[]> {
    const data = await this.db.values(this.dbFilterOptions(opts));
    return (data ?? []).map((data) => this.decodeValue(data));
  }

  /** Stream decoded values matching the filter. */
  async *valuesStream(opts?: FilterOptions<I>): AsyncIterable<T> {
    const valuesStream = this.db.valuesStream(this.dbFilterOptions(opts));
    const decodeValue = this.decodeValue.bind(this);
    for await (const value of valuesStream) {
      yield decodeValue(value);
    }
  }

  /** Stream raw key/value pairs without decoding either side. */
  async *binaryEntriesStream(opts?: FilterOptions<I>): AsyncIterable<KeyValue<Uint8Array, Uint8Array>> {
    yield* this.db.entriesStream(this.dbFilterOptions(opts));
  }

  /** List decoded key/value pairs matching the filter. */
  async entries(opts?: FilterOptions<I>): Promise<KeyValue<I, T>[]> {
    const data = await this.db.entries(this.dbFilterOptions(opts));
    return (data ?? []).map((data) => ({
      key: this.decodeKey(data.key),
      value: this.decodeValue(data.value),
    }));
  }

  /** Stream decoded key/value pairs matching the filter. */
  async *entriesStream(opts?: FilterOptions<I>): AsyncIterable<KeyValue<I, T>> {
    const entriesStream = this.db.entriesStream(this.dbFilterOptions(opts));
    const decodeKey = this.decodeKey.bind(this);
    const decodeValue = this.decodeValue.bind(this);
    for await (const entry of entriesStream) {
      yield {
        key: decodeKey(entry.key),
        value: decodeValue(entry.value),
      };
    }
  }

  /** First id in key order, or null when the bucket is empty. */
  async firstKey(): Promise<I | null> {
    // Metrics accounted in this.keys()
    const keys = await this.keys({limit: 1, bucketId: this.bucketId});
    if (!keys.length) {
      return null;
    }
    return keys[0];
  }

  /** Last id in key order, or null when the bucket is empty. */
  async lastKey(): Promise<I | null> {
    // Metrics accounted in this.keys()
    const keys = await this.keys({limit: 1, reverse: true, bucketId: this.bucketId});
    if (!keys.length) {
      return null;
    }
    return keys[0];
  }

  /** Value at the first key, or null when the bucket is empty. */
  async firstValue(): Promise<T | null> {
    // Metrics accounted in this.values()
    const values = await this.values({limit: 1, bucketId: this.bucketId});
    if (!values.length) {
      return null;
    }
    return values[0];
  }

  /** Value at the last key, or null when the bucket is empty. */
  async lastValue(): Promise<T | null> {
    // Metrics accounted in this.values()
    const values = await this.values({limit: 1, reverse: true, bucketId: this.bucketId});
    if (!values.length) {
      return null;
    }
    return values[0];
  }

  /** Entry at the first key, or null when the bucket is empty. */
  async firstEntry(): Promise<KeyValue<I, T> | null> {
    // Metrics accounted in this.entries()
    const entries = await this.entries({limit: 1, bucketId: this.bucketId});
    if (!entries.length) {
      return null;
    }
    return entries[0];
  }

  /** Entry at the last key, or null when the bucket is empty. */
  async lastEntry(): Promise<KeyValue<I, T> | null> {
    // Metrics accounted in this.entries()
    const entries = await this.entries({limit: 1, reverse: true, bucketId: this.bucketId});
    if (!entries.length) {
      return null;
    }
    return entries[0];
  }

  /**
   * Transforms opts from I to Uint8Array
   */
  protected dbFilterOptions(opts?: FilterOptions<I>): FilterOptions<Uint8Array> {
    const optsBuff: FilterOptions<Uint8Array> = {
      bucketId: this.bucketId,
    };

    // Set at least one max key: lt/lte bound the upper end of the range;
    // default is the bucket's exclusive max key
    if (opts?.lt !== undefined) {
      optsBuff.lt = this.encodeKey(opts.lt);
    } else if (opts?.lte !== undefined) {
      optsBuff.lte = this.encodeKey(opts.lte);
    } else {
      optsBuff.lt = this.maxKey;
    }

    // Set at least one min key: gt/gte bound the lower end of the range;
    // default is the bucket's inclusive min key
    if (opts?.gt !== undefined) {
      optsBuff.gt = this.encodeKey(opts.gt);
    } else if (opts?.gte !== undefined) {
      optsBuff.gte = this.encodeKey(opts.gte);
    } else {
      optsBuff.gte = this.minKey;
    }

    if (opts?.reverse !== undefined) optsBuff.reverse = opts.reverse;
    if (opts?.limit !== undefined) optsBuff.limit = opts.limit;

    return optsBuff;
  }
}
|
package/src/const.ts
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import {LevelDbControllerMetrics} from "./metrics.js";
|
|
2
|
+
|
|
3
|
+
/** Shortcut for Uint8Array based DatabaseController */
export type Db = DatabaseController<Uint8Array, Uint8Array>;

export type DatabaseOptions = {
  // Database name — presumably a filesystem path for level-backed controllers; confirm per implementation
  name: string;
};

/** Range and iteration options for key/value/entry queries. */
export interface FilterOptions<K> {
  /** Exclusive lower bound */
  gt?: K;
  /** Inclusive lower bound */
  gte?: K;
  /** Exclusive upper bound */
  lt?: K;
  /** Inclusive upper bound */
  lte?: K;
  /** Iterate in reverse key order */
  reverse?: boolean;
  /** Maximum number of results */
  limit?: number;
  /** For metrics */
  bucketId?: string;
}

export type DbReqOpts = {
  /** For metrics */
  bucketId?: string;
};

/** A single key/value pair. */
export interface KeyValue<K, V> {
  key: K;
  value: V;
}

/** Low-level database controller: a typed kv store with batch writes and range iteration. */
export interface DatabaseController<K, V> {
  // service start / stop

  close(): Promise<void>;

  /** To inject metrics after CLI initialization */
  setMetrics(metrics: LevelDbControllerMetrics): void;

  // Core API

  /** Resolves to null when the key is absent. */
  get(key: K, opts?: DbReqOpts): Promise<V | null>;
  /** Per-key lookup; absent keys yield undefined at their position. */
  getMany(key: K[], opts?: DbReqOpts): Promise<(V | undefined)[]>;

  put(key: K, value: V, opts?: DbReqOpts): Promise<void>;
  delete(key: K, opts?: DbReqOpts): Promise<void>;

  // Batch operations

  batchPut(items: KeyValue<K, V>[], opts?: DbReqOpts): Promise<void>;
  batchDelete(keys: K[], opts?: DbReqOpts): Promise<void>;

  // Iterate over entries

  keysStream(opts?: FilterOptions<K>): AsyncIterable<K>;
  keys(opts?: FilterOptions<K>): Promise<K[]>;

  valuesStream(opts?: FilterOptions<K>): AsyncIterable<V>;
  values(opts?: FilterOptions<K>): Promise<V[]>;

  entriesStream(opts?: FilterOptions<K>): AsyncIterable<KeyValue<K, V>>;
  entries(opts?: FilterOptions<K>): Promise<KeyValue<K, V>[]>;
}
|