@lodestar/db 1.35.0-dev.f80d2d52da → 1.35.0-dev.fd1dac853d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/abstractPrefixedRepository.d.ts.map +1 -0
- package/lib/abstractPrefixedRepository.js +10 -0
- package/lib/abstractPrefixedRepository.js.map +1 -1
- package/lib/abstractRepository.d.ts.map +1 -0
- package/lib/abstractRepository.js +8 -0
- package/lib/abstractRepository.js.map +1 -1
- package/lib/const.d.ts.map +1 -0
- package/lib/controller/index.d.ts +1 -2
- package/lib/controller/index.d.ts.map +1 -0
- package/lib/controller/index.js +1 -1
- package/lib/controller/index.js.map +1 -1
- package/lib/controller/interface.d.ts.map +1 -0
- package/lib/controller/level.d.ts +1 -1
- package/lib/controller/level.d.ts.map +1 -0
- package/lib/controller/level.js +5 -1
- package/lib/controller/level.js.map +1 -1
- package/lib/controller/level_bun.d.ts +35 -0
- package/lib/controller/level_bun.d.ts.map +1 -0
- package/lib/controller/level_bun.js +269 -0
- package/lib/controller/level_bun.js.map +1 -0
- package/lib/controller/metrics.d.ts.map +1 -0
- package/lib/index.d.ts +2 -2
- package/lib/index.d.ts.map +1 -0
- package/lib/index.js +2 -2
- package/lib/index.js.map +1 -1
- package/lib/util.d.ts.map +1 -0
- package/package.json +25 -11
- package/src/abstractPrefixedRepository.ts +244 -0
- package/src/abstractRepository.ts +283 -0
- package/src/const.ts +2 -0
- package/src/controller/index.ts +2 -0
- package/src/controller/interface.ts +62 -0
- package/src/controller/level.ts +246 -0
- package/src/controller/level_bun.ts +288 -0
- package/src/controller/metrics.ts +10 -0
- package/src/index.ts +5 -0
- package/src/util.ts +50 -0
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
import {ClassicLevel} from "classic-level";
|
|
2
|
+
import {Logger} from "@lodestar/utils";
|
|
3
|
+
import {DatabaseController, DatabaseOptions, DbReqOpts, FilterOptions, KeyValue} from "./interface.js";
|
|
4
|
+
import {LevelDbControllerMetrics} from "./metrics.js";
|
|
5
|
+
|
|
6
|
+
/** Lifecycle state of a controller instance: transitions once from started to closed */
enum Status {
  started = "started",
  closed = "closed",
}

/** Options for LevelDbController; `db` allows injecting a pre-opened ClassicLevel instance (e.g. in tests) */
export interface LevelDBOptions extends DatabaseOptions {
  db?: ClassicLevel<Uint8Array, Uint8Array>;
}

/** Dependencies for LevelDbController.create; metrics may also be injected later via setMetrics() */
export type LevelDbControllerModules = {
  logger: Logger;
  metrics?: LevelDbControllerMetrics | null;
};

/** Metrics label used when the caller does not provide a bucketId */
const BUCKET_ID_UNKNOWN = "unknown";

/** Time between capturing metric for db size, every few minutes is sufficient */
const DB_SIZE_METRIC_INTERVAL_MS = 5 * 60 * 1000;
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* The LevelDB implementation of DB
|
|
27
|
+
*/
|
|
28
|
+
export class LevelDbController implements DatabaseController<Uint8Array, Uint8Array> {
|
|
29
|
+
private status = Status.started;
|
|
30
|
+
|
|
31
|
+
private dbSizeMetricInterval?: NodeJS.Timeout;
|
|
32
|
+
|
|
33
|
+
constructor(
|
|
34
|
+
private readonly logger: Logger,
|
|
35
|
+
private readonly db: ClassicLevel<Uint8Array, Uint8Array>,
|
|
36
|
+
private metrics: LevelDbControllerMetrics | null
|
|
37
|
+
) {
|
|
38
|
+
this.metrics = metrics ?? null;
|
|
39
|
+
|
|
40
|
+
if (this.metrics) {
|
|
41
|
+
this.collectDbSizeMetric();
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
static async create(opts: LevelDBOptions, {metrics, logger}: LevelDbControllerModules): Promise<LevelDbController> {
|
|
46
|
+
const db =
|
|
47
|
+
opts.db ||
|
|
48
|
+
new ClassicLevel(opts.name || "beaconchain", {
|
|
49
|
+
keyEncoding: "binary",
|
|
50
|
+
valueEncoding: "binary",
|
|
51
|
+
multithreading: true,
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
try {
|
|
55
|
+
await db.open();
|
|
56
|
+
} catch (e) {
|
|
57
|
+
if ((e as LevelDbError).cause?.code === "LEVEL_LOCKED") {
|
|
58
|
+
throw new Error("Database already in use by another process");
|
|
59
|
+
}
|
|
60
|
+
throw e;
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
return new LevelDbController(logger, db, metrics ?? null);
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
async close(): Promise<void> {
|
|
67
|
+
if (this.status === Status.closed) return;
|
|
68
|
+
this.status = Status.closed;
|
|
69
|
+
|
|
70
|
+
if (this.dbSizeMetricInterval) {
|
|
71
|
+
clearInterval(this.dbSizeMetricInterval);
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
await this.db.close();
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/** To inject metrics after CLI initialization */
|
|
78
|
+
setMetrics(metrics: LevelDbControllerMetrics): void {
|
|
79
|
+
if (this.metrics !== null) {
|
|
80
|
+
throw Error("metrics can only be set once");
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
this.metrics = metrics;
|
|
84
|
+
if (this.status === Status.started) {
|
|
85
|
+
this.collectDbSizeMetric();
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
async clear(): Promise<void> {
|
|
90
|
+
await this.db.clear();
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
async get(key: Uint8Array, opts?: DbReqOpts): Promise<Uint8Array | null> {
|
|
94
|
+
try {
|
|
95
|
+
this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
96
|
+
this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
97
|
+
return (await this.db.get(key)) as Uint8Array | null;
|
|
98
|
+
} catch (e) {
|
|
99
|
+
if ((e as LevelDbError).code === "LEVEL_NOT_FOUND") {
|
|
100
|
+
return null;
|
|
101
|
+
}
|
|
102
|
+
throw e;
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
/**
|
|
107
|
+
* Return the multiple items in the order of the given keys
|
|
108
|
+
* Will return `null` for the keys which does not exists
|
|
109
|
+
*
|
|
110
|
+
* https://github.com/Level/abstract-level?tab=readme-ov-file#dbgetmanykeys-options
|
|
111
|
+
*/
|
|
112
|
+
async getMany(keys: Uint8Array[], opts?: DbReqOpts): Promise<(Uint8Array | undefined)[]> {
|
|
113
|
+
this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
114
|
+
this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length);
|
|
115
|
+
return await this.db.getMany(keys);
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
put(key: Uint8Array, value: Uint8Array, opts?: DbReqOpts): Promise<void> {
|
|
119
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
120
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
121
|
+
|
|
122
|
+
return this.db.put(key, value);
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
delete(key: Uint8Array, opts?: DbReqOpts): Promise<void> {
|
|
126
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
127
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
128
|
+
|
|
129
|
+
return this.db.del(key);
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
batchPut(items: KeyValue<Uint8Array, Uint8Array>[], opts?: DbReqOpts): Promise<void> {
|
|
133
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
134
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, items.length);
|
|
135
|
+
|
|
136
|
+
return this.db.batch(items.map((item) => ({type: "put", key: item.key, value: item.value})));
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
batchDelete(keys: Uint8Array[], opts?: DbReqOpts): Promise<void> {
|
|
140
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
141
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length);
|
|
142
|
+
|
|
143
|
+
return this.db.batch(keys.map((key) => ({type: "del", key: key})));
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
keysStream(opts: FilterOptions<Uint8Array> = {}): AsyncIterable<Uint8Array> {
|
|
147
|
+
return this.metricsIterator(this.db.keys(opts), (key) => key, opts.bucketId ?? BUCKET_ID_UNKNOWN);
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
valuesStream(opts: FilterOptions<Uint8Array> = {}): AsyncIterable<Uint8Array> {
|
|
151
|
+
return this.metricsIterator(this.db.values(opts), (value) => value, opts.bucketId ?? BUCKET_ID_UNKNOWN);
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
entriesStream(opts: FilterOptions<Uint8Array> = {}): AsyncIterable<KeyValue<Uint8Array, Uint8Array>> {
|
|
155
|
+
return this.metricsIterator(
|
|
156
|
+
this.db.iterator(opts),
|
|
157
|
+
(entry) => ({key: entry[0], value: entry[1]}),
|
|
158
|
+
opts.bucketId ?? BUCKET_ID_UNKNOWN
|
|
159
|
+
);
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
keys(opts: FilterOptions<Uint8Array> = {}): Promise<Uint8Array[]> {
|
|
163
|
+
return this.metricsAll(this.db.keys(opts).all(), opts.bucketId ?? BUCKET_ID_UNKNOWN);
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
values(opts: FilterOptions<Uint8Array> = {}): Promise<Uint8Array[]> {
|
|
167
|
+
return this.metricsAll(this.db.values(opts).all(), opts.bucketId ?? BUCKET_ID_UNKNOWN);
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
async entries(opts: FilterOptions<Uint8Array> = {}): Promise<KeyValue<Uint8Array, Uint8Array>[]> {
|
|
171
|
+
const entries = await this.metricsAll(this.db.iterator(opts).all(), opts.bucketId ?? BUCKET_ID_UNKNOWN);
|
|
172
|
+
return entries.map((entry) => ({key: entry[0], value: entry[1]}));
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
/**
|
|
176
|
+
* Get the approximate number of bytes of file system space used by the range [start..end).
|
|
177
|
+
* The result might not include recently written data.
|
|
178
|
+
*/
|
|
179
|
+
approximateSize(start: Uint8Array, end: Uint8Array): Promise<number> {
|
|
180
|
+
return this.db.approximateSize(start, end);
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
/**
|
|
184
|
+
* Manually trigger a database compaction in the range [start..end].
|
|
185
|
+
*/
|
|
186
|
+
compactRange(start: Uint8Array, end: Uint8Array): Promise<void> {
|
|
187
|
+
return this.db.compactRange(start, end);
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
/** Capture metrics for db.iterator, db.keys, db.values .all() calls */
|
|
191
|
+
private async metricsAll<T>(promise: Promise<T[]>, bucket: string): Promise<T[]> {
|
|
192
|
+
this.metrics?.dbReadReq.inc({bucket}, 1);
|
|
193
|
+
const items = await promise;
|
|
194
|
+
this.metrics?.dbReadItems.inc({bucket}, items.length);
|
|
195
|
+
return items;
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
/** Capture metrics for db.iterator, db.keys, db.values AsyncIterable calls */
|
|
199
|
+
private async *metricsIterator<T, K>(
|
|
200
|
+
iterator: AsyncIterable<T>,
|
|
201
|
+
getValue: (item: T) => K,
|
|
202
|
+
bucket: string
|
|
203
|
+
): AsyncIterable<K> {
|
|
204
|
+
this.metrics?.dbReadReq.inc({bucket}, 1);
|
|
205
|
+
|
|
206
|
+
let itemsRead = 0;
|
|
207
|
+
|
|
208
|
+
for await (const item of iterator) {
|
|
209
|
+
// Count metrics after done condition
|
|
210
|
+
itemsRead++;
|
|
211
|
+
|
|
212
|
+
yield getValue(item);
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
this.metrics?.dbReadItems.inc({bucket}, itemsRead);
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
/** Start interval to capture metric for db size */
|
|
219
|
+
private collectDbSizeMetric(): void {
|
|
220
|
+
this.dbSizeMetric();
|
|
221
|
+
this.dbSizeMetricInterval = setInterval(this.dbSizeMetric.bind(this), DB_SIZE_METRIC_INTERVAL_MS);
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
/** Capture metric for db size */
|
|
225
|
+
private dbSizeMetric(): void {
|
|
226
|
+
const timer = this.metrics?.dbApproximateSizeTime.startTimer();
|
|
227
|
+
const minKey = Buffer.from([0x00]);
|
|
228
|
+
const maxKey = Buffer.from([0xff]);
|
|
229
|
+
|
|
230
|
+
this.approximateSize(minKey, maxKey)
|
|
231
|
+
.then((dbSize) => {
|
|
232
|
+
this.metrics?.dbSizeTotal.set(dbSize);
|
|
233
|
+
})
|
|
234
|
+
.catch((e) => {
|
|
235
|
+
this.logger.debug("Error approximating db size", {}, e);
|
|
236
|
+
})
|
|
237
|
+
.finally(timer);
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
static async destroy(location: string): Promise<void> {
|
|
241
|
+
return ClassicLevel.destroy(location);
|
|
242
|
+
}
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
/**
 * From https://www.npmjs.com/package/level
 * Shape of errors thrown by the level bindings as used above:
 * `code` is checked after a failed get(); `cause.code` after a failed open()
 * (lock contention with another process).
 */
type LevelDbError = {code: "LEVEL_NOT_FOUND"; cause?: {code: "LEVEL_LOCKED"}};
|
|
@@ -0,0 +1,288 @@
|
|
|
1
|
+
import {
|
|
2
|
+
DB,
|
|
3
|
+
dbBatchDelete,
|
|
4
|
+
dbBatchPut,
|
|
5
|
+
dbClose,
|
|
6
|
+
dbDelete,
|
|
7
|
+
dbDestroy,
|
|
8
|
+
dbGet,
|
|
9
|
+
dbIterator,
|
|
10
|
+
dbOpen,
|
|
11
|
+
dbPut,
|
|
12
|
+
iteratorDestroy,
|
|
13
|
+
iteratorKey,
|
|
14
|
+
iteratorNext,
|
|
15
|
+
iteratorSeek,
|
|
16
|
+
iteratorSeekToFirst,
|
|
17
|
+
iteratorValid,
|
|
18
|
+
iteratorValue,
|
|
19
|
+
} from "@lodestar/bun";
|
|
20
|
+
import {Logger} from "@lodestar/utils";
|
|
21
|
+
import {DatabaseController, DatabaseOptions, DbReqOpts, FilterOptions, KeyValue} from "./interface.js";
|
|
22
|
+
import {LevelDbControllerMetrics} from "./metrics.js";
|
|
23
|
+
|
|
24
|
+
/** Dependencies for LevelDbController.create; `logger` is currently unused by this bun implementation */
export type LevelDbControllerModules = {
  logger: Logger;
  metrics?: LevelDbControllerMetrics | null;
};

/** Lifecycle state of a controller instance: transitions once from started to closed */
export enum Status {
  started = "started",
  closed = "closed",
}

/** Metrics label used when the caller does not provide a bucketId */
const BUCKET_ID_UNKNOWN = "unknown";
|
|
35
|
+
|
|
36
|
+
export class LevelDbController implements DatabaseController<Uint8Array, Uint8Array> {
|
|
37
|
+
private status = Status.started;
|
|
38
|
+
|
|
39
|
+
constructor(
|
|
40
|
+
private readonly db: DB,
|
|
41
|
+
private metrics: LevelDbControllerMetrics | null
|
|
42
|
+
) {}
|
|
43
|
+
|
|
44
|
+
static async create(options: DatabaseOptions, {metrics}: LevelDbControllerModules): Promise<LevelDbController> {
|
|
45
|
+
const db = dbOpen(options.name, {create_if_missing: true});
|
|
46
|
+
return new LevelDbController(db, metrics ?? null);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
static async destroy(location: string): Promise<void> {
|
|
50
|
+
dbDestroy(location);
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
async close(): Promise<void> {
|
|
54
|
+
if (this.status === Status.closed) {
|
|
55
|
+
return;
|
|
56
|
+
}
|
|
57
|
+
this.status = Status.closed;
|
|
58
|
+
|
|
59
|
+
dbClose(this.db);
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
setMetrics(metrics: LevelDbControllerMetrics): void {
|
|
63
|
+
if (this.metrics !== null) {
|
|
64
|
+
throw new Error("Metrics already set");
|
|
65
|
+
}
|
|
66
|
+
this.metrics = metrics;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
async get(key: Uint8Array, opts?: DbReqOpts): Promise<Uint8Array | null> {
|
|
70
|
+
this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
71
|
+
this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
72
|
+
|
|
73
|
+
return dbGet(this.db, key);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
async getMany(keys: Uint8Array[], opts?: DbReqOpts): Promise<(Uint8Array | undefined)[]> {
|
|
77
|
+
this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
78
|
+
this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length);
|
|
79
|
+
|
|
80
|
+
return keys.map((key) => dbGet(this.db, key) ?? undefined);
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
async put(key: Uint8Array, value: Uint8Array, opts?: DbReqOpts): Promise<void> {
|
|
84
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
85
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
86
|
+
|
|
87
|
+
dbPut(this.db, key, value);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
async delete(key: Uint8Array, opts?: DbReqOpts): Promise<void> {
|
|
91
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
92
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
93
|
+
|
|
94
|
+
dbDelete(this.db, key);
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
async batchPut(items: KeyValue<Uint8Array, Uint8Array>[], opts?: DbReqOpts): Promise<void> {
|
|
98
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
99
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, items.length);
|
|
100
|
+
|
|
101
|
+
dbBatchPut(this.db, items);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
async batchDelete(keys: Uint8Array[], opts?: DbReqOpts): Promise<void> {
|
|
105
|
+
this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
106
|
+
this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length);
|
|
107
|
+
|
|
108
|
+
dbBatchDelete(this.db, keys);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
keysStream(opts: FilterOptions<Uint8Array> = {}): AsyncIterable<Uint8Array> {
|
|
112
|
+
const iterator = dbIterator(this.db);
|
|
113
|
+
if (opts.gt) {
|
|
114
|
+
iteratorSeek(iterator, opts.gt);
|
|
115
|
+
iteratorNext(iterator);
|
|
116
|
+
} else if (opts.gte) {
|
|
117
|
+
iteratorSeek(iterator, opts.gte);
|
|
118
|
+
} else {
|
|
119
|
+
iteratorSeekToFirst(iterator);
|
|
120
|
+
}
|
|
121
|
+
const bucket = opts.bucketId ?? BUCKET_ID_UNKNOWN;
|
|
122
|
+
const metrics = this.metrics;
|
|
123
|
+
metrics?.dbReadReq.inc({bucket}, 1);
|
|
124
|
+
let itemsRead = 0;
|
|
125
|
+
return (async function* () {
|
|
126
|
+
try {
|
|
127
|
+
while (iteratorValid(iterator)) {
|
|
128
|
+
const key = iteratorKey(iterator);
|
|
129
|
+
if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break;
|
|
130
|
+
if (opts.lte && Buffer.compare(key, opts.lte) > 0) break;
|
|
131
|
+
itemsRead++;
|
|
132
|
+
yield key;
|
|
133
|
+
iteratorNext(iterator);
|
|
134
|
+
}
|
|
135
|
+
} finally {
|
|
136
|
+
metrics?.dbReadItems.inc({bucket}, itemsRead);
|
|
137
|
+
iteratorDestroy(iterator);
|
|
138
|
+
}
|
|
139
|
+
})();
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
async keys(opts: FilterOptions<Uint8Array> = {}): Promise<Uint8Array[]> {
|
|
143
|
+
const iterator = dbIterator(this.db);
|
|
144
|
+
if (opts.gt) {
|
|
145
|
+
iteratorSeek(iterator, opts.gt);
|
|
146
|
+
iteratorNext(iterator);
|
|
147
|
+
} else if (opts.gte) {
|
|
148
|
+
iteratorSeek(iterator, opts.gte);
|
|
149
|
+
} else {
|
|
150
|
+
iteratorSeekToFirst(iterator);
|
|
151
|
+
}
|
|
152
|
+
const keys = [];
|
|
153
|
+
this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
154
|
+
try {
|
|
155
|
+
while (iteratorValid(iterator)) {
|
|
156
|
+
const key = iteratorKey(iterator);
|
|
157
|
+
if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break;
|
|
158
|
+
if (opts.lte && Buffer.compare(key, opts.lte) > 0) break;
|
|
159
|
+
keys.push(key);
|
|
160
|
+
iteratorNext(iterator);
|
|
161
|
+
}
|
|
162
|
+
return keys;
|
|
163
|
+
} finally {
|
|
164
|
+
this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length);
|
|
165
|
+
iteratorDestroy(iterator);
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
valuesStream(opts: FilterOptions<Uint8Array> = {}): AsyncIterable<Uint8Array> {
|
|
170
|
+
const iterator = dbIterator(this.db);
|
|
171
|
+
if (opts.gt) {
|
|
172
|
+
iteratorSeek(iterator, opts.gt);
|
|
173
|
+
iteratorNext(iterator);
|
|
174
|
+
} else if (opts.gte) {
|
|
175
|
+
iteratorSeek(iterator, opts.gte);
|
|
176
|
+
} else {
|
|
177
|
+
iteratorSeekToFirst(iterator);
|
|
178
|
+
}
|
|
179
|
+
const bucket = opts.bucketId ?? BUCKET_ID_UNKNOWN;
|
|
180
|
+
const metrics = this.metrics;
|
|
181
|
+
metrics?.dbReadReq.inc({bucket}, 1);
|
|
182
|
+
let itemsRead = 0;
|
|
183
|
+
return (async function* () {
|
|
184
|
+
try {
|
|
185
|
+
while (iteratorValid(iterator)) {
|
|
186
|
+
const key = iteratorKey(iterator);
|
|
187
|
+
if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break;
|
|
188
|
+
if (opts.lte && Buffer.compare(key, opts.lte) > 0) break;
|
|
189
|
+
itemsRead++;
|
|
190
|
+
const value = iteratorValue(iterator);
|
|
191
|
+
yield value;
|
|
192
|
+
iteratorNext(iterator);
|
|
193
|
+
}
|
|
194
|
+
} finally {
|
|
195
|
+
metrics?.dbReadItems.inc({bucket}, itemsRead);
|
|
196
|
+
iteratorDestroy(iterator);
|
|
197
|
+
}
|
|
198
|
+
})();
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
async values(opts: FilterOptions<Uint8Array> = {}): Promise<Uint8Array[]> {
|
|
202
|
+
const iterator = dbIterator(this.db);
|
|
203
|
+
if (opts.gt) {
|
|
204
|
+
iteratorSeek(iterator, opts.gt);
|
|
205
|
+
iteratorNext(iterator);
|
|
206
|
+
} else if (opts.gte) {
|
|
207
|
+
iteratorSeek(iterator, opts.gte);
|
|
208
|
+
} else {
|
|
209
|
+
iteratorSeekToFirst(iterator);
|
|
210
|
+
}
|
|
211
|
+
const values = [];
|
|
212
|
+
this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
213
|
+
try {
|
|
214
|
+
while (iteratorValid(iterator)) {
|
|
215
|
+
const key = iteratorKey(iterator);
|
|
216
|
+
if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break;
|
|
217
|
+
if (opts.lte && Buffer.compare(key, opts.lte) > 0) break;
|
|
218
|
+
const value = iteratorValue(iterator);
|
|
219
|
+
values.push(value);
|
|
220
|
+
iteratorNext(iterator);
|
|
221
|
+
}
|
|
222
|
+
return values;
|
|
223
|
+
} finally {
|
|
224
|
+
this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, values.length);
|
|
225
|
+
iteratorDestroy(iterator);
|
|
226
|
+
}
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
entriesStream(opts: FilterOptions<Uint8Array> = {}): AsyncIterable<KeyValue<Uint8Array, Uint8Array>> {
|
|
230
|
+
const iterator = dbIterator(this.db);
|
|
231
|
+
if (opts.gt) {
|
|
232
|
+
iteratorSeek(iterator, opts.gt);
|
|
233
|
+
iteratorNext(iterator);
|
|
234
|
+
} else if (opts.gte) {
|
|
235
|
+
iteratorSeek(iterator, opts.gte);
|
|
236
|
+
} else {
|
|
237
|
+
iteratorSeekToFirst(iterator);
|
|
238
|
+
}
|
|
239
|
+
const bucket = opts.bucketId ?? BUCKET_ID_UNKNOWN;
|
|
240
|
+
const metrics = this.metrics;
|
|
241
|
+
metrics?.dbReadReq.inc({bucket}, 1);
|
|
242
|
+
let itemsRead = 0;
|
|
243
|
+
return (async function* () {
|
|
244
|
+
try {
|
|
245
|
+
while (iteratorValid(iterator)) {
|
|
246
|
+
const key = iteratorKey(iterator);
|
|
247
|
+
if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break;
|
|
248
|
+
if (opts.lte && Buffer.compare(key, opts.lte) > 0) break;
|
|
249
|
+
itemsRead++;
|
|
250
|
+
const value = iteratorValue(iterator);
|
|
251
|
+
yield {key, value};
|
|
252
|
+
iteratorNext(iterator);
|
|
253
|
+
}
|
|
254
|
+
} finally {
|
|
255
|
+
metrics?.dbReadItems.inc({bucket}, itemsRead);
|
|
256
|
+
iteratorDestroy(iterator);
|
|
257
|
+
}
|
|
258
|
+
})();
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
async entries(opts: FilterOptions<Uint8Array> = {}): Promise<KeyValue<Uint8Array, Uint8Array>[]> {
|
|
262
|
+
const iterator = dbIterator(this.db);
|
|
263
|
+
if (opts.gt) {
|
|
264
|
+
iteratorSeek(iterator, opts.gt);
|
|
265
|
+
iteratorNext(iterator);
|
|
266
|
+
} else if (opts.gte) {
|
|
267
|
+
iteratorSeek(iterator, opts.gte);
|
|
268
|
+
} else {
|
|
269
|
+
iteratorSeekToFirst(iterator);
|
|
270
|
+
}
|
|
271
|
+
const entries = [];
|
|
272
|
+
this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
|
|
273
|
+
try {
|
|
274
|
+
while (iteratorValid(iterator)) {
|
|
275
|
+
const key = iteratorKey(iterator);
|
|
276
|
+
if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break;
|
|
277
|
+
if (opts.lte && Buffer.compare(key, opts.lte) > 0) break;
|
|
278
|
+
const value = iteratorValue(iterator);
|
|
279
|
+
entries.push({key, value});
|
|
280
|
+
iteratorNext(iterator);
|
|
281
|
+
}
|
|
282
|
+
return entries;
|
|
283
|
+
} finally {
|
|
284
|
+
this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, entries.length);
|
|
285
|
+
iteratorDestroy(iterator);
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import {Counter, Gauge, Histogram} from "@lodestar/utils";
|
|
2
|
+
|
|
3
|
+
/**
 * Metrics sink consumed by the db controllers.
 * Counters are labelled by `bucket` — the caller-provided bucketId, or "unknown".
 */
export type LevelDbControllerMetrics = {
  /** Count of read requests (each get/getMany/iteration counts as 1) */
  dbReadReq: Counter<{bucket: string}>;
  /** Count of items returned by read requests */
  dbReadItems: Counter<{bucket: string}>;
  /** Count of write requests (each put/delete/batch counts as 1) */
  dbWriteReq: Counter<{bucket: string}>;
  /** Count of items written by write requests */
  dbWriteItems: Counter<{bucket: string}>;
  /** Approximate total size of the db, in bytes */
  dbSizeTotal: Gauge;
  /** Time taken to compute the approximate db size */
  dbApproximateSizeTime: Histogram;
};
|
package/src/index.ts
ADDED
package/src/util.ts
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import {bytesToInt, intToBytes} from "@lodestar/utils";
|
|
2
|
+
import {BUCKET_LENGTH} from "./const.js";
|
|
3
|
+
|
|
4
|
+
export const uintLen = 8;
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Encode a key for the db write/read, Prepend a bucket to a key
|
|
8
|
+
*
|
|
9
|
+
* The encoding of key is very important step that can cause failure of proper indexing and querying of data
|
|
10
|
+
*
|
|
11
|
+
* We are using LevelDB which have pluggable comparator support, so you can decide how to
|
|
12
|
+
* compare keys. But for NodeJS binding only default comparison algorithm is supported which
|
|
13
|
+
* uses lexicographical comparison of the raw bytes of the keys
|
|
14
|
+
*
|
|
15
|
+
* It is important to use **helpers implemented here** to encode db keys so that key comparison properly work.
|
|
16
|
+
*/
|
|
17
|
+
export function encodeKey(bucket: number, key: Uint8Array | string | number | bigint): Uint8Array {
|
|
18
|
+
let buf: Buffer;
|
|
19
|
+
const prefixLength = BUCKET_LENGTH;
|
|
20
|
+
//all keys are writen with prefixLength offet
|
|
21
|
+
if (typeof key === "string") {
|
|
22
|
+
buf = Buffer.alloc(key.length + prefixLength);
|
|
23
|
+
buf.write(key, prefixLength);
|
|
24
|
+
} else if (typeof key === "number" || typeof key === "bigint") {
|
|
25
|
+
buf = Buffer.alloc(uintLen + prefixLength);
|
|
26
|
+
intToBytes(BigInt(key), uintLen, "be").copy(buf, prefixLength);
|
|
27
|
+
} else {
|
|
28
|
+
buf = Buffer.alloc(key.length + prefixLength);
|
|
29
|
+
buf.set(key, prefixLength);
|
|
30
|
+
}
|
|
31
|
+
//bucket prefix on position 0
|
|
32
|
+
buf.set(intToBytes(bucket, BUCKET_LENGTH, "le"), 0);
|
|
33
|
+
return buf;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
 * Encode `value` as a fixed-width big-endian byte sequence for use inside a db key.
 * Big-endian ordering keeps lexicographic key comparison consistent with numeric order.
 */
export function encodeNumberForDbKey(value: number, byteSize: number): Uint8Array {
  return intToBytes(value, byteSize, "be");
}
|
|
39
|
+
|
|
40
|
+
/**
 * Inverse of encodeNumberForDbKey: read a big-endian number from the first
 * `byteSize` bytes of `value`. Any trailing bytes beyond `byteSize` are ignored.
 */
export function decodeNumberForDbKey(value: Uint8Array, byteSize: number): number {
  return bytesToInt(value.slice(0, byteSize), "be");
}
|
|
43
|
+
|
|
44
|
+
export function encodeStringForDbKey(value: string): Uint8Array {
|
|
45
|
+
return Buffer.from(value, "utf-8");
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export function decodeStringForDbKey(value: Uint8Array): string {
|
|
49
|
+
return Buffer.from(value).toString("utf8");
|
|
50
|
+
}
|