lba 1.0.0 → 2.0.0
- package/README.md +47 -22
- package/index.js +363 -65
- package/package.json +1 -1
package/README.md
CHANGED
@@ -9,44 +9,69 @@ It supports sharding, compression (zlib), and atomic updates.
 npm install lba
 ```
 
+## Features
+
+- **High-Concurrency Sharding:** Uses FNV-1a hashing to distribute data across multiple shards, reducing file lock contention and achieving over 28,000 reads/sec.
+
+- **Memory-Efficient Streaming:** Iterate through millions of records without memory spikes using Async Generators.
+
+- **Atomic Updates:** Built-in update() method ensures thread-safe Read-Modify-Write operations.
+
+- **Smart LRU Cache:** Internal memory cache provides sub-millisecond latency for frequent data access.
+
+- **Automatic Compression:** Integrated zlib compression reduces disk footprint by up to 90%.
+
+- **Data Integrity:** Magic Byte verification detects and recovers from unexpected process terminations.
+
 ## Usage
 
 ```js
-const LBA = require("
+const LBA = require("lba");
 
-// Initialize DB (
-const db = new LBA("./my-data");
+// Initialize DB (supports optional config: shardCount, cacheLimit, fastMode)
+const db = new LBA("./my-data", { shardCount: 16 });
 
 (async () => {
-  // 1. Set
-  await db.set("user:1001", {
-    name: "Alice",
-    age: 30,
-    items: ["sword", "shield"],
-  });
-
-  // 2. Get Data
+  // 1. Set & Get (Auto-compressed)
+  await db.set("user:1001", { name: "Alice", age: 30 });
   const user = await db.get("user:1001");
-  console.log(user);
-  // Output: { name: 'Alice', age: 30, items: ['sword', 'shield'] }
 
-  //
+  // 2. Atomic Update (Prevents race conditions)
   await db.update("user:1001", (data) => {
     data.age += 1;
-    return data;
+    return data;
   });
 
-  //
-  await db.
+  // 3. Batch Operations (High-speed bulk processing)
+  await db.batchSet({
+    key1: "value1",
+    key2: "value2",
+  });
+  const results = await db.batchGet(["key1", "key2"]);
 })();
 ```
 
-##
+## Memory-Efficient Iteration
 
-
+For large datasets, use the Async Generator to keep memory usage low.
 
-
+```js
+// Extremely fast: up to 1.7M items/sec processing
+for await (const [key, value] of db.entries({ batchSize: 50 })) {
+  console.log(key, value);
+}
+```
 
-
+## Maintenance
+
+Since LBA uses a log-structured approach, use vacuum() to reclaim disk space.
+
+```js
+await db.vacuum(); // Compacts files and removes deleted entries
+```
 
-
+| Operation       |      Throughput      |
+| :-------------- | :------------------: |
+| **Batch Write** |   10,000+ ops/sec    |
+| **Batch Read**  |   28,000+ ops/sec    |
+| **Streaming**   | 1,700,000+ items/sec |
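
Note: 2.0.0 also ships getStats()/resetStats() instrumentation (see the index.js diff below) that the README does not document yet. A minimal usage sketch, with the field names taken from the getStats() body in the diff:

```js
const LBA = require("lba");
const db = new LBA("./my-data");

(async () => {
  await db.set("k", { v: 1 }); // writes++, and set() also populates the LRU cache
  await db.get("k");           // cacheHits++, served from memory

  // Fields per the diff: cacheHits, cacheMisses, reads, writes,
  // cacheSize, cacheLimit, cacheHitRate (e.g. "100.00%"), shardCount
  console.log(db.getStats());
  await db.close();
})();
```
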
package/index.js
CHANGED
@@ -6,12 +6,23 @@ const util = require("util");
 const inflateAsync = util.promisify(zlib.inflateRaw);
 const deflateAsync = util.promisify(zlib.deflateRaw);
 
+const MAGIC_BYTES = [0x4c, 0x42];
+const HEADER_SIZE = 11;
+const DEFAULT_SHARD_COUNT = 32;
+const DEFAULT_CACHE_LIMIT = 10000;
+const DEFAULT_COMPRESSION_LEVEL = 6;
+const FAST_COMPRESSION_LEVEL = 1;
+const FNV_OFFSET = 2166136261;
+const FNV_PRIME = 16777619;
+
 class LBA {
   constructor(dbDir = "lba_storage", options = {}) {
     this.dbDir = path.resolve(dbDir);
-    this.shardCount = options.shardCount ||
-    this.cacheLimit = options.cacheLimit ||
-    this.compressionLevel = options.fastMode
+    this.shardCount = options.shardCount || DEFAULT_SHARD_COUNT;
+    this.cacheLimit = options.cacheLimit || DEFAULT_CACHE_LIMIT;
+    this.compressionLevel = options.fastMode
+      ? FAST_COMPRESSION_LEVEL
+      : DEFAULT_COMPRESSION_LEVEL;
 
     this.indices = Array.from({ length: this.shardCount }, () => new Map());
     this.cache = new Map();
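
The new constants make the on-disk record layout explicit: an 11-byte header (magic bytes 0x4c 0x42, i.e. "LB"; four bytes written as zero at offset 2, apparently reserved; a UInt32BE compressed-value length at offset 6; a one-byte key length at offset 10, which caps keys at 255 bytes), followed by the raw key and the zlib-deflated value. A standalone sketch of that layout; encodeHeader/decodeHeader are illustrative helpers, not package API:

```js
const HEADER_SIZE = 11;
const MAGIC_BYTES = [0x4c, 0x42]; // "LB"

// Build an 11-byte header mirroring the header writes in _writeImpl.
function encodeHeader(keyLength, valueLength) {
  const head = Buffer.alloc(HEADER_SIZE);
  head[0] = MAGIC_BYTES[0];
  head[1] = MAGIC_BYTES[1];
  head.writeUInt32BE(0, 2);           // bytes 2-5: always written as zero
  head.writeUInt32BE(valueLength, 6); // bytes 6-9: compressed value length
  head[10] = keyLength;               // byte 10: key length (max 255)
  return head;
}

// Read it back the way _ensureShardLoaded does.
function decodeHeader(buf) {
  if (buf[0] !== MAGIC_BYTES[0] || buf[1] !== MAGIC_BYTES[1]) return null;
  return { vLen: buf.readUInt32BE(6), kLen: buf[10] };
}

console.log(decodeHeader(encodeHeader(8, 1024))); // { vLen: 1024, kLen: 8 }
```
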
@@ -23,18 +34,35 @@ class LBA {
     this.fileHandles = new Array(this.shardCount).fill(null);
     this.isLoaded = new Array(this.shardCount).fill(false);
 
-
+    this.stats = {
+      cacheHits: 0,
+      cacheMisses: 0,
+      reads: 0,
+      writes: 0,
+    };
+
+    this.headerBuffer = Buffer.allocUnsafe(HEADER_SIZE);
+
+    this._ensureDbDir();
+  }
+
+  _ensureDbDir() {
+    if (!fs.existsSync(this.dbDir)) {
       fs.mkdirSync(this.dbDir, { recursive: true });
+    }
   }
 
   _getShard(key) {
-    let hash =
+    let hash = FNV_OFFSET;
     const sKey = String(key);
-
+    const len = sKey.length;
+
+    for (let i = 0; i < len; i++) {
       hash ^= sKey.charCodeAt(i);
-      hash = Math.imul(hash,
+      hash = Math.imul(hash, FNV_PRIME);
     }
-
+
+    return (hash >>> 0) % this.shardCount;
   }
 
   async _ensureShardLoaded(sIdx) {
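
_getShard is a 32-bit FNV-1a hash over the key's UTF-16 code units, reduced modulo the shard count; Math.imul keeps the multiply in 32-bit space and >>> 0 makes the result unsigned. Run standalone (a sketch, not package API), it shows keys spreading roughly evenly, which is what keeps each per-shard write queue short:

```js
const FNV_OFFSET = 2166136261;
const FNV_PRIME = 16777619;

// The same routine as _getShard, extracted for illustration.
function shardFor(key, shardCount) {
  let hash = FNV_OFFSET;
  const sKey = String(key);
  for (let i = 0; i < sKey.length; i++) {
    hash ^= sKey.charCodeAt(i);
    hash = Math.imul(hash, FNV_PRIME); // 32-bit wrap-around multiply
  }
  return (hash >>> 0) % shardCount; // >>> 0: reinterpret as unsigned
}

const counts = new Array(32).fill(0);
for (let i = 0; i < 100000; i++) counts[shardFor(`user:${i}`, 32)]++;
console.log(Math.min(...counts), Math.max(...counts)); // roughly even buckets
```
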
@@ -45,44 +73,52 @@ class LBA {
 
     const stat = await this.fileHandles[sIdx].stat();
     const size = stat.size;
+
+    if (size === 0) {
+      this.isLoaded[sIdx] = true;
+      return;
+    }
+
     let offset = 0;
-    const headerSize = 11;
-    const headerBuf = Buffer.alloc(headerSize);
 
-    while (offset +
+    while (offset + HEADER_SIZE <= size) {
       const { bytesRead } = await this.fileHandles[sIdx].read(
-
+        this.headerBuffer,
         0,
-
+        HEADER_SIZE,
         offset,
       );
-      if (bytesRead < headerSize) break;
 
-      if (
+      if (bytesRead < HEADER_SIZE) break;
+
+      if (
+        this.headerBuffer[0] !== MAGIC_BYTES[0] ||
+        this.headerBuffer[1] !== MAGIC_BYTES[1]
+      ) {
         console.warn(
-          `Shard ${sIdx}:
+          `Shard ${sIdx}: Data corruption detected (Offset: ${offset}). Subsequent data will be ignored for recovery.`,
         );
         break;
       }
 
-      const vLen =
-      const kLen =
-      const recordTotalSize =
+      const vLen = this.headerBuffer.readUInt32BE(6);
+      const kLen = this.headerBuffer[10];
+      const recordTotalSize = HEADER_SIZE + kLen + vLen;
 
       if (offset + recordTotalSize > size) {
         console.warn(
-          `Shard ${sIdx}:
+          `Shard ${sIdx}: Incomplete record detected (Offset: ${offset}). Discarding last data.`,
         );
         break;
       }
 
-      const kBuf = Buffer.
-      await this.fileHandles[sIdx].read(kBuf, 0, kLen, offset +
+      const kBuf = Buffer.allocUnsafe(kLen);
+      await this.fileHandles[sIdx].read(kBuf, 0, kLen, offset + HEADER_SIZE);
       const key = kBuf.toString();
 
       if (vLen > 0) {
         this.indices[sIdx].set(key, {
-          offset: offset +
+          offset: offset + HEADER_SIZE + kLen,
           length: vLen,
         });
       } else {
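
The loader treats each shard file as an append-only log: it walks it header by header and stops at the first bad magic pair or truncated record, keeping everything before the damage. That is the whole recovery story, so a record half-written during a crash costs only itself and anything appended after it. The bail-out logic in miniature, over an in-memory buffer (illustrative only, not package API):

```js
const HEADER_SIZE = 11;

// Count intact records, stopping where _ensureShardLoaded would stop.
function countIntactRecords(buf) {
  let offset = 0;
  let records = 0;
  while (offset + HEADER_SIZE <= buf.length) {
    if (buf[offset] !== 0x4c || buf[offset + 1] !== 0x42) break; // bad magic
    const vLen = buf.readUInt32BE(offset + 6);
    const kLen = buf[offset + 10];
    if (offset + HEADER_SIZE + kLen + vLen > buf.length) break; // truncated tail
    records++;
    offset += HEADER_SIZE + kLen + vLen;
  }
  return records;
}

// One valid record (kLen = 1, vLen = 3) followed by crash debris:
const head = Buffer.from([0x4c, 0x42, 0, 0, 0, 0, 0, 0, 0, 3, 1]);
const log = Buffer.concat([head, Buffer.from("k"), Buffer.from([1, 2, 3]), Buffer.from("junk")]);
console.log(countIntactRecords(log)); // 1
```
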
@@ -96,28 +132,30 @@ class LBA {
   }
 
   async _readImpl(sIdx, key) {
-
-
-
-    this.
+    const keyStr = String(key);
+
+    if (this.cache.has(keyStr)) {
+      this.stats.cacheHits++;
+      const val = this.cache.get(keyStr);
+      this.cache.delete(keyStr);
+      this.cache.set(keyStr, val);
       return val;
     }
 
-
+    this.stats.cacheMisses++;
+    this.stats.reads++;
+
+    const meta = this.indices[sIdx].get(keyStr);
     if (!meta) return null;
 
-    const buf = Buffer.
+    const buf = Buffer.allocUnsafe(meta.length);
     await this.fileHandles[sIdx].read(buf, 0, meta.length, meta.offset);
 
     try {
       const decompressed = await inflateAsync(buf);
       const data = JSON.parse(decompressed.toString());
 
-
-        const firstKey = this.cache.keys().next().value;
-        this.cache.delete(firstKey);
-      }
-      this.cache.set(String(key), data);
+      this._addToCache(keyStr, data);
 
       return data;
     } catch (err) {
@@ -126,7 +164,22 @@ class LBA {
     }
   }
 
+  _addToCache(key, value) {
+    if (this.cache.has(key)) {
+      this.cache.delete(key);
+    }
+
+    while (this.cache.size >= this.cacheLimit) {
+      const firstKey = this.cache.keys().next().value;
+      this.cache.delete(firstKey);
+    }
+
+    this.cache.set(key, value);
+  }
+
   async _writeImpl(sIdx, key, value) {
+    this.stats.writes++;
+
     const kStr = String(key);
     const kBuf = Buffer.from(kStr);
 
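
Both the cache-hit path in _readImpl (delete, then re-set) and the new _addToCache lean on the fact that a JavaScript Map iterates in insertion order: re-inserting a key moves it to the back, and keys().next().value is always the oldest entry. The same policy as a self-contained sketch:

```js
// Minimal Map-backed LRU mirroring the _readImpl/_addToCache behavior.
class TinyLRU {
  constructor(limit) {
    this.limit = limit;
    this.map = new Map();
  }
  get(key) {
    if (!this.map.has(key)) return undefined;
    const val = this.map.get(key);
    this.map.delete(key); // re-insert to mark as most recently used
    this.map.set(key, val);
    return val;
  }
  set(key, val) {
    if (this.map.has(key)) this.map.delete(key);
    while (this.map.size >= this.limit) {
      this.map.delete(this.map.keys().next().value); // evict the oldest
    }
    this.map.set(key, val);
  }
}

const lru = new TinyLRU(2);
lru.set("a", 1);
lru.set("b", 2);
lru.get("a");    // "a" becomes most recent
lru.set("c", 3); // evicts "b", the oldest
console.log([...lru.map.keys()]); // [ 'a', 'c' ]
```
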
@@ -139,9 +192,9 @@ class LBA {
       vLen = vBuf.length;
     }
 
-    const head = Buffer.
-    head[0] =
-    head[1] =
+    const head = Buffer.allocUnsafe(HEADER_SIZE);
+    head[0] = MAGIC_BYTES[0];
+    head[1] = MAGIC_BYTES[1];
     head.writeUInt32BE(0, 2);
     head.writeUInt32BE(vLen, 6);
     head[10] = kBuf.length;
@@ -157,12 +210,11 @@ class LBA {
 
     if (vLen > 0) {
       this.indices[sIdx].set(kStr, {
-        offset: pos +
+        offset: pos + HEADER_SIZE + kBuf.length,
         length: vLen,
       });
 
-      this.
-      this.cache.set(kStr, value);
+      this._addToCache(kStr, value);
     } else {
       this.indices[sIdx].delete(kStr);
       this.cache.delete(kStr);
@@ -173,7 +225,10 @@ class LBA {
     const next = this.queues[sIdx]
       .then(() => this._ensureShardLoaded(sIdx))
       .then(task)
-      .catch((err) =>
+      .catch((err) => {
+        console.error(`LBA Error (Shard ${sIdx}):`, err);
+        throw err;
+      });
 
     this.queues[sIdx] = next;
     return next;
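
_enqueue is the concurrency backbone: every read or write for a shard is chained onto that shard's promise, so operations on one shard run strictly in order while different shards proceed in parallel, and 2.0.0 now logs and re-throws instead of swallowing errors. The pattern in isolation (a sketch; note that, as in the diff, a rejected task also rejects tasks queued behind it on the same shard):

```js
// Per-shard promise chaining, as _enqueue does it.
const queues = [Promise.resolve(), Promise.resolve()];

function enqueue(idx, task) {
  const next = queues[idx].then(task).catch((err) => {
    console.error(`queue ${idx} task failed:`, err);
    throw err; // 2.0.0 propagates instead of swallowing
  });
  queues[idx] = next; // the chain itself carries any rejection forward
  return next;
}

(async () => {
  await enqueue(0, async () => console.log("shard 0: first"));
  await enqueue(0, async () => console.log("shard 0: second, strictly after"));
})();
```
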
@@ -201,11 +256,219 @@ class LBA {
       if (next !== undefined) {
         await this._writeImpl(sIdx, key, next);
       }
+      return next;
     });
   }
 
+  async forEach(callback, options = {}) {
+    const batchSize = options.batchSize || 100;
+    let processed = 0;
+    let batch = [];
+
+    for (let i = 0; i < this.shardCount; i++) {
+      await this._enqueue(i, async () => {
+        for (const key of this.indices[i].keys()) {
+          try {
+            const value = await this._readImpl(i, key);
+            if (value !== null) {
+              batch.push({ key, value });
+
+              if (batch.length >= batchSize) {
+                for (const item of batch) {
+                  await callback(item.key, item.value);
+                  processed++;
+                }
+                batch = [];
+              }
+            }
+          } catch (err) {
+            console.error(`Error reading key ${key} from shard ${i}:`, err);
+          }
+        }
+      });
+    }
+
+    for (const item of batch) {
+      await callback(item.key, item.value);
+      processed++;
+    }
+
+    return processed;
+  }
+
+  async *entries(options = {}) {
+    const batchSize = options.batchSize || 50;
+
+    for (let i = 0; i < this.shardCount; i++) {
+      const entries = await this._enqueue(i, async () => {
+        const batch = [];
+
+        for (const key of this.indices[i].keys()) {
+          try {
+            const value = await this._readImpl(i, key);
+            if (value !== null) {
+              batch.push([key, value]);
+
+              if (batch.length >= batchSize) {
+                const result = [...batch];
+                batch.length = 0;
+                return result;
+              }
+            }
+          } catch (err) {
+            console.error(`Error reading key ${key} from shard ${i}:`, err);
+          }
+        }
+
+        return batch;
+      });
+
+      for (const entry of entries) {
+        yield entry;
+      }
+    }
+  }
+
+  async getAll(options = {}) {
+    const maxSize = options.maxSize || Infinity;
+    const results = {};
+    let count = 0;
+
+    await this.forEach(
+      (key, value) => {
+        if (count >= maxSize) {
+          return;
+        }
+        results[key] = value;
+        count++;
+      },
+      { batchSize: options.batchSize || 100 },
+    );
+
+    return results;
+  }
+
+  async keys() {
+    const allKeys = [];
+
+    for (let i = 0; i < this.shardCount; i++) {
+      await this._enqueue(i, async () => {
+        allKeys.push(...this.indices[i].keys());
+      });
+    }
+
+    return allKeys;
+  }
+
+  async count() {
+    let total = 0;
+
+    const counts = await Promise.all(
+      Array.from({ length: this.shardCount }, (_, i) =>
+        this._enqueue(i, async () => this.indices[i].size),
+      ),
+    );
+
+    return counts.reduce((sum, count) => sum + count, 0);
+  }
+
+  async batchGet(keys) {
+    const results = {};
+    const keysByShard = new Map();
+
+    for (const key of keys) {
+      const sIdx = this._getShard(key);
+      if (!keysByShard.has(sIdx)) {
+        keysByShard.set(sIdx, []);
+      }
+      keysByShard.get(sIdx).push(key);
+    }
+
+    const promises = [];
+    for (const [sIdx, shardKeys] of keysByShard) {
+      promises.push(
+        this._enqueue(sIdx, async () => {
+          const shardResults = {};
+          for (const key of shardKeys) {
+            try {
+              const value = await this._readImpl(sIdx, key);
+              if (value !== null) {
+                shardResults[key] = value;
+              }
+            } catch (err) {
+              console.error(`Error in batchGet for key ${key}:`, err);
+            }
+          }
+          return shardResults;
+        }),
+      );
+    }
+
+    const shardResults = await Promise.all(promises);
+
+    for (const shardResult of shardResults) {
+      Object.assign(results, shardResult);
+    }
+
+    return results;
+  }
+
+  async batchSet(entries) {
+    const entriesByShard = new Map();
+
+    for (const [key, value] of Object.entries(entries)) {
+      const sIdx = this._getShard(key);
+      if (!entriesByShard.has(sIdx)) {
+        entriesByShard.set(sIdx, []);
+      }
+      entriesByShard.get(sIdx).push([key, value]);
+    }
+
+    const promises = [];
+    for (const [sIdx, shardEntries] of entriesByShard) {
+      promises.push(
+        this._enqueue(sIdx, async () => {
+          for (const [key, value] of shardEntries) {
+            await this._writeImpl(sIdx, key, value);
+          }
+        }),
+      );
+    }
+
+    await Promise.all(promises);
+  }
+
+  getStats() {
+    const hitRate =
+      this.stats.cacheHits + this.stats.cacheMisses > 0
+        ? (
+            (this.stats.cacheHits /
+              (this.stats.cacheHits + this.stats.cacheMisses)) *
+            100
+          ).toFixed(2)
+        : 0;
+
+    return {
+      ...this.stats,
+      cacheSize: this.cache.size,
+      cacheLimit: this.cacheLimit,
+      cacheHitRate: `${hitRate}%`,
+      shardCount: this.shardCount,
+    };
+  }
+
+  resetStats() {
+    this.stats = {
+      cacheHits: 0,
+      cacheMisses: 0,
+      reads: 0,
+      writes: 0,
+    };
+  }
+
   async vacuum() {
     const tasks = [];
+
     for (let i = 0; i < this.shardCount; i++) {
       tasks.push(
         this._enqueue(i, async () => {
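
The batch methods get their throughput by bucketing work per shard (one Map of shard index to keys or entries), then running one queued task per shard, so shards proceed concurrently while each stays internally serialized. A usage sketch against the API as it appears in the diff:

```js
const LBA = require("lba");
const db = new LBA("./my-data");

(async () => {
  // One _enqueue per shard touched; shards run in parallel.
  await db.batchSet({ "user:1": { n: 1 }, "user:2": { n: 2 } });

  const got = await db.batchGet(["user:1", "user:2", "missing"]);
  console.log(got); // { 'user:1': { n: 1 }, 'user:2': { n: 2 } }; absent keys are omitted

  console.log(await db.count()); // per-shard index sizes, summed
  await db.close();
})();
```
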
@@ -214,49 +477,84 @@ class LBA {
           const tmpPath = path.join(this.dbDir, `vacuum_${i}.tmp`);
           const oldPath = path.join(this.dbDir, `shard_${i}.lba`);
 
-
-          let newPos = 0;
+          let tmpHandle = null;
 
-
-
-
+          try {
+            tmpHandle = await fs.promises.open(tmpPath, "w");
+            let newPos = 0;
 
-            const
+            for (const [key, meta] of this.indices[i]) {
+              const vBuf = Buffer.allocUnsafe(meta.length);
+              await this.fileHandles[i].read(vBuf, 0, meta.length, meta.offset);
 
-
-            head[0] = 0x4c;
-            head[1] = 0x42;
-            head.writeUInt32BE(0, 2);
-            head.writeUInt32BE(vBuf.length, 6);
-            head[10] = kBuf.length;
+              const kBuf = Buffer.from(key);
 
-
+              const head = Buffer.allocUnsafe(HEADER_SIZE);
+              head[0] = MAGIC_BYTES[0];
+              head[1] = MAGIC_BYTES[1];
+              head.writeUInt32BE(0, 2);
+              head.writeUInt32BE(vBuf.length, 6);
+              head[10] = kBuf.length;
 
-
+              const block = Buffer.concat([head, kBuf, vBuf]);
 
-
-
-
+              await tmpHandle.write(block);
+
+              meta.offset = newPos + HEADER_SIZE + kBuf.length;
+              newPos += block.length;
+            }
 
-
-
+            await tmpHandle.close();
+            tmpHandle = null;
 
-
+            await this.fileHandles[i].close();
 
-
+            await fs.promises.rename(tmpPath, oldPath);
+
+            this.fileHandles[i] = await fs.promises.open(oldPath, "a+");
+          } catch (err) {
+            console.error(`Vacuum error for shard ${i}:`, err);
+
+            if (tmpHandle) {
+              try {
+                await tmpHandle.close();
+              } catch (e) {}
+            }
+
+            if (fs.existsSync(tmpPath)) {
+              try {
+                await fs.promises.unlink(tmpPath);
+              } catch (e) {}
+            }
+
+            throw err;
+          }
         }),
       );
     }
+
     await Promise.all(tasks);
     this.cache.clear();
   }
 
   async close() {
     await Promise.all(this.queues);
-
-
-
+
+    const closePromises = this.fileHandles.map(async (handle) => {
+      if (handle) {
+        try {
+          await handle.close();
+        } catch (err) {
+          console.error("Error closing file handle:", err);
+        }
+      }
+    });
+
+    await Promise.all(closePromises);
+
+    this.fileHandles.fill(null);
     this.isLoaded.fill(false);
+    this.cache.clear();
   }
 }
 
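
vacuum() now follows the standard crash-safe compaction recipe: stream only the live records into vacuum_N.tmp, close it, then rename() it over shard_N.lba, and on any failure unlink the temp file and re-throw, leaving the old shard untouched. Since rename() within one filesystem is atomic, readers only ever see a complete old file or a complete new one. The write-temp-then-rename core of that recipe as a standalone sketch:

```js
const fs = require("fs");

// Rewrite a file so a crash at any point leaves either the old or the
// new contents, never a half-written mix (the pattern vacuum() relies on).
async function atomicRewrite(file, contents) {
  const tmp = `${file}.tmp`;
  try {
    await fs.promises.writeFile(tmp, contents);
    await fs.promises.rename(tmp, file); // atomic swap on the same filesystem
  } catch (err) {
    await fs.promises.unlink(tmp).catch(() => {}); // best-effort cleanup
    throw err;
  }
}

atomicRewrite("demo.lba", "compacted contents").catch(console.error);
```
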