lba 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +52 -0
  2. package/index.js +263 -0
  3. package/package.json +17 -0
package/README.md ADDED
@@ -0,0 +1,52 @@
+ # LBA (Log-structured Binary Archive)
+
+ A lightweight, high-performance, file-based key-value store for Node.js.
+ It supports sharding, compression (zlib), and atomic updates.
+
+ ## Installation
+
+ ```bash
+ npm install lba
+ ```
+
+ ## Usage
+
+ ```js
+ const LBA = require("lba");
+
+ // Initialize the DB (creates an 'lba_storage' folder by default)
+ const db = new LBA("./my-data");
+
+ (async () => {
+   // 1. Set data
+   await db.set("user:1001", {
+     name: "Alice",
+     age: 30,
+     items: ["sword", "shield"],
+   });
+
+   // 2. Get data
+   const user = await db.get("user:1001");
+   console.log(user);
+   // Output: { name: 'Alice', age: 30, items: ['sword', 'shield'] }
+
+   // 3. Atomic update (serialized per shard, so read-modify-write is safe)
+   await db.update("user:1001", (data) => {
+     data.age += 1;
+     return data; // return the modified data
+   });
+
+   // 4. Delete
+   await db.delete("user:1001");
+ })();
+ ```
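+
+ ### Concurrent updates
+
+ `update()` runs its read-modify-write callback inside the shard's operation
+ queue, so concurrent updates to the same key are serialized and none are
+ lost. For example (inside an async function):
+
+ ```js
+ await db.set("hits", { count: 0 });
+
+ // Ten concurrent increments; the per-shard queue serializes them.
+ await Promise.all(
+   Array.from({ length: 10 }, () =>
+     db.update("hits", (data) => {
+       data.count += 1;
+       return data;
+     }),
+   ),
+ );
+
+ console.log(await db.get("hits")); // { count: 10 }
+ ```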
+
+ ## Features
+
+ - Sharding: Distributes data across multiple files to reduce lock contention.
+
+ - Compression: Automatically compresses values with zlib (raw deflate).
+
+ - Atomic Updates: Prevents race conditions during read-modify-write operations.
+
+ - LRU Cache: Built-in in-memory caching for fast repeated reads.
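+
+ ## Options
+
+ The constructor takes an options object as its second argument. The fields
+ below are the ones `index.js` actually reads; defaults are shown in the
+ comments.
+
+ ```js
+ const LBA = require("lba");
+
+ const db = new LBA("./my-data", {
+   shardCount: 64,   // number of shard files (default: 32)
+   cacheLimit: 5000, // max entries in the in-memory LRU cache (default: 10000)
+   fastMode: true,   // zlib level 1 instead of 6 (faster writes, larger files)
+ });
+
+ (async () => {
+   // Reclaim space held by overwritten or deleted records.
+   await db.vacuum();
+   // Wait for pending operations and close all shard file handles.
+   await db.close();
+ })();
+ ```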
package/index.js ADDED
@@ -0,0 +1,263 @@
+ const fs = require("fs");
+ const path = require("path");
+ const zlib = require("zlib");
+ const util = require("util");
+
+ const inflateAsync = util.promisify(zlib.inflateRaw);
+ const deflateAsync = util.promisify(zlib.deflateRaw);
+
+ class LBA {
+   constructor(dbDir = "lba_storage", options = {}) {
+     this.dbDir = path.resolve(dbDir);
+     this.shardCount = options.shardCount || 32;
+     this.cacheLimit = options.cacheLimit || 10000;
+     this.compressionLevel = options.fastMode ? 1 : 6;
+
+     this.indices = Array.from({ length: this.shardCount }, () => new Map());
+     this.cache = new Map();
+
+     this.queues = Array.from({ length: this.shardCount }, () =>
+       Promise.resolve(),
+     );
+
+     this.fileHandles = new Array(this.shardCount).fill(null);
+     this.isLoaded = new Array(this.shardCount).fill(false);
+
+     if (!fs.existsSync(this.dbDir))
+       fs.mkdirSync(this.dbDir, { recursive: true });
+   }
+
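+   // Shard selection: 32-bit FNV-1a hash of the key, reduced modulo the
+   // shard count, so a given key always maps to the same shard file.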
+   _getShard(key) {
+     let hash = 2166136261;
+     const sKey = String(key);
+     for (let i = 0; i < sKey.length; i++) {
+       hash ^= sKey.charCodeAt(i);
+       hash = Math.imul(hash, 16777619);
+     }
+     return Math.abs(hash) % this.shardCount;
+   }
+
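+   // On-disk record layout (append-only log, replayed here on first access):
+   //   bytes 0-1  magic "LB" (0x4c 0x42)
+   //   bytes 2-5  reserved, always 0
+   //   bytes 6-9  value length, UInt32BE (0 marks a tombstone / delete)
+   //   byte  10   key length (one byte, so keys max out at 255 bytes)
+   // followed by the raw key bytes, then the deflateRaw-compressed JSON value.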
+   async _ensureShardLoaded(sIdx) {
+     if (this.isLoaded[sIdx]) return;
+
+     const fPath = path.join(this.dbDir, `shard_${sIdx}.lba`);
+     this.fileHandles[sIdx] = await fs.promises.open(fPath, "a+");
+
+     const stat = await this.fileHandles[sIdx].stat();
+     const size = stat.size;
+     let offset = 0;
+     const headerSize = 11;
+     const headerBuf = Buffer.alloc(headerSize);
+
+     while (offset + headerSize <= size) {
+       const { bytesRead } = await this.fileHandles[sIdx].read(
+         headerBuf,
+         0,
+         headerSize,
+         offset,
+       );
+       if (bytesRead < headerSize) break;
+
+       if (headerBuf[0] !== 0x4c || headerBuf[1] !== 0x42) {
+         console.warn(
+           `Shard ${sIdx}: data corruption detected (offset ${offset}); ignoring the rest of the file for recovery.`,
+         );
+         break;
+       }
+
+       const vLen = headerBuf.readUInt32BE(6);
+       const kLen = headerBuf[10];
+       const recordTotalSize = headerSize + kLen + vLen;
+
+       if (offset + recordTotalSize > size) {
+         console.warn(
+           `Shard ${sIdx}: incomplete record found (offset ${offset}); discarding the trailing data.`,
+         );
+         break;
+       }
+
+       const kBuf = Buffer.alloc(kLen);
+       await this.fileHandles[sIdx].read(kBuf, 0, kLen, offset + headerSize);
+       const key = kBuf.toString();
+
+       if (vLen > 0) {
+         this.indices[sIdx].set(key, {
+           offset: offset + headerSize + kLen,
+           length: vLen,
+         });
+       } else {
+         this.indices[sIdx].delete(key);
+       }
+
+       offset += recordTotalSize;
+     }
+
+     this.isLoaded[sIdx] = true;
+   }
+
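+   // Read path: serve from the LRU cache when possible; otherwise read the
+   // compressed value at the indexed offset, inflate, parse, and cache it.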
+   async _readImpl(sIdx, key) {
+     const kStr = String(key);
+     if (this.cache.has(kStr)) {
+       // LRU bump: delete and re-insert so the entry becomes most recent.
+       const val = this.cache.get(kStr);
+       this.cache.delete(kStr);
+       this.cache.set(kStr, val);
+       return val;
+     }
+
+     const meta = this.indices[sIdx].get(kStr);
+     if (!meta) return null;
+
+     const buf = Buffer.alloc(meta.length);
+     await this.fileHandles[sIdx].read(buf, 0, meta.length, meta.offset);
+
+     try {
+       const decompressed = await inflateAsync(buf);
+       const data = JSON.parse(decompressed.toString());
+
+       // Evict the least recently used entry when the cache is full.
+       if (this.cache.size >= this.cacheLimit) {
+         const firstKey = this.cache.keys().next().value;
+         this.cache.delete(firstKey);
+       }
+       this.cache.set(kStr, data);
+
+       return data;
+     } catch (err) {
+       console.error(`Read error for key ${key}:`, err);
+       return null;
+     }
+   }
+
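+   // Write path: append a new record to the end of the shard file and point
+   // the in-memory index at it. Stale versions of a key remain on disk until
+   // vacuum() compacts the shard.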
+   async _writeImpl(sIdx, key, value) {
+     const kStr = String(key);
+     const kBuf = Buffer.from(kStr);
+     // The key length is stored in a single header byte.
+     if (kBuf.length > 255) throw new Error("LBA: key exceeds 255 bytes");
+
+     let vBuf;
+     let vLen = 0;
+
+     // null/undefined writes a zero-length value: a tombstone record.
+     if (value !== null && value !== undefined) {
+       const jsonStr = JSON.stringify(value);
+       vBuf = await deflateAsync(jsonStr, { level: this.compressionLevel });
+       vLen = vBuf.length;
+     }
+
+     const head = Buffer.alloc(11);
+     head[0] = 0x4c; // 'L'
+     head[1] = 0x42; // 'B'
+     head.writeUInt32BE(0, 2); // reserved
+     head.writeUInt32BE(vLen, 6);
+     head[10] = kBuf.length;
+
+     const parts = [head, kBuf];
+     if (vLen > 0) parts.push(vBuf);
+     const block = Buffer.concat(parts);
+
+     const stat = await this.fileHandles[sIdx].stat();
+     const pos = stat.size;
+
+     await this.fileHandles[sIdx].write(block, 0, block.length, pos);
+
+     if (vLen > 0) {
+       this.indices[sIdx].set(kStr, {
+         offset: pos + 11 + kBuf.length,
+         length: vLen,
+       });
+
+       this.cache.delete(kStr);
+       this.cache.set(kStr, value);
+     } else {
+       this.indices[sIdx].delete(kStr);
+       this.cache.delete(kStr);
+     }
+   }
+
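+   // Every operation on a shard runs through that shard's promise chain, so
+   // reads and writes are serialized per shard and update() stays atomic.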
+   _enqueue(sIdx, task) {
+     const next = this.queues[sIdx]
+       .then(() => this._ensureShardLoaded(sIdx))
+       .then(task)
+       .catch((err) => console.error(`LBA Error (Shard ${sIdx}):`, err));
+
+     this.queues[sIdx] = next;
+     return next;
+   }
+
+   async get(key) {
+     const sIdx = this._getShard(key);
+     return this._enqueue(sIdx, () => this._readImpl(sIdx, key));
+   }
+
+   async set(key, value) {
+     const sIdx = this._getShard(key);
+     return this._enqueue(sIdx, () => this._writeImpl(sIdx, key, value));
+   }
+
+   async delete(key) {
+     // A delete is a tombstone: a record with value length 0.
+     return this.set(key, null);
+   }
+
+   async update(key, fn) {
+     const sIdx = this._getShard(key);
+     return this._enqueue(sIdx, async () => {
+       const current = await this._readImpl(sIdx, key);
+       const next = await fn(current);
+       // Returning undefined from fn skips the write.
+       if (next !== undefined) {
+         await this._writeImpl(sIdx, key, next);
+       }
+     });
+   }
+
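+   // Compaction: rewrite each shard with only the newest record per live key,
+   // then swap the compacted file into place with a rename().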
+   async vacuum() {
+     const tasks = [];
+     for (let i = 0; i < this.shardCount; i++) {
+       tasks.push(
+         this._enqueue(i, async () => {
+           if (!this.isLoaded[i] || this.indices[i].size === 0) return;
+
+           const tmpPath = path.join(this.dbDir, `vacuum_${i}.tmp`);
+           const oldPath = path.join(this.dbDir, `shard_${i}.lba`);
+
+           const tmpHandle = await fs.promises.open(tmpPath, "w");
+           let newPos = 0;
+
+           // Copy only the records the index still points at.
+           for (const [key, meta] of this.indices[i]) {
+             const vBuf = Buffer.alloc(meta.length);
+             await this.fileHandles[i].read(vBuf, 0, meta.length, meta.offset);
+
+             const kBuf = Buffer.from(key);
+
+             const head = Buffer.alloc(11);
+             head[0] = 0x4c;
+             head[1] = 0x42;
+             head.writeUInt32BE(0, 2);
+             head.writeUInt32BE(vBuf.length, 6);
+             head[10] = kBuf.length;
+
+             const block = Buffer.concat([head, kBuf, vBuf]);
+
+             await tmpHandle.write(block);
+
+             meta.offset = newPos + 11 + kBuf.length;
+             newPos += block.length;
+           }
+
+           await tmpHandle.close();
+           await this.fileHandles[i].close();
+
+           await fs.promises.rename(tmpPath, oldPath);
+
+           this.fileHandles[i] = await fs.promises.open(oldPath, "a+");
+         }),
+       );
+     }
+     await Promise.all(tasks);
+     this.cache.clear();
+   }
+
+   async close() {
+     await Promise.all(this.queues);
+     for (const handle of this.fileHandles) {
+       if (handle) await handle.close();
+     }
+     this.isLoaded.fill(false);
+   }
+ }
+
+ module.exports = LBA;
package/package.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "name": "lba",
+   "version": "1.0.0",
+   "description": "A lightweight, log-structured binary key-value store.",
+   "main": "index.js",
+   "scripts": {
+     "test": "echo \"Error: no test specified\" && exit 1"
+   },
+   "keywords": [
+     "database",
+     "key-value",
+     "lba",
+     "storage"
+   ],
+   "author": "kyoomin",
+   "license": "MIT"
+ }