lmcs-db 1.0.4 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +241 -85
- package/dist/core/collection.d.ts +33 -0
- package/dist/core/collection.js +287 -0
- package/dist/core/database.d.ts +35 -0
- package/dist/core/database.js +165 -0
- package/dist/core/indexer.d.ts +20 -0
- package/dist/core/indexer.js +89 -0
- package/dist/core/transaction-context.d.ts +13 -0
- package/dist/core/transaction-context.js +48 -0
- package/dist/core/transaction.d.ts +25 -0
- package/dist/core/transaction.js +122 -0
- package/dist/crypto/key-derivation.d.ts +0 -0
- package/dist/crypto/key-derivation.js +1 -0
- package/dist/crypto/manager.d.ts +22 -0
- package/dist/crypto/manager.js +76 -0
- package/dist/crypto/vault.d.ts +18 -0
- package/dist/crypto/vault.js +44 -0
- package/dist/index.d.ts +5 -2
- package/dist/index.js +12 -9
- package/dist/persistence/AsyncWriteWorker.js +11 -7
- package/dist/storage/aol.d.ts +26 -0
- package/dist/storage/aol.js +166 -0
- package/dist/storage/base.d.ts +36 -0
- package/dist/storage/base.js +13 -0
- package/dist/storage/binary.d.ts +21 -0
- package/dist/storage/binary.js +124 -0
- package/dist/storage/index.d.ts +5 -0
- package/dist/storage/index.js +13 -0
- package/dist/storage/json.d.ts +18 -0
- package/dist/storage/json.js +153 -0
- package/dist/storage/memory.d.ts +14 -0
- package/dist/storage/memory.js +42 -0
- package/dist/utils/checksum.d.ts +0 -0
- package/dist/utils/checksum.js +1 -0
- package/dist/utils/errors.d.ts +16 -0
- package/dist/utils/errors.js +37 -0
- package/dist/utils/lock.d.ts +9 -0
- package/dist/utils/lock.js +75 -0
- package/package.json +11 -5
package/dist/crypto/vault.js
ADDED
@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CryptoVault = void 0;
+const crypto_1 = require("crypto");
+class CryptoVault {
+    derivedKey;
+    ALGORITHM = 'aes-256-gcm';
+    VERSION = 1;
+    ITERATIONS = 100000;
+    salt;
+    constructor(password, existingSalt) {
+        if (!password || typeof password !== 'string') {
+            throw new Error('Password must be a non-empty string');
+        }
+        const saltBuffer = existingSalt
+            ? Buffer.from(existingSalt, 'hex')
+            : (0, crypto_1.randomBytes)(32);
+        this.salt = saltBuffer.toString('hex');
+        this.derivedKey = (0, crypto_1.pbkdf2Sync)(password, saltBuffer, this.ITERATIONS, 32, 'sha256');
+    }
+    encrypt(data) {
+        const iv = (0, crypto_1.randomBytes)(16);
+        const cipher = (0, crypto_1.createCipheriv)(this.ALGORITHM, this.derivedKey, iv);
+        let ciphertext = cipher.update(data, 'utf8', 'hex');
+        ciphertext += cipher.final('hex');
+        return {
+            ciphertext,
+            iv: iv.toString('hex'),
+            authTag: cipher.getAuthTag().toString('hex'),
+            salt: this.salt,
+            iterations: this.ITERATIONS,
+            version: this.VERSION
+        };
+    }
+    decrypt(payload) {
+        const key = (0, crypto_1.pbkdf2Sync)(this.derivedKey.toString('hex'), Buffer.from(payload.salt, 'hex'), payload.iterations, 32, 'sha256');
+        const decipher = (0, crypto_1.createDecipheriv)(this.ALGORITHM, key, Buffer.from(payload.iv, 'hex'));
+        decipher.setAuthTag(Buffer.from(payload.authTag, 'hex'));
+        let decrypted = decipher.update(payload.ciphertext, 'hex', 'utf8');
+        decrypted += decipher.final('utf8');
+        return decrypted;
+    }
+}
+exports.CryptoVault = CryptoVault;
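For context, a minimal usage sketch of the new CryptoVault. The constructor arguments and the envelope fields come from vault.js above; the passphrase, the sample data, and the deep import path are illustrative assumptions (CryptoVault is not re-exported from the package root).

```ts
// Hypothetical consumer; the deep import path below is an assumption.
import { CryptoVault } from "lmcs-db/dist/crypto/vault";

const vault = new CryptoVault("a long passphrase");

// encrypt() returns a self-describing envelope matching vault.js above:
// hex ciphertext, iv and authTag, plus the PBKDF2 salt, iteration count and format version.
const payload = vault.encrypt(JSON.stringify({ hello: "world" }));
console.log(payload.version, payload.iterations); // 1, 100000

// A vault can later be re-created from the stored salt so the same key is derived.
const restored = new CryptoVault("a long passphrase", payload.salt);
```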
package/dist/index.d.ts
CHANGED
@@ -1,2 +1,5 @@
-export {
-export {
+export { Database, createDatabase, DatabaseOptions, StorageType } from './core/database';
+export { Collection, QueryOptions } from './core/collection';
+export { CryptoManager, EncryptedData } from './crypto/manager';
+export { AOLStorage } from './storage/aol';
+export { BinaryStorage } from './storage/binary';
package/dist/index.js
CHANGED
@@ -1,11 +1,14 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-var
-Object.defineProperty(exports, "
-
-
-
+exports.BinaryStorage = exports.AOLStorage = exports.CryptoManager = exports.Collection = exports.createDatabase = exports.Database = void 0;
+var database_1 = require("./core/database");
+Object.defineProperty(exports, "Database", { enumerable: true, get: function () { return database_1.Database; } });
+Object.defineProperty(exports, "createDatabase", { enumerable: true, get: function () { return database_1.createDatabase; } });
+var collection_1 = require("./core/collection");
+Object.defineProperty(exports, "Collection", { enumerable: true, get: function () { return collection_1.Collection; } });
+var manager_1 = require("./crypto/manager");
+Object.defineProperty(exports, "CryptoManager", { enumerable: true, get: function () { return manager_1.CryptoManager; } });
+var aol_1 = require("./storage/aol");
+Object.defineProperty(exports, "AOLStorage", { enumerable: true, get: function () { return aol_1.AOLStorage; } });
+var binary_1 = require("./storage/binary");
+Object.defineProperty(exports, "BinaryStorage", { enumerable: true, get: function () { return binary_1.BinaryStorage; } });
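The consumer-facing effect of the two entry-point changes above is a wider public surface in 2.0.0. A sketch of the imports that now resolve from the package root (only the exported names are taken from the diff; nothing else is assumed):

```ts
// Everything below is re-exported by dist/index.js / dist/index.d.ts in 2.0.0.
import {
  Database,
  createDatabase,
  Collection,
  CryptoManager,
  AOLStorage,
  BinaryStorage,
} from "lmcs-db";
import type { DatabaseOptions, StorageType, QueryOptions, EncryptedData } from "lmcs-db";
```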
package/dist/persistence/AsyncWriteWorker.js
CHANGED
@@ -1,13 +1,18 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 class AsyncWriteWorker {
+    writing = false;
+    pendingPayload;
+    resolveIdle;
+    writeFn;
+    enqueuedCount = 0;
+    writesCount = 0;
+    lastDurationMs = 0;
+    lastBytes = 0;
+    totalBytes = 0;
+    lastStartedAt;
+    lastFinishedAt;
     constructor(writeFn) {
-        this.writing = false;
-        this.enqueuedCount = 0;
-        this.writesCount = 0;
-        this.lastDurationMs = 0;
-        this.lastBytes = 0;
-        this.totalBytes = 0;
         this.writeFn = writeFn;
     }
     enqueue(data) {
@@ -69,4 +74,3 @@ class AsyncWriteWorker {
     }
 }
 exports.default = AsyncWriteWorker;
-//# sourceMappingURL=AsyncWriteWorker.js.map
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { BaseStorage, LogEntry, StorageConfig } from "./base";
|
|
2
|
+
export declare class AOLStorage extends BaseStorage {
|
|
3
|
+
private buffer;
|
|
4
|
+
private crypto?;
|
|
5
|
+
private locker;
|
|
6
|
+
private bufferSize;
|
|
7
|
+
private compactionTimer?;
|
|
8
|
+
private isInitialized;
|
|
9
|
+
private writeCount;
|
|
10
|
+
constructor(config: StorageConfig & {
|
|
11
|
+
compactionInterval?: number;
|
|
12
|
+
bufferSize?: number;
|
|
13
|
+
});
|
|
14
|
+
initialize(): Promise<void>;
|
|
15
|
+
append(entry: LogEntry): Promise<void>;
|
|
16
|
+
flush(): Promise<void>;
|
|
17
|
+
readStream(): AsyncGenerator<LogEntry>;
|
|
18
|
+
compact(): Promise<void>;
|
|
19
|
+
close(): Promise<void>;
|
|
20
|
+
getStats(): {
|
|
21
|
+
buffered: number;
|
|
22
|
+
totalWrites: number;
|
|
23
|
+
};
|
|
24
|
+
clear?(): Promise<void>;
|
|
25
|
+
}
|
|
26
|
+
export { LogEntry };
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.AOLStorage = void 0;
|
|
4
|
+
const base_1 = require("./base");
|
|
5
|
+
const manager_1 = require("../crypto/manager");
|
|
6
|
+
const promises_1 = require("fs/promises");
|
|
7
|
+
const path_1 = require("path");
|
|
8
|
+
const lock_1 = require("../utils/lock");
|
|
9
|
+
const crypto_1 = require("crypto");
|
|
10
|
+
class AOLStorage extends base_1.BaseStorage {
|
|
11
|
+
buffer = [];
|
|
12
|
+
crypto;
|
|
13
|
+
locker = new lock_1.FileLocker();
|
|
14
|
+
bufferSize = 100;
|
|
15
|
+
compactionTimer;
|
|
16
|
+
isInitialized = false;
|
|
17
|
+
writeCount = 0;
|
|
18
|
+
constructor(config) {
|
|
19
|
+
super(config);
|
|
20
|
+
if (config.encryptionKey) {
|
|
21
|
+
this.crypto = new manager_1.CryptoManager(config.encryptionKey);
|
|
22
|
+
}
|
|
23
|
+
this.bufferSize = config.bufferSize || 100;
|
|
24
|
+
if (config.compactionInterval && config.compactionInterval > 0) {
|
|
25
|
+
this.compactionTimer = setInterval(() => {
|
|
26
|
+
this.compact().catch(console.error);
|
|
27
|
+
}, config.compactionInterval);
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
async initialize() {
|
|
31
|
+
const filePath = this.getFilePath("aol");
|
|
32
|
+
await (0, promises_1.mkdir)((0, path_1.dirname)(filePath), { recursive: true });
|
|
33
|
+
this.isInitialized = true;
|
|
34
|
+
}
|
|
35
|
+
async append(entry) {
|
|
36
|
+
if (!this.isInitialized)
|
|
37
|
+
throw new Error("Storage not initialized");
|
|
38
|
+
if (this.config.enableChecksums) {
|
|
39
|
+
const entryForChecksum = { ...entry };
|
|
40
|
+
delete entryForChecksum.checksum;
|
|
41
|
+
entry.checksum = (0, crypto_1.createHash)("sha256")
|
|
42
|
+
.update(JSON.stringify(entryForChecksum))
|
|
43
|
+
.digest("hex");
|
|
44
|
+
}
|
|
45
|
+
this.buffer.push(entry);
|
|
46
|
+
this.writeCount++;
|
|
47
|
+
if (this.buffer.length >= this.bufferSize) {
|
|
48
|
+
await this.flush();
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
async flush() {
|
|
52
|
+
if (this.buffer.length === 0 || !this.isInitialized)
|
|
53
|
+
return;
|
|
54
|
+
const filePath = this.getFilePath("aol");
|
|
55
|
+
const lockPath = `${filePath}.lock`;
|
|
56
|
+
await this.locker.withLock(lockPath, async () => {
|
|
57
|
+
const lines = this.buffer.map((entry) => {
|
|
58
|
+
const line = JSON.stringify(entry);
|
|
59
|
+
if (this.crypto) {
|
|
60
|
+
return JSON.stringify(this.crypto.encrypt(line));
|
|
61
|
+
}
|
|
62
|
+
return line;
|
|
63
|
+
});
|
|
64
|
+
await (0, promises_1.appendFile)(filePath, lines.join("\n") + "\n", "utf8");
|
|
65
|
+
// fsync para durabilidade
|
|
66
|
+
const fd = await (0, promises_1.open)(filePath, "a");
|
|
67
|
+
try {
|
|
68
|
+
await fd.sync();
|
|
69
|
+
}
|
|
70
|
+
finally {
|
|
71
|
+
await fd.close();
|
|
72
|
+
}
|
|
73
|
+
});
|
|
74
|
+
this.buffer = [];
|
|
75
|
+
}
|
|
76
|
+
async *readStream() {
|
|
77
|
+
await this.flush();
|
|
78
|
+
const filePath = this.getFilePath("aol");
|
|
79
|
+
try {
|
|
80
|
+
const content = await (0, promises_1.readFile)(filePath, "utf8");
|
|
81
|
+
const lines = content.split("\n").filter((l) => l.trim());
|
|
82
|
+
for (const line of lines) {
|
|
83
|
+
try {
|
|
84
|
+
let jsonStr;
|
|
85
|
+
if (this.crypto) {
|
|
86
|
+
const encrypted = JSON.parse(line);
|
|
87
|
+
jsonStr = this.crypto.decrypt(encrypted);
|
|
88
|
+
}
|
|
89
|
+
else {
|
|
90
|
+
jsonStr = line;
|
|
91
|
+
}
|
|
92
|
+
const entry = JSON.parse(jsonStr);
|
|
93
|
+
// Verifica checksum
|
|
94
|
+
if (this.config.enableChecksums && entry.checksum) {
|
|
95
|
+
const expected = entry.checksum;
|
|
96
|
+
const checkEntry = { ...entry };
|
|
97
|
+
delete checkEntry.checksum;
|
|
98
|
+
const actual = (0, crypto_1.createHash)("sha256")
|
|
99
|
+
.update(JSON.stringify(checkEntry))
|
|
100
|
+
.digest("hex");
|
|
101
|
+
if (actual !== expected) {
|
|
102
|
+
console.warn(`Checksum mismatch for entry ${entry.id}, skipping`);
|
|
103
|
+
continue;
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
yield entry;
|
|
107
|
+
}
|
|
108
|
+
catch (err) {
|
|
109
|
+
console.warn("Corrupted log entry skipped:", err);
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
catch (err) {
|
|
114
|
+
if (err.code !== "ENOENT")
|
|
115
|
+
throw err;
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
async compact() {
|
|
119
|
+
await this.flush();
|
|
120
|
+
const filePath = this.getFilePath("aol");
|
|
121
|
+
const tempFile = `${filePath}.tmp`;
|
|
122
|
+
const state = new Map();
|
|
123
|
+
for await (const entry of this.readStream()) {
|
|
124
|
+
const key = `${entry.collection}:${entry.id}`;
|
|
125
|
+
if (entry.op === "DELETE") {
|
|
126
|
+
state.delete(key);
|
|
127
|
+
}
|
|
128
|
+
else if (entry.op !== "BEGIN" &&
|
|
129
|
+
entry.op !== "COMMIT" &&
|
|
130
|
+
entry.op !== "ROLLBACK") {
|
|
131
|
+
state.set(key, entry);
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
const entries = Array.from(state.values());
|
|
135
|
+
if (entries.length === 0) {
|
|
136
|
+
await (0, promises_1.writeFile)(filePath, "", "utf8");
|
|
137
|
+
return;
|
|
138
|
+
}
|
|
139
|
+
const lines = entries.map((e) => {
|
|
140
|
+
const line = JSON.stringify(e);
|
|
141
|
+
return this.crypto ? JSON.stringify(this.crypto.encrypt(line)) : line;
|
|
142
|
+
});
|
|
143
|
+
await (0, promises_1.writeFile)(tempFile, lines.join("\n"), "utf8");
|
|
144
|
+
const lockPath = `${filePath}.lock`;
|
|
145
|
+
await this.locker.withLock(lockPath, async () => {
|
|
146
|
+
await (0, promises_1.rename)(tempFile, filePath);
|
|
147
|
+
});
|
|
148
|
+
console.log(`Compaction complete: ${this.writeCount} writes -> ${entries.length} entries`);
|
|
149
|
+
}
|
|
150
|
+
async close() {
|
|
151
|
+
await this.flush();
|
|
152
|
+
if (this.compactionTimer)
|
|
153
|
+
clearInterval(this.compactionTimer);
|
|
154
|
+
this.isInitialized = false;
|
|
155
|
+
}
|
|
156
|
+
getStats() {
|
|
157
|
+
return {
|
|
158
|
+
buffered: this.buffer.length,
|
|
159
|
+
totalWrites: this.writeCount,
|
|
160
|
+
};
|
|
161
|
+
}
|
|
162
|
+
clear() {
|
|
163
|
+
throw new Error("Method not implemented.");
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
exports.AOLStorage = AOLStorage;
|
|
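A small usage sketch of the new append-only log storage. The class, config fields, and method names come from aol.d.ts/aol.js above; the directory, database name, key, and sample entries are illustrative assumptions.

```ts
import { AOLStorage } from "lmcs-db";

async function demo() {
  const storage = new AOLStorage({
    dbPath: "./data",        // directory for the log file
    dbName: "demo",          // produces ./data/demo.aol
    enableChecksums: true,   // per-entry sha256 checksum, verified on read
    encryptionKey: "secret", // optional: each log line is encrypted
    bufferSize: 2,           // flush to disk once 2 entries are buffered
  });
  await storage.initialize();

  await storage.append({ op: "INSERT", collection: "users", id: "u1",
    data: { name: "Ada" }, checksum: "", timestamp: Date.now() });
  await storage.append({ op: "DELETE", collection: "users", id: "u1",
    checksum: "", timestamp: Date.now() });

  // compact() replays the log and keeps only the latest surviving state per
  // collection:id, so after the DELETE above no users:u1 entry remains.
  await storage.compact();

  for await (const entry of storage.readStream()) {
    console.log(entry.op, entry.collection, entry.id);
  }
  await storage.close();
}

demo().catch(console.error);
```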
@@ -0,0 +1,36 @@
|
|
|
1
|
+
export interface IStorage {
|
|
2
|
+
initialize(): Promise<void>;
|
|
3
|
+
append(entry: LogEntry): Promise<void>;
|
|
4
|
+
readStream(): AsyncGenerator<LogEntry>;
|
|
5
|
+
flush(): Promise<void>;
|
|
6
|
+
close(): Promise<void>;
|
|
7
|
+
clear?(): Promise<void>;
|
|
8
|
+
compact?(): Promise<void>;
|
|
9
|
+
}
|
|
10
|
+
export interface LogEntry {
|
|
11
|
+
op: 'INSERT' | 'UPDATE' | 'DELETE' | 'BEGIN' | 'COMMIT' | 'ROLLBACK';
|
|
12
|
+
collection: string;
|
|
13
|
+
id: string;
|
|
14
|
+
data?: unknown;
|
|
15
|
+
checksum: string;
|
|
16
|
+
timestamp: number;
|
|
17
|
+
txId?: string;
|
|
18
|
+
}
|
|
19
|
+
export interface StorageConfig {
|
|
20
|
+
dbPath: string;
|
|
21
|
+
dbName: string;
|
|
22
|
+
encryptionKey?: string;
|
|
23
|
+
enableChecksums?: boolean;
|
|
24
|
+
}
|
|
25
|
+
export declare abstract class BaseStorage implements IStorage {
|
|
26
|
+
protected config: StorageConfig;
|
|
27
|
+
constructor(config: StorageConfig);
|
|
28
|
+
abstract initialize(): Promise<void>;
|
|
29
|
+
abstract append(entry: LogEntry): Promise<void>;
|
|
30
|
+
abstract readStream(): AsyncGenerator<LogEntry>;
|
|
31
|
+
abstract flush(): Promise<void>;
|
|
32
|
+
abstract close(): Promise<void>;
|
|
33
|
+
abstract clear?(): Promise<void>;
|
|
34
|
+
abstract compact?(): Promise<void>;
|
|
35
|
+
protected getFilePath(extension: string): string;
|
|
36
|
+
}
|
|
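As a sketch of what the abstract contract above enables, here is a hypothetical custom backend. It is not the package's own MemoryStorage (whose implementation is not shown in this diff); the class name and the deep import path are assumptions.

```ts
// Assumed deep import; base.d.ts above ships the types for this path.
import { BaseStorage, LogEntry, StorageConfig } from "lmcs-db/dist/storage/base";

// A throwaway in-memory backend that satisfies every abstract member of BaseStorage.
class ArrayStorage extends BaseStorage {
  private entries: LogEntry[] = [];

  constructor(config: StorageConfig) {
    super(config);
  }
  async initialize(): Promise<void> {}
  async append(entry: LogEntry): Promise<void> {
    this.entries.push(entry);
  }
  async *readStream(): AsyncGenerator<LogEntry> {
    yield* this.entries;
  }
  async flush(): Promise<void> {}
  async close(): Promise<void> {
    this.entries = [];
  }
  async clear(): Promise<void> {
    this.entries = [];
  }
  async compact(): Promise<void> {}
}
```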
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.BaseStorage = void 0;
|
|
4
|
+
class BaseStorage {
|
|
5
|
+
config;
|
|
6
|
+
constructor(config) {
|
|
7
|
+
this.config = config;
|
|
8
|
+
}
|
|
9
|
+
getFilePath(extension) {
|
|
10
|
+
return `${this.config.dbPath}/${this.config.dbName}.${extension}`;
|
|
11
|
+
}
|
|
12
|
+
}
|
|
13
|
+
exports.BaseStorage = BaseStorage;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { BaseStorage, LogEntry, StorageConfig } from "./base";
|
|
2
|
+
export declare class BinaryStorage extends BaseStorage {
|
|
3
|
+
private data;
|
|
4
|
+
private locker;
|
|
5
|
+
private crypto?;
|
|
6
|
+
private filePath;
|
|
7
|
+
private lockPath;
|
|
8
|
+
private readonly MAGIC;
|
|
9
|
+
private readonly VERSION;
|
|
10
|
+
constructor(config: StorageConfig);
|
|
11
|
+
initialize(): Promise<void>;
|
|
12
|
+
append(entry: LogEntry): Promise<void>;
|
|
13
|
+
readAll(): Promise<LogEntry[]>;
|
|
14
|
+
readStream(): AsyncGenerator<LogEntry>;
|
|
15
|
+
flush(): Promise<void>;
|
|
16
|
+
close(): Promise<void>;
|
|
17
|
+
clear(): Promise<void>;
|
|
18
|
+
compact(): Promise<void>;
|
|
19
|
+
private serialize;
|
|
20
|
+
private deserialize;
|
|
21
|
+
}
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.BinaryStorage = void 0;
|
|
4
|
+
const base_1 = require("./base");
|
|
5
|
+
const promises_1 = require("fs/promises");
|
|
6
|
+
const path_1 = require("path");
|
|
7
|
+
const crypto_1 = require("crypto");
|
|
8
|
+
const lock_1 = require("../utils/lock");
|
|
9
|
+
const manager_1 = require("../crypto/manager");
|
|
10
|
+
class BinaryStorage extends base_1.BaseStorage {
|
|
11
|
+
data = [];
|
|
12
|
+
locker = new lock_1.FileLocker();
|
|
13
|
+
crypto;
|
|
14
|
+
filePath;
|
|
15
|
+
lockPath;
|
|
16
|
+
MAGIC = "LMCS";
|
|
17
|
+
VERSION = 1;
|
|
18
|
+
constructor(config) {
|
|
19
|
+
super(config);
|
|
20
|
+
this.filePath = this.getFilePath("bin");
|
|
21
|
+
this.lockPath = `${config.dbPath}/${config.dbName}.bin.lock`;
|
|
22
|
+
if (config.encryptionKey) {
|
|
23
|
+
this.crypto = new manager_1.CryptoManager(config.encryptionKey);
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
async initialize() {
|
|
27
|
+
await (0, promises_1.mkdir)((0, path_1.dirname)(this.filePath), { recursive: true });
|
|
28
|
+
try {
|
|
29
|
+
await (0, promises_1.access)(this.filePath);
|
|
30
|
+
const buffer = await (0, promises_1.readFile)(this.filePath);
|
|
31
|
+
if (buffer.length > 0) {
|
|
32
|
+
const payload = this.deserialize(buffer);
|
|
33
|
+
this.data = payload;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
catch (err) {
|
|
37
|
+
if (err.code !== "ENOENT")
|
|
38
|
+
throw err;
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
async append(entry) {
|
|
42
|
+
this.data.push(entry);
|
|
43
|
+
// Binary always does full rewrite (not append-only)
|
|
44
|
+
await this.flush();
|
|
45
|
+
}
|
|
46
|
+
async readAll() {
|
|
47
|
+
return [...this.data];
|
|
48
|
+
}
|
|
49
|
+
async *readStream() {
|
|
50
|
+
for (const entry of this.data) {
|
|
51
|
+
yield entry;
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
async flush() {
|
|
55
|
+
await this.locker.withLock(this.lockPath, async () => {
|
|
56
|
+
const buffer = this.serialize(this.data);
|
|
57
|
+
await (0, promises_1.writeFile)(this.filePath, buffer);
|
|
58
|
+
});
|
|
59
|
+
}
|
|
60
|
+
async close() {
|
|
61
|
+
await this.flush();
|
|
62
|
+
this.data = [];
|
|
63
|
+
}
|
|
64
|
+
async clear() {
|
|
65
|
+
this.data = [];
|
|
66
|
+
await this.flush();
|
|
67
|
+
}
|
|
68
|
+
async compact() {
|
|
69
|
+
// Binary storage rewrites on every append, so it's always compact
|
|
70
|
+
return Promise.resolve();
|
|
71
|
+
}
|
|
72
|
+
serialize(entries) {
|
|
73
|
+
const jsonStr = JSON.stringify(entries);
|
|
74
|
+
const compressed = Buffer.from(jsonStr); // Placeholder for compression
|
|
75
|
+
const header = {
|
|
76
|
+
magic: this.MAGIC,
|
|
77
|
+
version: this.VERSION,
|
|
78
|
+
checksum: (0, crypto_1.createHash)("sha256").update(compressed).digest("hex"),
|
|
79
|
+
encrypted: !!this.crypto,
|
|
80
|
+
};
|
|
81
|
+
let payload = compressed;
|
|
82
|
+
if (this.crypto) {
|
|
83
|
+
const encrypted = this.crypto.encrypt(jsonStr);
|
|
84
|
+
payload = Buffer.from(JSON.stringify(encrypted));
|
|
85
|
+
}
|
|
86
|
+
const headerBuf = Buffer.from(JSON.stringify(header));
|
|
87
|
+
const headerLen = Buffer.alloc(4);
|
|
88
|
+
headerLen.writeUInt32BE(headerBuf.length);
|
|
89
|
+
const payloadLen = Buffer.alloc(4);
|
|
90
|
+
payloadLen.writeUInt32BE(payload.length);
|
|
91
|
+
return Buffer.concat([headerLen, headerBuf, payloadLen, payload]);
|
|
92
|
+
}
|
|
93
|
+
deserialize(buffer) {
|
|
94
|
+
try {
|
|
95
|
+
let offset = 0;
|
|
96
|
+
const headerLen = buffer.readUInt32BE(offset);
|
|
97
|
+
offset += 4;
|
|
98
|
+
const headerBuf = buffer.slice(offset, offset + headerLen);
|
|
99
|
+
offset += headerLen;
|
|
100
|
+
const header = JSON.parse(headerBuf.toString());
|
|
101
|
+
if (header.magic !== this.MAGIC)
|
|
102
|
+
throw new Error("Invalid file format");
|
|
103
|
+
const payloadLen = buffer.readUInt32BE(offset);
|
|
104
|
+
offset += 4;
|
|
105
|
+
const payload = buffer.slice(offset, offset + payloadLen);
|
|
106
|
+
let jsonStr;
|
|
107
|
+
if (header.encrypted) {
|
|
108
|
+
if (!this.crypto)
|
|
109
|
+
throw new Error("File is encrypted but no key provided");
|
|
110
|
+
const encrypted = JSON.parse(payload.toString());
|
|
111
|
+
jsonStr = this.crypto.decrypt(encrypted);
|
|
112
|
+
}
|
|
113
|
+
else {
|
|
114
|
+
jsonStr = payload.toString();
|
|
115
|
+
}
|
|
116
|
+
return JSON.parse(jsonStr);
|
|
117
|
+
}
|
|
118
|
+
catch (error) {
|
|
119
|
+
console.error("Failed to deserialize binary storage:", error);
|
|
120
|
+
return [];
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
exports.BinaryStorage = BinaryStorage;
|
|
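For reference, serialize() above writes a length-prefixed header followed by a length-prefixed payload. A hedged reader sketch that only inspects the header block (the file path is an assumption):

```ts
// Layout written by BinaryStorage.serialize():
// [4-byte BE header length][header JSON][4-byte BE payload length][payload]
import { readFile } from "fs/promises";

async function readBinHeader(path: string) {
  const buf = await readFile(path);
  const headerLen = buf.readUInt32BE(0);
  const header = JSON.parse(buf.subarray(4, 4 + headerLen).toString("utf8"));
  const payloadLen = buf.readUInt32BE(4 + headerLen);
  // header fields per serialize(): { magic: "LMCS", version: 1, checksum: "<sha256 hex>", encrypted: boolean }
  return { header, payloadLen };
}

readBinHeader("./data/demo.bin").then(console.log).catch(console.error);
```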
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.BinaryStorage = exports.AOLStorage = exports.JSONStorage = exports.MemoryStorage = exports.BaseStorage = void 0;
|
|
4
|
+
var base_1 = require("./base");
|
|
5
|
+
Object.defineProperty(exports, "BaseStorage", { enumerable: true, get: function () { return base_1.BaseStorage; } });
|
|
6
|
+
var memory_1 = require("./memory");
|
|
7
|
+
Object.defineProperty(exports, "MemoryStorage", { enumerable: true, get: function () { return memory_1.MemoryStorage; } });
|
|
8
|
+
var json_1 = require("./json");
|
|
9
|
+
Object.defineProperty(exports, "JSONStorage", { enumerable: true, get: function () { return json_1.JSONStorage; } });
|
|
10
|
+
var aol_1 = require("./aol");
|
|
11
|
+
Object.defineProperty(exports, "AOLStorage", { enumerable: true, get: function () { return aol_1.AOLStorage; } });
|
|
12
|
+
var binary_1 = require("./binary");
|
|
13
|
+
Object.defineProperty(exports, "BinaryStorage", { enumerable: true, get: function () { return binary_1.BinaryStorage; } });
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { BaseStorage, LogEntry, StorageConfig } from "./base";
|
|
2
|
+
export declare class JSONStorage extends BaseStorage {
|
|
3
|
+
private autosaveInterval;
|
|
4
|
+
private cache;
|
|
5
|
+
private crypto?;
|
|
6
|
+
private locker;
|
|
7
|
+
private dirty;
|
|
8
|
+
private autosaveTimer?;
|
|
9
|
+
private isInitialized;
|
|
10
|
+
constructor(config: StorageConfig, autosaveInterval?: number);
|
|
11
|
+
initialize(): Promise<void>;
|
|
12
|
+
append(entry: LogEntry): Promise<void>;
|
|
13
|
+
readStream(): AsyncGenerator<LogEntry>;
|
|
14
|
+
flush(): Promise<void>;
|
|
15
|
+
close(): Promise<void>;
|
|
16
|
+
compact(): Promise<void>;
|
|
17
|
+
clear?(): Promise<void>;
|
|
18
|
+
}
|