lmcs-db 1.0.3 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +247 -68
- package/dist/LmcsDB.d.ts +13 -0
- package/dist/LmcsDB.js +20 -5
- package/dist/LmcsDB.js.map +1 -1
- package/dist/core/collection.d.ts +33 -0
- package/dist/core/collection.js +287 -0
- package/dist/core/database.d.ts +35 -0
- package/dist/core/database.js +165 -0
- package/dist/core/indexer.d.ts +20 -0
- package/dist/core/indexer.js +89 -0
- package/dist/core/transaction-context.d.ts +13 -0
- package/dist/core/transaction-context.js +48 -0
- package/dist/core/transaction.d.ts +25 -0
- package/dist/core/transaction.js +122 -0
- package/dist/crypto/key-derivation.d.ts +0 -0
- package/dist/crypto/key-derivation.js +1 -0
- package/dist/crypto/manager.d.ts +22 -0
- package/dist/crypto/manager.js +76 -0
- package/dist/crypto/vault.d.ts +18 -0
- package/dist/crypto/vault.js +44 -0
- package/dist/index.d.ts +5 -2
- package/dist/index.js +12 -9
- package/dist/persistence/AsyncWriteWorker.d.ts +30 -0
- package/dist/persistence/AsyncWriteWorker.js +76 -0
- package/dist/persistence/AsyncWriteWorker.js.map +1 -0
- package/dist/storage/BinaryStorage.d.ts +3 -0
- package/dist/storage/BinaryStorage.js +43 -5
- package/dist/storage/BinaryStorage.js.map +1 -1
- package/dist/storage/aol.d.ts +26 -0
- package/dist/storage/aol.js +166 -0
- package/dist/storage/base.d.ts +36 -0
- package/dist/storage/base.js +13 -0
- package/dist/storage/binary.d.ts +21 -0
- package/dist/storage/binary.js +124 -0
- package/dist/storage/index.d.ts +5 -0
- package/dist/storage/index.js +13 -0
- package/dist/storage/json.d.ts +18 -0
- package/dist/storage/json.js +153 -0
- package/dist/storage/memory.d.ts +14 -0
- package/dist/storage/memory.js +42 -0
- package/dist/utils/checksum.d.ts +0 -0
- package/dist/utils/checksum.js +1 -0
- package/dist/utils/errors.d.ts +16 -0
- package/dist/utils/errors.js +37 -0
- package/dist/utils/lock.d.ts +9 -0
- package/dist/utils/lock.js +75 -0
- package/package.json +11 -5
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AOLStorage = void 0;
const base_1 = require("./base");
const manager_1 = require("../crypto/manager");
const promises_1 = require("fs/promises");
const path_1 = require("path");
const lock_1 = require("../utils/lock");
const crypto_1 = require("crypto");
/**
 * Append-only-log storage backend.
 *
 * Entries are buffered in memory and flushed as newline-delimited JSON
 * (one document per line, each line optionally encrypted). compact()
 * rewrites the log so only the latest state per `collection:id` survives.
 */
class AOLStorage extends base_1.BaseStorage {
    buffer = []; // entries accepted by append() but not yet on disk
    crypto; // set only when config.encryptionKey is provided
    locker = new lock_1.FileLocker();
    bufferSize = 100; // flush threshold, in entries
    compactionTimer;
    isInitialized = false;
    writeCount = 0; // total appends, reported via getStats()
    constructor(config) {
        super(config);
        if (config.encryptionKey) {
            this.crypto = new manager_1.CryptoManager(config.encryptionKey);
        }
        this.bufferSize = config.bufferSize || 100;
        if (config.compactionInterval && config.compactionInterval > 0) {
            this.compactionTimer = setInterval(() => {
                this.compact().catch(console.error);
            }, config.compactionInterval);
            // Background compaction must not keep the process alive on its own.
            if (typeof this.compactionTimer.unref === "function") {
                this.compactionTimer.unref();
            }
        }
    }
    /** Creates the database directory; must be called before any append(). */
    async initialize() {
        const filePath = this.getFilePath("aol");
        await (0, promises_1.mkdir)((0, path_1.dirname)(filePath), { recursive: true });
        this.isInitialized = true;
    }
    /**
     * Queues one log entry; flushes automatically once the buffer is full.
     * Mutates `entry.checksum` when checksums are enabled.
     */
    async append(entry) {
        if (!this.isInitialized)
            throw new Error("Storage not initialized");
        if (this.config.enableChecksums) {
            // Hash the entry without its checksum field (same scheme readStream verifies).
            const entryForChecksum = { ...entry };
            delete entryForChecksum.checksum;
            entry.checksum = (0, crypto_1.createHash)("sha256")
                .update(JSON.stringify(entryForChecksum))
                .digest("hex");
        }
        this.buffer.push(entry);
        this.writeCount++;
        if (this.buffer.length >= this.bufferSize) {
            await this.flush();
        }
    }
    /** Writes all buffered entries to the log file and fsyncs it. */
    async flush() {
        if (this.buffer.length === 0 || !this.isInitialized)
            return;
        // Take a snapshot first: append() may push new entries while we await
        // I/O below, and those must not be wiped once this flush finishes.
        // (Previously `this.buffer = []` ran AFTER the awaited write, silently
        // dropping anything appended in between.)
        const pending = this.buffer;
        this.buffer = [];
        const filePath = this.getFilePath("aol");
        const lockPath = `${filePath}.lock`;
        try {
            await this.locker.withLock(lockPath, async () => {
                const lines = pending.map((entry) => {
                    const line = JSON.stringify(entry);
                    return this.crypto ? JSON.stringify(this.crypto.encrypt(line)) : line;
                });
                await (0, promises_1.appendFile)(filePath, lines.join("\n") + "\n", "utf8");
                // fsync for durability
                const fd = await (0, promises_1.open)(filePath, "a");
                try {
                    await fd.sync();
                }
                finally {
                    await fd.close();
                }
            });
        }
        catch (err) {
            // Requeue unwritten entries (ahead of later appends) so the next
            // flush retries instead of losing them.
            this.buffer = pending.concat(this.buffer);
            throw err;
        }
    }
    /**
     * Yields every entry in the log, skipping corrupted lines and (when
     * checksums are enabled) entries whose checksum does not verify.
     * NOTE(review): loads the whole file into memory despite the name —
     * acceptable for small logs, revisit for large ones.
     */
    async *readStream() {
        await this.flush();
        const filePath = this.getFilePath("aol");
        try {
            const content = await (0, promises_1.readFile)(filePath, "utf8");
            const lines = content.split("\n").filter((l) => l.trim());
            for (const line of lines) {
                try {
                    let jsonStr;
                    if (this.crypto) {
                        const encrypted = JSON.parse(line);
                        jsonStr = this.crypto.decrypt(encrypted);
                    }
                    else {
                        jsonStr = line;
                    }
                    const entry = JSON.parse(jsonStr);
                    // Verify checksum (hash of the entry without its checksum field)
                    if (this.config.enableChecksums && entry.checksum) {
                        const expected = entry.checksum;
                        const checkEntry = { ...entry };
                        delete checkEntry.checksum;
                        const actual = (0, crypto_1.createHash)("sha256")
                            .update(JSON.stringify(checkEntry))
                            .digest("hex");
                        if (actual !== expected) {
                            console.warn(`Checksum mismatch for entry ${entry.id}, skipping`);
                            continue;
                        }
                    }
                    yield entry;
                }
                catch (err) {
                    console.warn("Corrupted log entry skipped:", err);
                }
            }
        }
        catch (err) {
            if (err.code !== "ENOENT")
                throw err;
        }
    }
    /**
     * Rewrites the log keeping only the latest entry per collection:id.
     * DELETE tombstones and transaction markers (BEGIN/COMMIT/ROLLBACK)
     * are dropped.
     */
    async compact() {
        await this.flush();
        const filePath = this.getFilePath("aol");
        const tempFile = `${filePath}.tmp`;
        const lockPath = `${filePath}.lock`;
        const state = new Map();
        for await (const entry of this.readStream()) {
            const key = `${entry.collection}:${entry.id}`;
            if (entry.op === "DELETE") {
                state.delete(key);
            }
            else if (entry.op !== "BEGIN" &&
                entry.op !== "COMMIT" &&
                entry.op !== "ROLLBACK") {
                state.set(key, entry);
            }
        }
        const entries = Array.from(state.values());
        if (entries.length === 0) {
            await this.locker.withLock(lockPath, async () => {
                await (0, promises_1.writeFile)(filePath, "", "utf8");
            });
            return;
        }
        const lines = entries.map((e) => {
            const line = JSON.stringify(e);
            return this.crypto ? JSON.stringify(this.crypto.encrypt(line)) : line;
        });
        // The trailing newline matters: flush() appends raw lines, so without
        // it the first entry appended after compaction would fuse with the
        // last compacted line and corrupt both.
        await (0, promises_1.writeFile)(tempFile, lines.join("\n") + "\n", "utf8");
        await this.locker.withLock(lockPath, async () => {
            await (0, promises_1.rename)(tempFile, filePath);
        });
        console.log(`Compaction complete: ${this.writeCount} writes -> ${entries.length} entries`);
    }
    /** Flushes pending entries and stops the background compaction timer. */
    async close() {
        await this.flush();
        if (this.compactionTimer)
            clearInterval(this.compactionTimer);
        this.isInitialized = false;
    }
    /** Buffer/throughput counters for diagnostics. */
    getStats() {
        return {
            buffered: this.buffer.length,
            totalWrites: this.writeCount,
        };
    }
    /** Drops buffered entries and truncates the log file (was a throwing stub). */
    async clear() {
        this.buffer = [];
        if (!this.isInitialized)
            return;
        const filePath = this.getFilePath("aol");
        await this.locker.withLock(`${filePath}.lock`, async () => {
            await (0, promises_1.writeFile)(filePath, "", "utf8");
        });
    }
}
exports.AOLStorage = AOLStorage;
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
/**
 * Contract implemented by every storage backend (memory, JSON, AOL, binary).
 */
export interface IStorage {
    initialize(): Promise<void>;
    append(entry: LogEntry): Promise<void>;
    readStream(): AsyncGenerator<LogEntry>;
    flush(): Promise<void>;
    close(): Promise<void>;
    /** Optional: remove all stored entries. */
    clear?(): Promise<void>;
    /** Optional: rewrite storage keeping only the latest state per record. */
    compact?(): Promise<void>;
}
/**
 * One operation in the write log.
 */
export interface LogEntry {
    /** Data operation or transaction marker. */
    op: 'INSERT' | 'UPDATE' | 'DELETE' | 'BEGIN' | 'COMMIT' | 'ROLLBACK';
    collection: string;
    id: string;
    data?: unknown;
    /** sha256 hex of the entry without this field — populated when checksums are enabled. */
    checksum: string;
    timestamp: number;
    /** Transaction id, set for entries written inside a transaction. */
    txId?: string;
}
/**
 * Configuration shared by all backends.
 */
export interface StorageConfig {
    dbPath: string;
    dbName: string;
    /** When set, backends encrypt their on-disk payloads. */
    encryptionKey?: string;
    /** When true, backends compute (and where supported, verify) per-entry checksums. */
    enableChecksums?: boolean;
}
/**
 * Common base class: holds the configuration and resolves the data-file path.
 */
export declare abstract class BaseStorage implements IStorage {
    protected config: StorageConfig;
    constructor(config: StorageConfig);
    abstract initialize(): Promise<void>;
    abstract append(entry: LogEntry): Promise<void>;
    abstract readStream(): AsyncGenerator<LogEntry>;
    abstract flush(): Promise<void>;
    abstract close(): Promise<void>;
    abstract clear?(): Promise<void>;
    abstract compact?(): Promise<void>;
    /** Returns `<dbPath>/<dbName>.<extension>`. */
    protected getFilePath(extension: string): string;
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BaseStorage = void 0;
/**
 * Common base for all storage backends: stores the shared configuration and
 * resolves the on-disk data-file path for a given extension.
 */
class BaseStorage {
    config;
    constructor(config) {
        this.config = config;
    }
    /** Builds `<dbPath>/<dbName>.<extension>` for this database's data file. */
    getFilePath(extension) {
        const { dbPath, dbName } = this.config;
        return `${dbPath}/${dbName}.${extension}`;
    }
}
exports.BaseStorage = BaseStorage;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { BaseStorage, LogEntry, StorageConfig } from "./base";
/**
 * Binary single-file storage backend.
 *
 * The full log is held in memory and the file is rewritten on every append
 * (not append-only). File layout: length-prefixed JSON header (magic,
 * version, checksum, encrypted flag) followed by a length-prefixed payload.
 */
export declare class BinaryStorage extends BaseStorage {
    private data;
    private locker;
    private crypto;
    private filePath;
    private lockPath;
    private readonly MAGIC;
    private readonly VERSION;
    constructor(config: StorageConfig);
    initialize(): Promise<void>;
    append(entry: LogEntry): Promise<void>;
    /** Returns a copy of all entries currently in memory. */
    readAll(): Promise<LogEntry[]>;
    readStream(): AsyncGenerator<LogEntry>;
    flush(): Promise<void>;
    close(): Promise<void>;
    clear(): Promise<void>;
    compact(): Promise<void>;
    private serialize;
    private deserialize;
}
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinaryStorage = void 0;
const base_1 = require("./base");
const promises_1 = require("fs/promises");
const path_1 = require("path");
const crypto_1 = require("crypto");
const lock_1 = require("../utils/lock");
const manager_1 = require("../crypto/manager");
/**
 * Binary single-file storage backend.
 *
 * File layout: [4-byte BE header length][JSON header][4-byte BE payload
 * length][payload]. The header records a magic string, a format version,
 * a sha256 checksum of the *plaintext* JSON payload, and whether the
 * payload is encrypted. Every append() rewrites the whole file.
 */
class BinaryStorage extends base_1.BaseStorage {
    data = []; // the complete log, held in memory
    locker = new lock_1.FileLocker();
    crypto; // set only when config.encryptionKey is provided
    filePath;
    lockPath;
    MAGIC = "LMCS";
    VERSION = 1;
    constructor(config) {
        super(config);
        this.filePath = this.getFilePath("bin");
        this.lockPath = `${config.dbPath}/${config.dbName}.bin.lock`;
        if (config.encryptionKey) {
            this.crypto = new manager_1.CryptoManager(config.encryptionKey);
        }
    }
    /** Creates the directory and loads an existing file if present. */
    async initialize() {
        await (0, promises_1.mkdir)((0, path_1.dirname)(this.filePath), { recursive: true });
        try {
            await (0, promises_1.access)(this.filePath);
            const buffer = await (0, promises_1.readFile)(this.filePath);
            if (buffer.length > 0) {
                const payload = this.deserialize(buffer);
                this.data = payload;
            }
        }
        catch (err) {
            if (err.code !== "ENOENT")
                throw err;
        }
    }
    /** Adds one entry and persists immediately (full rewrite). */
    async append(entry) {
        this.data.push(entry);
        // Binary always does full rewrite (not append-only)
        await this.flush();
    }
    /** Returns a copy of all in-memory entries. */
    async readAll() {
        return [...this.data];
    }
    /** Yields entries in insertion order. */
    async *readStream() {
        for (const entry of this.data) {
            yield entry;
        }
    }
    /** Serializes the in-memory log and rewrites the file under the lock. */
    async flush() {
        await this.locker.withLock(this.lockPath, async () => {
            const buffer = this.serialize(this.data);
            await (0, promises_1.writeFile)(this.filePath, buffer);
        });
    }
    async close() {
        await this.flush();
        this.data = [];
    }
    async clear() {
        this.data = [];
        await this.flush();
    }
    async compact() {
        // Binary storage rewrites on every append, so it's always compact
        return Promise.resolve();
    }
    /** Builds the on-disk byte layout for the given entries. */
    serialize(entries) {
        const jsonStr = JSON.stringify(entries);
        const compressed = Buffer.from(jsonStr); // Placeholder for compression
        const header = {
            magic: this.MAGIC,
            version: this.VERSION,
            // Checksum covers the plaintext JSON so deserialize() can verify it
            // for both encrypted and plain files.
            checksum: (0, crypto_1.createHash)("sha256").update(compressed).digest("hex"),
            encrypted: !!this.crypto,
        };
        let payload = compressed;
        if (this.crypto) {
            const encrypted = this.crypto.encrypt(jsonStr);
            payload = Buffer.from(JSON.stringify(encrypted));
        }
        const headerBuf = Buffer.from(JSON.stringify(header));
        const headerLen = Buffer.alloc(4);
        headerLen.writeUInt32BE(headerBuf.length);
        const payloadLen = Buffer.alloc(4);
        payloadLen.writeUInt32BE(payload.length);
        return Buffer.concat([headerLen, headerBuf, payloadLen, payload]);
    }
    /**
     * Parses the on-disk layout back into entries. Returns [] on any
     * corruption (logged), matching previous behavior.
     */
    deserialize(buffer) {
        try {
            let offset = 0;
            const headerLen = buffer.readUInt32BE(offset);
            offset += 4;
            const headerBuf = buffer.subarray(offset, offset + headerLen);
            offset += headerLen;
            const header = JSON.parse(headerBuf.toString());
            if (header.magic !== this.MAGIC)
                throw new Error("Invalid file format");
            const payloadLen = buffer.readUInt32BE(offset);
            offset += 4;
            const payload = buffer.subarray(offset, offset + payloadLen);
            let jsonStr;
            if (header.encrypted) {
                if (!this.crypto)
                    throw new Error("File is encrypted but no key provided");
                const encrypted = JSON.parse(payload.toString());
                jsonStr = this.crypto.decrypt(encrypted);
            }
            else {
                jsonStr = payload.toString();
            }
            // Previously the header checksum was written but never checked;
            // verify the plaintext hash here so corruption is actually detected.
            if (header.checksum) {
                const actual = (0, crypto_1.createHash)("sha256")
                    .update(Buffer.from(jsonStr))
                    .digest("hex");
                if (actual !== header.checksum) {
                    throw new Error("Checksum mismatch: binary storage payload is corrupted");
                }
            }
            return JSON.parse(jsonStr);
        }
        catch (error) {
            console.error("Failed to deserialize binary storage:", error);
            return [];
        }
    }
}
exports.BinaryStorage = BinaryStorage;
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinaryStorage = exports.AOLStorage = exports.JSONStorage = exports.MemoryStorage = exports.BaseStorage = void 0;
// Barrel module: eagerly loads each backend module and re-exports its class
// through a live getter (mirrors TypeScript's `export { X } from "..."` emit).
const base_1 = require("./base");
const memory_1 = require("./memory");
const json_1 = require("./json");
const aol_1 = require("./aol");
const binary_1 = require("./binary");
const bindings = [
    ["BaseStorage", () => base_1.BaseStorage],
    ["MemoryStorage", () => memory_1.MemoryStorage],
    ["JSONStorage", () => json_1.JSONStorage],
    ["AOLStorage", () => aol_1.AOLStorage],
    ["BinaryStorage", () => binary_1.BinaryStorage],
];
for (const [name, get] of bindings) {
    Object.defineProperty(exports, name, { enumerable: true, get });
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { BaseStorage, LogEntry, StorageConfig } from "./base";
/**
 * JSON-file storage backend.
 *
 * Keeps the whole log in an in-memory cache and periodically persists it as
 * a single (optionally encrypted) JSON document via an autosave timer.
 */
export declare class JSONStorage extends BaseStorage {
    private autosaveInterval;
    private cache;
    private crypto?;
    private locker;
    private dirty;
    private autosaveTimer?;
    private isInitialized;
    /** @param autosaveInterval ms between background saves; 0 disables autosave. */
    constructor(config: StorageConfig, autosaveInterval?: number);
    initialize(): Promise<void>;
    append(entry: LogEntry): Promise<void>;
    readStream(): AsyncGenerator<LogEntry>;
    flush(): Promise<void>;
    close(): Promise<void>;
    /** Deduplicates the log, keeping the last state per collection:id. */
    compact(): Promise<void>;
    clear?(): Promise<void>;
}
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.JSONStorage = void 0;
const base_1 = require("./base");
const manager_1 = require("../crypto/manager");
const promises_1 = require("fs/promises");
const path_1 = require("path");
const lock_1 = require("../utils/lock");
const crypto_1 = require("crypto");
/**
 * JSON-file storage backend: keeps the whole log in memory and persists it
 * as one (optionally encrypted) JSON document, on demand and via autosave.
 */
class JSONStorage extends base_1.BaseStorage {
    autosaveInterval; // ms between background saves; 0 disables autosave
    cache = []; // the full in-memory log
    crypto; // set only when config.encryptionKey is provided
    locker = new lock_1.FileLocker();
    dirty = false; // true when cache has unsaved changes
    autosaveTimer;
    isInitialized = false;
    constructor(config, autosaveInterval = 5000) {
        super(config);
        this.autosaveInterval = autosaveInterval;
        if (config.encryptionKey) {
            this.crypto = new manager_1.CryptoManager(config.encryptionKey);
        }
    }
    /** Loads the existing file (if any) and starts the autosave timer. */
    async initialize() {
        const filePath = this.getFilePath("json");
        await (0, promises_1.mkdir)((0, path_1.dirname)(filePath), { recursive: true });
        try {
            await (0, promises_1.access)(filePath);
            const content = await (0, promises_1.readFile)(filePath, "utf8");
            if (content.trim()) {
                let jsonContent;
                if (this.crypto) {
                    const encrypted = JSON.parse(content);
                    jsonContent = this.crypto.decrypt(encrypted);
                }
                else {
                    jsonContent = content;
                }
                this.cache = JSON.parse(jsonContent);
            }
        }
        catch (err) {
            if (err.code !== "ENOENT") {
                // Decryption/parse failure (e.g. wrong key) -> start fresh.
                // NOTE(review): a later flush() will then OVERWRITE the old file;
                // consider rethrowing here instead — confirm intended semantics.
                console.warn("Failed to load existing data, starting fresh", err);
                this.cache = [];
            }
        }
        // Autosave
        if (this.autosaveInterval > 0) {
            this.autosaveTimer = setInterval(() => {
                if (this.dirty)
                    this.flush().catch(console.error);
            }, this.autosaveInterval);
        }
        this.isInitialized = true;
    }
    /**
     * Adds one entry to the in-memory log and marks the cache dirty.
     * Mutates `entry.checksum` when checksums are enabled.
     */
    async append(entry) {
        if (!this.isInitialized)
            throw new Error("Storage not initialized");
        if (this.config.enableChecksums) {
            // Same scheme as the AOL backend: hash the entry *without* its
            // checksum field. (The old `{...entry, checksum: ""}` form produced
            // a digest no verifier in this codebase could match; crypto is now
            // required statically instead of dynamically imported per call.)
            const entryForChecksum = { ...entry };
            delete entryForChecksum.checksum;
            entry.checksum = (0, crypto_1.createHash)("sha256")
                .update(JSON.stringify(entryForChecksum))
                .digest("hex");
        }
        this.cache.push(entry);
        this.dirty = true;
    }
    /** Yields every cached entry in insertion order. */
    async *readStream() {
        for (const entry of this.cache) {
            yield entry;
        }
    }
    /** Persists the cache if dirty; no-op otherwise. */
    async flush() {
        if (!this.dirty || !this.isInitialized)
            return;
        const filePath = this.getFilePath("json");
        const lockPath = `${filePath}.lock`;
        await this.locker.withLock(lockPath, async () => {
            // Reset the flag *before* serializing: entries appended while the
            // write below is in flight will set it again and be picked up by
            // the next flush. (Previously it was cleared AFTER the awaited
            // write, silently marking such entries clean while unwritten.)
            this.dirty = false;
            const content = JSON.stringify(this.cache, null, 2);
            try {
                if (this.crypto) {
                    const encrypted = this.crypto.encrypt(content);
                    await (0, promises_1.writeFile)(filePath, JSON.stringify(encrypted), "utf8");
                }
                else {
                    await (0, promises_1.writeFile)(filePath, content, "utf8");
                }
            }
            catch (err) {
                this.dirty = true; // keep the data eligible for a retry flush
                throw err;
            }
        });
    }
    /** Stops autosave, saves pending changes, and releases the cache. */
    async close() {
        if (this.autosaveTimer)
            clearInterval(this.autosaveTimer);
        await this.flush();
        this.cache = [];
        this.isInitialized = false;
    }
    /**
     * Deduplicates the log: keeps only the last state per collection:id and
     * drops keys whose latest operation is DELETE, then persists.
     */
    async compact() {
        const seen = new Map();
        for (const entry of this.cache) {
            const key = `${entry.collection}:${entry.id}`;
            if (entry.op === "DELETE") {
                seen.delete(key);
            }
            else {
                seen.set(key, entry);
            }
        }
        this.cache = Array.from(seen.values());
        this.dirty = true;
        await this.flush();
    }
    /** Empties the log and persists the empty state (was a throwing stub). */
    async clear() {
        this.cache = [];
        this.dirty = true;
        await this.flush();
    }
}
exports.JSONStorage = JSONStorage;
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { BaseStorage, LogEntry, StorageConfig } from "./base";
/**
 * In-memory storage backend: entries live only for the lifetime of the
 * process. Useful for tests and ephemeral databases.
 */
export declare class MemoryStorage extends BaseStorage {
    private logs;
    private isInitialized;
    constructor(config: StorageConfig);
    initialize(): Promise<void>;
    append(entry: LogEntry): Promise<void>;
    readStream(): AsyncGenerator<LogEntry>;
    /** No-op: there is no backing file to sync. */
    flush(): Promise<void>;
    close(): Promise<void>;
    clear(): Promise<void>;
    /** Number of entries currently held. */
    getSize(): number;
    compact?(): Promise<void>;
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MemoryStorage = void 0;
const base_1 = require("./base");
/**
 * In-memory storage backend: nothing is persisted; entries live only for
 * the lifetime of the process.
 */
class MemoryStorage extends base_1.BaseStorage {
    logs = []; // the complete in-memory log
    isInitialized = false;
    constructor(config) {
        super(config);
    }
    /** Resets the log and marks the store ready. */
    async initialize() {
        this.isInitialized = true;
        this.logs = [];
    }
    /** Stores a defensive shallow copy of the entry. */
    async append(entry) {
        if (!this.isInitialized)
            throw new Error("Storage not initialized");
        this.logs.push({ ...entry });
    }
    /** Yields entries in insertion order. */
    async *readStream() {
        for (const entry of this.logs) {
            yield entry;
        }
    }
    /** No-op: there is no backing file to sync. */
    async flush() {
    }
    async close() {
        this.logs = [];
        this.isInitialized = false;
    }
    async clear() {
        this.logs = [];
    }
    /** Number of entries currently held. */
    getSize() {
        return this.logs.length;
    }
    /**
     * Keeps only the latest entry per collection:id, dropping DELETE-d keys
     * and transaction markers — same semantics as the JSON/AOL backends.
     * (Previously this threw "Method not implemented" synchronously, which
     * violated the declared optional `compact?(): Promise<void>` contract.)
     */
    async compact() {
        const latest = new Map();
        for (const entry of this.logs) {
            const key = `${entry.collection}:${entry.id}`;
            if (entry.op === "DELETE") {
                latest.delete(key);
            }
            else if (entry.op !== "BEGIN" &&
                entry.op !== "COMMIT" &&
                entry.op !== "ROLLBACK") {
                latest.set(key, entry);
            }
        }
        this.logs = Array.from(latest.values());
    }
}
exports.MemoryStorage = MemoryStorage;
|
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"use strict";
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Base class for all lmcs-db errors; carries a stable machine-readable code
 * alongside the human-readable message.
 */
export declare class LMCSError extends Error {
    code: string;
    constructor(message: string, code: string);
}
/** Error with code 'VALIDATION_ERROR'. */
export declare class ValidationError extends LMCSError {
    constructor(message: string);
}
/** Error with code 'DATA_CORRUPTION'. */
export declare class CorruptionError extends LMCSError {
    constructor(message: string);
}
/** Error with code 'CONCURRENCY_ERROR'. */
export declare class ConcurrencyError extends LMCSError {
    constructor(message: string);
}
/** Error with code 'TRANSACTION_ERROR'. */
export declare class TransactionError extends LMCSError {
    constructor(message: string);
}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TransactionError = exports.ConcurrencyError = exports.CorruptionError = exports.ValidationError = exports.LMCSError = void 0;
/**
 * Base class for all lmcs-db errors; carries a stable machine-readable code
 * alongside the human-readable message.
 */
class LMCSError extends Error {
    code;
    constructor(message, code) {
        super(message);
        this.code = code;
        this.name = this.constructor.name;
        // Error.captureStackTrace is a V8 extension; guard it so these error
        // classes also work on non-V8 runtimes.
        if (typeof Error.captureStackTrace === "function") {
            Error.captureStackTrace(this, this.constructor);
        }
    }
}
exports.LMCSError = LMCSError;
/** Error with code 'VALIDATION_ERROR'. */
class ValidationError extends LMCSError {
    constructor(message) {
        super(message, 'VALIDATION_ERROR');
    }
}
exports.ValidationError = ValidationError;
/** Error with code 'DATA_CORRUPTION'. */
class CorruptionError extends LMCSError {
    constructor(message) {
        super(message, 'DATA_CORRUPTION');
    }
}
exports.CorruptionError = CorruptionError;
/** Error with code 'CONCURRENCY_ERROR'. */
class ConcurrencyError extends LMCSError {
    constructor(message) {
        super(message, 'CONCURRENCY_ERROR');
    }
}
exports.ConcurrencyError = ConcurrencyError;
/** Error with code 'TRANSACTION_ERROR'. */
class TransactionError extends LMCSError {
    constructor(message) {
        super(message, 'TRANSACTION_ERROR');
    }
}
exports.TransactionError = TransactionError;
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Lock used by storage backends to serialize access to a file path.
 * NOTE(review): only the declaration is visible here — whether locking is
 * in-process only or also enforced across processes depends on lock.js;
 * confirm before relying on it for multi-process safety.
 */
export declare class FileLocker {
    private locks;
    private queues;
    /** Acquires the lock for `filePath`, retrying as configured. */
    acquire(filePath: string, options?: {
        retries?: number;
    }): Promise<void>;
    /** Releases the lock held for `filePath`. */
    release(filePath: string): Promise<void>;
    /** Runs `fn` while holding the lock for `filePath`, releasing it afterwards. */
    withLock<T>(filePath: string, fn: () => Promise<T>): Promise<T>;
}
|