@liorandb/core 1.0.18 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +39 -13
- package/dist/index.js +458 -117
- package/package.json +1 -1
- package/src/core/checkpoint.ts +163 -0
- package/src/core/compaction.ts +110 -48
- package/src/core/database.ts +101 -61
- package/src/core/wal.ts +268 -0
package/package.json
CHANGED
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
|
|
4
|
+
/* =========================
|
|
5
|
+
TYPES
|
|
6
|
+
========================= */
|
|
7
|
+
|
|
8
|
+
/**
 * Durable checkpoint payload persisted to disk by CheckpointManager.
 */
export interface CheckpointData {
  lsn: number; // Last durable LSN
  walGen: number; // WAL generation at checkpoint
  time: number; // Timestamp (ms)
  version: number; // Format version
}

/**
 * On-disk envelope: the payload plus a CRC32 of its JSON encoding,
 * used on load to detect torn or corrupted writes.
 */
interface StoredCheckpoint {
  data: CheckpointData;
  crc: number;
}
|
|
19
|
+
|
|
20
|
+
/* =========================
|
|
21
|
+
CONSTANTS
|
|
22
|
+
========================= */
|
|
23
|
+
|
|
24
|
+
// Two alternating checkpoint slot files: a crashed write can corrupt at
// most one slot, and load() falls back to the other (validated via CRC).
const CHECKPOINT_A = "__checkpoint_A.json";
const CHECKPOINT_B = "__checkpoint_B.json";
// Version stamped into every CheckpointData record.
const FORMAT_VERSION = 1;
|
|
27
|
+
|
|
28
|
+
/* =========================
|
|
29
|
+
CRC32 (no deps)
|
|
30
|
+
========================= */
|
|
31
|
+
|
|
32
|
+
const CRC32_TABLE = (() => {
|
|
33
|
+
const table = new Uint32Array(256);
|
|
34
|
+
for (let i = 0; i < 256; i++) {
|
|
35
|
+
let c = i;
|
|
36
|
+
for (let k = 0; k < 8; k++) {
|
|
37
|
+
c = (c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1);
|
|
38
|
+
}
|
|
39
|
+
table[i] = c >>> 0;
|
|
40
|
+
}
|
|
41
|
+
return table;
|
|
42
|
+
})();
|
|
43
|
+
|
|
44
|
+
function crc32(input: string): number {
|
|
45
|
+
let crc = 0xFFFFFFFF;
|
|
46
|
+
for (let i = 0; i < input.length; i++) {
|
|
47
|
+
crc = CRC32_TABLE[(crc ^ input.charCodeAt(i)) & 0xFF] ^ (crc >>> 8);
|
|
48
|
+
}
|
|
49
|
+
return (crc ^ 0xFFFFFFFF) >>> 0;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/* =========================
|
|
53
|
+
CHECKPOINT MANAGER
|
|
54
|
+
========================= */
|
|
55
|
+
|
|
56
|
+
export class CheckpointManager {
|
|
57
|
+
private baseDir: string;
|
|
58
|
+
private data: CheckpointData;
|
|
59
|
+
|
|
60
|
+
constructor(baseDir: string) {
|
|
61
|
+
this.baseDir = baseDir;
|
|
62
|
+
this.data = {
|
|
63
|
+
lsn: 0,
|
|
64
|
+
walGen: 1,
|
|
65
|
+
time: 0,
|
|
66
|
+
version: FORMAT_VERSION
|
|
67
|
+
};
|
|
68
|
+
|
|
69
|
+
this.load();
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
/* -------------------------
|
|
73
|
+
LOAD (CRC + FALLBACK)
|
|
74
|
+
------------------------- */
|
|
75
|
+
|
|
76
|
+
private load() {
|
|
77
|
+
const a = this.readCheckpoint(CHECKPOINT_A);
|
|
78
|
+
const b = this.readCheckpoint(CHECKPOINT_B);
|
|
79
|
+
|
|
80
|
+
if (a && b) {
|
|
81
|
+
// pick newest valid checkpoint
|
|
82
|
+
this.data = a.data.lsn >= b.data.lsn ? a.data : b.data;
|
|
83
|
+
return;
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (a) {
|
|
87
|
+
this.data = a.data;
|
|
88
|
+
return;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
if (b) {
|
|
92
|
+
this.data = b.data;
|
|
93
|
+
return;
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
console.warn("No valid checkpoint found, starting from zero");
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
private readCheckpoint(file: string): StoredCheckpoint | null {
|
|
100
|
+
const filePath = path.join(this.baseDir, file);
|
|
101
|
+
if (!fs.existsSync(filePath)) return null;
|
|
102
|
+
|
|
103
|
+
try {
|
|
104
|
+
const raw = fs.readFileSync(filePath, "utf8");
|
|
105
|
+
const parsed = JSON.parse(raw) as StoredCheckpoint;
|
|
106
|
+
|
|
107
|
+
if (!parsed?.data || typeof parsed.crc !== "number") {
|
|
108
|
+
return null;
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
const expected = crc32(JSON.stringify(parsed.data));
|
|
112
|
+
if (expected !== parsed.crc) {
|
|
113
|
+
console.error(`Checkpoint CRC mismatch: ${file}`);
|
|
114
|
+
return null;
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
return parsed;
|
|
118
|
+
} catch {
|
|
119
|
+
return null;
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
/* -------------------------
|
|
124
|
+
SAVE (DUAL WRITE)
|
|
125
|
+
------------------------- */
|
|
126
|
+
|
|
127
|
+
save(lsn: number, walGen: number) {
|
|
128
|
+
const data: CheckpointData = {
|
|
129
|
+
lsn,
|
|
130
|
+
walGen,
|
|
131
|
+
time: Date.now(),
|
|
132
|
+
version: FORMAT_VERSION
|
|
133
|
+
};
|
|
134
|
+
|
|
135
|
+
const stored: StoredCheckpoint = {
|
|
136
|
+
data,
|
|
137
|
+
crc: crc32(JSON.stringify(data))
|
|
138
|
+
};
|
|
139
|
+
|
|
140
|
+
// alternate between A/B for crash safety
|
|
141
|
+
const target =
|
|
142
|
+
lsn % 2 === 0 ? CHECKPOINT_A : CHECKPOINT_B;
|
|
143
|
+
|
|
144
|
+
try {
|
|
145
|
+
fs.writeFileSync(
|
|
146
|
+
path.join(this.baseDir, target),
|
|
147
|
+
JSON.stringify(stored, null, 2),
|
|
148
|
+
"utf8"
|
|
149
|
+
);
|
|
150
|
+
this.data = data;
|
|
151
|
+
} catch (err) {
|
|
152
|
+
console.error("Failed to write checkpoint:", err);
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
/* -------------------------
|
|
157
|
+
GET CURRENT
|
|
158
|
+
------------------------- */
|
|
159
|
+
|
|
160
|
+
get(): CheckpointData {
|
|
161
|
+
return this.data;
|
|
162
|
+
}
|
|
163
|
+
}
|
package/src/core/compaction.ts
CHANGED
|
@@ -5,117 +5,179 @@ import { Collection } from "./collection.js";
|
|
|
5
5
|
import { Index } from "./index.js";
|
|
6
6
|
import { decryptData } from "../utils/encryption.js";
|
|
7
7
|
|
|
8
|
+
/* ---------------------------------------------------------
|
|
9
|
+
CONSTANTS
|
|
10
|
+
--------------------------------------------------------- */
|
|
11
|
+
|
|
8
12
|
const TMP_SUFFIX = "__compact_tmp";
|
|
9
|
-
const OLD_SUFFIX = "
|
|
13
|
+
const OLD_SUFFIX = "__compact_old";
|
|
14
|
+
const INDEX_DIR = "__indexes";
|
|
15
|
+
|
|
16
|
+
/* ---------------------------------------------------------
|
|
17
|
+
PUBLIC ENTRY
|
|
18
|
+
--------------------------------------------------------- */
|
|
10
19
|
|
|
11
20
|
/**
|
|
12
|
-
*
|
|
21
|
+
* Full production-safe compaction:
|
|
22
|
+
* 1. Crash recovery
|
|
23
|
+
* 2. Snapshot rebuild
|
|
24
|
+
* 3. Atomic swap
|
|
25
|
+
* 4. Reopen DB
|
|
26
|
+
* 5. Rebuild indexes
|
|
13
27
|
*/
|
|
14
28
|
export async function compactCollectionEngine(col: Collection) {
|
|
15
|
-
await crashRecovery(col.dir);
|
|
16
|
-
|
|
17
29
|
const baseDir = col.dir;
|
|
18
30
|
const tmpDir = baseDir + TMP_SUFFIX;
|
|
19
31
|
const oldDir = baseDir + OLD_SUFFIX;
|
|
20
32
|
|
|
21
|
-
|
|
33
|
+
await crashRecovery(baseDir);
|
|
34
|
+
|
|
22
35
|
safeRemove(tmpDir);
|
|
23
36
|
safeRemove(oldDir);
|
|
24
37
|
|
|
25
|
-
// Snapshot rebuild
|
|
26
38
|
await snapshotRebuild(col, tmpDir);
|
|
27
39
|
|
|
28
|
-
|
|
29
|
-
atomicSwap(baseDir, tmpDir, oldDir);
|
|
40
|
+
await atomicSwap(baseDir, tmpDir, oldDir);
|
|
30
41
|
|
|
31
|
-
// Cleanup old data
|
|
32
42
|
safeRemove(oldDir);
|
|
43
|
+
|
|
44
|
+
// Reopen DB after swap
|
|
45
|
+
await reopenCollectionDB(col);
|
|
46
|
+
|
|
47
|
+
// Rebuild indexes after compaction
|
|
48
|
+
await rebuildIndexes(col);
|
|
33
49
|
}
|
|
34
50
|
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
*/
|
|
51
|
+
/* ---------------------------------------------------------
|
|
52
|
+
SNAPSHOT REBUILD
|
|
53
|
+
--------------------------------------------------------- */
|
|
54
|
+
|
|
38
55
|
async function snapshotRebuild(col: Collection, tmpDir: string) {
|
|
39
56
|
fs.mkdirSync(tmpDir, { recursive: true });
|
|
40
57
|
|
|
41
|
-
const tmpDB = new ClassicLevel(tmpDir, {
|
|
58
|
+
const tmpDB = new ClassicLevel(tmpDir, {
|
|
59
|
+
valueEncoding: "utf8"
|
|
60
|
+
});
|
|
42
61
|
|
|
43
62
|
for await (const [key, val] of col.db.iterator()) {
|
|
44
|
-
|
|
63
|
+
if (val !== undefined) {
|
|
64
|
+
await tmpDB.put(key, val);
|
|
65
|
+
}
|
|
45
66
|
}
|
|
46
67
|
|
|
47
68
|
await tmpDB.close();
|
|
48
|
-
await col.db.close();
|
|
69
|
+
await col.db.close(); // important: close before swap
|
|
49
70
|
}
|
|
50
71
|
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
*/
|
|
54
|
-
|
|
72
|
+
/* ---------------------------------------------------------
|
|
73
|
+
ATOMIC SWAP (HARDENED)
|
|
74
|
+
--------------------------------------------------------- */
|
|
75
|
+
|
|
76
|
+
async function atomicSwap(base: string, tmp: string, old: string) {
|
|
77
|
+
// Phase 1: rename base → old
|
|
55
78
|
fs.renameSync(base, old);
|
|
56
|
-
|
|
79
|
+
|
|
80
|
+
try {
|
|
81
|
+
// Phase 2: rename tmp → base
|
|
82
|
+
fs.renameSync(tmp, base);
|
|
83
|
+
} catch (err) {
|
|
84
|
+
// Rollback if tmp rename fails
|
|
85
|
+
if (fs.existsSync(old)) {
|
|
86
|
+
fs.renameSync(old, base);
|
|
87
|
+
}
|
|
88
|
+
throw err;
|
|
89
|
+
}
|
|
57
90
|
}
|
|
58
91
|
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
*/
|
|
92
|
+
/* ---------------------------------------------------------
|
|
93
|
+
CRASH RECOVERY
|
|
94
|
+
--------------------------------------------------------- */
|
|
95
|
+
|
|
62
96
|
export async function crashRecovery(baseDir: string) {
|
|
63
97
|
const tmp = baseDir + TMP_SUFFIX;
|
|
64
98
|
const old = baseDir + OLD_SUFFIX;
|
|
65
99
|
|
|
66
|
-
|
|
67
|
-
|
|
100
|
+
const baseExists = fs.existsSync(baseDir);
|
|
101
|
+
const tmpExists = fs.existsSync(tmp);
|
|
102
|
+
const oldExists = fs.existsSync(old);
|
|
103
|
+
|
|
104
|
+
// Case 1: swap interrupted → tmp is valid snapshot
|
|
105
|
+
if (tmpExists && oldExists) {
|
|
68
106
|
safeRemove(baseDir);
|
|
69
107
|
fs.renameSync(tmp, baseDir);
|
|
70
108
|
safeRemove(old);
|
|
109
|
+
return;
|
|
71
110
|
}
|
|
72
111
|
|
|
73
|
-
//
|
|
74
|
-
if (
|
|
112
|
+
// Case 2: base→old happened but tmp missing
|
|
113
|
+
if (!baseExists && oldExists) {
|
|
75
114
|
fs.renameSync(old, baseDir);
|
|
115
|
+
return;
|
|
76
116
|
}
|
|
77
117
|
|
|
78
|
-
//
|
|
79
|
-
if (
|
|
118
|
+
// Case 3: rebuild interrupted
|
|
119
|
+
if (tmpExists && !oldExists) {
|
|
80
120
|
safeRemove(tmp);
|
|
81
121
|
}
|
|
82
122
|
}
|
|
83
123
|
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
*/
|
|
124
|
+
/* ---------------------------------------------------------
|
|
125
|
+
REOPEN DB
|
|
126
|
+
--------------------------------------------------------- */
|
|
127
|
+
|
|
128
|
+
async function reopenCollectionDB(col: Collection) {
|
|
129
|
+
col.db = new ClassicLevel(col.dir, {
|
|
130
|
+
valueEncoding: "utf8"
|
|
131
|
+
});
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
/* ---------------------------------------------------------
|
|
135
|
+
INDEX REBUILD (SAFE)
|
|
136
|
+
--------------------------------------------------------- */
|
|
137
|
+
|
|
87
138
|
export async function rebuildIndexes(col: Collection) {
|
|
88
|
-
const indexRoot = path.join(col.dir,
|
|
139
|
+
const indexRoot = path.join(col.dir, INDEX_DIR);
|
|
89
140
|
|
|
90
|
-
|
|
91
|
-
safeRemove(indexRoot);
|
|
92
|
-
fs.mkdirSync(indexRoot, { recursive: true });
|
|
141
|
+
const oldIndexes = new Map(col["indexes"]);
|
|
93
142
|
|
|
94
|
-
|
|
95
|
-
|
|
143
|
+
// Close old index handles
|
|
144
|
+
for (const idx of oldIndexes.values()) {
|
|
145
|
+
try {
|
|
146
|
+
await idx.close();
|
|
147
|
+
} catch {}
|
|
96
148
|
}
|
|
97
149
|
|
|
98
|
-
|
|
150
|
+
safeRemove(indexRoot);
|
|
151
|
+
fs.mkdirSync(indexRoot, { recursive: true });
|
|
152
|
+
|
|
153
|
+
const rebuiltIndexes = new Map<string, Index>();
|
|
99
154
|
|
|
100
|
-
for (const idx of
|
|
101
|
-
const
|
|
155
|
+
for (const idx of oldIndexes.values()) {
|
|
156
|
+
const rebuilt = new Index(col.dir, idx.field, {
|
|
102
157
|
unique: idx.unique
|
|
103
158
|
});
|
|
104
159
|
|
|
105
160
|
for await (const [, enc] of col.db.iterator()) {
|
|
106
|
-
|
|
107
|
-
|
|
161
|
+
if (!enc) continue;
|
|
162
|
+
|
|
163
|
+
try {
|
|
164
|
+
const doc = decryptData(enc);
|
|
165
|
+
await rebuilt.insert(doc);
|
|
166
|
+
} catch {
|
|
167
|
+
// Skip corrupted doc safely
|
|
168
|
+
}
|
|
108
169
|
}
|
|
109
170
|
|
|
110
|
-
|
|
171
|
+
rebuiltIndexes.set(idx.field, rebuilt);
|
|
111
172
|
}
|
|
112
173
|
|
|
113
|
-
col["indexes"] =
|
|
174
|
+
col["indexes"] = rebuiltIndexes;
|
|
114
175
|
}
|
|
115
176
|
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
*/
|
|
177
|
+
/* ---------------------------------------------------------
|
|
178
|
+
UTIL
|
|
179
|
+
--------------------------------------------------------- */
|
|
180
|
+
|
|
119
181
|
function safeRemove(p: string) {
|
|
120
182
|
if (fs.existsSync(p)) {
|
|
121
183
|
fs.rmSync(p, { recursive: true, force: true });
|
package/src/core/database.ts
CHANGED
|
@@ -1,7 +1,5 @@
|
|
|
1
1
|
import path from "path";
|
|
2
2
|
import fs from "fs";
|
|
3
|
-
import { execFile } from "child_process";
|
|
4
|
-
import { promisify } from "util";
|
|
5
3
|
import { Collection } from "./collection.js";
|
|
6
4
|
import { Index, IndexOptions } from "./index.js";
|
|
7
5
|
import { MigrationEngine } from "./migration.js";
|
|
@@ -9,14 +7,12 @@ import type { LioranManager } from "../LioranManager.js";
|
|
|
9
7
|
import type { ZodSchema } from "zod";
|
|
10
8
|
import { decryptData } from "../utils/encryption.js";
|
|
11
9
|
|
|
12
|
-
|
|
10
|
+
import { WALManager } from "./wal.js";
|
|
11
|
+
import { CheckpointManager } from "./checkpoint.js";
|
|
13
12
|
|
|
14
13
|
/* ----------------------------- TYPES ----------------------------- */
|
|
15
14
|
|
|
16
15
|
type TXOp = { tx: number; col: string; op: string; args: any[] };
|
|
17
|
-
type TXCommit = { tx: number; commit: true };
|
|
18
|
-
type TXApplied = { tx: number; applied: true };
|
|
19
|
-
type WALEntry = TXOp | TXCommit | TXApplied;
|
|
20
16
|
|
|
21
17
|
type IndexMeta = {
|
|
22
18
|
field: string;
|
|
@@ -26,11 +22,11 @@ type IndexMeta = {
|
|
|
26
22
|
type DBMeta = {
|
|
27
23
|
version: number;
|
|
28
24
|
indexes: Record<string, IndexMeta[]>;
|
|
29
|
-
schemaVersion: string;
|
|
25
|
+
schemaVersion: string;
|
|
30
26
|
};
|
|
31
27
|
|
|
32
28
|
const META_FILE = "__db_meta.json";
|
|
33
|
-
const META_VERSION =
|
|
29
|
+
const META_VERSION = 2;
|
|
34
30
|
const DEFAULT_SCHEMA_VERSION = "v1";
|
|
35
31
|
|
|
36
32
|
/* ---------------------- TRANSACTION CONTEXT ---------------------- */
|
|
@@ -59,11 +55,34 @@ class DBTransactionContext {
|
|
|
59
55
|
}
|
|
60
56
|
|
|
61
57
|
/**
 * Durably commit this transaction:
 *  1) append every buffered op to the WAL,
 *  2) append the commit marker (the tx is recoverable from here on),
 *  3) apply the ops to storage,
 *  4) append the applied marker,
 *  5) advance the checkpoint past the applied LSN.
 *
 * Fix: the previous version bound the commit-marker LSN to an unused
 * local (`commitLSN`); the binding has been dropped.
 */
async commit() {
  // 1) Write all operations
  for (const op of this.ops) {
    await this.db.wal.append({
      tx: this.txId,
      type: "op",
      payload: op
    } as any);
  }

  // 2) Commit marker
  await this.db.wal.append({
    tx: this.txId,
    type: "commit"
  } as any);

  // 3) Apply to storage
  await this.db.applyTransaction(this.ops);

  // 4) Applied marker
  const appliedLSN = await this.db.wal.append({
    tx: this.txId,
    type: "applied"
  } as any);

  // 5) Advance checkpoint to durable applied LSN
  this.db.advanceCheckpoint(appliedLSN);

  await this.db.postCommitMaintenance();
}
|
|
68
87
|
}
|
|
69
88
|
|
|
@@ -75,28 +94,87 @@ export class LioranDB {
|
|
|
75
94
|
manager: LioranManager;
|
|
76
95
|
collections: Map<string, Collection>;
|
|
77
96
|
|
|
78
|
-
private walPath: string;
|
|
79
97
|
private metaPath: string;
|
|
80
98
|
private meta!: DBMeta;
|
|
81
99
|
|
|
82
100
|
private migrator: MigrationEngine;
|
|
83
101
|
private static TX_SEQ = 0;
|
|
84
102
|
|
|
103
|
+
public wal: WALManager;
|
|
104
|
+
private checkpoint: CheckpointManager;
|
|
105
|
+
|
|
85
106
|
// Builds the DB shell: paths, meta, WAL + checkpoint managers, migrator.
constructor(basePath: string, dbName: string, manager: LioranManager) {
  this.basePath = basePath;
  this.dbName = dbName;
  this.manager = manager;
  this.collections = new Map();

  this.metaPath = path.join(basePath, META_FILE);

  // Ensure the DB directory exists before any file I/O below.
  fs.mkdirSync(basePath, { recursive: true });

  this.loadMeta();

  this.wal = new WALManager(basePath);
  this.checkpoint = new CheckpointManager(basePath);

  this.migrator = new MigrationEngine(this);

  // NOTE(review): fire-and-forget async init — WAL recovery runs in the
  // background, so callers could use the DB before recovery completes.
  // Confirm callers await readiness, or expose the promise.
  this.initialize().catch(console.error);
}
|
|
125
|
+
|
|
126
|
+
/* ------------------------- INIT & RECOVERY ------------------------- */
|
|
127
|
+
|
|
128
|
+
// One-time startup work, kicked off by the constructor: WAL recovery.
private async initialize() {
  await this.recoverFromWAL();
}
|
|
131
|
+
|
|
132
|
+
/**
 * Replay the WAL from the last checkpointed LSN and re-apply every
 * transaction that reached its commit marker but not its applied
 * marker, then persist the recovered position as a new checkpoint.
 */
private async recoverFromWAL() {
  const fromLSN = this.checkpoint.get().lsn;

  const committed = new Set<number>();
  const applied = new Set<number>();
  const pendingOps = new Map<number, TXOp[]>();

  // Classify every WAL record by transaction.
  await this.wal.replay(fromLSN, async (record) => {
    if (record.type === "commit") {
      committed.add(record.tx);
    } else if (record.type === "applied") {
      applied.add(record.tx);
    } else if (record.type === "op") {
      const bucket = pendingOps.get(record.tx);
      if (bucket) {
        bucket.push(record.payload as TXOp);
      } else {
        pendingOps.set(record.tx, [record.payload as TXOp]);
      }
    }
  });

  let highestAppliedLSN = fromLSN;

  // Re-apply committed-but-not-applied transactions.
  for (const tx of committed) {
    if (applied.has(tx)) continue;

    const txOps = pendingOps.get(tx);
    if (!txOps) continue;

    await this.applyTransaction(txOps);
    highestAppliedLSN = this.wal.getCurrentLSN();
  }

  // Advance checkpoint after recovery so the next start skips this work.
  this.advanceCheckpoint(highestAppliedLSN);
}
|
|
166
|
+
|
|
167
|
+
/* ------------------------- CHECKPOINT ADVANCE ------------------------- */
|
|
168
|
+
|
|
169
|
+
/**
 * Persist a new checkpoint when `lsn` is ahead of the stored one, then
 * opportunistically prune WAL generations older than the current one.
 */
public advanceCheckpoint(lsn: number) {
  const storedLSN = this.checkpoint.get().lsn;
  if (lsn <= storedLSN) return;

  this.checkpoint.save(lsn, this.wal.getCurrentGen());

  // Optional WAL cleanup (safe because the checkpoint already advanced);
  // failures are deliberately ignored.
  this.wal.cleanup(this.wal.getCurrentGen() - 1).catch(() => {});
}
|
|
101
179
|
|
|
102
180
|
/* ------------------------- META ------------------------- */
|
|
@@ -146,50 +224,7 @@ export class LioranDB {
|
|
|
146
224
|
await this.migrator.upgradeToLatest();
|
|
147
225
|
}
|
|
148
226
|
|
|
149
|
-
/* -------------------------
|
|
150
|
-
|
|
151
|
-
async writeWAL(entries: WALEntry[]) {
|
|
152
|
-
const fd = await fs.promises.open(this.walPath, "a");
|
|
153
|
-
for (const e of entries) {
|
|
154
|
-
await fd.write(JSON.stringify(e) + "\n");
|
|
155
|
-
}
|
|
156
|
-
await fd.sync();
|
|
157
|
-
await fd.close();
|
|
158
|
-
}
|
|
159
|
-
|
|
160
|
-
async clearWAL() {
|
|
161
|
-
try { await fs.promises.unlink(this.walPath); } catch {}
|
|
162
|
-
}
|
|
163
|
-
|
|
164
|
-
private async recoverFromWAL() {
|
|
165
|
-
if (!fs.existsSync(this.walPath)) return;
|
|
166
|
-
|
|
167
|
-
const raw = await fs.promises.readFile(this.walPath, "utf8");
|
|
168
|
-
|
|
169
|
-
const committed = new Set<number>();
|
|
170
|
-
const applied = new Set<number>();
|
|
171
|
-
const ops = new Map<number, TXOp[]>();
|
|
172
|
-
|
|
173
|
-
for (const line of raw.split("\n")) {
|
|
174
|
-
if (!line.trim()) continue;
|
|
175
|
-
const entry: WALEntry = JSON.parse(line);
|
|
176
|
-
|
|
177
|
-
if ("commit" in entry) committed.add(entry.tx);
|
|
178
|
-
else if ("applied" in entry) applied.add(entry.tx);
|
|
179
|
-
else {
|
|
180
|
-
if (!ops.has(entry.tx)) ops.set(entry.tx, []);
|
|
181
|
-
ops.get(entry.tx)!.push(entry);
|
|
182
|
-
}
|
|
183
|
-
}
|
|
184
|
-
|
|
185
|
-
for (const tx of committed) {
|
|
186
|
-
if (applied.has(tx)) continue;
|
|
187
|
-
const txOps = ops.get(tx);
|
|
188
|
-
if (txOps) await this.applyTransaction(txOps);
|
|
189
|
-
}
|
|
190
|
-
|
|
191
|
-
await this.clearWAL();
|
|
192
|
-
}
|
|
227
|
+
/* ------------------------- TX APPLY ------------------------- */
|
|
193
228
|
|
|
194
229
|
async applyTransaction(ops: TXOp[]) {
|
|
195
230
|
for (const { col, op, args } of ops) {
|
|
@@ -269,13 +304,11 @@ export class LioranDB {
|
|
|
269
304
|
/* ------------------------- COMPACTION ------------------------- */
|
|
270
305
|
|
|
271
306
|
/** Compact a single collection's on-disk storage. */
async compactCollection(name: string) {
  await this.collection(name).compact();
}
|
|
276
310
|
|
|
277
311
|
async compactAll() {
|
|
278
|
-
await this.clearWAL();
|
|
279
312
|
for (const name of this.collections.keys()) {
|
|
280
313
|
await this.compactCollection(name);
|
|
281
314
|
}
|
|
@@ -291,12 +324,19 @@ export class LioranDB {
|
|
|
291
324
|
return result;
|
|
292
325
|
}
|
|
293
326
|
|
|
327
|
+
/* ------------------------- POST COMMIT ------------------------- */
|
|
328
|
+
|
|
329
|
+
// Intentionally a no-op today; called after every successful commit.
public async postCommitMaintenance() {
  // Hook for background compaction, stats, etc.
}
|
|
332
|
+
|
|
294
333
|
/* ------------------------- SHUTDOWN ------------------------- */
|
|
295
334
|
|
|
296
335
|
async close(): Promise<void> {
|
|
297
336
|
for (const col of this.collections.values()) {
|
|
298
337
|
try { await col.close(); } catch {}
|
|
299
338
|
}
|
|
339
|
+
|
|
300
340
|
this.collections.clear();
|
|
301
341
|
}
|
|
302
342
|
}
|