@liorandb/core 1.0.18 → 1.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +36 -13
- package/dist/index.js +347 -109
- package/package.json +1 -1
- package/src/core/checkpoint.ts +111 -0
- package/src/core/compaction.ts +79 -32
- package/src/core/database.ts +78 -58
- package/src/core/wal.ts +213 -0
package/src/core/checkpoint.ts
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
|
|
4
|
+
/* =========================
|
|
5
|
+
TYPES
|
|
6
|
+
========================= */
|
|
7
|
+
|
|
8
|
+
export interface CheckpointData {
|
|
9
|
+
lsn: number; // Last durable LSN
|
|
10
|
+
walGen: number; // WAL generation at checkpoint
|
|
11
|
+
time: number; // Timestamp (ms)
|
|
12
|
+
version: number; // For future format upgrades
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
/* =========================
|
|
16
|
+
CONSTANTS
|
|
17
|
+
========================= */
|
|
18
|
+
|
|
19
|
+
const CHECKPOINT_FILE = "__checkpoint.json";
|
|
20
|
+
const TMP_SUFFIX = ".tmp";
|
|
21
|
+
const FORMAT_VERSION = 1;
|
|
22
|
+
|
|
23
|
+
/* =========================
|
|
24
|
+
CHECKPOINT MANAGER
|
|
25
|
+
========================= */
|
|
26
|
+
|
|
27
|
+
export class CheckpointManager {
|
|
28
|
+
private filePath: string;
|
|
29
|
+
private data: CheckpointData;
|
|
30
|
+
|
|
31
|
+
constructor(baseDir: string) {
|
|
32
|
+
this.filePath = path.join(baseDir, CHECKPOINT_FILE);
|
|
33
|
+
this.data = {
|
|
34
|
+
lsn: 0,
|
|
35
|
+
walGen: 1,
|
|
36
|
+
time: 0,
|
|
37
|
+
version: FORMAT_VERSION
|
|
38
|
+
};
|
|
39
|
+
|
|
40
|
+
this.load();
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
/* -------------------------
|
|
44
|
+
LOAD (Crash-safe)
|
|
45
|
+
------------------------- */
|
|
46
|
+
|
|
47
|
+
private load() {
|
|
48
|
+
if (!fs.existsSync(this.filePath)) {
|
|
49
|
+
return;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
try {
|
|
53
|
+
const raw = fs.readFileSync(this.filePath, "utf8");
|
|
54
|
+
const parsed = JSON.parse(raw) as CheckpointData;
|
|
55
|
+
|
|
56
|
+
if (
|
|
57
|
+
typeof parsed.lsn === "number" &&
|
|
58
|
+
typeof parsed.walGen === "number"
|
|
59
|
+
) {
|
|
60
|
+
this.data = parsed;
|
|
61
|
+
}
|
|
62
|
+
} catch {
|
|
63
|
+
console.error("Checkpoint corrupted, starting from zero");
|
|
64
|
+
this.data = {
|
|
65
|
+
lsn: 0,
|
|
66
|
+
walGen: 1,
|
|
67
|
+
time: 0,
|
|
68
|
+
version: FORMAT_VERSION
|
|
69
|
+
};
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
/* -------------------------
|
|
74
|
+
SAVE (Atomic Write)
|
|
75
|
+
------------------------- */
|
|
76
|
+
|
|
77
|
+
save(lsn: number, walGen: number) {
|
|
78
|
+
const newData: CheckpointData = {
|
|
79
|
+
lsn,
|
|
80
|
+
walGen,
|
|
81
|
+
time: Date.now(),
|
|
82
|
+
version: FORMAT_VERSION
|
|
83
|
+
};
|
|
84
|
+
|
|
85
|
+
const tmpPath = this.filePath + TMP_SUFFIX;
|
|
86
|
+
|
|
87
|
+
try {
|
|
88
|
+
// Write to temp file first
|
|
89
|
+
fs.writeFileSync(
|
|
90
|
+
tmpPath,
|
|
91
|
+
JSON.stringify(newData, null, 2),
|
|
92
|
+
{ encoding: "utf8" }
|
|
93
|
+
);
|
|
94
|
+
|
|
95
|
+
// Atomic rename
|
|
96
|
+
fs.renameSync(tmpPath, this.filePath);
|
|
97
|
+
|
|
98
|
+
this.data = newData;
|
|
99
|
+
} catch (err) {
|
|
100
|
+
console.error("Failed to write checkpoint:", err);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/* -------------------------
|
|
105
|
+
GET CURRENT
|
|
106
|
+
------------------------- */
|
|
107
|
+
|
|
108
|
+
get(): CheckpointData {
|
|
109
|
+
return this.data;
|
|
110
|
+
}
|
|
111
|
+
}
|
package/src/core/compaction.ts
CHANGED
|
@@ -5,117 +5,164 @@ import { Collection } from "./collection.js";
|
|
|
5
5
|
import { Index } from "./index.js";
|
|
6
6
|
import { decryptData } from "../utils/encryption.js";
|
|
7
7
|
|
|
8
|
+
/* ---------------------------------------------------------
|
|
9
|
+
CONSTANTS
|
|
10
|
+
--------------------------------------------------------- */
|
|
11
|
+
|
|
8
12
|
const TMP_SUFFIX = "__compact_tmp";
|
|
9
|
-
const OLD_SUFFIX = "
|
|
13
|
+
const OLD_SUFFIX = "__compact_old";
|
|
14
|
+
const INDEX_DIR = "__indexes";
|
|
15
|
+
|
|
16
|
+
/* ---------------------------------------------------------
|
|
17
|
+
PUBLIC ENTRY
|
|
18
|
+
--------------------------------------------------------- */
|
|
10
19
|
|
|
11
20
|
/**
 * Full safe compaction pipeline:
 *  1. Crash recovery for any earlier interrupted compaction
 *  2. Snapshot rebuild into a temp directory
 *  3. Atomic directory swap (base -> old, tmp -> base)
 *  4. Backup cleanup
 *
 * NOTE(review): the header here previously also promised an index rebuild,
 * but this function never calls rebuildIndexes() — confirm the caller does
 * it (col.db is closed by snapshotRebuild, so it must be reopened first).
 */
export async function compactCollectionEngine(col: Collection) {
  const baseDir = col.dir;
  const tmpDir = baseDir + TMP_SUFFIX;
  const oldDir = baseDir + OLD_SUFFIX;

  // Recover from any previous crash mid-compaction
  await crashRecovery(baseDir);

  // Clean leftovers (paranoia safety)
  safeRemove(tmpDir);
  safeRemove(oldDir);

  // Step 1: rebuild snapshot (closes col.db when done)
  await snapshotRebuild(col, tmpDir);

  // Step 2: atomic swap
  atomicSwap(baseDir, tmpDir, oldDir);

  // Cleanup of the pre-compaction backup
  safeRemove(oldDir);
}
|
|
34
48
|
|
|
49
|
+
/* ---------------------------------------------------------
|
|
50
|
+
SNAPSHOT REBUILD
|
|
51
|
+
--------------------------------------------------------- */
|
|
52
|
+
|
|
35
53
|
/**
|
|
36
|
-
*
|
|
54
|
+
* Rebuilds DB by copying only live keys
|
|
55
|
+
* WAL is assumed already checkpointed
|
|
37
56
|
*/
|
|
38
57
|
async function snapshotRebuild(col: Collection, tmpDir: string) {
|
|
39
58
|
fs.mkdirSync(tmpDir, { recursive: true });
|
|
40
59
|
|
|
41
|
-
const tmpDB = new ClassicLevel(tmpDir, {
|
|
60
|
+
const tmpDB = new ClassicLevel(tmpDir, {
|
|
61
|
+
valueEncoding: "utf8"
|
|
62
|
+
});
|
|
42
63
|
|
|
43
64
|
for await (const [key, val] of col.db.iterator()) {
|
|
44
|
-
|
|
65
|
+
if (val !== undefined) {
|
|
66
|
+
await tmpDB.put(key, val);
|
|
67
|
+
}
|
|
45
68
|
}
|
|
46
69
|
|
|
47
70
|
await tmpDB.close();
|
|
48
71
|
await col.db.close();
|
|
49
72
|
}
|
|
50
73
|
|
|
74
|
+
/* ---------------------------------------------------------
|
|
75
|
+
ATOMIC SWAP
|
|
76
|
+
--------------------------------------------------------- */
|
|
77
|
+
|
|
51
78
|
/**
|
|
52
|
-
* Atomic directory
|
|
79
|
+
* Atomic directory replacement (POSIX safe)
|
|
53
80
|
*/
|
|
54
81
|
function atomicSwap(base: string, tmp: string, old: string) {
|
|
55
82
|
fs.renameSync(base, old);
|
|
56
83
|
fs.renameSync(tmp, base);
|
|
57
84
|
}
|
|
58
85
|
|
|
86
|
+
/* ---------------------------------------------------------
|
|
87
|
+
CRASH RECOVERY
|
|
88
|
+
--------------------------------------------------------- */
|
|
89
|
+
|
|
59
90
|
/**
|
|
60
|
-
*
|
|
91
|
+
* Handles all partial-compaction states
|
|
61
92
|
*/
|
|
62
93
|
export async function crashRecovery(baseDir: string) {
|
|
63
94
|
const tmp = baseDir + TMP_SUFFIX;
|
|
64
95
|
const old = baseDir + OLD_SUFFIX;
|
|
65
96
|
|
|
66
|
-
|
|
67
|
-
|
|
97
|
+
const baseExists = fs.existsSync(baseDir);
|
|
98
|
+
const tmpExists = fs.existsSync(tmp);
|
|
99
|
+
const oldExists = fs.existsSync(old);
|
|
100
|
+
|
|
101
|
+
// Case 1: swap interrupted → tmp is valid snapshot
|
|
102
|
+
if (tmpExists && oldExists) {
|
|
68
103
|
safeRemove(baseDir);
|
|
69
104
|
fs.renameSync(tmp, baseDir);
|
|
70
105
|
safeRemove(old);
|
|
106
|
+
return;
|
|
71
107
|
}
|
|
72
108
|
|
|
73
|
-
//
|
|
74
|
-
if (
|
|
109
|
+
// Case 2: rename(base → old) happened, but tmp missing
|
|
110
|
+
if (!baseExists && oldExists) {
|
|
75
111
|
fs.renameSync(old, baseDir);
|
|
112
|
+
return;
|
|
76
113
|
}
|
|
77
114
|
|
|
78
|
-
//
|
|
79
|
-
if (
|
|
115
|
+
// Case 3: rebuild interrupted
|
|
116
|
+
if (tmpExists && !oldExists) {
|
|
80
117
|
safeRemove(tmp);
|
|
81
118
|
}
|
|
82
119
|
}
|
|
83
120
|
|
|
121
|
+
/* ---------------------------------------------------------
|
|
122
|
+
INDEX REBUILD
|
|
123
|
+
--------------------------------------------------------- */
|
|
124
|
+
|
|
84
125
|
/**
 * Rebuilds all indexes from the compacted DB, guaranteeing index
 * consistency with the data that survived compaction.
 *
 * Reads col["indexes"] (private map): each existing Index contributes its
 * field name and uniqueness flag to its rebuilt replacement.
 *
 * NOTE(review): each index triggers a full iteration of col.db, i.e.
 * O(indexes × documents) — presumably acceptable for a maintenance path.
 */
export async function rebuildIndexes(col: Collection) {
  const indexRoot = path.join(col.dir, INDEX_DIR);

  // Close existing index handles so their on-disk state can be removed.
  for (const idx of col["indexes"].values()) {
    try {
      await idx.close();
    } catch {}
  }

  // Destroy index directory
  safeRemove(indexRoot);
  fs.mkdirSync(indexRoot, { recursive: true });

  const newIndexes = new Map<string, Index>();

  for (const idx of col["indexes"].values()) {
    // Recreate the index with the same definition (field + uniqueness).
    const rebuilt = new Index(col.dir, idx.field, {
      unique: idx.unique
    });

    for await (const [, enc] of col.db.iterator()) {
      if (!enc) continue; // skip empty/missing values
      const doc = decryptData(enc);
      await rebuilt.insert(doc);
    }

    newIndexes.set(idx.field, rebuilt);
  }

  // Single assignment swaps the whole rebuilt index set in.
  col["indexes"] = newIndexes;
}
|
|
115
161
|
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
*/
|
|
162
|
+
/* ---------------------------------------------------------
|
|
163
|
+
UTIL
|
|
164
|
+
--------------------------------------------------------- */
|
|
165
|
+
|
|
119
166
|
function safeRemove(p: string) {
|
|
120
167
|
if (fs.existsSync(p)) {
|
|
121
168
|
fs.rmSync(p, { recursive: true, force: true });
|
package/src/core/database.ts
CHANGED
|
@@ -1,7 +1,5 @@
|
|
|
1
1
|
import path from "path";
|
|
2
2
|
import fs from "fs";
|
|
3
|
-
import { execFile } from "child_process";
|
|
4
|
-
import { promisify } from "util";
|
|
5
3
|
import { Collection } from "./collection.js";
|
|
6
4
|
import { Index, IndexOptions } from "./index.js";
|
|
7
5
|
import { MigrationEngine } from "./migration.js";
|
|
@@ -9,7 +7,8 @@ import type { LioranManager } from "../LioranManager.js";
|
|
|
9
7
|
import type { ZodSchema } from "zod";
|
|
10
8
|
import { decryptData } from "../utils/encryption.js";
|
|
11
9
|
|
|
12
|
-
|
|
10
|
+
import { WALManager } from "./wal.js";
|
|
11
|
+
import { CheckpointManager } from "./checkpoint.js";
|
|
13
12
|
|
|
14
13
|
/* ----------------------------- TYPES ----------------------------- */
|
|
15
14
|
|
|
@@ -26,11 +25,11 @@ type IndexMeta = {
|
|
|
26
25
|
type DBMeta = {
|
|
27
26
|
version: number;
|
|
28
27
|
indexes: Record<string, IndexMeta[]>;
|
|
29
|
-
schemaVersion: string;
|
|
28
|
+
schemaVersion: string;
|
|
30
29
|
};
|
|
31
30
|
|
|
32
31
|
const META_FILE = "__db_meta.json";
|
|
33
|
-
const META_VERSION =
|
|
32
|
+
const META_VERSION = 2;
|
|
34
33
|
const DEFAULT_SCHEMA_VERSION = "v1";
|
|
35
34
|
|
|
36
35
|
/* ---------------------- TRANSACTION CONTEXT ---------------------- */
|
|
@@ -59,11 +58,30 @@ class DBTransactionContext {
|
|
|
59
58
|
}
|
|
60
59
|
|
|
61
60
|
/**
 * Write-ahead commit protocol:
 *  1. Append every staged op to the WAL.
 *  2. Append a "commit" marker — from this record on, recovery treats the
 *     tx as committed (see recoverFromWAL).
 *  3. Apply the ops to the live collections.
 *  4. Append an "applied" marker so recovery will not re-apply this tx.
 *
 * Fix: the WAL records were typed `any`, which disabled all checking on
 * them; plain object literals let TypeScript infer and verify the shapes.
 */
async commit() {
  // 1. Stage each operation in the WAL.
  for (const op of this.ops) {
    await this.db.wal.append({ tx: this.txId, type: "op", payload: op });
  }

  // 2. Durability point: the commit marker.
  await this.db.wal.append({ tx: this.txId, type: "commit" });

  // 3. Apply the staged ops.
  await this.db.applyTransaction(this.ops);

  // 4. Record completion so restarts skip this tx.
  await this.db.wal.append({ tx: this.txId, type: "applied" });

  await this.db.postCommitMaintenance();
}
|
|
68
86
|
}
|
|
69
87
|
|
|
@@ -75,28 +93,68 @@ export class LioranDB {
|
|
|
75
93
|
manager: LioranManager;
|
|
76
94
|
collections: Map<string, Collection>;
|
|
77
95
|
|
|
78
|
-
private walPath: string;
|
|
79
96
|
private metaPath: string;
|
|
80
97
|
private meta!: DBMeta;
|
|
81
98
|
|
|
82
99
|
private migrator: MigrationEngine;
|
|
83
100
|
private static TX_SEQ = 0;
|
|
84
101
|
|
|
102
|
+
public wal: WALManager;
|
|
103
|
+
private checkpoint: CheckpointManager;
|
|
104
|
+
|
|
85
105
|
constructor(basePath: string, dbName: string, manager: LioranManager) {
  this.basePath = basePath;
  this.dbName = dbName;
  this.manager = manager;
  this.collections = new Map();

  this.metaPath = path.join(basePath, META_FILE);

  // Ensure the database directory exists before anything touches disk.
  fs.mkdirSync(basePath, { recursive: true });

  this.loadMeta();

  // WAL + checkpoint managers must exist before recovery can run.
  this.wal = new WALManager(basePath);
  this.checkpoint = new CheckpointManager(basePath);

  this.migrator = new MigrationEngine(this);

  // NOTE(review): recovery is fire-and-forget — the constructor returns
  // before recoverFromWAL() finishes, so early reads/writes can race
  // recovery. Consider exposing this promise (e.g. a public `ready`
  // field) so callers can await it.
  this.initialize().catch(console.error);
}
|
|
124
|
+
|
|
125
|
+
/* ------------------------- INIT & RECOVERY ------------------------- */
|
|
126
|
+
|
|
127
|
+
// Async half of construction: WAL recovery. Kicked off (not awaited)
// by the constructor.
private async initialize() {
  await this.recoverFromWAL();
}
|
|
130
|
+
|
|
131
|
+
/**
 * Replay the WAL from the last checkpointed LSN and re-apply every
 * transaction that reached its "commit" record but not its "applied"
 * record (i.e. crashed between durability and application).
 */
private async recoverFromWAL() {
  const checkpointData = this.checkpoint.get();
  const fromLSN = checkpointData.lsn;

  const committed = new Set<number>();
  const applied = new Set<number>();
  const ops = new Map<number, TXOp[]>();

  // Single pass over the WAL: bucket records by transaction id.
  await this.wal.replay(fromLSN, async (record) => {
    if (record.type === "commit") {
      committed.add(record.tx);
    } else if (record.type === "applied") {
      applied.add(record.tx);
    } else if (record.type === "op") {
      if (!ops.has(record.tx)) ops.set(record.tx, []);
      ops.get(record.tx)!.push(record.payload);
    }
  });

  for (const tx of committed) {
    if (applied.has(tx)) continue; // fully applied before the crash

    const txOps = ops.get(tx);
    if (txOps) {
      await this.applyTransaction(txOps);
      // Fix: record that the tx is now applied, mirroring commit();
      // without this marker every restart would re-apply the same
      // transactions all over again.
      await this.wal.append({ tx, type: "applied" });
    }
  }
}
|
|
101
159
|
|
|
102
160
|
/* ------------------------- META ------------------------- */
|
|
@@ -146,50 +204,7 @@ export class LioranDB {
|
|
|
146
204
|
await this.migrator.upgradeToLatest();
|
|
147
205
|
}
|
|
148
206
|
|
|
149
|
-
/* -------------------------
|
|
150
|
-
|
|
151
|
-
async writeWAL(entries: WALEntry[]) {
|
|
152
|
-
const fd = await fs.promises.open(this.walPath, "a");
|
|
153
|
-
for (const e of entries) {
|
|
154
|
-
await fd.write(JSON.stringify(e) + "\n");
|
|
155
|
-
}
|
|
156
|
-
await fd.sync();
|
|
157
|
-
await fd.close();
|
|
158
|
-
}
|
|
159
|
-
|
|
160
|
-
async clearWAL() {
|
|
161
|
-
try { await fs.promises.unlink(this.walPath); } catch {}
|
|
162
|
-
}
|
|
163
|
-
|
|
164
|
-
private async recoverFromWAL() {
|
|
165
|
-
if (!fs.existsSync(this.walPath)) return;
|
|
166
|
-
|
|
167
|
-
const raw = await fs.promises.readFile(this.walPath, "utf8");
|
|
168
|
-
|
|
169
|
-
const committed = new Set<number>();
|
|
170
|
-
const applied = new Set<number>();
|
|
171
|
-
const ops = new Map<number, TXOp[]>();
|
|
172
|
-
|
|
173
|
-
for (const line of raw.split("\n")) {
|
|
174
|
-
if (!line.trim()) continue;
|
|
175
|
-
const entry: WALEntry = JSON.parse(line);
|
|
176
|
-
|
|
177
|
-
if ("commit" in entry) committed.add(entry.tx);
|
|
178
|
-
else if ("applied" in entry) applied.add(entry.tx);
|
|
179
|
-
else {
|
|
180
|
-
if (!ops.has(entry.tx)) ops.set(entry.tx, []);
|
|
181
|
-
ops.get(entry.tx)!.push(entry);
|
|
182
|
-
}
|
|
183
|
-
}
|
|
184
|
-
|
|
185
|
-
for (const tx of committed) {
|
|
186
|
-
if (applied.has(tx)) continue;
|
|
187
|
-
const txOps = ops.get(tx);
|
|
188
|
-
if (txOps) await this.applyTransaction(txOps);
|
|
189
|
-
}
|
|
190
|
-
|
|
191
|
-
await this.clearWAL();
|
|
192
|
-
}
|
|
207
|
+
/* ------------------------- TX APPLY ------------------------- */
|
|
193
208
|
|
|
194
209
|
async applyTransaction(ops: TXOp[]) {
|
|
195
210
|
for (const { col, op, args } of ops) {
|
|
@@ -269,13 +284,11 @@ export class LioranDB {
|
|
|
269
284
|
/* ------------------------- COMPACTION ------------------------- */
|
|
270
285
|
|
|
271
286
|
/**
 * Compact a single collection.
 *
 * NOTE(review): snapshotRebuild assumes the WAL is already checkpointed,
 * but nothing here checkpoints/trims the WAL first — confirm callers do,
 * or committed-but-uncheckpointed ops could be replayed against the
 * freshly compacted data.
 */
async compactCollection(name: string) {
  const col = this.collection(name);
  await col.compact();
}
|
|
276
290
|
|
|
277
291
|
async compactAll() {
|
|
278
|
-
await this.clearWAL();
|
|
279
292
|
for (const name of this.collections.keys()) {
|
|
280
293
|
await this.compactCollection(name);
|
|
281
294
|
}
|
|
@@ -291,12 +304,19 @@ export class LioranDB {
|
|
|
291
304
|
return result;
|
|
292
305
|
}
|
|
293
306
|
|
|
307
|
+
/* ------------------------- POST COMMIT ------------------------- */
|
|
308
|
+
|
|
309
|
+
/**
 * Hook that runs after every committed transaction
 * (see DBTransactionContext.commit). Currently a no-op — the intended
 * extension point for future maintenance such as checkpointing.
 */
public async postCommitMaintenance() {
  // Custom maintenance can be added here
}
|
|
312
|
+
|
|
294
313
|
/* ------------------------- SHUTDOWN ------------------------- */
|
|
295
314
|
|
|
296
315
|
async close(): Promise<void> {
|
|
297
316
|
for (const col of this.collections.values()) {
|
|
298
317
|
try { await col.close(); } catch {}
|
|
299
318
|
}
|
|
319
|
+
|
|
300
320
|
this.collections.clear();
|
|
301
321
|
}
|
|
302
322
|
}
|