@liorandb/core 1.0.19 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -23,6 +23,10 @@ export interface UpdateOptions {
23
23
  upsert?: boolean;
24
24
  }
25
25
 
26
+ export interface CollectionOptions {
27
+ readonly?: boolean;
28
+ }
29
+
26
30
  export class Collection<T = any> {
27
31
  dir: string;
28
32
  db: ClassicLevel<string, string>;
@@ -33,16 +37,31 @@ export class Collection<T = any> {
33
37
  private migrations: Migration<T>[] = [];
34
38
 
35
39
  private indexes = new Map<string, Index>();
40
+ private readonlyMode: boolean;
36
41
 
37
42
  constructor(
38
43
  dir: string,
39
44
  schema?: ZodSchema<T>,
40
- schemaVersion: number = 1
45
+ schemaVersion: number = 1,
46
+ options?: CollectionOptions
41
47
  ) {
42
48
  this.dir = dir;
43
- this.db = new ClassicLevel(dir, { valueEncoding: "utf8" });
44
49
  this.schema = schema;
45
50
  this.schemaVersion = schemaVersion;
51
+ this.readonlyMode = options?.readonly ?? false;
52
+
53
+ this.db = new ClassicLevel(dir, {
54
+ valueEncoding: "utf8",
55
+ readOnly: this.readonlyMode
56
+ } as any);
57
+ }
58
+
59
+ /* ===================== INTERNAL ===================== */
60
+
61
+ private assertWritable() {
62
+ if (this.readonlyMode) {
63
+ throw new Error("Collection is in readonly replica mode");
64
+ }
46
65
  }
47
66
 
48
67
  /* ===================== SCHEMA ===================== */
@@ -106,6 +125,7 @@ export class Collection<T = any> {
106
125
  }
107
126
 
108
127
  private async _updateIndexes(oldDoc: any, newDoc: any) {
128
+ if (this.readonlyMode) return;
109
129
  for (const index of this.indexes.values()) {
110
130
  await index.update(oldDoc, newDoc);
111
131
  }
@@ -114,6 +134,8 @@ export class Collection<T = any> {
114
134
  /* ===================== COMPACTION ===================== */
115
135
 
116
136
  async compact(): Promise<void> {
137
+ this.assertWritable();
138
+
117
139
  return this._enqueue(async () => {
118
140
  try { await this.db.close(); } catch {}
119
141
 
@@ -145,6 +167,8 @@ export class Collection<T = any> {
145
167
  /* ===================== STORAGE ===================== */
146
168
 
147
169
  private async _insertOne(doc: any) {
170
+ this.assertWritable();
171
+
148
172
  const _id = doc._id ?? uuid();
149
173
  const final = this.validate({
150
174
  _id,
@@ -159,6 +183,8 @@ export class Collection<T = any> {
159
183
  }
160
184
 
161
185
  private async _insertMany(docs: any[]) {
186
+ this.assertWritable();
187
+
162
188
  const batch: any[] = [];
163
189
  const out = [];
164
190
 
@@ -214,14 +240,13 @@ export class Collection<T = any> {
214
240
  }
215
241
 
216
242
  private async _readAndMigrate(id: string) {
217
- const enc = await this.db.get(id);
243
+ const enc = await this.db.get(id).catch(() => null);
218
244
  if (!enc) return null;
219
245
 
220
246
  const raw = decryptData(enc);
221
247
  const migrated = this.migrateIfNeeded(raw);
222
248
 
223
- // Lazy write-back if migrated
224
- if (raw.__v !== this.schemaVersion) {
249
+ if (!this.readonlyMode && raw.__v !== this.schemaVersion) {
225
250
  await this.db.put(id, encryptData(migrated));
226
251
  await this._updateIndexes(raw, migrated);
227
252
  }
@@ -247,9 +272,7 @@ export class Collection<T = any> {
247
272
 
248
273
  private async _findOne(query: any) {
249
274
  if (query?._id) {
250
- try {
251
- return await this._readAndMigrate(String(query._id));
252
- } catch { return null; }
275
+ return this._readAndMigrate(String(query._id));
253
276
  }
254
277
 
255
278
  const ids = await this._getCandidateIds(query);
@@ -285,6 +308,8 @@ export class Collection<T = any> {
285
308
  /* ===================== UPDATE ===================== */
286
309
 
287
310
  private async _updateOne(filter: any, update: any, options: UpdateOptions) {
311
+ this.assertWritable();
312
+
288
313
  const ids = await this._getCandidateIds(filter);
289
314
 
290
315
  for (const id of ids) {
@@ -313,6 +338,8 @@ export class Collection<T = any> {
313
338
  }
314
339
 
315
340
  private async _updateMany(filter: any, update: any) {
341
+ this.assertWritable();
342
+
316
343
  const ids = await this._getCandidateIds(filter);
317
344
  const out = [];
318
345
 
@@ -340,6 +367,8 @@ export class Collection<T = any> {
340
367
  /* ===================== DELETE ===================== */
341
368
 
342
369
  private async _deleteOne(filter: any) {
370
+ this.assertWritable();
371
+
343
372
  const ids = await this._getCandidateIds(filter);
344
373
 
345
374
  for (const id of ids) {
@@ -357,6 +386,8 @@ export class Collection<T = any> {
357
386
  }
358
387
 
359
388
  private async _deleteMany(filter: any) {
389
+ this.assertWritable();
390
+
360
391
  const ids = await this._getCandidateIds(filter);
361
392
  let count = 0;
362
393
 
@@ -18,42 +18,40 @@ const INDEX_DIR = "__indexes";
18
18
  --------------------------------------------------------- */
19
19
 
20
20
  /**
21
- * Full safe compaction pipeline:
21
+ * Full production-safe compaction:
22
22
  * 1. Crash recovery
23
23
  * 2. Snapshot rebuild
24
- * 3. Atomic directory swap
25
- * 4. Index rebuild
24
+ * 3. Atomic swap
25
+ * 4. Reopen DB
26
+ * 5. Rebuild indexes
26
27
  */
27
28
  export async function compactCollectionEngine(col: Collection) {
28
29
  const baseDir = col.dir;
29
30
  const tmpDir = baseDir + TMP_SUFFIX;
30
31
  const oldDir = baseDir + OLD_SUFFIX;
31
32
 
32
- // Recover from any previous crash mid-compaction
33
33
  await crashRecovery(baseDir);
34
34
 
35
- // Clean leftovers (paranoia safety)
36
35
  safeRemove(tmpDir);
37
36
  safeRemove(oldDir);
38
37
 
39
- // Step 1: rebuild snapshot
40
38
  await snapshotRebuild(col, tmpDir);
41
39
 
42
- // Step 2: atomic swap
43
- atomicSwap(baseDir, tmpDir, oldDir);
40
+ await atomicSwap(baseDir, tmpDir, oldDir);
44
41
 
45
- // Cleanup
46
42
  safeRemove(oldDir);
43
+
44
+ // Reopen DB after swap
45
+ await reopenCollectionDB(col);
46
+
47
+ // Rebuild indexes after compaction
48
+ await rebuildIndexes(col);
47
49
  }
48
50
 
49
51
  /* ---------------------------------------------------------
50
52
  SNAPSHOT REBUILD
51
53
  --------------------------------------------------------- */
52
54
 
53
- /**
54
- * Rebuilds DB by copying only live keys
55
- * WAL is assumed already checkpointed
56
- */
57
55
  async function snapshotRebuild(col: Collection, tmpDir: string) {
58
56
  fs.mkdirSync(tmpDir, { recursive: true });
59
57
 
@@ -68,28 +66,33 @@ async function snapshotRebuild(col: Collection, tmpDir: string) {
68
66
  }
69
67
 
70
68
  await tmpDB.close();
71
- await col.db.close();
69
+ await col.db.close(); // important: close before swap
72
70
  }
73
71
 
74
72
  /* ---------------------------------------------------------
75
- ATOMIC SWAP
73
+ ATOMIC SWAP (HARDENED)
76
74
  --------------------------------------------------------- */
77
75
 
78
- /**
79
- * Atomic directory replacement (POSIX safe)
80
- */
81
- function atomicSwap(base: string, tmp: string, old: string) {
76
+ async function atomicSwap(base: string, tmp: string, old: string) {
77
+ // Phase 1: rename base → old
82
78
  fs.renameSync(base, old);
83
- fs.renameSync(tmp, base);
79
+
80
+ try {
81
+ // Phase 2: rename tmp → base
82
+ fs.renameSync(tmp, base);
83
+ } catch (err) {
84
+ // Rollback if tmp rename fails
85
+ if (fs.existsSync(old)) {
86
+ fs.renameSync(old, base);
87
+ }
88
+ throw err;
89
+ }
84
90
  }
85
91
 
86
92
  /* ---------------------------------------------------------
87
93
  CRASH RECOVERY
88
94
  --------------------------------------------------------- */
89
95
 
90
- /**
91
- * Handles all partial-compaction states
92
- */
93
96
  export async function crashRecovery(baseDir: string) {
94
97
  const tmp = baseDir + TMP_SUFFIX;
95
98
  const old = baseDir + OLD_SUFFIX;
@@ -106,7 +109,7 @@ export async function crashRecovery(baseDir: string) {
106
109
  return;
107
110
  }
108
111
 
109
- // Case 2: rename(base → old) happened, but tmp missing
112
+ // Case 2: base→old happened but tmp missing
110
113
  if (!baseExists && oldExists) {
111
114
  fs.renameSync(old, baseDir);
112
115
  return;
@@ -119,44 +122,56 @@ export async function crashRecovery(baseDir: string) {
119
122
  }
120
123
 
121
124
  /* ---------------------------------------------------------
122
- INDEX REBUILD
125
+ REOPEN DB
126
+ --------------------------------------------------------- */
127
+
128
+ async function reopenCollectionDB(col: Collection) {
129
+ col.db = new ClassicLevel(col.dir, {
130
+ valueEncoding: "utf8"
131
+ });
132
+ }
133
+
134
+ /* ---------------------------------------------------------
135
+ INDEX REBUILD (SAFE)
123
136
  --------------------------------------------------------- */
124
137
 
125
- /**
126
- * Rebuilds all indexes from compacted DB
127
- * Guarantees index consistency
128
- */
129
138
  export async function rebuildIndexes(col: Collection) {
130
139
  const indexRoot = path.join(col.dir, INDEX_DIR);
131
140
 
132
- // Close existing index handles
133
- for (const idx of col["indexes"].values()) {
141
+ const oldIndexes = new Map(col["indexes"]);
142
+
143
+ // Close old index handles
144
+ for (const idx of oldIndexes.values()) {
134
145
  try {
135
146
  await idx.close();
136
147
  } catch {}
137
148
  }
138
149
 
139
- // Destroy index directory
140
150
  safeRemove(indexRoot);
141
151
  fs.mkdirSync(indexRoot, { recursive: true });
142
152
 
143
- const newIndexes = new Map<string, Index>();
153
+ const rebuiltIndexes = new Map<string, Index>();
144
154
 
145
- for (const idx of col["indexes"].values()) {
155
+ for (const idx of oldIndexes.values()) {
146
156
  const rebuilt = new Index(col.dir, idx.field, {
147
157
  unique: idx.unique
148
158
  });
149
159
 
150
160
  for await (const [, enc] of col.db.iterator()) {
151
161
  if (!enc) continue;
152
- const doc = decryptData(enc);
153
- await rebuilt.insert(doc);
162
+
163
+ try {
164
+ const doc = decryptData(enc);
165
+ await rebuilt.insert(doc);
166
+ } catch {
167
+ // Skip corrupted doc safely
168
+ }
154
169
  }
155
170
 
156
- newIndexes.set(idx.field, rebuilt);
171
+ rebuiltIndexes.set(idx.field, rebuilt);
157
172
  }
158
173
 
159
- col["indexes"] = newIndexes;
174
+ col["indexes"] = rebuiltIndexes;
160
175
  }
161
176
 
162
177
  /* ---------------------------------------------------------
@@ -13,9 +13,6 @@ import { CheckpointManager } from "./checkpoint.js";
13
13
  /* ----------------------------- TYPES ----------------------------- */
14
14
 
15
15
  type TXOp = { tx: number; col: string; op: string; args: any[] };
16
- type TXCommit = { tx: number; commit: true };
17
- type TXApplied = { tx: number; applied: true };
18
- type WALEntry = TXOp | TXCommit | TXApplied;
19
16
 
20
17
  type IndexMeta = {
21
18
  field: string;
@@ -43,43 +40,49 @@ class DBTransactionContext {
43
40
  ) {}
44
41
 
45
42
  collection(name: string) {
46
- return new Proxy({}, {
47
- get: (_, prop: string) => {
48
- return (...args: any[]) => {
49
- this.ops.push({
50
- tx: this.txId,
51
- col: name,
52
- op: prop,
53
- args
54
- });
55
- };
43
+ return new Proxy(
44
+ {},
45
+ {
46
+ get: (_, prop: string) => {
47
+ return (...args: any[]) => {
48
+ this.ops.push({
49
+ tx: this.txId,
50
+ col: name,
51
+ op: prop,
52
+ args
53
+ });
54
+ };
55
+ }
56
56
  }
57
- });
57
+ );
58
58
  }
59
59
 
60
60
  async commit() {
61
+ if (this.db.isReadonly()) {
62
+ throw new Error("Cannot commit transaction in readonly mode");
63
+ }
64
+
61
65
  for (const op of this.ops) {
62
- const recordOp: any = {
66
+ await this.db.wal.append({
63
67
  tx: this.txId,
64
68
  type: "op",
65
69
  payload: op
66
- };
67
- await this.db.wal.append(recordOp);
70
+ } as any);
68
71
  }
69
72
 
70
- const commitRecord: any = {
73
+ const commitLSN = await this.db.wal.append({
71
74
  tx: this.txId,
72
75
  type: "commit"
73
- };
74
- await this.db.wal.append(commitRecord);
76
+ } as any);
75
77
 
76
78
  await this.db.applyTransaction(this.ops);
77
79
 
78
- const appliedRecord: any = {
80
+ const appliedLSN = await this.db.wal.append({
79
81
  tx: this.txId,
80
82
  type: "applied"
81
- };
82
- await this.db.wal.append(appliedRecord);
83
+ } as any);
84
+
85
+ this.db.advanceCheckpoint(appliedLSN);
83
86
 
84
87
  await this.db.postCommitMaintenance();
85
88
  }
@@ -99,8 +102,10 @@ export class LioranDB {
99
102
  private migrator: MigrationEngine;
100
103
  private static TX_SEQ = 0;
101
104
 
102
- public wal: WALManager;
103
- private checkpoint: CheckpointManager;
105
+ public wal!: WALManager;
106
+ private checkpoint!: CheckpointManager;
107
+
108
+ private readonly readonlyMode: boolean;
104
109
 
105
110
  constructor(basePath: string, dbName: string, manager: LioranManager) {
106
111
  this.basePath = basePath;
@@ -108,24 +113,42 @@ export class LioranDB {
108
113
  this.manager = manager;
109
114
  this.collections = new Map();
110
115
 
116
+ this.readonlyMode = (manager as any)?.isReadonly?.() ?? false;
117
+
111
118
  this.metaPath = path.join(basePath, META_FILE);
112
119
 
113
120
  fs.mkdirSync(basePath, { recursive: true });
114
121
 
115
122
  this.loadMeta();
116
123
 
117
- this.wal = new WALManager(basePath);
118
- this.checkpoint = new CheckpointManager(basePath);
124
+ if (!this.readonlyMode) {
125
+ this.wal = new WALManager(basePath);
126
+ this.checkpoint = new CheckpointManager(basePath);
127
+ }
119
128
 
120
129
  this.migrator = new MigrationEngine(this);
121
130
 
122
131
  this.initialize().catch(console.error);
123
132
  }
124
133
 
134
+ /* ------------------------- MODE ------------------------- */
135
+
136
+ public isReadonly(): boolean {
137
+ return this.readonlyMode;
138
+ }
139
+
140
+ private assertWritable() {
141
+ if (this.readonlyMode) {
142
+ throw new Error("Database is in readonly replica mode");
143
+ }
144
+ }
145
+
125
146
  /* ------------------------- INIT & RECOVERY ------------------------- */
126
147
 
127
148
  private async initialize() {
128
- await this.recoverFromWAL();
149
+ if (!this.readonlyMode) {
150
+ await this.recoverFromWAL();
151
+ }
129
152
  }
130
153
 
131
154
  private async recoverFromWAL() {
@@ -143,18 +166,36 @@ export class LioranDB {
143
166
  applied.add(record.tx);
144
167
  } else if (record.type === "op") {
145
168
  if (!ops.has(record.tx)) ops.set(record.tx, []);
146
- ops.get(record.tx)!.push(record.payload);
169
+ ops.get(record.tx)!.push(record.payload as TXOp);
147
170
  }
148
171
  });
149
172
 
173
+ let highestAppliedLSN = fromLSN;
174
+
150
175
  for (const tx of committed) {
151
176
  if (applied.has(tx)) continue;
152
177
 
153
178
  const txOps = ops.get(tx);
154
179
  if (txOps) {
155
180
  await this.applyTransaction(txOps);
181
+ highestAppliedLSN = this.wal.getCurrentLSN();
156
182
  }
157
183
  }
184
+
185
+ this.advanceCheckpoint(highestAppliedLSN);
186
+ }
187
+
188
+ /* ------------------------- CHECKPOINT ADVANCE ------------------------- */
189
+
190
+ public advanceCheckpoint(lsn: number) {
191
+ if (this.readonlyMode) return;
192
+
193
+ const current = this.checkpoint.get();
194
+
195
+ if (lsn > current.lsn) {
196
+ this.checkpoint.save(lsn, this.wal.getCurrentGen());
197
+ this.wal.cleanup(this.wal.getCurrentGen() - 1).catch(() => {});
198
+ }
158
199
  }
159
200
 
160
201
  /* ------------------------- META ------------------------- */
@@ -179,6 +220,7 @@ export class LioranDB {
179
220
  }
180
221
 
181
222
  private saveMeta() {
223
+ if (this.readonlyMode) return;
182
224
  fs.writeFileSync(this.metaPath, JSON.stringify(this.meta, null, 2));
183
225
  }
184
226
 
@@ -187,6 +229,7 @@ export class LioranDB {
187
229
  }
188
230
 
189
231
  setSchemaVersion(v: string) {
232
+ this.assertWritable();
190
233
  this.meta.schemaVersion = v;
191
234
  this.saveMeta();
192
235
  }
@@ -194,6 +237,7 @@ export class LioranDB {
194
237
  /* ------------------------- DB MIGRATIONS ------------------------- */
195
238
 
196
239
  migrate(from: string, to: string, fn: (db: LioranDB) => Promise<void>) {
240
+ this.assertWritable();
197
241
  this.migrator.register(from, to, async db => {
198
242
  await fn(db);
199
243
  db.setSchemaVersion(to);
@@ -201,6 +245,7 @@ export class LioranDB {
201
245
  }
202
246
 
203
247
  async applyMigrations(targetVersion: string) {
248
+ this.assertWritable();
204
249
  await this.migrator.upgradeToLatest();
205
250
  }
206
251
 
@@ -234,7 +279,8 @@ export class LioranDB {
234
279
  const col = new Collection<T>(
235
280
  colPath,
236
281
  schema,
237
- schemaVersion ?? 1
282
+ schemaVersion ?? 1,
283
+ { readonly: this.readonlyMode }
238
284
  );
239
285
 
240
286
  const metas = this.meta.indexes[name] ?? [];
@@ -253,6 +299,8 @@ export class LioranDB {
253
299
  field: string,
254
300
  options: IndexOptions = {}
255
301
  ) {
302
+ this.assertWritable();
303
+
256
304
  const col = this.collection(collection);
257
305
 
258
306
  const existing = this.meta.indexes[collection]?.find(i => i.field === field);
@@ -265,10 +313,7 @@ export class LioranDB {
265
313
  try {
266
314
  const doc = decryptData(enc);
267
315
  await index.insert(doc);
268
- } catch (err) {
269
- const msg = err instanceof Error ? err.message : String(err);
270
- console.warn(`Index build skipped doc ${key}: ${msg}`);
271
- }
316
+ } catch {}
272
317
  }
273
318
 
274
319
  col.registerIndex(index);
@@ -284,11 +329,13 @@ export class LioranDB {
284
329
  /* ------------------------- COMPACTION ------------------------- */
285
330
 
286
331
  async compactCollection(name: string) {
332
+ this.assertWritable();
287
333
  const col = this.collection(name);
288
334
  await col.compact();
289
335
  }
290
336
 
291
337
  async compactAll() {
338
+ this.assertWritable();
292
339
  for (const name of this.collections.keys()) {
293
340
  await this.compactCollection(name);
294
341
  }
@@ -297,6 +344,7 @@ export class LioranDB {
297
344
  /* ------------------------- TX API ------------------------- */
298
345
 
299
346
  async transaction<T>(fn: (tx: DBTransactionContext) => Promise<T>): Promise<T> {
347
+ this.assertWritable();
300
348
  const txId = ++LioranDB.TX_SEQ;
301
349
  const tx = new DBTransactionContext(this, txId);
302
350
  const result = await fn(tx);
@@ -306,9 +354,7 @@ export class LioranDB {
306
354
 
307
355
  /* ------------------------- POST COMMIT ------------------------- */
308
356
 
309
- public async postCommitMaintenance() {
310
- // Custom maintenance can be added here
311
- }
357
+ public async postCommitMaintenance() {}
312
358
 
313
359
  /* ------------------------- SHUTDOWN ------------------------- */
314
360
 
@@ -316,7 +362,6 @@ export class LioranDB {
316
362
  for (const col of this.collections.values()) {
317
363
  try { await col.close(); } catch {}
318
364
  }
319
-
320
365
  this.collections.clear();
321
366
  }
322
367
  }