@pixagram/lacerta-db 0.11.3 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -1,7 +1,7 @@
1
1
  /**
2
- * LacertaDB V0.11.3 - Production Library
2
+ * Production Library
3
3
  * @module LacertaDB
4
- * @version 0.11.3
4
+ * @version 0.12.0
5
5
  * @license MIT
6
6
  * @author Pixagram SA
7
7
  */
@@ -27,6 +27,9 @@ import TurboSerial from "@pixagram/turboserial";
27
27
  import TurboBase64 from "@pixagram/turbobase64";
28
28
 
29
29
  // Default TurboSerial configuration (overridable via LacertaDB constructor)
30
+ // Tuned for cache/entity-store workloads: plain JSON objects, no circular refs,
31
+ // no property descriptors, no internal compression (Document-level compression
32
+ // is a separate opt-in via options.compressed).
30
33
  const TURBO_SERIAL_DEFAULTS = {
31
34
  compression: false,
32
35
  preservePropertyDescriptors: false,
@@ -45,7 +48,11 @@ const TURBO_SERIAL_DEFAULTS = {
45
48
 
46
49
  /**
47
50
  * Optimized QuickStore.
48
- * Keeps index in memory to avoid blocking main thread with JSON parsing.
51
+ * All documents live in an in-memory Map for O(1) reads (no serialization overhead).
52
+ * localStorage is only touched on:
53
+ * - Lazy hydration (first access loads all docs from localStorage into memory)
54
+ * - Debounced writes (add/update/delete schedule an async persist)
55
+ * - beforeunload flush (synchronous save of dirty entries)
49
56
  */
50
57
  class QuickStore {
51
58
  constructor(dbName, serializer, base64) {
@@ -55,15 +62,16 @@ class QuickStore {
55
62
  this._keyPrefix = `lacertadb_${dbName}_quickstore_`;
56
63
  this._indexKey = `${this._keyPrefix}index`;
57
64
 
58
- // Optimization: Keep index in memory using a Set for O(1) lookups
59
- this._indexCache = new Set();
60
- this._indexLoaded = false;
65
+ // In-memory cache: docId → deserialized data
66
+ this._docs = new Map();
67
+ this._hydrated = false;
61
68
 
62
- // Async persistence state
63
- this._saveIndexTimer = null;
64
- this._dirty = false;
69
+ // Dirty tracking: set of docIds that need localStorage persistence
70
+ this._dirtyDocs = new Set();
71
+ this._dirtyIndex = false;
72
+ this._saveTimer = null;
65
73
 
66
- // Safety: Flush on unload to prevent data loss
74
+ // Safety: flush on unload
67
75
  this._flushHandler = () => this._flushSync();
68
76
  if (typeof window !== 'undefined') {
69
77
  window.addEventListener('beforeunload', this._flushHandler);
@@ -76,120 +84,114 @@ class QuickStore {
76
84
  window.removeEventListener('beforeunload', this._flushHandler);
77
85
  this._flushHandler = null;
78
86
  }
79
- if (this._saveIndexTimer) {
87
+ if (this._saveTimer) {
80
88
  if (typeof window !== 'undefined' && window.cancelIdleCallback) {
81
- window.cancelIdleCallback(this._saveIndexTimer);
89
+ window.cancelIdleCallback(this._saveTimer);
82
90
  } else {
83
- clearTimeout(this._saveIndexTimer);
91
+ clearTimeout(this._saveTimer);
84
92
  }
85
- this._saveIndexTimer = null;
93
+ this._saveTimer = null;
86
94
  }
87
95
  }
88
96
 
89
- _ensureIndexLoaded() {
90
- if (this._indexLoaded) return;
97
+ /** Lazy hydration: load all docs from localStorage into memory on first access */
98
+ _ensureHydrated() {
99
+ if (this._hydrated) return;
91
100
 
92
101
  const indexStr = localStorage.getItem(this._indexKey);
93
102
  if (indexStr) {
94
103
  try {
95
104
  const decoded = this._base64.decode(indexStr);
96
105
  const list = this._serializer.deserialize(decoded);
97
- this._indexCache = new Set(list);
106
+ for (const docId of list) {
107
+ const key = `${this._keyPrefix}data_${docId}`;
108
+ const stored = localStorage.getItem(key);
109
+ if (stored) {
110
+ try {
111
+ const decodedDoc = this._base64.decode(stored);
112
+ this._docs.set(docId, this._serializer.deserialize(decodedDoc));
113
+ } catch (e) {
114
+ // Corrupted entry — skip it
115
+ }
116
+ }
117
+ }
98
118
  } catch (e) {
99
119
  console.warn('QuickStore index corrupted, resetting.', e);
100
- this._indexCache = new Set();
101
120
  }
102
121
  }
103
- this._indexLoaded = true;
122
+ this._hydrated = true;
104
123
  }
105
124
 
106
- _scheduleIndexSave() {
107
- this._dirty = true;
108
- if (this._saveIndexTimer) return;
125
+ /** Schedule debounced persistence of dirty entries */
126
+ _scheduleSave() {
127
+ if (this._saveTimer) return;
109
128
 
110
129
  const save = () => {
111
- if (!this._dirty) return;
112
- try {
113
- const list = Array.from(this._indexCache);
114
- const serializedIndex = this._serializer.serialize(list);
115
- const encodedIndex = this._base64.encode(serializedIndex);
116
- localStorage.setItem(this._indexKey, encodedIndex);
117
- this._dirty = false;
118
- } catch (e) {
119
- if (e.name === 'QuotaExceededError') {
120
- console.error('CRITICAL: QuickStore index save failed — localStorage quota exceeded');
121
- if (typeof window !== 'undefined') {
122
- window.dispatchEvent(new CustomEvent('lacertadb:quotaexceeded', { detail: { source: 'quickstore', db: this._dbName } }));
123
- }
124
- } else {
125
- console.error('QuickStore index save failed:', e);
126
- }
127
- }
128
- this._saveIndexTimer = null;
130
+ this._saveTimer = null;
131
+ this._persistDirty();
129
132
  };
130
133
 
131
- // Debounce with idle callback to prevent UI freezing
132
134
  if (typeof window !== 'undefined' && window.requestIdleCallback) {
133
- this._saveIndexTimer = window.requestIdleCallback(save);
135
+ this._saveTimer = window.requestIdleCallback(save);
134
136
  } else {
135
- this._saveIndexTimer = setTimeout(save, 200);
137
+ this._saveTimer = setTimeout(save, 200);
136
138
  }
137
139
  }
138
140
 
139
- _flushSync() {
140
- if (!this._dirty) return;
141
+ /** Persist only dirty documents and the index if changed */
142
+ _persistDirty() {
141
143
  try {
142
- const list = Array.from(this._indexCache);
143
- const serializedIndex = this._serializer.serialize(list);
144
- const encodedIndex = this._base64.encode(serializedIndex);
145
- localStorage.setItem(this._indexKey, encodedIndex);
146
- this._dirty = false;
144
+ for (const docId of this._dirtyDocs) {
145
+ const key = `${this._keyPrefix}data_${docId}`;
146
+ const data = this._docs.get(docId);
147
+ if (data !== undefined) {
148
+ const serialized = this._serializer.serialize(data);
149
+ const encoded = this._base64.encode(serialized);
150
+ localStorage.setItem(key, encoded);
151
+ } else {
152
+ localStorage.removeItem(key);
153
+ }
154
+ }
155
+ this._dirtyDocs.clear();
156
+
157
+ if (this._dirtyIndex) {
158
+ const list = Array.from(this._docs.keys());
159
+ const serialized = this._serializer.serialize(list);
160
+ const encoded = this._base64.encode(serialized);
161
+ localStorage.setItem(this._indexKey, encoded);
162
+ this._dirtyIndex = false;
163
+ }
147
164
  } catch (e) {
148
165
  if (e.name === 'QuotaExceededError') {
149
- console.error('CRITICAL: QuickStore flush failed — localStorage quota exceeded');
166
+ console.error('CRITICAL: QuickStore save failed — localStorage quota exceeded');
150
167
  if (typeof window !== 'undefined') {
151
- window.dispatchEvent(new CustomEvent('lacertadb:quotaexceeded', { detail: { source: 'quickstore_flush', db: this._dbName } }));
168
+ window.dispatchEvent(new CustomEvent('lacertadb:quotaexceeded', { detail: { source: 'quickstore', db: this._dbName } }));
152
169
  }
153
170
  } else {
154
- console.error('QuickStore flush failed:', e);
171
+ console.error('QuickStore save failed:', e);
155
172
  }
156
173
  }
157
174
  }
158
175
 
159
- add(docId, data) {
160
- this._ensureIndexLoaded();
161
- const key = `${this._keyPrefix}data_${docId}`;
162
- try {
163
- const serializedData = this._serializer.serialize(data);
164
- const encodedData = this._base64.encode(serializedData);
165
- localStorage.setItem(key, encodedData);
176
+ _flushSync() {
177
+ if (this._dirtyDocs.size === 0 && !this._dirtyIndex) return;
178
+ this._persistDirty();
179
+ }
166
180
 
167
- if (!this._indexCache.has(docId)) {
168
- this._indexCache.add(docId);
169
- this._scheduleIndexSave();
170
- }
171
- return true;
172
- } catch (e) {
173
- if (e.name === 'QuotaExceededError') {
174
- throw new LacertaDBError('QuickStore quota exceeded', 'QUOTA_EXCEEDED', e);
175
- }
176
- return false;
177
- }
181
+ add(docId, data) {
182
+ this._ensureHydrated();
183
+ const isNew = !this._docs.has(docId);
184
+ this._docs.set(docId, data);
185
+ this._dirtyDocs.add(docId);
186
+ if (isNew) this._dirtyIndex = true;
187
+ this._scheduleSave();
188
+ return true;
178
189
  }
179
190
 
180
191
  get(docId) {
181
- // Direct O(1) access
182
- const key = `${this._keyPrefix}data_${docId}`;
183
- const stored = localStorage.getItem(key);
184
- if (stored) {
185
- try {
186
- const decoded = this._base64.decode(stored);
187
- return this._serializer.deserialize(decoded);
188
- } catch (e) {
189
- console.error('Failed to parse QuickStore data:', e);
190
- }
191
- }
192
- return null;
192
+ this._ensureHydrated();
193
+ const data = this._docs.get(docId);
194
+ return data !== undefined ? data : null;
193
195
  }
194
196
 
195
197
  update(docId, data) {
@@ -197,22 +199,20 @@ class QuickStore {
197
199
  }
198
200
 
199
201
  delete(docId) {
200
- this._ensureIndexLoaded();
201
- const key = `${this._keyPrefix}data_${docId}`;
202
- localStorage.removeItem(key);
203
-
204
- if (this._indexCache.has(docId)) {
205
- this._indexCache.delete(docId);
206
- this._scheduleIndexSave();
202
+ this._ensureHydrated();
203
+ if (this._docs.has(docId)) {
204
+ this._docs.delete(docId);
205
+ this._dirtyDocs.add(docId); // marks for localStorage removal
206
+ this._dirtyIndex = true;
207
+ this._scheduleSave();
207
208
  }
208
209
  }
209
210
 
210
211
  getAll() {
211
- this._ensureIndexLoaded();
212
+ this._ensureHydrated();
212
213
  const results = [];
213
- for (const docId of this._indexCache) {
214
- const doc = this.get(docId);
215
- if (doc) results.push({ _id: docId, ...doc });
214
+ for (const [docId, data] of this._docs) {
215
+ results.push({ _id: docId, ...data });
216
216
  }
217
217
  return results;
218
218
  }
@@ -224,23 +224,27 @@ class QuickStore {
224
224
  }
225
225
 
226
226
  clear() {
227
- this._ensureIndexLoaded();
228
- for (const docId of this._indexCache) {
227
+ this._ensureHydrated();
228
+ for (const docId of this._docs.keys()) {
229
229
  localStorage.removeItem(`${this._keyPrefix}data_${docId}`);
230
230
  }
231
231
  localStorage.removeItem(this._indexKey);
232
- this._indexCache.clear();
233
- this._dirty = false;
234
- if (this._saveIndexTimer) {
235
- if (window.cancelIdleCallback) window.cancelIdleCallback(this._saveIndexTimer);
236
- else clearTimeout(this._saveIndexTimer);
237
- this._saveIndexTimer = null;
232
+ this._docs.clear();
233
+ this._dirtyDocs.clear();
234
+ this._dirtyIndex = false;
235
+ if (this._saveTimer) {
236
+ if (typeof window !== 'undefined' && window.cancelIdleCallback) {
237
+ window.cancelIdleCallback(this._saveTimer);
238
+ } else {
239
+ clearTimeout(this._saveTimer);
240
+ }
241
+ this._saveTimer = null;
238
242
  }
239
243
  }
240
244
 
241
245
  get size() {
242
- this._ensureIndexLoaded();
243
- return this._indexCache.size;
246
+ this._ensureHydrated();
247
+ return this._docs.size;
244
248
  }
245
249
  }
246
250
 
@@ -364,8 +368,9 @@ class LacertaDBError extends Error {
364
368
  this.name = 'LacertaDBError';
365
369
  this.code = code;
366
370
  this.originalError = originalError || null;
367
- this.timestamp = new Date().toISOString();
371
+ this._ts = Date.now();
368
372
  }
373
+ get timestamp() { return new Date(this._ts).toISOString(); }
369
374
  }
370
375
 
371
376
  // ========================
@@ -418,9 +423,11 @@ class LFUCache {
418
423
  constructor(maxSize = 100, ttl = null) {
419
424
  this._maxSize = maxSize;
420
425
  this._ttl = ttl;
421
- this._cache = new Map();
422
- this._frequencies = new Map();
423
- this._timestamps = new Map();
426
+ this._cache = new Map(); // key → value
427
+ this._frequencies = new Map(); // key → frequency
428
+ this._timestamps = new Map(); // key → insertion timestamp
429
+ this._buckets = new Map(); // frequency → Set<key>
430
+ this._minFreq = 0;
424
431
  }
425
432
 
426
433
  get(key) {
@@ -436,36 +443,63 @@ class LFUCache {
436
443
  }
437
444
  }
438
445
 
439
- this._frequencies.set(key, (this._frequencies.get(key) || 0) + 1);
446
+ // Promote: remove from old bucket, add to new bucket
447
+ const oldFreq = this._frequencies.get(key) || 1;
448
+ const newFreq = oldFreq + 1;
449
+ this._frequencies.set(key, newFreq);
450
+
451
+ const oldBucket = this._buckets.get(oldFreq);
452
+ if (oldBucket) {
453
+ oldBucket.delete(key);
454
+ if (oldBucket.size === 0) {
455
+ this._buckets.delete(oldFreq);
456
+ if (this._minFreq === oldFreq) this._minFreq = newFreq;
457
+ }
458
+ }
459
+
460
+ if (!this._buckets.has(newFreq)) this._buckets.set(newFreq, new Set());
461
+ this._buckets.get(newFreq).add(key);
462
+
440
463
  return this._cache.get(key);
441
464
  }
442
465
 
443
466
  set(key, value) {
467
+ if (this._maxSize <= 0) return;
468
+
444
469
  if (this._cache.has(key)) {
445
470
  this._cache.set(key, value);
446
- this._frequencies.set(key, (this._frequencies.get(key) || 0) + 1);
447
- } else {
448
- if (this._cache.size >= this._maxSize) {
449
- let minFreq = Infinity;
450
- let evictKey = null;
451
- for (const [k, freq] of this._frequencies) {
452
- if (freq < minFreq) {
453
- minFreq = freq;
454
- evictKey = k;
455
- }
456
- }
457
- if (evictKey) {
458
- this.delete(evictKey);
459
- }
460
- }
471
+ this.get(key); // triggers frequency promotion
472
+ return;
473
+ }
461
474
 
462
- this._cache.set(key, value);
463
- this._frequencies.set(key, 1);
464
- this._timestamps.set(key, Date.now());
475
+ if (this._cache.size >= this._maxSize) {
476
+ // O(1) eviction: grab any key from the lowest-frequency bucket
477
+ const minBucket = this._buckets.get(this._minFreq);
478
+ if (minBucket && minBucket.size > 0) {
479
+ const evictKey = minBucket.values().next().value;
480
+ this.delete(evictKey);
481
+ }
465
482
  }
483
+
484
+ this._cache.set(key, value);
485
+ this._frequencies.set(key, 1);
486
+ this._timestamps.set(key, Date.now());
487
+
488
+ if (!this._buckets.has(1)) this._buckets.set(1, new Set());
489
+ this._buckets.get(1).add(key);
490
+ this._minFreq = 1;
466
491
  }
467
492
 
468
493
  delete(key) {
494
+ if (!this._cache.has(key)) return false;
495
+
496
+ const freq = this._frequencies.get(key) || 1;
497
+ const bucket = this._buckets.get(freq);
498
+ if (bucket) {
499
+ bucket.delete(key);
500
+ if (bucket.size === 0) this._buckets.delete(freq);
501
+ }
502
+
469
503
  this._frequencies.delete(key);
470
504
  this._timestamps.delete(key);
471
505
  return this._cache.delete(key);
@@ -475,6 +509,8 @@ class LFUCache {
475
509
  this._cache.clear();
476
510
  this._frequencies.clear();
477
511
  this._timestamps.clear();
512
+ this._buckets.clear();
513
+ this._minFreq = 0;
478
514
  }
479
515
 
480
516
  has(key) {
@@ -493,56 +529,63 @@ class LFUCache {
493
529
  class TTLCache {
494
530
  constructor(ttl = 60000) {
495
531
  this._ttl = ttl;
496
- this._cache = new Map();
497
- this._timers = new Map();
532
+ this._cache = new Map(); // key → { value, ts }
533
+ this._sweepTimer = null;
534
+ this._sweepInterval = Math.min(ttl, 30000); // sweep at most every 30s
535
+
536
+ // Start periodic sweep
537
+ if (typeof globalThis !== 'undefined') {
538
+ this._sweepTimer = setInterval(() => this._sweep(), this._sweepInterval);
539
+ }
498
540
  }
499
541
 
500
542
  get(key) {
501
- return this._cache.get(key) || null;
502
- }
543
+ const entry = this._cache.get(key);
544
+ if (!entry) return null;
503
545
 
504
- set(key, value) {
505
- if (this._timers.has(key)) {
506
- clearTimeout(this._timers.get(key));
546
+ // Lazy eviction: check TTL on read
547
+ if (Date.now() - entry.ts > this._ttl) {
548
+ this._cache.delete(key);
549
+ return null;
507
550
  }
551
+ return entry.value;
552
+ }
508
553
 
509
- this._cache.set(key, value);
510
-
511
- const timer = setTimeout(() => {
512
- this.delete(key);
513
- }, this._ttl);
514
- this._timers.set(key, timer);
554
+ set(key, value) {
555
+ this._cache.set(key, { value, ts: Date.now() });
515
556
  }
516
557
 
517
558
  delete(key) {
518
- if (this._timers.has(key)) {
519
- clearTimeout(this._timers.get(key));
520
- this._timers.delete(key);
521
- }
522
559
  return this._cache.delete(key);
523
560
  }
524
561
 
525
562
  clear() {
526
- for (const timer of this._timers.values()) {
527
- clearTimeout(timer);
528
- }
529
- this._timers.clear();
530
563
  this._cache.clear();
531
564
  }
532
565
 
533
566
  has(key) {
534
- return this._cache.has(key);
567
+ return this.get(key) !== null;
535
568
  }
536
569
 
537
570
  get size() {
538
571
  return this._cache.size;
539
572
  }
540
573
 
574
+ /** Periodic sweep: remove all expired entries in one pass */
575
+ _sweep() {
576
+ const now = Date.now();
577
+ for (const [key, entry] of this._cache) {
578
+ if (now - entry.ts > this._ttl) {
579
+ this._cache.delete(key);
580
+ }
581
+ }
582
+ }
583
+
541
584
  destroy() {
542
- for (const timer of this._timers.values()) {
543
- clearTimeout(timer);
585
+ if (this._sweepTimer) {
586
+ clearInterval(this._sweepTimer);
587
+ this._sweepTimer = null;
544
588
  }
545
- this._timers.clear();
546
589
  this._cache.clear();
547
590
  }
548
591
  }
@@ -680,6 +723,9 @@ class BrowserCompressionUtility {
680
723
  }
681
724
  }
682
725
 
726
+ // Shared singleton — BrowserCompressionUtility is stateless
727
+ const _sharedCompression = new BrowserCompressionUtility();
728
+
683
729
  // ========================
684
730
  // Browser Encryption Utility
685
731
  // ========================
@@ -1193,9 +1239,40 @@ class QuadTree {
1193
1239
  }
1194
1240
 
1195
1241
  // ========================
1196
- // B-Tree Index Implementation
1242
+ // B-Tree Index Implementation (Hardened)
1197
1243
  // ========================
1198
1244
 
1245
+ /**
1246
+ * Safe total-order comparison for B-Tree keys.
1247
+ * JavaScript's >, <, === do NOT provide a total order when
1248
+ * types are mixed or special values (undefined, null, NaN) appear.
1249
+ * This function guarantees a consistent -1/0/+1 for ANY input.
1250
+ *
1251
+ * Ordering: numbers < strings (within same type, natural order)
1252
+ * @param {*} a
1253
+ * @param {*} b
1254
+ * @returns {number} -1 if a<b, 0 if a===b, 1 if a>b
1255
+ */
1256
+ function _btreeCmp(a, b) {
1257
+ // Identical references (covers same-value primitives and same object)
1258
+ if (a === b) return 0;
1259
+
1260
+ const ta = typeof a;
1261
+ const tb = typeof b;
1262
+
1263
+ // Same type — fast path (99% of real usage)
1264
+ if (ta === tb) {
1265
+ if (ta === 'number') return a < b ? -1 : 1;
1266
+ if (ta === 'string') return a < b ? -1 : (a > b ? 1 : 0);
1267
+ // Fallback: coerce to string for other types
1268
+ const sa = String(a), sb = String(b);
1269
+ return sa < sb ? -1 : (sa > sb ? 1 : 0);
1270
+ }
1271
+
1272
+ // Different types — sort by type name for a stable total order
1273
+ return ta < tb ? -1 : 1;
1274
+ }
1275
+
1199
1276
  class BTreeNode {
1200
1277
  constructor(order, leaf) {
1201
1278
  this.keys = new Array(2 * order - 1);
@@ -1208,11 +1285,11 @@ class BTreeNode {
1208
1285
 
1209
1286
  search(key) {
1210
1287
  let i = 0;
1211
- while (i < this.n && key > this.keys[i]) {
1288
+ while (i < this.n && _btreeCmp(key, this.keys[i]) > 0) {
1212
1289
  i++;
1213
1290
  }
1214
1291
 
1215
- if (i < this.n && key === this.keys[i]) {
1292
+ if (i < this.n && _btreeCmp(key, this.keys[i]) === 0) {
1216
1293
  return this.values[i];
1217
1294
  }
1218
1295
 
@@ -1223,28 +1300,18 @@ class BTreeNode {
1223
1300
  return this.children[i] ? this.children[i].search(key) : null;
1224
1301
  }
1225
1302
 
1226
- // Optimized range search with subtree pruning (O(log n + k) instead of O(n))
1227
- // excludeMin/excludeMax: when true, boundary values are excluded from results
1228
1303
  rangeSearch(min, max, results, excludeMin = false, excludeMax = false) {
1229
- // Skip keys strictly below the min bound
1230
1304
  let i = 0;
1231
1305
  if (min !== null) {
1232
- while (i < this.n && this.keys[i] < min) {
1306
+ while (i < this.n && _btreeCmp(this.keys[i], min) < 0) {
1233
1307
  i++;
1234
1308
  }
1235
- // If min is exclusive, also skip keys equal to min
1236
- if (excludeMin) {
1237
- // But first descend into the child at boundary — it may have keys > min
1238
- // that are relevant. We handle this below in the loop.
1239
- }
1240
1309
  }
1241
1310
 
1242
- // Process keys from i onward
1243
1311
  for (; i < this.n; i++) {
1244
- // Early exit: if current key exceeds max (or equals max when exclusive),
1245
- // descend into left child then stop — no further keys can match
1246
1312
  if (max !== null) {
1247
- const pastMax = excludeMax ? this.keys[i] >= max : this.keys[i] > max;
1313
+ const cmpMax = _btreeCmp(this.keys[i], max);
1314
+ const pastMax = excludeMax ? cmpMax >= 0 : cmpMax > 0;
1248
1315
  if (pastMax) {
1249
1316
  if (!this.leaf && this.children[i]) {
1250
1317
  this.children[i].rangeSearch(min, max, results, excludeMin, excludeMax);
@@ -1253,21 +1320,20 @@ class BTreeNode {
1253
1320
  }
1254
1321
  }
1255
1322
 
1256
- // Descend into left child of current key
1257
1323
  if (!this.leaf && this.children[i]) {
1258
1324
  this.children[i].rangeSearch(min, max, results, excludeMin, excludeMax);
1259
1325
  }
1260
1326
 
1261
- // Check current key against bounds
1262
- const meetsMin = min === null || (excludeMin ? this.keys[i] > min : this.keys[i] >= min);
1263
- const meetsMax = max === null || (excludeMax ? this.keys[i] < max : this.keys[i] <= max);
1327
+ const cmpMin = min === null ? 1 : _btreeCmp(this.keys[i], min);
1328
+ const cmpMaxCheck = max === null ? -1 : _btreeCmp(this.keys[i], max);
1329
+ const meetsMin = min === null || (excludeMin ? cmpMin > 0 : cmpMin >= 0);
1330
+ const meetsMax = max === null || (excludeMax ? cmpMaxCheck < 0 : cmpMaxCheck <= 0);
1264
1331
 
1265
1332
  if (meetsMin && meetsMax && this.values[i]) {
1266
1333
  this.values[i].forEach(v => results.push(v));
1267
1334
  }
1268
1335
  }
1269
1336
 
1270
- // Descend into rightmost child
1271
1337
  if (!this.leaf && this.children[i]) {
1272
1338
  this.children[i].rangeSearch(min, max, results, excludeMin, excludeMax);
1273
1339
  }
@@ -1277,13 +1343,13 @@ class BTreeNode {
1277
1343
  let i = this.n - 1;
1278
1344
 
1279
1345
  if (this.leaf) {
1280
- while (i >= 0 && this.keys[i] > key) {
1346
+ while (i >= 0 && _btreeCmp(this.keys[i], key) > 0) {
1281
1347
  this.keys[i + 1] = this.keys[i];
1282
1348
  this.values[i + 1] = this.values[i];
1283
1349
  i--;
1284
1350
  }
1285
1351
 
1286
- if (i >= 0 && this.keys[i] === key) {
1352
+ if (i >= 0 && _btreeCmp(this.keys[i], key) === 0) {
1287
1353
  if (!this.values[i]) {
1288
1354
  this.values[i] = new Set();
1289
1355
  }
@@ -1294,11 +1360,11 @@ class BTreeNode {
1294
1360
  this.n++;
1295
1361
  }
1296
1362
  } else {
1297
- while (i >= 0 && this.keys[i] > key) {
1363
+ while (i >= 0 && _btreeCmp(this.keys[i], key) > 0) {
1298
1364
  i--;
1299
1365
  }
1300
1366
 
1301
- if (i >= 0 && this.keys[i] === key) {
1367
+ if (i >= 0 && _btreeCmp(this.keys[i], key) === 0) {
1302
1368
  if (!this.values[i]) {
1303
1369
  this.values[i] = new Set();
1304
1370
  }
@@ -1310,18 +1376,13 @@ class BTreeNode {
1310
1376
  if (this.children[i] && this.children[i].n === 2 * this.order - 1) {
1311
1377
  this.splitChild(i, this.children[i]);
1312
1378
 
1313
- // FIX: After split, the promoted median may equal key.
1314
- // If so, add value to it directly instead of descending
1315
- // (which would create a duplicate entry in the child).
1316
- if (this.keys[i] === key) {
1317
- if (!this.values[i]) {
1318
- this.values[i] = new Set();
1319
- }
1379
+ const cmp = _btreeCmp(this.keys[i], key);
1380
+ if (cmp === 0) {
1381
+ if (!this.values[i]) this.values[i] = new Set();
1320
1382
  this.values[i].add(value);
1321
1383
  return;
1322
1384
  }
1323
-
1324
- if (this.keys[i] < key) {
1385
+ if (cmp < 0) {
1325
1386
  i++;
1326
1387
  }
1327
1388
  }
@@ -1346,9 +1407,14 @@ class BTreeNode {
1346
1407
  }
1347
1408
  }
1348
1409
 
1410
+ // CRITICAL: Save the median BEFORE cleaning stale slots.
1411
+ // The clean loop covers index (order-1) which IS the median position.
1412
+ const medianKey = y.keys[this.order - 1];
1413
+ const medianValue = y.values[this.order - 1];
1414
+
1349
1415
  y.n = this.order - 1;
1350
1416
 
1351
- // Clean stale slots in y after split
1417
+ // Clean all stale slots in y (median + right half)
1352
1418
  for (let j = this.order - 1; j < 2 * this.order - 1; j++) {
1353
1419
  y.keys[j] = undefined;
1354
1420
  y.values[j] = undefined;
@@ -1359,28 +1425,26 @@ class BTreeNode {
1359
1425
  }
1360
1426
  }
1361
1427
 
1428
+ // Shift parent's children right to make room
1362
1429
  for (let j = this.n; j >= i + 1; j--) {
1363
1430
  this.children[j + 1] = this.children[j];
1364
1431
  }
1365
-
1366
1432
  this.children[i + 1] = z;
1367
1433
 
1434
+ // Shift parent's keys/values right to make room
1368
1435
  for (let j = this.n - 1; j >= i; j--) {
1369
1436
  this.keys[j + 1] = this.keys[j];
1370
1437
  this.values[j + 1] = this.values[j];
1371
1438
  }
1372
1439
 
1373
- this.keys[i] = y.keys[this.order - 1];
1374
- this.values[i] = y.values[this.order - 1];
1375
- // Clear the promoted median from y (it's now in the parent)
1376
- y.keys[this.order - 1] = undefined;
1377
- y.values[this.order - 1] = undefined;
1440
+ // Promote the saved median
1441
+ this.keys[i] = medianKey;
1442
+ this.values[i] = medianValue;
1378
1443
  this.n++;
1379
1444
  }
1380
1445
 
1381
- // ---- Deletion helpers (proper B-tree delete with rebalancing) ----
1446
+ // ---- Deletion helpers ----
1382
1447
 
1383
- // Get the predecessor: rightmost key in the left subtree of keys[idx]
1384
1448
  _getPredecessor(idx) {
1385
1449
  let node = this.children[idx];
1386
1450
  while (!node.leaf) {
@@ -1389,7 +1453,6 @@ class BTreeNode {
1389
1453
  return { key: node.keys[node.n - 1], value: node.values[node.n - 1] };
1390
1454
  }
1391
1455
 
1392
- // Get the successor: leftmost key in the right subtree of keys[idx]
1393
1456
  _getSuccessor(idx) {
1394
1457
  let node = this.children[idx + 1];
1395
1458
  while (!node.leaf) {
@@ -1398,23 +1461,19 @@ class BTreeNode {
1398
1461
  return { key: node.keys[0], value: node.values[0] };
1399
1462
  }
1400
1463
 
1401
- // Merge children[idx+1] into children[idx], pulling keys[idx] down as separator
1402
1464
  _merge(idx) {
1403
1465
  const child = this.children[idx];
1404
1466
  const sibling = this.children[idx + 1];
1405
1467
  const t = this.order;
1406
1468
 
1407
- // Pull separator key down into child
1408
1469
  child.keys[t - 1] = this.keys[idx];
1409
1470
  child.values[t - 1] = this.values[idx];
1410
1471
 
1411
- // Copy keys/values from sibling into child
1412
1472
  for (let j = 0; j < sibling.n; j++) {
1413
1473
  child.keys[t + j] = sibling.keys[j];
1414
1474
  child.values[t + j] = sibling.values[j];
1415
1475
  }
1416
1476
 
1417
- // Copy children from sibling
1418
1477
  if (!child.leaf) {
1419
1478
  for (let j = 0; j <= sibling.n; j++) {
1420
1479
  child.children[t + j] = sibling.children[j];
@@ -1423,18 +1482,15 @@ class BTreeNode {
1423
1482
 
1424
1483
  child.n += sibling.n + 1;
1425
1484
 
1426
- // Shift keys/values left in this node to fill the gap
1427
1485
  for (let j = idx; j < this.n - 1; j++) {
1428
1486
  this.keys[j] = this.keys[j + 1];
1429
1487
  this.values[j] = this.values[j + 1];
1430
1488
  }
1431
1489
 
1432
- // Shift children left in this node
1433
1490
  for (let j = idx + 1; j < this.n; j++) {
1434
1491
  this.children[j] = this.children[j + 1];
1435
1492
  }
1436
1493
 
1437
- // Clean stale trailing slots
1438
1494
  this.keys[this.n - 1] = undefined;
1439
1495
  this.values[this.n - 1] = undefined;
1440
1496
  this.children[this.n] = undefined;
@@ -1442,12 +1498,10 @@ class BTreeNode {
1442
1498
  this.n--;
1443
1499
  }
1444
1500
 
1445
- // Borrow the last key from children[idx-1] through the parent
1446
1501
  _borrowFromPrev(idx) {
1447
1502
  const child = this.children[idx];
1448
1503
  const sibling = this.children[idx - 1];
1449
1504
 
1450
- // Shift everything in child right by 1
1451
1505
  for (let j = child.n - 1; j >= 0; j--) {
1452
1506
  child.keys[j + 1] = child.keys[j];
1453
1507
  child.values[j + 1] = child.values[j];
@@ -1458,21 +1512,17 @@ class BTreeNode {
1458
1512
  }
1459
1513
  }
1460
1514
 
1461
- // Move separator from parent down to child[0]
1462
1515
  child.keys[0] = this.keys[idx - 1];
1463
1516
  child.values[0] = this.values[idx - 1];
1464
1517
 
1465
- // Move last child of sibling to child
1466
1518
  if (!child.leaf) {
1467
1519
  child.children[0] = sibling.children[sibling.n];
1468
1520
  sibling.children[sibling.n] = undefined;
1469
1521
  }
1470
1522
 
1471
- // Move last key of sibling up to parent
1472
1523
  this.keys[idx - 1] = sibling.keys[sibling.n - 1];
1473
1524
  this.values[idx - 1] = sibling.values[sibling.n - 1];
1474
1525
 
1475
- // Clean stale slots in sibling
1476
1526
  sibling.keys[sibling.n - 1] = undefined;
1477
1527
  sibling.values[sibling.n - 1] = undefined;
1478
1528
 
@@ -1480,25 +1530,20 @@ class BTreeNode {
1480
1530
  sibling.n--;
1481
1531
  }
1482
1532
 
1483
- // Borrow the first key from children[idx+1] through the parent
1484
1533
  _borrowFromNext(idx) {
1485
1534
  const child = this.children[idx];
1486
1535
  const sibling = this.children[idx + 1];
1487
1536
 
1488
- // Move separator from parent down to end of child
1489
1537
  child.keys[child.n] = this.keys[idx];
1490
1538
  child.values[child.n] = this.values[idx];
1491
1539
 
1492
- // Move first child of sibling to child
1493
1540
  if (!child.leaf) {
1494
1541
  child.children[child.n + 1] = sibling.children[0];
1495
1542
  }
1496
1543
 
1497
- // Move first key of sibling up to parent
1498
1544
  this.keys[idx] = sibling.keys[0];
1499
1545
  this.values[idx] = sibling.values[0];
1500
1546
 
1501
- // Shift sibling's keys/values left
1502
1547
  for (let j = 0; j < sibling.n - 1; j++) {
1503
1548
  sibling.keys[j] = sibling.keys[j + 1];
1504
1549
  sibling.values[j] = sibling.values[j + 1];
@@ -1510,7 +1555,6 @@ class BTreeNode {
1510
1555
  sibling.children[sibling.n] = undefined;
1511
1556
  }
1512
1557
 
1513
- // Clean stale trailing slots in sibling
1514
1558
  sibling.keys[sibling.n - 1] = undefined;
1515
1559
  sibling.values[sibling.n - 1] = undefined;
1516
1560
 
@@ -1518,8 +1562,6 @@ class BTreeNode {
1518
1562
  sibling.n--;
1519
1563
  }
1520
1564
 
1521
- // Ensure children[idx] has at least `order` keys (minimum degree)
1522
- // so we can safely descend into it during deletion
1523
1565
  _fill(idx) {
1524
1566
  const t = this.order;
1525
1567
  if (idx > 0 && this.children[idx - 1] && this.children[idx - 1].n >= t) {
@@ -1527,7 +1569,6 @@ class BTreeNode {
1527
1569
  } else if (idx < this.n && this.children[idx + 1] && this.children[idx + 1].n >= t) {
1528
1570
  this._borrowFromNext(idx);
1529
1571
  } else {
1530
- // Merge with a sibling
1531
1572
  if (idx < this.n) {
1532
1573
  this._merge(idx);
1533
1574
  } else {
@@ -1536,7 +1577,6 @@ class BTreeNode {
1536
1577
  }
1537
1578
  }
1538
1579
 
1539
- // Remove a leaf-level key entry (shift keys, values left)
1540
1580
  _removeFromLeaf(idx) {
1541
1581
  for (let j = idx; j < this.n - 1; j++) {
1542
1582
  this.keys[j] = this.keys[j + 1];
@@ -1547,42 +1587,33 @@ class BTreeNode {
1547
1587
  this.n--;
1548
1588
  }
1549
1589
 
1550
- // Remove an internal key entry using predecessor/successor replacement
1551
1590
  _removeFromInternal(idx) {
1552
1591
  const t = this.order;
1553
1592
  const key = this.keys[idx];
1554
1593
 
1555
1594
  if (this.children[idx] && this.children[idx].n >= t) {
1556
- // Left child has enough keys: replace with predecessor
1557
1595
  const pred = this._getPredecessor(idx);
1558
1596
  this.keys[idx] = pred.key;
1559
1597
  this.values[idx] = pred.value;
1560
1598
  this.children[idx]._remove(pred.key, null, true);
1561
1599
  } else if (this.children[idx + 1] && this.children[idx + 1].n >= t) {
1562
- // Right child has enough keys: replace with successor
1563
1600
  const succ = this._getSuccessor(idx);
1564
1601
  this.keys[idx] = succ.key;
1565
1602
  this.values[idx] = succ.value;
1566
1603
  this.children[idx + 1]._remove(succ.key, null, true);
1567
1604
  } else {
1568
- // Both children at minimum: merge them, then delete from merged child
1569
1605
  this._merge(idx);
1570
1606
  this.children[idx]._remove(key, null, true);
1571
1607
  }
1572
1608
  }
1573
1609
 
1574
- // Core removal engine.
1575
- // removeEntire=false: remove one value from the Set; delete key entry if Set empties
1576
- // removeEntire=true: delete the entire key entry regardless of Set contents
1577
- // Returns true if a key entry was fully removed, false otherwise
1578
1610
  _remove(key, value, removeEntire) {
1579
1611
  let i = 0;
1580
- while (i < this.n && key > this.keys[i]) {
1612
+ while (i < this.n && _btreeCmp(key, this.keys[i]) > 0) {
1581
1613
  i++;
1582
1614
  }
1583
1615
 
1584
- if (i < this.n && key === this.keys[i]) {
1585
- // Key found at this node
1616
+ if (i < this.n && _btreeCmp(key, this.keys[i]) === 0) {
1586
1617
  let shouldRemoveEntry = removeEntire;
1587
1618
 
1588
1619
  if (!shouldRemoveEntry && this.values[i]) {
@@ -1600,17 +1631,14 @@ class BTreeNode {
1600
1631
  }
1601
1632
  return false;
1602
1633
  } else {
1603
- // Key not found at this level — descend
1604
1634
  if (this.leaf) return false;
1605
1635
 
1606
1636
  const isLastChild = (i === this.n);
1607
1637
 
1608
- // Ensure the child we descend into has enough keys for safe deletion
1609
1638
  if (this.children[i] && this.children[i].n < this.order) {
1610
1639
  this._fill(i);
1611
1640
  }
1612
1641
 
1613
- // After _fill, if the last child was merged, idx shifted
1614
1642
  if (isLastChild && i > this.n) {
1615
1643
  return this.children[i - 1]
1616
1644
  ? this.children[i - 1]._remove(key, value, removeEntire)
@@ -1623,20 +1651,23 @@ class BTreeNode {
1623
1651
  }
1624
1652
  }
1625
1653
 
1626
- // Public remove: remove a single (key, value) pair
1627
1654
  remove(key, value) {
1628
1655
  return this._remove(key, value, false);
1629
1656
  }
1630
1657
 
1631
- // Public removeKey: remove an entire key entry (used internally for predecessor/successor cleanup)
1632
1658
  removeKey(key) {
1633
1659
  return this._remove(key, null, true);
1634
1660
  }
1635
1661
 
1636
1662
  verify() {
1637
1663
  const issues = [];
1664
+ for (let i = 0; i < this.n; i++) {
1665
+ if (this.keys[i] === undefined || this.keys[i] === null) {
1666
+ issues.push(`Invalid key (${this.keys[i]}) at index ${i}`);
1667
+ }
1668
+ }
1638
1669
  for (let i = 1; i < this.n; i++) {
1639
- if (this.keys[i] <= this.keys[i - 1]) {
1670
+ if (_btreeCmp(this.keys[i], this.keys[i - 1]) <= 0) {
1640
1671
  issues.push(`Key order violation at index ${i}`);
1641
1672
  }
1642
1673
  }
@@ -1657,14 +1688,11 @@ class BTreeIndex {
1657
1688
  this._root = null;
1658
1689
  this._order = order;
1659
1690
  this._size = 0;
1660
- this._lastVerification = Date.now();
1661
- this._verificationInterval = 60000;
1662
1691
  }
1663
1692
 
1664
1693
  insert(key, value) {
1665
- if (Date.now() - this._lastVerification > this._verificationInterval) {
1666
- this.verify();
1667
- }
1694
+ // Reject keys that break comparison semantics
1695
+ if (key === undefined || key === null || (typeof key === 'number' && isNaN(key))) return;
1668
1696
 
1669
1697
  // Check for exact duplicate (key, value) to keep _size accurate
1670
1698
  if (this._root) {
@@ -1685,13 +1713,13 @@ class BTreeIndex {
1685
1713
  s.children[0] = this._root;
1686
1714
  s.splitChild(0, this._root);
1687
1715
 
1688
- // FIX: Check if promoted median equals key
1689
1716
  let i = 0;
1690
- if (s.keys[0] === key) {
1717
+ const cmp = _btreeCmp(s.keys[0], key);
1718
+ if (cmp === 0) {
1691
1719
  if (!s.values[0]) s.values[0] = new Set();
1692
1720
  s.values[0].add(value);
1693
1721
  } else {
1694
- if (s.keys[0] < key) i++;
1722
+ if (cmp < 0) i++;
1695
1723
  s.children[i].insertNonFull(key, value);
1696
1724
  }
1697
1725
 
@@ -1740,7 +1768,6 @@ class BTreeIndex {
1740
1768
  const existing = this._root.search(key);
1741
1769
  if (existing && existing.has(value)) {
1742
1770
  this._root.remove(key, value);
1743
- // Shrink root if it became empty (all keys merged down)
1744
1771
  if (this._root.n === 0 && !this._root.leaf && this._root.children[0]) {
1745
1772
  this._root = this._root.children[0];
1746
1773
  }
@@ -1749,12 +1776,9 @@ class BTreeIndex {
1749
1776
  }
1750
1777
 
1751
1778
  verify() {
1752
- this._lastVerification = Date.now();
1753
1779
  if (!this._root) return { healthy: true, issues: [] };
1754
1780
  const issues = this._root.verify();
1755
1781
  if (issues.length > 0) {
1756
- // NOTE: verify detects structural violations but cannot auto-repair.
1757
- // A full rebuild is required to fix a corrupted index.
1758
1782
  console.warn('BTree index issues detected (rebuild required):', issues);
1759
1783
  }
1760
1784
  return {
@@ -1772,6 +1796,52 @@ class BTreeIndex {
1772
1796
  get size() {
1773
1797
  return this._size;
1774
1798
  }
1799
+
1800
+ /**
1801
+ * Export all entries as a flat sorted array for persistence.
1802
+ * Format: [[key, [docId1, docId2, ...]], ...]
1803
+ * @returns {Array}
1804
+ */
1805
+ toSortedEntries() {
1806
+ if (!this._root) return [];
1807
+ const entries = [];
1808
+ this._collectInOrder(this._root, entries);
1809
+ return entries;
1810
+ }
1811
+
1812
+ /** @private In-order traversal to collect all key-value pairs */
1813
+ _collectInOrder(node, entries) {
1814
+ for (let i = 0; i < node.n; i++) {
1815
+ if (!node.leaf && node.children[i]) {
1816
+ this._collectInOrder(node.children[i], entries);
1817
+ }
1818
+ if (node.values[i] && node.values[i].size > 0) {
1819
+ entries.push([node.keys[i], Array.from(node.values[i])]);
1820
+ }
1821
+ }
1822
+ if (!node.leaf && node.children[node.n]) {
1823
+ this._collectInOrder(node.children[node.n], entries);
1824
+ }
1825
+ }
1826
+
1827
+ /**
1828
+ * Restore a BTreeIndex from persisted sorted entries.
1829
+ * Much faster than full document scan + unpack.
1830
+ * @param {Array} entries - [[key, [docId1, ...]], ...]
1831
+ * @param {number} [order=4]
1832
+ * @returns {BTreeIndex}
1833
+ */
1834
+ static fromSortedEntries(entries, order = 4) {
1835
+ const tree = new BTreeIndex(order);
1836
+ for (let i = 0; i < entries.length; i++) {
1837
+ const [key, values] = entries[i];
1838
+ if (key === undefined || key === null) continue;
1839
+ for (let j = 0; j < values.length; j++) {
1840
+ tree.insert(key, values[j]);
1841
+ }
1842
+ }
1843
+ return tree;
1844
+ }
1775
1845
  }
1776
1846
 
1777
1847
  // ========================
@@ -1958,12 +2028,20 @@ class IndexManager {
1958
2028
  this._indexData = new Map();
1959
2029
  this._indexQueue = [];
1960
2030
  this._processing = false;
2031
+
2032
+ // Debounced persistence — coalesce many writes into one IDB save
2033
+ this._dirtyIndexes = new Set();
2034
+ this._persistTimer = null;
2035
+ this._persistDelay = 2000; // ms — save at most every 2s
1961
2036
  }
1962
2037
 
1963
2038
  get indexes() {
1964
2039
  return this._indexes;
1965
2040
  }
1966
2041
 
2042
+ /** Reserved _id prefix for persisted index entries in the documents store */
2043
+ static get IDX_PREFIX() { return '__lacerta_idx_'; }
2044
+
1967
2045
  async createIndex(fieldPath, options = {}) {
1968
2046
  const indexName = options.name || fieldPath;
1969
2047
 
@@ -1990,6 +2068,11 @@ class IndexManager {
1990
2068
  return indexName;
1991
2069
  }
1992
2070
 
2071
+ /**
2072
+ * Full rebuild: scan all documents from IDB, extract field values, build index.
2073
+ * This is the SLOW path — only used on first-ever index creation or when
2074
+ * persisted index data is missing/corrupt.
2075
+ */
1993
2076
  async rebuildIndex(indexName) {
1994
2077
  const index = this._indexes.get(indexName);
1995
2078
  if (!index) {
@@ -1999,25 +2082,26 @@ class IndexManager {
1999
2082
  const indexData = this._createIndexStructure(index.type);
2000
2083
  this._indexData.set(indexName, indexData);
2001
2084
 
2002
- // Optimization: Use Batched Processing instead of single Cursor
2003
- // This prevents transaction timeouts caused by async crypto operations inside the loop
2004
2085
  let lastKey = null;
2005
- const batchSize = 100; // Keep batch small for responsiveness
2086
+ const batchSize = 200;
2006
2087
 
2007
2088
  while (true) {
2008
- // 1. Fetch Batch (Transaction opens and closes here)
2009
2089
  const batch = await this._collection._indexedDB.getBatch(
2010
2090
  this._collection._db,
2011
- 'documents',
2091
+ this._collection._storeName,
2012
2092
  lastKey,
2013
2093
  batchSize
2014
2094
  );
2015
2095
 
2016
2096
  if (batch.length === 0) break;
2017
2097
 
2018
- // 2. Process Batch (Async crypto operations safe here)
2019
2098
  for (const docData of batch) {
2020
- lastKey = docData._id; // Update for next batch
2099
+ // Skip persisted index entries
2100
+ if (typeof docData._id === 'string' && docData._id.startsWith(IndexManager.IDX_PREFIX)) {
2101
+ lastKey = docData._id;
2102
+ continue;
2103
+ }
2104
+ lastKey = docData._id;
2021
2105
  let doc = docData;
2022
2106
 
2023
2107
  if (docData.packedData) {
@@ -2025,7 +2109,6 @@ class IndexManager {
2025
2109
  compressed: docData._compressed,
2026
2110
  encrypted: docData._encrypted
2027
2111
  }, this._serializer);
2028
- // This await is what killed the transaction before
2029
2112
  await d.unpack(this._collection.database.encryption);
2030
2113
  doc = d.objectOutput();
2031
2114
  }
@@ -2037,7 +2120,6 @@ class IndexManager {
2037
2120
  }
2038
2121
 
2039
2122
  if (index.unique && indexData.has && indexData.has(value)) {
2040
- console.error(`Unique constraint violation on index '${indexName}'`);
2041
2123
  continue;
2042
2124
  }
2043
2125
 
@@ -2047,9 +2129,106 @@ class IndexManager {
2047
2129
 
2048
2130
  this._addToIndex(indexData, value, doc._id, index.type);
2049
2131
  }
2132
+ }
2133
+
2134
+ // Persist immediately after a full rebuild so next load is fast
2135
+ await this._persistIndex(indexName);
2136
+ }
2137
+
2138
+ /**
2139
+ * FAST PATH: Restore a BTree index from persisted entries stored in IDB.
2140
+ * Returns true if successful, false if persisted data is missing/corrupt.
2141
+ * @param {string} indexName
2142
+ * @returns {Promise<boolean>}
2143
+ */
2144
+ async _restoreIndex(indexName) {
2145
+ const index = this._indexes.get(indexName);
2146
+ if (!index || index.type !== 'btree') return false;
2147
+
2148
+ try {
2149
+ const docId = `${IndexManager.IDX_PREFIX}${indexName}`;
2150
+ const stored = await this._collection._indexedDB.get(
2151
+ this._collection._db, this._collection._storeName, docId
2152
+ );
2153
+
2154
+ if (!stored || !stored._entries || !Array.isArray(stored._entries)) {
2155
+ return false;
2156
+ }
2157
+
2158
+ // Restore B-Tree from sorted entries — no document scanning needed
2159
+ const btree = BTreeIndex.fromSortedEntries(stored._entries, 4);
2160
+
2161
+ // Quick sanity check
2162
+ const v = btree.verify();
2163
+ if (!v.healthy) {
2164
+ console.warn(`[IndexManager] Persisted index '${indexName}' is corrupt, will rebuild`);
2165
+ return false;
2166
+ }
2050
2167
 
2051
- // Optional: Yield to main thread briefly to prevent UI freeze
2052
- if (window.requestIdleCallback) await new Promise(r => window.requestIdleCallback(r));
2168
+ this._indexData.set(indexName, btree);
2169
+ return true;
2170
+ } catch (e) {
2171
+ return false;
2172
+ }
2173
+ }
2174
+
2175
+ /**
2176
+ * Persist a single BTree index's entries to IDB.
2177
+ * Stored as a document with reserved _id in the existing 'documents' store.
2178
+ * @param {string} indexName
2179
+ */
2180
+ async _persistIndex(indexName) {
2181
+ const indexData = this._indexData.get(indexName);
2182
+ if (!indexData || !(indexData instanceof BTreeIndex)) return;
2183
+
2184
+ try {
2185
+ const docId = `${IndexManager.IDX_PREFIX}${indexName}`;
2186
+ const payload = {
2187
+ _id: docId,
2188
+ _entries: indexData.toSortedEntries(),
2189
+ _persisted_at: Date.now(),
2190
+ _size: indexData.size
2191
+ };
2192
+
2193
+ await this._collection._indexedDB.put(
2194
+ this._collection._db, this._collection._storeName, payload
2195
+ );
2196
+ } catch (e) {
2197
+ console.warn(`[IndexManager] Failed to persist index '${indexName}':`, e.message);
2198
+ }
2199
+ }
2200
+
2201
+ /**
2202
+ * Schedule a debounced persist for modified indexes.
2203
+ * Coalesces rapid writes into a single IDB save.
2204
+ * @param {string} indexName
2205
+ */
2206
+ _schedulePersist(indexName) {
2207
+ this._dirtyIndexes.add(indexName);
2208
+
2209
+ if (this._persistTimer) return;
2210
+
2211
+ this._persistTimer = setTimeout(async () => {
2212
+ this._persistTimer = null;
2213
+ const dirty = Array.from(this._dirtyIndexes);
2214
+ this._dirtyIndexes.clear();
2215
+
2216
+ for (const name of dirty) {
2217
+ await this._persistIndex(name);
2218
+ }
2219
+ }, this._persistDelay);
2220
+ }
2221
+
2222
+ /** Flush any pending index persistence immediately (e.g., before page unload) */
2223
+ async flushPersistence() {
2224
+ if (this._persistTimer) {
2225
+ clearTimeout(this._persistTimer);
2226
+ this._persistTimer = null;
2227
+ }
2228
+ const dirty = Array.from(this._dirtyIndexes);
2229
+ this._dirtyIndexes.clear();
2230
+ for (const name of dirty) {
2231
+ await this._persistIndex(name);
2053
2232
  }
2054
2233
  }
2055
2234
 
@@ -2137,6 +2316,11 @@ class IndexManager {
2137
2316
  if (newValue) indexData.addPoint(newValue, docId);
2138
2317
  break;
2139
2318
  }
2319
+
2320
+ // Schedule async persistence for modified btree indexes
2321
+ if (index.type === 'btree') {
2322
+ this._schedulePersist(indexName);
2323
+ }
2140
2324
  }
2141
2325
  }
2142
2326
 
@@ -2171,7 +2355,6 @@ class IndexManager {
2171
2355
  }
2172
2356
 
2173
2357
  _queryBTree(indexData, options) {
2174
- // Handle simple value query
2175
2358
  if (typeof options !== 'object' || options === null) {
2176
2359
  return indexData.find(options);
2177
2360
  }
@@ -2183,7 +2366,6 @@ class IndexManager {
2183
2366
  docs.forEach(doc => results.add(doc));
2184
2367
  }
2185
2368
 
2186
- // Combined range queries: pick the tightest bounds and correct exclusivity
2187
2369
  const hasGte = options.$gte !== undefined;
2188
2370
  const hasGt = options.$gt !== undefined;
2189
2371
  const hasLte = options.$lte !== undefined;
@@ -2245,7 +2427,12 @@ class IndexManager {
2245
2427
  dropIndex(indexName) {
2246
2428
  this._indexes.delete(indexName);
2247
2429
  this._indexData.delete(indexName);
2430
+ this._dirtyIndexes.delete(indexName);
2248
2431
  this._saveIndexMetadata();
2432
+
2433
+ // Remove persisted index from IDB (fire-and-forget)
2434
+ const docId = `${IndexManager.IDX_PREFIX}${indexName}`;
2435
+ this._collection._indexedDB.delete(this._collection._db, this._collection._storeName, docId).catch(() => {});
2249
2436
  }
2250
2437
 
2251
2438
  _getFieldValue(doc, path) {
@@ -2283,6 +2470,11 @@ class IndexManager {
2283
2470
  });
2284
2471
  }
2285
2472
 
2473
+ /**
2474
+ * Load index definitions and restore persisted index data.
2475
+ * FAST PATH: Restore BTree from persisted entries (no document scanning).
2476
+ * SLOW PATH: Full rebuild only if persisted data is missing/corrupt.
2477
+ */
2286
2478
  async loadIndexMetadata() {
2287
2479
  const key = `lacertadb_${this._collection.database.name}_${this._collection.name}_indexes`;
2288
2480
  const stored = localStorage.getItem(key);
@@ -2298,8 +2490,27 @@ class IndexManager {
2298
2490
  this._indexes.set(name, index);
2299
2491
  }
2300
2492
 
2301
- for (const indexName of this._indexes.keys()) {
2302
- await this.rebuildIndex(indexName);
2493
+ // Try to restore each index from persisted IDB data (fast path).
2494
+ // Only fall back to full rebuild for indexes that can't be restored.
2495
+ const needsRebuild = [];
2496
+
2497
+ for (const [indexName, index] of this._indexes) {
2498
+ if (index.type === 'btree') {
2499
+ const restored = await this._restoreIndex(indexName);
2500
+ if (!restored) {
2501
+ needsRebuild.push(indexName);
2502
+ }
2503
+ } else {
2504
+ // Non-btree indexes (text, geo, hash) always need rebuild
2505
+ needsRebuild.push(indexName);
2506
+ }
2507
+ }
2508
+
2509
+ if (needsRebuild.length > 0) {
2510
+ // Rebuild only the indexes that couldn't be restored
2511
+ for (const indexName of needsRebuild) {
2512
+ await this.rebuildIndex(indexName);
2513
+ }
2303
2514
  }
2304
2515
  } catch (error) {
2305
2516
  console.error('Failed to load index metadata:', error);
@@ -2336,7 +2547,6 @@ class IndexManager {
2336
2547
  } else if (indexData.verify) {
2337
2548
  const result = indexData.verify();
2338
2549
  if (result.requiresRebuild) {
2339
- // BTree detected structural issues — rebuild the index from source data
2340
2550
  await this.rebuildIndex(name);
2341
2551
  result.rebuilt = true;
2342
2552
  }
@@ -2349,6 +2559,10 @@ class IndexManager {
2349
2559
  }
2350
2560
 
2351
2561
  destroy() {
2562
+ if (this._persistTimer) {
2563
+ clearTimeout(this._persistTimer);
2564
+ this._persistTimer = null;
2565
+ }
2352
2566
  for (const [name, indexData] of this._indexData) {
2353
2567
  if (indexData && indexData.clear) {
2354
2568
  indexData.clear();
@@ -2356,6 +2570,7 @@ class IndexManager {
2356
2570
  }
2357
2571
  this._indexData.clear();
2358
2572
  this._indexes.clear();
2573
+ this._dirtyIndexes.clear();
2359
2574
  this._indexQueue = [];
2360
2575
  this._processing = false;
2361
2576
  }
@@ -2588,9 +2803,9 @@ class IndexedDBUtility {
2588
2803
  });
2589
2804
  }
2590
2805
 
2591
- async batchOperation(db, operations) {
2592
- return this.performTransaction(db, ['documents'], 'readwrite', tx => {
2593
- const store = tx.objectStore('documents');
2806
+ async batchOperation(db, operations, storeName = 'documents') {
2807
+ return this.performTransaction(db, [storeName], 'readwrite', tx => {
2808
+ const store = tx.objectStore(storeName);
2594
2809
 
2595
2810
  // CRITICAL: Queue ALL IDB requests synchronously to prevent
2596
2811
  // TransactionInactiveError. Do NOT use await between requests.
@@ -2634,7 +2849,7 @@ class Document {
2634
2849
  this._attachments = data._attachments || [];
2635
2850
  this._data = null;
2636
2851
  this._packedData = data.packedData || null;
2637
- this._compression = new BrowserCompressionUtility();
2852
+ this._compression = _sharedCompression;
2638
2853
  this._serializer = serializer;
2639
2854
 
2640
2855
  if (data.data) {
@@ -3579,6 +3794,30 @@ class PerformanceMonitor {
3579
3794
  }
3580
3795
  }
3581
3796
 
3797
+ // ========================
3798
+ // Stable Cache Key Utility
3799
+ // ========================
3800
+
3801
+ /**
3802
+ * Generate a deterministic cache key from query filter + options.
3803
+ * Uses sorted-keys JSON.stringify for stability, avoiding the overhead
3804
+ * of full TurboSerial serialize + Base64 encode on every query call.
3805
+ * @param {object} filter
3806
+ * @param {object} options
3807
+ * @returns {string}
3808
+ */
3809
+ function _stableCacheKey(filter, options) {
3810
+ const replacer = (_, v) => {
3811
+ if (v && typeof v === 'object' && !Array.isArray(v)) {
3812
+ const sorted = {};
3813
+ for (const k of Object.keys(v).sort()) sorted[k] = v[k];
3814
+ return sorted;
3815
+ }
3816
+ return v;
3817
+ };
3818
+ return JSON.stringify({ f: filter, o: options }, replacer);
3819
+ }
3820
+
3582
3821
  // ========================
3583
3822
  // Collection Class (Optimized)
3584
3823
  // ========================
@@ -3589,7 +3828,8 @@ class Collection {
3589
3828
  this.database = database;
3590
3829
  this._serializer = database._serializer;
3591
3830
  this._base64 = database._base64;
3592
- this._db = null;
3831
+ this._db = null; // Reference to parent's consolidated IDB connection
3832
+ this._storeName = name; // Object store name within the consolidated database
3593
3833
  this._metadata = null;
3594
3834
  this._settings = database.settings;
3595
3835
  this._indexedDB = new IndexedDBUtility();
@@ -3605,6 +3845,12 @@ class Collection {
3605
3845
  enabled: true
3606
3846
  });
3607
3847
 
3848
+ // Document-level cache: avoids IDB reads + deserialization for repeated get() calls
3849
+ this._docCache = new LRUCache(200);
3850
+
3851
+ // Pending indexes: definitions registered before init() — applied during init
3852
+ this._pendingIndexes = [];
3853
+
3608
3854
  this._performanceMonitor = database.performanceMonitor;
3609
3855
  this._initialized = false;
3610
3856
  }
@@ -3624,13 +3870,10 @@ class Collection {
3624
3870
  async init() {
3625
3871
  if (this._initialized) return this;
3626
3872
 
3627
- const dbName = `${this.database.name}_${this.name}`;
3628
- this._db = await connectionPool.getConnection(dbName, 1, (db, oldVersion) => {
3629
- if (oldVersion < 1 && !db.objectStoreNames.contains('documents')) {
3630
- const store = db.createObjectStore('documents', {keyPath: '_id'});
3631
- store.createIndex('modified', '_modified', {unique: false});
3632
- }
3633
- });
3873
+ // Use the parent Database's consolidated IDB connection
3874
+ // (ensure store exists in case ensureCollection was used without createCollection)
3875
+ await this.database._ensureStore(this._storeName);
3876
+ this._db = this.database._db;
3634
3877
 
3635
3878
  // Load per-collection metadata (with per-doc tracking) from its own localStorage key
3636
3879
  this._metadata = CollectionMetadata.load(
@@ -3639,6 +3882,16 @@ class Collection {
3639
3882
 
3640
3883
  await this._indexManager.loadIndexMetadata();
3641
3884
 
3885
+ // Apply any indexes that were registered before init()
3886
+ if (this._pendingIndexes.length > 0) {
3887
+ for (const { fieldPath, options } of this._pendingIndexes) {
3888
+ if (!this._indexManager.indexes.has(options.name || fieldPath)) {
3889
+ await this._indexManager.createIndex(fieldPath, options).catch(() => {});
3890
+ }
3891
+ }
3892
+ this._pendingIndexes = [];
3893
+ }
3894
+
3642
3895
  if (this._settings.freeSpaceEvery > 0 && this._settings.sizeLimitKB !== Infinity) {
3643
3896
  this._cleanupInterval = setInterval(() => this._freeSpace(), this._settings.freeSpaceEvery);
3644
3897
  }
@@ -3649,6 +3902,11 @@ class Collection {
3649
3902
 
3650
3903
  // Index methods
3651
3904
  async createIndex(fieldPath, options = {}) {
3905
+ // If not yet initialized, queue the definition — will be applied during init()
3906
+ if (!this._initialized) {
3907
+ this._pendingIndexes.push({ fieldPath, options });
3908
+ return options.name || fieldPath;
3909
+ }
3652
3910
  return await this._indexManager.createIndex(fieldPath, options);
3653
3911
  }
3654
3912
 
@@ -3681,7 +3939,7 @@ class Collection {
3681
3939
  await this._trigger('beforeAdd', documentData);
3682
3940
 
3683
3941
  const doc = new Document({data: documentData, _id: options.id}, {
3684
- compressed: options.compressed !== false,
3942
+ compressed: options.compressed || false,
3685
3943
  permanent: options.permanent || false
3686
3944
  }, this._serializer);
3687
3945
 
@@ -3697,7 +3955,7 @@ class Collection {
3697
3955
 
3698
3956
  await doc.pack(this.database.encryption);
3699
3957
  const dbOutput = doc.databaseOutput();
3700
- await this._indexedDB.add(this._db, 'documents', dbOutput);
3958
+ await this._indexedDB.add(this._db, this._storeName, dbOutput);
3701
3959
 
3702
3960
  const fullDoc = doc.objectOutput();
3703
3961
  await this._indexManager.updateIndexForDocument(doc._id, null, fullDoc);
@@ -3709,15 +3967,20 @@ class Collection {
3709
3967
  await this._checkSpaceLimit();
3710
3968
  await this._trigger('afterAdd', doc);
3711
3969
  this._cacheStrategy.clear();
3970
+ this._docCache.set(doc._id, fullDoc);
3712
3971
  return doc._id;
3713
3972
  }
3714
3973
 
3715
3974
  async get(docId, options = {}) {
3716
3975
  if (!this._initialized) await this.init();
3717
3976
 
3718
- await this._trigger('beforeGet', docId);
3977
+ // Document-level cache: return immediately if cached (skips IDB + deserialize)
3978
+ if (!options.includeAttachments) {
3979
+ const cached = this._docCache.get(docId);
3980
+ if (cached) return cached;
3981
+ }
3719
3982
 
3720
- const stored = await this._indexedDB.get(this._db, 'documents', docId);
3983
+ const stored = await this._indexedDB.get(this._db, this._storeName, docId);
3721
3984
  if (!stored) {
3722
3985
  throw new LacertaDBError(`Document with id '${docId}' not found.`, 'DOCUMENT_NOT_FOUND');
3723
3986
  }
@@ -3736,14 +3999,21 @@ class Collection {
3736
3999
  }
3737
4000
 
3738
4001
  await this._trigger('afterGet', doc);
3739
- return doc.objectOutput(options.includeAttachments);
4002
+ const output = doc.objectOutput(options.includeAttachments);
4003
+ // Populate document cache (skip if attachments were included — those are transient)
4004
+ if (!options.includeAttachments) {
4005
+ this._docCache.set(docId, output);
4006
+ }
4007
+ return output;
3740
4008
  }
3741
4009
 
3742
4010
  async getAll(options = {}) {
3743
4011
  if (!this._initialized) await this.init();
3744
4012
 
3745
- const stored = await this._indexedDB.getAll(this._db, 'documents', undefined, options.limit);
3746
- return Promise.all(stored.map(async docData => {
4013
+ const stored = await this._indexedDB.getAll(this._db, this._storeName, undefined, options.limit);
4014
+ // Filter out persisted index entries (reserved _id prefix)
4015
+ const userDocs = stored.filter(d => !(typeof d._id === 'string' && d._id.startsWith(IndexManager.IDX_PREFIX)));
4016
+ return Promise.all(userDocs.map(async docData => {
3747
4017
  try {
3748
4018
  const doc = new Document(docData, {
3749
4019
  encrypted: docData._encrypted,
@@ -3765,7 +4035,7 @@ class Collection {
3765
4035
 
3766
4036
  await this._trigger('beforeUpdate', {docId, updates});
3767
4037
 
3768
- const stored = await this._indexedDB.get(this._db, 'documents', docId);
4038
+ const stored = await this._indexedDB.get(this._db, this._storeName, docId);
3769
4039
  if (!stored) {
3770
4040
  throw new LacertaDBError(`Document with id '${docId}' not found for update.`, 'DOCUMENT_NOT_FOUND');
3771
4041
  }
@@ -3801,7 +4071,7 @@ class Collection {
3801
4071
 
3802
4072
  await doc.pack(this.database.encryption);
3803
4073
  const dbOutput = doc.databaseOutput();
3804
- await this._indexedDB.put(this._db, 'documents', dbOutput);
4074
+ await this._indexedDB.put(this._db, this._storeName, dbOutput);
3805
4075
 
3806
4076
  const newDocOutput = doc.objectOutput();
3807
4077
  await this._indexManager.updateIndexForDocument(doc._id, oldDocOutput, newDocOutput);
@@ -3812,6 +4082,7 @@ class Collection {
3812
4082
 
3813
4083
  await this._trigger('afterUpdate', doc);
3814
4084
  this._cacheStrategy.clear();
4085
+ this._docCache.set(doc._id, newDocOutput);
3815
4086
  return doc._id;
3816
4087
  }
3817
4088
 
@@ -3820,7 +4091,7 @@ class Collection {
3820
4091
 
3821
4092
  await this._trigger('beforeDelete', docId);
3822
4093
 
3823
- const doc = await this._indexedDB.get(this._db, 'documents', docId);
4094
+ const doc = await this._indexedDB.get(this._db, this._storeName, docId);
3824
4095
  if (!doc) {
3825
4096
  throw new LacertaDBError('Document not found for deletion', 'DOCUMENT_NOT_FOUND');
3826
4097
  }
@@ -3840,7 +4111,7 @@ class Collection {
3840
4111
 
3841
4112
  await this._indexManager.updateIndexForDocument(docId, fullDoc, null);
3842
4113
 
3843
- await this._indexedDB.delete(this._db, 'documents', docId);
4114
+ await this._indexedDB.delete(this._db, this._storeName, docId);
3844
4115
  const attachments = doc._attachments;
3845
4116
  if (attachments && attachments.length > 0) {
3846
4117
  await this._opfs.deleteAttachments(this.database.name, this.name, docId);
@@ -3851,14 +4122,13 @@ class Collection {
3851
4122
 
3852
4123
  await this._trigger('afterDelete', docId);
3853
4124
  this._cacheStrategy.clear();
3854
- }
3855
-
3856
- async query(filter = {}, options = {}) {
4125
+ this._docCache.delete(docId);
4126
+ } async query(filter = {}, options = {}) {
3857
4127
  if (!this._initialized) await this.init();
3858
4128
 
3859
4129
  const startTime = performance.now();
3860
4130
 
3861
- const cacheKey = this._base64.encode(this._serializer.serialize({filter, options}));
4131
+ const cacheKey = _stableCacheKey(filter, options);
3862
4132
  const cached = this._cacheStrategy.get(cacheKey);
3863
4133
 
3864
4134
  if (cached) {
@@ -3911,8 +4181,18 @@ class Collection {
3911
4181
  if (!this._initialized) await this.init();
3912
4182
 
3913
4183
  const startTime = performance.now();
3914
- const docs = await this.getAll();
3915
- const result = await aggregationPipeline.execute(docs, pipeline, this.database);
4184
+
4185
+ // Optimization: push leading $match down to query() which can use indexes
4186
+ let docs;
4187
+ let remainingPipeline = pipeline;
4188
+ if (pipeline.length > 0 && pipeline[0].$match) {
4189
+ docs = await this.query(pipeline[0].$match);
4190
+ remainingPipeline = pipeline.slice(1);
4191
+ } else {
4192
+ docs = await this.getAll();
4193
+ }
4194
+
4195
+ const result = await aggregationPipeline.execute(docs, remainingPipeline, this.database);
3916
4196
  if (this._performanceMonitor) this._performanceMonitor.recordOperation('aggregate', performance.now() - startTime);
3917
4197
  return result;
3918
4198
  }
@@ -3923,14 +4203,19 @@ class Collection {
3923
4203
  const startTime = performance.now();
3924
4204
  const operations = [];
3925
4205
  const results = [];
4206
+ const useSync = !this.database.encryption && !(options.compressed);
3926
4207
 
3927
4208
  for (const documentData of documents) {
3928
4209
  const doc = new Document({data: documentData}, {
3929
- compressed: options.compressed !== false,
4210
+ compressed: options.compressed || false,
3930
4211
  permanent: options.permanent || false
3931
4212
  }, this._serializer);
3932
4213
 
3933
- await doc.pack(this.database.encryption);
4214
+ if (useSync) {
4215
+ doc.packSync();
4216
+ } else {
4217
+ await doc.pack(this.database.encryption);
4218
+ }
3934
4219
  operations.push({
3935
4220
  type: 'add',
3936
4221
  data: doc.databaseOutput()
@@ -3938,7 +4223,7 @@ class Collection {
3938
4223
  results.push(doc);
3939
4224
  }
3940
4225
 
3941
- const dbResults = await this._indexedDB.batchOperation(this._db, operations);
4226
+ const dbResults = await this._indexedDB.batchOperation(this._db, operations, this._storeName);
3942
4227
 
3943
4228
  for (let i = 0; i < results.length; i++) {
3944
4229
  if (dbResults[i].success) {
@@ -3948,6 +4233,7 @@ class Collection {
3948
4233
 
3949
4234
  const sizeKB = doc._packedData.byteLength / 1024;
3950
4235
  this._metadata.addDocument(doc._id, sizeKB, doc._permanent, 0);
4236
+ this._docCache.set(doc._id, fullDoc);
3951
4237
  }
3952
4238
  }
3953
4239
 
@@ -3970,10 +4256,22 @@ class Collection {
3970
4256
  const oldDocs = [];
3971
4257
  const newDocs = [];
3972
4258
  const skipped = [];
4259
+ const useSync = !this.database.encryption && !(options.compressed);
4260
+
4261
+ // Phase 1: Bulk-fetch all existing docs in a single IDB read transaction
4262
+ const updateIds = updates.map(u => u.id);
4263
+ const storedMap = new Map();
4264
+
4265
+ // Fetch all at once via getAll, then build a Map for O(1) lookup
4266
+ const allStored = await this._indexedDB.getAll(this._db, this._storeName);
4267
+ for (const doc of allStored) {
4268
+ if (doc._id && updateIds.includes(doc._id)) {
4269
+ storedMap.set(doc._id, doc);
4270
+ }
4271
+ }
3973
4272
 
3974
- // Phase 1: Read all existing docs and prepare put operations
3975
4273
  for (const update of updates) {
3976
- const stored = await this._indexedDB.get(this._db, 'documents', update.id);
4274
+ const stored = storedMap.get(update.id);
3977
4275
  if (!stored) {
3978
4276
  skipped.push({ success: false, id: update.id, error: 'Document not found' });
3979
4277
  continue;
@@ -3996,7 +4294,11 @@ class Collection {
3996
4294
  doc._modified = Date.now();
3997
4295
  doc._attachments = stored._attachments;
3998
4296
 
3999
- await doc.pack(this.database.encryption);
4297
+ if (useSync) {
4298
+ doc.packSync();
4299
+ } else {
4300
+ await doc.pack(this.database.encryption);
4301
+ }
4000
4302
  newDocs.push(doc);
4001
4303
 
4002
4304
  operations.push({
@@ -4008,16 +4310,18 @@ class Collection {
4008
4310
  if (operations.length === 0) return skipped;
4009
4311
 
4010
4312
  // Phase 2: Single-transaction write
4011
- const dbResults = await this._indexedDB.batchOperation(this._db, operations);
4313
+ const dbResults = await this._indexedDB.batchOperation(this._db, operations, this._storeName);
4012
4314
 
4013
- // Phase 3: Update indexes and metadata post-transaction
4315
+ // Phase 3: Update indexes, metadata, and doc cache post-transaction
4014
4316
  for (let i = 0; i < newDocs.length; i++) {
4015
4317
  if (dbResults[i].success) {
4016
4318
  const doc = newDocs[i];
4017
- await this._indexManager.updateIndexForDocument(doc._id, oldDocs[i], doc.objectOutput());
4319
+ const newOutput = doc.objectOutput();
4320
+ await this._indexManager.updateIndexForDocument(doc._id, oldDocs[i], newOutput);
4018
4321
 
4019
4322
  const sizeKB = doc._packedData.byteLength / 1024;
4020
4323
  this._metadata.updateDocument(doc._id, sizeKB, doc._permanent, doc._attachments.length);
4324
+ this._docCache.set(doc._id, newOutput);
4021
4325
  }
4022
4326
  }
4023
4327
 
@@ -4051,7 +4355,7 @@ class Collection {
4051
4355
 
4052
4356
  // Phase 1: Validate all documents and prepare delete operations
4053
4357
  for (const { id, options } of normalizedItems) {
4054
- const doc = await this._indexedDB.get(this._db, 'documents', id);
4358
+ const doc = await this._indexedDB.get(this._db, this._storeName, id);
4055
4359
  if (!doc) {
4056
4360
  skipped.push({ success: false, id, error: 'Document not found' });
4057
4361
  continue;
@@ -4074,7 +4378,7 @@ class Collection {
4074
4378
  if (operations.length === 0) return skipped;
4075
4379
 
4076
4380
  // Phase 2: Single-transaction delete
4077
- const dbResults = await this._indexedDB.batchOperation(this._db, operations);
4381
+ const dbResults = await this._indexedDB.batchOperation(this._db, operations, this._storeName);
4078
4382
 
4079
4383
  // Phase 3: Update indexes, OPFS cleanup, and metadata post-transaction
4080
4384
  for (let i = 0; i < docsToRemove.length; i++) {
@@ -4087,6 +4391,7 @@ class Collection {
4087
4391
  }
4088
4392
 
4089
4393
  this._metadata.removeDocument(id);
4394
+ this._docCache.delete(id);
4090
4395
  }
4091
4396
  }
4092
4397
 
@@ -4130,14 +4435,16 @@ class Collection {
4130
4435
  }
4131
4436
 
4132
4437
  async _trigger(event, data) {
4133
- if (!this._events.has(event)) return;
4134
- for (const callback of this._events.get(event)) {
4438
+ const listeners = this._events.get(event);
4439
+ if (!listeners || listeners.length === 0) return;
4440
+ for (const callback of listeners) {
4135
4441
  await callback(data);
4136
4442
  }
4137
4443
  }
4138
4444
 
4139
4445
  clearCache() {
4140
4446
  this._cacheStrategy.clear();
4447
+ this._docCache.clear();
4141
4448
  }
4142
4449
 
4143
4450
  async clear(options = {}) {
@@ -4145,7 +4452,7 @@ class Collection {
4145
4452
 
4146
4453
  if (options.force) {
4147
4454
  // Clear documents first
4148
- await this._indexedDB.clear(this._db, 'documents');
4455
+ await this._indexedDB.clear(this._db, this._storeName);
4149
4456
 
4150
4457
  // Reset metadata
4151
4458
  if (this._metadata) this._metadata.destroy();
@@ -4157,6 +4464,7 @@ class Collection {
4157
4464
 
4158
4465
  // Clear cache
4159
4466
  this._cacheStrategy.clear();
4467
+ this._docCache.clear();
4160
4468
 
4161
4469
  // Rebuild indexes after clearing
4162
4470
  for (const indexName of this._indexManager.indexes.keys()) {
@@ -4185,6 +4493,12 @@ class Collection {
4185
4493
  this._metadata.destroy();
4186
4494
  }
4187
4495
 
4496
+ // Flush dirty index data to IDB before teardown
4497
+ if (this._indexManager) {
4498
+ this._indexManager.flushPersistence().catch(() => {});
4499
+ this._indexManager.destroy();
4500
+ }
4501
+
4188
4502
  // Clear the cleanup interval
4189
4503
  if (this._cleanupInterval) {
4190
4504
  clearInterval(this._cleanupInterval);
@@ -4196,13 +4510,14 @@ class Collection {
4196
4510
  this._cacheStrategy.destroy();
4197
4511
  }
4198
4512
 
4199
- // Release the connection
4200
- if (this._db) {
4201
- const dbName = `${this.database.name}_${this.name}`;
4202
- connectionPool.releaseConnection(dbName);
4203
- this._db = null;
4513
+ // Clear document cache
4514
+ if (this._docCache) {
4515
+ this._docCache.clear();
4204
4516
  }
4205
4517
 
4518
+ // Release the connection reference (owned by parent Database)
4519
+ this._db = null;
4520
+
4206
4521
  // Clear event listeners
4207
4522
  this._events.clear();
4208
4523
  }
@@ -4223,6 +4538,14 @@ class Database {
4223
4538
  this._serializer = serializer;
4224
4539
  this._base64 = base64;
4225
4540
 
4541
+ // Consolidated IDB connection (one per Database, not per Collection)
4542
+ this._db = null;
4543
+ this._idbVersion = 0;
4544
+ this._knownStores = new Set();
4545
+ this._ensureStorePromise = null;
4546
+ this._idbVersionKey = `lacertadb_${name}_idb_version`;
4547
+ this._idbStoresKey = `lacertadb_${name}_idb_stores`;
4548
+
4226
4549
  // Database-level encryption
4227
4550
  this._encryption = null;
4228
4551
  }
@@ -4255,11 +4578,127 @@ class Database {
4255
4578
  return !!this._encryption;
4256
4579
  }
4257
4580
 
4581
+ /**
4582
+ * Open or reuse the consolidated IDB connection.
4583
+ * All collections share this single connection.
4584
+ * @returns {Promise<IDBDatabase>}
4585
+ */
4586
+ async _getConnection() {
4587
+ if (this._db) return this._db;
4588
+
4589
+ // Load known version and stores from localStorage
4590
+ try {
4591
+ this._idbVersion = parseInt(localStorage.getItem(this._idbVersionKey), 10) || 1;
4592
+ const storedStores = localStorage.getItem(this._idbStoresKey);
4593
+ if (storedStores) {
4594
+ const decoded = this._base64.decode(storedStores);
4595
+ const list = this._serializer.deserialize(decoded);
4596
+ this._knownStores = new Set(list);
4597
+ }
4598
+ } catch (_) {
4599
+ this._idbVersion = 1;
4600
+ }
4601
+
4602
+ this._db = await this._openIDB(this._idbVersion);
4603
+ return this._db;
4604
+ }
4605
+
4606
+ /** @private Open IDB at a specific version */
4607
+ async _openIDB(version) {
4608
+ const knownStores = this._knownStores;
4609
+ return new Promise((resolve, reject) => {
4610
+ const request = indexedDB.open(`lacertadb_${this.name}`, version);
4611
+ request.onerror = () => reject(new LacertaDBError(
4612
+ 'Failed to open database', 'DATABASE_OPEN_FAILED', request.error
4613
+ ));
4614
+ request.onsuccess = () => resolve(request.result);
4615
+ request.onupgradeneeded = event => {
4616
+ const db = event.target.result;
4617
+ for (const storeName of knownStores) {
4618
+ if (!db.objectStoreNames.contains(storeName)) {
4619
+ const store = db.createObjectStore(storeName, { keyPath: '_id' });
4620
+ store.createIndex('modified', '_modified', { unique: false });
4621
+ }
4622
+ }
4623
+ };
4624
+ });
4625
+ }
4626
+
4627
+ /**
4628
+ * Ensure an object store exists for a collection.
4629
+ * If the store doesn't exist, bumps the IDB version to create it.
4630
+ * @param {string} storeName
4631
+ * @returns {Promise<void>}
4632
+ */
4633
+ /**
4634
+ * Ensure an object store exists for a collection.
4635
+ * Batches multiple new stores into a single IDB version bump.
4636
+ * Dedup-guarded so concurrent init() calls don't race.
4637
+ * @param {string} storeName
4638
+ * @returns {Promise<void>}
4639
+ */
4640
+ async _ensureStore(storeName) {
4641
+ // Already exists in current IDB — nothing to do
4642
+ if (this._db && this._db.objectStoreNames.contains(storeName)) {
4643
+ this._knownStores.add(storeName);
4644
+ return;
4645
+ }
4646
+
4647
+ this._knownStores.add(storeName);
4648
+
4649
+ // Dedup: if a version bump is already in flight, piggyback on it
4650
+ if (this._ensureStorePromise) {
4651
+ await this._ensureStorePromise;
4652
+ // After the in-flight bump, our store should now exist
4653
+ if (this._db && this._db.objectStoreNames.contains(storeName)) return;
4654
+ }
4655
+
4656
+ // Collect ALL known stores that are missing from current IDB
4657
+ const missingStores = [];
4658
+ for (const name of this._knownStores) {
4659
+ if (!this._db || !this._db.objectStoreNames.contains(name)) {
4660
+ missingStores.push(name);
4661
+ }
4662
+ }
4663
+
4664
+ if (missingStores.length === 0) return;
4665
+
4666
+ this._ensureStorePromise = (async () => {
4667
+ this._idbVersion++;
4668
+
4669
+ // Persist the new version and store list
4670
+ localStorage.setItem(this._idbVersionKey, String(this._idbVersion));
4671
+ const serialized = this._serializer.serialize(Array.from(this._knownStores));
4672
+ const encoded = this._base64.encode(serialized);
4673
+ localStorage.setItem(this._idbStoresKey, encoded);
4674
+
4675
+ // Close current connection and reopen with new version (creates all missing stores)
4676
+ if (this._db) {
4677
+ this._db.close();
4678
+ this._db = null;
4679
+ }
4680
+
4681
+ this._db = await this._openIDB(this._idbVersion);
4682
+ })();
4683
+
4684
+ try {
4685
+ await this._ensureStorePromise;
4686
+ } finally {
4687
+ this._ensureStorePromise = null;
4688
+ }
4689
+ }
4690
+
4258
4691
  async init(options = {}) {
4259
4692
  this._metadata = DatabaseMetadata.load(this.name, this._serializer, this._base64);
4260
4693
  this._settings = Settings.load(this.name, this._serializer, this._base64);
4261
4694
  this._quickStore = new QuickStore(this.name, this._serializer, this._base64);
4262
4695
 
4696
+ // Open the consolidated IDB connection
4697
+ await this._getConnection();
4698
+
4699
+ // Migrate old per-collection databases if they exist
4700
+ await this._migrateOldDatabases();
4701
+
4263
4702
  if (options.pin) {
4264
4703
  await this._initializeEncryption(options.pin, options.salt, options.encryptionConfig);
4265
4704
  }
@@ -4349,8 +4788,10 @@ class Database {
4349
4788
  throw new LacertaDBError(`Collection '${name}' already exists.`, 'COLLECTION_EXISTS');
4350
4789
  }
4351
4790
 
4791
+ // Ensure the object store exists in the consolidated IDB
4792
+ await this._ensureStore(name);
4793
+
4352
4794
  const collection = new Collection(name, this);
4353
- // Lazy initialization - don't init here
4354
4795
  this._collections.set(name, collection);
4355
4796
 
4356
4797
  if (!this._metadata.collections[name]) {
@@ -4370,6 +4811,8 @@ class Database {
4370
4811
  return collection;
4371
4812
  }
4372
4813
  if (this._metadata.collections[name]) {
4814
+ // Ensure store exists before initializing
4815
+ await this._ensureStore(name);
4373
4816
  const collection = new Collection(name, this);
4374
4817
  this._collections.set(name, collection);
4375
4818
  await collection.init();
@@ -4378,6 +4821,37 @@ class Database {
4378
4821
  throw new LacertaDBError(`Collection '${name}' not found.`, 'COLLECTION_NOT_FOUND');
4379
4822
  }
4380
4823
 
4824
+ /**
4825
+ * Ensure a collection handle exists in memory without triggering init().
4826
+ * Creates the IDB object store if needed.
4827
+ * The collection will lazy-init on first actual operation.
4828
+ * @param {string} name
4829
+ * @returns {Collection}
4830
+ */
4831
+ ensureCollection(name) {
4832
+ if (this._collections.has(name)) {
4833
+ return this._collections.get(name);
4834
+ }
4835
+ // Mark store as known — will be created on next _ensureStore or IDB open
4836
+ if (!this._knownStores.has(name)) {
4837
+ this._knownStores.add(name);
4838
+ // Persist so warm start creates all stores in one shot
4839
+ try {
4840
+ const serialized = this._serializer.serialize(Array.from(this._knownStores));
4841
+ const encoded = this._base64.encode(serialized);
4842
+ localStorage.setItem(this._idbStoresKey, encoded);
4843
+ } catch (_) {}
4844
+ }
4845
+ const collection = new Collection(name, this);
4846
+ this._collections.set(name, collection);
4847
+ if (!this._metadata.collections[name]) {
4848
+ this._metadata.setCollection(new CollectionMetadata(
4849
+ name, {}, this._serializer, this._base64, this.name
4850
+ ));
4851
+ }
4852
+ return collection;
4853
+ }
4854
+
4381
4855
  async dropCollection(name) {
4382
4856
  if (this._collections.has(name)) {
4383
4857
  const collection = this._collections.get(name);
@@ -4394,13 +4868,126 @@ class Database {
4394
4868
  localStorage.removeItem(`lacertadb_${this.name}_${name}_collmeta`);
4395
4869
  localStorage.removeItem(`lacertadb_${this.name}_${name}_indexes`);
4396
4870
 
4397
- const dbName = `${this.name}_${name}`;
4398
- await new Promise((resolve, reject) => {
4399
- const deleteReq = indexedDB.deleteDatabase(dbName);
4400
- deleteReq.onsuccess = resolve;
4401
- deleteReq.onerror = reject;
4402
- deleteReq.onblocked = () => console.warn(`Deletion of '${dbName}' is blocked.`);
4403
- });
4871
+ // Clear the store contents (can't delete an object store without version bump,
4872
+ // but clearing it is equivalent for our purposes — the empty store costs nothing)
4873
+ if (this._db && this._knownStores.has(name)) {
4874
+ try {
4875
+ const idbUtil = new IndexedDBUtility();
4876
+ await idbUtil.clear(this._db, name);
4877
+ } catch (e) {
4878
+ // Store may not exist yet if collection was never initialized
4879
+ }
4880
+ }
4881
+
4882
+ // Also clean up old per-collection database if it exists (migration residue)
4883
+ const legacyDbName = `${this.name}_${name}`;
4884
+ try {
4885
+ await new Promise((resolve, reject) => {
4886
+ const deleteReq = indexedDB.deleteDatabase(legacyDbName);
4887
+ deleteReq.onsuccess = resolve;
4888
+ deleteReq.onerror = resolve; // don't fail if it doesn't exist
4889
+ deleteReq.onblocked = resolve;
4890
+ });
4891
+ } catch (e) {}
4892
+ }
4893
+
4894
+ /**
4895
+ * Migrate data from old per-collection databases to the consolidated database.
4896
+ * Runs once on first load with the new schema. Safe to call multiple times.
4897
+ * @private
4898
+ */
4899
+ async _migrateOldDatabases() {
4900
+ const migrationKey = `lacertadb_${this.name}_consolidated`;
4901
+ if (localStorage.getItem(migrationKey)) return; // already migrated
4902
+
4903
+ const collectionNames = Object.keys(this._metadata.collections || {});
4904
+ if (collectionNames.length === 0) {
4905
+ localStorage.setItem(migrationKey, '1');
4906
+ return;
4907
+ }
4908
+
4909
+ let migrated = 0;
4910
+ for (const collName of collectionNames) {
4911
+ const legacyDbName = `${this.name}_${collName}`;
4912
+
4913
+ try {
4914
+ // Try to open the old per-collection database
4915
+ const oldDb = await new Promise((resolve, reject) => {
4916
+ const request = indexedDB.open(legacyDbName, 1);
4917
+ request.onerror = () => resolve(null);
4918
+ request.onsuccess = () => resolve(request.result);
4919
+ request.onupgradeneeded = (event) => {
4920
+ // If version was 0, it's a brand new DB — nothing to migrate
4921
+ if (event.oldVersion === 0) {
4922
+ event.target.transaction.abort();
4923
+ resolve(null);
4924
+ }
4925
+ };
4926
+ });
4927
+
4928
+ if (!oldDb) {
4929
+ // Clean up ghost database created by the probe
4930
+ indexedDB.deleteDatabase(legacyDbName);
4931
+ continue;
4932
+ }
4933
+
4934
+ // Check if the old DB has a 'documents' store
4935
+ if (!oldDb.objectStoreNames.contains('documents')) {
4936
+ oldDb.close();
4937
+ continue;
4938
+ }
4939
+
4940
+ // Read all documents from the old database
4941
+ const oldDocs = await new Promise((resolve, reject) => {
4942
+ const tx = oldDb.transaction('documents', 'readonly');
4943
+ const store = tx.objectStore('documents');
4944
+ const request = store.getAll();
4945
+ request.onsuccess = () => resolve(request.result || []);
4946
+ request.onerror = () => resolve([]);
4947
+ });
4948
+
4949
+ oldDb.close();
4950
+
4951
+ if (oldDocs.length === 0) continue;
4952
+
4953
+ // Ensure the new consolidated store exists
4954
+ await this._ensureStore(collName);
4955
+
4956
+ // Write all documents to the new consolidated store
4957
+ const idbUtil = new IndexedDBUtility();
4958
+ const ops = oldDocs.map(doc => ({ type: 'put', data: doc }));
4959
+ // Use performTransaction directly since batchOperation hardcodes 'documents'
4960
+ await idbUtil.performTransaction(this._db, [collName], 'readwrite', tx => {
4961
+ const store = tx.objectStore(collName);
4962
+ const promises = ops.map(op => {
4963
+ return new Promise((resolve, reject) => {
4964
+ const req = store.put(op.data);
4965
+ req.onsuccess = () => resolve();
4966
+ req.onerror = () => resolve(); // skip individual failures
4967
+ });
4968
+ });
4969
+ return Promise.all(promises);
4970
+ });
4971
+
4972
+ // Delete the old database
4973
+ await new Promise((resolve) => {
4974
+ const deleteReq = indexedDB.deleteDatabase(legacyDbName);
4975
+ deleteReq.onsuccess = resolve;
4976
+ deleteReq.onerror = resolve;
4977
+ deleteReq.onblocked = resolve;
4978
+ });
4979
+
4980
+ migrated++;
4981
+ } catch (e) {
4982
+ console.warn(`[LacertaDB] Migration of '${collName}' failed:`, e.message);
4983
+ }
4984
+ }
4985
+
4986
+ if (migrated > 0) {
4987
+ console.log(`[LacertaDB] Migrated ${migrated} collections to consolidated database`);
4988
+ }
4989
+
4990
+ localStorage.setItem(migrationKey, '1');
4404
4991
  }
4405
4992
 
4406
4993
  listCollections() {
@@ -4428,7 +5015,7 @@ class Database {
4428
5015
 
4429
5016
  async export(format = 'json', password = null) {
4430
5017
  const data = {
4431
- version: '0.10.2',
5018
+ version: '0.11.1',
4432
5019
  database: this.name,
4433
5020
  timestamp: Date.now(),
4434
5021
  collections: {}
@@ -4508,6 +5095,12 @@ class Database {
4508
5095
  }
4509
5096
  this._collections.clear();
4510
5097
 
5098
+ // Close consolidated IDB connection
5099
+ if (this._db) {
5100
+ this._db.close();
5101
+ this._db = null;
5102
+ }
5103
+
4511
5104
  // Clear quickstore
4512
5105
  if (this._quickStore) {
4513
5106
  this._quickStore.destroy();
@@ -4579,7 +5172,7 @@ class LacertaDB {
4579
5172
  this._databases.delete(name);
4580
5173
  }
4581
5174
 
4582
- ['metadata', 'settings', 'version', 'encryption'].forEach(suffix => {
5175
+ ['metadata', 'settings', 'version', 'encryption', 'idb_version', 'idb_stores', 'consolidated'].forEach(suffix => {
4583
5176
  localStorage.removeItem(`lacertadb_${name}_${suffix}`);
4584
5177
  });
4585
5178
 
@@ -4587,7 +5180,7 @@ class LacertaDB {
4587
5180
  const quickStore = new QuickStore(name, this._serializer, this._base64);
4588
5181
  quickStore.clear();
4589
5182
 
4590
- // Clean up all collections and indexes
5183
+ // Clean up all collection-level localStorage keys
4591
5184
  const keysToRemove = [];
4592
5185
  for (let i = 0; i < localStorage.length; i++) {
4593
5186
  const key = localStorage.key(i);
@@ -4596,6 +5189,14 @@ class LacertaDB {
4596
5189
  }
4597
5190
  }
4598
5191
  keysToRemove.forEach(key => localStorage.removeItem(key));
5192
+
5193
+ // Delete the consolidated IDB database
5194
+ await new Promise((resolve) => {
5195
+ const deleteReq = indexedDB.deleteDatabase(`lacertadb_${name}`);
5196
+ deleteReq.onsuccess = resolve;
5197
+ deleteReq.onerror = resolve;
5198
+ deleteReq.onblocked = resolve;
5199
+ });
4599
5200
  }
4600
5201
 
4601
5202
  listDatabases() {
@@ -4614,7 +5215,7 @@ class LacertaDB {
4614
5215
 
4615
5216
  async createBackup(password = null) {
4616
5217
  const backup = {
4617
- version: '0.10.2',
5218
+ version: '0.11.1',
4618
5219
  timestamp: Date.now(),
4619
5220
  databases: {}
4620
5221
  };
@@ -4664,7 +5265,12 @@ class LacertaDB {
4664
5265
  }
4665
5266
 
4666
5267
  close() {
4667
- connectionPool.closeAll();
5268
+ for (const db of this._databases.values()) {
5269
+ if (db._db) {
5270
+ db._db.close();
5271
+ db._db = null;
5272
+ }
5273
+ }
4668
5274
  }
4669
5275
 
4670
5276
  destroy() {
@@ -4672,7 +5278,6 @@ class LacertaDB {
4672
5278
  db.destroy();
4673
5279
  }
4674
5280
  this._databases.clear();
4675
- connectionPool.closeAll();
4676
5281
  }
4677
5282
  }
4678
5283