@pixagram/lacerta-db 0.13.0 → 0.13.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -439,7 +439,15 @@ class LRUCache {
439
439
 
440
440
  delete(key) { return this._cache.delete(key); }
441
441
  clear() { this._cache.clear(); }
442
- has(key) { return this.get(key) !== null; }
442
+ has(key) {
443
+ const item = this._cache.get(key);
444
+ if (!item) return false;
445
+ if (this._ttl && (Date.now() - item.ts > this._ttl)) {
446
+ this._cache.delete(key);
447
+ return false;
448
+ }
449
+ return true;
450
+ }
443
451
  get size() { return this._cache.size; }
444
452
  }
445
453
 
@@ -755,18 +763,38 @@ class BrowserCompressionUtility {
755
763
  }
756
764
  }
757
765
 
766
+ /**
767
+ * Synchronous "compression" — real CompressionStream requires async,
768
+ * so we prepend the raw marker (0x00) for format compatibility with
769
+ * the async decompress path. Without this marker, a doc packed via
770
+ * packSync could be silently misread by the async unpack path.
771
+ */
758
772
  compressSync(input) {
759
773
  if (!(input instanceof Uint8Array)) {
760
774
  throw new TypeError('Input must be Uint8Array');
761
775
  }
762
- return input;
776
+ const result = new Uint8Array(input.byteLength + 1);
777
+ result[0] = 0x00; // Raw marker
778
+ result.set(input, 1);
779
+ return result;
763
780
  }
764
781
 
765
782
  decompressSync(input) {
766
783
  if (!(input instanceof Uint8Array)) {
767
784
  throw new TypeError('Input must be Uint8Array');
768
785
  }
769
- return input;
786
+ if (input.length === 0) return input;
787
+
788
+ const marker = input[0];
789
+ if (marker === 0x01) {
790
+ // Deflate-compressed data — can't decompress synchronously
791
+ throw new LacertaDBError(
792
+ 'Cannot synchronously decompress deflate data. Use async unpack() instead.',
793
+ 'SYNC_DECOMPRESS_NOT_SUPPORTED'
794
+ );
795
+ }
796
+ // 0x00 (raw) or legacy (no marker): strip marker if present
797
+ return marker === 0x00 ? input.slice(1) : input;
770
798
  }
771
799
  }
772
800
 
@@ -1263,6 +1291,36 @@ class QuadTree {
1263
1291
  }
1264
1292
  }
1265
1293
 
1294
+ /**
1295
+ * Targeted removal: navigate to the quad containing (x, y) and remove
1296
+ * the point with matching data. O(log n) instead of O(n) full scan.
1297
+ * @param {number} x - Longitude
1298
+ * @param {number} y - Latitude
1299
+ * @param {*} id - Document ID
1300
+ * @returns {boolean} true if removed
1301
+ */
1302
+ removeAt(x, y, id) {
1303
+ if (!this._contains(this.boundary, { x, y })) return false;
1304
+
1305
+ // Try to remove from this node's points
1306
+ for (let i = 0; i < this.points.length; i++) {
1307
+ if (this.points[i].data === id) {
1308
+ this.points.splice(i, 1);
1309
+ return true;
1310
+ }
1311
+ }
1312
+
1313
+ // Navigate into the correct child quad
1314
+ if (this.divided) {
1315
+ return this.northeast.removeAt(x, y, id) ||
1316
+ this.northwest.removeAt(x, y, id) ||
1317
+ this.southeast.removeAt(x, y, id) ||
1318
+ this.southwest.removeAt(x, y, id);
1319
+ }
1320
+
1321
+ return false;
1322
+ }
1323
+
1266
1324
  _subdivide() {
1267
1325
  const {x, y, w, h} = this.boundary;
1268
1326
  const mw = w/2;
@@ -1872,21 +1930,110 @@ class BTreeIndex {
1872
1930
  }
1873
1931
 
1874
1932
  /**
1875
- * Restore a BTreeIndex from persisted sorted entries.
1876
- * Much faster than full document scan + unpack.
1877
- * @param {Array} entries - [[key, [docId1, ...]], ...]
1933
+ * Restore a BTreeIndex from persisted sorted entries via O(n) bottom-up bulk-load.
1934
+ * Builds leaf nodes directly from sorted data, then constructs internal levels
1935
+ * by promoting separators no individual insert() calls, no comparisons.
1936
+ * @param {Array} entries - [[key, [docId1, ...]], ...] — MUST be sorted by key
1878
1937
  * @param {number} [order=4]
1879
1938
  * @returns {BTreeIndex}
1880
1939
  */
1881
1940
  static fromSortedEntries(entries, order = 4) {
1882
1941
  const tree = new BTreeIndex(order);
1942
+ if (entries.length === 0) return tree;
1943
+
1944
+ const maxKeys = 2 * order - 1;
1945
+
1946
+ // Count total values for _size
1947
+ let totalSize = 0;
1883
1948
  for (let i = 0; i < entries.length; i++) {
1884
- const [key, values] = entries[i];
1885
- if (key === undefined || key === null) continue;
1886
- for (let j = 0; j < values.length; j++) {
1887
- tree.insert(key, values[j]);
1949
+ if (entries[i][0] === undefined || entries[i][0] === null) continue;
1950
+ totalSize += entries[i][1].length;
1951
+ }
1952
+
1953
+ // Filter out null/undefined keys
1954
+ const clean = [];
1955
+ for (let i = 0; i < entries.length; i++) {
1956
+ if (entries[i][0] !== undefined && entries[i][0] !== null) {
1957
+ clean.push(entries[i]);
1958
+ }
1959
+ }
1960
+
1961
+ if (clean.length === 0) return tree;
1962
+
1963
+ // Step 1: Build leaf nodes, filling each to maxKeys
1964
+ const leaves = [];
1965
+ let pos = 0;
1966
+ while (pos < clean.length) {
1967
+ const node = new BTreeNode(order, true);
1968
+ const remaining = clean.length - pos;
1969
+
1970
+ // How many entries for this leaf?
1971
+ let count;
1972
+ if (remaining <= maxKeys) {
1973
+ // Last chunk — take everything
1974
+ count = remaining;
1975
+ } else if (remaining < maxKeys + order) {
1976
+ // Would leave a tiny next leaf — split evenly
1977
+ count = Math.ceil(remaining / 2);
1978
+ } else {
1979
+ count = maxKeys;
1980
+ }
1981
+
1982
+ for (let j = 0; j < count; j++) {
1983
+ const [key, values] = clean[pos++];
1984
+ node.keys[j] = key;
1985
+ node.values[j] = new Set(values);
1986
+ node.n++;
1888
1987
  }
1988
+ leaves.push(node);
1889
1989
  }
1990
+
1991
+ // Step 2: Build internal levels bottom-up
1992
+ let level = leaves;
1993
+ while (level.length > 1) {
1994
+ const parents = [];
1995
+ let ci = 0;
1996
+
1997
+ while (ci < level.length) {
1998
+ const parent = new BTreeNode(order, false);
1999
+ parent.children[0] = level[ci++];
2000
+
2001
+ // Promote first key from each subsequent child as separator
2002
+ while (parent.n < maxKeys && ci < level.length) {
2003
+ const child = level[ci];
2004
+
2005
+ // Promote child's first key+values as separator in parent
2006
+ parent.keys[parent.n] = child.keys[0];
2007
+ parent.values[parent.n] = child.values[0];
2008
+
2009
+ // Shift child entries left to remove promoted key
2010
+ for (let j = 0; j < child.n - 1; j++) {
2011
+ child.keys[j] = child.keys[j + 1];
2012
+ child.values[j] = child.values[j + 1];
2013
+ }
2014
+ if (!child.leaf) {
2015
+ for (let j = 0; j < child.n; j++) {
2016
+ child.children[j] = child.children[j + 1];
2017
+ }
2018
+ child.children[child.n] = undefined;
2019
+ }
2020
+ child.keys[child.n - 1] = undefined;
2021
+ child.values[child.n - 1] = undefined;
2022
+ child.n--;
2023
+
2024
+ parent.children[parent.n + 1] = child;
2025
+ parent.n++;
2026
+ ci++;
2027
+ }
2028
+
2029
+ parents.push(parent);
2030
+ }
2031
+
2032
+ level = parents;
2033
+ }
2034
+
2035
+ tree._root = level[0];
2036
+ tree._size = totalSize;
1890
2037
  return tree;
1891
2038
  }
1892
2039
  }
@@ -1991,6 +2138,63 @@ class TextIndex {
1991
2138
  get size() {
1992
2139
  return this._docTokens.size;
1993
2140
  }
2141
+
2142
+ /**
2143
+ * Export index state for persistence.
2144
+ * Only the inverted index is stored — _docTokens is derived on restore.
2145
+ * Format: { invertedIndex: [[token, [docId, ...]], ...] }
2146
+ * @returns {Object}
2147
+ */
2148
+ toSerializable() {
2149
+ const invertedIndex = [];
2150
+ for (const [token, docIds] of this._invertedIndex) {
2151
+ invertedIndex.push([token, Array.from(docIds)]);
2152
+ }
2153
+ return { invertedIndex };
2154
+ }
2155
+
2156
+ /**
2157
+ * Restore a TextIndex from persisted data.
2158
+ * Rebuilds both _invertedIndex and _docTokens from the serialized inverted index.
2159
+ * @param {Object} data - { invertedIndex: [[token, [docId, ...]], ...] }
2160
+ * @returns {TextIndex}
2161
+ */
2162
+ static fromSerialized(data) {
2163
+ const idx = new TextIndex();
2164
+ if (!data || !Array.isArray(data.invertedIndex)) return idx;
2165
+
2166
+ for (const [token, docIds] of data.invertedIndex) {
2167
+ const docSet = new Set(docIds);
2168
+ idx._invertedIndex.set(token, docSet);
2169
+ // Derive _docTokens from inverted index
2170
+ for (const docId of docIds) {
2171
+ if (!idx._docTokens.has(docId)) idx._docTokens.set(docId, new Set());
2172
+ idx._docTokens.get(docId).add(token);
2173
+ }
2174
+ }
2175
+ return idx;
2176
+ }
2177
+
2178
+ /**
2179
+ * Verify index integrity. TextIndex is always healthy if it loaded.
2180
+ * @returns {{ healthy: boolean, issues: Array, requiresRebuild: boolean }}
2181
+ */
2182
+ verify() {
2183
+ const issues = [];
2184
+ // Cross-check: every docId in _invertedIndex must appear in _docTokens
2185
+ for (const [token, docIds] of this._invertedIndex) {
2186
+ for (const docId of docIds) {
2187
+ if (!this._docTokens.has(docId)) {
2188
+ issues.push(`Token '${token}' references unknown docId '${docId}'`);
2189
+ }
2190
+ }
2191
+ }
2192
+ return {
2193
+ healthy: issues.length === 0,
2194
+ issues,
2195
+ requiresRebuild: issues.length > 0
2196
+ };
2197
+ }
1994
2198
  }
1995
2199
 
1996
2200
  // ========================
@@ -2001,6 +2205,8 @@ class GeoIndex {
2001
2205
  constructor() {
2002
2206
  this._tree = new QuadTree({x: 0, y: 0, w: 180, h: 90});
2003
2207
  this._size = 0;
2208
+ // Coordinate lookup: docId → {x, y} for O(log n) targeted removal
2209
+ this._pointLookup = new Map();
2004
2210
  }
2005
2211
 
2006
2212
  addPoint(coords, docId) {
@@ -2008,11 +2214,20 @@ class GeoIndex {
2008
2214
  return;
2009
2215
  }
2010
2216
  this._tree.insert({x: coords.lng, y: coords.lat, data: docId});
2217
+ this._pointLookup.set(docId, { x: coords.lng, y: coords.lat });
2011
2218
  this._size++;
2012
2219
  }
2013
2220
 
2014
2221
  removePoint(docId) {
2015
- this._tree.remove(docId);
2222
+ const coords = this._pointLookup.get(docId);
2223
+ if (coords) {
2224
+ // Targeted removal: navigate to the correct quad and remove there
2225
+ this._tree.removeAt(coords.x, coords.y, docId);
2226
+ this._pointLookup.delete(docId);
2227
+ } else {
2228
+ // Fallback: full-tree scan (shouldn't happen if data is consistent)
2229
+ this._tree.remove(docId);
2230
+ }
2016
2231
  if (this._size > 0) this._size--;
2017
2232
  }
2018
2233
 
@@ -2084,6 +2299,178 @@ class GeoIndex {
2084
2299
  get size() {
2085
2300
  return this._size;
2086
2301
  }
2302
+
2303
+ /**
2304
+ * Export all points for persistence.
2305
+ * Uses Float64Array for coordinates (TurboSerial handles TypedArrays natively).
2306
+ * Format: { coords: Float64Array([lng0, lat0, lng1, lat1, ...]), docIds: [id0, id1, ...] }
2307
+ * @returns {Object}
2308
+ */
2309
+ toSerializable() {
2310
+ const points = [];
2311
+ this._collectAllPoints(this._tree, points);
2312
+
2313
+ const coords = new Float64Array(points.length * 2);
2314
+ const docIds = new Array(points.length);
2315
+
2316
+ for (let i = 0; i < points.length; i++) {
2317
+ coords[i * 2] = points[i].x; // lng
2318
+ coords[i * 2 + 1] = points[i].y; // lat
2319
+ docIds[i] = points[i].data; // docId
2320
+ }
2321
+
2322
+ return { coords, docIds };
2323
+ }
2324
+
2325
+ /**
2326
+ * Recursively collect all points from the QuadTree.
2327
+ * @param {QuadTree} node
2328
+ * @param {Array} points
2329
+ * @private
2330
+ */
2331
+ _collectAllPoints(node, points) {
2332
+ for (const p of node.points) {
2333
+ points.push(p);
2334
+ }
2335
+ if (node.divided) {
2336
+ this._collectAllPoints(node.northeast, points);
2337
+ this._collectAllPoints(node.northwest, points);
2338
+ this._collectAllPoints(node.southeast, points);
2339
+ this._collectAllPoints(node.southwest, points);
2340
+ }
2341
+ }
2342
+
2343
+ /**
2344
+ * Restore a GeoIndex from persisted data.
2345
+ * @param {Object} data - { coords: Float64Array, docIds: Array }
2346
+ * @returns {GeoIndex}
2347
+ */
2348
+ static fromSerialized(data) {
2349
+ const idx = new GeoIndex();
2350
+ if (!data || !data.coords || !data.docIds) return idx;
2351
+
2352
+ const { coords, docIds } = data;
2353
+ for (let i = 0; i < docIds.length; i++) {
2354
+ idx.addPoint(
2355
+ { lng: coords[i * 2], lat: coords[i * 2 + 1] },
2356
+ docIds[i]
2357
+ );
2358
+ }
2359
+ return idx;
2360
+ }
2361
+
2362
+ /**
2363
+ * Verify index integrity.
2364
+ * @returns {{ healthy: boolean, issues: Array, requiresRebuild: boolean }}
2365
+ */
2366
+ verify() {
2367
+ // Verify size consistency: count all points vs _size
2368
+ const points = [];
2369
+ this._collectAllPoints(this._tree, points);
2370
+ const issues = [];
2371
+ if (points.length !== this._size) {
2372
+ issues.push(`Size mismatch: _size=${this._size}, actual=${points.length}`);
2373
+ }
2374
+ return {
2375
+ healthy: issues.length === 0,
2376
+ issues,
2377
+ requiresRebuild: issues.length > 0
2378
+ };
2379
+ }
2380
+ }
2381
+
2382
+ // ========================
2383
+ // Index Manager (Cursor Optimized)
2384
+ // ========================
2385
+
2386
+ // ========================
2387
+ // Hash Index (O(1) Lookup)
2388
+ // ========================
2389
+
2390
class HashIndex {
  constructor() {
    // value → Set<docId>
    this._map = new Map();
  }

  /** Add docId under value, creating the bucket on first use. */
  insert(value, docId) {
    const bucket = this._map.get(value);
    if (bucket === undefined) {
      this._map.set(value, new Set([docId]));
    } else {
      bucket.add(docId);
    }
  }

  /** @returns {Array} docIds stored under value ([] when none). */
  find(value) {
    const bucket = this._map.get(value);
    if (!bucket) return [];
    return [...bucket];
  }

  /** Remove docId from value's bucket; drop the bucket once it empties. */
  remove(value, docId) {
    const bucket = this._map.get(value);
    if (!bucket) return;
    bucket.delete(docId);
    if (bucket.size === 0) this._map.delete(value);
  }

  has(value) {
    return this._map.has(value);
  }

  clear() {
    this._map.clear();
  }

  /** Total docIds across all buckets (O(#buckets) per call). */
  get size() {
    let total = 0;
    this._map.forEach((bucket) => { total += bucket.size; });
    return total;
  }

  /**
   * Export for persistence. Format: [[value, [docId, ...]], ...]
   * @returns {Array}
   */
  toSerializable() {
    return Array.from(this._map, ([value, docIds]) => [value, Array.from(docIds)]);
  }

  /**
   * Restore from persisted data.
   * @param {Array} entries - [[value, [docId, ...]], ...]
   * @returns {HashIndex}
   */
  static fromSerialized(entries) {
    const idx = new HashIndex();
    if (!Array.isArray(entries)) return idx;
    for (const [value, docIds] of entries) {
      idx._map.set(value, new Set(docIds));
    }
    return idx;
  }

  /**
   * Verify index integrity (every bucket must be a Set).
   * @returns {{ healthy: boolean, issues: Array, requiresRebuild: boolean }}
   */
  verify() {
    const issues = [];
    for (const [value, bucket] of this._map) {
      if (!(bucket instanceof Set)) {
        issues.push(`Value '${value}' has non-Set bucket`);
      }
    }
    return {
      healthy: issues.length === 0,
      issues,
      requiresRebuild: issues.length > 0
    };
  }
}
2088
2475
 
2089
2476
  // ========================
@@ -2207,14 +2594,17 @@ class IndexManager {
2207
2594
  }
2208
2595
 
2209
2596
  /**
2210
- * FAST PATH: Restore a BTree index from persisted entries stored in IDB.
2597
+ * FAST PATH: Restore an index from persisted entries stored in IDB.
2211
2598
  * Returns true if successful, false if persisted data is missing/corrupt.
2599
+ *
2600
+ * Supported types: btree, text, geo, hash
2601
+ *
2212
2602
  * @param {string} indexName
2213
2603
  * @returns {Promise<boolean>}
2214
2604
  */
2215
2605
  async _restoreIndex(indexName) {
2216
2606
  const index = this._indexes.get(indexName);
2217
- if (!index || index.type !== 'btree') return false;
2607
+ if (!index) return false;
2218
2608
 
2219
2609
  try {
2220
2610
  const docId = `${IndexManager.IDX_PREFIX}${indexName}`;
@@ -2222,21 +2612,66 @@ class IndexManager {
2222
2612
  this._collection._db, this._collection._storeName, docId
2223
2613
  );
2224
2614
 
2225
- if (!stored || !stored._entries || !Array.isArray(stored._entries)) {
2615
+ if (!stored) return false;
2616
+
2617
+ // Type guard: reject if persisted type doesn't match current definition
2618
+ if (stored._type && stored._type !== index.type) {
2619
+ console.warn(`[IndexManager] Type mismatch for '${indexName}': stored=${stored._type}, expected=${index.type}`);
2226
2620
  return false;
2227
2621
  }
2228
2622
 
2229
- // Restore B-Tree from sorted entries — no document scanning needed
2230
- const btree = BTreeIndex.fromSortedEntries(stored._entries, 4);
2623
+ let restored = null;
2231
2624
 
2232
- // Quick sanity check
2233
- const v = btree.verify();
2234
- if (!v.healthy) {
2235
- console.warn(`[IndexManager] Persisted index '${indexName}' is corrupt, will rebuild`);
2236
- return false;
2625
+ switch (index.type) {
2626
+ case 'btree': {
2627
+ if (!stored._entries || !Array.isArray(stored._entries)) return false;
2628
+ restored = BTreeIndex.fromSortedEntries(stored._entries, 4);
2629
+ const v = restored.verify();
2630
+ if (!v.healthy) {
2631
+ console.warn(`[IndexManager] Persisted btree '${indexName}' is corrupt, will rebuild`);
2632
+ return false;
2633
+ }
2634
+ break;
2635
+ }
2636
+
2637
+ case 'text': {
2638
+ if (!stored._data || !stored._data.invertedIndex) return false;
2639
+ restored = TextIndex.fromSerialized(stored._data);
2640
+ const v = restored.verify();
2641
+ if (!v.healthy) {
2642
+ console.warn(`[IndexManager] Persisted text index '${indexName}' is corrupt, will rebuild`);
2643
+ return false;
2644
+ }
2645
+ break;
2646
+ }
2647
+
2648
+ case 'geo': {
2649
+ if (!stored._data || !stored._data.coords || !stored._data.docIds) return false;
2650
+ restored = GeoIndex.fromSerialized(stored._data);
2651
+ const v = restored.verify();
2652
+ if (!v.healthy) {
2653
+ console.warn(`[IndexManager] Persisted geo index '${indexName}' is corrupt, will rebuild`);
2654
+ return false;
2655
+ }
2656
+ break;
2657
+ }
2658
+
2659
+ case 'hash': {
2660
+ if (!stored._entries || !Array.isArray(stored._entries)) return false;
2661
+ restored = HashIndex.fromSerialized(stored._entries);
2662
+ const v = restored.verify();
2663
+ if (!v.healthy) {
2664
+ console.warn(`[IndexManager] Persisted hash index '${indexName}' is corrupt, will rebuild`);
2665
+ return false;
2666
+ }
2667
+ break;
2668
+ }
2669
+
2670
+ default:
2671
+ return false;
2237
2672
  }
2238
2673
 
2239
- this._indexData.set(indexName, btree);
2674
+ this._indexData.set(indexName, restored);
2240
2675
  return true;
2241
2676
  } catch (e) {
2242
2677
  return false;
@@ -2244,23 +2679,56 @@ class IndexManager {
2244
2679
  }
2245
2680
 
2246
2681
  /**
2247
- * Persist a single BTree index's entries to IDB.
2682
+ * Persist a single index's entries to IDB.
2248
2683
  * Stored as a document with reserved _id in the existing 'documents' store.
2684
+ *
2685
+ * Supported types:
2686
+ * btree → toSortedEntries()
2687
+ * text → TextIndex.toSerializable()
2688
+ * geo → GeoIndex.toSerializable() (Float64Array coords)
2689
+ * hash → [[value, [docId, ...]], ...]
2690
+ *
2249
2691
  * @param {string} indexName
2250
2692
  */
2251
2693
  async _persistIndex(indexName) {
2252
2694
  const indexData = this._indexData.get(indexName);
2253
- if (!indexData || !(indexData instanceof BTreeIndex)) return;
2695
+ const index = this._indexes.get(indexName);
2696
+ if (!indexData || !index) return;
2254
2697
 
2255
2698
  try {
2256
2699
  const docId = `${IndexManager.IDX_PREFIX}${indexName}`;
2257
2700
  const payload = {
2258
2701
  _id: docId,
2259
- _entries: indexData.toSortedEntries(),
2702
+ _type: index.type,
2260
2703
  _persisted_at: Date.now(),
2261
- _size: indexData.size
2704
+ _size: indexData.size || 0
2262
2705
  };
2263
2706
 
2707
+ switch (index.type) {
2708
+ case 'btree':
2709
+ if (!(indexData instanceof BTreeIndex)) return;
2710
+ payload._entries = indexData.toSortedEntries();
2711
+ break;
2712
+
2713
+ case 'text':
2714
+ if (!(indexData instanceof TextIndex)) return;
2715
+ payload._data = indexData.toSerializable();
2716
+ break;
2717
+
2718
+ case 'geo':
2719
+ if (!(indexData instanceof GeoIndex)) return;
2720
+ payload._data = indexData.toSerializable();
2721
+ break;
2722
+
2723
+ case 'hash':
2724
+ if (!(indexData instanceof HashIndex)) return;
2725
+ payload._entries = indexData.toSerializable();
2726
+ break;
2727
+
2728
+ default:
2729
+ return; // Unknown type, skip
2730
+ }
2731
+
2264
2732
  await this._collection._indexedDB.put(
2265
2733
  this._collection._db, this._collection._storeName, payload
2266
2734
  );
@@ -2308,13 +2776,13 @@ class IndexManager {
2308
2776
  case 'btree':
2309
2777
  return new BTreeIndex();
2310
2778
  case 'hash':
2311
- return new Map();
2779
+ return new HashIndex();
2312
2780
  case 'text':
2313
2781
  return new TextIndex();
2314
2782
  case 'geo':
2315
2783
  return new GeoIndex();
2316
2784
  default:
2317
- return new Map();
2785
+ return new HashIndex();
2318
2786
  }
2319
2787
  }
2320
2788
 
@@ -2324,10 +2792,7 @@ class IndexManager {
2324
2792
  indexData.insert(value, docId);
2325
2793
  break;
2326
2794
  case 'hash':
2327
- if (!indexData.has(value)) {
2328
- indexData.set(value, new Set());
2329
- }
2330
- indexData.get(value).add(docId);
2795
+ indexData.insert(value, docId);
2331
2796
  break;
2332
2797
  case 'text':
2333
2798
  indexData.addDocument(value, docId);
@@ -2365,17 +2830,8 @@ class IndexManager {
2365
2830
  if (newValue !== undefined) indexData.insert(newValue, docId);
2366
2831
  break;
2367
2832
  case 'hash':
2368
- if (oldValue !== undefined) {
2369
- const oldSet = indexData.get(oldValue);
2370
- if (oldSet) {
2371
- oldSet.delete(docId);
2372
- if (oldSet.size === 0) indexData.delete(oldValue);
2373
- }
2374
- }
2375
- if (newValue !== undefined) {
2376
- if (!indexData.has(newValue)) indexData.set(newValue, new Set());
2377
- indexData.get(newValue).add(docId);
2378
- }
2833
+ if (oldValue !== undefined) indexData.remove(oldValue, docId);
2834
+ if (newValue !== undefined) indexData.insert(newValue, docId);
2379
2835
  break;
2380
2836
  case 'text':
2381
2837
  if (oldValue || newValue) {
@@ -2388,10 +2844,8 @@ class IndexManager {
2388
2844
  break;
2389
2845
  }
2390
2846
 
2391
- // Schedule async persistence for modified btree indexes
2392
- if (index.type === 'btree') {
2393
- this._schedulePersist(indexName);
2394
- }
2847
+ // Schedule async persistence for modified indexes (all types)
2848
+ this._schedulePersist(indexName);
2395
2849
  }
2396
2850
  }
2397
2851
 
@@ -2457,17 +2911,14 @@ class IndexManager {
2457
2911
 
2458
2912
  _queryHash(indexData, options) {
2459
2913
  if (options.$eq !== undefined) {
2460
- const docs = indexData.get(options.$eq);
2461
- return docs ? Array.from(docs) : [];
2914
+ return indexData.find(options.$eq);
2462
2915
  }
2463
2916
 
2464
2917
  if (options.$in !== undefined) {
2465
2918
  const results = new Set();
2466
2919
  for (const value of options.$in) {
2467
- const docs = indexData.get(value);
2468
- if (docs) {
2469
- docs.forEach(doc => results.add(doc));
2470
- }
2920
+ const docs = indexData.find(value);
2921
+ for (let i = 0; i < docs.length; i++) results.add(docs[i]);
2471
2922
  }
2472
2923
  return Array.from(results);
2473
2924
  }
@@ -2583,13 +3034,8 @@ class IndexManager {
2583
3034
  const needsRebuild = [];
2584
3035
 
2585
3036
  for (const [indexName, index] of this._indexes) {
2586
- if (index.type === 'btree') {
2587
- const restored = await this._restoreIndex(indexName);
2588
- if (!restored) {
2589
- needsRebuild.push(indexName);
2590
- }
2591
- } else {
2592
- // Non-btree indexes (text, geo, hash) always need rebuild
3037
+ const restored = await this._restoreIndex(indexName);
3038
+ if (!restored) {
2593
3039
  needsRebuild.push(indexName);
2594
3040
  }
2595
3041
  }
@@ -2620,8 +3066,17 @@ class IndexManager {
2620
3066
 
2621
3067
  _estimateMemoryUsage(indexData) {
2622
3068
  if (!indexData) return 0;
2623
- if (indexData instanceof Map) return indexData.size * 100;
2624
3069
  if (indexData instanceof BTreeIndex) return indexData.size * 120;
3070
+ if (indexData instanceof TextIndex) {
3071
+ // Rough estimate: inverted index entries + docTokens forward map
3072
+ let bytes = 0;
3073
+ for (const [token, docIds] of indexData._invertedIndex) {
3074
+ bytes += token.length * 2 + docIds.size * 64;
3075
+ }
3076
+ return bytes;
3077
+ }
3078
+ if (indexData instanceof GeoIndex) return indexData.size * 80;
3079
+ if (indexData instanceof HashIndex) return indexData.size * 100;
2625
3080
  return 0;
2626
3081
  }
2627
3082
 
@@ -3158,11 +3613,31 @@ class CollectionMetadata {
3158
3613
  this.createdAt = data.createdAt || Date.now();
3159
3614
  this.modifiedAt = data.modifiedAt || Date.now();
3160
3615
 
3161
- // Per-document tracking (in-memory Maps for O(1) ops)
3162
- this._docSizes = new Map(data._docSizes || []); // docId -> sizeKB
3163
- this._docModified = new Map(data._docModified || []); // docId -> timestamp
3164
- this._docPermanent = new Map(data._docPermanent || []); // docId -> boolean
3165
- this._docAttachments = new Map(data._docAttachments || []); // docId -> count
3616
+ // Per-document tracking: single Map<docId, {size, modified, permanent, attachments}>
3617
+ // Halves Map overhead vs. 4 separate Maps with identical key sets.
3618
+ this._docMeta = new Map();
3619
+
3620
+ // Hydrate from persisted data (supports both old 4-map and new unified format)
3621
+ if (data._docMeta) {
3622
+ // New unified format
3623
+ for (const [docId, meta] of data._docMeta) {
3624
+ this._docMeta.set(docId, meta);
3625
+ }
3626
+ } else if (data._docSizes) {
3627
+ // Legacy 4-map format — migrate on load
3628
+ const sizes = new Map(data._docSizes);
3629
+ const modified = new Map(data._docModified || []);
3630
+ const permanent = new Map(data._docPermanent || []);
3631
+ const attachments = new Map(data._docAttachments || []);
3632
+ for (const [docId, size] of sizes) {
3633
+ this._docMeta.set(docId, {
3634
+ size,
3635
+ modified: modified.get(docId) || Date.now(),
3636
+ permanent: permanent.get(docId) || false,
3637
+ attachments: attachments.get(docId) || 0
3638
+ });
3639
+ }
3640
+ }
3166
3641
 
3167
3642
  // Debounced persistence
3168
3643
  this._dirty = false;
@@ -3177,10 +3652,12 @@ class CollectionMetadata {
3177
3652
  // ---- Mutations (in-memory only, schedule async save) ----
3178
3653
 
3179
3654
  addDocument(docId, sizeKB, isPermanent = false, attachmentCount = 0) {
3180
- this._docSizes.set(docId, sizeKB);
3181
- this._docModified.set(docId, Date.now());
3182
- this._docPermanent.set(docId, isPermanent);
3183
- this._docAttachments.set(docId, attachmentCount);
3655
+ this._docMeta.set(docId, {
3656
+ size: sizeKB,
3657
+ modified: Date.now(),
3658
+ permanent: isPermanent,
3659
+ attachments: attachmentCount
3660
+ });
3184
3661
 
3185
3662
  this.sizeKB += sizeKB;
3186
3663
  this.length++;
@@ -3189,27 +3666,28 @@ class CollectionMetadata {
3189
3666
  }
3190
3667
 
3191
3668
  updateDocument(docId, newSizeKB, isPermanent = false, attachmentCount = 0) {
3192
- const oldSize = this._docSizes.get(docId) || 0;
3669
+ const existing = this._docMeta.get(docId);
3670
+ const oldSize = existing ? existing.size : 0;
3193
3671
  this.sizeKB = this.sizeKB - oldSize + newSizeKB;
3194
3672
 
3195
- this._docSizes.set(docId, newSizeKB);
3196
- this._docModified.set(docId, Date.now());
3197
- this._docPermanent.set(docId, isPermanent);
3198
- this._docAttachments.set(docId, attachmentCount);
3673
+ this._docMeta.set(docId, {
3674
+ size: newSizeKB,
3675
+ modified: Date.now(),
3676
+ permanent: isPermanent,
3677
+ attachments: attachmentCount
3678
+ });
3199
3679
 
3200
3680
  this.modifiedAt = Date.now();
3201
3681
  this._scheduleSave();
3202
3682
  }
3203
3683
 
3204
3684
  removeDocument(docId) {
3205
- const sizeKB = this._docSizes.get(docId) || 0;
3685
+ const existing = this._docMeta.get(docId);
3686
+ const sizeKB = existing ? existing.size : 0;
3206
3687
  this.sizeKB -= sizeKB;
3207
3688
  this.length--;
3208
3689
 
3209
- this._docSizes.delete(docId);
3210
- this._docModified.delete(docId);
3211
- this._docPermanent.delete(docId);
3212
- this._docAttachments.delete(docId);
3690
+ this._docMeta.delete(docId);
3213
3691
 
3214
3692
  this.modifiedAt = Date.now();
3215
3693
  this._scheduleSave();
@@ -3219,9 +3697,9 @@ class CollectionMetadata {
3219
3697
 
3220
3698
  getOldestNonPermanentDocuments(count) {
3221
3699
  const candidates = [];
3222
- for (const [docId, modified] of this._docModified) {
3223
- if (!this._docPermanent.get(docId)) {
3224
- candidates.push({ id: docId, modified });
3700
+ for (const [docId, meta] of this._docMeta) {
3701
+ if (!meta.permanent) {
3702
+ candidates.push({ id: docId, modified: meta.modified });
3225
3703
  }
3226
3704
  }
3227
3705
  candidates.sort((a, b) => a.modified - b.modified);
@@ -3229,15 +3707,17 @@ class CollectionMetadata {
3229
3707
  }
3230
3708
 
3231
3709
  getDocumentSize(docId) {
3232
- return this._docSizes.get(docId) || 0;
3710
+ const meta = this._docMeta.get(docId);
3711
+ return meta ? meta.size : 0;
3233
3712
  }
3234
3713
 
3235
3714
  isDocumentPermanent(docId) {
3236
- return this._docPermanent.get(docId) || false;
3715
+ const meta = this._docMeta.get(docId);
3716
+ return meta ? meta.permanent : false;
3237
3717
  }
3238
3718
 
3239
3719
  hasDocument(docId) {
3240
- return this._docSizes.has(docId);
3720
+ return this._docMeta.has(docId);
3241
3721
  }
3242
3722
 
3243
3723
  // ---- Aggregate snapshot (for DatabaseMetadata) ----
@@ -3284,10 +3764,7 @@ class CollectionMetadata {
3284
3764
  length: this.length,
3285
3765
  createdAt: this.createdAt,
3286
3766
  modifiedAt: this.modifiedAt,
3287
- _docSizes: Array.from(this._docSizes.entries()),
3288
- _docModified: Array.from(this._docModified.entries()),
3289
- _docPermanent: Array.from(this._docPermanent.entries()),
3290
- _docAttachments: Array.from(this._docAttachments.entries())
3767
+ _docMeta: Array.from(this._docMeta.entries())
3291
3768
  };
3292
3769
 
3293
3770
  try {
@@ -3403,10 +3880,7 @@ class CollectionMetadata {
3403
3880
  this.sizeKB = 0;
3404
3881
  this.length = 0;
3405
3882
  this.modifiedAt = Date.now();
3406
- this._docSizes.clear();
3407
- this._docModified.clear();
3408
- this._docPermanent.clear();
3409
- this._docAttachments.clear();
3883
+ this._docMeta.clear();
3410
3884
  this._dirty = true;
3411
3885
  this._flushSync();
3412
3886
  }
@@ -3722,6 +4196,10 @@ class QueryEngine {
3722
4196
  // Path cache: avoids repeated path.split('.') allocations during scans
3723
4197
  this._pathCache = new Map();
3724
4198
 
4199
+ // Pre-compiled Set cache for $in/$nin/$all operators.
4200
+ // Avoids rebuilding on every per-document call during a query scan.
4201
+ this._setCache = new WeakMap();
4202
+
3725
4203
  this.operators = {
3726
4204
  '$eq': (a, b) => a === b,
3727
4205
  '$ne': (a, b) => a !== b,
@@ -3729,8 +4207,18 @@ class QueryEngine {
3729
4207
  '$gte': (a, b) => a >= b,
3730
4208
  '$lt': (a, b) => a < b,
3731
4209
  '$lte': (a, b) => a <= b,
3732
- '$in': (a, b) => Array.isArray(b) && b.includes(a),
3733
- '$nin': (a, b) => Array.isArray(b) && !b.includes(a),
4210
+ '$in': (a, b) => {
4211
+ if (!Array.isArray(b)) return false;
4212
+ let s = this._setCache.get(b);
4213
+ if (!s) { s = new Set(b); this._setCache.set(b, s); }
4214
+ return s.has(a);
4215
+ },
4216
+ '$nin': (a, b) => {
4217
+ if (!Array.isArray(b)) return false;
4218
+ let s = this._setCache.get(b);
4219
+ if (!s) { s = new Set(b); this._setCache.set(b, s); }
4220
+ return !s.has(a);
4221
+ },
3734
4222
 
3735
4223
  '$and': (doc, conditions) => conditions.every(cond => this.evaluate(doc, cond)),
3736
4224
  '$or': (doc, conditions) => conditions.some(cond => this.evaluate(doc, cond)),
@@ -3740,7 +4228,12 @@ class QueryEngine {
3740
4228
  '$exists': (value, exists) => (value !== undefined) === exists,
3741
4229
  '$type': (value, type) => typeof value === type,
3742
4230
 
3743
- '$all': (arr, values) => Array.isArray(arr) && values.every(v => arr.includes(v)),
4231
+ '$all': (arr, values) => {
4232
+ if (!Array.isArray(arr)) return false;
4233
+ let s = this._setCache.get(arr);
4234
+ if (!s) { s = new Set(arr); this._setCache.set(arr, s); }
4235
+ return values.every(v => s.has(v));
4236
+ },
3744
4237
  '$elemMatch': (arr, condition) => Array.isArray(arr) && arr.some(elem => this.evaluate({ value: elem }, { value: condition })),
3745
4238
  '$size': (arr, size) => Array.isArray(arr) && arr.length === size,
3746
4239
 
@@ -3790,13 +4283,11 @@ class QueryEngine {
3790
4283
  this._pathCache.set(path, parts);
3791
4284
  // Cap cache size to prevent unbounded growth
3792
4285
  if (this._pathCache.size > 2000) {
3793
- // Delete oldest entries (first 500)
4286
+ // Delete oldest 500 entries
3794
4287
  const iter = this._pathCache.keys();
3795
- for (let i = 0; i < 500; i++) iter.next();
3796
- // Rebuild with remaining
3797
- const newCache = new Map();
3798
- for (const [k, v] of this._pathCache) newCache.set(k, v);
3799
- this._pathCache = newCache;
4288
+ for (let i = 0; i < 500; i++) {
4289
+ this._pathCache.delete(iter.next().value);
4290
+ }
3800
4291
  }
3801
4292
  }
3802
4293
  return parts;
@@ -3935,10 +4426,16 @@ class AggregationPipeline {
3935
4426
  result[fieldKey] = group.docs.length;
3936
4427
  break;
3937
4428
  case '$max':
3938
- result[fieldKey] = Math.max(...group.docs.map(d => queryEngine.getFieldValue(d, field)));
4429
+ result[fieldKey] = group.docs.reduce((max, d) => {
4430
+ const v = queryEngine.getFieldValue(d, field);
4431
+ return v !== undefined && (max === undefined || v > max) ? v : max;
4432
+ }, undefined);
3939
4433
  break;
3940
4434
  case '$min':
3941
- result[fieldKey] = Math.min(...group.docs.map(d => queryEngine.getFieldValue(d, field)));
4435
+ result[fieldKey] = group.docs.reduce((min, d) => {
4436
+ const v = queryEngine.getFieldValue(d, field);
4437
+ return v !== undefined && (min === undefined || v < min) ? v : min;
4438
+ }, undefined);
3942
4439
  break;
3943
4440
  }
3944
4441
  }
@@ -4098,12 +4595,18 @@ class MigrationManager {
4098
4595
  for (const collectionName of collections) {
4099
4596
  const coll = await this.database.getCollection(collectionName);
4100
4597
  const docs = await coll.getAll();
4598
+
4599
+ // Collect all updates, then apply in a single batch transaction
4600
+ const updates = [];
4101
4601
  for (const doc of docs) {
4102
4602
  const updated = await migration[direction](doc);
4103
4603
  if (updated) {
4104
- await coll.update(doc._id, updated);
4604
+ updates.push({ id: doc._id, data: updated });
4105
4605
  }
4106
4606
  }
4607
+ if (updates.length > 0) {
4608
+ await coll.batchUpdate(updates);
4609
+ }
4107
4610
  }
4108
4611
  }
4109
4612
  }
@@ -4114,13 +4617,18 @@ class MigrationManager {
4114
4617
 
4115
4618
  class PerformanceMonitor {
4116
4619
  constructor() {
4117
- this._metrics = {
4118
- operations: [],
4119
- latencies: [],
4120
- cacheHits: 0,
4121
- cacheMisses: 0,
4122
- memoryUsage: []
4123
- };
4620
+ // Fixed-size ring buffers — O(1) insert, no shift() overhead
4621
+ this._ops = new Array(100);
4622
+ this._opsIdx = 0;
4623
+ this._opsLen = 0;
4624
+ this._lats = new Float64Array(100);
4625
+ this._latsIdx = 0;
4626
+ this._latsLen = 0;
4627
+ this._mem = new Array(60);
4628
+ this._memIdx = 0;
4629
+ this._memLen = 0;
4630
+ this._cacheHits = 0;
4631
+ this._cacheMisses = 0;
4124
4632
  this._monitoring = false;
4125
4633
  this._monitoringInterval = null;
4126
4634
  }
@@ -4140,35 +4648,56 @@ class PerformanceMonitor {
4140
4648
 
4141
4649
  recordOperation(type, duration) {
4142
4650
  if (!this._monitoring) return;
4143
- this._metrics.operations.push({ type, duration, timestamp: Date.now() });
4144
- this._metrics.latencies.push(duration);
4145
- if (this._metrics.operations.length > 100) this._metrics.operations.shift();
4146
- if (this._metrics.latencies.length > 100) this._metrics.latencies.shift();
4651
+ this._ops[this._opsIdx] = { type, duration, timestamp: Date.now() };
4652
+ this._opsIdx = (this._opsIdx + 1) % 100;
4653
+ if (this._opsLen < 100) this._opsLen++;
4654
+ this._lats[this._latsIdx] = duration;
4655
+ this._latsIdx = (this._latsIdx + 1) % 100;
4656
+ if (this._latsLen < 100) this._latsLen++;
4147
4657
  }
4148
4658
 
4149
- recordCacheHit() { this._metrics.cacheHits++; }
4150
- recordCacheMiss() { this._metrics.cacheMisses++; }
4659
+ recordCacheHit() { this._cacheHits++; }
4660
+ recordCacheMiss() { this._cacheMisses++; }
4151
4661
 
4152
4662
  _collectMetrics() {
4153
4663
  if (performance && performance.memory) {
4154
- this._metrics.memoryUsage.push({
4664
+ this._mem[this._memIdx] = {
4155
4665
  used: performance.memory.usedJSHeapSize,
4156
4666
  total: performance.memory.totalJSHeapSize,
4157
4667
  limit: performance.memory.jsHeapSizeLimit,
4158
4668
  timestamp: Date.now()
4159
- });
4160
- if (this._metrics.memoryUsage.length > 60) this._metrics.memoryUsage.shift();
4669
+ };
4670
+ this._memIdx = (this._memIdx + 1) % 60;
4671
+ if (this._memLen < 60) this._memLen++;
4161
4672
  }
4162
4673
  }
4163
4674
 
4675
+ /** Helper: iterate the ring buffer entries (newest to oldest) */
4676
+ _iterRing(buf, idx, len) {
4677
+ const results = [];
4678
+ for (let i = 0; i < len; i++) {
4679
+ const pos = (idx - 1 - i + buf.length) % buf.length;
4680
+ if (buf[pos] !== undefined) results.push(buf[pos]);
4681
+ }
4682
+ return results;
4683
+ }
4684
+
4164
4685
  getStats() {
4165
- const opsPerSec = this._metrics.operations.filter(op => Date.now() - op.timestamp < 1000).length;
4166
- const totalLatency = this._metrics.latencies.reduce((a, b) => a + b, 0);
4167
- const avgLatency = this._metrics.latencies.length > 0 ? totalLatency / this._metrics.latencies.length : 0;
4168
- const totalCacheOps = this._metrics.cacheHits + this._metrics.cacheMisses;
4169
- const cacheHitRate = totalCacheOps > 0 ? (this._metrics.cacheHits / totalCacheOps) * 100 : 0;
4686
+ const now = Date.now();
4687
+ const ops = this._iterRing(this._ops, this._opsIdx, this._opsLen);
4688
+ const opsPerSec = ops.filter(op => now - op.timestamp < 1000).length;
4689
+
4690
+ let totalLatency = 0;
4691
+ for (let i = 0; i < this._latsLen; i++) {
4692
+ totalLatency += this._lats[i];
4693
+ }
4694
+ const avgLatency = this._latsLen > 0 ? totalLatency / this._latsLen : 0;
4170
4695
 
4171
- const latestMemory = this._metrics.memoryUsage.length > 0 ? this._metrics.memoryUsage[this._metrics.memoryUsage.length - 1] : null;
4696
+ const totalCacheOps = this._cacheHits + this._cacheMisses;
4697
+ const cacheHitRate = totalCacheOps > 0 ? (this._cacheHits / totalCacheOps) * 100 : 0;
4698
+
4699
+ const memEntries = this._iterRing(this._mem, this._memIdx, this._memLen);
4700
+ const latestMemory = memEntries.length > 0 ? memEntries[0] : null;
4172
4701
  const memoryUsageMB = latestMemory ? latestMemory.used / (1024 * 1024) : 0;
4173
4702
 
4174
4703
  return {
@@ -4186,14 +4715,18 @@ class PerformanceMonitor {
4186
4715
  if (stats.avgLatency > 100) {
4187
4716
  tips.push('High average latency detected. Consider enabling compression and indexing frequently queried fields.');
4188
4717
  }
4189
- if (stats.cacheHitRate < 50 && (this._metrics.cacheHits + this._metrics.cacheMisses) > 20) {
4718
+ if (stats.cacheHitRate < 50 && (this._cacheHits + this._cacheMisses) > 20) {
4190
4719
  tips.push('Low cache hit rate. Consider increasing cache size or optimizing query patterns.');
4191
4720
  }
4192
- if (this._metrics.memoryUsage.length > 10) {
4193
- const recent = this._metrics.memoryUsage.slice(-10);
4194
- const trend = recent[recent.length - 1].used - recent[0].used;
4195
- if (trend > 10 * 1024 * 1024) {
4196
- tips.push('Memory usage is increasing rapidly. Check for memory leaks or consider batch processing.');
4721
+ if (this._memLen > 10) {
4722
+ const memEntries = this._iterRing(this._mem, this._memIdx, Math.min(this._memLen, 10));
4723
+ const oldest = memEntries[memEntries.length - 1];
4724
+ const newest = memEntries[0];
4725
+ if (oldest && newest) {
4726
+ const trend = newest.used - oldest.used;
4727
+ if (trend > 10 * 1024 * 1024) {
4728
+ tips.push('Memory usage is increasing rapidly. Check for memory leaks or consider batch processing.');
4729
+ }
4197
4730
  }
4198
4731
  }
4199
4732
  return tips.length > 0 ? tips : ['Performance is optimal. No issues detected.'];
@@ -4284,7 +4817,6 @@ class Collection {
4284
4817
  this.database = database;
4285
4818
  this._serializer = database._serializer;
4286
4819
  this._base64 = database._base64;
4287
- this._db = null; // Reference to parent's consolidated IDB connection
4288
4820
  this._storeName = name; // Object store name within the consolidated database
4289
4821
  this._metadata = null;
4290
4822
  this._settings = database.settings;
@@ -4304,6 +4836,10 @@ class Collection {
4304
4836
  // Document-level cache: avoids IDB reads + deserialization for repeated get() calls
4305
4837
  this._docCache = new LRUCache(200);
4306
4838
 
4839
+ // Generation counter: bumped on every write, included in query cache keys.
4840
+ // Old cache entries die naturally via LRU eviction — no nuclear clear() needed.
4841
+ this._cacheGeneration = 0;
4842
+
4307
4843
  // Pending indexes: definitions registered before init() — applied during init
4308
4844
  this._pendingIndexes = [];
4309
4845
 
@@ -4311,6 +4847,13 @@ class Collection {
4311
4847
  this._initialized = false;
4312
4848
  }
4313
4849
 
4850
+ /**
4851
+ * Live reference to parent Database's consolidated IDB connection.
4852
+ * Always reads from the parent to avoid stale references after
4853
+ * _ensureStore version bumps close/reopen the connection.
4854
+ */
4855
+ get _db() { return this.database._db; }
4856
+
4314
4857
  get settings() {
4315
4858
  return this._settings;
4316
4859
  }
@@ -4328,7 +4871,6 @@ class Collection {
4328
4871
 
4329
4872
  // Use the parent Database's consolidated IDB connection
4330
4873
  await this.database._ensureStore(this._storeName);
4331
- this._db = this.database._db;
4332
4874
 
4333
4875
  // Load per-collection metadata from IDB (with localStorage migration fallback)
4334
4876
  this._metadata = await CollectionMetadata.loadAsync(
@@ -4421,7 +4963,7 @@ class Collection {
4421
4963
 
4422
4964
  await this._checkSpaceLimit();
4423
4965
  await this._trigger('afterAdd', doc);
4424
- this._cacheStrategy.clear();
4966
+ this._cacheGeneration++;
4425
4967
  this._docCache.set(doc._id, fullDoc);
4426
4968
  return doc._id;
4427
4969
  }
@@ -4536,38 +5078,64 @@ class Collection {
4536
5078
  this.database.metadata.setCollection(this._metadata);
4537
5079
 
4538
5080
  await this._trigger('afterUpdate', doc);
4539
- this._cacheStrategy.clear();
5081
+ this._cacheGeneration++;
4540
5082
  this._docCache.set(doc._id, newDocOutput);
4541
5083
  return doc._id;
4542
5084
  }
4543
5085
 
5086
+ /**
5087
+ * Insert or update a document atomically.
5088
+ * If a document with the given id exists, merges updates; otherwise creates it.
5089
+ * @param {string} docId
5090
+ * @param {object} data
5091
+ * @param {object} [options]
5092
+ * @returns {Promise<string>} Document ID
5093
+ */
5094
+ async upsert(docId, data, options = {}) {
5095
+ if (!this._initialized) await this.init();
5096
+
5097
+ const existing = await this._indexedDB.get(this._db, this._storeName, docId);
5098
+ if (existing) {
5099
+ return this.update(docId, data, options);
5100
+ }
5101
+ return this.add(data, { ...options, id: docId });
5102
+ }
5103
+
4544
5104
  async delete(docId, options = {}) {
4545
5105
  if (!this._initialized) await this.init();
4546
5106
 
4547
5107
  await this._trigger('beforeDelete', docId);
4548
5108
 
4549
- const doc = await this._indexedDB.get(this._db, this._storeName, docId);
4550
- if (!doc) {
5109
+ const stored = await this._indexedDB.get(this._db, this._storeName, docId);
5110
+ if (!stored) {
4551
5111
  throw new LacertaDBError('Document not found for deletion', 'DOCUMENT_NOT_FOUND');
4552
5112
  }
4553
5113
 
4554
- if (doc._permanent && !options.force) {
5114
+ if (stored._permanent && !options.force) {
4555
5115
  throw new LacertaDBError(
4556
5116
  'Cannot delete a permanent document. Use options.force = true to force deletion.',
4557
5117
  'PERMANENT_DOCUMENT_PROTECTION'
4558
5118
  );
4559
5119
  }
4560
5120
 
4561
- if (doc._permanent && options.force) {
5121
+ if (stored._permanent && options.force) {
4562
5122
  console.warn(`Force deleting permanent document: ${docId}`);
4563
5123
  }
4564
5124
 
4565
- const fullDoc = await this.get(docId);
5125
+ // Unpack the doc we already fetched — no second IDB read
5126
+ const existingDoc = new Document(stored, {
5127
+ encrypted: stored._encrypted,
5128
+ compressed: stored._compressed
5129
+ }, this._serializer);
5130
+ if (stored.packedData) {
5131
+ await existingDoc.unpack(this.database.encryption);
5132
+ }
5133
+ const fullDoc = existingDoc.objectOutput();
4566
5134
 
4567
5135
  await this._indexManager.updateIndexForDocument(docId, fullDoc, null);
4568
5136
 
4569
5137
  await this._indexedDB.delete(this._db, this._storeName, docId);
4570
- const attachments = doc._attachments;
5138
+ const attachments = stored._attachments;
4571
5139
  if (attachments && attachments.length > 0) {
4572
5140
  await this._opfs.deleteAttachments(this.database.name, this.name, docId);
4573
5141
  }
@@ -4576,14 +5144,14 @@ class Collection {
4576
5144
  this.database.metadata.setCollection(this._metadata);
4577
5145
 
4578
5146
  await this._trigger('afterDelete', docId);
4579
- this._cacheStrategy.clear();
5147
+ this._cacheGeneration++;
4580
5148
  this._docCache.delete(docId);
4581
5149
  } async query(filter = {}, options = {}) {
4582
5150
  if (!this._initialized) await this.init();
4583
5151
 
4584
5152
  const startTime = performance.now();
4585
5153
 
4586
- const cacheKey = _stableCacheKey(filter, options);
5154
+ const cacheKey = _stableCacheKey(filter, options) ^ (this._cacheGeneration * 2654435761);
4587
5155
  const cached = this._cacheStrategy.get(cacheKey);
4588
5156
 
4589
5157
  if (cached) {
@@ -4595,24 +5163,51 @@ class Collection {
4595
5163
  let results;
4596
5164
  let usedIndex = false;
4597
5165
 
5166
+ // --- Index selection: pick the most selective matching index ---
5167
+ let bestIndex = null;
5168
+ let bestSize = Infinity;
5169
+
4598
5170
  for (const [indexName, index] of this._indexManager.indexes) {
4599
5171
  const fieldValue = filter[index.fieldPath];
4600
5172
  if (fieldValue !== undefined) {
4601
- const docIds = await this._indexManager.query(indexName, fieldValue);
4602
- results = await Promise.all(
4603
- docIds.map(id => this.get(id).catch(() => null))
4604
- );
4605
- results = results.filter(Boolean);
4606
- usedIndex = true;
4607
- break;
5173
+ const indexData = this._indexManager._indexData.get(indexName);
5174
+ const size = indexData ? (indexData.size || 0) : Infinity;
5175
+ if (size < bestSize) {
5176
+ bestSize = size;
5177
+ bestIndex = { indexName, fieldValue };
5178
+ }
4608
5179
  }
4609
5180
  }
4610
5181
 
4611
- if (!usedIndex) {
4612
- results = await this.getAll(options);
4613
- if (Object.keys(filter).length > 0) {
4614
- results = results.filter(doc => queryEngine.evaluate(doc, filter));
5182
+ if (bestIndex) {
5183
+ const docIds = await this._indexManager.query(bestIndex.indexName, bestIndex.fieldValue);
5184
+ results = await Promise.all(
5185
+ docIds.map(id => this.get(id).catch(() => null))
5186
+ );
5187
+ results = results.filter(Boolean);
5188
+
5189
+ // Apply remaining filter fields the index didn't cover
5190
+ const remainingFilter = {};
5191
+ const indexedField = this._indexManager.indexes.get(bestIndex.indexName).fieldPath;
5192
+ for (const key in filter) {
5193
+ if (key !== indexedField) remainingFilter[key] = filter[key];
4615
5194
  }
5195
+ if (Object.keys(remainingFilter).length > 0) {
5196
+ results = results.filter(doc => queryEngine.evaluate(doc, remainingFilter));
5197
+ }
5198
+ usedIndex = true;
5199
+ }
5200
+
5201
+ if (!usedIndex) {
5202
+ const hasFilter = Object.keys(filter).length > 0;
5203
+ // Can we short-circuit? Only if no sort is needed.
5204
+ const canShortCircuit = !options.sort && hasFilter;
5205
+ const target = canShortCircuit ? (options.skip || 0) + (options.limit || Infinity) : Infinity;
5206
+
5207
+ results = await this._scanWithFilter(
5208
+ hasFilter ? filter : null,
5209
+ target
5210
+ );
4616
5211
  }
4617
5212
 
4618
5213
  if (options.sort) results = aggregationPipeline.stages.$sort(results, options.sort);
@@ -4632,6 +5227,64 @@ class Collection {
4632
5227
  return results;
4633
5228
  }
4634
5229
 
5230
+ /**
5231
+ * Cursor-based scan: fetches documents in batches, deserializes and
5232
+ * evaluates filter per-batch, stops early when target count is reached.
5233
+ * Avoids loading + deserializing the entire collection for selective queries.
5234
+ *
5235
+ * @param {object|null} filter - Query filter, or null for all docs
5236
+ * @param {number} target - Stop after collecting this many matches (Infinity = no limit)
5237
+ * @returns {Promise<Array>}
5238
+ */
5239
+ async _scanWithFilter(filter, target) {
5240
+ const results = [];
5241
+ let lastKey = null;
5242
+ const batchSize = 200;
5243
+
5244
+ while (true) {
5245
+ const batch = await this._indexedDB.getBatch(
5246
+ this._db, this._storeName, lastKey, batchSize
5247
+ );
5248
+
5249
+ if (batch.length === 0) break;
5250
+
5251
+ for (const docData of batch) {
5252
+ lastKey = docData._id;
5253
+
5254
+ // Skip persisted index entries
5255
+ if (typeof docData._id === 'string' && docData._id.startsWith(IndexManager.IDX_PREFIX)) {
5256
+ continue;
5257
+ }
5258
+
5259
+ try {
5260
+ const doc = new Document(docData, {
5261
+ encrypted: docData._encrypted,
5262
+ compressed: docData._compressed
5263
+ }, this._serializer);
5264
+
5265
+ if (docData.packedData) {
5266
+ await doc.unpack(this.database.encryption);
5267
+ }
5268
+
5269
+ const output = doc.objectOutput();
5270
+
5271
+ if (!filter || queryEngine.evaluate(output, filter)) {
5272
+ results.push(output);
5273
+ this._docCache.set(docData._id, output);
5274
+
5275
+ if (results.length >= target) return results;
5276
+ }
5277
+ } catch (error) {
5278
+ console.error(`Failed to unpack document ${docData._id}:`, error);
5279
+ }
5280
+ }
5281
+
5282
+ if (batch.length < batchSize) break;
5283
+ }
5284
+
5285
+ return results;
5286
+ }
5287
+
4635
5288
  async aggregate(pipeline) {
4636
5289
  if (!this._initialized) await this.init();
4637
5290
 
@@ -4713,15 +5366,27 @@ class Collection {
4713
5366
  const skipped = [];
4714
5367
  const useSync = !this.database.encryption && !(options.compressed);
4715
5368
 
4716
- // Phase 1: Bulk-fetch all existing docs in a single IDB read transaction
4717
- const updateIds = updates.map(u => u.id);
5369
+ // Phase 1: Fetch only the documents we need (not the entire collection)
4718
5370
  const storedMap = new Map();
4719
5371
 
4720
- // Fetch all at once via getAll, then build a Map for O(1) lookup
4721
- const allStored = await this._indexedDB.getAll(this._db, this._storeName);
4722
- for (const doc of allStored) {
4723
- if (doc._id && updateIds.includes(doc._id)) {
4724
- storedMap.set(doc._id, doc);
5372
+ // Single read transaction: fetch all target docs at once via IDB getAll
5373
+ // with a bounded key set, falling back to individual gets for small batches
5374
+ if (updates.length <= 20) {
5375
+ // Small batch: individual gets (avoids loading/filtering entire store)
5376
+ const fetches = updates.map(u =>
5377
+ this._indexedDB.get(this._db, this._storeName, u.id)
5378
+ .then(doc => doc && storedMap.set(u.id, doc))
5379
+ .catch(() => {})
5380
+ );
5381
+ await Promise.all(fetches);
5382
+ } else {
5383
+ // Larger batch: use getAll + Set-based filter (still cheaper than N transactions)
5384
+ const updateIdSet = new Set(updates.map(u => u.id));
5385
+ const allStored = await this._indexedDB.getAll(this._db, this._storeName);
5386
+ for (const doc of allStored) {
5387
+ if (doc._id && updateIdSet.has(doc._id)) {
5388
+ storedMap.set(doc._id, doc);
5389
+ }
4725
5390
  }
4726
5391
  }
4727
5392
 
@@ -4781,7 +5446,7 @@ class Collection {
4781
5446
  }
4782
5447
 
4783
5448
  this.database.metadata.setCollection(this._metadata);
4784
- this._cacheStrategy.clear();
5449
+ this._cacheGeneration++;
4785
5450
 
4786
5451
  if (this._performanceMonitor) {
4787
5452
  this._performanceMonitor.recordOperation('batchUpdate', performance.now() - startTime);
@@ -4808,21 +5473,51 @@ class Collection {
4808
5473
  const docsToRemove = [];
4809
5474
  const skipped = [];
4810
5475
 
4811
- // Phase 1: Validate all documents and prepare delete operations
5476
+ // Phase 1: Bulk-fetch all target docs, validate, and unpack in-place
5477
+ const storedMap = new Map();
5478
+
5479
+ if (normalizedItems.length <= 20) {
5480
+ // Small batch: parallel individual gets
5481
+ const fetches = normalizedItems.map(({ id }) =>
5482
+ this._indexedDB.get(this._db, this._storeName, id)
5483
+ .then(doc => doc && storedMap.set(id, doc))
5484
+ .catch(() => {})
5485
+ );
5486
+ await Promise.all(fetches);
5487
+ } else {
5488
+ // Large batch: single getAll + Set-filter
5489
+ const idSet = new Set(normalizedItems.map(({ id }) => id));
5490
+ const allStored = await this._indexedDB.getAll(this._db, this._storeName);
5491
+ for (const doc of allStored) {
5492
+ if (doc._id && idSet.has(doc._id)) {
5493
+ storedMap.set(doc._id, doc);
5494
+ }
5495
+ }
5496
+ }
5497
+
4812
5498
  for (const { id, options } of normalizedItems) {
4813
- const doc = await this._indexedDB.get(this._db, this._storeName, id);
4814
- if (!doc) {
5499
+ const stored = storedMap.get(id);
5500
+ if (!stored) {
4815
5501
  skipped.push({ success: false, id, error: 'Document not found' });
4816
5502
  continue;
4817
5503
  }
4818
5504
 
4819
- if (doc._permanent && !options.force) {
5505
+ if (stored._permanent && !options.force) {
4820
5506
  skipped.push({ success: false, id, error: 'Cannot delete permanent document without force flag' });
4821
5507
  continue;
4822
5508
  }
4823
5509
 
4824
- const fullDoc = await this.get(id);
4825
- docsToRemove.push({ id, fullDoc, stored: doc });
5510
+ // Unpack directly from the raw doc — no second IDB fetch
5511
+ const existingDoc = new Document(stored, {
5512
+ encrypted: stored._encrypted,
5513
+ compressed: stored._compressed
5514
+ }, this._serializer);
5515
+ if (stored.packedData) {
5516
+ await existingDoc.unpack(this.database.encryption);
5517
+ }
5518
+ const fullDoc = existingDoc.objectOutput();
5519
+
5520
+ docsToRemove.push({ id, fullDoc, stored });
4826
5521
 
4827
5522
  operations.push({
4828
5523
  type: 'delete',
@@ -4851,7 +5546,7 @@ class Collection {
4851
5546
  }
4852
5547
 
4853
5548
  this.database.metadata.setCollection(this._metadata);
4854
- this._cacheStrategy.clear();
5549
+ this._cacheGeneration++;
4855
5550
 
4856
5551
  if (this._performanceMonitor) {
4857
5552
  this._performanceMonitor.recordOperation('batchDelete', performance.now() - startTime);
@@ -5813,10 +6508,11 @@ export {
5813
6508
  BTreeIndex,
5814
6509
  TextIndex,
5815
6510
  GeoIndex,
6511
+ HashIndex,
5816
6512
  SecureDatabaseEncryption,
5817
6513
  QuickStore,
5818
6514
  AsyncMutex,
5819
6515
  IndexedDBConnectionPool,
5820
6516
  BrowserCompressionUtility,
5821
6517
  BrowserEncryptionUtility
5822
- };
6518
+ };