jexidb 2.1.2 → 2.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/Database.mjs CHANGED
@@ -770,10 +770,19 @@ class Database extends EventEmitter {
     }
 
     // Manual save is now the default behavior
-
+
+    // CRITICAL FIX: Ensure IndexManager totalLines is consistent with offsets
+    // This prevents data integrity issues when database is initialized without existing data
+    if (this.indexManager && this.offsets) {
+      this.indexManager.setTotalLines(this.offsets.length)
+      if (this.opts.debugMode) {
+        console.log(`🔧 Initialized index totalLines to ${this.offsets.length}`)
+      }
+    }
+
     this.initialized = true
     this.emit('initialized')
-
+
     if (this.opts.debugMode) {
       console.log(`✅ Database initialized with ${this.writeBuffer.length} records`)
     }
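In plain terms, the new initialization block keeps the index's line counter in sync with the byte-offset table before the database reports itself ready. The invariant can be reduced to a tiny standalone sketch (the `indexManager` and `offsets` shapes below are stand-ins inferred from the hunk, not the library's real classes):

```js
// Sketch of the invariant enforced at the end of initialization:
// totalLines must equal offsets.length, otherwise index lookups can
// point past the end of the data file.
const offsets = [0, 42, 97] // byte offset of each stored line
const indexManager = {
  totalLines: 0,
  setTotalLines(n) { this.totalLines = n }
}

// Essence of the added code:
if (indexManager && offsets) {
  indexManager.setTotalLines(offsets.length)
}

console.log(indexManager.totalLines === offsets.length) // true
```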
@@ -1319,38 +1328,21 @@ class Database extends EventEmitter {
       this.pendingIndexUpdates = []
     }
 
-    // CRITICAL FIX: Flush write buffer completely after capturing snapshot
-    await this._flushWriteBufferCompletely()
-
-    // CRITICAL FIX: Wait for all I/O operations to complete before clearing writeBuffer
-    await this._waitForIOCompletion()
-
-    // CRITICAL FIX: Verify write buffer is empty after I/O completion
-    // But allow for ongoing insertions during high-volume scenarios
-    if (this.writeBuffer.length > 0) {
-      if (this.opts.debugMode) {
-        console.log(`💾 Save: WriteBuffer still has ${this.writeBuffer.length} items after flush - this may indicate ongoing insertions`)
-      }
-
-      // If we have a reasonable number of items, continue processing
-      if (this.writeBuffer.length < 10000) { // Reasonable threshold
-        if (this.opts.debugMode) {
-          console.log(`💾 Save: Continuing to process remaining ${this.writeBuffer.length} items`)
-        }
-        // Continue with the save process - the remaining items will be included in the final save
-      } else {
-        // Too many items remaining - likely a real problem
-        throw new Error(`WriteBuffer has too many items after flush: ${this.writeBuffer.length} items remaining (threshold: 10000)`)
-      }
+    // CRITICAL FIX: DO NOT flush writeBuffer before processing existing records
+    // This prevents duplicating updated records in the file.
+    // The _streamExistingRecords() will handle replacing old records with updated ones from writeBufferSnapshot.
+    // After processing, all records (existing + updated + new) will be written to file in one operation.
+    if (this.opts.debugMode) {
+      console.log(`💾 Save: writeBufferSnapshot captured with ${writeBufferSnapshot.length} records (will be processed with existing records)`)
     }
 
     // OPTIMIZATION: Parallel operations - cleanup and data preparation
     let allData = []
     let orphanedCount = 0
 
-    // Check if there are new records to save (after flush, writeBuffer should be empty)
-    // CRITICAL FIX: Also check writeBufferSnapshot.length > 0 to handle updates/deletes
-    // that were in writeBuffer before flush but are now in snapshot
+    // Check if there are records to save from writeBufferSnapshot
+    // CRITICAL FIX: Process writeBufferSnapshot records (both new and updated) with existing records
+    // Updated records will replace old ones via _streamExistingRecords, new records will be added
     if (this.opts.debugMode) {
       console.log(`💾 Save: writeBuffer.length=${this.writeBuffer.length}, writeBufferSnapshot.length=${writeBufferSnapshot.length}`)
     }
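As the new comments explain, `save()` now works from the captured `writeBufferSnapshot` and lets `_streamExistingRecords()` substitute updated records in place instead of flushing the buffer first. A simplified sketch of the duplication hazard the old flow allowed (plain arrays stand in for the actual file I/O):

```js
// Old flow (simplified): flush appends the updated record while the
// original line is still in the file, so the same id appears twice.
const fileRecords = [{ id: 1, price: 10 }, { id: 2, price: 20 }]
const writeBufferSnapshot = [{ id: 1, price: 15 }] // update to record 1

const oldFlow = [...fileRecords, ...writeBufferSnapshot]
console.log(oldFlow.length) // 3 - record 1 is duplicated

// New flow (simplified): replace by id instead of appending.
const byId = new Map(fileRecords.map(r => [String(r.id), r]))
for (const r of writeBufferSnapshot) byId.set(String(r.id), r)
console.log([...byId.values()].length) // 2 - one updated copy of record 1
```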
@@ -1397,11 +1389,49 @@ class Database extends EventEmitter {
       // CRITICAL FIX: Normalize IDs to strings for consistent comparison
       const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => String(r.id)))
 
+      // CRITICAL FIX: Create a map of records in existingRecords by ID for comparison
+      const existingRecordsById = new Map()
+      existingRecords.forEach(r => {
+        if (r && r.id) {
+          existingRecordsById.set(String(r.id), r)
+        }
+      })
+
       // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
+      // CRITICAL FIX: Also ensure that if an updated record wasn't properly replaced, we replace it now
       for (const record of writeBufferSnapshot) {
-        if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(String(record.id))) {
+        if (!record || !record.id) continue
+        if (deletedIdsSnapshot.has(String(record.id))) continue
+
+        const recordIdStr = String(record.id)
+        const existingRecord = existingRecordsById.get(recordIdStr)
+
+        if (!existingRecord) {
           // This is a new record, not an update
           allData.push(record)
+          if (this.opts.debugMode) {
+            console.log(`💾 Save: Adding NEW record to allData:`, { id: recordIdStr, price: record.price, app_id: record.app_id, currency: record.currency })
+          }
+        } else {
+          // This is an update - verify that existingRecords contains the updated version
+          // If not, replace it (this handles edge cases where substitution might have failed)
+          const existingIndex = allData.findIndex(r => r && r.id && String(r.id) === recordIdStr)
+          if (existingIndex !== -1) {
+            // Verify if the existing record is actually the updated one
+            // Compare key fields to detect if replacement is needed
+            const needsReplacement = JSON.stringify(allData[existingIndex]) !== JSON.stringify(record)
+            if (needsReplacement) {
+              if (this.opts.debugMode) {
+                console.log(`💾 Save: REPLACING existing record with updated version in allData:`, {
+                  old: { id: String(allData[existingIndex].id), price: allData[existingIndex].price },
+                  new: { id: recordIdStr, price: record.price }
+                })
+              }
+              allData[existingIndex] = record
+            } else if (this.opts.debugMode) {
+              console.log(`💾 Save: Record already correctly updated in allData:`, { id: recordIdStr })
+            }
+          }
+        }
         }
       }
     })
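This merge loop (repeated in the next hunk for the second code path) follows one rule: a snapshot record absent from `existingRecords` is appended as new, while one that is already present replaces the stale copy in `allData` when a deep comparison shows they differ. A self-contained sketch of that rule (the `price` field is only an example):

```js
// Sketch of the snapshot merge rule used above.
function mergeSnapshot(allData, writeBufferSnapshot, deletedIds) {
  const existingById = new Map(
    allData.filter(r => r && r.id).map(r => [String(r.id), r])
  )
  for (const record of writeBufferSnapshot) {
    if (!record || !record.id) continue
    const idStr = String(record.id)
    if (deletedIds.has(idStr)) continue
    if (!existingById.has(idStr)) {
      allData.push(record) // genuinely new record
    } else {
      const i = allData.findIndex(r => r && String(r.id) === idStr)
      // Replace only when the stored copy is not already the updated one
      if (i !== -1 && JSON.stringify(allData[i]) !== JSON.stringify(record)) {
        allData[i] = record
      }
    }
  }
  return allData
}

console.log(mergeSnapshot(
  [{ id: 1, price: 10 }],
  [{ id: 1, price: 15 }, { id: 2, price: 20 }],
  new Set()
)) // [ { id: 1, price: 15 }, { id: 2, price: 20 } ]
```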
@@ -1452,13 +1482,52 @@ class Database extends EventEmitter {
       allData = [...existingRecords]
 
       // OPTIMIZATION: Use Set for faster lookups of existing record IDs
-      const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => r.id))
+      // CRITICAL FIX: Normalize IDs to strings for consistent comparison
+      const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => String(r.id)))
+
+      // CRITICAL FIX: Create a map of records in existingRecords by ID for comparison
+      const existingRecordsById = new Map()
+      existingRecords.forEach(r => {
+        if (r && r.id) {
+          existingRecordsById.set(String(r.id), r)
+        }
+      })
 
       // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
+      // CRITICAL FIX: Also ensure that if an updated record wasn't properly replaced, we replace it now
       for (const record of writeBufferSnapshot) {
-        if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(record.id)) {
+        if (!record || !record.id) continue
+        if (deletedIdsSnapshot.has(String(record.id))) continue
+
+        const recordIdStr = String(record.id)
+        const existingRecord = existingRecordsById.get(recordIdStr)
+
+        if (!existingRecord) {
           // This is a new record, not an update
           allData.push(record)
+          if (this.opts.debugMode) {
+            console.log(`💾 Save: Adding NEW record to allData:`, { id: recordIdStr, price: record.price, app_id: record.app_id, currency: record.currency })
+          }
+        } else {
+          // This is an update - verify that existingRecords contains the updated version
+          // If not, replace it (this handles edge cases where substitution might have failed)
+          const existingIndex = allData.findIndex(r => r && r.id && String(r.id) === recordIdStr)
+          if (existingIndex !== -1) {
+            // Verify if the existing record is actually the updated one
+            // Compare key fields to detect if replacement is needed
+            const needsReplacement = JSON.stringify(allData[existingIndex]) !== JSON.stringify(record)
+            if (needsReplacement) {
+              if (this.opts.debugMode) {
+                console.log(`💾 Save: REPLACING existing record with updated version in allData:`, {
+                  old: { id: String(allData[existingIndex].id), price: allData[existingIndex].price },
+                  new: { id: recordIdStr, price: record.price }
+                })
+              }
+              allData[existingIndex] = record
+            } else if (this.opts.debugMode) {
+              console.log(`💾 Save: Record already correctly updated in allData:`, { id: recordIdStr })
+            }
+          }
+        }
         }
       }
 
@@ -1466,10 +1535,11 @@ class Database extends EventEmitter {
         const updatedCount = writeBufferSnapshot.filter(r => r && r.id && existingRecordIds.has(String(r.id))).length
         const newCount = writeBufferSnapshot.filter(r => r && r.id && !existingRecordIds.has(String(r.id))).length
         console.log(`💾 Save: Combined data - existingRecords: ${existingRecords.length}, updatedFromBuffer: ${updatedCount}, newFromBuffer: ${newCount}, total: ${allData.length}`)
-        console.log(`💾 Save: WriteBuffer record IDs:`, writeBufferSnapshot.map(r => r && r.id ? r.id : 'no-id'))
+        console.log(`💾 Save: WriteBuffer record IDs:`, writeBufferSnapshot.map(r => r && r.id ? String(r.id) : 'no-id'))
         console.log(`💾 Save: Existing record IDs:`, Array.from(existingRecordIds))
-        console.log(`💾 Save: Sample existing record:`, existingRecords[0] ? { id: existingRecords[0].id, name: existingRecords[0].name, tags: existingRecords[0].tags } : 'null')
-        console.log(`💾 Save: Sample writeBuffer record:`, writeBufferSnapshot[0] ? { id: writeBufferSnapshot[0].id, name: writeBufferSnapshot[0].name, tags: writeBufferSnapshot[0].tags } : 'null')
+        console.log(`💾 Save: All records in allData:`, allData.map(r => r && r.id ? { id: String(r.id), price: r.price, app_id: r.app_id, currency: r.currency } : 'no-id'))
+        console.log(`💾 Save: Sample existing record:`, existingRecords[0] ? { id: String(existingRecords[0].id), price: existingRecords[0].price, app_id: existingRecords[0].app_id, currency: existingRecords[0].currency } : 'null')
+        console.log(`💾 Save: Sample writeBuffer record:`, writeBufferSnapshot[0] ? { id: String(writeBufferSnapshot[0].id), price: writeBufferSnapshot[0].price, app_id: writeBufferSnapshot[0].app_id, currency: writeBufferSnapshot[0].currency } : 'null')
       }
     }).catch(error => {
       if (this.opts.debugMode) {
@@ -1545,6 +1615,8 @@ class Database extends EventEmitter {
 
     if (this.opts.debugMode) {
       console.log(`💾 Save: allData.length=${allData.length}, cleanedData.length=${cleanedData.length}`)
+      console.log(`💾 Save: Current offsets.length before recalculation: ${this.offsets.length}`)
+      console.log(`💾 Save: All records in allData before serialization:`, allData.map(r => r && r.id ? { id: String(r.id), price: r.price, app_id: r.app_id, currency: r.currency } : 'no-id'))
       console.log(`💾 Save: Sample cleaned record:`, cleanedData[0] ? Object.keys(cleanedData[0]) : 'null')
     }
 
@@ -1556,11 +1628,14 @@ class Database extends EventEmitter {
 
     if (this.opts.debugMode) {
      console.log(`💾 Save: Serialized ${lines.length} lines`)
+      console.log(`💾 Save: All records in allData after serialization check:`, allData.map(r => r && r.id ? { id: String(r.id), price: r.price, app_id: r.app_id, currency: r.currency } : 'no-id'))
       if (lines.length > 0) {
         console.log(`💾 Save: First line (first 200 chars):`, lines[0].substring(0, 200))
       }
     }
 
+    // CRITICAL FIX: Always recalculate offsets from serialized data to ensure consistency
+    // Even if _streamExistingRecords updated offsets, we need to recalculate based on actual serialized data
     this.offsets = []
     let currentOffset = 0
     for (let i = 0; i < lines.length; i++) {
@@ -1571,6 +1646,10 @@ class Database extends EventEmitter {
       currentOffset += Buffer.byteLength(lineWithNewline, 'utf8')
     }
 
+    if (this.opts.debugMode) {
+      console.log(`💾 Save: Recalculated offsets.length=${this.offsets.length}, should match lines.length=${lines.length}`)
+    }
+
     // CRITICAL FIX: Ensure indexOffset matches actual file size
     this.indexOffset = currentOffset
 
@@ -1578,56 +1657,9 @@ class Database extends EventEmitter {
       console.log(`💾 Save: Calculated indexOffset: ${this.indexOffset}, allData.length: ${allData.length}`)
     }
 
-    // OPTIMIZATION: Parallel operations - file writing and index data preparation
-    const parallelWriteOperations = []
-
-    // Add main file write operation
-    parallelWriteOperations.push(
-      this.fileHandler.writeBatch([jsonlData])
-    )
-
-    // Add index file operations - ALWAYS save offsets, even without indexed fields
-    if (this.indexManager) {
-      const idxPath = this.normalizedFile.replace('.jdb', '.idx.jdb')
-
-      // OPTIMIZATION: Parallel data preparation
-      const indexDataPromise = Promise.resolve({
-        index: this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0 ? this.indexManager.toJSON() : {},
-        offsets: this.offsets, // Save actual offsets for efficient file operations
-        indexOffset: this.indexOffset // Save file size for proper range calculations
-      })
-
-      // Add term mapping data if needed
-      const termMappingFields = this.getTermMappingFields()
-      if (termMappingFields.length > 0 && this.termManager) {
-        const termDataPromise = this.termManager.saveTerms()
-
-        // Combine index data and term data
-        const combinedDataPromise = Promise.all([indexDataPromise, termDataPromise]).then(([indexData, termData]) => {
-          indexData.termMapping = termData
-          return indexData
-        })
-
-        // Add index file write operation
-        parallelWriteOperations.push(
-          combinedDataPromise.then(indexData => {
-            const idxFileHandler = new FileHandler(idxPath, this.fileMutex, this.opts)
-            return idxFileHandler.writeAll(JSON.stringify(indexData, null, 2))
-          })
-        )
-      } else {
-        // Add index file write operation without term mapping
-        parallelWriteOperations.push(
-          indexDataPromise.then(indexData => {
-            const idxFileHandler = new FileHandler(idxPath, this.fileMutex, this.opts)
-            return idxFileHandler.writeAll(JSON.stringify(indexData, null, 2))
-          })
-        )
-      }
-    }
-
-    // Execute parallel write operations
-    await Promise.all(parallelWriteOperations)
+    // CRITICAL FIX: Write main data file first
+    // Index will be saved AFTER reconstruction to ensure it contains correct data
+    await this.fileHandler.writeBatch([jsonlData])
 
     if (this.opts.debugMode) {
       console.log(`💾 Saved ${allData.length} records to ${this.normalizedFile}`)
@@ -1639,11 +1671,15 @@ class Database extends EventEmitter {
     this.shouldSave = false
     this.lastSaveTime = Date.now()
 
-    // Clear writeBuffer and deletedIds after successful save only if we had data to save
-    if (allData.length > 0) {
-      // Rebuild index when records were deleted or updated to maintain consistency
+    // CRITICAL FIX: Always clear deletedIds and rebuild index if there were deletions,
+    // even if allData.length === 0 (all records were deleted)
     const hadDeletedRecords = deletedIdsSnapshot.size > 0
     const hadUpdatedRecords = writeBufferSnapshot.length > 0
+
+    // Clear writeBuffer and deletedIds after successful save
+    // Also rebuild index if records were deleted or updated, even if allData is empty
+    if (allData.length > 0 || hadDeletedRecords || hadUpdatedRecords) {
+      // Rebuild index when records were deleted or updated to maintain consistency
       if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
         if (hadDeletedRecords || hadUpdatedRecords) {
           // Clear the index and rebuild it from the saved records
@@ -1661,9 +1697,16 @@ class Database extends EventEmitter {
 
           // Rebuild index from the saved records
           // CRITICAL: Process term mapping for records loaded from file to ensure ${field}Ids are available
+          if (this.opts.debugMode) {
+            console.log(`💾 Save: Rebuilding index from ${allData.length} records in allData`)
+          }
           for (let i = 0; i < allData.length; i++) {
            let record = allData[i]
 
+            if (this.opts.debugMode && i < 3) {
+              console.log(`💾 Save: Rebuilding index record[${i}]:`, { id: String(record.id), price: record.price, app_id: record.app_id, currency: record.currency })
+            }
+
            // CRITICAL FIX: Ensure records have ${field}Ids for term mapping fields
            // Records from writeBuffer already have ${field}Ids from processTermMapping
            // Records from file need to be processed to restore ${field}Ids
@@ -1690,6 +1733,21 @@ class Database extends EventEmitter {
 
             await this.indexManager.add(record, i)
           }
+
+          // VALIDATION: Ensure index consistency after rebuild
+          // Check that all indexed records have valid line numbers
+          const indexedRecordCount = this.indexManager.getIndexedRecordCount?.() || allData.length
+          if (indexedRecordCount !== this.offsets.length) {
+            console.warn(`⚠️ Index inconsistency detected: indexed ${indexedRecordCount} records but offsets has ${this.offsets.length} entries`)
+            // Force consistency by setting totalLines to match offsets
+            this.indexManager.setTotalLines(this.offsets.length)
+          } else {
+            this.indexManager.setTotalLines(this.offsets.length)
+          }
+
+          if (this.opts.debugMode) {
+            console.log(`💾 Save: Index rebuilt with ${allData.length} records, totalLines set to ${this.offsets.length}`)
+          }
         }
       }
 
@@ -1712,6 +1770,22 @@ class Database extends EventEmitter {
       for (const deletedId of deletedIdsSnapshot) {
         this.deletedIds.delete(deletedId)
       }
+    } else if (hadDeletedRecords) {
+      // CRITICAL FIX: Even if allData is empty, clear deletedIds and rebuild index
+      // when records were deleted to ensure consistency
+      if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
+        // Clear the index since all records were deleted
+        this.indexManager.clear()
+        this.indexManager.setTotalLines(0)
+        if (this.opts.debugMode) {
+          console.log(`🧹 Cleared index after removing all ${deletedIdsSnapshot.size} deleted records`)
+        }
+      }
+
+      // Clear deletedIds even when allData is empty
+      for (const deletedId of deletedIdsSnapshot) {
+        this.deletedIds.delete(deletedId)
+      }
 
       // CRITICAL FIX: Ensure writeBuffer is completely cleared after successful save
       if (this.writeBuffer.length > 0) {
@@ -2231,6 +2305,43 @@ class Database extends EventEmitter {
     }
 
     try {
+      // INTEGRITY CHECK: Validate data consistency before querying
+      // Check if index and offsets are synchronized
+      if (this.indexManager && this.offsets && this.offsets.length > 0) {
+        const indexTotalLines = this.indexManager.totalLines || 0
+        const offsetsLength = this.offsets.length
+
+        if (indexTotalLines !== offsetsLength) {
+          console.warn(`⚠️ Data integrity issue detected: index.totalLines=${indexTotalLines}, offsets.length=${offsetsLength}`)
+          // Auto-correct by updating index totalLines to match offsets
+          this.indexManager.setTotalLines(offsetsLength)
+          if (this.opts.debugMode) {
+            console.log(`🔧 Auto-corrected index totalLines to ${offsetsLength}`)
+          }
+
+          // CRITICAL FIX: Also save the corrected index to prevent persistence of inconsistency
+          // This ensures the .idx.jdb file contains the correct totalLines value
+          try {
+            await this._saveIndexDataToFile()
+            if (this.opts.debugMode) {
+              console.log(`💾 Saved corrected index data to prevent future inconsistencies`)
+            }
+          } catch (error) {
+            if (this.opts.debugMode) {
+              console.warn(`⚠️ Failed to save corrected index: ${error.message}`)
+            }
+          }
+
+          // Verify the fix worked
+          const newIndexTotalLines = this.indexManager.totalLines || 0
+          if (newIndexTotalLines === offsetsLength) {
+            console.log(`✅ Data integrity successfully corrected: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`)
+          } else {
+            console.error(`❌ Data integrity correction failed: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`)
+          }
+        }
+      }
+
       // Validate indexed query mode if enabled
       if (this.opts.indexedQueryMode === 'strict') {
         this._validateIndexedQuery(criteria, options)
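The added guard runs before each query: if the persisted index claims a different line count than the offsets table, the count is corrected in memory and, on a best-effort basis, re-persisted before the query proceeds. A reduced sketch of that self-healing step (the `persistIndex` callback here is a hypothetical stand-in for the `_saveIndexDataToFile()` call in the hunk):

```js
// Sketch: auto-correct a stale totalLines value before querying.
async function ensureIndexConsistency(indexManager, offsets, persistIndex) {
  if (!indexManager || !offsets || offsets.length === 0) return
  const indexTotalLines = indexManager.totalLines || 0
  if (indexTotalLines === offsets.length) return

  console.warn(`index.totalLines=${indexTotalLines} != offsets.length=${offsets.length}`)
  indexManager.setTotalLines(offsets.length)
  try {
    await persistIndex() // best effort: keep the .idx.jdb file in sync too
  } catch (err) {
    console.warn(`could not persist corrected index: ${err.message}`)
  }
}

// Usage with stand-in objects:
const idx = { totalLines: 1, setTotalLines(n) { this.totalLines = n } }
await ensureIndexConsistency(idx, [0, 50, 120], async () => {})
console.log(idx.totalLines) // 3
```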
@@ -2253,36 +2364,25 @@ class Database extends EventEmitter {
 
 
       // Combine results, removing duplicates (writeBuffer takes precedence)
-      // OPTIMIZATION: Use parallel processing for better performance when writeBuffer has many records
+      // OPTIMIZATION: Unified efficient approach with consistent precedence rules
       let allResults
-      if (writeBufferResults.length > 50) {
-        // Parallel approach for large writeBuffer
-        const [fileResultsSet, writeBufferSet] = await Promise.all([
-          Promise.resolve(new Set(fileResultsWithTerms.map(r => r.id))),
-          Promise.resolve(new Set(writeBufferResultsWithTerms.map(r => r.id)))
-        ])
+
+      // Create efficient lookup map for writeBuffer records
+      const writeBufferMap = new Map()
+      writeBufferResultsWithTerms.forEach(record => {
+        if (record && record.id) {
+          writeBufferMap.set(record.id, record)
+        }
+      })
 
-        // Merge efficiently: keep file results not in writeBuffer, then add all writeBuffer results
-        const filteredFileResults = await Promise.resolve(
-          fileResultsWithTerms.filter(r => !writeBufferSet.has(r.id))
-        )
+      // Filter file results to exclude any records that exist in writeBuffer
+      // This ensures writeBuffer always takes precedence
+      const filteredFileResults = fileResultsWithTerms.filter(record =>
+        record && record.id && !writeBufferMap.has(record.id)
+      )
+
+      // Combine results: file results (filtered) + all writeBuffer results
       allResults = [...filteredFileResults, ...writeBufferResultsWithTerms]
-      } else {
-        // Sequential approach for small writeBuffer (original logic)
-        allResults = [...fileResultsWithTerms]
-
-        // Replace file records with writeBuffer records and add new writeBuffer records
-        for (const record of writeBufferResultsWithTerms) {
-          const existingIndex = allResults.findIndex(r => r.id === record.id)
-          if (existingIndex !== -1) {
-            // Replace existing record with writeBuffer version
-            allResults[existingIndex] = record
-          } else {
-            // Add new record from writeBuffer
-            allResults.push(record)
-          }
-        }
-      }
 
       // Remove records that are marked as deleted
       const finalResults = allResults.filter(record => !this.deletedIds.has(record.id))
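The rewritten merge drops the 50-record threshold and always applies the same precedence: build a Map of writeBuffer results keyed by id, drop any file result whose id is in that Map, then append every writeBuffer result. A standalone sketch of that rule:

```js
// Sketch: writeBuffer results always win over file results with the same id.
function combineResults(fileResults, writeBufferResults) {
  const writeBufferMap = new Map()
  for (const record of writeBufferResults) {
    if (record && record.id) writeBufferMap.set(record.id, record)
  }
  const filteredFileResults = fileResults.filter(
    record => record && record.id && !writeBufferMap.has(record.id)
  )
  return [...filteredFileResults, ...writeBufferResults]
}

const combined = combineResults(
  [{ id: 1, name: 'stale' }, { id: 2, name: 'kept' }],
  [{ id: 1, name: 'fresh' }]
)
console.log(combined.map(r => r.name)) // [ 'kept', 'fresh' ]
```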
@@ -2540,19 +2640,6 @@ class Database extends EventEmitter {
 
       // CRITICAL FIX: Validate state before update operation
       this.validateState()
-
-      // CRITICAL FIX: If there's data to save, call save() to persist it
-      // Only save if there are actual records in writeBuffer
-      if (this.shouldSave && this.writeBuffer.length > 0) {
-        if (this.opts.debugMode) {
-          console.log(`🔄 UPDATE: Calling save() before update - writeBuffer.length=${this.writeBuffer.length}`)
-        }
-        const saveStart = Date.now()
-        await this.save(false) // Use save(false) since we're already in queue
-        if (this.opts.debugMode) {
-          console.log(`🔄 UPDATE: Save completed in ${Date.now() - saveStart}ms`)
-        }
-      }
 
       if (this.opts.debugMode) {
         console.log(`🔄 UPDATE: Starting find() - writeBuffer=${this.writeBuffer.length}`)
@@ -2565,7 +2652,12 @@ class Database extends EventEmitter {
       }
 
       const updatedRecords = []
-
+
+      if (this.opts.debugMode) {
+        console.log(`🔄 UPDATE: About to process ${records.length} records`)
+        console.log(`🔄 UPDATE: Records:`, records.map(r => ({ id: r.id, value: r.value })))
+      }
+
       for (const record of records) {
         const recordStart = Date.now()
         if (this.opts.debugMode) {
@@ -2574,11 +2666,20 @@ class Database extends EventEmitter {
 
         const updated = { ...record, ...updateData }
 
+        // DEBUG: Log the update operation details
+        if (this.opts.debugMode) {
+          console.log(`🔄 UPDATE: Original record ID: ${record.id}, type: ${typeof record.id}`)
+          console.log(`🔄 UPDATE: Updated record ID: ${updated.id}, type: ${typeof updated.id}`)
+          console.log(`🔄 UPDATE: Update data keys:`, Object.keys(updateData))
+          console.log(`🔄 UPDATE: Updated record keys:`, Object.keys(updated))
+        }
+
         // Process term mapping for update
         const termMappingStart = Date.now()
         this.processTermMapping(updated, true, record)
         if (this.opts.debugMode) {
           console.log(`🔄 UPDATE: Term mapping completed in ${Date.now() - termMappingStart}ms`)
+          console.log(`🔄 UPDATE: After term mapping - ID: ${updated.id}, type: ${typeof updated.id}`)
         }
 
         // CRITICAL FIX: Remove old terms from index before adding new ones
@@ -2593,13 +2694,20 @@ class Database extends EventEmitter {
         // For records in the file, we need to ensure they are properly marked for replacement
         const index = this.writeBuffer.findIndex(r => r.id === record.id)
         let lineNumber = null
-
+
+        if (this.opts.debugMode) {
+          console.log(`🔄 UPDATE: writeBuffer.findIndex for ${record.id} returned ${index}`)
+          console.log(`🔄 UPDATE: writeBuffer length: ${this.writeBuffer.length}`)
+          console.log(`🔄 UPDATE: writeBuffer IDs:`, this.writeBuffer.map(r => r.id))
+        }
+
         if (index !== -1) {
           // Record is already in writeBuffer, update it
           this.writeBuffer[index] = updated
           lineNumber = this._getAbsoluteLineNumber(index)
           if (this.opts.debugMode) {
             console.log(`🔄 UPDATE: Updated existing writeBuffer record at index ${index}`)
+            console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`)
           }
         } else {
           // Record is in file, add updated version to writeBuffer
@@ -2609,6 +2717,7 @@ class Database extends EventEmitter {
           lineNumber = this._getAbsoluteLineNumber(this.writeBuffer.length - 1)
           if (this.opts.debugMode) {
             console.log(`🔄 UPDATE: Added updated record to writeBuffer (will replace file record ${record.id})`)
+            console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`)
           }
         }
 
@@ -2644,13 +2753,32 @@ class Database extends EventEmitter {
   */
   async delete(criteria) {
     this._validateInitialization('delete')
-
+
     return this.operationQueue.enqueue(async () => {
       this.isInsideOperationQueue = true
       try {
         // CRITICAL FIX: Validate state before delete operation
         this.validateState()
-
+
+        // 🔧 NEW: Validate indexed query mode for delete operations
+        if (this.opts.indexedQueryMode === 'strict') {
+          this._validateIndexedQuery(criteria, { operation: 'delete' })
+        }
+
+        // ⚠️ NEW: Warn about non-indexed fields in permissive mode
+        if (this.opts.indexedQueryMode !== 'strict') {
+          const indexedFields = Object.keys(this.opts.indexes || {})
+          const queryFields = this._extractQueryFields(criteria)
+          const nonIndexedFields = queryFields.filter(field => !indexedFields.includes(field))
+
+          if (nonIndexedFields.length > 0) {
+            if (this.opts.debugMode) {
+              console.warn(`⚠️ Delete operation using non-indexed fields: ${nonIndexedFields.join(', ')}`)
+              console.warn(` This may be slow or fail silently. Consider indexing these fields.`)
+            }
+          }
+        }
+
         const records = await this.find(criteria)
         const deletedIds = []
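With this change, `delete()` applies the same `indexedQueryMode` rules as queries: strict mode rejects criteria on non-indexed fields, and permissive mode logs a debug warning. The permissive check can be sketched as a standalone function; note that the real `_extractQueryFields()` may understand nested operators, while this simplification only looks at top-level keys:

```js
// Sketch of the permissive-mode warning: compare query fields to indexed fields.
function warnOnNonIndexedFields(criteria, indexes) {
  const indexedFields = Object.keys(indexes || {})
  const queryFields = Object.keys(criteria || {}) // simplified field extraction
  const nonIndexed = queryFields.filter(f => !indexedFields.includes(f))
  if (nonIndexed.length > 0) {
    console.warn(`Delete operation using non-indexed fields: ${nonIndexed.join(', ')}`)
  }
  return nonIndexed
}

warnOnNonIndexedFields({ currency: 'USD' }, { id: 'number', app_id: 'string' })
// warns: Delete operation using non-indexed fields: currency
```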
 
@@ -4084,14 +4212,23 @@ class Database extends EventEmitter {
     // Create a map of updated records for quick lookup
     // CRITICAL FIX: Normalize IDs to strings for consistent comparison
     const updatedRecordsMap = new Map()
-    writeBufferSnapshot.forEach(record => {
+    writeBufferSnapshot.forEach((record, index) => {
       if (record && record.id !== undefined && record.id !== null) {
         // Normalize ID to string for consistent comparison
         const normalizedId = String(record.id)
         updatedRecordsMap.set(normalizedId, record)
+        if (this.opts.debugMode) {
+          console.log(`💾 Save: Added to updatedRecordsMap: ID=${normalizedId} (original: ${record.id}, type: ${typeof record.id}), index=${index}`)
+        }
+      } else if (this.opts.debugMode) {
+        console.log(`⚠️ Save: Skipped record in writeBufferSnapshot[${index}] - missing or invalid ID:`, record ? { id: record.id, keys: Object.keys(record) } : 'null')
       }
     })
 
+    if (this.opts.debugMode) {
+      console.log(`💾 Save: updatedRecordsMap size: ${updatedRecordsMap.size}, keys:`, Array.from(updatedRecordsMap.keys()))
+    }
+
     // OPTIMIZATION: Cache file stats to avoid repeated stat() calls
     let fileSize = 0
     if (this._cachedFileStats && this._cachedFileStats.timestamp > Date.now() - 1000) {
@@ -4245,14 +4382,30 @@ class Database extends EventEmitter {
       try {
         const arrayData = JSON.parse(trimmedLine)
         if (Array.isArray(arrayData) && arrayData.length > 0) {
-          // For arrays without explicit ID, use the first element as a fallback
-          // or try to find the ID field if it exists
+          // CRITICAL FIX: Use schema to find ID position, not hardcoded position
+          // The schema defines the order of fields in the array
+          if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
+            const schema = this.serializer.schemaManager.getSchema()
+            const idIndex = schema.indexOf('id')
+            if (idIndex !== -1 && arrayData.length > idIndex) {
+              // ID is at the position defined by schema
+              recordId = arrayData[idIndex]
+            } else if (arrayData.length > schema.length) {
+              // ID might be appended after schema fields (for backward compatibility)
+              recordId = arrayData[schema.length]
+            } else {
+              // Fallback: use first element
+              recordId = arrayData[0]
+            }
+          } else {
+            // No schema available, try common positions
           if (arrayData.length > 2) {
-            // ID is typically at position 2 in array format [age, city, id, name]
+              // Try position 2 (common in older formats)
             recordId = arrayData[2]
           } else {
-            // For arrays without ID field, use first element as fallback
+              // Fallback: use first element
             recordId = arrayData[0]
+            }
           }
         }
         if (recordId !== undefined && recordId !== null) {
           recordId = String(recordId)
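Array-format lines no longer assume the id sits at a fixed position; the serializer's schema decides where it is. The lookup order — schema position, then a trailing element kept for backward compatibility, then the old positional fallback — can be captured in a small sketch:

```js
// Sketch: resolving a record id from an array-format line.
function resolveRecordId(arrayData, schema) {
  if (schema && schema.length > 0) {
    const idIndex = schema.indexOf('id')
    if (idIndex !== -1 && arrayData.length > idIndex) return arrayData[idIndex]
    if (arrayData.length > schema.length) return arrayData[schema.length] // appended id
    return arrayData[0]
  }
  // No schema available: fall back to the historical positions
  return arrayData.length > 2 ? arrayData[2] : arrayData[0]
}

console.log(resolveRecordId([34, 'Rio', 7, 'Ana'], ['age', 'city', 'id', 'name'])) // 7
console.log(resolveRecordId([34, 'Rio', 7, 'Ana'], null))                          // 7 (position 2)
```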
@@ -4301,11 +4454,20 @@ class Database extends EventEmitter {
 
       // CRITICAL FIX: Normalize ID to string for consistent comparison
       const normalizedId = String(recordWithIds.id)
+      if (this.opts.debugMode) {
+        console.log(`💾 Save: Checking record ID=${normalizedId} (original: ${recordWithIds.id}, type: ${typeof recordWithIds.id}) in updatedRecordsMap`)
+        console.log(`💾 Save: updatedRecordsMap.has(${normalizedId}): ${updatedRecordsMap.has(normalizedId)}`)
+        if (!updatedRecordsMap.has(normalizedId)) {
+          console.log(`💾 Save: Record ${normalizedId} NOT found in updatedRecordsMap. Available keys:`, Array.from(updatedRecordsMap.keys()))
+        }
+      }
       if (updatedRecordsMap.has(normalizedId)) {
         // Replace with updated version
         const updatedRecord = updatedRecordsMap.get(normalizedId)
         if (this.opts.debugMode) {
-          console.log(`💾 Save: Updated record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`)
+          console.log(`💾 Save: REPLACING record ${recordWithIds.id} with updated version`)
+          console.log(`💾 Save: Old record:`, { id: recordWithIds.id, price: recordWithIds.price, app_id: recordWithIds.app_id, currency: recordWithIds.currency })
+          console.log(`💾 Save: New record:`, { id: updatedRecord.id, price: updatedRecord.price, app_id: updatedRecord.app_id, currency: updatedRecord.currency })
         }
         return {
           type: 'updated',
@@ -4316,7 +4478,7 @@ class Database extends EventEmitter {
       } else if (!deletedIdsSnapshot.has(String(recordWithIds.id))) {
         // Keep existing record if not deleted
         if (this.opts.debugMode) {
-          console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`)
+          console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - not in deletedIdsSnapshot`)
         }
         return {
           type: 'kept',
@@ -4327,7 +4489,9 @@ class Database extends EventEmitter {
       } else {
         // Skip deleted record
         if (this.opts.debugMode) {
-          console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted`)
+          console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted (found in deletedIdsSnapshot)`)
+          console.log(`💾 Save: deletedIdsSnapshot contains:`, Array.from(deletedIdsSnapshot))
+          console.log(`💾 Save: Record ID check: String(${recordWithIds.id}) = "${String(recordWithIds.id)}", has() = ${deletedIdsSnapshot.has(String(recordWithIds.id))}`)
         }
         return {
           type: 'deleted',
@@ -4373,6 +4537,54 @@ class Database extends EventEmitter {
 
       switch (result.type) {
         case 'unchanged':
+          // CRITICAL FIX: Verify that unchanged records are not deleted
+          // Extract ID from the line to check against deletedIdsSnapshot
+          let unchangedRecordId = null
+          try {
+            if (result.line.startsWith('[') && result.line.endsWith(']')) {
+              const arrayData = JSON.parse(result.line)
+              if (Array.isArray(arrayData) && arrayData.length > 0) {
+                // CRITICAL FIX: Use schema to find ID position, not hardcoded position
+                if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
+                  const schema = this.serializer.schemaManager.getSchema()
+                  const idIndex = schema.indexOf('id')
+                  if (idIndex !== -1 && arrayData.length > idIndex) {
+                    unchangedRecordId = String(arrayData[idIndex])
+                  } else if (arrayData.length > schema.length) {
+                    unchangedRecordId = String(arrayData[schema.length])
+                  } else {
+                    unchangedRecordId = String(arrayData[0])
+                  }
+                } else {
+                  // No schema, try common positions
+                  if (arrayData.length > 2) {
+                    unchangedRecordId = String(arrayData[2])
+                  } else {
+                    unchangedRecordId = String(arrayData[0])
+                  }
+                }
+              }
+            } else {
+              const obj = JSON.parse(result.line)
+              unchangedRecordId = obj.id ? String(obj.id) : null
+            }
+          } catch (e) {
+            // If we can't parse, skip this record to be safe
+            if (this.opts.debugMode) {
+              console.log(`💾 Save: Could not parse unchanged record to check deletion: ${e.message}`)
+            }
+            continue
+          }
+
+          // Skip if this record is deleted
+          if (unchangedRecordId && deletedIdsSnapshot.has(unchangedRecordId)) {
+            if (this.opts.debugMode) {
+              console.log(`💾 Save: Skipping unchanged record ${unchangedRecordId} - deleted`)
+            }
+            deletedOffsets.add(offset)
+            break
+          }
+
           // Collect unchanged lines for batch processing
           unchangedLines.push(result.line)
           keptRecords.push({ offset, type: 'unchanged', line: result.line })