jexidb 2.1.3 → 2.1.4

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/dist/Database.cjs CHANGED
@@ -1241,11 +1241,11 @@ class IndexManager {
  // This will be handled by the QueryManager's streaming strategy
  continue;
  }
- if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue)) {
+ if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue) && criteriaValue !== null) {
  const fieldIndex = data[field];

  // Handle $in operator for array queries
- if (criteriaValue.$in !== undefined) {
+ if (criteriaValue.$in !== undefined && criteriaValue.$in !== null) {
  const inValues = Array.isArray(criteriaValue.$in) ? criteriaValue.$in : [criteriaValue.$in];

  // PERFORMANCE: Cache term mapping field check once
@@ -2137,6 +2137,14 @@ class IndexManager {
  // Keep the current index with initialized fields
  return;
  }
+
+ // Restore totalLines from saved data
+ if (index.totalLines !== undefined) {
+ this.totalLines = index.totalLines;
+ if (this.opts.debugMode) {
+ console.log(`🔍 IndexManager.load: Restored totalLines=${this.totalLines}`);
+ }
+ }
  this.index = processedIndex;
  }

@@ -2177,7 +2185,8 @@ class IndexManager {
  */
  toJSON() {
  const serializable = {
- data: {}
+ data: {},
+ totalLines: this.totalLines
  };

  // Check if this is a term mapping field for conversion
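Taken together, the two IndexManager hunks above make totalLines part of the persisted index: toJSON() now writes it next to the per-field data, and load() picks it back up when the saved index is read. A minimal sketch of that round trip, using a plain object in place of a real IndexManager instance (illustrative only, not code from the package):

    // Serialized index shape now carries totalLines alongside the per-field data
    const serialized = JSON.stringify({ data: { id: {}, name: {} }, totalLines: 3 });
    // On load, totalLines is restored instead of being left at its default
    const index = JSON.parse(serialized);
    if (index.totalLines !== undefined) {
      console.log(`restored totalLines = ${index.totalLines}`); // restored totalLines = 3
    }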
@@ -2408,35 +2417,10 @@ class SchemaManager {
  const obj = {};
  const idIndex = this.schema.indexOf('id');

- // CRITICAL FIX: Handle schema migration where 'id' was first field in old schema
- // but is not in current schema. Check if first element looks like an ID.
- // Only do this if:
- // 1. 'id' is not in current schema
- // 2. Array has significantly more elements than current schema (2+ extra elements)
- // This suggests the old schema had more fields, and 'id' was likely the first
- // 3. First element is a very short string (max 20 chars) that looks like a generated ID
- // (typically alphanumeric, often starting with letters like 'mit...' or similar patterns)
- // 4. First field in current schema is not 'id' (to avoid false positives)
- // 5. First element is not an array (to avoid false positives with array fields)
+ // DISABLED: Schema migration detection was causing field mapping corruption
+ // The logic was incorrectly assuming ID was in first position when it's appended at the end
+ // This caused fields to be shifted incorrectly during object-to-array-to-object conversion
  let arrayOffset = 0;
- if (idIndex === -1 && arr.length >= this.schema.length + 2 && this.schema.length > 0) {
- // Only apply if array has at least 2 extra elements (suggests old schema had more fields)
- const firstElement = arr[0];
- const firstFieldName = this.schema[0];
-
- // Only apply shift if:
- // - First field is not 'id'
- // - First element is a very short string (max 20 chars) that looks like a generated ID
- // - First element is not an array (to avoid false positives)
- // - Array has at least 2 extra elements (strong indicator of schema migration)
- if (firstFieldName !== 'id' && typeof firstElement === 'string' && !Array.isArray(firstElement) && firstElement.length > 0 && firstElement.length <= 20 &&
- // Very conservative: max 20 chars (typical ID length)
- /^[a-zA-Z0-9_-]+$/.test(firstElement)) {
- // First element is likely the ID from old schema
- obj.id = firstElement;
- arrayOffset = 1;
- }
- }

  // Map array values to object properties
  // Only include fields that are in the schema
@@ -6042,7 +6026,7 @@ class QueryManager {
  }
  return false;
  }
- if (typeof condition === 'object' && !Array.isArray(condition)) {
+ if (typeof condition === 'object' && !Array.isArray(condition) && condition !== null) {
  const operators = Object.keys(condition).map(op => normalizeOperator(op));
  if (this.opts.debugMode) {
  console.log(`🔍 Field '${field}' has operators:`, operators);
@@ -6339,7 +6323,7 @@ class QueryManager {
  if (field.startsWith('$')) continue;
  if (termMappingFields.includes(field)) {
  // For term mapping fields, simple equality or $in queries work well
- if (typeof condition === 'string' || typeof condition === 'object' && condition.$in && Array.isArray(condition.$in)) {
+ if (typeof condition === 'string' || typeof condition === 'object' && condition !== null && condition.$in && Array.isArray(condition.$in)) {
  return true;
  }
  }
@@ -8237,6 +8221,14 @@ class Database extends events.EventEmitter {

  // Manual save is now the default behavior

+ // CRITICAL FIX: Ensure IndexManager totalLines is consistent with offsets
+ // This prevents data integrity issues when database is initialized without existing data
+ if (this.indexManager && this.offsets) {
+ this.indexManager.setTotalLines(this.offsets.length);
+ if (this.opts.debugMode) {
+ console.log(`🔧 Initialized index totalLines to ${this.offsets.length}`);
+ }
+ }
  this.initialized = true;
  this.emit('initialized');
  if (this.opts.debugMode) {
@@ -9042,6 +9034,7 @@ class Database extends events.EventEmitter {
  });
  if (this.opts.debugMode) {
  console.log(`💾 Save: allData.length=${allData.length}, cleanedData.length=${cleanedData.length}`);
+ console.log(`💾 Save: Current offsets.length before recalculation: ${this.offsets.length}`);
  console.log(`💾 Save: All records in allData before serialization:`, allData.map(r => r && r.id ? {
  id: String(r.id),
  price: r.price,
@@ -9065,6 +9058,9 @@ class Database extends events.EventEmitter {
  console.log(`💾 Save: First line (first 200 chars):`, lines[0].substring(0, 200));
  }
  }
+
+ // CRITICAL FIX: Always recalculate offsets from serialized data to ensure consistency
+ // Even if _streamExistingRecords updated offsets, we need to recalculate based on actual serialized data
  this.offsets = [];
  let currentOffset = 0;
  for (let i = 0; i < lines.length; i++) {
@@ -9074,6 +9070,9 @@ class Database extends events.EventEmitter {
  const lineWithNewline = lines[i] + '\n';
  currentOffset += Buffer.byteLength(lineWithNewline, 'utf8');
  }
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Recalculated offsets.length=${this.offsets.length}, should match lines.length=${lines.length}`);
+ }

  // CRITICAL FIX: Ensure indexOffset matches actual file size
  this.indexOffset = currentOffset;
@@ -9093,11 +9092,15 @@ class Database extends events.EventEmitter {
  this.shouldSave = false;
  this.lastSaveTime = Date.now();

- // Clear writeBuffer and deletedIds after successful save only if we had data to save
- if (allData.length > 0) {
+ // CRITICAL FIX: Always clear deletedIds and rebuild index if there were deletions,
+ // even if allData.length === 0 (all records were deleted)
+ const hadDeletedRecords = deletedIdsSnapshot.size > 0;
+ const hadUpdatedRecords = writeBufferSnapshot.length > 0;
+
+ // Clear writeBuffer and deletedIds after successful save
+ // Also rebuild index if records were deleted or updated, even if allData is empty
+ if (allData.length > 0 || hadDeletedRecords || hadUpdatedRecords) {
  // Rebuild index when records were deleted or updated to maintain consistency
- const hadDeletedRecords = deletedIdsSnapshot.size > 0;
- const hadUpdatedRecords = writeBufferSnapshot.length > 0;
  if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
  if (hadDeletedRecords || hadUpdatedRecords) {
  // Clear the index and rebuild it from the saved records
@@ -9154,8 +9157,19 @@ class Database extends events.EventEmitter {
  }
  await this.indexManager.add(record, i);
  }
+
+ // VALIDATION: Ensure index consistency after rebuild
+ // Check that all indexed records have valid line numbers
+ const indexedRecordCount = this.indexManager.getIndexedRecordCount?.() || allData.length;
+ if (indexedRecordCount !== this.offsets.length) {
+ console.warn(`⚠️ Index inconsistency detected: indexed ${indexedRecordCount} records but offsets has ${this.offsets.length} entries`);
+ // Force consistency by setting totalLines to match offsets
+ this.indexManager.setTotalLines(this.offsets.length);
+ } else {
+ this.indexManager.setTotalLines(this.offsets.length);
+ }
  if (this.opts.debugMode) {
- console.log(`💾 Save: Index rebuilt with ${allData.length} records`);
+ console.log(`💾 Save: Index rebuilt with ${allData.length} records, totalLines set to ${this.offsets.length}`);
  }
  }
  }
@@ -9176,6 +9190,22 @@ class Database extends events.EventEmitter {
  for (const deletedId of deletedIdsSnapshot) {
  this.deletedIds.delete(deletedId);
  }
+ } else if (hadDeletedRecords) {
+ // CRITICAL FIX: Even if allData is empty, clear deletedIds and rebuild index
+ // when records were deleted to ensure consistency
+ if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
+ // Clear the index since all records were deleted
+ this.indexManager.clear();
+ this.indexManager.setTotalLines(0);
+ if (this.opts.debugMode) {
+ console.log(`🧹 Cleared index after removing all ${deletedIdsSnapshot.size} deleted records`);
+ }
+ }
+
+ // Clear deletedIds even when allData is empty
+ for (const deletedId of deletedIdsSnapshot) {
+ this.deletedIds.delete(deletedId);
+ }

  // CRITICAL FIX: Ensure writeBuffer is completely cleared after successful save
  if (this.writeBuffer.length > 0) {
@@ -9675,6 +9705,42 @@ class Database extends events.EventEmitter {
  console.log(`🔍 FIND START: criteria=${JSON.stringify(criteria)}, writeBuffer=${this.writeBuffer.length}`);
  }
  try {
+ // INTEGRITY CHECK: Validate data consistency before querying
+ // Check if index and offsets are synchronized
+ if (this.indexManager && this.offsets && this.offsets.length > 0) {
+ const indexTotalLines = this.indexManager.totalLines || 0;
+ const offsetsLength = this.offsets.length;
+ if (indexTotalLines !== offsetsLength) {
+ console.warn(`⚠️ Data integrity issue detected: index.totalLines=${indexTotalLines}, offsets.length=${offsetsLength}`);
+ // Auto-correct by updating index totalLines to match offsets
+ this.indexManager.setTotalLines(offsetsLength);
+ if (this.opts.debugMode) {
+ console.log(`🔧 Auto-corrected index totalLines to ${offsetsLength}`);
+ }
+
+ // CRITICAL FIX: Also save the corrected index to prevent persistence of inconsistency
+ // This ensures the .idx.jdb file contains the correct totalLines value
+ try {
+ await this._saveIndexDataToFile();
+ if (this.opts.debugMode) {
+ console.log(`💾 Saved corrected index data to prevent future inconsistencies`);
+ }
+ } catch (error) {
+ if (this.opts.debugMode) {
+ console.warn(`⚠️ Failed to save corrected index: ${error.message}`);
+ }
+ }
+
+ // Verify the fix worked
+ const newIndexTotalLines = this.indexManager.totalLines || 0;
+ if (newIndexTotalLines === offsetsLength) {
+ console.log(`✅ Data integrity successfully corrected: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`);
+ } else {
+ console.error(`❌ Data integrity correction failed: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`);
+ }
+ }
+ }
+
  // Validate indexed query mode if enabled
  if (this.opts.indexedQueryMode === 'strict') {
  this._validateIndexedQuery(criteria, options);
@@ -9691,31 +9757,23 @@ class Database extends events.EventEmitter {
  const writeBufferResultsWithTerms = options.restoreTerms !== false ? writeBufferResults.map(record => this.restoreTermIdsAfterDeserialization(record)) : writeBufferResults;

  // Combine results, removing duplicates (writeBuffer takes precedence)
- // OPTIMIZATION: Use parallel processing for better performance when writeBuffer has many records
+ // OPTIMIZATION: Unified efficient approach with consistent precedence rules
  let allResults;
- if (writeBufferResults.length > 50) {
- // Parallel approach for large writeBuffer
- const [fileResultsSet, writeBufferSet] = await Promise.all([Promise.resolve(new Set(fileResultsWithTerms.map(r => r.id))), Promise.resolve(new Set(writeBufferResultsWithTerms.map(r => r.id)))]);

- // Merge efficiently: keep file results not in writeBuffer, then add all writeBuffer results
- const filteredFileResults = await Promise.resolve(fileResultsWithTerms.filter(r => !writeBufferSet.has(r.id)));
- allResults = [...filteredFileResults, ...writeBufferResultsWithTerms];
- } else {
- // Sequential approach for small writeBuffer (original logic)
- allResults = [...fileResultsWithTerms];
-
- // Replace file records with writeBuffer records and add new writeBuffer records
- for (const record of writeBufferResultsWithTerms) {
- const existingIndex = allResults.findIndex(r => r.id === record.id);
- if (existingIndex !== -1) {
- // Replace existing record with writeBuffer version
- allResults[existingIndex] = record;
- } else {
- // Add new record from writeBuffer
- allResults.push(record);
- }
+ // Create efficient lookup map for writeBuffer records
+ const writeBufferMap = new Map();
+ writeBufferResultsWithTerms.forEach(record => {
+ if (record && record.id) {
+ writeBufferMap.set(record.id, record);
  }
- }
+ });
+
+ // Filter file results to exclude any records that exist in writeBuffer
+ // This ensures writeBuffer always takes precedence
+ const filteredFileResults = fileResultsWithTerms.filter(record => record && record.id && !writeBufferMap.has(record.id));
+
+ // Combine results: file results (filtered) + all writeBuffer results
+ allResults = [...filteredFileResults, ...writeBufferResultsWithTerms];

  // Remove records that are marked as deleted
  const finalResults = allResults.filter(record => !this.deletedIds.has(record.id));
@@ -9963,19 +10021,6 @@ class Database extends events.EventEmitter {

  // CRITICAL FIX: Validate state before update operation
  this.validateState();
-
- // CRITICAL FIX: If there's data to save, call save() to persist it
- // Only save if there are actual records in writeBuffer
- if (this.shouldSave && this.writeBuffer.length > 0) {
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Calling save() before update - writeBuffer.length=${this.writeBuffer.length}`);
- }
- const saveStart = Date.now();
- await this.save(false); // Use save(false) since we're already in queue
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Save completed in ${Date.now() - saveStart}ms`);
- }
- }
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Starting find() - writeBuffer=${this.writeBuffer.length}`);
  }
@@ -9988,6 +10033,13 @@ class Database extends events.EventEmitter {
  console.log(`🔄 UPDATE: Find completed in ${Date.now() - findStart}ms, found ${records.length} records`);
  }
  const updatedRecords = [];
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: About to process ${records.length} records`);
+ console.log(`🔄 UPDATE: Records:`, records.map(r => ({
+ id: r.id,
+ value: r.value
+ })));
+ }
  for (const record of records) {
  const recordStart = Date.now();
  if (this.opts.debugMode) {
@@ -10026,12 +10078,18 @@ class Database extends events.EventEmitter {
  // For records in the file, we need to ensure they are properly marked for replacement
  const index = this.writeBuffer.findIndex(r => r.id === record.id);
  let lineNumber = null;
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: writeBuffer.findIndex for ${record.id} returned ${index}`);
+ console.log(`🔄 UPDATE: writeBuffer length: ${this.writeBuffer.length}`);
+ console.log(`🔄 UPDATE: writeBuffer IDs:`, this.writeBuffer.map(r => r.id));
+ }
  if (index !== -1) {
  // Record is already in writeBuffer, update it
  this.writeBuffer[index] = updated;
  lineNumber = this._getAbsoluteLineNumber(index);
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Updated existing writeBuffer record at index ${index}`);
+ console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`);
  }
  } else {
  // Record is in file, add updated version to writeBuffer
@@ -10041,6 +10099,7 @@ class Database extends events.EventEmitter {
  lineNumber = this._getAbsoluteLineNumber(this.writeBuffer.length - 1);
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Added updated record to writeBuffer (will replace file record ${record.id})`);
+ console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`);
  }
  }
  const indexUpdateStart = Date.now();
@@ -10076,6 +10135,26 @@ class Database extends events.EventEmitter {
  try {
  // CRITICAL FIX: Validate state before delete operation
  this.validateState();
+
+ // 🔧 NEW: Validate indexed query mode for delete operations
+ if (this.opts.indexedQueryMode === 'strict') {
+ this._validateIndexedQuery(criteria, {
+ operation: 'delete'
+ });
+ }
+
+ // ⚠️ NEW: Warn about non-indexed fields in permissive mode
+ if (this.opts.indexedQueryMode !== 'strict') {
+ const indexedFields = Object.keys(this.opts.indexes || {});
+ const queryFields = this._extractQueryFields(criteria);
+ const nonIndexedFields = queryFields.filter(field => !indexedFields.includes(field));
+ if (nonIndexedFields.length > 0) {
+ if (this.opts.debugMode) {
+ console.warn(`⚠️ Delete operation using non-indexed fields: ${nonIndexedFields.join(', ')}`);
+ console.warn(` This may be slow or fail silently. Consider indexing these fields.`);
+ }
+ }
+ }
  const records = await this.find(criteria);
  const deletedIds = [];
  if (this.opts.debugMode) {
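The delete() additions above mirror the validation find() already performs: strict mode routes the criteria through _validateIndexedQuery, while permissive mode only warns about non-indexed fields. A rough standalone sketch of the permissive-mode check, with indexedFields and queryFields standing in for the values the real code derives from this.opts.indexes and _extractQueryFields (sample values are hypothetical):

    const indexedFields = ['id', 'name'];   // stand-in for Object.keys(this.opts.indexes || {})
    const queryFields = ['name', 'price'];  // stand-in for this._extractQueryFields(criteria)
    const nonIndexedFields = queryFields.filter(field => !indexedFields.includes(field));
    if (nonIndexedFields.length > 0) {
      console.warn(`Delete operation using non-indexed fields: ${nonIndexedFields.join(', ')}`); // warns about 'price'
    }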
@@ -11628,14 +11707,30 @@ class Database extends events.EventEmitter {
  try {
  const arrayData = JSON.parse(trimmedLine);
  if (Array.isArray(arrayData) && arrayData.length > 0) {
- // For arrays without explicit ID, use the first element as a fallback
- // or try to find the ID field if it exists
- if (arrayData.length > 2) {
- // ID is typically at position 2 in array format [age, city, id, name]
- recordId = arrayData[2];
+ // CRITICAL FIX: Use schema to find ID position, not hardcoded position
+ // The schema defines the order of fields in the array
+ if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
+ const schema = this.serializer.schemaManager.getSchema();
+ const idIndex = schema.indexOf('id');
+ if (idIndex !== -1 && arrayData.length > idIndex) {
+ // ID is at the position defined by schema
+ recordId = arrayData[idIndex];
+ } else if (arrayData.length > schema.length) {
+ // ID might be appended after schema fields (for backward compatibility)
+ recordId = arrayData[schema.length];
+ } else {
+ // Fallback: use first element
+ recordId = arrayData[0];
+ }
  } else {
- // For arrays without ID field, use first element as fallback
- recordId = arrayData[0];
+ // No schema available, try common positions
+ if (arrayData.length > 2) {
+ // Try position 2 (common in older formats)
+ recordId = arrayData[2];
+ } else {
+ // Fallback: use first element
+ recordId = arrayData[0];
+ }
  }
  if (recordId !== undefined && recordId !== null) {
  recordId = String(recordId);
@@ -11717,7 +11812,7 @@ class Database extends events.EventEmitter {
  } else if (!deletedIdsSnapshot.has(String(recordWithIds.id))) {
  // Keep existing record if not deleted
  if (this.opts.debugMode) {
- console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`);
+ console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - not in deletedIdsSnapshot`);
  }
  return {
  type: 'kept',
@@ -11728,7 +11823,9 @@ class Database extends events.EventEmitter {
  } else {
  // Skip deleted record
  if (this.opts.debugMode) {
- console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted`);
+ console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted (found in deletedIdsSnapshot)`);
+ console.log(`💾 Save: deletedIdsSnapshot contains:`, Array.from(deletedIdsSnapshot));
+ console.log(`💾 Save: Record ID check: String(${recordWithIds.id}) = "${String(recordWithIds.id)}", has() = ${deletedIdsSnapshot.has(String(recordWithIds.id))}`);
  }
  return {
  type: 'deleted',
@@ -11771,6 +11868,54 @@ class Database extends events.EventEmitter {
  const offset = parseInt(rangeKey);
  switch (result.type) {
  case 'unchanged':
+ // CRITICAL FIX: Verify that unchanged records are not deleted
+ // Extract ID from the line to check against deletedIdsSnapshot
+ let unchangedRecordId = null;
+ try {
+ if (result.line.startsWith('[') && result.line.endsWith(']')) {
+ const arrayData = JSON.parse(result.line);
+ if (Array.isArray(arrayData) && arrayData.length > 0) {
+ // CRITICAL FIX: Use schema to find ID position, not hardcoded position
+ if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
+ const schema = this.serializer.schemaManager.getSchema();
+ const idIndex = schema.indexOf('id');
+ if (idIndex !== -1 && arrayData.length > idIndex) {
+ unchangedRecordId = String(arrayData[idIndex]);
+ } else if (arrayData.length > schema.length) {
+ unchangedRecordId = String(arrayData[schema.length]);
+ } else {
+ unchangedRecordId = String(arrayData[0]);
+ }
+ } else {
+ // No schema, try common positions
+ if (arrayData.length > 2) {
+ unchangedRecordId = String(arrayData[2]);
+ } else {
+ unchangedRecordId = String(arrayData[0]);
+ }
+ }
+ }
+ } else {
+ const obj = JSON.parse(result.line);
+ unchangedRecordId = obj.id ? String(obj.id) : null;
+ }
+ } catch (e) {
+ // If we can't parse, skip this record to be safe
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Could not parse unchanged record to check deletion: ${e.message}`);
+ }
+ continue;
+ }
+
+ // Skip if this record is deleted
+ if (unchangedRecordId && deletedIdsSnapshot.has(unchangedRecordId)) {
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Skipping unchanged record ${unchangedRecordId} - deleted`);
+ }
+ deletedOffsets.add(offset);
+ break;
+ }
+
  // Collect unchanged lines for batch processing
  unchangedLines.push(result.line);
  keptRecords.push({
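A recurring theme across these hunks is guarding against null criteria values: in JavaScript typeof null === 'object', so a criteria such as { field: null } previously fell into the operator-object branch and could then fail when .$in was read off it. A short standalone illustration of why the added !== null checks change the outcome (not code from the package):

    const criteriaValue = null;
    // Old check: matches null, because typeof null === 'object'
    console.log(typeof criteriaValue === 'object' && !Array.isArray(criteriaValue));                           // true
    // New check: the explicit null guard rejects it before any .$in access
    console.log(typeof criteriaValue === 'object' && !Array.isArray(criteriaValue) && criteriaValue !== null); // false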
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "jexidb",
- "version": "2.1.3",
+ "version": "2.1.4",
  "type": "module",
  "description": "JexiDB is a pure JS NPM library for managing data on disk efficiently, without the need for a server.",
  "main": "./dist/Database.cjs",
@@ -36,15 +36,9 @@
  "rollup": "^4.48.1"
  },
  "dependencies": {
- "@valentech/sializer": "^0.3.9",
  "async-mutex": "^0.5.0",
  "p-limit": "^6.1.0"
  },
- "optionalDependencies": {
- "fast-deep-equal": "^3.1.3",
- "fast-json-stringify": "^5.7.0",
- "msgpack": "^1.0.3"
- },
  "directories": {
  "test": "test"
  },
package/src/Database.mjs CHANGED
@@ -770,10 +770,19 @@ class Database extends EventEmitter {
  }

  // Manual save is now the default behavior
-
+
+ // CRITICAL FIX: Ensure IndexManager totalLines is consistent with offsets
+ // This prevents data integrity issues when database is initialized without existing data
+ if (this.indexManager && this.offsets) {
+ this.indexManager.setTotalLines(this.offsets.length)
+ if (this.opts.debugMode) {
+ console.log(`🔧 Initialized index totalLines to ${this.offsets.length}`)
+ }
+ }
+
  this.initialized = true
  this.emit('initialized')
-
+
  if (this.opts.debugMode) {
  console.log(`✅ Database initialized with ${this.writeBuffer.length} records`)
  }
@@ -1606,6 +1615,7 @@ class Database extends EventEmitter {

  if (this.opts.debugMode) {
  console.log(`💾 Save: allData.length=${allData.length}, cleanedData.length=${cleanedData.length}`)
+ console.log(`💾 Save: Current offsets.length before recalculation: ${this.offsets.length}`)
  console.log(`💾 Save: All records in allData before serialization:`, allData.map(r => r && r.id ? { id: String(r.id), price: r.price, app_id: r.app_id, currency: r.currency } : 'no-id'))
  console.log(`💾 Save: Sample cleaned record:`, cleanedData[0] ? Object.keys(cleanedData[0]) : 'null')
  }
@@ -1624,6 +1634,8 @@ class Database extends EventEmitter {
  }
  }

+ // CRITICAL FIX: Always recalculate offsets from serialized data to ensure consistency
+ // Even if _streamExistingRecords updated offsets, we need to recalculate based on actual serialized data
  this.offsets = []
  let currentOffset = 0
  for (let i = 0; i < lines.length; i++) {
@@ -1634,6 +1646,10 @@ class Database extends EventEmitter {
  currentOffset += Buffer.byteLength(lineWithNewline, 'utf8')
  }

+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Recalculated offsets.length=${this.offsets.length}, should match lines.length=${lines.length}`)
+ }
+
  // CRITICAL FIX: Ensure indexOffset matches actual file size
  this.indexOffset = currentOffset

@@ -1655,11 +1671,15 @@ class Database extends EventEmitter {
  this.shouldSave = false
  this.lastSaveTime = Date.now()

- // Clear writeBuffer and deletedIds after successful save only if we had data to save
- if (allData.length > 0) {
- // Rebuild index when records were deleted or updated to maintain consistency
+ // CRITICAL FIX: Always clear deletedIds and rebuild index if there were deletions,
+ // even if allData.length === 0 (all records were deleted)
  const hadDeletedRecords = deletedIdsSnapshot.size > 0
  const hadUpdatedRecords = writeBufferSnapshot.length > 0
+
+ // Clear writeBuffer and deletedIds after successful save
+ // Also rebuild index if records were deleted or updated, even if allData is empty
+ if (allData.length > 0 || hadDeletedRecords || hadUpdatedRecords) {
+ // Rebuild index when records were deleted or updated to maintain consistency
  if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
  if (hadDeletedRecords || hadUpdatedRecords) {
  // Clear the index and rebuild it from the saved records
@@ -1713,8 +1733,20 @@ class Database extends EventEmitter {

  await this.indexManager.add(record, i)
  }
+
+ // VALIDATION: Ensure index consistency after rebuild
+ // Check that all indexed records have valid line numbers
+ const indexedRecordCount = this.indexManager.getIndexedRecordCount?.() || allData.length
+ if (indexedRecordCount !== this.offsets.length) {
+ console.warn(`⚠️ Index inconsistency detected: indexed ${indexedRecordCount} records but offsets has ${this.offsets.length} entries`)
+ // Force consistency by setting totalLines to match offsets
+ this.indexManager.setTotalLines(this.offsets.length)
+ } else {
+ this.indexManager.setTotalLines(this.offsets.length)
+ }
+
  if (this.opts.debugMode) {
- console.log(`💾 Save: Index rebuilt with ${allData.length} records`)
+ console.log(`💾 Save: Index rebuilt with ${allData.length} records, totalLines set to ${this.offsets.length}`)
  }
  }
  }
@@ -1738,6 +1770,22 @@ class Database extends EventEmitter {
  for (const deletedId of deletedIdsSnapshot) {
  this.deletedIds.delete(deletedId)
  }
+ } else if (hadDeletedRecords) {
+ // CRITICAL FIX: Even if allData is empty, clear deletedIds and rebuild index
+ // when records were deleted to ensure consistency
+ if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
+ // Clear the index since all records were deleted
+ this.indexManager.clear()
+ this.indexManager.setTotalLines(0)
+ if (this.opts.debugMode) {
+ console.log(`🧹 Cleared index after removing all ${deletedIdsSnapshot.size} deleted records`)
+ }
+ }
+
+ // Clear deletedIds even when allData is empty
+ for (const deletedId of deletedIdsSnapshot) {
+ this.deletedIds.delete(deletedId)
+ }

  // CRITICAL FIX: Ensure writeBuffer is completely cleared after successful save
  if (this.writeBuffer.length > 0) {
@@ -2257,6 +2305,43 @@ class Database extends EventEmitter {
  }

  try {
+ // INTEGRITY CHECK: Validate data consistency before querying
+ // Check if index and offsets are synchronized
+ if (this.indexManager && this.offsets && this.offsets.length > 0) {
+ const indexTotalLines = this.indexManager.totalLines || 0
+ const offsetsLength = this.offsets.length
+
+ if (indexTotalLines !== offsetsLength) {
+ console.warn(`⚠️ Data integrity issue detected: index.totalLines=${indexTotalLines}, offsets.length=${offsetsLength}`)
+ // Auto-correct by updating index totalLines to match offsets
+ this.indexManager.setTotalLines(offsetsLength)
+ if (this.opts.debugMode) {
+ console.log(`🔧 Auto-corrected index totalLines to ${offsetsLength}`)
+ }
+
+ // CRITICAL FIX: Also save the corrected index to prevent persistence of inconsistency
+ // This ensures the .idx.jdb file contains the correct totalLines value
+ try {
+ await this._saveIndexDataToFile()
+ if (this.opts.debugMode) {
+ console.log(`💾 Saved corrected index data to prevent future inconsistencies`)
+ }
+ } catch (error) {
+ if (this.opts.debugMode) {
+ console.warn(`⚠️ Failed to save corrected index: ${error.message}`)
+ }
+ }
+
+ // Verify the fix worked
+ const newIndexTotalLines = this.indexManager.totalLines || 0
+ if (newIndexTotalLines === offsetsLength) {
+ console.log(`✅ Data integrity successfully corrected: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`)
+ } else {
+ console.error(`❌ Data integrity correction failed: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`)
+ }
+ }
+ }
+
  // Validate indexed query mode if enabled
  if (this.opts.indexedQueryMode === 'strict') {
  this._validateIndexedQuery(criteria, options)
@@ -2279,36 +2364,25 @@ class Database extends EventEmitter {


  // Combine results, removing duplicates (writeBuffer takes precedence)
- // OPTIMIZATION: Use parallel processing for better performance when writeBuffer has many records
+ // OPTIMIZATION: Unified efficient approach with consistent precedence rules
  let allResults
- if (writeBufferResults.length > 50) {
- // Parallel approach for large writeBuffer
- const [fileResultsSet, writeBufferSet] = await Promise.all([
- Promise.resolve(new Set(fileResultsWithTerms.map(r => r.id))),
- Promise.resolve(new Set(writeBufferResultsWithTerms.map(r => r.id)))
- ])
+
+ // Create efficient lookup map for writeBuffer records
+ const writeBufferMap = new Map()
+ writeBufferResultsWithTerms.forEach(record => {
+ if (record && record.id) {
+ writeBufferMap.set(record.id, record)
+ }
+ })

- // Merge efficiently: keep file results not in writeBuffer, then add all writeBuffer results
- const filteredFileResults = await Promise.resolve(
- fileResultsWithTerms.filter(r => !writeBufferSet.has(r.id))
- )
+ // Filter file results to exclude any records that exist in writeBuffer
+ // This ensures writeBuffer always takes precedence
+ const filteredFileResults = fileResultsWithTerms.filter(record =>
+ record && record.id && !writeBufferMap.has(record.id)
+ )
+
+ // Combine results: file results (filtered) + all writeBuffer results
  allResults = [...filteredFileResults, ...writeBufferResultsWithTerms]
- } else {
- // Sequential approach for small writeBuffer (original logic)
- allResults = [...fileResultsWithTerms]
-
- // Replace file records with writeBuffer records and add new writeBuffer records
- for (const record of writeBufferResultsWithTerms) {
- const existingIndex = allResults.findIndex(r => r.id === record.id)
- if (existingIndex !== -1) {
- // Replace existing record with writeBuffer version
- allResults[existingIndex] = record
- } else {
- // Add new record from writeBuffer
- allResults.push(record)
- }
- }
- }

  // Remove records that are marked as deleted
  const finalResults = allResults.filter(record => !this.deletedIds.has(record.id))
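The rewritten merge above replaces the two code paths (a parallel variant for large writeBuffers and a sequential one otherwise) with a single Map-based pass. A self-contained sketch of the same precedence rule with made-up records (illustrative only):

    const fileResults = [{ id: 'a', value: 1 }, { id: 'b', value: 2 }];
    const writeBufferResults = [{ id: 'b', value: 20 }, { id: 'c', value: 3 }];
    // Records pending in the writeBuffer always win over their on-disk versions
    const writeBufferMap = new Map(writeBufferResults.map(r => [r.id, r]));
    const merged = [
      ...fileResults.filter(r => !writeBufferMap.has(r.id)),
      ...writeBufferResults
    ];
    console.log(merged); // a:1 (file), b:20 (writeBuffer wins), c:3 (writeBuffer only)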
@@ -2566,19 +2640,6 @@ class Database extends EventEmitter {

  // CRITICAL FIX: Validate state before update operation
  this.validateState()
-
- // CRITICAL FIX: If there's data to save, call save() to persist it
- // Only save if there are actual records in writeBuffer
- if (this.shouldSave && this.writeBuffer.length > 0) {
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Calling save() before update - writeBuffer.length=${this.writeBuffer.length}`)
- }
- const saveStart = Date.now()
- await this.save(false) // Use save(false) since we're already in queue
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Save completed in ${Date.now() - saveStart}ms`)
- }
- }

  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Starting find() - writeBuffer=${this.writeBuffer.length}`)
@@ -2591,7 +2652,12 @@ class Database extends EventEmitter {
  }

  const updatedRecords = []
-
+
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: About to process ${records.length} records`)
+ console.log(`🔄 UPDATE: Records:`, records.map(r => ({ id: r.id, value: r.value })))
+ }
+
  for (const record of records) {
  const recordStart = Date.now()
  if (this.opts.debugMode) {
@@ -2628,13 +2694,20 @@ class Database extends EventEmitter {
  // For records in the file, we need to ensure they are properly marked for replacement
  const index = this.writeBuffer.findIndex(r => r.id === record.id)
  let lineNumber = null
-
+
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: writeBuffer.findIndex for ${record.id} returned ${index}`)
+ console.log(`🔄 UPDATE: writeBuffer length: ${this.writeBuffer.length}`)
+ console.log(`🔄 UPDATE: writeBuffer IDs:`, this.writeBuffer.map(r => r.id))
+ }
+
  if (index !== -1) {
  // Record is already in writeBuffer, update it
  this.writeBuffer[index] = updated
  lineNumber = this._getAbsoluteLineNumber(index)
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Updated existing writeBuffer record at index ${index}`)
+ console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`)
  }
  } else {
  // Record is in file, add updated version to writeBuffer
@@ -2644,6 +2717,7 @@ class Database extends EventEmitter {
  lineNumber = this._getAbsoluteLineNumber(this.writeBuffer.length - 1)
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Added updated record to writeBuffer (will replace file record ${record.id})`)
+ console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`)
  }
  }

@@ -2679,13 +2753,32 @@ class Database extends EventEmitter {
  */
  async delete(criteria) {
  this._validateInitialization('delete')
-
+
  return this.operationQueue.enqueue(async () => {
  this.isInsideOperationQueue = true
  try {
  // CRITICAL FIX: Validate state before delete operation
  this.validateState()
-
+
+ // 🔧 NEW: Validate indexed query mode for delete operations
+ if (this.opts.indexedQueryMode === 'strict') {
+ this._validateIndexedQuery(criteria, { operation: 'delete' })
+ }
+
+ // ⚠️ NEW: Warn about non-indexed fields in permissive mode
+ if (this.opts.indexedQueryMode !== 'strict') {
+ const indexedFields = Object.keys(this.opts.indexes || {})
+ const queryFields = this._extractQueryFields(criteria)
+ const nonIndexedFields = queryFields.filter(field => !indexedFields.includes(field))
+
+ if (nonIndexedFields.length > 0) {
+ if (this.opts.debugMode) {
+ console.warn(`⚠️ Delete operation using non-indexed fields: ${nonIndexedFields.join(', ')}`)
+ console.warn(` This may be slow or fail silently. Consider indexing these fields.`)
+ }
+ }
+ }
+
  const records = await this.find(criteria)
  const deletedIds = []

@@ -4289,14 +4382,30 @@ class Database extends EventEmitter {
  try {
  const arrayData = JSON.parse(trimmedLine)
  if (Array.isArray(arrayData) && arrayData.length > 0) {
- // For arrays without explicit ID, use the first element as a fallback
- // or try to find the ID field if it exists
+ // CRITICAL FIX: Use schema to find ID position, not hardcoded position
+ // The schema defines the order of fields in the array
+ if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
+ const schema = this.serializer.schemaManager.getSchema()
+ const idIndex = schema.indexOf('id')
+ if (idIndex !== -1 && arrayData.length > idIndex) {
+ // ID is at the position defined by schema
+ recordId = arrayData[idIndex]
+ } else if (arrayData.length > schema.length) {
+ // ID might be appended after schema fields (for backward compatibility)
+ recordId = arrayData[schema.length]
+ } else {
+ // Fallback: use first element
+ recordId = arrayData[0]
+ }
+ } else {
+ // No schema available, try common positions
  if (arrayData.length > 2) {
- // ID is typically at position 2 in array format [age, city, id, name]
+ // Try position 2 (common in older formats)
  recordId = arrayData[2]
  } else {
- // For arrays without ID field, use first element as fallback
+ // Fallback: use first element
  recordId = arrayData[0]
+ }
  }
  if (recordId !== undefined && recordId !== null) {
  recordId = String(recordId)
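The recordId extraction now asks the schema where 'id' lives rather than assuming position 2, with a fallback for rows where the id is appended after the schema fields. A minimal sketch with a made-up schema and row (hypothetical values, mirroring the logic above):

    const schema = ['age', 'city', 'name'];        // 'id' not declared in the schema
    const arrayData = [32, 'Lisbon', 'Ana', 'a1']; // id appended after the schema fields
    const idIndex = schema.indexOf('id');
    let recordId;
    if (idIndex !== -1 && arrayData.length > idIndex) {
      recordId = arrayData[idIndex];               // 'id' is a schema field
    } else if (arrayData.length > schema.length) {
      recordId = arrayData[schema.length];         // id appended after schema fields
    } else {
      recordId = arrayData[0];                     // last-resort fallback
    }
    console.log(String(recordId)); // "a1"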
@@ -4369,7 +4478,7 @@ class Database extends EventEmitter {
  } else if (!deletedIdsSnapshot.has(String(recordWithIds.id))) {
  // Keep existing record if not deleted
  if (this.opts.debugMode) {
- console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`)
+ console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - not in deletedIdsSnapshot`)
  }
  return {
  type: 'kept',
@@ -4380,7 +4489,9 @@ class Database extends EventEmitter {
  } else {
  // Skip deleted record
  if (this.opts.debugMode) {
- console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted`)
+ console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted (found in deletedIdsSnapshot)`)
+ console.log(`💾 Save: deletedIdsSnapshot contains:`, Array.from(deletedIdsSnapshot))
+ console.log(`💾 Save: Record ID check: String(${recordWithIds.id}) = "${String(recordWithIds.id)}", has() = ${deletedIdsSnapshot.has(String(recordWithIds.id))}`)
  }
  return {
  type: 'deleted',
@@ -4426,6 +4537,54 @@ class Database extends EventEmitter {

  switch (result.type) {
  case 'unchanged':
+ // CRITICAL FIX: Verify that unchanged records are not deleted
+ // Extract ID from the line to check against deletedIdsSnapshot
+ let unchangedRecordId = null
+ try {
+ if (result.line.startsWith('[') && result.line.endsWith(']')) {
+ const arrayData = JSON.parse(result.line)
+ if (Array.isArray(arrayData) && arrayData.length > 0) {
+ // CRITICAL FIX: Use schema to find ID position, not hardcoded position
+ if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
+ const schema = this.serializer.schemaManager.getSchema()
+ const idIndex = schema.indexOf('id')
+ if (idIndex !== -1 && arrayData.length > idIndex) {
+ unchangedRecordId = String(arrayData[idIndex])
+ } else if (arrayData.length > schema.length) {
+ unchangedRecordId = String(arrayData[schema.length])
+ } else {
+ unchangedRecordId = String(arrayData[0])
+ }
+ } else {
+ // No schema, try common positions
+ if (arrayData.length > 2) {
+ unchangedRecordId = String(arrayData[2])
+ } else {
+ unchangedRecordId = String(arrayData[0])
+ }
+ }
+ }
+ } else {
+ const obj = JSON.parse(result.line)
+ unchangedRecordId = obj.id ? String(obj.id) : null
+ }
+ } catch (e) {
+ // If we can't parse, skip this record to be safe
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Could not parse unchanged record to check deletion: ${e.message}`)
+ }
+ continue
+ }
+
+ // Skip if this record is deleted
+ if (unchangedRecordId && deletedIdsSnapshot.has(unchangedRecordId)) {
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Skipping unchanged record ${unchangedRecordId} - deleted`)
+ }
+ deletedOffsets.add(offset)
+ break
+ }
+
  // Collect unchanged lines for batch processing
  unchangedLines.push(result.line)
  keptRecords.push({ offset, type: 'unchanged', line: result.line })
@@ -163,38 +163,10 @@ export default class SchemaManager {
  const obj = {}
  const idIndex = this.schema.indexOf('id')

- // CRITICAL FIX: Handle schema migration where 'id' was first field in old schema
- // but is not in current schema. Check if first element looks like an ID.
- // Only do this if:
- // 1. 'id' is not in current schema
- // 2. Array has significantly more elements than current schema (2+ extra elements)
- // This suggests the old schema had more fields, and 'id' was likely the first
- // 3. First element is a very short string (max 20 chars) that looks like a generated ID
- // (typically alphanumeric, often starting with letters like 'mit...' or similar patterns)
- // 4. First field in current schema is not 'id' (to avoid false positives)
- // 5. First element is not an array (to avoid false positives with array fields)
+ // DISABLED: Schema migration detection was causing field mapping corruption
+ // The logic was incorrectly assuming ID was in first position when it's appended at the end
+ // This caused fields to be shifted incorrectly during object-to-array-to-object conversion
  let arrayOffset = 0
- if (idIndex === -1 && arr.length >= this.schema.length + 2 && this.schema.length > 0) {
- // Only apply if array has at least 2 extra elements (suggests old schema had more fields)
- const firstElement = arr[0]
- const firstFieldName = this.schema[0]
-
- // Only apply shift if:
- // - First field is not 'id'
- // - First element is a very short string (max 20 chars) that looks like a generated ID
- // - First element is not an array (to avoid false positives)
- // - Array has at least 2 extra elements (strong indicator of schema migration)
- if (firstFieldName !== 'id' &&
- typeof firstElement === 'string' &&
- !Array.isArray(firstElement) &&
- firstElement.length > 0 &&
- firstElement.length <= 20 && // Very conservative: max 20 chars (typical ID length)
- /^[a-zA-Z0-9_-]+$/.test(firstElement)) {
- // First element is likely the ID from old schema
- obj.id = firstElement
- arrayOffset = 1
- }
- }

  // Map array values to object properties
  // Only include fields that are in the schema
@@ -1022,11 +1022,11 @@ export default class IndexManager {
  continue;
  }

- if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue)) {
+ if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue) && criteriaValue !== null) {
  const fieldIndex = data[field];
-
+
  // Handle $in operator for array queries
- if (criteriaValue.$in !== undefined) {
+ if (criteriaValue.$in !== undefined && criteriaValue.$in !== null) {
  const inValues = Array.isArray(criteriaValue.$in) ? criteriaValue.$in : [criteriaValue.$in];

  // PERFORMANCE: Cache term mapping field check once
@@ -1969,6 +1969,14 @@ export default class IndexManager {
  return
  }

+ // Restore totalLines from saved data
+ if (index.totalLines !== undefined) {
+ this.totalLines = index.totalLines
+ if (this.opts.debugMode) {
+ console.log(`🔍 IndexManager.load: Restored totalLines=${this.totalLines}`)
+ }
+ }
+
  this.index = processedIndex
  }

@@ -2008,7 +2016,10 @@ export default class IndexManager {
  * This resolves the issue where Sets appear as empty objects in JSON.stringify
  */
  toJSON() {
- const serializable = { data: {} }
+ const serializable = {
+ data: {},
+ totalLines: this.totalLines
+ }

  // Check if this is a term mapping field for conversion
  const isTermMappingField = (field) => {
@@ -1215,7 +1215,7 @@ export class QueryManager {
  return false;
  }

- if (typeof condition === 'object' && !Array.isArray(condition)) {
+ if (typeof condition === 'object' && !Array.isArray(condition) && condition !== null) {
  const operators = Object.keys(condition).map(op => normalizeOperator(op));
  if (this.opts.debugMode) {
  console.log(`🔍 Field '${field}' has operators:`, operators)
@@ -1532,8 +1532,8 @@ export class QueryManager {

  if (termMappingFields.includes(field)) {
  // For term mapping fields, simple equality or $in queries work well
- if (typeof condition === 'string' ||
- (typeof condition === 'object' && condition.$in && Array.isArray(condition.$in))) {
+ if (typeof condition === 'string' ||
+ (typeof condition === 'object' && condition !== null && condition.$in && Array.isArray(condition.$in))) {
  return true;
  }
  }