jexidb 2.1.2 → 2.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/Database.cjs CHANGED
@@ -1241,11 +1241,11 @@ class IndexManager {
  // This will be handled by the QueryManager's streaming strategy
  continue;
  }
- if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue)) {
+ if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue) && criteriaValue !== null) {
  const fieldIndex = data[field];

  // Handle $in operator for array queries
- if (criteriaValue.$in !== undefined) {
+ if (criteriaValue.$in !== undefined && criteriaValue.$in !== null) {
  const inValues = Array.isArray(criteriaValue.$in) ? criteriaValue.$in : [criteriaValue.$in];

  // PERFORMANCE: Cache term mapping field check once
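Both guards in this hunk work around the same JavaScript quirk: `typeof null === 'object'`, so a criteria value of null previously fell into the object branch and crashed on property access ($in lookups included). A minimal standalone sketch of the corrected check (names are illustrative, not jexidb API):

    // typeof null is 'object', so a bare typeof check lets null through.
    function isOperatorObject(value) {
      return typeof value === 'object' && !Array.isArray(value) && value !== null;
    }
    console.log(isOperatorObject({ $in: [1, 2] })); // true
    console.log(isOperatorObject(null));            // false (previously reached the object branch)
    console.log(isOperatorObject([1, 2]));          // false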
@@ -2137,6 +2137,14 @@ class IndexManager {
  // Keep the current index with initialized fields
  return;
  }
+
+ // Restore totalLines from saved data
+ if (index.totalLines !== undefined) {
+ this.totalLines = index.totalLines;
+ if (this.opts.debugMode) {
+ console.log(`🔍 IndexManager.load: Restored totalLines=${this.totalLines}`);
+ }
+ }
  this.index = processedIndex;
  }

@@ -2177,7 +2185,8 @@ class IndexManager {
  */
  toJSON() {
  const serializable = {
- data: {}
+ data: {},
+ totalLines: this.totalLines
  };

  // Check if this is a term mapping field for conversion
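The load() change above and this toJSON() change form a pair: totalLines now round-trips through the saved index instead of being re-derived on startup. A minimal sketch of the round trip, using a stripped-down stand-in for IndexManager (illustrative only, not the real class):

    class TinyIndex {
      constructor() { this.totalLines = 0; this.data = {}; }
      toJSON() { return { data: this.data, totalLines: this.totalLines }; }
      load(saved) {
        if (saved.totalLines !== undefined) this.totalLines = saved.totalLines;
        this.data = saved.data;
      }
    }
    const a = new TinyIndex();
    a.totalLines = 42;
    const b = new TinyIndex();
    b.load(JSON.parse(JSON.stringify(a))); // JSON.stringify invokes toJSON()
    console.log(b.totalLines); // 42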
@@ -2257,9 +2266,9 @@ class IndexManager {
  }
  }

- /**
- * SchemaManager - Manages field schemas for optimized array-based serialization
- * This replaces the need for repeating field names in JSON objects
+ /**
+ * SchemaManager - Manages field schemas for optimized array-based serialization
+ * This replaces the need for repeating field names in JSON objects
  */
  class SchemaManager {
  constructor(opts = {}) {
@@ -2277,8 +2286,8 @@ class SchemaManager {
  this.isInitialized = false;
  }

- /**
- * Initialize schema from options or auto-detect from data
+ /**
+ * Initialize schema from options or auto-detect from data
  */
  initializeSchema(schemaOrData, autoDetect = false) {
  if (this.isInitialized && this.opts.strictSchema) {
@@ -2303,8 +2312,8 @@ class SchemaManager {
  }
  }

- /**
- * Set explicit schema
+ /**
+ * Set explicit schema
  */
  setSchema(fieldNames) {
  this.schema = [...fieldNames]; // Create copy
@@ -2319,8 +2328,8 @@ class SchemaManager {
  }
  }

- /**
- * Auto-detect schema from sample data
+ /**
+ * Auto-detect schema from sample data
  */
  autoDetectSchema(sampleData) {
  if (Array.isArray(sampleData)) {
@@ -2341,9 +2350,9 @@ class SchemaManager {
  }
  }

- /**
- * Initialize schema from database options
- * Note: schema option is no longer supported, use fields instead
+ /**
+ * Initialize schema from database options
+ * Note: schema option is no longer supported, use fields instead
  */
  initializeFromOptions(opts) {
  // Schema option is no longer supported - fields should be used instead
@@ -2351,8 +2360,8 @@ class SchemaManager {
  // Schema initialization is handled by Database.initializeSchema() using fields
  }

- /**
- * Add new field to schema (for schema evolution)
+ /**
+ * Add new field to schema (for schema evolution)
  */
  addField(fieldName) {
  if (this.fieldToIndex.has(fieldName)) {
@@ -2368,8 +2377,8 @@ class SchemaManager {
  return newIndex;
  }

- /**
- * Convert object to array using schema with strict field enforcement
+ /**
+ * Convert object to array using schema with strict field enforcement
  */
  objectToArray(obj) {
  if (!this.isInitialized || !this.opts.enableArraySerialization) {
@@ -2395,8 +2404,8 @@ class SchemaManager {
  return result;
  }

- /**
- * Convert array back to object using schema
+ /**
+ * Convert array back to object using schema
  */
  arrayToObject(arr) {
  if (!this.isInitialized || !this.opts.enableArraySerialization) {
@@ -2406,31 +2415,36 @@ class SchemaManager {
  return arr; // Don't convert non-arrays
  }
  const obj = {};
+ const idIndex = this.schema.indexOf('id');
+
+ // DISABLED: Schema migration detection was causing field mapping corruption
+ // The logic was incorrectly assuming ID was in first position when it's appended at the end
+ // This caused fields to be shifted incorrectly during object-to-array-to-object conversion
+ let arrayOffset = 0;

  // Map array values to object properties
  // Only include fields that are in the schema
- for (let i = 0; i < Math.min(arr.length, this.schema.length); i++) {
+ for (let i = 0; i < Math.min(arr.length - arrayOffset, this.schema.length); i++) {
  const fieldName = this.schema[i];
+ const arrayIndex = i + arrayOffset;
  // Only include non-undefined values to avoid cluttering the object
- if (arr[i] !== undefined) {
- obj[fieldName] = arr[i];
+ if (arr[arrayIndex] !== undefined) {
+ obj[fieldName] = arr[arrayIndex];
  }
  }

  // CRITICAL FIX: Always preserve 'id' field if it exists in the original object
  // The 'id' field may not be in the schema but must be preserved
- // Check if 'id' is in the schema first
- const idIndex = this.schema.indexOf('id');
  if (idIndex !== -1 && arr[idIndex] !== undefined) {
  // 'id' is in schema and has a value
  obj.id = arr[idIndex];
- } else if (idIndex === -1 && arr.length > this.schema.length) {
+ } else if (!obj.id && arr.length > this.schema.length + arrayOffset) {
  // 'id' is not in schema but array has extra element(s) - check if last element could be ID
  // This handles cases where ID was added after schema initialization
- for (let i = this.schema.length; i < arr.length; i++) {
+ for (let i = this.schema.length + arrayOffset; i < arr.length; i++) {
  // Try to infer if this is an ID (string that looks like an ID)
  const potentialId = arr[i];
- if (potentialId !== undefined && potentialId !== null && typeof potentialId === 'string' && potentialId.length > 0) {
+ if (potentialId !== undefined && potentialId !== null && typeof potentialId === 'string' && potentialId.length > 0 && potentialId.length < 100) {
  obj.id = potentialId;
  break; // Use first potential ID found
  }
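With arrayOffset pinned to 0, the rewritten arrayToObject maps schema fields positionally and then recovers the id either from its schema slot or, as a heuristic, from a short string appended past the schema fields. A condensed sketch of that logic with hypothetical data:

    const schema = ['name', 'price'];
    const row = ['Widget', 9.99, 'rec-123']; // id appended after schema fields
    const obj = {};
    for (let i = 0; i < Math.min(row.length, schema.length); i++) {
      if (row[i] !== undefined) obj[schema[i]] = row[i];
    }
    if (!obj.id && row.length > schema.length) {
      const candidate = row[schema.length];
      if (typeof candidate === 'string' && candidate.length > 0 && candidate.length < 100) {
        obj.id = candidate; // the new length < 100 cap filters out long non-ID strings
      }
    }
    console.log(obj); // { name: 'Widget', price: 9.99, id: 'rec-123' }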
@@ -2439,43 +2453,43 @@ class SchemaManager {
  return obj;
  }

- /**
- * Get field index by name
+ /**
+ * Get field index by name
  */
  getFieldIndex(fieldName) {
  return this.fieldToIndex.get(fieldName);
  }

- /**
- * Get field name by index
+ /**
+ * Get field name by index
  */
  getFieldName(index) {
  return this.indexToField.get(index);
  }

- /**
- * Check if field exists in schema
+ /**
+ * Check if field exists in schema
  */
  hasField(fieldName) {
  return this.fieldToIndex.has(fieldName);
  }

- /**
- * Get schema as array of field names
+ /**
+ * Get schema as array of field names
  */
  getSchema() {
  return [...this.schema]; // Return copy
  }

- /**
- * Get schema size
+ /**
+ * Get schema size
  */
  getSchemaSize() {
  return this.schema.length;
  }

- /**
- * Validate that object conforms to schema
+ /**
+ * Validate that object conforms to schema
  */
  validateObject(obj) {
  if (!this.isInitialized || !this.opts.strictSchema) {
@@ -2497,8 +2511,8 @@ class SchemaManager {
  return true;
  }

- /**
- * Get schema metadata for serialization
+ /**
+ * Get schema metadata for serialization
  */
  getSchemaMetadata() {
  return {
@@ -2509,8 +2523,8 @@ class SchemaManager {
  };
  }

- /**
- * Reset schema
+ /**
+ * Reset schema
  */
  reset() {
  this.schema = [];
@@ -2520,8 +2534,8 @@ class SchemaManager {
  this.schemaVersion++;
  }

- /**
- * Get performance statistics
+ /**
+ * Get performance statistics
  */
  getStats() {
  return {
@@ -5134,9 +5148,23 @@ class QueryManager {
  }
  }

+ // Handle $not operator - include it if it can be processed by IndexManager
+ if (criteria.$not && typeof criteria.$not === 'object') {
+ // Check if $not condition contains only indexable fields
+ const notFields = Object.keys(criteria.$not);
+ const allNotFieldsIndexed = notFields.every(field => this.indexManager.opts.indexes && this.indexManager.opts.indexes[field]);
+ if (allNotFieldsIndexed && notFields.length > 0) {
+ // Extract indexable criteria from $not condition
+ const indexableNotCriteria = this._extractIndexableCriteria(criteria.$not);
+ if (Object.keys(indexableNotCriteria).length > 0) {
+ indexableCriteria.$not = indexableNotCriteria;
+ }
+ }
+ }
+
  // Handle regular field conditions
  for (const [field, condition] of Object.entries(criteria)) {
- if (field.startsWith('$')) continue; // Skip logical operators
+ if (field.startsWith('$')) continue; // Skip logical operators (already handled above)

  // RegExp conditions cannot be pre-filtered using indices
  if (condition instanceof RegExp) {
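The new block forwards $not to the index layer only when every field inside it is itself indexed; otherwise the condition falls through to the streaming scan. A standalone sketch of that gate (the indexes shape is assumed from the surrounding code):

    function canIndexNot(criteria, indexes) {
      if (!criteria.$not || typeof criteria.$not !== 'object') return false;
      const fields = Object.keys(criteria.$not);
      return fields.length > 0 && fields.every(f => Boolean(indexes[f]));
    }
    const indexes = { status: true };
    console.log(canIndexNot({ $not: { status: 'archived' } }, indexes)); // true
    console.log(canIndexNot({ $not: { color: 'red' } }, indexes));       // false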
@@ -5484,47 +5512,82 @@ class QueryManager {
  // Read specific records using the line numbers
  if (lineNumbers.size > 0) {
  const lineNumbersArray = Array.from(lineNumbers);
- const ranges = this.database.getRanges(lineNumbersArray);
- const groupedRanges = await this.database.fileHandler.groupedRanges(ranges);
- const fs = await import('fs');
- const fd = await fs.promises.open(this.database.fileHandler.file, 'r');
- try {
- for (const groupedRange of groupedRanges) {
- var _iteratorAbruptCompletion2 = false;
- var _didIteratorError2 = false;
- var _iteratorError2;
+ const persistedCount = Array.isArray(this.database.offsets) ? this.database.offsets.length : 0;
+
+ // Separate lineNumbers into file records and writeBuffer records
+ const fileLineNumbers = [];
+ const writeBufferLineNumbers = [];
+ for (const lineNumber of lineNumbersArray) {
+ if (lineNumber >= persistedCount) {
+ // This lineNumber points to writeBuffer
+ writeBufferLineNumbers.push(lineNumber);
+ } else {
+ // This lineNumber points to file
+ fileLineNumbers.push(lineNumber);
+ }
+ }
+
+ // Read records from file
+ if (fileLineNumbers.length > 0) {
+ const ranges = this.database.getRanges(fileLineNumbers);
+ if (ranges.length > 0) {
+ const groupedRanges = await this.database.fileHandler.groupedRanges(ranges);
+ const fs = await import('fs');
+ const fd = await fs.promises.open(this.database.fileHandler.file, 'r');
  try {
- for (var _iterator2 = _asyncIterator(this.database.fileHandler.readGroupedRange(groupedRange, fd)), _step2; _iteratorAbruptCompletion2 = !(_step2 = await _iterator2.next()).done; _iteratorAbruptCompletion2 = false) {
- const row = _step2.value;
- {
+ for (const groupedRange of groupedRanges) {
+ var _iteratorAbruptCompletion2 = false;
+ var _didIteratorError2 = false;
+ var _iteratorError2;
+ try {
+ for (var _iterator2 = _asyncIterator(this.database.fileHandler.readGroupedRange(groupedRange, fd)), _step2; _iteratorAbruptCompletion2 = !(_step2 = await _iterator2.next()).done; _iteratorAbruptCompletion2 = false) {
+ const row = _step2.value;
+ {
+ try {
+ const record = this.database.serializer.deserialize(row.line);
+ const recordWithTerms = options.restoreTerms !== false ? this.database.restoreTermIdsAfterDeserialization(record) : record;
+ results.push(recordWithTerms);
+ if (limit && results.length >= limit) break;
+ } catch (error) {
+ // Skip invalid lines
+ }
+ }
+ }
+ } catch (err) {
+ _didIteratorError2 = true;
+ _iteratorError2 = err;
+ } finally {
  try {
- const record = this.database.serializer.deserialize(row.line);
- const recordWithTerms = options.restoreTerms !== false ? this.database.restoreTermIdsAfterDeserialization(record) : record;
- results.push(recordWithTerms);
- if (limit && results.length >= limit) break;
- } catch (error) {
- // Skip invalid lines
+ if (_iteratorAbruptCompletion2 && _iterator2.return != null) {
+ await _iterator2.return();
+ }
+ } finally {
+ if (_didIteratorError2) {
+ throw _iteratorError2;
+ }
  }
  }
+ if (limit && results.length >= limit) break;
  }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
  } finally {
- try {
- if (_iteratorAbruptCompletion2 && _iterator2.return != null) {
- await _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
+ await fd.close();
  }
+ }
+ }
+
+ // Read records from writeBuffer
+ if (writeBufferLineNumbers.length > 0 && this.database.writeBuffer) {
+ for (const lineNumber of writeBufferLineNumbers) {
  if (limit && results.length >= limit) break;
+ const writeBufferIndex = lineNumber - persistedCount;
+ if (writeBufferIndex >= 0 && writeBufferIndex < this.database.writeBuffer.length) {
+ const record = this.database.writeBuffer[writeBufferIndex];
+ if (record) {
+ const recordWithTerms = options.restoreTerms !== false ? this.database.restoreTermIdsAfterDeserialization(record) : record;
+ results.push(recordWithTerms);
+ }
+ }
  }
- } finally {
- await fd.close();
  }
  }
  if (options.orderBy) {
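The convention this rewrite relies on is a single continuous line-number space: indices below offsets.length address rows already persisted to the file, and everything at or above it addresses writeBuffer positions. A sketch of the partition and the buffer-index arithmetic:

    const persistedCount = 5;         // stands in for offsets.length
    const lineNumbers = [1, 4, 5, 7];
    const fileLines = lineNumbers.filter(n => n < persistedCount);  // [1, 4]
    const bufferIndexes = lineNumbers
      .filter(n => n >= persistedCount)
      .map(n => n - persistedCount);                                // [0, 2]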
@@ -5944,8 +6007,8 @@ class QueryManager {
  }
  }
  const allFieldsIndexed = Object.keys(criteria).every(field => {
- // Skip $and as it's handled separately above
- if (field === '$and') return true;
+ // Skip $and and $not as they're handled separately above
+ if (field === '$and' || field === '$not') return true;
  if (!this.opts.indexes || !this.opts.indexes[field]) {
  if (this.opts.debugMode) {
  console.log(`🔍 Field '${field}' not indexed. Available indexes:`, Object.keys(this.opts.indexes || {}));
@@ -5963,7 +6026,7 @@ class QueryManager {
  }
  return false;
  }
- if (typeof condition === 'object' && !Array.isArray(condition)) {
+ if (typeof condition === 'object' && !Array.isArray(condition) && condition !== null) {
  const operators = Object.keys(condition).map(op => normalizeOperator(op));
  if (this.opts.debugMode) {
  console.log(`🔍 Field '${field}' has operators:`, operators);
@@ -6260,7 +6323,7 @@ class QueryManager {
  if (field.startsWith('$')) continue;
  if (termMappingFields.includes(field)) {
  // For term mapping fields, simple equality or $in queries work well
- if (typeof condition === 'string' || typeof condition === 'object' && condition.$in && Array.isArray(condition.$in)) {
+ if (typeof condition === 'string' || typeof condition === 'object' && condition !== null && condition.$in && Array.isArray(condition.$in)) {
  return true;
  }
  }
@@ -8158,6 +8221,14 @@ class Database extends events.EventEmitter {

  // Manual save is now the default behavior

+ // CRITICAL FIX: Ensure IndexManager totalLines is consistent with offsets
+ // This prevents data integrity issues when database is initialized without existing data
+ if (this.indexManager && this.offsets) {
+ this.indexManager.setTotalLines(this.offsets.length);
+ if (this.opts.debugMode) {
+ console.log(`🔧 Initialized index totalLines to ${this.offsets.length}`);
+ }
+ }
  this.initialized = true;
  this.emit('initialized');
  if (this.opts.debugMode) {
@@ -8654,39 +8725,21 @@ class Database extends events.EventEmitter {
  this.pendingIndexUpdates = [];
  }

- // CRITICAL FIX: Flush write buffer completely after capturing snapshot
- await this._flushWriteBufferCompletely();
-
- // CRITICAL FIX: Wait for all I/O operations to complete before clearing writeBuffer
- await this._waitForIOCompletion();
-
- // CRITICAL FIX: Verify write buffer is empty after I/O completion
- // But allow for ongoing insertions during high-volume scenarios
- if (this.writeBuffer.length > 0) {
- if (this.opts.debugMode) {
- console.log(`💾 Save: WriteBuffer still has ${this.writeBuffer.length} items after flush - this may indicate ongoing insertions`);
- }
-
- // If we have a reasonable number of items, continue processing
- if (this.writeBuffer.length < 10000) {
- // Reasonable threshold
- if (this.opts.debugMode) {
- console.log(`💾 Save: Continuing to process remaining ${this.writeBuffer.length} items`);
- }
- // Continue with the save process - the remaining items will be included in the final save
- } else {
- // Too many items remaining - likely a real problem
- throw new Error(`WriteBuffer has too many items after flush: ${this.writeBuffer.length} items remaining (threshold: 10000)`);
- }
+ // CRITICAL FIX: DO NOT flush writeBuffer before processing existing records
+ // This prevents duplicating updated records in the file.
+ // The _streamExistingRecords() will handle replacing old records with updated ones from writeBufferSnapshot.
+ // After processing, all records (existing + updated + new) will be written to file in one operation.
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: writeBufferSnapshot captured with ${writeBufferSnapshot.length} records (will be processed with existing records)`);
  }

  // OPTIMIZATION: Parallel operations - cleanup and data preparation
  let allData = [];
  let orphanedCount = 0;

- // Check if there are new records to save (after flush, writeBuffer should be empty)
- // CRITICAL FIX: Also check writeBufferSnapshot.length > 0 to handle updates/deletes
- // that were in writeBuffer before flush but are now in snapshot
+ // Check if there are records to save from writeBufferSnapshot
+ // CRITICAL FIX: Process writeBufferSnapshot records (both new and updated) with existing records
+ // Updated records will replace old ones via _streamExistingRecords, new records will be added
  if (this.opts.debugMode) {
  console.log(`💾 Save: writeBuffer.length=${this.writeBuffer.length}, writeBufferSnapshot.length=${writeBufferSnapshot.length}`);
  }
@@ -8729,11 +8782,60 @@ class Database extends events.EventEmitter {
  // CRITICAL FIX: Normalize IDs to strings for consistent comparison
  const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => String(r.id)));

+ // CRITICAL FIX: Create a map of records in existingRecords by ID for comparison
+ const existingRecordsById = new Map();
+ existingRecords.forEach(r => {
+ if (r && r.id) {
+ existingRecordsById.set(String(r.id), r);
+ }
+ });
+
  // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
+ // CRITICAL FIX: Also ensure that if an updated record wasn't properly replaced, we replace it now
  for (const record of writeBufferSnapshot) {
- if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(String(record.id))) {
+ if (!record || !record.id) continue;
+ if (deletedIdsSnapshot.has(String(record.id))) continue;
+ const recordIdStr = String(record.id);
+ const existingRecord = existingRecordsById.get(recordIdStr);
+ if (!existingRecord) {
  // This is a new record, not an update
  allData.push(record);
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Adding NEW record to allData:`, {
+ id: recordIdStr,
+ price: record.price,
+ app_id: record.app_id,
+ currency: record.currency
+ });
+ }
+ } else {
+ // This is an update - verify that existingRecords contains the updated version
+ // If not, replace it (this handles edge cases where substitution might have failed)
+ const existingIndex = allData.findIndex(r => r && r.id && String(r.id) === recordIdStr);
+ if (existingIndex !== -1) {
+ // Verify if the existing record is actually the updated one
+ // Compare key fields to detect if replacement is needed
+ const needsReplacement = JSON.stringify(allData[existingIndex]) !== JSON.stringify(record);
+ if (needsReplacement) {
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: REPLACING existing record with updated version in allData:`, {
+ old: {
+ id: String(allData[existingIndex].id),
+ price: allData[existingIndex].price
+ },
+ new: {
+ id: recordIdStr,
+ price: record.price
+ }
+ });
+ }
+ allData[existingIndex] = record;
+ } else if (this.opts.debugMode) {
+ console.log(`💾 Save: Record already correctly updated in allData:`, {
+ id: recordIdStr
+ });
+ }
+ }
+ }
  }
  }
  }));
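This merge rule (repeated in the fallback path further below) is: a snapshot record with an unseen id is appended, while a known id replaces the in-place copy only when a JSON comparison says it actually differs. A condensed sketch of the rule, lifted out of the save() machinery:

    function mergeSnapshot(allData, snapshot, deletedIds) {
      const byId = new Map(allData.filter(r => r && r.id).map(r => [String(r.id), r]));
      for (const rec of snapshot) {
        if (!rec || !rec.id || deletedIds.has(String(rec.id))) continue;
        const id = String(rec.id);
        if (!byId.has(id)) {
          allData.push(rec); // new record
        } else {
          const i = allData.findIndex(r => r && String(r.id) === id);
          if (JSON.stringify(allData[i]) !== JSON.stringify(rec)) {
            allData[i] = rec; // stale copy: replace with the updated version
          }
        }
      }
      return allData;
    }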
@@ -8780,30 +8882,88 @@ class Database extends events.EventEmitter {
  allData = [...existingRecords];

  // OPTIMIZATION: Use Set for faster lookups of existing record IDs
- const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => r.id));
+ // CRITICAL FIX: Normalize IDs to strings for consistent comparison
+ const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => String(r.id)));
+
+ // CRITICAL FIX: Create a map of records in existingRecords by ID for comparison
+ const existingRecordsById = new Map();
+ existingRecords.forEach(r => {
+ if (r && r.id) {
+ existingRecordsById.set(String(r.id), r);
+ }
+ });

  // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
+ // CRITICAL FIX: Also ensure that if an updated record wasn't properly replaced, we replace it now
  for (const record of writeBufferSnapshot) {
- if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(record.id)) {
+ if (!record || !record.id) continue;
+ if (deletedIdsSnapshot.has(String(record.id))) continue;
+ const recordIdStr = String(record.id);
+ const existingRecord = existingRecordsById.get(recordIdStr);
+ if (!existingRecord) {
  // This is a new record, not an update
  allData.push(record);
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Adding NEW record to allData:`, {
+ id: recordIdStr,
+ price: record.price,
+ app_id: record.app_id,
+ currency: record.currency
+ });
+ }
+ } else {
+ // This is an update - verify that existingRecords contains the updated version
+ // If not, replace it (this handles edge cases where substitution might have failed)
+ const existingIndex = allData.findIndex(r => r && r.id && String(r.id) === recordIdStr);
+ if (existingIndex !== -1) {
+ // Verify if the existing record is actually the updated one
+ // Compare key fields to detect if replacement is needed
+ const needsReplacement = JSON.stringify(allData[existingIndex]) !== JSON.stringify(record);
+ if (needsReplacement) {
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: REPLACING existing record with updated version in allData:`, {
+ old: {
+ id: String(allData[existingIndex].id),
+ price: allData[existingIndex].price
+ },
+ new: {
+ id: recordIdStr,
+ price: record.price
+ }
+ });
+ }
+ allData[existingIndex] = record;
+ } else if (this.opts.debugMode) {
+ console.log(`💾 Save: Record already correctly updated in allData:`, {
+ id: recordIdStr
+ });
+ }
+ }
+ }
  }
  }
  if (this.opts.debugMode) {
  const updatedCount = writeBufferSnapshot.filter(r => r && r.id && existingRecordIds.has(String(r.id))).length;
  const newCount = writeBufferSnapshot.filter(r => r && r.id && !existingRecordIds.has(String(r.id))).length;
  console.log(`💾 Save: Combined data - existingRecords: ${existingRecords.length}, updatedFromBuffer: ${updatedCount}, newFromBuffer: ${newCount}, total: ${allData.length}`);
- console.log(`💾 Save: WriteBuffer record IDs:`, writeBufferSnapshot.map(r => r && r.id ? r.id : 'no-id'));
+ console.log(`💾 Save: WriteBuffer record IDs:`, writeBufferSnapshot.map(r => r && r.id ? String(r.id) : 'no-id'));
  console.log(`💾 Save: Existing record IDs:`, Array.from(existingRecordIds));
+ console.log(`💾 Save: All records in allData:`, allData.map(r => r && r.id ? {
+ id: String(r.id),
+ price: r.price,
+ app_id: r.app_id,
+ currency: r.currency
+ } : 'no-id'));
  console.log(`💾 Save: Sample existing record:`, existingRecords[0] ? {
- id: existingRecords[0].id,
- name: existingRecords[0].name,
- tags: existingRecords[0].tags
+ id: String(existingRecords[0].id),
+ price: existingRecords[0].price,
+ app_id: existingRecords[0].app_id,
+ currency: existingRecords[0].currency
  } : 'null');
  console.log(`💾 Save: Sample writeBuffer record:`, writeBufferSnapshot[0] ? {
- id: writeBufferSnapshot[0].id,
- name: writeBufferSnapshot[0].name,
- tags: writeBufferSnapshot[0].tags
+ id: String(writeBufferSnapshot[0].id),
+ price: writeBufferSnapshot[0].price,
+ app_id: writeBufferSnapshot[0].app_id,
+ currency: writeBufferSnapshot[0].currency
  } : 'null');
  }
  }).catch(error => {
@@ -8874,6 +9034,13 @@ class Database extends events.EventEmitter {
  });
  if (this.opts.debugMode) {
  console.log(`💾 Save: allData.length=${allData.length}, cleanedData.length=${cleanedData.length}`);
+ console.log(`💾 Save: Current offsets.length before recalculation: ${this.offsets.length}`);
+ console.log(`💾 Save: All records in allData before serialization:`, allData.map(r => r && r.id ? {
+ id: String(r.id),
+ price: r.price,
+ app_id: r.app_id,
+ currency: r.currency
+ } : 'no-id'));
  console.log(`💾 Save: Sample cleaned record:`, cleanedData[0] ? Object.keys(cleanedData[0]) : 'null');
  }
  const jsonlData = cleanedData.length > 0 ? this.serializer.serializeBatch(cleanedData) : '';
@@ -8881,10 +9048,19 @@ class Database extends events.EventEmitter {
  const lines = jsonlString.split('\n').filter(line => line.trim());
  if (this.opts.debugMode) {
  console.log(`💾 Save: Serialized ${lines.length} lines`);
+ console.log(`💾 Save: All records in allData after serialization check:`, allData.map(r => r && r.id ? {
+ id: String(r.id),
+ price: r.price,
+ app_id: r.app_id,
+ currency: r.currency
+ } : 'no-id'));
  if (lines.length > 0) {
  console.log(`💾 Save: First line (first 200 chars):`, lines[0].substring(0, 200));
  }
  }
+
+ // CRITICAL FIX: Always recalculate offsets from serialized data to ensure consistency
+ // Even if _streamExistingRecords updated offsets, we need to recalculate based on actual serialized data
  this.offsets = [];
  let currentOffset = 0;
  for (let i = 0; i < lines.length; i++) {
@@ -8894,6 +9070,9 @@ class Database extends events.EventEmitter {
  const lineWithNewline = lines[i] + '\n';
  currentOffset += Buffer.byteLength(lineWithNewline, 'utf8');
  }
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Recalculated offsets.length=${this.offsets.length}, should match lines.length=${lines.length}`);
+ }

  // CRITICAL FIX: Ensure indexOffset matches actual file size
  this.indexOffset = currentOffset;
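The offsets table is rebuilt from the serialized JSONL itself, each entry holding the byte position where a line starts; Buffer.byteLength is what keeps multi-byte UTF-8 content from skewing the table. A self-contained sketch:

    const lines = ['{"id":1}', '{"id":2,"name":"café"}'];
    const offsets = [];
    let pos = 0;
    for (const line of lines) {
      offsets.push(pos);
      pos += Buffer.byteLength(line + '\n', 'utf8'); // byte length, not line.length
    }
    console.log(offsets); // [0, 9] ('café' counts 5 bytes, not 4 characters)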
@@ -8901,51 +9080,9 @@ class Database extends events.EventEmitter {
  console.log(`💾 Save: Calculated indexOffset: ${this.indexOffset}, allData.length: ${allData.length}`);
  }

- // OPTIMIZATION: Parallel operations - file writing and index data preparation
- const parallelWriteOperations = [];
-
- // Add main file write operation
- parallelWriteOperations.push(this.fileHandler.writeBatch([jsonlData]));
-
- // Add index file operations - ALWAYS save offsets, even without indexed fields
- if (this.indexManager) {
- const idxPath = this.normalizedFile.replace('.jdb', '.idx.jdb');
-
- // OPTIMIZATION: Parallel data preparation
- const indexDataPromise = Promise.resolve({
- index: this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0 ? this.indexManager.toJSON() : {},
- offsets: this.offsets,
- // Save actual offsets for efficient file operations
- indexOffset: this.indexOffset // Save file size for proper range calculations
- });
-
- // Add term mapping data if needed
- const termMappingFields = this.getTermMappingFields();
- if (termMappingFields.length > 0 && this.termManager) {
- const termDataPromise = this.termManager.saveTerms();
-
- // Combine index data and term data
- const combinedDataPromise = Promise.all([indexDataPromise, termDataPromise]).then(([indexData, termData]) => {
- indexData.termMapping = termData;
- return indexData;
- });
-
- // Add index file write operation
- parallelWriteOperations.push(combinedDataPromise.then(indexData => {
- const idxFileHandler = new FileHandler(idxPath, this.fileMutex, this.opts);
- return idxFileHandler.writeAll(JSON.stringify(indexData, null, 2));
- }));
- } else {
- // Add index file write operation without term mapping
- parallelWriteOperations.push(indexDataPromise.then(indexData => {
- const idxFileHandler = new FileHandler(idxPath, this.fileMutex, this.opts);
- return idxFileHandler.writeAll(JSON.stringify(indexData, null, 2));
- }));
- }
- }
-
- // Execute parallel write operations
- await Promise.all(parallelWriteOperations);
+ // CRITICAL FIX: Write main data file first
+ // Index will be saved AFTER reconstruction to ensure it contains correct data
+ await this.fileHandler.writeBatch([jsonlData]);
  if (this.opts.debugMode) {
  console.log(`💾 Saved ${allData.length} records to ${this.normalizedFile}`);
  }
@@ -8955,22 +9092,45 @@ class Database extends events.EventEmitter {
  this.shouldSave = false;
  this.lastSaveTime = Date.now();

- // Clear writeBuffer and deletedIds after successful save only if we had data to save
- if (allData.length > 0) {
- // Rebuild index when records were deleted to maintain consistency
- const hadDeletedRecords = deletedIdsSnapshot.size > 0;
+ // CRITICAL FIX: Always clear deletedIds and rebuild index if there were deletions,
+ // even if allData.length === 0 (all records were deleted)
+ const hadDeletedRecords = deletedIdsSnapshot.size > 0;
+ const hadUpdatedRecords = writeBufferSnapshot.length > 0;
+
+ // Clear writeBuffer and deletedIds after successful save
+ // Also rebuild index if records were deleted or updated, even if allData is empty
+ if (allData.length > 0 || hadDeletedRecords || hadUpdatedRecords) {
+ // Rebuild index when records were deleted or updated to maintain consistency
  if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
- if (hadDeletedRecords) {
- // Clear the index and rebuild it from the remaining records
+ if (hadDeletedRecords || hadUpdatedRecords) {
+ // Clear the index and rebuild it from the saved records
+ // This ensures that lineNumbers point to the correct positions in the file
  this.indexManager.clear();
  if (this.opts.debugMode) {
- console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records`);
+ if (hadDeletedRecords && hadUpdatedRecords) {
+ console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records and updating ${writeBufferSnapshot.length} records`);
+ } else if (hadDeletedRecords) {
+ console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records`);
+ } else {
+ console.log(`🧹 Rebuilding index after updating ${writeBufferSnapshot.length} records`);
+ }
  }

  // Rebuild index from the saved records
  // CRITICAL: Process term mapping for records loaded from file to ensure ${field}Ids are available
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Rebuilding index from ${allData.length} records in allData`);
+ }
  for (let i = 0; i < allData.length; i++) {
  let record = allData[i];
+ if (this.opts.debugMode && i < 3) {
+ console.log(`💾 Save: Rebuilding index record[${i}]:`, {
+ id: String(record.id),
+ price: record.price,
+ app_id: record.app_id,
+ currency: record.currency
+ });
+ }

  // CRITICAL FIX: Ensure records have ${field}Ids for term mapping fields
  // Records from writeBuffer already have ${field}Ids from processTermMapping
@@ -8997,6 +9157,20 @@ class Database extends events.EventEmitter {
  }
  await this.indexManager.add(record, i);
  }
+
+ // VALIDATION: Ensure index consistency after rebuild
+ // Check that all indexed records have valid line numbers
+ const indexedRecordCount = this.indexManager.getIndexedRecordCount?.() || allData.length;
+ if (indexedRecordCount !== this.offsets.length) {
+ console.warn(`⚠️ Index inconsistency detected: indexed ${indexedRecordCount} records but offsets has ${this.offsets.length} entries`);
+ // Force consistency by setting totalLines to match offsets
+ this.indexManager.setTotalLines(this.offsets.length);
+ } else {
+ this.indexManager.setTotalLines(this.offsets.length);
+ }
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Index rebuilt with ${allData.length} records, totalLines set to ${this.offsets.length}`);
+ }
  }
  }

@@ -9016,6 +9190,22 @@ class Database extends events.EventEmitter {
  for (const deletedId of deletedIdsSnapshot) {
  this.deletedIds.delete(deletedId);
  }
+ } else if (hadDeletedRecords) {
+ // CRITICAL FIX: Even if allData is empty, clear deletedIds and rebuild index
+ // when records were deleted to ensure consistency
+ if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
+ // Clear the index since all records were deleted
+ this.indexManager.clear();
+ this.indexManager.setTotalLines(0);
+ if (this.opts.debugMode) {
+ console.log(`🧹 Cleared index after removing all ${deletedIdsSnapshot.size} deleted records`);
+ }
+ }
+
+ // Clear deletedIds even when allData is empty
+ for (const deletedId of deletedIdsSnapshot) {
+ this.deletedIds.delete(deletedId);
+ }

  // CRITICAL FIX: Ensure writeBuffer is completely cleared after successful save
  if (this.writeBuffer.length > 0) {
@@ -9077,12 +9267,21 @@ class Database extends events.EventEmitter {
  this.termManager.decrementTermCount(termId);
  }
  } else if (oldRecord[field] && Array.isArray(oldRecord[field])) {
- // Use terms to decrement (fallback for backward compatibility)
- for (const term of oldRecord[field]) {
- const termId = this.termManager.termToId.get(term);
- if (termId) {
+ // Check if field contains term IDs (numbers) or terms (strings)
+ const firstValue = oldRecord[field][0];
+ if (typeof firstValue === 'number') {
+ // Field contains term IDs (from find with restoreTerms: false)
+ for (const termId of oldRecord[field]) {
  this.termManager.decrementTermCount(termId);
  }
+ } else if (typeof firstValue === 'string') {
+ // Field contains terms (strings) - convert to term IDs
+ for (const term of oldRecord[field]) {
+ const termId = this.termManager.termToId.get(term);
+ if (termId) {
+ this.termManager.decrementTermCount(termId);
+ }
+ }
  }
  }
  }
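The fix infers the array's content type from its first element: a numeric array is treated as term IDs, a string array as raw terms that still need the termToId lookup (mixed arrays are implicitly assumed not to occur). A standalone sketch:

    function decrementAll(values, termToId, decrement) {
      const first = values[0];
      if (typeof first === 'number') {
        for (const id of values) decrement(id); // already term IDs
      } else if (typeof first === 'string') {
        for (const term of values) {
          const id = termToId.get(term); // resolve term to ID first
          if (id) decrement(id);
        }
      }
    }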
@@ -9323,6 +9522,7 @@ class Database extends events.EventEmitter {
  }

  // Apply schema enforcement - convert to array format and back to enforce schema
+ // This will discard any fields not in the schema
  const schemaEnforcedRecord = this.applySchemaEnforcement(record);

  // Don't store in this.data - only use writeBuffer and index
@@ -9505,6 +9705,42 @@ class Database extends events.EventEmitter {
  console.log(`🔍 FIND START: criteria=${JSON.stringify(criteria)}, writeBuffer=${this.writeBuffer.length}`);
  }
  try {
+ // INTEGRITY CHECK: Validate data consistency before querying
+ // Check if index and offsets are synchronized
+ if (this.indexManager && this.offsets && this.offsets.length > 0) {
+ const indexTotalLines = this.indexManager.totalLines || 0;
+ const offsetsLength = this.offsets.length;
+ if (indexTotalLines !== offsetsLength) {
+ console.warn(`⚠️ Data integrity issue detected: index.totalLines=${indexTotalLines}, offsets.length=${offsetsLength}`);
+ // Auto-correct by updating index totalLines to match offsets
+ this.indexManager.setTotalLines(offsetsLength);
+ if (this.opts.debugMode) {
+ console.log(`🔧 Auto-corrected index totalLines to ${offsetsLength}`);
+ }
+
+ // CRITICAL FIX: Also save the corrected index to prevent persistence of inconsistency
+ // This ensures the .idx.jdb file contains the correct totalLines value
+ try {
+ await this._saveIndexDataToFile();
+ if (this.opts.debugMode) {
+ console.log(`💾 Saved corrected index data to prevent future inconsistencies`);
+ }
+ } catch (error) {
+ if (this.opts.debugMode) {
+ console.warn(`⚠️ Failed to save corrected index: ${error.message}`);
+ }
+ }
+
+ // Verify the fix worked
+ const newIndexTotalLines = this.indexManager.totalLines || 0;
+ if (newIndexTotalLines === offsetsLength) {
+ console.log(`✅ Data integrity successfully corrected: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`);
+ } else {
+ console.error(`❌ Data integrity correction failed: index.totalLines=${newIndexTotalLines}, offsets.length=${offsetsLength}`);
+ }
+ }
+ }
+
  // Validate indexed query mode if enabled
  if (this.opts.indexedQueryMode === 'strict') {
  this._validateIndexedQuery(criteria, options);
@@ -9521,31 +9757,23 @@ class Database extends events.EventEmitter {
  const writeBufferResultsWithTerms = options.restoreTerms !== false ? writeBufferResults.map(record => this.restoreTermIdsAfterDeserialization(record)) : writeBufferResults;

  // Combine results, removing duplicates (writeBuffer takes precedence)
- // OPTIMIZATION: Use parallel processing for better performance when writeBuffer has many records
+ // OPTIMIZATION: Unified efficient approach with consistent precedence rules
  let allResults;
- if (writeBufferResults.length > 50) {
- // Parallel approach for large writeBuffer
- const [fileResultsSet, writeBufferSet] = await Promise.all([Promise.resolve(new Set(fileResultsWithTerms.map(r => r.id))), Promise.resolve(new Set(writeBufferResultsWithTerms.map(r => r.id)))]);

- // Merge efficiently: keep file results not in writeBuffer, then add all writeBuffer results
- const filteredFileResults = await Promise.resolve(fileResultsWithTerms.filter(r => !writeBufferSet.has(r.id)));
- allResults = [...filteredFileResults, ...writeBufferResultsWithTerms];
- } else {
- // Sequential approach for small writeBuffer (original logic)
- allResults = [...fileResultsWithTerms];
-
- // Replace file records with writeBuffer records and add new writeBuffer records
- for (const record of writeBufferResultsWithTerms) {
- const existingIndex = allResults.findIndex(r => r.id === record.id);
- if (existingIndex !== -1) {
- // Replace existing record with writeBuffer version
- allResults[existingIndex] = record;
- } else {
- // Add new record from writeBuffer
- allResults.push(record);
- }
+ // Create efficient lookup map for writeBuffer records
+ const writeBufferMap = new Map();
+ writeBufferResultsWithTerms.forEach(record => {
+ if (record && record.id) {
+ writeBufferMap.set(record.id, record);
  }
- }
+ });
+
+ // Filter file results to exclude any records that exist in writeBuffer
+ // This ensures writeBuffer always takes precedence
+ const filteredFileResults = fileResultsWithTerms.filter(record => record && record.id && !writeBufferMap.has(record.id));
+
+ // Combine results: file results (filtered) + all writeBuffer results
+ allResults = [...filteredFileResults, ...writeBufferResultsWithTerms];

  // Remove records that are marked as deleted
  const finalResults = allResults.filter(record => !this.deletedIds.has(record.id));
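The unified merge drops the old dual code path (sequential under 50 buffered records, pseudo-parallel above): one Map keyed by id, file rows filtered against it, buffer rows appended last so they always win. The same logic as a standalone function:

    function mergeResults(fileRows, bufferRows) {
      const inBuffer = new Map(bufferRows.filter(r => r && r.id).map(r => [r.id, r]));
      const keptFromFile = fileRows.filter(r => r && r.id && !inBuffer.has(r.id));
      return [...keptFromFile, ...bufferRows];
    }
    console.log(mergeResults(
      [{ id: 1, v: 'old' }, { id: 2, v: 'x' }],
      [{ id: 1, v: 'new' }]
    )); // [ { id: 2, v: 'x' }, { id: 1, v: 'new' } ]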
@@ -9793,19 +10021,6 @@ class Database extends events.EventEmitter {

  // CRITICAL FIX: Validate state before update operation
  this.validateState();
-
- // CRITICAL FIX: If there's data to save, call save() to persist it
- // Only save if there are actual records in writeBuffer
- if (this.shouldSave && this.writeBuffer.length > 0) {
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Calling save() before update - writeBuffer.length=${this.writeBuffer.length}`);
- }
- const saveStart = Date.now();
- await this.save(false); // Use save(false) since we're already in queue
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Save completed in ${Date.now() - saveStart}ms`);
- }
- }
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Starting find() - writeBuffer=${this.writeBuffer.length}`);
  }
@@ -9818,6 +10033,13 @@ class Database extends events.EventEmitter {
  console.log(`🔄 UPDATE: Find completed in ${Date.now() - findStart}ms, found ${records.length} records`);
  }
  const updatedRecords = [];
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: About to process ${records.length} records`);
+ console.log(`🔄 UPDATE: Records:`, records.map(r => ({
+ id: r.id,
+ value: r.value
+ })));
+ }
  for (const record of records) {
  const recordStart = Date.now();
  if (this.opts.debugMode) {
@@ -9828,19 +10050,12 @@ class Database extends events.EventEmitter {
  ...updateData
  };

- // CRITICAL FIX: Update schema if new fields are added during update
- // This ensures new fields (like 'status') are included in the serialized format
- if (this.serializer && this.serializer.schemaManager) {
- const newFields = Object.keys(updateData).filter(field => !record.hasOwnProperty(field));
- for (const field of newFields) {
- if (field !== 'id' && field !== '_') {
- // Skip special fields
- this.serializer.schemaManager.addField(field);
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Added new field '${field}' to schema`);
- }
- }
- }
+ // DEBUG: Log the update operation details
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: Original record ID: ${record.id}, type: ${typeof record.id}`);
+ console.log(`🔄 UPDATE: Updated record ID: ${updated.id}, type: ${typeof updated.id}`);
+ console.log(`🔄 UPDATE: Update data keys:`, Object.keys(updateData));
+ console.log(`🔄 UPDATE: Updated record keys:`, Object.keys(updated));
  }

  // Process term mapping for update
@@ -9848,6 +10063,7 @@ class Database extends events.EventEmitter {
  this.processTermMapping(updated, true, record);
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Term mapping completed in ${Date.now() - termMappingStart}ms`);
+ console.log(`🔄 UPDATE: After term mapping - ID: ${updated.id}, type: ${typeof updated.id}`);
  }

  // CRITICAL FIX: Remove old terms from index before adding new ones
@@ -9862,12 +10078,18 @@ class Database extends events.EventEmitter {
  // For records in the file, we need to ensure they are properly marked for replacement
  const index = this.writeBuffer.findIndex(r => r.id === record.id);
  let lineNumber = null;
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: writeBuffer.findIndex for ${record.id} returned ${index}`);
+ console.log(`🔄 UPDATE: writeBuffer length: ${this.writeBuffer.length}`);
+ console.log(`🔄 UPDATE: writeBuffer IDs:`, this.writeBuffer.map(r => r.id));
+ }
  if (index !== -1) {
  // Record is already in writeBuffer, update it
  this.writeBuffer[index] = updated;
  lineNumber = this._getAbsoluteLineNumber(index);
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Updated existing writeBuffer record at index ${index}`);
+ console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`);
  }
  } else {
  // Record is in file, add updated version to writeBuffer
@@ -9877,6 +10099,7 @@ class Database extends events.EventEmitter {
  lineNumber = this._getAbsoluteLineNumber(this.writeBuffer.length - 1);
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Added updated record to writeBuffer (will replace file record ${record.id})`);
+ console.log(`🔄 UPDATE: writeBuffer now has ${this.writeBuffer.length} records`);
  }
  }
  }
@@ -9912,6 +10135,26 @@ class Database extends events.EventEmitter {
  try {
  // CRITICAL FIX: Validate state before delete operation
  this.validateState();
+
+ // 🔧 NEW: Validate indexed query mode for delete operations
+ if (this.opts.indexedQueryMode === 'strict') {
+ this._validateIndexedQuery(criteria, {
+ operation: 'delete'
+ });
+ }
+
+ // ⚠️ NEW: Warn about non-indexed fields in permissive mode
+ if (this.opts.indexedQueryMode !== 'strict') {
+ const indexedFields = Object.keys(this.opts.indexes || {});
+ const queryFields = this._extractQueryFields(criteria);
+ const nonIndexedFields = queryFields.filter(field => !indexedFields.includes(field));
+ if (nonIndexedFields.length > 0) {
+ if (this.opts.debugMode) {
+ console.warn(`⚠️ Delete operation using non-indexed fields: ${nonIndexedFields.join(', ')}`);
+ console.warn(` This may be slow or fail silently. Consider indexing these fields.`);
+ }
+ }
+ }
  const records = await this.find(criteria);
  const deletedIds = [];
  if (this.opts.debugMode) {
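The permissive-mode warning hinges on comparing the fields a criteria object touches against the configured indexes. A simplified sketch of that comparison (jexidb's _extractQueryFields also walks operators such as $and and $not, which this version skips):

    function nonIndexedFields(criteria, indexes) {
      const queryFields = Object.keys(criteria).filter(f => !f.startsWith('$'));
      return queryFields.filter(f => !(f in indexes));
    }
    console.log(nonIndexedFields({ id: 7, color: 'red' }, { id: 'number' }));
    // ['color'] -> would trigger the "may be slow or fail silently" warning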
@@ -10821,64 +11064,112 @@ class Database extends events.EventEmitter {
  // Fetch actual records
  const lineNumbers = limitedEntries.map(([lineNumber]) => lineNumber);
  const scoresByLineNumber = new Map(limitedEntries);
-
- // Use getRanges and fileHandler to read records
- const ranges = this.getRanges(lineNumbers);
- const groupedRanges = await this.fileHandler.groupedRanges(ranges);
- const fs = await import('fs');
- const fd = await fs.promises.open(this.fileHandler.file, 'r');
+ const persistedCount = Array.isArray(this.offsets) ? this.offsets.length : 0;
+
+ // Separate lineNumbers into file records and writeBuffer records
+ const fileLineNumbers = [];
+ const writeBufferLineNumbers = [];
+ for (const lineNumber of lineNumbers) {
+ if (lineNumber >= persistedCount) {
+ // This lineNumber points to writeBuffer
+ writeBufferLineNumbers.push(lineNumber);
+ } else {
+ // This lineNumber points to file
+ fileLineNumbers.push(lineNumber);
+ }
+ }
  const results = [];
- try {
- for (const groupedRange of groupedRanges) {
- var _iteratorAbruptCompletion3 = false;
- var _didIteratorError3 = false;
- var _iteratorError3;
+
+ // Read records from file
+ if (fileLineNumbers.length > 0) {
+ const ranges = this.getRanges(fileLineNumbers);
+ if (ranges.length > 0) {
+ // Create a map from start offset to lineNumber for accurate mapping
+ const startToLineNumber = new Map();
+ for (const range of ranges) {
+ if (range.index !== undefined) {
+ startToLineNumber.set(range.start, range.index);
+ }
+ }
+ const groupedRanges = await this.fileHandler.groupedRanges(ranges);
+ const fs = await import('fs');
+ const fd = await fs.promises.open(this.fileHandler.file, 'r');
  try {
- for (var _iterator3 = _asyncIterator(this.fileHandler.readGroupedRange(groupedRange, fd)), _step3; _iteratorAbruptCompletion3 = !(_step3 = await _iterator3.next()).done; _iteratorAbruptCompletion3 = false) {
- const row = _step3.value;
- {
- try {
- const record = this.serializer.deserialize(row.line);
+ for (const groupedRange of groupedRanges) {
+ var _iteratorAbruptCompletion3 = false;
+ var _didIteratorError3 = false;
+ var _iteratorError3;
+ try {
+ for (var _iterator3 = _asyncIterator(this.fileHandler.readGroupedRange(groupedRange, fd)), _step3; _iteratorAbruptCompletion3 = !(_step3 = await _iterator3.next()).done; _iteratorAbruptCompletion3 = false) {
+ const row = _step3.value;
+ {
+ try {
+ const record = this.serializer.deserialize(row.line);

- // Get line number from the row
- const lineNumber = row._ || 0;
+ // Get line number from the row, fallback to start offset mapping
+ let lineNumber = row._ !== null && row._ !== undefined ? row._ : startToLineNumber.get(row.start) ?? 0;

- // Restore term IDs to terms
- const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
+ // Restore term IDs to terms
+ const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);

- // Add line number
- recordWithTerms._ = lineNumber;
+ // Add line number
+ recordWithTerms._ = lineNumber;

- // Add score if includeScore is true
- if (opts.includeScore) {
- recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
+ // Add score if includeScore is true (default is true)
+ if (opts.includeScore !== false) {
+ recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
+ }
+ results.push(recordWithTerms);
+ } catch (error) {
+ // Skip invalid lines
+ if (this.opts.debugMode) {
+ console.error('Error deserializing record in score():', error);
+ }
+ }
  }
- results.push(recordWithTerms);
- } catch (error) {
- // Skip invalid lines
- if (this.opts.debugMode) {
- console.error('Error deserializing record in score():', error);
+ }
+ } catch (err) {
+ _didIteratorError3 = true;
+ _iteratorError3 = err;
+ } finally {
+ try {
+ if (_iteratorAbruptCompletion3 && _iterator3.return != null) {
+ await _iterator3.return();
+ }
+ } finally {
+ if (_didIteratorError3) {
+ throw _iteratorError3;
  }
  }
  }
  }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
  } finally {
- try {
- if (_iteratorAbruptCompletion3 && _iterator3.return != null) {
- await _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
+ await fd.close();
+ }
+ }
+ }
+
+ // Read records from writeBuffer
+ if (writeBufferLineNumbers.length > 0 && this.writeBuffer) {
+ for (const lineNumber of writeBufferLineNumbers) {
+ const writeBufferIndex = lineNumber - persistedCount;
+ if (writeBufferIndex >= 0 && writeBufferIndex < this.writeBuffer.length) {
+ const record = this.writeBuffer[writeBufferIndex];
+ if (record) {
+ // Restore term IDs to terms
+ const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
+
+ // Add line number
+ recordWithTerms._ = lineNumber;
+
+ // Add score if includeScore is true
+ if (opts.includeScore) {
+ recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
  }
+ results.push(recordWithTerms);
  }
  }
  }
- } finally {
- await fd.close();
  }

  // Re-sort results to maintain score order (since reads might be out of order)
@@ -11251,13 +11542,24 @@ class Database extends events.EventEmitter {
11251
11542
  // Create a map of updated records for quick lookup
11252
11543
  // CRITICAL FIX: Normalize IDs to strings for consistent comparison
11253
11544
  const updatedRecordsMap = new Map();
11254
- writeBufferSnapshot.forEach(record => {
11545
+ writeBufferSnapshot.forEach((record, index) => {
11255
11546
  if (record && record.id !== undefined && record.id !== null) {
11256
11547
  // Normalize ID to string for consistent comparison
11257
11548
  const normalizedId = String(record.id);
11258
11549
  updatedRecordsMap.set(normalizedId, record);
11550
+ if (this.opts.debugMode) {
11551
+ console.log(`💾 Save: Added to updatedRecordsMap: ID=${normalizedId} (original: ${record.id}, type: ${typeof record.id}), index=${index}`);
11552
+ }
11553
+ } else if (this.opts.debugMode) {
11554
+ console.log(`⚠️ Save: Skipped record in writeBufferSnapshot[${index}] - missing or invalid ID:`, record ? {
11555
+ id: record.id,
11556
+ keys: Object.keys(record)
11557
+ } : 'null');
11259
11558
  }
11260
11559
  });
11560
+ if (this.opts.debugMode) {
11561
+ console.log(`💾 Save: updatedRecordsMap size: ${updatedRecordsMap.size}, keys:`, Array.from(updatedRecordsMap.keys()));
11562
+ }
11261
11563
 
11262
11564
  // OPTIMIZATION: Cache file stats to avoid repeated stat() calls
11263
11565
  let fileSize = 0;
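
The change above keys `updatedRecordsMap` by `String(record.id)` and adds tracing for snapshot entries skipped for lacking an ID. The normalization matters because `Map` lookups use SameValueZero equality, so a numeric ID from one code path and a string ID parsed from disk would otherwise never match. A small illustration (not package code):

```js
// 42 and "42" are distinct Map keys, so both sides must be normalized.
const updatedRecordsMap = new Map();
updatedRecordsMap.set(String(42), { id: 42, name: 'example' });

updatedRecordsMap.has(42);         // false: the numeric key was never set
updatedRecordsMap.has(String(42)); // true
```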
@@ -11405,14 +11707,30 @@ class Database extends events.EventEmitter {
11405
11707
  try {
11406
11708
  const arrayData = JSON.parse(trimmedLine);
11407
11709
  if (Array.isArray(arrayData) && arrayData.length > 0) {
11408
- // For arrays without explicit ID, use the first element as a fallback
11409
- // or try to find the ID field if it exists
11410
- if (arrayData.length > 2) {
11411
- // ID is typically at position 2 in array format [age, city, id, name]
11412
- recordId = arrayData[2];
11710
+ // CRITICAL FIX: Use schema to find ID position, not hardcoded position
11711
+ // The schema defines the order of fields in the array
11712
+ if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
11713
+ const schema = this.serializer.schemaManager.getSchema();
11714
+ const idIndex = schema.indexOf('id');
11715
+ if (idIndex !== -1 && arrayData.length > idIndex) {
11716
+ // ID is at the position defined by schema
11717
+ recordId = arrayData[idIndex];
11718
+ } else if (arrayData.length > schema.length) {
11719
+ // ID might be appended after schema fields (for backward compatibility)
11720
+ recordId = arrayData[schema.length];
11721
+ } else {
11722
+ // Fallback: use first element
11723
+ recordId = arrayData[0];
11724
+ }
11413
11725
  } else {
11414
- // For arrays without ID field, use first element as fallback
11415
- recordId = arrayData[0];
11726
+ // No schema available, try common positions
11727
+ if (arrayData.length > 2) {
11728
+ // Try position 2 (common in older formats)
11729
+ recordId = arrayData[2];
11730
+ } else {
11731
+ // Fallback: use first element
11732
+ recordId = arrayData[0];
11733
+ }
11416
11734
  }
11417
11735
  if (recordId !== undefined && recordId !== null) {
11418
11736
  recordId = String(recordId);
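
Array-serialized rows now resolve their ID through the serializer's schema (`schemaManager.getSchema()` plus `indexOf('id')`) instead of the previously hardcoded index 2, falling back first to an ID appended after the schema fields and then to the first element. A standalone sketch of that resolution order (a hypothetical helper; the real code reads `this.serializer.schemaManager` and only uses the positional guesses when no schema is initialized):

```js
function idFromArrayRow(arrayData, schema) {
  const idIndex = schema.indexOf('id');
  if (idIndex !== -1 && arrayData.length > idIndex) {
    return arrayData[idIndex];       // position defined by the schema
  }
  if (arrayData.length > schema.length) {
    return arrayData[schema.length]; // ID appended after schema fields
  }
  return arrayData[0];               // last-resort fallback
}

// With the schema named in the removed comment, ['age', 'city', 'id', 'name'],
// the ID sits at index 2:
idFromArrayRow([30, 'Lisbon', 7, 'Ana'], ['age', 'city', 'id', 'name']); // 7
```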
@@ -11460,11 +11778,30 @@ class Database extends events.EventEmitter {
11460
11778
 
11461
11779
  // CRITICAL FIX: Normalize ID to string for consistent comparison
11462
11780
  const normalizedId = String(recordWithIds.id);
11781
+ if (this.opts.debugMode) {
11782
+ console.log(`💾 Save: Checking record ID=${normalizedId} (original: ${recordWithIds.id}, type: ${typeof recordWithIds.id}) in updatedRecordsMap`);
11783
+ console.log(`💾 Save: updatedRecordsMap.has(${normalizedId}): ${updatedRecordsMap.has(normalizedId)}`);
11784
+ if (!updatedRecordsMap.has(normalizedId)) {
11785
+ console.log(`💾 Save: Record ${normalizedId} NOT found in updatedRecordsMap. Available keys:`, Array.from(updatedRecordsMap.keys()));
11786
+ }
11787
+ }
11463
11788
  if (updatedRecordsMap.has(normalizedId)) {
11464
11789
  // Replace with updated version
11465
11790
  const updatedRecord = updatedRecordsMap.get(normalizedId);
11466
11791
  if (this.opts.debugMode) {
11467
- console.log(`💾 Save: Updated record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`);
11792
+ console.log(`💾 Save: REPLACING record ${recordWithIds.id} with updated version`);
11793
+ console.log(`💾 Save: Old record:`, {
11794
+ id: recordWithIds.id,
11795
+ price: recordWithIds.price,
11796
+ app_id: recordWithIds.app_id,
11797
+ currency: recordWithIds.currency
11798
+ });
11799
+ console.log(`💾 Save: New record:`, {
11800
+ id: updatedRecord.id,
11801
+ price: updatedRecord.price,
11802
+ app_id: updatedRecord.app_id,
11803
+ currency: updatedRecord.currency
11804
+ });
11468
11805
  }
11469
11806
  return {
11470
11807
  type: 'updated',
@@ -11475,7 +11812,7 @@ class Database extends events.EventEmitter {
11475
11812
  } else if (!deletedIdsSnapshot.has(String(recordWithIds.id))) {
11476
11813
  // Keep existing record if not deleted
11477
11814
  if (this.opts.debugMode) {
11478
- console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`);
11815
+ console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - not in deletedIdsSnapshot`);
11479
11816
  }
11480
11817
  return {
11481
11818
  type: 'kept',
@@ -11486,7 +11823,9 @@ class Database extends events.EventEmitter {
11486
11823
  } else {
11487
11824
  // Skip deleted record
11488
11825
  if (this.opts.debugMode) {
11489
- console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted`);
11826
+ console.log(`💾 Save: Skipped record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'}) - deleted (found in deletedIdsSnapshot)`);
11827
+ console.log(`💾 Save: deletedIdsSnapshot contains:`, Array.from(deletedIdsSnapshot));
11828
+ console.log(`💾 Save: Record ID check: String(${recordWithIds.id}) = "${String(recordWithIds.id)}", has() = ${deletedIdsSnapshot.has(String(recordWithIds.id))}`);
11490
11829
  }
11491
11830
  return {
11492
11831
  type: 'deleted',
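
The hunk above mostly widens the debug output around an existing three-way classification: each persisted line is matched (by stringified ID) against the updated-records map and the deleted-IDs snapshot and tagged `updated`, `kept`, or `deleted`. Reduced to its decision logic, it behaves like this sketch (the real code closes over the snapshots and also carries the raw line and offset):

```js
function classifyRecord(record, updatedRecordsMap, deletedIdsSnapshot) {
  const id = String(record.id);
  if (updatedRecordsMap.has(id)) {
    // An updated copy exists in the write buffer: replace the persisted line
    return { type: 'updated', record: updatedRecordsMap.get(id) };
  }
  if (!deletedIdsSnapshot.has(id)) {
    // Not updated and not deleted: keep the existing record as-is
    return { type: 'kept', record };
  }
  // Deleted records are dropped from the rewritten file
  return { type: 'deleted' };
}
```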
@@ -11529,6 +11868,54 @@ class Database extends events.EventEmitter {
11529
11868
  const offset = parseInt(rangeKey);
11530
11869
  switch (result.type) {
11531
11870
  case 'unchanged':
11871
+ // CRITICAL FIX: Verify that unchanged records are not deleted
11872
+ // Extract ID from the line to check against deletedIdsSnapshot
11873
+ let unchangedRecordId = null;
11874
+ try {
11875
+ if (result.line.startsWith('[') && result.line.endsWith(']')) {
11876
+ const arrayData = JSON.parse(result.line);
11877
+ if (Array.isArray(arrayData) && arrayData.length > 0) {
11878
+ // CRITICAL FIX: Use schema to find ID position, not hardcoded position
11879
+ if (this.serializer && this.serializer.schemaManager && this.serializer.schemaManager.isInitialized) {
11880
+ const schema = this.serializer.schemaManager.getSchema();
11881
+ const idIndex = schema.indexOf('id');
11882
+ if (idIndex !== -1 && arrayData.length > idIndex) {
11883
+ unchangedRecordId = String(arrayData[idIndex]);
11884
+ } else if (arrayData.length > schema.length) {
11885
+ unchangedRecordId = String(arrayData[schema.length]);
11886
+ } else {
11887
+ unchangedRecordId = String(arrayData[0]);
11888
+ }
11889
+ } else {
11890
+ // No schema, try common positions
11891
+ if (arrayData.length > 2) {
11892
+ unchangedRecordId = String(arrayData[2]);
11893
+ } else {
11894
+ unchangedRecordId = String(arrayData[0]);
11895
+ }
11896
+ }
11897
+ }
11898
+ } else {
11899
+ const obj = JSON.parse(result.line);
11900
+ unchangedRecordId = obj.id ? String(obj.id) : null;
11901
+ }
11902
+ } catch (e) {
11903
+ // If we can't parse, skip this record to be safe
11904
+ if (this.opts.debugMode) {
11905
+ console.log(`💾 Save: Could not parse unchanged record to check deletion: ${e.message}`);
11906
+ }
11907
+ continue;
11908
+ }
11909
+
11910
+ // Skip if this record is deleted
11911
+ if (unchangedRecordId && deletedIdsSnapshot.has(unchangedRecordId)) {
11912
+ if (this.opts.debugMode) {
11913
+ console.log(`💾 Save: Skipping unchanged record ${unchangedRecordId} - deleted`);
11914
+ }
11915
+ deletedOffsets.add(offset);
11916
+ break;
11917
+ }
11918
+
11532
11919
  // Collect unchanged lines for batch processing
11533
11920
  unchangedLines.push(result.line);
11534
11921
  keptRecords.push({