jexidb 2.1.2 → 2.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/Database.cjs CHANGED
@@ -2257,9 +2257,9 @@ class IndexManager {
  }
  }

- /**
- * SchemaManager - Manages field schemas for optimized array-based serialization
- * This replaces the need for repeating field names in JSON objects
+ /**
+ * SchemaManager - Manages field schemas for optimized array-based serialization
+ * This replaces the need for repeating field names in JSON objects
  */
  class SchemaManager {
  constructor(opts = {}) {
@@ -2277,8 +2277,8 @@ class SchemaManager {
  this.isInitialized = false;
  }

- /**
- * Initialize schema from options or auto-detect from data
+ /**
+ * Initialize schema from options or auto-detect from data
  */
  initializeSchema(schemaOrData, autoDetect = false) {
  if (this.isInitialized && this.opts.strictSchema) {
@@ -2303,8 +2303,8 @@ class SchemaManager {
  }
  }

- /**
- * Set explicit schema
+ /**
+ * Set explicit schema
  */
  setSchema(fieldNames) {
  this.schema = [...fieldNames]; // Create copy
@@ -2319,8 +2319,8 @@ class SchemaManager {
  }
  }

- /**
- * Auto-detect schema from sample data
+ /**
+ * Auto-detect schema from sample data
  */
  autoDetectSchema(sampleData) {
  if (Array.isArray(sampleData)) {
@@ -2341,9 +2341,9 @@ class SchemaManager {
  }
  }

- /**
- * Initialize schema from database options
- * Note: schema option is no longer supported, use fields instead
+ /**
+ * Initialize schema from database options
+ * Note: schema option is no longer supported, use fields instead
  */
  initializeFromOptions(opts) {
  // Schema option is no longer supported - fields should be used instead
@@ -2351,8 +2351,8 @@ class SchemaManager {
  // Schema initialization is handled by Database.initializeSchema() using fields
  }

- /**
- * Add new field to schema (for schema evolution)
+ /**
+ * Add new field to schema (for schema evolution)
  */
  addField(fieldName) {
  if (this.fieldToIndex.has(fieldName)) {
@@ -2368,8 +2368,8 @@ class SchemaManager {
  return newIndex;
  }

- /**
- * Convert object to array using schema with strict field enforcement
+ /**
+ * Convert object to array using schema with strict field enforcement
  */
  objectToArray(obj) {
  if (!this.isInitialized || !this.opts.enableArraySerialization) {
@@ -2395,8 +2395,8 @@ class SchemaManager {
  return result;
  }

- /**
- * Convert array back to object using schema
+ /**
+ * Convert array back to object using schema
  */
  arrayToObject(arr) {
  if (!this.isInitialized || !this.opts.enableArraySerialization) {
@@ -2406,31 +2406,61 @@ class SchemaManager {
  return arr; // Don't convert non-arrays
  }
  const obj = {};
+ const idIndex = this.schema.indexOf('id');
+
+ // CRITICAL FIX: Handle schema migration where 'id' was first field in old schema
+ // but is not in current schema. Check if first element looks like an ID.
+ // Only do this if:
+ // 1. 'id' is not in current schema
+ // 2. Array has significantly more elements than current schema (2+ extra elements)
+ // This suggests the old schema had more fields, and 'id' was likely the first
+ // 3. First element is a very short string (max 20 chars) that looks like a generated ID
+ // (typically alphanumeric, often starting with letters like 'mit...' or similar patterns)
+ // 4. First field in current schema is not 'id' (to avoid false positives)
+ // 5. First element is not an array (to avoid false positives with array fields)
+ let arrayOffset = 0;
+ if (idIndex === -1 && arr.length >= this.schema.length + 2 && this.schema.length > 0) {
+ // Only apply if array has at least 2 extra elements (suggests old schema had more fields)
+ const firstElement = arr[0];
+ const firstFieldName = this.schema[0];
+
+ // Only apply shift if:
+ // - First field is not 'id'
+ // - First element is a very short string (max 20 chars) that looks like a generated ID
+ // - First element is not an array (to avoid false positives)
+ // - Array has at least 2 extra elements (strong indicator of schema migration)
+ if (firstFieldName !== 'id' && typeof firstElement === 'string' && !Array.isArray(firstElement) && firstElement.length > 0 && firstElement.length <= 20 &&
+ // Very conservative: max 20 chars (typical ID length)
+ /^[a-zA-Z0-9_-]+$/.test(firstElement)) {
+ // First element is likely the ID from old schema
+ obj.id = firstElement;
+ arrayOffset = 1;
+ }
+ }

  // Map array values to object properties
  // Only include fields that are in the schema
- for (let i = 0; i < Math.min(arr.length, this.schema.length); i++) {
+ for (let i = 0; i < Math.min(arr.length - arrayOffset, this.schema.length); i++) {
  const fieldName = this.schema[i];
+ const arrayIndex = i + arrayOffset;
  // Only include non-undefined values to avoid cluttering the object
- if (arr[i] !== undefined) {
- obj[fieldName] = arr[i];
+ if (arr[arrayIndex] !== undefined) {
+ obj[fieldName] = arr[arrayIndex];
  }
  }

  // CRITICAL FIX: Always preserve 'id' field if it exists in the original object
  // The 'id' field may not be in the schema but must be preserved
- // Check if 'id' is in the schema first
- const idIndex = this.schema.indexOf('id');
  if (idIndex !== -1 && arr[idIndex] !== undefined) {
  // 'id' is in schema and has a value
  obj.id = arr[idIndex];
- } else if (idIndex === -1 && arr.length > this.schema.length) {
+ } else if (!obj.id && arr.length > this.schema.length + arrayOffset) {
  // 'id' is not in schema but array has extra element(s) - check if last element could be ID
  // This handles cases where ID was added after schema initialization
- for (let i = this.schema.length; i < arr.length; i++) {
+ for (let i = this.schema.length + arrayOffset; i < arr.length; i++) {
  // Try to infer if this is an ID (string that looks like an ID)
  const potentialId = arr[i];
- if (potentialId !== undefined && potentialId !== null && typeof potentialId === 'string' && potentialId.length > 0) {
+ if (potentialId !== undefined && potentialId !== null && typeof potentialId === 'string' && potentialId.length > 0 && potentialId.length < 100) {
  obj.id = potentialId;
  break; // Use first potential ID found
  }
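The heart of this `arrayToObject` hunk is a conservative heuristic for rows written under an older schema whose leading element was the record id. A minimal standalone sketch, assuming the conditions named in the diff's comments; `detectLegacyIdShift` and the sample data are hypothetical:

```js
// Hypothetical standalone sketch of the ID-shift heuristic added to
// SchemaManager.arrayToObject; the conditions mirror the hunk above.
function detectLegacyIdShift(schema, arr) {
  const idIndex = schema.indexOf('id');
  // Old-schema rows carry the id as an extra leading element, so they are
  // at least two elements longer than the current schema.
  if (idIndex !== -1 || schema.length === 0 || arr.length < schema.length + 2) {
    return null;
  }
  const first = arr[0];
  const looksLikeId =
    schema[0] !== 'id' &&
    typeof first === 'string' &&
    first.length > 0 &&
    first.length <= 20 &&
    /^[a-zA-Z0-9_-]+$/.test(first);
  return looksLikeId ? first : null;
}

// Hypothetical example: the current schema dropped 'id', but an old row
// still starts with a generated id followed by extra legacy fields.
const schema = ['name', 'price'];
const oldRow = ['mitx93k', 'Widget', 9.99, 'legacy-extra'];
console.log(detectLegacyIdShift(schema, oldRow)); // 'mitx93k'
```

When the helper fires, the diff shifts every schema field one slot to the right (`arrayOffset = 1`) and preserves the detected value as `obj.id`.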
@@ -2439,43 +2469,43 @@ class SchemaManager {
  return obj;
  }

- /**
- * Get field index by name
+ /**
+ * Get field index by name
  */
  getFieldIndex(fieldName) {
  return this.fieldToIndex.get(fieldName);
  }

- /**
- * Get field name by index
+ /**
+ * Get field name by index
  */
  getFieldName(index) {
  return this.indexToField.get(index);
  }

- /**
- * Check if field exists in schema
+ /**
+ * Check if field exists in schema
  */
  hasField(fieldName) {
  return this.fieldToIndex.has(fieldName);
  }

- /**
- * Get schema as array of field names
+ /**
+ * Get schema as array of field names
  */
  getSchema() {
  return [...this.schema]; // Return copy
  }

- /**
- * Get schema size
+ /**
+ * Get schema size
  */
  getSchemaSize() {
  return this.schema.length;
  }

- /**
- * Validate that object conforms to schema
+ /**
+ * Validate that object conforms to schema
  */
  validateObject(obj) {
  if (!this.isInitialized || !this.opts.strictSchema) {
@@ -2497,8 +2527,8 @@ class SchemaManager {
  return true;
  }

- /**
- * Get schema metadata for serialization
+ /**
+ * Get schema metadata for serialization
  */
  getSchemaMetadata() {
  return {
@@ -2509,8 +2539,8 @@ class SchemaManager {
  };
  }

- /**
- * Reset schema
+ /**
+ * Reset schema
  */
  reset() {
  this.schema = [];
@@ -2520,8 +2550,8 @@ class SchemaManager {
  this.schemaVersion++;
  }

- /**
- * Get performance statistics
+ /**
+ * Get performance statistics
  */
  getStats() {
  return {
@@ -5134,9 +5164,23 @@ class QueryManager {
  }
  }

+ // Handle $not operator - include it if it can be processed by IndexManager
+ if (criteria.$not && typeof criteria.$not === 'object') {
+ // Check if $not condition contains only indexable fields
+ const notFields = Object.keys(criteria.$not);
+ const allNotFieldsIndexed = notFields.every(field => this.indexManager.opts.indexes && this.indexManager.opts.indexes[field]);
+ if (allNotFieldsIndexed && notFields.length > 0) {
+ // Extract indexable criteria from $not condition
+ const indexableNotCriteria = this._extractIndexableCriteria(criteria.$not);
+ if (Object.keys(indexableNotCriteria).length > 0) {
+ indexableCriteria.$not = indexableNotCriteria;
+ }
+ }
+ }
+
  // Handle regular field conditions
  for (const [field, condition] of Object.entries(criteria)) {
- if (field.startsWith('$')) continue; // Skip logical operators
+ if (field.startsWith('$')) continue; // Skip logical operators (already handled above)

  // RegExp conditions cannot be pre-filtered using indices
  if (condition instanceof RegExp) {
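A hedged sketch of the new `$not` pre-filtering decision: the operator is pushed down to the index only when every negated field is itself indexed. The plain `indexes` object and `extractIndexable` function below are hypothetical stand-ins for `this.indexManager.opts.indexes` and `this._extractIndexableCriteria`:

```js
// Illustrative stand-in for the new $not branch; not the library's API.
const indexes = { status: true, category: true };

function extractIndexable(criteria) {
  const out = {};
  for (const [field, condition] of Object.entries(criteria)) {
    // Logical operators and RegExp conditions cannot be index-filtered.
    if (!field.startsWith('$') && indexes[field] && !(condition instanceof RegExp)) {
      out[field] = condition;
    }
  }
  return out;
}

function buildIndexableCriteria(criteria) {
  const indexable = extractIndexable(criteria);
  if (criteria.$not && typeof criteria.$not === 'object') {
    const notFields = Object.keys(criteria.$not);
    // Push $not down only when every negated field is indexed.
    if (notFields.length > 0 && notFields.every(f => indexes[f])) {
      const indexableNot = extractIndexable(criteria.$not);
      if (Object.keys(indexableNot).length > 0) indexable.$not = indexableNot;
    }
  }
  return indexable;
}

console.log(buildIndexableCriteria({ status: 'active', $not: { category: 'beta' } }));
// → { status: 'active', $not: { category: 'beta' } }
```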
@@ -5484,47 +5528,82 @@ class QueryManager {
  // Read specific records using the line numbers
  if (lineNumbers.size > 0) {
  const lineNumbersArray = Array.from(lineNumbers);
- const ranges = this.database.getRanges(lineNumbersArray);
- const groupedRanges = await this.database.fileHandler.groupedRanges(ranges);
- const fs = await import('fs');
- const fd = await fs.promises.open(this.database.fileHandler.file, 'r');
- try {
- for (const groupedRange of groupedRanges) {
- var _iteratorAbruptCompletion2 = false;
- var _didIteratorError2 = false;
- var _iteratorError2;
+ const persistedCount = Array.isArray(this.database.offsets) ? this.database.offsets.length : 0;
+
+ // Separate lineNumbers into file records and writeBuffer records
+ const fileLineNumbers = [];
+ const writeBufferLineNumbers = [];
+ for (const lineNumber of lineNumbersArray) {
+ if (lineNumber >= persistedCount) {
+ // This lineNumber points to writeBuffer
+ writeBufferLineNumbers.push(lineNumber);
+ } else {
+ // This lineNumber points to file
+ fileLineNumbers.push(lineNumber);
+ }
+ }
+
+ // Read records from file
+ if (fileLineNumbers.length > 0) {
+ const ranges = this.database.getRanges(fileLineNumbers);
+ if (ranges.length > 0) {
+ const groupedRanges = await this.database.fileHandler.groupedRanges(ranges);
+ const fs = await import('fs');
+ const fd = await fs.promises.open(this.database.fileHandler.file, 'r');
  try {
- for (var _iterator2 = _asyncIterator(this.database.fileHandler.readGroupedRange(groupedRange, fd)), _step2; _iteratorAbruptCompletion2 = !(_step2 = await _iterator2.next()).done; _iteratorAbruptCompletion2 = false) {
- const row = _step2.value;
- {
+ for (const groupedRange of groupedRanges) {
+ var _iteratorAbruptCompletion2 = false;
+ var _didIteratorError2 = false;
+ var _iteratorError2;
+ try {
+ for (var _iterator2 = _asyncIterator(this.database.fileHandler.readGroupedRange(groupedRange, fd)), _step2; _iteratorAbruptCompletion2 = !(_step2 = await _iterator2.next()).done; _iteratorAbruptCompletion2 = false) {
+ const row = _step2.value;
+ {
+ try {
+ const record = this.database.serializer.deserialize(row.line);
+ const recordWithTerms = options.restoreTerms !== false ? this.database.restoreTermIdsAfterDeserialization(record) : record;
+ results.push(recordWithTerms);
+ if (limit && results.length >= limit) break;
+ } catch (error) {
+ // Skip invalid lines
+ }
+ }
+ }
+ } catch (err) {
+ _didIteratorError2 = true;
+ _iteratorError2 = err;
+ } finally {
  try {
- const record = this.database.serializer.deserialize(row.line);
- const recordWithTerms = options.restoreTerms !== false ? this.database.restoreTermIdsAfterDeserialization(record) : record;
- results.push(recordWithTerms);
- if (limit && results.length >= limit) break;
- } catch (error) {
- // Skip invalid lines
+ if (_iteratorAbruptCompletion2 && _iterator2.return != null) {
+ await _iterator2.return();
+ }
+ } finally {
+ if (_didIteratorError2) {
+ throw _iteratorError2;
+ }
  }
  }
+ if (limit && results.length >= limit) break;
  }
- } catch (err) {
- _didIteratorError2 = true;
- _iteratorError2 = err;
  } finally {
- try {
- if (_iteratorAbruptCompletion2 && _iterator2.return != null) {
- await _iterator2.return();
- }
- } finally {
- if (_didIteratorError2) {
- throw _iteratorError2;
- }
- }
+ await fd.close();
  }
+ }
+ }
+
+ // Read records from writeBuffer
+ if (writeBufferLineNumbers.length > 0 && this.database.writeBuffer) {
+ for (const lineNumber of writeBufferLineNumbers) {
  if (limit && results.length >= limit) break;
+ const writeBufferIndex = lineNumber - persistedCount;
+ if (writeBufferIndex >= 0 && writeBufferIndex < this.database.writeBuffer.length) {
+ const record = this.database.writeBuffer[writeBufferIndex];
+ if (record) {
+ const recordWithTerms = options.restoreTerms !== false ? this.database.restoreTermIdsAfterDeserialization(record) : record;
+ results.push(recordWithTerms);
+ }
+ }
  }
- } finally {
- await fd.close();
  }
  }
  if (options.orderBy) {
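The core idea of this refactor, as a minimal sketch: line numbers at or beyond the count of persisted offsets address the in-memory `writeBuffer` rather than the data file. The names follow the diff; the sample values are hypothetical:

```js
// Minimal sketch of the lineNumber partition introduced above: entries
// below persistedCount live in the .jdb file, the rest index into
// writeBuffer at (lineNumber - persistedCount).
function partitionLineNumbers(lineNumbers, persistedCount) {
  const fileLineNumbers = [];
  const writeBufferLineNumbers = [];
  for (const n of lineNumbers) {
    (n >= persistedCount ? writeBufferLineNumbers : fileLineNumbers).push(n);
  }
  return { fileLineNumbers, writeBufferLineNumbers };
}

// Hypothetical example: 5 records persisted, 2 still buffered in memory.
const { fileLineNumbers, writeBufferLineNumbers } =
  partitionLineNumbers([0, 3, 5, 6], 5);
console.log(fileLineNumbers);        // [0, 3] → read from file via getRanges
console.log(writeBufferLineNumbers); // [5, 6] → writeBuffer[0], writeBuffer[1]
```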
@@ -5944,8 +6023,8 @@ class QueryManager {
  }
  }
  const allFieldsIndexed = Object.keys(criteria).every(field => {
- // Skip $and as it's handled separately above
- if (field === '$and') return true;
+ // Skip $and and $not as they're handled separately above
+ if (field === '$and' || field === '$not') return true;
  if (!this.opts.indexes || !this.opts.indexes[field]) {
  if (this.opts.debugMode) {
  console.log(`🔍 Field '${field}' not indexed. Available indexes:`, Object.keys(this.opts.indexes || {}));
@@ -8654,39 +8733,21 @@ class Database extends events.EventEmitter {
  this.pendingIndexUpdates = [];
  }

- // CRITICAL FIX: Flush write buffer completely after capturing snapshot
- await this._flushWriteBufferCompletely();
-
- // CRITICAL FIX: Wait for all I/O operations to complete before clearing writeBuffer
- await this._waitForIOCompletion();
-
- // CRITICAL FIX: Verify write buffer is empty after I/O completion
- // But allow for ongoing insertions during high-volume scenarios
- if (this.writeBuffer.length > 0) {
- if (this.opts.debugMode) {
- console.log(`💾 Save: WriteBuffer still has ${this.writeBuffer.length} items after flush - this may indicate ongoing insertions`);
- }
-
- // If we have a reasonable number of items, continue processing
- if (this.writeBuffer.length < 10000) {
- // Reasonable threshold
- if (this.opts.debugMode) {
- console.log(`💾 Save: Continuing to process remaining ${this.writeBuffer.length} items`);
- }
- // Continue with the save process - the remaining items will be included in the final save
- } else {
- // Too many items remaining - likely a real problem
- throw new Error(`WriteBuffer has too many items after flush: ${this.writeBuffer.length} items remaining (threshold: 10000)`);
- }
+ // CRITICAL FIX: DO NOT flush writeBuffer before processing existing records
+ // This prevents duplicating updated records in the file.
+ // The _streamExistingRecords() will handle replacing old records with updated ones from writeBufferSnapshot.
+ // After processing, all records (existing + updated + new) will be written to file in one operation.
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: writeBufferSnapshot captured with ${writeBufferSnapshot.length} records (will be processed with existing records)`);
  }

  // OPTIMIZATION: Parallel operations - cleanup and data preparation
  let allData = [];
  let orphanedCount = 0;

- // Check if there are new records to save (after flush, writeBuffer should be empty)
- // CRITICAL FIX: Also check writeBufferSnapshot.length > 0 to handle updates/deletes
- // that were in writeBuffer before flush but are now in snapshot
+ // Check if there are records to save from writeBufferSnapshot
+ // CRITICAL FIX: Process writeBufferSnapshot records (both new and updated) with existing records
+ // Updated records will replace old ones via _streamExistingRecords, new records will be added
  if (this.opts.debugMode) {
  console.log(`💾 Save: writeBuffer.length=${this.writeBuffer.length}, writeBufferSnapshot.length=${writeBufferSnapshot.length}`);
  }
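Why the flush was removed, sketched under stated assumptions: `save()` now works from a snapshot, replaces updated records while streaming the existing file, and writes everything in one batch, so flushing first would have appended records that the stream then emitted again. The in-memory `db` below is a hypothetical stand-in for the data file plus `_streamExistingRecords`:

```js
// Hypothetical condensed view of the new save() ordering.
async function save(db) {
  const snapshot = [...db.writeBuffer]; // capture; deliberately NOT flushed
  // Stream existing records, swapping in snapshot versions for matching ids
  // (these are updates).
  const byId = new Map(snapshot.map(r => [String(r.id), r]));
  const existing = db.fileRecords.map(r => byId.get(String(r.id)) ?? r);
  const existingIds = new Set(db.fileRecords.map(r => String(r.id)));
  const newOnes = snapshot.filter(r => !existingIds.has(String(r.id)));
  db.fileRecords = [...existing, ...newOnes]; // one write, no duplicates
  db.writeBuffer.length = 0;                  // clear only after success
}

const db = {
  fileRecords: [{ id: 1, price: 10 }],
  writeBuffer: [{ id: 1, price: 12 }, { id: 2, price: 5 }],
};
save(db).then(() => console.log(db.fileRecords));
// → [ { id: 1, price: 12 }, { id: 2, price: 5 } ]
```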
@@ -8729,11 +8790,60 @@ class Database extends events.EventEmitter {
  // CRITICAL FIX: Normalize IDs to strings for consistent comparison
  const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => String(r.id)));

+ // CRITICAL FIX: Create a map of records in existingRecords by ID for comparison
+ const existingRecordsById = new Map();
+ existingRecords.forEach(r => {
+ if (r && r.id) {
+ existingRecordsById.set(String(r.id), r);
+ }
+ });
+
  // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
+ // CRITICAL FIX: Also ensure that if an updated record wasn't properly replaced, we replace it now
  for (const record of writeBufferSnapshot) {
- if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(String(record.id))) {
+ if (!record || !record.id) continue;
+ if (deletedIdsSnapshot.has(String(record.id))) continue;
+ const recordIdStr = String(record.id);
+ const existingRecord = existingRecordsById.get(recordIdStr);
+ if (!existingRecord) {
  // This is a new record, not an update
  allData.push(record);
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Adding NEW record to allData:`, {
+ id: recordIdStr,
+ price: record.price,
+ app_id: record.app_id,
+ currency: record.currency
+ });
+ }
+ } else {
+ // This is an update - verify that existingRecords contains the updated version
+ // If not, replace it (this handles edge cases where substitution might have failed)
+ const existingIndex = allData.findIndex(r => r && r.id && String(r.id) === recordIdStr);
+ if (existingIndex !== -1) {
+ // Verify if the existing record is actually the updated one
+ // Compare key fields to detect if replacement is needed
+ const needsReplacement = JSON.stringify(allData[existingIndex]) !== JSON.stringify(record);
+ if (needsReplacement) {
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: REPLACING existing record with updated version in allData:`, {
+ old: {
+ id: String(allData[existingIndex].id),
+ price: allData[existingIndex].price
+ },
+ new: {
+ id: recordIdStr,
+ price: record.price
+ }
+ });
+ }
+ allData[existingIndex] = record;
+ } else if (this.opts.debugMode) {
+ console.log(`💾 Save: Record already correctly updated in allData:`, {
+ id: recordIdStr
+ });
+ }
+ }
+ }
  }
  }
  }));
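The safety net in this hunk decides whether a replacement is still needed by comparing serialized forms. A minimal sketch; note that `JSON.stringify` equality is key-order sensitive, which the diff implicitly assumes is safe because both records come from the same construction path:

```js
// Minimal sketch of the replace-if-different safety net added above.
function ensureUpdated(allData, record) {
  const idStr = String(record.id);
  const i = allData.findIndex(r => r && String(r.id) === idStr);
  if (i === -1) return false;
  if (JSON.stringify(allData[i]) !== JSON.stringify(record)) {
    allData[i] = record; // a stale copy slipped through — replace it
    return true;
  }
  return false; // already the updated version
}

const allData = [{ id: '7', price: 10 }];
console.log(ensureUpdated(allData, { id: 7, price: 12 })); // true (replaced)
console.log(allData[0].price); // 12 — note '7' vs 7 still matches via String()
```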
@@ -8780,30 +8890,88 @@ class Database extends events.EventEmitter {
  allData = [...existingRecords];

  // OPTIMIZATION: Use Set for faster lookups of existing record IDs
- const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => r.id));
+ // CRITICAL FIX: Normalize IDs to strings for consistent comparison
+ const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => String(r.id)));
+
+ // CRITICAL FIX: Create a map of records in existingRecords by ID for comparison
+ const existingRecordsById = new Map();
+ existingRecords.forEach(r => {
+ if (r && r.id) {
+ existingRecordsById.set(String(r.id), r);
+ }
+ });

  // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
+ // CRITICAL FIX: Also ensure that if an updated record wasn't properly replaced, we replace it now
  for (const record of writeBufferSnapshot) {
- if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(record.id)) {
+ if (!record || !record.id) continue;
+ if (deletedIdsSnapshot.has(String(record.id))) continue;
+ const recordIdStr = String(record.id);
+ const existingRecord = existingRecordsById.get(recordIdStr);
+ if (!existingRecord) {
  // This is a new record, not an update
  allData.push(record);
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Adding NEW record to allData:`, {
+ id: recordIdStr,
+ price: record.price,
+ app_id: record.app_id,
+ currency: record.currency
+ });
+ }
+ } else {
+ // This is an update - verify that existingRecords contains the updated version
+ // If not, replace it (this handles edge cases where substitution might have failed)
+ const existingIndex = allData.findIndex(r => r && r.id && String(r.id) === recordIdStr);
+ if (existingIndex !== -1) {
+ // Verify if the existing record is actually the updated one
+ // Compare key fields to detect if replacement is needed
+ const needsReplacement = JSON.stringify(allData[existingIndex]) !== JSON.stringify(record);
+ if (needsReplacement) {
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: REPLACING existing record with updated version in allData:`, {
+ old: {
+ id: String(allData[existingIndex].id),
+ price: allData[existingIndex].price
+ },
+ new: {
+ id: recordIdStr,
+ price: record.price
+ }
+ });
+ }
+ allData[existingIndex] = record;
+ } else if (this.opts.debugMode) {
+ console.log(`💾 Save: Record already correctly updated in allData:`, {
+ id: recordIdStr
+ });
+ }
+ }
+ }
  }
  }
  if (this.opts.debugMode) {
  const updatedCount = writeBufferSnapshot.filter(r => r && r.id && existingRecordIds.has(String(r.id))).length;
  const newCount = writeBufferSnapshot.filter(r => r && r.id && !existingRecordIds.has(String(r.id))).length;
  console.log(`💾 Save: Combined data - existingRecords: ${existingRecords.length}, updatedFromBuffer: ${updatedCount}, newFromBuffer: ${newCount}, total: ${allData.length}`);
- console.log(`💾 Save: WriteBuffer record IDs:`, writeBufferSnapshot.map(r => r && r.id ? r.id : 'no-id'));
+ console.log(`💾 Save: WriteBuffer record IDs:`, writeBufferSnapshot.map(r => r && r.id ? String(r.id) : 'no-id'));
  console.log(`💾 Save: Existing record IDs:`, Array.from(existingRecordIds));
+ console.log(`💾 Save: All records in allData:`, allData.map(r => r && r.id ? {
+ id: String(r.id),
+ price: r.price,
+ app_id: r.app_id,
+ currency: r.currency
+ } : 'no-id'));
  console.log(`💾 Save: Sample existing record:`, existingRecords[0] ? {
- id: existingRecords[0].id,
- name: existingRecords[0].name,
- tags: existingRecords[0].tags
+ id: String(existingRecords[0].id),
+ price: existingRecords[0].price,
+ app_id: existingRecords[0].app_id,
+ currency: existingRecords[0].currency
  } : 'null');
  console.log(`💾 Save: Sample writeBuffer record:`, writeBufferSnapshot[0] ? {
- id: writeBufferSnapshot[0].id,
- name: writeBufferSnapshot[0].name,
- tags: writeBufferSnapshot[0].tags
+ id: String(writeBufferSnapshot[0].id),
+ price: writeBufferSnapshot[0].price,
+ app_id: writeBufferSnapshot[0].app_id,
+ currency: writeBufferSnapshot[0].currency
  } : 'null');
  }
  }).catch(error => {
@@ -8874,6 +9042,12 @@ class Database extends events.EventEmitter {
  });
  if (this.opts.debugMode) {
  console.log(`💾 Save: allData.length=${allData.length}, cleanedData.length=${cleanedData.length}`);
+ console.log(`💾 Save: All records in allData before serialization:`, allData.map(r => r && r.id ? {
+ id: String(r.id),
+ price: r.price,
+ app_id: r.app_id,
+ currency: r.currency
+ } : 'no-id'));
  console.log(`💾 Save: Sample cleaned record:`, cleanedData[0] ? Object.keys(cleanedData[0]) : 'null');
  }
  const jsonlData = cleanedData.length > 0 ? this.serializer.serializeBatch(cleanedData) : '';
@@ -8881,6 +9055,12 @@ class Database extends events.EventEmitter {
  const lines = jsonlString.split('\n').filter(line => line.trim());
  if (this.opts.debugMode) {
  console.log(`💾 Save: Serialized ${lines.length} lines`);
+ console.log(`💾 Save: All records in allData after serialization check:`, allData.map(r => r && r.id ? {
+ id: String(r.id),
+ price: r.price,
+ app_id: r.app_id,
+ currency: r.currency
+ } : 'no-id'));
  if (lines.length > 0) {
  console.log(`💾 Save: First line (first 200 chars):`, lines[0].substring(0, 200));
  }
@@ -8901,51 +9081,9 @@ class Database extends events.EventEmitter {
  console.log(`💾 Save: Calculated indexOffset: ${this.indexOffset}, allData.length: ${allData.length}`);
  }

- // OPTIMIZATION: Parallel operations - file writing and index data preparation
- const parallelWriteOperations = [];
-
- // Add main file write operation
- parallelWriteOperations.push(this.fileHandler.writeBatch([jsonlData]));
-
- // Add index file operations - ALWAYS save offsets, even without indexed fields
- if (this.indexManager) {
- const idxPath = this.normalizedFile.replace('.jdb', '.idx.jdb');
-
- // OPTIMIZATION: Parallel data preparation
- const indexDataPromise = Promise.resolve({
- index: this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0 ? this.indexManager.toJSON() : {},
- offsets: this.offsets,
- // Save actual offsets for efficient file operations
- indexOffset: this.indexOffset // Save file size for proper range calculations
- });
-
- // Add term mapping data if needed
- const termMappingFields = this.getTermMappingFields();
- if (termMappingFields.length > 0 && this.termManager) {
- const termDataPromise = this.termManager.saveTerms();
-
- // Combine index data and term data
- const combinedDataPromise = Promise.all([indexDataPromise, termDataPromise]).then(([indexData, termData]) => {
- indexData.termMapping = termData;
- return indexData;
- });
-
- // Add index file write operation
- parallelWriteOperations.push(combinedDataPromise.then(indexData => {
- const idxFileHandler = new FileHandler(idxPath, this.fileMutex, this.opts);
- return idxFileHandler.writeAll(JSON.stringify(indexData, null, 2));
- }));
- } else {
- // Add index file write operation without term mapping
- parallelWriteOperations.push(indexDataPromise.then(indexData => {
- const idxFileHandler = new FileHandler(idxPath, this.fileMutex, this.opts);
- return idxFileHandler.writeAll(JSON.stringify(indexData, null, 2));
- }));
- }
- }
-
- // Execute parallel write operations
- await Promise.all(parallelWriteOperations);
+ // CRITICAL FIX: Write main data file first
+ // Index will be saved AFTER reconstruction to ensure it contains correct data
+ await this.fileHandler.writeBatch([jsonlData]);
  if (this.opts.debugMode) {
  console.log(`💾 Saved ${allData.length} records to ${this.normalizedFile}`);
  }
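The parallel write is replaced with a strict ordering, apparently so the index file can be written only after the rebuild below recomputes line numbers. A hypothetical sketch of that ordering, with in-memory stand-ins for the FileHandler-backed writes:

```js
// Hypothetical sketch: writing the index alongside the data file (old
// behavior) could persist line numbers computed before the index rebuild;
// serializing the steps keeps both files consistent.
async function persist(db, records) {
  await db.writeDataFile(records.map(r => JSON.stringify(r)).join('\n'));
  // Rebuild so each index entry points at the record's new line number.
  db.index = new Map(records.map((r, line) => [String(r.id), line]));
  await db.writeIndexFile(JSON.stringify([...db.index]));
}

const db = {
  async writeDataFile(s) { this.dataFile = s; },
  async writeIndexFile(s) { this.idxFile = s; },
};
persist(db, [{ id: 'a' }, { id: 'b' }]).then(() => console.log(db.idxFile));
// → [["a",0],["b",1]]
```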
@@ -8957,20 +9095,39 @@ class Database extends events.EventEmitter {

  // Clear writeBuffer and deletedIds after successful save only if we had data to save
  if (allData.length > 0) {
- // Rebuild index when records were deleted to maintain consistency
+ // Rebuild index when records were deleted or updated to maintain consistency
  const hadDeletedRecords = deletedIdsSnapshot.size > 0;
+ const hadUpdatedRecords = writeBufferSnapshot.length > 0;
  if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
- if (hadDeletedRecords) {
- // Clear the index and rebuild it from the remaining records
+ if (hadDeletedRecords || hadUpdatedRecords) {
+ // Clear the index and rebuild it from the saved records
+ // This ensures that lineNumbers point to the correct positions in the file
  this.indexManager.clear();
  if (this.opts.debugMode) {
- console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records`);
+ if (hadDeletedRecords && hadUpdatedRecords) {
+ console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records and updating ${writeBufferSnapshot.length} records`);
+ } else if (hadDeletedRecords) {
+ console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records`);
+ } else {
+ console.log(`🧹 Rebuilding index after updating ${writeBufferSnapshot.length} records`);
+ }
  }

  // Rebuild index from the saved records
  // CRITICAL: Process term mapping for records loaded from file to ensure ${field}Ids are available
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Rebuilding index from ${allData.length} records in allData`);
+ }
  for (let i = 0; i < allData.length; i++) {
  let record = allData[i];
+ if (this.opts.debugMode && i < 3) {
+ console.log(`💾 Save: Rebuilding index record[${i}]:`, {
+ id: String(record.id),
+ price: record.price,
+ app_id: record.app_id,
+ currency: record.currency
+ });
+ }

  // CRITICAL FIX: Ensure records have ${field}Ids for term mapping fields
  // Records from writeBuffer already have ${field}Ids from processTermMapping
@@ -8997,6 +9154,9 @@ class Database extends events.EventEmitter {
  }
  await this.indexManager.add(record, i);
  }
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Index rebuilt with ${allData.length} records`);
+ }
  }
  }

@@ -9077,12 +9237,21 @@ class Database extends events.EventEmitter {
  this.termManager.decrementTermCount(termId);
  }
  } else if (oldRecord[field] && Array.isArray(oldRecord[field])) {
- // Use terms to decrement (fallback for backward compatibility)
- for (const term of oldRecord[field]) {
- const termId = this.termManager.termToId.get(term);
- if (termId) {
+ // Check if field contains term IDs (numbers) or terms (strings)
+ const firstValue = oldRecord[field][0];
+ if (typeof firstValue === 'number') {
+ // Field contains term IDs (from find with restoreTerms: false)
+ for (const termId of oldRecord[field]) {
  this.termManager.decrementTermCount(termId);
  }
+ } else if (typeof firstValue === 'string') {
+ // Field contains terms (strings) - convert to term IDs
+ for (const term of oldRecord[field]) {
+ const termId = this.termManager.termToId.get(term);
+ if (termId) {
+ this.termManager.decrementTermCount(termId);
+ }
+ }
  }
  }
  }
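A standalone sketch of the number-vs-string dispatch this hunk adds; `termToId` and `counts` are hypothetical stand-ins for TermManager state, and inspecting only the first element assumes homogeneous arrays, as the diff does:

```js
// Sketch of the new dispatch: arrays may hold numeric term IDs (records
// fetched with restoreTerms: false) or term strings.
const termToId = new Map([['red', 1], ['blue', 2]]);
const counts = new Map([[1, 3], [2, 1]]);

function decrementTerms(values) {
  if (!Array.isArray(values) || values.length === 0) return;
  if (typeof values[0] === 'number') {
    for (const id of values) counts.set(id, (counts.get(id) || 0) - 1);
  } else if (typeof values[0] === 'string') {
    for (const term of values) {
      const id = termToId.get(term);
      if (id) counts.set(id, (counts.get(id) || 0) - 1);
    }
  }
}

decrementTerms([1, 2]);   // numeric term IDs
decrementTerms(['red']);  // term strings
console.log([...counts]); // [[1, 1], [2, 0]]
```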
@@ -9323,6 +9492,7 @@ class Database extends events.EventEmitter {
  }

  // Apply schema enforcement - convert to array format and back to enforce schema
+ // This will discard any fields not in the schema
  const schemaEnforcedRecord = this.applySchemaEnforcement(record);

  // Don't store in this.data - only use writeBuffer and index
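The clarified comment makes the contract explicit: schema enforcement is lossy by design. A minimal illustration of the object → array → object round trip with hypothetical data (the real path goes through SchemaManager's `objectToArray`/`arrayToObject`, with `id` preserved separately):

```js
// Minimal illustration: fields outside the schema are dropped by the
// round trip. Hypothetical schema and record.
const schema = ['name', 'price'];
const toArray = obj => schema.map(f => obj[f]);
const toObject = arr =>
  Object.fromEntries(
    schema.map((f, i) => [f, arr[i]]).filter(([, v]) => v !== undefined)
  );

const record = { id: 'x1', name: 'Widget', price: 9.99, internalFlag: true };
console.log(toObject(toArray(record)));
// → { name: 'Widget', price: 9.99 } — internalFlag discarded, id handled separately
```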
@@ -9828,19 +9998,12 @@ class Database extends events.EventEmitter {
  ...updateData
  };

- // CRITICAL FIX: Update schema if new fields are added during update
- // This ensures new fields (like 'status') are included in the serialized format
- if (this.serializer && this.serializer.schemaManager) {
- const newFields = Object.keys(updateData).filter(field => !record.hasOwnProperty(field));
- for (const field of newFields) {
- if (field !== 'id' && field !== '_') {
- // Skip special fields
- this.serializer.schemaManager.addField(field);
- if (this.opts.debugMode) {
- console.log(`🔄 UPDATE: Added new field '${field}' to schema`);
- }
- }
- }
+ // DEBUG: Log the update operation details
+ if (this.opts.debugMode) {
+ console.log(`🔄 UPDATE: Original record ID: ${record.id}, type: ${typeof record.id}`);
+ console.log(`🔄 UPDATE: Updated record ID: ${updated.id}, type: ${typeof updated.id}`);
+ console.log(`🔄 UPDATE: Update data keys:`, Object.keys(updateData));
+ console.log(`🔄 UPDATE: Updated record keys:`, Object.keys(updated));
  }

  // Process term mapping for update
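This hunk removes the automatic `schemaManager.addField` call during updates, so fields introduced only by `updateData` no longer grow the schema; combined with the schema enforcement noted earlier, such fields would now be dropped at serialization time (an inference from the two hunks, not stated in the diff). For contrast, a tiny sketch of the removed field-collection step:

```js
// Contrast sketch (hypothetical): the removed branch grew the schema for
// every new non-special field found in updateData.
function collectNewSchemaFields(record, updateData) {
  return Object.keys(updateData).filter(
    f => !Object.prototype.hasOwnProperty.call(record, f) && f !== 'id' && f !== '_'
  );
}

console.log(collectNewSchemaFields({ id: 1, price: 10 }, { price: 12, status: 'ok' }));
// → ['status'] — previously passed to schemaManager.addField, now no longer
```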
@@ -9848,6 +10011,7 @@ class Database extends events.EventEmitter {
  this.processTermMapping(updated, true, record);
  if (this.opts.debugMode) {
  console.log(`🔄 UPDATE: Term mapping completed in ${Date.now() - termMappingStart}ms`);
+ console.log(`🔄 UPDATE: After term mapping - ID: ${updated.id}, type: ${typeof updated.id}`);
  }

  // CRITICAL FIX: Remove old terms from index before adding new ones
@@ -10821,64 +10985,112 @@ class Database extends events.EventEmitter {
  // Fetch actual records
  const lineNumbers = limitedEntries.map(([lineNumber]) => lineNumber);
  const scoresByLineNumber = new Map(limitedEntries);
-
- // Use getRanges and fileHandler to read records
- const ranges = this.getRanges(lineNumbers);
- const groupedRanges = await this.fileHandler.groupedRanges(ranges);
- const fs = await import('fs');
- const fd = await fs.promises.open(this.fileHandler.file, 'r');
+ const persistedCount = Array.isArray(this.offsets) ? this.offsets.length : 0;
+
+ // Separate lineNumbers into file records and writeBuffer records
+ const fileLineNumbers = [];
+ const writeBufferLineNumbers = [];
+ for (const lineNumber of lineNumbers) {
+ if (lineNumber >= persistedCount) {
+ // This lineNumber points to writeBuffer
+ writeBufferLineNumbers.push(lineNumber);
+ } else {
+ // This lineNumber points to file
+ fileLineNumbers.push(lineNumber);
+ }
+ }
  const results = [];
- try {
- for (const groupedRange of groupedRanges) {
- var _iteratorAbruptCompletion3 = false;
- var _didIteratorError3 = false;
- var _iteratorError3;
+
+ // Read records from file
+ if (fileLineNumbers.length > 0) {
+ const ranges = this.getRanges(fileLineNumbers);
+ if (ranges.length > 0) {
+ // Create a map from start offset to lineNumber for accurate mapping
+ const startToLineNumber = new Map();
+ for (const range of ranges) {
+ if (range.index !== undefined) {
+ startToLineNumber.set(range.start, range.index);
+ }
+ }
+ const groupedRanges = await this.fileHandler.groupedRanges(ranges);
+ const fs = await import('fs');
+ const fd = await fs.promises.open(this.fileHandler.file, 'r');
  try {
- for (var _iterator3 = _asyncIterator(this.fileHandler.readGroupedRange(groupedRange, fd)), _step3; _iteratorAbruptCompletion3 = !(_step3 = await _iterator3.next()).done; _iteratorAbruptCompletion3 = false) {
- const row = _step3.value;
- {
- try {
- const record = this.serializer.deserialize(row.line);
+ for (const groupedRange of groupedRanges) {
+ var _iteratorAbruptCompletion3 = false;
+ var _didIteratorError3 = false;
+ var _iteratorError3;
+ try {
+ for (var _iterator3 = _asyncIterator(this.fileHandler.readGroupedRange(groupedRange, fd)), _step3; _iteratorAbruptCompletion3 = !(_step3 = await _iterator3.next()).done; _iteratorAbruptCompletion3 = false) {
+ const row = _step3.value;
+ {
+ try {
+ const record = this.serializer.deserialize(row.line);

- // Get line number from the row
- const lineNumber = row._ || 0;
+ // Get line number from the row, fallback to start offset mapping
+ let lineNumber = row._ !== null && row._ !== undefined ? row._ : startToLineNumber.get(row.start) ?? 0;

- // Restore term IDs to terms
- const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
+ // Restore term IDs to terms
+ const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);

- // Add line number
- recordWithTerms._ = lineNumber;
+ // Add line number
+ recordWithTerms._ = lineNumber;

- // Add score if includeScore is true
- if (opts.includeScore) {
- recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
+ // Add score if includeScore is true (default is true)
+ if (opts.includeScore !== false) {
+ recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
+ }
+ results.push(recordWithTerms);
+ } catch (error) {
+ // Skip invalid lines
+ if (this.opts.debugMode) {
+ console.error('Error deserializing record in score():', error);
+ }
+ }
  }
- results.push(recordWithTerms);
- } catch (error) {
- // Skip invalid lines
- if (this.opts.debugMode) {
- console.error('Error deserializing record in score():', error);
+ }
+ } catch (err) {
+ _didIteratorError3 = true;
+ _iteratorError3 = err;
+ } finally {
+ try {
+ if (_iteratorAbruptCompletion3 && _iterator3.return != null) {
+ await _iterator3.return();
+ }
+ } finally {
+ if (_didIteratorError3) {
+ throw _iteratorError3;
  }
  }
  }
  }
- } catch (err) {
- _didIteratorError3 = true;
- _iteratorError3 = err;
  } finally {
- try {
- if (_iteratorAbruptCompletion3 && _iterator3.return != null) {
- await _iterator3.return();
- }
- } finally {
- if (_didIteratorError3) {
- throw _iteratorError3;
+ await fd.close();
+ }
+ }
+ }
+
+ // Read records from writeBuffer
+ if (writeBufferLineNumbers.length > 0 && this.writeBuffer) {
+ for (const lineNumber of writeBufferLineNumbers) {
+ const writeBufferIndex = lineNumber - persistedCount;
+ if (writeBufferIndex >= 0 && writeBufferIndex < this.writeBuffer.length) {
+ const record = this.writeBuffer[writeBufferIndex];
+ if (record) {
+ // Restore term IDs to terms
+ const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
+
+ // Add line number
+ recordWithTerms._ = lineNumber;
+
+ // Add score if includeScore is true
+ if (opts.includeScore) {
+ recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
  }
+ results.push(recordWithTerms);
  }
  }
  }
- } finally {
- await fd.close();
  }

  // Re-sort results to maintain score order (since reads might be out of order)
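One subtle behavioral note from this hunk: for file reads, `includeScore` is now on by default (`!== false`), while the new writeBuffer branch keeps the old truthy check. A small sketch of the file-read variant; the harness is hypothetical:

```js
// Sketch of the includeScore default change for file reads in score():
// only an explicit false now suppresses the score.
function attachScore(record, lineNumber, scores, opts = {}) {
  if (opts.includeScore !== false) {
    record.score = scores.get(lineNumber) || 0;
  }
  return record;
}

const scores = new Map([[0, 0.9]]);
console.log(attachScore({ id: 'a' }, 0, scores));
// → { id: 'a', score: 0.9 } (default: score included)
console.log(attachScore({ id: 'a' }, 0, scores, { includeScore: false }));
// → { id: 'a' } (explicit opt-out)
```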
@@ -11251,13 +11463,24 @@ class Database extends events.EventEmitter {
  // Create a map of updated records for quick lookup
  // CRITICAL FIX: Normalize IDs to strings for consistent comparison
  const updatedRecordsMap = new Map();
- writeBufferSnapshot.forEach(record => {
+ writeBufferSnapshot.forEach((record, index) => {
  if (record && record.id !== undefined && record.id !== null) {
  // Normalize ID to string for consistent comparison
  const normalizedId = String(record.id);
  updatedRecordsMap.set(normalizedId, record);
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Added to updatedRecordsMap: ID=${normalizedId} (original: ${record.id}, type: ${typeof record.id}), index=${index}`);
+ }
+ } else if (this.opts.debugMode) {
+ console.log(`⚠️ Save: Skipped record in writeBufferSnapshot[${index}] - missing or invalid ID:`, record ? {
+ id: record.id,
+ keys: Object.keys(record)
+ } : 'null');
  }
  });
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: updatedRecordsMap size: ${updatedRecordsMap.size}, keys:`, Array.from(updatedRecordsMap.keys()));
+ }

  // OPTIMIZATION: Cache file stats to avoid repeated stat() calls
  let fileSize = 0;
@@ -11460,11 +11683,30 @@ class Database extends events.EventEmitter {

  // CRITICAL FIX: Normalize ID to string for consistent comparison
  const normalizedId = String(recordWithIds.id);
+ if (this.opts.debugMode) {
+ console.log(`💾 Save: Checking record ID=${normalizedId} (original: ${recordWithIds.id}, type: ${typeof recordWithIds.id}) in updatedRecordsMap`);
+ console.log(`💾 Save: updatedRecordsMap.has(${normalizedId}): ${updatedRecordsMap.has(normalizedId)}`);
+ if (!updatedRecordsMap.has(normalizedId)) {
+ console.log(`💾 Save: Record ${normalizedId} NOT found in updatedRecordsMap. Available keys:`, Array.from(updatedRecordsMap.keys()));
+ }
+ }
  if (updatedRecordsMap.has(normalizedId)) {
  // Replace with updated version
  const updatedRecord = updatedRecordsMap.get(normalizedId);
  if (this.opts.debugMode) {
- console.log(`💾 Save: Updated record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`);
+ console.log(`💾 Save: REPLACING record ${recordWithIds.id} with updated version`);
+ console.log(`💾 Save: Old record:`, {
+ id: recordWithIds.id,
+ price: recordWithIds.price,
+ app_id: recordWithIds.app_id,
+ currency: recordWithIds.currency
+ });
+ console.log(`💾 Save: New record:`, {
+ id: updatedRecord.id,
+ price: updatedRecord.price,
+ app_id: updatedRecord.app_id,
+ currency: updatedRecord.currency
+ });
  }
  return {
  type: 'updated',
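The stream-side counterpart of the merge logic above, sketched: records read from the file are swapped for their snapshot versions keyed by normalized string id and tagged `type: 'updated'`. The harness is hypothetical:

```js
// Sketch of the stream-side substitution: normalizing ids to strings lets
// a numeric id on disk match a string id in the snapshot.
function classify(recordFromFile, updatedRecordsMap) {
  const normalizedId = String(recordFromFile.id);
  if (updatedRecordsMap.has(normalizedId)) {
    return { type: 'updated', record: updatedRecordsMap.get(normalizedId) };
  }
  return { type: 'kept', record: recordFromFile };
}

const updatedRecordsMap = new Map([['42', { id: 42, price: 12 }]]);
console.log(classify({ id: 42, price: 10 }, updatedRecordsMap));
// → { type: 'updated', record: { id: 42, price: 12 } }
```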