jexidb 2.1.1 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Database.cjs +7621 -113
- package/package.json +9 -2
- package/src/Database.mjs +244 -79
- package/src/SchemaManager.mjs +325 -268
- package/src/Serializer.mjs +20 -1
- package/src/managers/QueryManager.mjs +74 -18
- package/.babelrc +0 -13
- package/.gitattributes +0 -2
- package/CHANGELOG.md +0 -140
- package/babel.config.json +0 -5
- package/docs/API.md +0 -1057
- package/docs/EXAMPLES.md +0 -701
- package/docs/README.md +0 -194
- package/examples/iterate-usage-example.js +0 -157
- package/examples/simple-iterate-example.js +0 -115
- package/jest.config.js +0 -24
- package/scripts/README.md +0 -47
- package/scripts/benchmark-array-serialization.js +0 -108
- package/scripts/clean-test-files.js +0 -75
- package/scripts/prepare.js +0 -31
- package/scripts/run-tests.js +0 -80
- package/scripts/score-mode-demo.js +0 -45
- package/test/$not-operator-with-and.test.js +0 -282
- package/test/README.md +0 -8
- package/test/close-init-cycle.test.js +0 -256
- package/test/coverage-method.test.js +0 -93
- package/test/critical-bugs-fixes.test.js +0 -1069
- package/test/deserialize-corruption-fixes.test.js +0 -296
- package/test/exists-method.test.js +0 -318
- package/test/explicit-indexes-comparison.test.js +0 -219
- package/test/filehandler-non-adjacent-ranges-bug.test.js +0 -175
- package/test/index-line-number-regression.test.js +0 -100
- package/test/index-missing-index-data.test.js +0 -91
- package/test/index-persistence.test.js +0 -491
- package/test/index-serialization.test.js +0 -314
- package/test/indexed-query-mode.test.js +0 -360
- package/test/insert-session-auto-flush.test.js +0 -353
- package/test/iterate-method.test.js +0 -272
- package/test/legacy-operator-compat.test.js +0 -154
- package/test/query-operators.test.js +0 -238
- package/test/regex-array-fields.test.js +0 -129
- package/test/score-method.test.js +0 -298
- package/test/setup.js +0 -17
- package/test/term-mapping-minimal.test.js +0 -154
- package/test/term-mapping-simple.test.js +0 -257
- package/test/term-mapping.test.js +0 -514
- package/test/writebuffer-flush-resilience.test.js +0 -204
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "jexidb",
-  "version": "2.1.1",
+  "version": "2.1.2",
   "type": "module",
   "description": "JexiDB is a pure JS NPM library for managing data on disk efficiently, without the need for a server.",
   "main": "./dist/Database.cjs",
@@ -17,7 +17,7 @@
     "test:coverage": "jest --coverage && npm run clean:test-files",
     "test:legacy": "node --expose-gc test/test.mjs",
     "clean:test-files": "node scripts/clean-test-files.js",
-    "build": "
+    "build": "rollup -c",
     "prepare": "node scripts/prepare.js"
   },
   "author": "EdenwareApps",
@@ -27,6 +27,7 @@
     "@babel/core": "^7.25.2",
     "@babel/plugin-transform-async-generator-functions": "^7.25.4",
    "@babel/preset-env": "^7.28.3",
+    "@rollup/plugin-babel": "^6.1.0",
    "@rollup/plugin-commonjs": "^28.0.6",
    "@rollup/plugin-node-resolve": "^16.0.1",
    "babel-jest": "^30.0.5",
@@ -47,6 +48,12 @@
  "directories": {
    "test": "test"
  },
+  "files": [
+    "dist",
+    "src",
+    "README.md",
+    "LICENSE"
+  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/EdenwareApps/jexidb.git"
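
Note: the new "files" whitelist is what drops the docs, tests, scripts, and config files from the published tarball — that is the source of the long deletion list at the top of this diff. The build script now delegates to Rollup, with "@rollup/plugin-babel" added to devDependencies to match. The Rollup config itself is not part of the diff; a minimal sketch of what a compatible rollup.config.mjs could look like (every detail below is an assumption, not the package's actual config):

```js
// rollup.config.mjs — hypothetical sketch; the real config is not shown in this diff
import { babel } from '@rollup/plugin-babel'
import { nodeResolve } from '@rollup/plugin-node-resolve'
import commonjs from '@rollup/plugin-commonjs'

export default {
  input: 'src/Database.mjs',                              // assumed entry point
  output: { file: 'dist/Database.cjs', format: 'cjs' },   // matches the package "main"
  plugins: [
    nodeResolve(),                                        // resolve bare module imports
    commonjs(),                                           // convert CJS dependencies
    babel({ babelHelpers: 'bundled', presets: ['@babel/preset-env'] })
  ]
}
```
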
package/src/Database.mjs
CHANGED
@@ -545,6 +545,32 @@ class Database extends EventEmitter {
       return
     }

+    // Handle legacy 'schema' option migration
+    if (this.opts.schema) {
+      // If fields is already provided and valid, ignore schema
+      if (this.opts.fields && typeof this.opts.fields === 'object' && Object.keys(this.opts.fields).length > 0) {
+        if (this.opts.debugMode) {
+          console.log(`⚠️ Both 'schema' and 'fields' options provided. Ignoring 'schema' and using 'fields'. [${this.instanceId}]`)
+        }
+      } else if (Array.isArray(this.opts.schema)) {
+        // Schema as array is no longer supported
+        throw new Error('The "schema" option as an array is no longer supported. Please use "fields" as an object instead. Example: { fields: { id: "number", name: "string" } }')
+      } else if (typeof this.opts.schema === 'object' && this.opts.schema !== null) {
+        // Schema as object - migrate to fields
+        this.opts.fields = { ...this.opts.schema }
+        if (this.opts.debugMode) {
+          console.log(`⚠️ Migrated 'schema' option to 'fields'. Please update your code to use 'fields' instead of 'schema'. [${this.instanceId}]`)
+        }
+      } else {
+        throw new Error('The "schema" option must be an object. Example: { schema: { id: "number", name: "string" } }')
+      }
+    }
+
+    // Validate that fields is provided (mandatory)
+    if (!this.opts.fields || typeof this.opts.fields !== 'object' || Object.keys(this.opts.fields).length === 0) {
+      throw new Error('The "fields" option is mandatory and must be an object with at least one field definition. Example: { fields: { id: "number", name: "string" } }')
+    }
+
     // CRITICAL FIX: Initialize serializer first - this was missing and causing crashes
     this.serializer = new Serializer(this.opts)

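The block above is the whole compatibility story in one place: an object-valued legacy `schema` is copied into `fields`, the array form hard-fails, and `fields` itself becomes mandatory. A minimal usage sketch of the three cases — the `new Database(path, opts)` constructor and import shape are assumptions, not shown in this diff:

```js
// Hypothetical usage sketch — constructor/import shape assumed
import Database from 'jexidb'

// Object-valued legacy option: migrated into opts.fields (debug warning only)
const a = new Database('catalog.jdb', { schema: { id: 'number', name: 'string' } })

// Array-valued legacy option: now throws
// new Database('catalog.jdb', { schema: ['id', 'name'] })
// -> Error: The "schema" option as an array is no longer supported...

// Current API: 'fields' is mandatory and must be a non-empty object
const b = new Database('catalog.jdb', { fields: { id: 'number', name: 'string' } })
```
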
@@ -1027,12 +1053,22 @@ class Database extends EventEmitter {
       }
     }

-    // Reinitialize schema from saved configuration
-
+    // Reinitialize schema from saved configuration (only if fields not provided)
+    // Note: fields option takes precedence over saved schema
+    if (!this.opts.fields && config.schema && this.serializer) {
       this.serializer.initializeSchema(config.schema)
       if (this.opts.debugMode) {
         console.log(`📂 Loaded schema from ${idxPath}:`, config.schema.join(', '))
       }
+    } else if (this.opts.fields && this.serializer) {
+      // Use fields option instead of saved schema
+      const fieldNames = Object.keys(this.opts.fields)
+      if (fieldNames.length > 0) {
+        this.serializer.initializeSchema(fieldNames)
+        if (this.opts.debugMode) {
+          console.log(`📂 Schema initialized from fields option:`, fieldNames.join(', '))
+        }
+      }
     }
   }
 }
@@ -1263,7 +1299,8 @@ class Database extends EventEmitter {

     // CRITICAL FIX: Capture writeBuffer and deletedIds at the start to prevent race conditions
     const writeBufferSnapshot = [...this.writeBuffer]
-
+    // CRITICAL FIX: Normalize deleted IDs to strings for consistent comparison
+    const deletedIdsSnapshot = new Set(Array.from(this.deletedIds).map(id => String(id)))

     // OPTIMIZATION: Process pending index updates in batch before save
     if (this.pendingIndexUpdates && this.pendingIndexUpdates.length > 0) {
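The String() normalization above (and repeated throughout this diff) exists because Set and Map lookups in JavaScript are type-sensitive: the number 1 and the string '1' are different keys. A standalone illustration of the failure mode being fixed:

```js
// Without normalization, an id that changed type (e.g. via a JSON round-trip) escapes deletion:
const deletedIds = new Set([1, 42])
console.log(deletedIds.has('1'))   // false — type mismatch, the record would be kept

// Normalizing both sides to strings, as the new snapshot does:
const deletedIdsSnapshot = new Set(Array.from(deletedIds).map(id => String(id)))
console.log(deletedIdsSnapshot.has(String('1'))) // true
console.log(deletedIdsSnapshot.has(String(42)))  // true
```
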
@@ -1312,10 +1349,12 @@ class Database extends EventEmitter {
     let orphanedCount = 0

     // Check if there are new records to save (after flush, writeBuffer should be empty)
+    // CRITICAL FIX: Also check writeBufferSnapshot.length > 0 to handle updates/deletes
+    // that were in writeBuffer before flush but are now in snapshot
     if (this.opts.debugMode) {
       console.log(`💾 Save: writeBuffer.length=${this.writeBuffer.length}, writeBufferSnapshot.length=${writeBufferSnapshot.length}`)
     }
-    if (this.writeBuffer.length > 0) {
+    if (this.writeBuffer.length > 0 || writeBufferSnapshot.length > 0) {
       if (this.opts.debugMode) {
         console.log(`💾 Save: WriteBuffer has ${writeBufferSnapshot.length} records, using streaming approach`)
       }
@@ -1349,21 +1388,20 @@ class Database extends EventEmitter {
       // Add streaming operation
       parallelOperations.push(
         this._streamExistingRecords(deletedIdsSnapshot, writeBufferSnapshot).then(existingRecords => {
+          // CRITICAL FIX: _streamExistingRecords already handles updates via updatedRecordsMap
+          // So existingRecords already contains updated records from writeBufferSnapshot
+          // We only need to add records from writeBufferSnapshot that are NEW (not updates)
           allData = [...existingRecords]

-          // OPTIMIZATION: Use
-
+          // OPTIMIZATION: Use Set for faster lookups of existing record IDs
+          // CRITICAL FIX: Normalize IDs to strings for consistent comparison
+          const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => String(r.id)))

+          // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
           for (const record of writeBufferSnapshot) {
-            if (!deletedIdsSnapshot.has(record.id)) {
-
-
-              const existingIndex = allData.findIndex(r => r.id === record.id)
-              allData[existingIndex] = record
-            } else {
-              // Add new record
-              allData.push(record)
-            }
+            if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(String(record.id))) {
+              // This is a new record, not an update
+              allData.push(record)
             }
           }
         })
@@ -1408,15 +1446,43 @@ class Database extends EventEmitter {
             console.log(`💾 Save: _streamExistingRecords returned ${existingRecords.length} records`)
             console.log(`💾 Save: existingRecords:`, existingRecords)
           }
-          //
-
+          // CRITICAL FIX: _streamExistingRecords already handles updates via updatedRecordsMap
+          // So existingRecords already contains updated records from writeBufferSnapshot
+          // We only need to add records from writeBufferSnapshot that are NEW (not updates)
+          allData = [...existingRecords]
+
+          // OPTIMIZATION: Use Set for faster lookups of existing record IDs
+          const existingRecordIds = new Set(existingRecords.filter(r => r && r.id).map(r => r.id))
+
+          // Add only NEW records from writeBufferSnapshot (not updates, as those are already in existingRecords)
+          for (const record of writeBufferSnapshot) {
+            if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(record.id)) {
+              // This is a new record, not an update
+              allData.push(record)
+            }
+          }
+
+          if (this.opts.debugMode) {
+            const updatedCount = writeBufferSnapshot.filter(r => r && r.id && existingRecordIds.has(String(r.id))).length
+            const newCount = writeBufferSnapshot.filter(r => r && r.id && !existingRecordIds.has(String(r.id))).length
+            console.log(`💾 Save: Combined data - existingRecords: ${existingRecords.length}, updatedFromBuffer: ${updatedCount}, newFromBuffer: ${newCount}, total: ${allData.length}`)
+            console.log(`💾 Save: WriteBuffer record IDs:`, writeBufferSnapshot.map(r => r && r.id ? r.id : 'no-id'))
+            console.log(`💾 Save: Existing record IDs:`, Array.from(existingRecordIds))
+            console.log(`💾 Save: Sample existing record:`, existingRecords[0] ? { id: existingRecords[0].id, name: existingRecords[0].name, tags: existingRecords[0].tags } : 'null')
+            console.log(`💾 Save: Sample writeBuffer record:`, writeBufferSnapshot[0] ? { id: writeBufferSnapshot[0].id, name: writeBufferSnapshot[0].name, tags: writeBufferSnapshot[0].tags } : 'null')
+          }
         }).catch(error => {
           if (this.opts.debugMode) {
             console.log(`💾 Save: _streamExistingRecords failed:`, error.message)
           }
           // CRITICAL FIX: Use safe fallback to preserve existing data instead of losing it
           return this._loadExistingRecordsFallback(deletedIdsSnapshot, writeBufferSnapshot).then(fallbackRecords => {
-
+            // CRITICAL FIX: Avoid duplicating updated records
+            const fallbackRecordIds = new Set(fallbackRecords.map(r => r.id))
+            const newRecordsFromBuffer = writeBufferSnapshot.filter(record =>
+              !deletedIdsSnapshot.has(String(record.id)) && !fallbackRecordIds.has(record.id)
+            )
+            allData = [...fallbackRecords, ...newRecordsFromBuffer]
             if (this.opts.debugMode) {
               console.log(`💾 Save: Fallback preserved ${fallbackRecords.length} existing records, total: ${allData.length}`)
             }
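Both the streaming path and the fallback path now apply the same merge rule: records that `_streamExistingRecords` (or the fallback loader) already returned must not be appended again, so only buffered records whose id is neither deleted nor already present count as new. A self-contained sketch of that rule with invented data:

```js
// Records already rewritten by the streaming/fallback pass (updates applied):
const existingRecords = [{ id: 1, name: 'a (updated)' }, { id: 2, name: 'b' }]
// Snapshot of the writeBuffer: one update, one new record, one deleted record
const writeBufferSnapshot = [{ id: 1, name: 'a (updated)' }, { id: 3, name: 'c' }, { id: 4, name: 'd' }]
const deletedIdsSnapshot = new Set(['4'])

const existingRecordIds = new Set(existingRecords.map(r => String(r.id)))
const allData = [...existingRecords]
for (const record of writeBufferSnapshot) {
  if (record && record.id && !deletedIdsSnapshot.has(String(record.id)) && !existingRecordIds.has(String(record.id))) {
    allData.push(record) // only id 3 qualifies: 1 is an update, 4 is deleted
  }
}
console.log(allData.map(r => r.id)) // [1, 2, 3] — no duplicate for the updated record
```
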
@@ -1426,7 +1492,7 @@ class Database extends EventEmitter {
               console.log(`💾 Save: CRITICAL - Data loss may occur, only writeBuffer will be saved`)
             }
             // Last resort: at least save what we have in writeBuffer
-            allData = writeBufferSnapshot.filter(record => !deletedIdsSnapshot.has(record.id))
+            allData = writeBufferSnapshot.filter(record => !deletedIdsSnapshot.has(String(record.id)))
           })
         })
       )
@@ -1440,7 +1506,12 @@ class Database extends EventEmitter {
       // CRITICAL FIX: Use safe fallback to preserve existing data instead of losing it
       try {
         const fallbackRecords = await this._loadExistingRecordsFallback(deletedIdsSnapshot, writeBufferSnapshot)
-
+        // CRITICAL FIX: Avoid duplicating updated records
+        const fallbackRecordIds = new Set(fallbackRecords.map(r => r.id))
+        const newRecordsFromBuffer = writeBufferSnapshot.filter(record =>
+          !deletedIdsSnapshot.has(String(record.id)) && !fallbackRecordIds.has(record.id)
+        )
+        allData = [...fallbackRecords, ...newRecordsFromBuffer]
         if (this.opts.debugMode) {
           console.log(`💾 Save: Fallback preserved ${fallbackRecords.length} existing records, total: ${allData.length}`)
         }
@@ -1450,23 +1521,46 @@ class Database extends EventEmitter {
             console.log(`💾 Save: CRITICAL - Data loss may occur, only writeBuffer will be saved`)
           }
           // Last resort: at least save what we have in writeBuffer
-          allData = writeBufferSnapshot.filter(record => !deletedIdsSnapshot.has(record.id))
+          allData = writeBufferSnapshot.filter(record => !deletedIdsSnapshot.has(String(record.id)))
         }
       }
     } else {
       // No existing data, use only writeBuffer
-      allData = writeBufferSnapshot.filter(record => !deletedIdsSnapshot.has(record.id))
+      allData = writeBufferSnapshot.filter(record => !deletedIdsSnapshot.has(String(record.id)))
     }
   }

   // CRITICAL FIX: Calculate offsets based on actual serialized data that will be written
   // This ensures consistency between offset calculation and file writing
-
-
+  // CRITICAL FIX: Remove term IDs before serialization to ensure proper serialization
+  const cleanedData = allData.map(record => {
+    if (!record || typeof record !== 'object') {
+      if (this.opts.debugMode) {
+        console.log(`💾 Save: WARNING - Invalid record in allData:`, record)
+      }
+      return record
+    }
+    return this.removeTermIdsForSerialization(record)
+  })
+
+  if (this.opts.debugMode) {
+    console.log(`💾 Save: allData.length=${allData.length}, cleanedData.length=${cleanedData.length}`)
+    console.log(`💾 Save: Sample cleaned record:`, cleanedData[0] ? Object.keys(cleanedData[0]) : 'null')
+  }
+
+  const jsonlData = cleanedData.length > 0
+    ? this.serializer.serializeBatch(cleanedData)
     : ''
   const jsonlString = jsonlData.toString('utf8')
   const lines = jsonlString.split('\n').filter(line => line.trim())

+  if (this.opts.debugMode) {
+    console.log(`💾 Save: Serialized ${lines.length} lines`)
+    if (lines.length > 0) {
+      console.log(`💾 Save: First line (first 200 chars):`, lines[0].substring(0, 200))
+    }
+  }
+
   this.offsets = []
   let currentOffset = 0
   for (let i = 0; i < lines.length; i++) {
@@ -1547,14 +1641,22 @@ class Database extends EventEmitter {

   // Clear writeBuffer and deletedIds after successful save only if we had data to save
   if (allData.length > 0) {
-    // Rebuild index when records were deleted to maintain consistency
+    // Rebuild index when records were deleted or updated to maintain consistency
     const hadDeletedRecords = deletedIdsSnapshot.size > 0
+    const hadUpdatedRecords = writeBufferSnapshot.length > 0
     if (this.indexManager && this.indexManager.indexedFields && this.indexManager.indexedFields.length > 0) {
-      if (hadDeletedRecords) {
-        // Clear the index and rebuild it from the
+      if (hadDeletedRecords || hadUpdatedRecords) {
+        // Clear the index and rebuild it from the saved records
+        // This ensures that lineNumbers point to the correct positions in the file
         this.indexManager.clear()
         if (this.opts.debugMode) {
-
+          if (hadDeletedRecords && hadUpdatedRecords) {
+            console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records and updating ${writeBufferSnapshot.length} records`)
+          } else if (hadDeletedRecords) {
+            console.log(`🧹 Rebuilding index after removing ${deletedIdsSnapshot.size} deleted records`)
+          } else {
+            console.log(`🧹 Rebuilding index after updating ${writeBufferSnapshot.length} records`)
+          }
         }

         // Rebuild index from the saved records
@@ -1675,12 +1777,21 @@ class Database extends EventEmitter {
             this.termManager.decrementTermCount(termId)
           }
         } else if (oldRecord[field] && Array.isArray(oldRecord[field])) {
-          //
-
-
-
+          // Check if field contains term IDs (numbers) or terms (strings)
+          const firstValue = oldRecord[field][0]
+          if (typeof firstValue === 'number') {
+            // Field contains term IDs (from find with restoreTerms: false)
+            for (const termId of oldRecord[field]) {
              this.termManager.decrementTermCount(termId)
            }
+          } else if (typeof firstValue === 'string') {
+            // Field contains terms (strings) - convert to term IDs
+            for (const term of oldRecord[field]) {
+              const termId = this.termManager.termToId.get(term)
+              if (termId) {
+                this.termManager.decrementTermCount(termId)
+              }
+            }
           }
         }
       }
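The typeof dispatch above is needed because an array field can arrive holding either numeric term IDs (when terms were not restored on read) or the original strings. A hedged sketch of the same dispatch outside the class — `termToId` and the decrement call mirror the diff's names, the data is invented:

```js
// The same field in its two possible shapes:
const withIds = { tags: [7, 12] }            // e.g. from find() with restoreTerms: false
const withTerms = { tags: ['news', 'tv'] }   // terms already restored to strings

const termToId = new Map([['news', 7], ['tv', 12]])
const decrementTermCount = id => console.log(`decrement term ${id}`)

for (const record of [withIds, withTerms]) {
  const firstValue = record.tags[0]
  if (typeof firstValue === 'number') {
    record.tags.forEach(decrementTermCount)  // already term IDs
  } else if (typeof firstValue === 'string') {
    record.tags.map(t => termToId.get(t)).filter(Boolean).forEach(decrementTermCount)
  }
}
```
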
@@ -1933,6 +2044,7 @@ class Database extends EventEmitter {
     }

     // Apply schema enforcement - convert to array format and back to enforce schema
+    // This will discard any fields not in the schema
     const schemaEnforcedRecord = this.applySchemaEnforcement(record)

     // Don't store in this.data - only use writeBuffer and index
@@ -2477,9 +2589,11 @@ class Database extends EventEmitter {
       }
     }

-    // Update record in writeBuffer or add to writeBuffer if not present
+    // CRITICAL FIX: Update record in writeBuffer or add to writeBuffer if not present
+    // For records in the file, we need to ensure they are properly marked for replacement
     const index = this.writeBuffer.findIndex(r => r.id === record.id)
     let lineNumber = null
+
     if (index !== -1) {
       // Record is already in writeBuffer, update it
       this.writeBuffer[index] = updated
@@ -2489,11 +2603,12 @@ class Database extends EventEmitter {
       }
     } else {
       // Record is in file, add updated version to writeBuffer
-      //
+      // CRITICAL FIX: Ensure the old record in file will be replaced by checking if it exists in offsets
+      // The save() method will handle replacement via _streamExistingRecords which checks updatedRecordsMap
       this.writeBuffer.push(updated)
       lineNumber = this._getAbsoluteLineNumber(this.writeBuffer.length - 1)
       if (this.opts.debugMode) {
-        console.log(`🔄 UPDATE: Added
+        console.log(`🔄 UPDATE: Added updated record to writeBuffer (will replace file record ${record.id})`)
       }
     }

@@ -2628,16 +2743,7 @@ class Database extends EventEmitter {
       return
     }

-    //
-    if (this.opts.schema && Array.isArray(this.opts.schema)) {
-      this.serializer.initializeSchema(this.opts.schema)
-      if (this.opts.debugMode) {
-        console.log(`🔍 Schema initialized from options: ${this.opts.schema.join(', ')} [${this.instanceId}]`)
-      }
-      return
-    }
-
-    // Try to initialize from fields configuration (new format)
+    // Initialize from fields configuration (mandatory)
     if (this.opts.fields && typeof this.opts.fields === 'object') {
       const fieldNames = Object.keys(this.opts.fields)
       if (fieldNames.length > 0) {
@@ -2649,7 +2755,7 @@ class Database extends EventEmitter {
       }
     }

-    // Try to auto-detect schema from existing data
+    // Try to auto-detect schema from existing data (fallback for migration scenarios)
     if (this.data && this.data.length > 0) {
       this.serializer.initializeSchema(this.data, true) // autoDetect = true
       if (this.opts.debugMode) {
@@ -2658,10 +2764,6 @@ class Database extends EventEmitter {
       return
     }

-    // CRITICAL FIX: Don't initialize schema from indexes
-    // This was causing data loss because only indexed fields were preserved
-    // Let schema be auto-detected from actual data instead
-
     if (this.opts.debugMode) {
       console.log(`🔍 No schema initialization possible - will auto-detect on first insert [${this.instanceId}]`)
     }
@@ -3499,24 +3601,83 @@ class Database extends EventEmitter {
     const lineNumbers = limitedEntries.map(([lineNumber]) => lineNumber)
     const scoresByLineNumber = new Map(limitedEntries)

-
-    const ranges = this.getRanges(lineNumbers)
-    const groupedRanges = await this.fileHandler.groupedRanges(ranges)
+    const persistedCount = Array.isArray(this.offsets) ? this.offsets.length : 0

-
-    const
+    // Separate lineNumbers into file records and writeBuffer records
+    const fileLineNumbers = []
+    const writeBufferLineNumbers = []
+
+    for (const lineNumber of lineNumbers) {
+      if (lineNumber >= persistedCount) {
+        // This lineNumber points to writeBuffer
+        writeBufferLineNumbers.push(lineNumber)
+      } else {
+        // This lineNumber points to file
+        fileLineNumbers.push(lineNumber)
+      }
+    }

     const results = []

-
-
-
-
-
-
-
-
-
+    // Read records from file
+    if (fileLineNumbers.length > 0) {
+      const ranges = this.getRanges(fileLineNumbers)
+      if (ranges.length > 0) {
+        // Create a map from start offset to lineNumber for accurate mapping
+        const startToLineNumber = new Map()
+        for (const range of ranges) {
+          if (range.index !== undefined) {
+            startToLineNumber.set(range.start, range.index)
+          }
+        }
+
+        const groupedRanges = await this.fileHandler.groupedRanges(ranges)
+
+        const fs = await import('fs')
+        const fd = await fs.promises.open(this.fileHandler.file, 'r')
+
+        try {
+          for (const groupedRange of groupedRanges) {
+            for await (const row of this.fileHandler.readGroupedRange(groupedRange, fd)) {
+              try {
+                const record = this.serializer.deserialize(row.line)
+
+                // Get line number from the row, fallback to start offset mapping
+                let lineNumber = row._ !== null && row._ !== undefined ? row._ : (startToLineNumber.get(row.start) ?? 0)
+
+                // Restore term IDs to terms
+                const recordWithTerms = this.restoreTermIdsAfterDeserialization(record)
+
+                // Add line number
+                recordWithTerms._ = lineNumber
+
+                // Add score if includeScore is true (default is true)
+                if (opts.includeScore !== false) {
+                  recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0
+                }
+
+                results.push(recordWithTerms)
+              } catch (error) {
+                // Skip invalid lines
+                if (this.opts.debugMode) {
+                  console.error('Error deserializing record in score():', error)
+                }
+              }
+            }
+          }
+        } finally {
+          await fd.close()
+        }
+      }
+    }
+
+    // Read records from writeBuffer
+    if (writeBufferLineNumbers.length > 0 && this.writeBuffer) {
+      for (const lineNumber of writeBufferLineNumbers) {
+        const writeBufferIndex = lineNumber - persistedCount
+        if (writeBufferIndex >= 0 && writeBufferIndex < this.writeBuffer.length) {
+          const record = this.writeBuffer[writeBufferIndex]
+          if (record) {
             // Restore term IDs to terms
             const recordWithTerms = this.restoreTermIdsAfterDeserialization(record)

@@ -3529,16 +3690,9 @@ class Database extends EventEmitter {
             }

             results.push(recordWithTerms)
-          } catch (error) {
-            // Skip invalid lines
-            if (this.opts.debugMode) {
-              console.error('Error deserializing record in score():', error)
-            }
           }
         }
       }
-    } finally {
-      await fd.close()
     }

     // Re-sort results to maintain score order (since reads might be out of order)
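The reworked score() path splits hits by position alone: a line number below the count of persisted offsets is on disk, anything at or above it indexes into the in-memory writeBuffer. The arithmetic in isolation (values invented):

```js
const persistedCount = 5             // this.offsets.length — records already on disk
const lineNumbers = [2, 4, 5, 7]     // scored hits

const fileLineNumbers = lineNumbers.filter(n => n < persistedCount)          // [2, 4] — read via byte ranges
const writeBufferLineNumbers = lineNumbers.filter(n => n >= persistedCount)  // [5, 7] — still in memory

for (const n of writeBufferLineNumbers) {
  const writeBufferIndex = n - persistedCount  // 0 and 2
  console.log(`line ${n} -> writeBuffer[${writeBufferIndex}]`)
}
```
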
@@ -3880,9 +4034,11 @@ class Database extends EventEmitter {
     for (let i = 0; i < lines.length && i < this.offsets.length; i++) {
       try {
         const record = this.serializer.deserialize(lines[i])
-        if (record && !deletedIdsSnapshot.has(record.id)) {
+        if (record && !deletedIdsSnapshot.has(String(record.id))) {
           // Check if this record is not being updated in writeBuffer
-
+          // CRITICAL FIX: Normalize IDs to strings for consistent comparison
+          const normalizedRecordId = String(record.id)
+          const updatedRecord = writeBufferSnapshot.find(r => r && r.id && String(r.id) === normalizedRecordId)
           if (!updatedRecord) {
             existingRecords.push(record)
           }
@@ -3926,9 +4082,14 @@ class Database extends EventEmitter {
     // existingRecords.length = this.offsets.length

     // Create a map of updated records for quick lookup
+    // CRITICAL FIX: Normalize IDs to strings for consistent comparison
     const updatedRecordsMap = new Map()
     writeBufferSnapshot.forEach(record => {
-
+      if (record && record.id !== undefined && record.id !== null) {
+        // Normalize ID to string for consistent comparison
+        const normalizedId = String(record.id)
+        updatedRecordsMap.set(normalizedId, record)
+      }
     })

     // OPTIMIZATION: Cache file stats to avoid repeated stat() calls
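Keying `updatedRecordsMap` by stringified ids is what lets the streaming pass decide per file line, in O(1), whether to substitute a buffered replacement, instead of scanning the writeBuffer for every line. A minimal sketch of the lookup (record data invented):

```js
const writeBufferSnapshot = [{ id: 2, name: 'renamed' }]

const updatedRecordsMap = new Map()
for (const record of writeBufferSnapshot) {
  if (record && record.id !== undefined && record.id !== null) {
    updatedRecordsMap.set(String(record.id), record) // string keys match the String() checks below
  }
}

// While streaming the file, each parsed line is either substituted or kept:
const fileRecord = { id: 2, name: 'original' }   // on disk the id may be a number
const replacement = updatedRecordsMap.get(String(fileRecord.id))
console.log(replacement ?? fileRecord)           // -> { id: 2, name: 'renamed' }
```
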
@@ -4096,7 +4257,8 @@ class Database extends EventEmitter {
         if (recordId !== undefined && recordId !== null) {
           recordId = String(recordId)
           // Check if this record needs full parsing (updated or deleted)
-
+          // CRITICAL FIX: Normalize ID to string for consistent comparison
+          needsFullParse = updatedRecordsMap.has(recordId) || deletedIdsSnapshot.has(String(recordId))
         } else {
           needsFullParse = true
         }
@@ -4111,7 +4273,8 @@ class Database extends EventEmitter {
         const idMatch = trimmedLine.match(/"id"\s*:\s*"([^"]+)"|"id"\s*:\s*(\d+)/)
         if (idMatch) {
           recordId = idMatch[1] || idMatch[2]
-
+          // CRITICAL FIX: Normalize ID to string for consistent comparison
+          needsFullParse = updatedRecordsMap.has(String(recordId)) || deletedIdsSnapshot.has(String(recordId))
         } else {
           needsFullParse = true
         }
@@ -4136,9 +4299,11 @@ class Database extends EventEmitter {
         // Use record directly (no need to restore term IDs)
         const recordWithIds = record

-
+        // CRITICAL FIX: Normalize ID to string for consistent comparison
+        const normalizedId = String(recordWithIds.id)
+        if (updatedRecordsMap.has(normalizedId)) {
           // Replace with updated version
-          const updatedRecord = updatedRecordsMap.get(
+          const updatedRecord = updatedRecordsMap.get(normalizedId)
           if (this.opts.debugMode) {
             console.log(`💾 Save: Updated record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`)
           }
@@ -4148,7 +4313,7 @@ class Database extends EventEmitter {
             id: recordWithIds.id,
             needsParse: false
           }
-        } else if (!deletedIdsSnapshot.has(recordWithIds.id)) {
+        } else if (!deletedIdsSnapshot.has(String(recordWithIds.id))) {
           // Keep existing record if not deleted
           if (this.opts.debugMode) {
             console.log(`💾 Save: Kept record ${recordWithIds.id} (${recordWithIds.name || 'Unnamed'})`)