jexidb 2.1.0 → 2.1.1
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/dist/Database.cjs +1642 -334
- package/docs/API.md +1057 -1051
- package/package.json +1 -1
- package/scripts/benchmark-array-serialization.js +108 -0
- package/scripts/score-mode-demo.js +45 -0
- package/src/Database.mjs +1362 -167
- package/src/FileHandler.mjs +83 -44
- package/src/OperationQueue.mjs +23 -23
- package/src/Serializer.mjs +214 -23
- package/src/managers/IndexManager.mjs +778 -87
- package/src/managers/QueryManager.mjs +266 -49
- package/src/managers/TermManager.mjs +7 -7
- package/src/utils/operatorNormalizer.mjs +116 -0
- package/test/coverage-method.test.js +93 -0
- package/test/deserialize-corruption-fixes.test.js +296 -0
- package/test/exists-method.test.js +318 -0
- package/test/explicit-indexes-comparison.test.js +219 -0
- package/test/filehandler-non-adjacent-ranges-bug.test.js +175 -0
- package/test/index-line-number-regression.test.js +100 -0
- package/test/index-missing-index-data.test.js +91 -0
- package/test/index-persistence.test.js +205 -20
- package/test/insert-session-auto-flush.test.js +353 -0
- package/test/legacy-operator-compat.test.js +154 -0
- package/test/score-method.test.js +60 -0
package/package.json
CHANGED

package/scripts/benchmark-array-serialization.js
ADDED
@@ -0,0 +1,108 @@
+import { performance } from 'node:perf_hooks'
+import Serializer from '../src/Serializer.mjs'
+
+const ITERATIONS = parseInt(process.env.BENCH_ITERATIONS ?? '200', 10)
+const RECORDS_PER_ITERATION = parseInt(process.env.BENCH_RECORDS ?? '2000', 10)
+
+const serializer = new Serializer({
+  enableAdvancedSerialization: true,
+  enableArraySerialization: true,
+  debugMode: false
+})
+
+// Ensure schema is initialized to trigger array serialization path
+const schemaFields = ['id', 'name', 'value', 'tags', 'metadata', 'score', 'createdAt']
+serializer.schemaManager.initializeSchema(schemaFields)
+
+function createRecord(index) {
+  return {
+    id: `rec-${index}`,
+    name: `Record ${index}`,
+    value: index,
+    tags: [`tag-${index % 10}`, `tag-${(index + 3) % 10}`],
+    metadata: {
+      active: index % 2 === 0,
+      category: `category-${index % 5}`,
+      flags: [index % 7 === 0, index % 11 === 0]
+    },
+    score: Math.sin(index) * 100,
+    createdAt: new Date(1700000000000 + index * 60000).toISOString()
+  }
+}
+
+function prepareData() {
+  const raw = new Array(RECORDS_PER_ITERATION)
+  const arrayFormat = new Array(RECORDS_PER_ITERATION)
+  const normalized = new Array(RECORDS_PER_ITERATION)
+
+  for (let i = 0; i < RECORDS_PER_ITERATION; i++) {
+    const record = createRecord(i)
+    raw[i] = record
+    const arr = serializer.convertToArrayFormat(record)
+    arrayFormat[i] = arr
+    normalized[i] = serializer.deepNormalizeEncoding(arr)
+  }
+
+  return { raw, arrayFormat, normalized }
+}
+
+function benchFastPath(normalizedArrays) {
+  let totalLength = 0
+  for (let i = 0; i < normalizedArrays.length; i++) {
+    const json = serializer._stringifyNormalizedArray(normalizedArrays[i])
+    totalLength += json.length
+  }
+  return totalLength
+}
+
+function benchLegacyPath(normalizedArrays) {
+  let totalLength = 0
+  for (let i = 0; i < normalizedArrays.length; i++) {
+    const json = JSON.stringify(normalizedArrays[i])
+    totalLength += json.length
+  }
+  return totalLength
+}
+
+function runBenchmarks() {
+  const { normalized } = prepareData()
+
+  // Warm-up
+  benchFastPath(normalized)
+  benchLegacyPath(normalized)
+
+  let fastDuration = 0
+  let legacyDuration = 0
+  let fastTotal = 0
+  let legacyTotal = 0
+
+  for (let i = 0; i < ITERATIONS; i++) {
+    const startFast = performance.now()
+    fastTotal += benchFastPath(normalized)
+    fastDuration += performance.now() - startFast
+
+    const startLegacy = performance.now()
+    legacyTotal += benchLegacyPath(normalized)
+    legacyDuration += performance.now() - startLegacy
+  }
+
+  console.log(`Benchmark settings:`)
+  console.log(`  iterations: ${ITERATIONS}`)
+  console.log(`  records/iteration: ${RECORDS_PER_ITERATION}`)
+  console.log('')
+  console.log(`Fast path total time: ${fastDuration.toFixed(2)} ms`)
+  console.log(`Legacy path total time: ${legacyDuration.toFixed(2)} ms`)
+  console.log('')
+  console.log(`Average fast path per iteration: ${(fastDuration / ITERATIONS).toFixed(4)} ms`)
+  console.log(`Average legacy path per iteration: ${(legacyDuration / ITERATIONS).toFixed(4)} ms`)
+  console.log('')
+  console.log(`Output size parity check: fast=${fastTotal} legacy=${legacyTotal}`)
+}
+
+try {
+  runBenchmarks()
+} catch (error) {
+  console.error('Benchmark failed:', error)
+  process.exit(1)
+}
+
package/scripts/score-mode-demo.js
ADDED
@@ -0,0 +1,45 @@
+import { Database } from '../src/Database.mjs'
+import path from 'path'
+import fs from 'fs'
+
+const dbPath = path.join(process.cwd(), 'temp-score-demo.jdb')
+const idxPath = dbPath.replace('.jdb', '.idx.jdb')
+
+for (const file of [dbPath, idxPath]) {
+  if (fs.existsSync(file)) {
+    fs.unlinkSync(file)
+  }
+}
+
+const db = new Database(dbPath, {
+  indexes: { terms: 'array:string' }
+})
+
+await db.init()
+
+await db.insert({ id: 1, title: 'Ação', terms: ['action'] })
+await db.insert({ id: 2, title: 'Comédia', terms: ['comedy'] })
+await db.insert({ id: 3, title: 'Ação + Comédia', terms: ['action', 'comedy'] })
+await db.save()
+
+const weights = { action: 2.0, comedy: 1.0 }
+
+const modes = ['sum', 'max', 'avg', 'first']
+
+for (const mode of modes) {
+  const results = await db.score('terms', weights, { mode })
+  console.log(`\nmode=${mode}`)
+  for (const entry of results) {
+    console.log(`  ${entry.title.padEnd(16)} score=${entry.score}`)
+  }
+}
+
+await db.destroy()
+
+for (const file of [dbPath, idxPath]) {
+  if (fs.existsSync(file)) {
+    fs.unlinkSync(file)
+  }
+}
+