jexidb 2.1.0 → 2.1.1

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -0,0 +1,219 @@
+ import { Database } from '../src/Database.mjs'
+ import fs from 'fs'
+
+ describe('Explicit indexes with comparison operators', () => {
+   let testDbPath
+   let testIdxPath
+
+   beforeEach(() => {
+     const uniqueSuffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`
+     testDbPath = `test-explicit-index-${uniqueSuffix}.jdb`
+     testIdxPath = testDbPath.replace('.jdb', '.idx.jdb')
+   })
+
+   afterEach(() => {
+     for (const filePath of [testDbPath, testIdxPath]) {
+       if (filePath && fs.existsSync(filePath)) {
+         try {
+           fs.unlinkSync(filePath)
+         } catch (error) {
+           console.warn(`⚠️ Failed to delete ${filePath}: ${error.message}`)
+         }
+       }
+     }
+   })
+
+   test('should reuse persisted index configuration when explicit indexes array is provided', async () => {
+     const seedDb = new Database(testDbPath, {
+       indexes: {
+         channel: 'string',
+         end: 'number',
+         start: 'number',
+         terms: 'array:string'
+       },
+       debugMode: false
+     })
+     await seedDb.init()
+
+     const now = Math.floor(Date.now() / 1000)
+
+     await seedDb.insert({
+       id: '1',
+       channel: 'Example Channel',
+       end: now + 120
+     })
+
+     await seedDb.insert({
+       id: '2',
+       channel: 'Example Channel',
+       end: now - 120
+     })
+
+     await seedDb.insert({
+       id: '3',
+       channel: 'Other Channel',
+       start: now + 1800,
+       end: now + 3600,
+       terms: ['other']
+     })
+
+     await seedDb.save()
+     await seedDb.close()
+
+     const reopenedDb = new Database(testDbPath, {
+       create: false,
+       indexes: ['channel', 'end'],
+       debugMode: false
+     })
+
+     await reopenedDb.init()
+
+     const count = await reopenedDb.count({
+       channel: 'Example Channel',
+       end: { $gt: now }
+     })
+     expect(count).toBe(1)
+
+     const results = await reopenedDb.find({
+       channel: 'Example Channel',
+       end: { $gt: now }
+     })
+     expect(results.map(record => record.id)).toEqual(['1'])
+
+     await reopenedDb.destroy()
+   })
+
+   test('should return results for equality mixed with comparison operators after reopening with persisted indexes', async () => {
+     const seedDb = new Database(testDbPath, {
+       indexes: {
+         channel: 'string',
+         start: 'number',
+         end: 'number',
+         terms: 'array:string'
+       },
+       indexedQueryMode: 'permissive',
+       debugMode: false
+     })
+     await seedDb.init()
+
+     const now = Math.floor(Date.now() / 1000)
+
+     await seedDb.insert({
+       id: 'future-1',
+       channel: 'São Paulo|SP Disney Channel',
+       start: now + 600,
+       end: now + 3600,
+       terms: ['disney', 'kids']
+     })
+     await seedDb.insert({
+       id: 'past-1',
+       channel: 'São Paulo|SP Disney Channel',
+       start: now - 3600,
+       end: now - 300,
+       terms: ['disney', 'classic']
+     })
+     await seedDb.insert({
+       id: 'present-1',
+       channel: 'São Paulo|SP Disney Channel',
+       start: now - 60,
+       end: now + 60,
+       terms: ['disney', 'now']
+     })
+     await seedDb.insert({
+       id: 'other-1',
+       channel: 'Other Channel',
+       start: now + 600,
+       end: now + 3600,
+       terms: ['other']
+     })
+
+     await seedDb.save()
+     await seedDb.close()
+
+     const reopenedDb = new Database(testDbPath, {
+       create: false,
+       indexes: {
+         channel: 'string',
+         start: 'number',
+         end: 'number',
+         terms: 'array:string'
+       },
+       indexedQueryMode: 'permissive',
+       debugMode: false
+     })
+
+     await reopenedDb.init()
+
+     // Simulate regression: channel index line numbers loaded as strings while numeric index stays numeric
+     const reopenedChannelIndex = reopenedDb.indexManager.index.data.channel
+     for (const term in reopenedChannelIndex) {
+       const entry = reopenedChannelIndex[term]
+       if (entry?.set instanceof Set) {
+         reopenedChannelIndex[term].set = new Set(Array.from(entry.set).map(value => String(value)))
+       }
+     }
+
+     const disneyChannel = 'São Paulo|SP Disney Channel'
+
+     const equalityCount = await reopenedDb.count({ channel: disneyChannel })
+     expect(equalityCount).toBe(3)
+
+     // Ensure persisted channel index still contains stringified line numbers
+     const storedChannelLineTypes = Object.values(reopenedChannelIndex).flatMap(entry => {
+       if (entry?.set instanceof Set) {
+         return Array.from(entry.set).map(value => typeof value)
+       }
+       return []
+     })
+     expect(storedChannelLineTypes.every(type => type === 'string')).toBe(true)
+
+     // Normalization should convert returned line numbers to numeric form
+     const channelLineTypes = Array.from(reopenedDb.indexManager.query({ channel: disneyChannel })).map(value => typeof value)
+     expect(channelLineTypes.every(type => type === 'number')).toBe(true)
+
+     const endLineTypes = Array.from(reopenedDb.indexManager.query({ end: { $gt: now } })).map(value => typeof value)
+     expect(endLineTypes.every(type => type === 'number')).toBe(true)
+
+     const operators = [
+       { criteria: { channel: disneyChannel, end: { $gt: now } }, expectedIds: ['future-1', 'present-1'] },
+       { criteria: { channel: disneyChannel, end: { '>': now } }, expectedIds: ['future-1', 'present-1'] },
+       { criteria: { channel: disneyChannel, end: { $gte: now } }, expectedIds: ['future-1', 'present-1'] },
+       { criteria: { channel: disneyChannel, end: { $lt: now } }, expectedIds: ['past-1'] },
+       { criteria: { channel: disneyChannel, end: { '<': now } }, expectedIds: ['past-1'] },
+       { criteria: { channel: disneyChannel, end: { $lte: now } }, expectedIds: ['past-1'] },
+       { criteria: { channel: disneyChannel, end: { '<=': now } }, expectedIds: ['past-1'] },
+       { criteria: { channel: disneyChannel, start: { $lte: now } }, expectedIds: ['past-1', 'present-1'] },
+       { criteria: { channel: disneyChannel, start: { '<': now } }, expectedIds: ['past-1', 'present-1'] }
+     ]
+
+     for (const { criteria, expectedIds } of operators) {
+       const count = await reopenedDb.count(criteria)
+       expect(count).toBe(expectedIds.length)
+
+       const results = await reopenedDb.find(criteria)
+       expect(results.map(record => record.id).sort()).toEqual([...expectedIds].sort())
+     }
+
+     const termsComparisonCount = await reopenedDb.count({
+       terms: { $in: ['disney'] },
+       end: { $gt: now }
+     })
+     expect(termsComparisonCount).toBe(2)
+
+     const termsComparisonIds = (await reopenedDb.find({
+       terms: { $in: ['disney'] },
+       end: { $gt: now }
+     })).map(record => record.id).sort()
+     expect(termsComparisonIds).toEqual(['future-1', 'present-1'])
+
+     const integrationCheckCount = await reopenedDb.count({
+       channel: disneyChannel,
+       end: { $gt: now },
+       start: { $lte: now + 3600 }
+     })
+     expect(integrationCheckCount).toBe(2)
+
+     await reopenedDb.destroy()
+   })
+ })
+
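The second test above stringifies the persisted channel index's line numbers and then expects indexManager.query() to return numeric positions for both equality and range criteria. A minimal sketch of that kind of normalization, assuming each index entry keeps its row positions in a Set (the helper below is illustrative, not jexidb's API):

    // Illustrative helper (not part of jexidb): coerce persisted line numbers back
    // to numbers so comparison operators such as $gt and $lt compare numerically.
    function normalizeIndexEntry(entry) {
      if (entry?.set instanceof Set) {
        entry.set = new Set(Array.from(entry.set, value => Number(value)))
      }
      return entry
    }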
@@ -0,0 +1,175 @@
+ import { Database } from '../src/Database.mjs'
+ import fs from 'fs'
+ import path from 'path'
+
+ describe('FileHandler readGroupedRange Bug - Non-adjacent Ranges', () => {
+   let testDir
+   let db
+
+   beforeEach(() => {
+     testDir = path.join(process.cwd(), 'test-files', 'filehandler-bug')
+     fs.mkdirSync(testDir, { recursive: true })
+   })
+
+   afterEach(async () => {
+     if (db) {
+       await db.destroy()
+     }
+     if (fs.existsSync(testDir)) {
+       try {
+         fs.rmSync(testDir, { recursive: true, force: true })
+       } catch (error) {
+         // Ignore cleanup errors
+       }
+     }
+   })
+
+   test('should detect deserialization error when readGroupedRange extracts incomplete lines', async () => {
+     // This test directly reproduces the bug where substring() extracts incomplete lines
+     // from non-adjacent ranges in the same buffer
+
+     const dbPath = path.join(testDir, 'bug-reproduction.jdb')
+
+     if (fs.existsSync(dbPath)) {
+       fs.unlinkSync(dbPath)
+     }
+     if (fs.existsSync(dbPath + '.idx')) {
+       fs.unlinkSync(dbPath + '.idx')
+     }
+
+     db = new Database(dbPath, {
+       clear: true,
+       create: true,
+       fields: {
+         id: 'number',
+         url: 'string',
+         name: 'string',
+         logo: 'string',
+         category: 'string',
+         nameTerms: 'array:string'
+       },
+       indexes: {
+         nameTerms: 'array:string'
+       },
+       debugMode: false
+     })
+
+     await db.init()
+
+     // Create a scenario that will force non-adjacent ranges to be grouped together
+     // Insert many records, with only a few matching the query
+     // The matching records should be far apart to create large gaps
+
+     const matchingRecords = [
+       { id: 1, url: 'http://olxi0fko.ukminlt.fun/iptv/HV7NY9RSQCHYZK/1066/index.m3u8', name: 'Дорама', logo: '', category: 'кино', nameTerms: ['кино'] },
+       { id: 50, url: 'http://olxi0fko.ukminlt.fun/iptv/HV7NY9RSQCHYZK/1066/index.m3u8', name: 'Дорама', logo: '', category: 'кино', nameTerms: ['кино'] },
+       { id: 100, url: 'http://olxi0fko.ukminlt.fun/iptv/HV7NY9RSQCHYZK/1066/index.m3u8', name: 'Дорама', logo: '', category: 'кино', nameTerms: ['кино'] },
+       { id: 150, url: 'http://olxi0fko.ukminlt.fun/iptv/HV7NY9RSQCHYZK/1066/index.m3u8', name: 'Дорама', logo: '', category: 'кино', nameTerms: ['кино'] },
+       { id: 200, url: 'http://olxi0fko.ukminlt.fun/iptv/HV7NY9RSQCHYZK/1066/index.m3u8', name: 'Дорама', logo: '', category: 'кино', nameTerms: ['кино'] }
+     ]
+
+     // Insert non-matching records between them to create gaps
+     for (let i = 1; i <= 200; i++) {
+       if (matchingRecords.find(r => r.id === i)) {
+         await db.insert(matchingRecords.find(r => r.id === i))
+       } else {
+         await db.insert({
+           id: i,
+           url: `http://example.com/${i}/index.m3u8`,
+           name: `Channel ${i}`,
+           logo: '',
+           category: 'other',
+           nameTerms: [`other${i}`]
+         })
+       }
+     }
+
+     await db.save()
+     await db.close()
+
+     // Reopen to force file-based reading
+     db = new Database(dbPath, {
+       fields: {
+         id: 'number',
+         url: 'string',
+         name: 'string',
+         logo: 'string',
+         category: 'string',
+         nameTerms: 'array:string'
+       },
+       indexes: {
+         nameTerms: 'array:string'
+       },
+       debugMode: false
+     })
+
+     await db.init()
+
+     // Query that returns only the 5 matching records (id: 1, 50, 100, 150, 200)
+     // These are non-adjacent and will be grouped together if they fit in 512KB
+     const query = { nameTerms: 'кино' }
+     const results = []
+     const errors = []
+
+     try {
+       for await (const record of db.walk(query)) {
+         // Try to access the record - this will fail if deserialization was incomplete
+         const recordStr = JSON.stringify(record)
+
+         // Check if record is complete (has all expected fields)
+         if (!record.id || !record.url || !record.name || !record.category || !record.nameTerms) {
+           errors.push(`Incomplete record: ${recordStr.substring(0, 200)}`)
+         }
+
+         // Verify UTF-8 characters are intact
+         if (record.name !== 'Дорама') {
+           errors.push(`UTF-8 corruption: expected 'Дорама', got '${record.name}'`)
+         }
+
+         results.push(record)
+       }
+     } catch (error) {
+       // This is the bug we're looking for!
+       const errorMsg = error.message || ''
+
+       if (errorMsg.includes('Failed to deserialize') ||
+           errorMsg.includes('Unexpected non-whitespace') ||
+           errorMsg.includes('JSON') ||
+           errorMsg.includes('position')) {
+
+         // FAIL THE TEST - bug detected!
+         throw new Error(`🐛 BUG CONFIRMED: Deserialization error with non-adjacent ranges!\n\n` +
+           `Error: ${errorMsg}\n\n` +
+           `Root Cause: readGroupedRange() in FileHandler.mjs line 311 uses:\n` +
+           ` content.substring(relativeStart, relativeEnd)\n\n` +
+           `When multiple non-adjacent ranges are in the same buffer, substring() can extract:\n` +
+           `1. Truncated lines (ending mid-UTF-8 character)\n` +
+           `2. Multiple lines concatenated together\n` +
+           `3. Part of one line + part of another\n\n` +
+           `The fix must extract complete lines by finding newline boundaries.\n` +
+           `Errors collected: ${errors.join('; ')}`)
+       }
+
+       // Re-throw other errors
+       throw error
+     }
+
+     // If we get here, verify no errors occurred
+     if (errors.length > 0) {
+       throw new Error(`Validation errors detected:\n${errors.join('\n')}`)
+     }
+
+     // Should have found 5 records
+     expect(results.length).toBe(5)
+
+     // Verify all results are correct
+     results.forEach(result => {
+       expect(result.name).toBe('Дорама')
+       expect(result.category).toBe('кино')
+       expect(result.nameTerms).toContain('кино')
+     })
+
+     await db.destroy()
+   })
+ })
+
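The failure message in this test spells out the suspected root cause: when readGroupedRange() serves several non-adjacent ranges from one buffer, a plain content.substring(relativeStart, relativeEnd) can return a truncated line or splice parts of two lines together. A minimal sketch of the newline-boundary snapping the message asks for, written against an already-decoded string (illustrative only, not the actual FileHandler.mjs change):

    // Widen the requested range to whole lines by snapping to the surrounding
    // newline characters before the slice is deserialized.
    function extractCompleteLines(content, relativeStart, relativeEnd) {
      const lineStart = content.lastIndexOf('\n', relativeStart) + 1 // 0 when no newline precedes
      let lineEnd = content.indexOf('\n', relativeEnd)
      if (lineEnd === -1) lineEnd = content.length
      return content.slice(lineStart, lineEnd)
    }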
@@ -0,0 +1,100 @@
+ import { Database } from '../src/Database.mjs'
+ import fs from 'fs'
+
+ describe('Term mapping index line number regression', () => {
+   let testDbPath
+   let testIdxPath
+
+   beforeEach(() => {
+     const uniqueSuffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`
+     testDbPath = `test-line-number-regression-${uniqueSuffix}.jdb`
+     testIdxPath = testDbPath.replace('.jdb', '.idx.jdb')
+   })
+
+   afterEach(() => {
+     for (const filePath of [testDbPath, testIdxPath]) {
+       if (filePath && fs.existsSync(filePath)) {
+         try {
+           fs.unlinkSync(filePath)
+         } catch (error) {
+           console.warn(`⚠️ Failed to delete ${filePath}: ${error.message}`)
+         }
+       }
+     }
+   })
+
+   test('should index new term mapping values appended after reload', async () => {
+     // Seed database with a first record and persist it to disk
+     const initialDb = new Database(testDbPath, {
+       indexes: { nameTerms: 'array:string' },
+       debugMode: false
+     })
+     await initialDb.init()
+     await initialDb.insert({
+       id: '1',
+       name: 'CANAL DO CLIENTE HD',
+       nameTerms: ['cliente', 'canal']
+     })
+     await initialDb.save()
+     await initialDb.close()
+
+     // Reopen the same database and append a new record with different terms
+     const db = new Database(testDbPath, {
+       indexes: { nameTerms: 'array:string' },
+       debugMode: false
+     })
+     await db.init()
+
+     const firstQuery = await db.find({ nameTerms: 'cliente' })
+     expect(firstQuery.map(record => record.id)).toEqual(['1'])
+
+     await db.insert({
+       id: '2',
+       name: 'Telecine Fun HD',
+       nameTerms: ['telecine', 'fun']
+     })
+     await db.save()
+
+     const telecineResults = await db.find({ nameTerms: 'telecine' })
+     const clienteResults = await db.find({ nameTerms: 'cliente' })
+
+     expect(telecineResults.map(record => record.id)).toEqual(['2'])
+     expect(clienteResults.map(record => record.id)).toEqual(['1'])
+
+     await db.destroy()
+   })
+
+   test('should remove old term mapping entries when record is updated', async () => {
+     const db = new Database(testDbPath, {
+       indexes: { nameTerms: 'array:string' },
+       debugMode: false
+     })
+     await db.init()
+
+     await db.insert({
+       id: '1',
+       name: 'CANAL DO CLIENTE HD',
+       nameTerms: ['cliente', 'canal']
+     })
+     await db.save()
+
+     await db.update({ id: '1' }, {
+       name: 'Telecine Fun HD',
+       nameTerms: ['telecine', 'fun']
+     })
+
+     await db.save()
+
+     const telecineResults = await db.find({ nameTerms: 'telecine' })
+     const clienteResults = await db.find({ nameTerms: 'cliente' })
+     const combinedResults = await db.find({ nameTerms: { $all: ['telecine', 'fun'] } })
+
+     expect(telecineResults.map(record => record.id)).toEqual(['1'])
+     expect(clienteResults.map(record => record.id)).toEqual([])
+     expect(combinedResults.map(record => record.id)).toEqual(['1'])
+
+     await db.destroy()
+   })
+
+ })
+
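Both tests pin down the same invariant for the term index: appending a record after a reload must register its terms against the new line number, and updating a record must drop the stale term-to-line mappings before adding the new ones. A small sketch of that invariant, using a plain Map from term to a Set of line numbers rather than jexidb's internal structure:

    // Illustrative reindexing step: unmap the record's old terms, then map the
    // new terms to its line number.
    function reindexTerms(termIndex, lineNumber, oldTerms = [], newTerms = []) {
      for (const term of oldTerms) {
        termIndex.get(term)?.delete(lineNumber)
      }
      for (const term of newTerms) {
        if (!termIndex.has(term)) termIndex.set(term, new Set())
        termIndex.get(term).add(lineNumber)
      }
    }

After the update in the second test, 'cliente' and 'canal' would no longer resolve to the record while 'telecine' and 'fun' would, which is what the find() assertions check.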
@@ -0,0 +1,91 @@
+ import { Database } from '../src/Database.mjs'
+ import fs from 'fs'
+
+ describe('Indexed query fallback when index data missing', () => {
+   let testDbPath
+   let testIdxPath
+
+   beforeEach(() => {
+     const uniqueSuffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`
+     testDbPath = `test-missing-index-${uniqueSuffix}.jdb`
+     testIdxPath = testDbPath.replace('.jdb', '.idx.jdb')
+   })
+
+   afterEach(() => {
+     for (const filePath of [testDbPath, testIdxPath]) {
+       if (filePath && fs.existsSync(filePath)) {
+         try {
+           fs.unlinkSync(filePath)
+         } catch (error) {
+           console.warn(`⚠️ Failed to delete ${filePath}: ${error.message}`)
+         }
+       }
+     }
+   })
+
+   test('should fall back to streaming when indexed field data is missing', async () => {
+     const db = new Database(testDbPath, {
+       indexes: {
+         channel: 'string',
+         start: 'number',
+         end: 'number'
+       },
+       debugMode: false
+     })
+     await db.init()
+
+     await db.insert({
+       id: '1',
+       channel: 'Sample Channel',
+       start: 1000,
+       end: 2000
+     })
+
+     await db.insert({
+       id: '2',
+       channel: 'Another Channel',
+       start: 3000,
+       end: 4000
+     })
+
+     await db.save()
+     await db.close()
+
+     const idxContentRaw = fs.readFileSync(testIdxPath, 'utf8')
+     const idxContent = JSON.parse(idxContentRaw)
+
+     if (!idxContent.index) {
+       idxContent.index = {}
+     }
+     if (!idxContent.index.data) {
+       idxContent.index.data = {}
+     }
+
+     // Simulate missing index data for channel field while keeping other index data intact
+     idxContent.index.data.channel = {}
+
+     fs.writeFileSync(testIdxPath, JSON.stringify(idxContent, null, 2), 'utf8')
+
+     const reopenedDb = new Database(testDbPath, {
+       create: false,
+       indexes: {
+         channel: 'string',
+         start: 'number',
+         end: 'number'
+       },
+       allowIndexRebuild: true, // Enable rebuild for partial index corruption
+       debugMode: false
+     })
+
+     await reopenedDb.init()
+
+     const count = await reopenedDb.count({ channel: 'Sample Channel' })
+     expect(count).toBe(1)
+
+     const results = await reopenedDb.find({ channel: 'Sample Channel' })
+     expect(results.map(record => record.id)).toEqual(['1'])
+
+     await reopenedDb.destroy()
+   })
+ })
+
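The test blanks out the persisted channel bucket and still expects correct counts and results after reopening with allowIndexRebuild enabled. One way to picture the behaviour being exercised is a query path that treats an empty bucket as unusable and falls back to scanning records; the sketch below works on a plain array of records and reuses the value -> { set } bucket shape seen in the other tests, and is not jexidb's implementation:

    // Illustrative fallback: trust the index bucket only when it has data,
    // otherwise answer the query by scanning every record.
    function findWithFallback(indexBucket, records, field, value) {
      const hasIndexData = indexBucket && Object.keys(indexBucket).length > 0
      if (hasIndexData) {
        const lines = indexBucket[value]?.set ?? new Set()
        return records.filter((record, line) => lines.has(line))
      }
      return records.filter(record => record[field] === value)
    }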