jexidb 2.1.1 → 2.1.2
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/Database.cjs +7621 -113
- package/package.json +9 -2
- package/src/Database.mjs +244 -79
- package/src/SchemaManager.mjs +325 -268
- package/src/Serializer.mjs +20 -1
- package/src/managers/QueryManager.mjs +74 -18
- package/.babelrc +0 -13
- package/.gitattributes +0 -2
- package/CHANGELOG.md +0 -140
- package/babel.config.json +0 -5
- package/docs/API.md +0 -1057
- package/docs/EXAMPLES.md +0 -701
- package/docs/README.md +0 -194
- package/examples/iterate-usage-example.js +0 -157
- package/examples/simple-iterate-example.js +0 -115
- package/jest.config.js +0 -24
- package/scripts/README.md +0 -47
- package/scripts/benchmark-array-serialization.js +0 -108
- package/scripts/clean-test-files.js +0 -75
- package/scripts/prepare.js +0 -31
- package/scripts/run-tests.js +0 -80
- package/scripts/score-mode-demo.js +0 -45
- package/test/$not-operator-with-and.test.js +0 -282
- package/test/README.md +0 -8
- package/test/close-init-cycle.test.js +0 -256
- package/test/coverage-method.test.js +0 -93
- package/test/critical-bugs-fixes.test.js +0 -1069
- package/test/deserialize-corruption-fixes.test.js +0 -296
- package/test/exists-method.test.js +0 -318
- package/test/explicit-indexes-comparison.test.js +0 -219
- package/test/filehandler-non-adjacent-ranges-bug.test.js +0 -175
- package/test/index-line-number-regression.test.js +0 -100
- package/test/index-missing-index-data.test.js +0 -91
- package/test/index-persistence.test.js +0 -491
- package/test/index-serialization.test.js +0 -314
- package/test/indexed-query-mode.test.js +0 -360
- package/test/insert-session-auto-flush.test.js +0 -353
- package/test/iterate-method.test.js +0 -272
- package/test/legacy-operator-compat.test.js +0 -154
- package/test/query-operators.test.js +0 -238
- package/test/regex-array-fields.test.js +0 -129
- package/test/score-method.test.js +0 -298
- package/test/setup.js +0 -17
- package/test/term-mapping-minimal.test.js +0 -154
- package/test/term-mapping-simple.test.js +0 -257
- package/test/term-mapping.test.js +0 -514
- package/test/writebuffer-flush-resilience.test.js +0 -204
--- a/package/test/close-init-cycle.test.js
+++ /dev/null
@@ -1,256 +0,0 @@
-/**
- * Close → Init Cycle Tests
- *
- * Tests the ability to call init() after close() to reopen a database
- * This addresses the bug where databases couldn't be reopened after closing
- */
-
-import Database from '../src/Database.mjs'
-import fs from 'fs'
-
-describe('Close → Init Cycle', () => {
-  let db
-  const testFile = 'test-close-init-cycle.jdb'
-
-  beforeEach(() => {
-    // Clean up any existing test files
-    if (fs.existsSync(testFile)) fs.unlinkSync(testFile)
-    if (fs.existsSync(testFile.replace('.jdb', '.idx.jdb'))) fs.unlinkSync(testFile.replace('.jdb', '.idx.jdb'))
-
-    db = new Database(testFile, {
-      create: true,
-      debugMode: false
-    })
-  })
-
-  afterEach(async () => {
-    if (db && !db.destroyed) {
-      try {
-        // Save any pending data before destroying
-        if (!db.closed && db.writeBuffer && db.writeBuffer.length > 0) {
-          await db.save()
-        }
-        await db.destroy()
-      } catch (error) {
-        // Ignore destroy errors for this test
-        console.warn('Destroy error ignored:', error.message)
-      }
-    }
-  })
-
-  describe('Basic Close → Init Cycle', () => {
-    it('should allow init() after close()', async () => {
-      // Initialize database
-      await db.init()
-      expect(db.initialized).toBe(true)
-      expect(db.closed).toBe(false)
-
-      // Insert some data
-      await db.insert({ name: 'Test1', value: 100 })
-      await db.insert({ name: 'Test2', value: 200 })
-
-      // Close database
-      await db.close()
-      expect(db.closed).toBe(true)
-      expect(db.initialized).toBe(false)
-      expect(db.destroyed).toBe(false) // Should not be destroyed
-
-      // Reinitialize database
-      await db.init()
-      expect(db.initialized).toBe(true)
-      expect(db.closed).toBe(false)
-
-      // Verify data is still accessible
-      const results = await db.find({})
-      expect(results).toHaveLength(2)
-      expect(results.some(r => r.name === 'Test1')).toBe(true)
-      expect(results.some(r => r.name === 'Test2')).toBe(true)
-    })
-
-    it('should support multiple close → init cycles', async () => {
-      await db.init()
-      await db.insert({ name: 'Test', value: 123 })
-
-      // First cycle
-      await db.close()
-      await db.init()
-      let results = await db.find({})
-      expect(results).toHaveLength(1)
-
-      // Second cycle
-      await db.close()
-      await db.init()
-      results = await db.find({})
-      expect(results).toHaveLength(1)
-
-      // Third cycle
-      await db.close()
-      await db.init()
-      results = await db.find({})
-      expect(results).toHaveLength(1)
-    })
-
-    it('should preserve data across close → init cycles', async () => {
-      await db.init()
-
-      // Insert initial data
-      await db.insert({ name: 'Initial', value: 1 })
-      await db.close()
-      await db.init()
-
-      // Add more data
-      await db.insert({ name: 'AfterReopen', value: 2 })
-      await db.close()
-      await db.init()
-
-      // Verify all data is preserved
-      const results = await db.find({})
-      expect(results).toHaveLength(2)
-      expect(results.some(r => r.name === 'Initial')).toBe(true)
-      expect(results.some(r => r.name === 'AfterReopen')).toBe(true)
-    })
-  })
-
-  describe('Operations After Reinit', () => {
-    it('should support basic operations after reinit', async () => {
-      await db.init()
-      await db.insert({ name: 'Test1', value: 100 })
-      await db.save() // Ensure data is saved
-      await db.close()
-      await db.init()
-
-      // Should be able to query existing data
-      const results = await db.find({})
-      expect(results).toHaveLength(1)
-      expect(results[0].name).toBe('Test1')
-    })
-
-    it('should support insert operations after reinit', async () => {
-      await db.init()
-      await db.close()
-      await db.init()
-
-      await db.insert({ name: 'Test2', value: 200 })
-      const results = await db.find({})
-      expect(results).toHaveLength(1)
-      expect(results[0].name).toBe('Test2')
-    })
-  })
-
-  describe('Error Handling', () => {
-    it('should throw error for operations on closed database', async () => {
-      await db.init()
-      await db.insert({ name: 'Test', value: 123 })
-      await db.close()
-
-      // Operations on closed database should throw error
-      await expect(db.find({})).rejects.toThrow('Database is closed')
-      await expect(db.insert({ name: 'Test2', value: 456 })).rejects.toThrow('Database is closed')
-      await expect(db.update({ name: 'Test' }, { value: 999 })).rejects.toThrow('Database is closed')
-      await expect(db.delete({ name: 'Test' })).rejects.toThrow('Database is closed')
-    })
-
-    it('should allow reinit after operations on closed database', async () => {
-      await db.init()
-      await db.insert({ name: 'Test', value: 123 })
-      await db.close()
-
-      // Try operation on closed database (should fail)
-      await expect(db.find({})).rejects.toThrow('Database is closed')
-
-      // Reinit should work
-      await db.init()
-      const results = await db.find({})
-      expect(results).toHaveLength(1)
-    })
-
-    it('should not allow init() on destroyed database', async () => {
-      await db.init()
-
-      // Skip this test due to writeBuffer bug in destroy()
-      // TODO: Fix writeBuffer bug and re-enable this test
-      console.log('⚠️ Skipping destroyed database test due to writeBuffer bug')
-    })
-  })
-
-  describe('State Management', () => {
-    it('should have correct state flags during close → init cycle', async () => {
-      // Initial state
-      expect(db.initialized).toBe(false)
-      expect(db.closed).toBe(false)
-      expect(db.destroyed).toBe(false)
-
-      // After init
-      await db.init()
-      expect(db.initialized).toBe(true)
-      expect(db.closed).toBe(false)
-      expect(db.destroyed).toBe(false)
-
-      // After close
-      await db.close()
-      expect(db.initialized).toBe(false)
-      expect(db.closed).toBe(true)
-      expect(db.destroyed).toBe(false)
-
-      // After reinit
-      await db.init()
-      expect(db.initialized).toBe(true)
-      expect(db.closed).toBe(false)
-      expect(db.destroyed).toBe(false)
-    })
-
-    it('should reset write buffer state after close', async () => {
-      await db.init()
-      await db.insert({ name: 'Test', value: 123 })
-
-      // Write buffer should have data
-      expect(db.writeBuffer.length).toBeGreaterThan(0)
-
-      await db.close()
-
-      // Write buffer should be cleared after close
-      expect(db.writeBuffer.length).toBe(0)
-      expect(db.shouldSave).toBe(false)
-      expect(db.isSaving).toBe(false)
-    })
-  })
-
-  describe('Performance and Memory', () => {
-    it('should not leak memory during multiple close → init cycles', async () => {
-      const initialMemory = process.memoryUsage().heapUsed
-
-      for (let i = 0; i < 10; i++) {
-        await db.init()
-        await db.insert({ name: `Test${i}`, value: i })
-        await db.close()
-      }
-
-      const finalMemory = process.memoryUsage().heapUsed
-      const memoryIncrease = finalMemory - initialMemory
-
-      // Memory increase should be reasonable (less than 10MB)
-      expect(memoryIncrease).toBeLessThan(10 * 1024 * 1024)
-    })
-
-    it('should handle large datasets across close → init cycles', async () => {
-      await db.init()
-
-      // Insert large dataset
-      for (let i = 0; i < 1000; i++) {
-        await db.insert({ name: `Record${i}`, value: i, data: 'x'.repeat(100) })
-      }
-
-      await db.close()
-      await db.init()
-
-      // Verify all data is accessible
-      const results = await db.find({})
-      expect(results).toHaveLength(1000)
-
-      // Verify specific records
-      const specific = await db.find({ name: 'Record500' })
-      expect(specific).toHaveLength(1)
-      expect(specific[0].value).toBe(500)
-    })
-  })
-})
--- a/package/test/coverage-method.test.js
+++ /dev/null
@@ -1,93 +0,0 @@
-import fs from 'fs'
-import path from 'path'
-import { Database } from '../src/Database.mjs'
-
-const cleanUp = (filePath) => {
-  try {
-    if (fs.existsSync(filePath)) {
-      fs.unlinkSync(filePath)
-    }
-  } catch (error) {
-    // Ignore cleanup errors
-  }
-}
-
-describe('Coverage Method Tests', () => {
-  let db
-  let testDbPath
-  let testIdxPath
-
-  beforeEach(async () => {
-    const testId = Math.random().toString(36).substring(2, 10)
-    testDbPath = path.join(process.cwd(), `test-coverage-${testId}.jdb`)
-    testIdxPath = path.join(process.cwd(), `test-coverage-${testId}.idx.jdb`)
-
-    cleanUp(testDbPath)
-    cleanUp(testIdxPath)
-
-    db = new Database(testDbPath, {
-      indexes: {
-        nameTerms: 'array:string',
-        genre: 'string'
-      }
-    })
-    await db.init()
-  })
-
-  afterEach(async () => {
-    if (db) {
-      await db.close()
-      db = null
-    }
-
-    cleanUp(testDbPath)
-    cleanUp(testIdxPath)
-  })
-
-  describe('Grouped Coverage', () => {
-    test('calculates percentage with include and exclude groups', async () => {
-      await db.insert({ id: 1, title: 'Rede Brasil', nameTerms: ['rede', 'brasil', 'sul'] })
-      await db.insert({ id: 2, title: 'Band SP', nameTerms: ['band', 'sp'] })
-      await db.save()
-
-      const coverage = await db.coverage('nameTerms', [
-        { terms: ['sbt'], excludes: [] },
-        { terms: ['rede', 'brasil'], excludes: ['norte'] },
-        { terms: ['tv', 'sancouper'], excludes: [] },
-        { terms: ['band'] }
-      ])
-
-      expect(coverage).toBeCloseTo(50)
-    })
-
-    test('applies excludes before counting matches', async () => {
-      await db.insert({ id: 1, title: 'Rede Norte', nameTerms: ['rede', 'brasil', 'norte'] })
-      await db.insert({ id: 2, title: 'Rede Sul', nameTerms: ['rede', 'sul'] })
-      await db.save()
-
-      const coverage = await db.coverage('nameTerms', [
-        { terms: ['rede', 'brasil'], excludes: ['norte'] },
-        { terms: ['rede'] }
-      ])
-
-      expect(coverage).toBeCloseTo(50)
-    })
-
-    test('works with string indexed field and optional excludes', async () => {
-      await db.insert({ id: 1, title: 'Song A', genre: 'samba' })
-      await db.insert({ id: 2, title: 'Song B', genre: 'pagode' })
-      await db.insert({ id: 3, title: 'Song C', genre: 'rock' })
-      await db.save()
-
-      const coverage = await db.coverage('genre', [
-        { terms: ['samba'] },
-        { terms: ['pagode'], excludes: [] },
-        { terms: ['rock'], excludes: ['rock'] },
-        { terms: ['funk'] }
-      ])
-
-      expect(coverage).toBeCloseTo(50)
-    })
-  })
-})
-