jexidb 2.1.0 → 2.1.1
This diff reflects the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/Database.cjs +1642 -334
- package/docs/API.md +1057 -1051
- package/package.json +1 -1
- package/scripts/benchmark-array-serialization.js +108 -0
- package/scripts/score-mode-demo.js +45 -0
- package/src/Database.mjs +1362 -167
- package/src/FileHandler.mjs +83 -44
- package/src/OperationQueue.mjs +23 -23
- package/src/Serializer.mjs +214 -23
- package/src/managers/IndexManager.mjs +778 -87
- package/src/managers/QueryManager.mjs +266 -49
- package/src/managers/TermManager.mjs +7 -7
- package/src/utils/operatorNormalizer.mjs +116 -0
- package/test/coverage-method.test.js +93 -0
- package/test/deserialize-corruption-fixes.test.js +296 -0
- package/test/exists-method.test.js +318 -0
- package/test/explicit-indexes-comparison.test.js +219 -0
- package/test/filehandler-non-adjacent-ranges-bug.test.js +175 -0
- package/test/index-line-number-regression.test.js +100 -0
- package/test/index-missing-index-data.test.js +91 -0
- package/test/index-persistence.test.js +205 -20
- package/test/insert-session-auto-flush.test.js +353 -0
- package/test/legacy-operator-compat.test.js +154 -0
- package/test/score-method.test.js +60 -0
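
Two of the additions listed above are exercised by the new test files reproduced below: corruption-tolerant deserialization in Serializer, and an index-only exists() check on Database. As a quick orientation before the diffs, here is a minimal usage sketch of exists() assembled from the calls those tests make; the option names ($all, caseInsensitive, excludes) come straight from the tests, while the database name and the inserted record are purely illustrative, so treat this as a sketch of the API surface rather than authoritative documentation:

// Hypothetical setup mirroring the options used in exists-method.test.js
const db = new Database('channels-demo', {
  termMapping: true,
  termMappingFields: ['nameTerms'],
  indexes: { nameTerms: 'array:string', group: 'string' }
})
await db.init()
await db.insert({ name: 'TV Globo', nameTerms: ['tv', 'globo'], group: 'Brazil' })
await db.save()

await db.exists('nameTerms', 'tv')                            // true - term is present in the index
await db.exists('nameTerms', ['tv', 'globo'], { $all: true }) // true - both terms hit the same record
await db.exists('nameTerms', 'TV', { caseInsensitive: true }) // true
await db.exists('nameTerms', 'tv', { excludes: ['globo'] })   // false here - the only 'tv' record also has 'globo'
await db.exists('group', 'Nonexistent')                       // false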
package/test/deserialize-corruption-fixes.test.js
@@ -0,0 +1,296 @@
+import { Database } from '../src/Database.mjs'
+import fs from 'fs'
+import path from 'path'
+
+describe('Deserialize Corruption Fixes', () => {
+  let testDir
+  let db
+
+  beforeEach(() => {
+    testDir = path.join(process.cwd(), 'test-files', 'deserialize-corruption')
+    fs.mkdirSync(testDir, { recursive: true })
+  })
+
+  afterEach(async () => {
+    if (db) {
+      await db.close()
+    }
+    // Clean up test files
+    if (fs.existsSync(testDir)) {
+      try {
+        fs.rmSync(testDir, { recursive: true, force: true })
+      } catch (error) {
+        console.warn('Could not clean up test directory:', testDir)
+      }
+    }
+  })
+
+  describe('Multiple JSON objects in same line', () => {
+    test('deserialize should recover from multiple JSON objects in same string', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test direct deserialization with multiple objects
+      const corruptedData = '{"name":"Test1","value":1}{"name":"Test2","value":2}'
+
+      // Should recover the first object
+      const result = serializer.deserialize(corruptedData)
+      expect(result).toBeTruthy()
+      expect(result.name).toBe('Test1')
+      expect(result.value).toBe(1)
+    })
+
+    test('deserialize should recover from multiple JSON arrays in same string', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test direct deserialization with multiple arrays
+      const corruptedData = '["http://test1.com","Test1"]["http://test2.com","Test2"]'
+
+      // Should recover the first array
+      const result = serializer.deserialize(corruptedData)
+      expect(result).toBeTruthy()
+      expect(Array.isArray(result)).toBe(true)
+    })
+
+    test('should handle deserialize with multiple objects gracefully', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test with multiple JSON objects in one string
+      const corruptedData = '{"name":"Test1","value":1}{"name":"Test2","value":2}'
+
+      // Should recover the first object
+      const result = serializer.deserialize(corruptedData)
+      expect(result).toBeTruthy()
+      expect(result.name).toBe('Test1')
+      expect(result.value).toBe(1)
+    })
+
+    test('should handle deserialize with multiple arrays gracefully', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test with multiple JSON arrays in one string
+      const corruptedData = '["http://test1.com","Test1"]["http://test2.com","Test2"]'
+
+      // Should recover the first array
+      const result = serializer.deserialize(corruptedData)
+      expect(result).toBeTruthy()
+      expect(Array.isArray(result)).toBe(true)
+    })
+
+    test('should handle JSON objects with braces inside string values', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test JSON object with braces inside string values
+      const dataWithBracesInString = '{"key": "value with { brace", "url": "http://example.com?param={value}"}'
+
+      const result = serializer.deserialize(dataWithBracesInString)
+      expect(result).toBeTruthy()
+      expect(result.key).toBe('value with { brace')
+      expect(result.url).toBe('http://example.com?param={value}')
+    })
+
+    test('should handle JSON arrays with brackets inside string values', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test JSON array with brackets inside string values
+      const dataWithBracketsInString = '["item with [ bracket", "url with [param]"]'
+
+      const result = serializer.deserialize(dataWithBracketsInString)
+      expect(result).toBeTruthy()
+      expect(Array.isArray(result)).toBe(true)
+      expect(result[0]).toBe('item with [ bracket')
+      expect(result[1]).toBe('url with [param]')
+    })
+
+    test('should handle JSON with escaped quotes and braces', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test JSON with escaped quotes and braces inside strings
+      const dataWithEscaped = '{"key": "value with \\" escaped quote and { brace", "nested": "text with } closing"}'
+
+      const result = serializer.deserialize(dataWithEscaped)
+      expect(result).toBeTruthy()
+      expect(result.key).toBe('value with " escaped quote and { brace')
+      expect(result.nested).toBe('text with } closing')
+    })
+
+    test('should handle multiple JSON objects when first has braces in strings', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test multiple JSON objects where first has braces in string values
+      const corruptedData = '{"key": "value with { brace", "url": "http://example.com?param={value}"}{"name":"Test2","value":2}'
+
+      // Should recover the first object correctly, ignoring braces inside strings
+      const result = serializer.deserialize(corruptedData)
+      expect(result).toBeTruthy()
+      expect(result.key).toBe('value with { brace')
+      expect(result.url).toBe('http://example.com?param={value}')
+      expect(result.name).toBeUndefined() // Should not include second object
+    })
+
+    test('should handle real-world URL array with special characters', async () => {
+      const Serializer = (await import('../src/Serializer.mjs')).default
+      const serializer = new Serializer({ debugMode: false })
+
+      // Test real-world scenario: URL array with special characters (similar to the original error)
+      const realWorldData = '["http://113.164.225.140:1935/live/quochoitvlive.stream_720p/playlist.m3u8?IMDSFULL","Quốc Hội","http://example.com?param={value}"]'
+
+      const result = serializer.deserialize(realWorldData)
+      expect(result).toBeTruthy()
+      expect(Array.isArray(result)).toBe(true)
+      expect(result[0]).toBe('http://113.164.225.140:1935/live/quochoitvlive.stream_720p/playlist.m3u8?IMDSFULL')
+      expect(result[1]).toBe('Quốc Hội')
+      expect(result[2]).toBe('http://example.com?param={value}')
+    })
+  })
+
+  describe('walk() error handling', () => {
+    test('should continue processing after encountering corrupted line', async () => {
+      const dbPath = path.join(testDir, 'walk-corruption.jdb')
+      db = new Database(dbPath, { clear: true, create: true, debugMode: false })
+      await db.init()
+
+      // Insert multiple records
+      for (let i = 0; i < 10; i++) {
+        await db.insert({ name: `Test${i}`, value: i })
+      }
+      await db.save()
+
+      // Manually corrupt one line by replacing with completely invalid data
+      // This tests that walk() continues processing other valid records
+      // Note: Corrupting the file manually can affect offsets, so we expect some records to be skipped
+      const fileContent = fs.readFileSync(dbPath, 'utf8')
+      const lines = fileContent.split('\n').filter(l => l.trim())
+
+      // Corrupt the 5th line - replace with invalid data
+      // The walk() should skip this and continue with other records
+      if (lines.length > 4) {
+        lines[4] = 'INVALID_LINE_WITH_NO_JSON'
+        const corruptedContent = lines.join('\n')
+        fs.writeFileSync(dbPath, corruptedContent, 'utf8')
+      }
+
+      // Walk should continue processing other records (skip the corrupted one)
+      let count = 0
+      const records = []
+      for await (const record of db.walk()) {
+        records.push(record)
+        count++
+      }
+
+      // Should process at least some records (walk() should not stop completely)
+      // Note: Manual file corruption can affect offsets, so we're lenient with the count
+      expect(count).toBeGreaterThan(0)
+      expect(count).toBeLessThan(11) // Should be less than 10 due to corrupted line
+
+      // Most importantly: walk() should complete without throwing an unhandled error
+      // The fact that we got here means it handled the corruption gracefully
+    })
+
+    test('should log errors in debug mode', async () => {
+      const dbPath = path.join(testDir, 'walk-debug.jdb')
+      db = new Database(dbPath, { clear: true, create: true, debugMode: true })
+      await db.init()
+
+      await db.insert({ name: 'Test1', value: 1 })
+      await db.insert({ name: 'Test2', value: 2 })
+      await db.insert({ name: 'Test3', value: 3 })
+      await db.save()
+
+      // Manually corrupt one line with invalid JSON (not the first one to avoid offset issues)
+      const fileContent = fs.readFileSync(dbPath, 'utf8')
+      const lines = fileContent.split('\n').filter(l => l.trim())
+      // Corrupt middle line with invalid data (line 1, not 0, to avoid breaking offsets)
+      if (lines.length > 1) {
+        lines[1] = 'INVALID_JSON_NO_BRACES_OR_BRACKETS'
+        fs.writeFileSync(dbPath, lines.join('\n'), 'utf8')
+      }
+
+      // Capture console.warn to verify errors are logged in debug mode
+      const consoleSpy = jest.spyOn(console, 'warn').mockImplementation(() => {})
+
+      let count = 0
+      let walkCompleted = false
+      try {
+        for await (const record of db.walk()) {
+          count++
+        }
+        walkCompleted = true
+      } catch (error) {
+        // walk() should not throw unhandled errors - it should catch and log them
+        throw error
+      }
+
+      // Most importantly: walk() should complete without throwing an unhandled error
+      expect(walkCompleted).toBe(true)
+
+      // If records were processed, verify we got some
+      // Note: Manual file corruption can affect offsets, so count may be 0
+      // The key is that walk() handled the corruption gracefully
+      if (count > 0) {
+        expect(count).toBeGreaterThan(0)
+        // If we processed records, we likely skipped the corrupted one
+        expect(count).toBeLessThan(4) // Should be less than 3 due to corrupted line
+      }
+
+      consoleSpy.mockRestore()
+    })
+  })
+
+  describe('save() waits for auto-flushes', () => {
+    test('save() should wait for auto-flushes before writing', async () => {
+      const dbPath = path.join(testDir, 'save-waits-flush.jdb')
+      db = new Database(dbPath, { clear: true, create: true })
+      await db.init()
+
+      const session = db.beginInsertSession({ batchSize: 10 })
+
+      // Insert records to trigger auto-flushes
+      for (let i = 0; i < 50; i++) {
+        await session.add({ name: `Record ${i}`, value: i })
+      }
+
+      // Save should wait for all auto-flushes to complete
+      await db.save()
+
+      // Verify all data was saved
+      expect(db.length).toBe(50)
+
+      // Verify data is accessible
+      const results = await db.find({ name: 'Record 0' })
+      expect(results.length).toBe(1)
+    })
+
+    test('save() should handle multiple active sessions', async () => {
+      const dbPath = path.join(testDir, 'save-multiple-sessions.jdb')
+      db = new Database(dbPath, { clear: true, create: true })
+      await db.init()
+
+      const session1 = db.beginInsertSession({ batchSize: 10 })
+      const session2 = db.beginInsertSession({ batchSize: 10 })
+
+      // Insert records in both sessions
+      const promises = []
+      for (let i = 0; i < 25; i++) {
+        promises.push(session1.add({ name: `Session1-${i}`, value: i }))
+        promises.push(session2.add({ name: `Session2-${i}`, value: i }))
+      }
+
+      await Promise.all(promises)
+
+      // Save should wait for all auto-flushes from both sessions
+      await db.save()
+
+      // Verify all data was saved
+      expect(db.length).toBe(50)
+    })
+  })
+})
+
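
The assertions above pin down a specific recovery behavior: when a stored line ends up holding more than one JSON value, deserialize() should return the first complete value, and the brace/bracket matching that finds its end must ignore delimiters that appear inside string literals or after a backslash escape. The following is a minimal sketch of that kind of scan, for orientation only; it is not Serializer's actual implementation:

function extractFirstJson(text) {
  // Find the end of the first complete JSON object or array, counting only
  // delimiters that are outside string literals, then parse just that slice.
  const open = text[0]
  if (open !== '{' && open !== '[') return null
  const close = open === '{' ? '}' : ']'
  let depth = 0
  let inString = false
  let escaped = false
  for (let i = 0; i < text.length; i++) {
    const ch = text[i]
    if (escaped) { escaped = false; continue }   // a char after a backslash never toggles state
    if (ch === '\\') { escaped = true; continue }
    if (ch === '"') { inString = !inString; continue }
    if (inString) continue                       // braces/brackets inside strings are data, not structure
    if (ch === open) depth++
    else if (ch === close && --depth === 0) {
      return JSON.parse(text.slice(0, i + 1))    // parse only the first value
    }
  }
  return null                                    // no complete value found
}

// extractFirstJson('{"name":"Test1","value":1}{"name":"Test2","value":2}')
//   -> { name: 'Test1', value: 1 }  (the second object is ignored, as the tests expect)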
package/test/exists-method.test.js
@@ -0,0 +1,318 @@
+/**
+ * Comprehensive tests for exists() method
+ * Tests index-only existence checks with maximum performance
+ */
+
+import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
+import Database from '../src/Database.mjs';
+import fs from 'fs';
+import path from 'path';
+
+describe('exists() Method', () => {
+  let db;
+  const testDbPath = 'test-exists-method';
+
+  beforeEach(async () => {
+    // Clean up any existing test database
+    const files = [
+      testDbPath + '.jdb',
+      testDbPath + '.idx.jdb',
+      testDbPath + '.terms.jdb'
+    ];
+    for (const file of files) {
+      if (fs.existsSync(file)) {
+        fs.unlinkSync(file);
+      }
+    }
+
+    db = new Database(testDbPath, {
+      debugMode: false,
+      termMapping: true,
+      termMappingFields: ['nameTerms', 'tags'],
+      indexes: {
+        nameTerms: 'array:string',
+        tags: 'array:string',
+        group: 'string',
+        rating: 'number'
+      }
+    });
+
+    await db.init();
+
+    // Insert comprehensive test data
+    const testData = [
+      { id: 1, name: 'TV Câmara', nameTerms: ['tv', 'câmara'], tags: ['news', 'politics'], group: 'Brazil', rating: 4.5 },
+      { id: 2, name: 'TV Cultura', nameTerms: ['tv', 'cultura'], tags: ['culture', 'education'], group: 'Brazil', rating: 4.2 },
+      { id: 3, name: 'SBT', nameTerms: ['sbt'], tags: ['entertainment'], group: 'Brazil', rating: 3.8 },
+      { id: 4, name: 'Record News', nameTerms: ['record', 'news'], tags: ['news'], group: 'Brazil', rating: 4.0 },
+      { id: 5, name: 'CNN', nameTerms: ['cnn'], tags: ['news', 'international'], group: 'International', rating: 4.7 },
+      { id: 6, name: 'BBC', nameTerms: ['bbc'], tags: ['news', 'international'], group: 'International', rating: 4.6 },
+      { id: 7, name: 'TV Globo', nameTerms: ['tv', 'globo'], tags: ['entertainment', 'news'], group: 'Brazil', rating: 4.3 },
+      { id: 8, name: 'TV Record', nameTerms: ['tv', 'record'], tags: ['entertainment'], group: 'Brazil', rating: 3.9 },
+      { id: 9, name: 'Discovery', nameTerms: ['discovery'], tags: ['documentary', 'education'], group: 'International', rating: 4.4 },
+      { id: 10, name: 'National Geographic', nameTerms: ['national', 'geographic'], tags: ['documentary', 'nature'], group: 'International', rating: 4.8 }
+    ];
+
+    for (const record of testData) {
+      await db.insert(record);
+    }
+
+    // Save to ensure indexes are persisted
+    await db.save();
+  });
+
+  afterEach(async () => {
+    if (db) {
+      await db.close();
+    }
+
+    // Clean up test files
+    const files = [
+      testDbPath + '.jdb',
+      testDbPath + '.idx.jdb',
+      testDbPath + '.terms.jdb'
+    ];
+    for (const file of files) {
+      if (fs.existsSync(file)) {
+        fs.unlinkSync(file);
+      }
+    }
+  });
+
+  describe('Basic exists() - $in behavior (default)', () => {
+    it('should return true if term exists', async () => {
+      const exists = await db.exists('nameTerms', 'tv');
+      expect(exists).toBe(true);
+    });
+
+    it('should return false if term does not exist', async () => {
+      const exists = await db.exists('nameTerms', 'nonexistent');
+      expect(exists).toBe(false);
+    });
+
+    it('should return true if any term in array exists', async () => {
+      const exists = await db.exists('nameTerms', ['tv', 'nonexistent']);
+      expect(exists).toBe(true);
+    });
+
+    it('should return false if no terms in array exist', async () => {
+      const exists = await db.exists('nameTerms', ['nonexistent1', 'nonexistent2']);
+      expect(exists).toBe(false);
+    });
+
+    it('should match count() > 0 behavior', async () => {
+      const testTerms = ['tv', 'sbt', 'cnn', 'nonexistent'];
+
+      for (const term of testTerms) {
+        const exists = await db.exists('nameTerms', term);
+        const count = await db.count({ nameTerms: term });
+        expect(exists).toBe(count > 0);
+      }
+    });
+  });
+
+  describe('exists() with $all option', () => {
+    it('should return true if all terms exist and have intersection', async () => {
+      // 'tv' and 'globo' both exist and have intersection (TV Globo)
+      const exists = await db.exists('nameTerms', ['tv', 'globo'], { $all: true });
+      expect(exists).toBe(true);
+    });
+
+    it('should return false if all terms exist but no intersection', async () => {
+      // 'sbt' and 'cnn' both exist but have no intersection
+      const exists = await db.exists('nameTerms', ['sbt', 'cnn'], { $all: true });
+      expect(exists).toBe(false);
+    });
+
+    it('should return false if any term does not exist', async () => {
+      const exists = await db.exists('nameTerms', ['tv', 'nonexistent'], { $all: true });
+      expect(exists).toBe(false);
+    });
+
+    it('should return true for single term with $all', async () => {
+      const exists = await db.exists('nameTerms', 'tv', { $all: true });
+      expect(exists).toBe(true);
+    });
+
+    it('should match count() with $all behavior', async () => {
+      const testCases = [
+        { terms: ['tv', 'globo'], shouldExist: true },
+        { terms: ['tv', 'cultura'], shouldExist: true },
+        { terms: ['sbt', 'cnn'], shouldExist: false },
+        { terms: ['tv', 'nonexistent'], shouldExist: false }
+      ];
+
+      for (const testCase of testCases) {
+        const exists = await db.exists('nameTerms', testCase.terms, { $all: true });
+        const count = await db.count({ nameTerms: { $all: testCase.terms } });
+        expect(exists).toBe(count > 0);
+        expect(exists).toBe(testCase.shouldExist);
+      }
+    });
+  });
+
+  describe('exists() with term mapping', () => {
+    it('should work with term mapping fields', async () => {
+      const exists = await db.exists('nameTerms', 'tv');
+      expect(exists).toBe(true);
+    });
+
+    it('should return false for unmapped terms', async () => {
+      // Term that was never inserted should not exist in term mapping
+      const exists = await db.exists('nameTerms', 'neverinserted');
+      expect(exists).toBe(false);
+    });
+
+    it('should work with $all and term mapping', async () => {
+      const exists = await db.exists('nameTerms', ['tv', 'globo'], { $all: true });
+      expect(exists).toBe(true);
+    });
+  });
+
+  describe('exists() with case-insensitive option', () => {
+    it('should find matches case-insensitively', async () => {
+      const exists = await db.exists('nameTerms', 'TV', { caseInsensitive: true });
+      expect(exists).toBe(true);
+    });
+
+    it('should work with case-insensitive and $all', async () => {
+      const exists = await db.exists('nameTerms', ['TV', 'GLOBO'], {
+        $all: true,
+        caseInsensitive: true
+      });
+      expect(exists).toBe(true);
+    });
+  });
+
+  describe('exists() edge cases', () => {
+    it('should return false for non-indexed field', async () => {
+      const exists = await db.exists('name', 'TV Câmara');
+      expect(exists).toBe(false);
+    });
+
+    it('should return false for invalid fieldName', async () => {
+      const exists = await db.exists('', 'tv');
+      expect(exists).toBe(false);
+
+      const exists2 = await db.exists(null, 'tv');
+      expect(exists2).toBe(false);
+    });
+
+    it('should return false for empty terms array', async () => {
+      const exists = await db.exists('nameTerms', []);
+      expect(exists).toBe(false);
+    });
+
+    it('should handle numeric fields', async () => {
+      const exists = await db.exists('rating', 4.5);
+      expect(exists).toBe(true);
+
+      const exists2 = await db.exists('rating', 999);
+      expect(exists2).toBe(false);
+    });
+  });
+
+  describe('exists() performance - index-only (no disk I/O)', () => {
+    it('should be faster than count() for existence checks', async () => {
+      const testTerm = 'tv';
+
+      // Warm up
+      await db.exists('nameTerms', testTerm);
+      await db.count({ nameTerms: testTerm });
+
+      // Measure exists()
+      const startExists = Date.now();
+      for (let i = 0; i < 100; i++) {
+        await db.exists('nameTerms', testTerm);
+      }
+      const timeExists = Date.now() - startExists;
+
+      // Measure count()
+      const startCount = Date.now();
+      for (let i = 0; i < 100; i++) {
+        await db.count({ nameTerms: testTerm });
+      }
+      const timeCount = Date.now() - startCount;
+
+      // exists() should be faster (index-only, no disk I/O)
+      // Note: This is a basic check - actual performance depends on many factors
+      console.log(`exists(): ${timeExists}ms, count(): ${timeCount}ms`);
+      expect(timeExists).toBeLessThan(timeCount * 10); // exists() should be significantly faster
+    });
+
+    it('should work directly with indexManager (synchronous)', () => {
+      // Test that indexManager.exists() is synchronous and works
+      const exists = db.indexManager.exists('nameTerms', 'tv');
+      expect(exists).toBe(true);
+
+      const notExists = db.indexManager.exists('nameTerms', 'nonexistent');
+      expect(notExists).toBe(false);
+    });
+  });
+
+  describe('exists() with multiple fields', () => {
+    it('should work with tags field', async () => {
+      const exists = await db.exists('tags', 'news');
+      expect(exists).toBe(true);
+
+      const existsAll = await db.exists('tags', ['news', 'politics'], { $all: true });
+      expect(existsAll).toBe(true); // TV Câmara has both
+    });
+
+    it('should work with group field', async () => {
+      const exists = await db.exists('group', 'Brazil');
+      expect(exists).toBe(true);
+
+      const exists2 = await db.exists('group', 'Nonexistent');
+      expect(exists2).toBe(false);
+    });
+  });
+
+  describe('exists() with excludes option', () => {
+    it('should return false if all matches are excluded', async () => {
+      // 'tv' exists, but if we exclude all records that have 'tv', should return false
+      // Actually, this tests if 'tv' exists but NOT 'globo' (TV Globo has both)
+      const exists = await db.exists('nameTerms', 'tv', { excludes: ['globo'] });
+      expect(exists).toBe(true); // TV Câmara, TV Cultura, TV Record have 'tv' but not 'globo'
+    });
+
+    it('should return true if some matches are not excluded', async () => {
+      // Records with 'tv' but not 'cultura'
+      const exists = await db.exists('nameTerms', 'tv', { excludes: ['cultura'] });
+      expect(exists).toBe(true); // TV Câmara, TV Globo, TV Record have 'tv' but not 'cultura'
+    });
+
+    it('should work with $all and excludes', async () => {
+      // Records with both 'tv' and 'globo' but not 'news'
+      const exists = await db.exists('nameTerms', ['tv', 'globo'], {
+        $all: true,
+        excludes: ['news']
+      });
+      expect(exists).toBe(true); // TV Globo has 'tv' and 'globo' but tags have 'news', not nameTerms
+    });
+
+    it('should return false if excludes remove all candidates', async () => {
+      // Try to find 'tv' but exclude something that all 'tv' records have
+      // This is tricky - let's use a term that doesn't exist to test the logic
+      const exists = await db.exists('nameTerms', 'nonexistent', { excludes: ['also-nonexistent'] });
+      expect(exists).toBe(false);
+    });
+
+    it('should work with excludes and case-insensitive', async () => {
+      const exists = await db.exists('nameTerms', 'TV', {
+        caseInsensitive: true,
+        excludes: ['globo']
+      });
+      expect(exists).toBe(true);
+    });
+
+    it('should work with multiple exclude terms', async () => {
+      // Records with 'tv' but not 'globo' and not 'cultura'
+      const exists = await db.exists('nameTerms', 'tv', {
+        excludes: ['globo', 'cultura']
+      });
+      expect(exists).toBe(true); // TV Câmara, TV Record have 'tv' but not 'globo' or 'cultura'
+    });
+  });
+});
+