jexidb 2.0.3 → 2.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79) hide show
  1. package/.babelrc +13 -0
  2. package/.gitattributes +2 -0
  3. package/CHANGELOG.md +132 -101
  4. package/LICENSE +21 -21
  5. package/README.md +301 -639
  6. package/babel.config.json +5 -0
  7. package/dist/Database.cjs +5204 -0
  8. package/docs/API.md +908 -241
  9. package/docs/EXAMPLES.md +701 -177
  10. package/docs/README.md +194 -184
  11. package/examples/iterate-usage-example.js +157 -0
  12. package/examples/simple-iterate-example.js +115 -0
  13. package/jest.config.js +24 -0
  14. package/package.json +63 -54
  15. package/scripts/README.md +47 -0
  16. package/scripts/benchmark-array-serialization.js +108 -0
  17. package/scripts/clean-test-files.js +75 -0
  18. package/scripts/prepare.js +31 -0
  19. package/scripts/run-tests.js +80 -0
  20. package/scripts/score-mode-demo.js +45 -0
  21. package/src/Database.mjs +5325 -0
  22. package/src/FileHandler.mjs +1140 -0
  23. package/src/OperationQueue.mjs +279 -0
  24. package/src/SchemaManager.mjs +268 -0
  25. package/src/Serializer.mjs +702 -0
  26. package/src/managers/ConcurrencyManager.mjs +257 -0
  27. package/src/managers/IndexManager.mjs +2094 -0
  28. package/src/managers/QueryManager.mjs +1490 -0
  29. package/src/managers/StatisticsManager.mjs +262 -0
  30. package/src/managers/StreamingProcessor.mjs +429 -0
  31. package/src/managers/TermManager.mjs +278 -0
  32. package/src/utils/operatorNormalizer.mjs +116 -0
  33. package/test/$not-operator-with-and.test.js +282 -0
  34. package/test/README.md +8 -0
  35. package/test/close-init-cycle.test.js +256 -0
  36. package/test/coverage-method.test.js +93 -0
  37. package/test/critical-bugs-fixes.test.js +1069 -0
  38. package/test/deserialize-corruption-fixes.test.js +296 -0
  39. package/test/exists-method.test.js +318 -0
  40. package/test/explicit-indexes-comparison.test.js +219 -0
  41. package/test/filehandler-non-adjacent-ranges-bug.test.js +175 -0
  42. package/test/index-line-number-regression.test.js +100 -0
  43. package/test/index-missing-index-data.test.js +91 -0
  44. package/test/index-persistence.test.js +491 -0
  45. package/test/index-serialization.test.js +314 -0
  46. package/test/indexed-query-mode.test.js +360 -0
  47. package/test/insert-session-auto-flush.test.js +353 -0
  48. package/test/iterate-method.test.js +272 -0
  49. package/test/legacy-operator-compat.test.js +154 -0
  50. package/test/query-operators.test.js +238 -0
  51. package/test/regex-array-fields.test.js +129 -0
  52. package/test/score-method.test.js +298 -0
  53. package/test/setup.js +17 -0
  54. package/test/term-mapping-minimal.test.js +154 -0
  55. package/test/term-mapping-simple.test.js +257 -0
  56. package/test/term-mapping.test.js +514 -0
  57. package/test/writebuffer-flush-resilience.test.js +204 -0
  58. package/dist/FileHandler.js +0 -688
  59. package/dist/IndexManager.js +0 -353
  60. package/dist/IntegrityChecker.js +0 -364
  61. package/dist/JSONLDatabase.js +0 -1333
  62. package/dist/index.js +0 -617
  63. package/docs/MIGRATION.md +0 -295
  64. package/examples/auto-save-example.js +0 -158
  65. package/examples/cjs-usage.cjs +0 -82
  66. package/examples/close-vs-delete-example.js +0 -71
  67. package/examples/esm-usage.js +0 -113
  68. package/examples/example-columns.idx.jdb +0 -0
  69. package/examples/example-columns.jdb +0 -9
  70. package/examples/example-options.idx.jdb +0 -0
  71. package/examples/example-options.jdb +0 -0
  72. package/examples/example-users.idx.jdb +0 -0
  73. package/examples/example-users.jdb +0 -5
  74. package/examples/simple-test.js +0 -55
  75. package/src/FileHandler.js +0 -674
  76. package/src/IndexManager.js +0 -363
  77. package/src/IntegrityChecker.js +0 -379
  78. package/src/JSONLDatabase.js +0 -1391
  79. package/src/index.js +0 -608
@@ -1,1391 +0,0 @@
1
- /**
2
- * JSONLDatabase - JexiDB Core Database Engine
3
- * High Performance JSONL Database optimized for JexiDB
4
- * Optimized hybrid architecture combining the best strategies:
5
- * - Insert: Buffer + batch write for maximum speed
6
- * - Find: Intelligent hybrid (indexed + non-indexed fields)
7
- * - Update/Delete: On-demand reading/writing for scalability
8
- */
9
- import { promises as fs } from 'fs';
10
- import path from 'path';
11
- import { EventEmitter } from 'events';
12
-
13
- class JSONLDatabase extends EventEmitter {
14
- constructor(filePath, options = {}) {
15
- super();
16
-
17
- // Expect the main data file path (with .jdb extension)
18
- if (!filePath.endsWith('.jdb')) {
19
- if (filePath.endsWith('.jsonl')) {
20
- this.filePath = filePath.replace('.jsonl', '.jdb');
21
- } else if (filePath.endsWith('.json')) {
22
- this.filePath = filePath.replace('.json', '.jdb');
23
- } else {
24
- // If no extension provided, assume it's a base name and add .jdb
25
- this.filePath = filePath + '.jdb';
26
- }
27
- } else {
28
- this.filePath = filePath;
29
- }
30
-
31
- // Enhanced configuration with intelligent defaults
32
- this.options = {
33
- // Original options
34
- batchSize: 50, // Reduced from 100 for faster response
35
- create: true, // Create database if it doesn't exist (default: true)
36
- clear: false, // Clear database on load if not empty (default: false)
37
-
38
- // Auto-save intelligent configuration
39
- autoSave: true, // Enable auto-save by default
40
- autoSaveThreshold: 50, // Flush when buffer reaches 50 records
41
- autoSaveInterval: 5000, // Flush every 5 seconds
42
- forceSaveOnClose: true, // Always save when closing
43
-
44
- // Performance configuration
45
- adaptiveBatchSize: true, // Adjust batch size based on usage
46
- minBatchSize: 10, // Minimum batch size for flush
47
- maxBatchSize: 200, // Maximum batch size for performance
48
-
49
- // Memory management
50
- maxMemoryUsage: 'auto', // Calculate automatically or use fixed value
51
- maxFlushChunkBytes: 8 * 1024 * 1024, // 8MB default
52
-
53
- ...options
54
- };
55
-
56
- // If clear is true, create should also be true
57
- if (this.options.clear === true) {
58
- this.options.create = true;
59
- }
60
-
61
- // Auto-save timer and state
62
- this.autoSaveTimer = null;
63
- this.lastFlushTime = null;
64
- this.lastAutoSaveTime = Date.now();
65
-
66
- this.isInitialized = false;
67
- this.offsets = [];
68
- this.indexOffset = 0;
69
- this.shouldSave = false;
70
-
71
- // Ultra-optimized index structure (kept in memory)
72
- this.indexes = {};
73
-
74
- // Initialize indexes from options or use defaults
75
- if (options.indexes) {
76
- for (const [field, type] of Object.entries(options.indexes)) {
77
- this.indexes[field] = new Map();
78
- }
79
- } else {
80
- // Default indexes
81
- this.indexes = {
82
- id: new Map(),
83
- age: new Map(),
84
- email: new Map()
85
- };
86
- }
87
-
88
- this.recordCount = 0;
89
- this.fileHandle = null; // File handle for on-demand reading
90
-
91
- // Insert buffer (Original strategy)
92
- this.insertionBuffer = [];
93
- this.insertionStats = {
94
- count: 0,
95
- lastInsertion: Date.now(),
96
- batchSize: this.options.batchSize
97
- };
98
- }
99
-
100
- async init() {
101
- if (this.isInitialized) {
102
- // If already initialized, close first to reset state
103
- await this.close();
104
- }
105
-
106
- try {
107
- const dir = path.dirname(this.filePath);
108
- await fs.mkdir(dir, { recursive: true });
109
-
110
- // Check if file exists before loading
111
- const fileExists = await fs.access(this.filePath).then(() => true).catch(() => false);
112
-
113
- // Handle clear option
114
- if (this.options.clear && fileExists) {
115
- await fs.writeFile(this.filePath, '');
116
- this.offsets = [];
117
- this.indexOffset = 0;
118
- this.recordCount = 0;
119
- console.log(`Database cleared: ${this.filePath}`);
120
- this.isInitialized = true;
121
- this.emit('init');
122
- return;
123
- }
124
-
125
- // Handle create option
126
- if (!fileExists) {
127
- if (this.options.create) {
128
- await fs.writeFile(this.filePath, '');
129
- this.offsets = [];
130
- this.indexOffset = 0;
131
- this.recordCount = 0;
132
- console.log(`Database created: ${this.filePath}`);
133
- this.isInitialized = true;
134
- this.emit('init');
135
- return;
136
- } else {
137
- throw new Error(`Database file does not exist: ${this.filePath}`);
138
- }
139
- }
140
-
141
- // Load existing database
142
- await this.loadDataWithOffsets();
143
-
144
- this.isInitialized = true;
145
- this.emit('init');
146
-
147
- } catch (error) {
148
- // If create is false and file doesn't exist or is corrupted, throw error
149
- if (!this.options.create) {
150
- throw new Error(`Failed to load database: ${error.message}`);
151
- }
152
-
153
- // If create is true, initialize empty database
154
- this.recordCount = 0;
155
- this.offsets = [];
156
- this.indexOffset = 0;
157
- this.isInitialized = true;
158
- this.emit('init');
159
- }
160
- }
161
-
162
- async loadDataWithOffsets() {
163
- try {
164
- // Open file handle for on-demand reading
165
- this.fileHandle = await fs.open(this.filePath, 'r');
166
-
167
- const data = await fs.readFile(this.filePath, 'utf8');
168
- const lines = data.split('\n').filter(line => line.trim());
169
-
170
- if (lines.length === 0) {
171
- this.recordCount = 0;
172
- this.offsets = [];
173
- return;
174
- }
175
-
176
- // Check if this is a legacy JexiDB file (has index and lineOffsets at the end)
177
- if (lines.length >= 3) {
178
- const lastLine = lines[lines.length - 1];
179
- const secondLastLine = lines[lines.length - 2];
180
-
181
- try {
182
- const lastData = JSON.parse(lastLine);
183
- const secondLastData = JSON.parse(secondLastLine);
184
-
185
- // Legacy format: data lines + index line (object) + lineOffsets line (array)
186
- // Check if secondLastLine contains index structure (has nested objects with arrays)
187
- if (Array.isArray(lastData) &&
188
- typeof secondLastData === 'object' &&
189
- !Array.isArray(secondLastData) &&
190
- Object.values(secondLastData).some(val => typeof val === 'object' && !Array.isArray(val))) {
191
- console.log('🔄 Detected legacy JexiDB format, migrating...');
192
- return await this.loadLegacyFormat(lines);
193
- }
194
- } catch (e) {
195
- // Not legacy format
196
- }
197
- }
198
-
199
- // Check for new format offset line
200
- const lastLine = lines[lines.length - 1];
201
- try {
202
- const lastData = JSON.parse(lastLine);
203
- if (Array.isArray(lastData) && lastData.length > 0 && typeof lastData[0] === 'number') {
204
- this.offsets = lastData;
205
- this.indexOffset = lastData[lastData.length - 2] || 0;
206
- this.recordCount = this.offsets.length; // Number of offsets = number of records
207
-
208
- // Try to load persistent indexes first
209
- if (await this.loadPersistentIndexes()) {
210
- console.log('✅ Loaded persistent indexes');
211
- return;
212
- }
213
-
214
- // Fallback: Load records into indexes (on-demand)
215
- console.log('🔄 Rebuilding indexes from data...');
216
- for (let i = 0; i < this.recordCount; i++) {
217
- try {
218
- const record = JSON.parse(lines[i]);
219
- if (record && !record._deleted) {
220
- this.addToIndex(record, i);
221
- }
222
- } catch (error) {
223
- // Skip invalid lines
224
- }
225
- }
226
- return;
227
- }
228
- } catch (e) {
229
- // Not an offset line
230
- }
231
-
232
- // Regular loading - no offset information
233
- this.offsets = [];
234
- this.indexOffset = 0;
235
-
236
- for (let i = 0; i < lines.length; i++) {
237
- try {
238
- const record = JSON.parse(lines[i]);
239
- if (record && !record._deleted) {
240
- this.addToIndex(record, i);
241
- this.offsets.push(i * 100); // Estimate offset
242
- }
243
- } catch (error) {
244
- // Skip invalid lines
245
- }
246
- }
247
-
248
- this.recordCount = this.offsets.length;
249
-
250
- } catch (error) {
251
- throw error; // Re-throw to be handled by init()
252
- }
253
- }
254
-
255
- async loadLegacyFormat(lines) {
256
- // Legacy format: data lines + index line + lineOffsets line
257
- const dataLines = lines.slice(0, -2); // All lines except last 2
258
- const indexLine = lines[lines.length - 2];
259
- const lineOffsetsLine = lines[lines.length - 1];
260
-
261
- try {
262
- const legacyIndexes = JSON.parse(indexLine);
263
- const legacyOffsets = JSON.parse(lineOffsetsLine);
264
-
265
- // Convert legacy indexes to new format
266
- for (const [field, indexMap] of Object.entries(legacyIndexes)) {
267
- if (this.indexes[field]) {
268
- this.indexes[field] = new Map();
269
- for (const [value, indices] of Object.entries(indexMap)) {
270
- this.indexes[field].set(value, new Set(indices));
271
- }
272
- }
273
- }
274
-
275
- // Use legacy offsets
276
- this.offsets = legacyOffsets;
277
- this.recordCount = dataLines.length;
278
-
279
- console.log(`✅ Migrated legacy format: ${this.recordCount} records`);
280
-
281
- // Save in new format for next time
282
- await this.savePersistentIndexes();
283
- console.log('💾 Saved in new format for future use');
284
-
285
- } catch (error) {
286
- console.error('Failed to parse legacy format:', error.message);
287
- // Fallback to regular loading
288
- this.offsets = [];
289
- this.indexOffset = 0;
290
- this.recordCount = 0;
291
- }
292
- }
293
-
294
- async loadPersistentIndexes() {
295
- try {
296
- const indexPath = this.filePath.replace('.jdb', '') + '.idx.jdb';
297
- const compressedData = await fs.readFile(indexPath);
298
-
299
- // Decompress using zlib
300
- const zlib = await import('zlib');
301
- const { promisify } = await import('util');
302
- const gunzip = promisify(zlib.gunzip);
303
-
304
- const decompressedData = await gunzip(compressedData);
305
- const savedIndexes = JSON.parse(decompressedData.toString('utf8'));
306
-
307
- // Validate index structure
308
- if (!savedIndexes || typeof savedIndexes !== 'object') {
309
- return false;
310
- }
311
-
312
- // Convert back to Map objects
313
- for (const [field, indexMap] of Object.entries(savedIndexes)) {
314
- // Initialize index if it doesn't exist
315
- if (!this.indexes[field]) {
316
- this.indexes[field] = new Map();
317
- }
318
-
319
- this.indexes[field] = new Map();
320
- for (const [value, indices] of Object.entries(indexMap)) {
321
- // Convert value back to original type based on field configuration
322
- let convertedValue = value;
323
- if (this.indexes[field] && this.indexes[field].constructor === Map) {
324
- // Try to convert based on field type
325
- if (field === 'id' || field.includes('id') || field.includes('Id')) {
326
- convertedValue = parseInt(value, 10);
327
- } else if (typeof value === 'string' && !isNaN(parseFloat(value))) {
328
- // Try to convert numeric strings back to numbers
329
- const num = parseFloat(value);
330
- if (Number.isInteger(num)) {
331
- convertedValue = parseInt(value, 10);
332
- } else {
333
- convertedValue = num;
334
- }
335
- }
336
- }
337
- this.indexes[field].set(convertedValue, new Set(indices));
338
- }
339
- }
340
-
341
- return true;
342
- } catch (error) {
343
- // Index file doesn't exist or is corrupted
344
- return false;
345
- }
346
- }
347
-
348
- async savePersistentIndexes() {
349
- try {
350
- const indexPath = this.filePath.replace('.jdb', '') + '.idx.jdb';
351
-
352
- // Convert Maps to plain objects for JSON serialization
353
- const serializableIndexes = {};
354
- for (const [field, indexMap] of Object.entries(this.indexes)) {
355
- serializableIndexes[field] = {};
356
- for (const [value, indexSet] of indexMap.entries()) {
357
- serializableIndexes[field][value] = Array.from(indexSet);
358
- }
359
- }
360
-
361
- // Compress using zlib
362
- const zlib = await import('zlib');
363
- const { promisify } = await import('util');
364
- const gzip = promisify(zlib.gzip);
365
-
366
- const jsonData = JSON.stringify(serializableIndexes);
367
- const compressedData = await gzip(jsonData);
368
-
369
- await fs.writeFile(indexPath, compressedData);
370
- } catch (error) {
371
- console.error('Failed to save persistent indexes:', error.message);
372
- }
373
- }
374
-
375
- addToIndex(record, index) {
376
- // Add to all configured indexes
377
- for (const [field, indexMap] of Object.entries(this.indexes)) {
378
- const value = record[field];
379
- if (value !== undefined) {
380
- if (!indexMap.has(value)) {
381
- indexMap.set(value, new Set());
382
- }
383
- indexMap.get(value).add(index);
384
- }
385
- }
386
- }
387
-
388
- removeFromIndex(index) {
389
- for (const [field, indexMap] of Object.entries(this.indexes)) {
390
- for (const [value, indexSet] of indexMap.entries()) {
391
- indexSet.delete(index);
392
- if (indexSet.size === 0) {
393
- indexMap.delete(value);
394
- }
395
- }
396
- }
397
- }
398
-
399
- // ORIGINAL STRATEGY: Buffer in memory + batch write with intelligent auto-save
400
- async insert(data) {
401
- if (!this.isInitialized) {
402
- throw new Error('Database not initialized');
403
- }
404
-
405
- const record = {
406
- ...data,
407
- _id: this.recordCount,
408
- _created: Date.now(),
409
- _updated: Date.now()
410
- };
411
-
412
- // Add to insertion buffer (ORIGINAL STRATEGY)
413
- this.insertionBuffer.push(record);
414
- this.insertionStats.count++;
415
- this.insertionStats.lastInsertion = Date.now();
416
-
417
- // Update record count immediately for length getter
418
- this.recordCount++;
419
-
420
- // Add to index immediately for searchability
421
- this.addToIndex(record, this.recordCount - 1);
422
-
423
- // Intelligent auto-save logic
424
- if (this.options.autoSave) {
425
- // Auto-save based on threshold
426
- if (this.insertionBuffer.length >= this.options.autoSaveThreshold) {
427
- await this.flush();
428
- this.emit('buffer-full');
429
- }
430
-
431
- // Auto-save based on time interval
432
- if (!this.autoSaveTimer) {
433
- this.autoSaveTimer = setTimeout(async () => {
434
- if (this.insertionBuffer.length > 0) {
435
- await this.flush();
436
- this.emit('auto-save-timer');
437
- }
438
- this.autoSaveTimer = null;
439
- }, this.options.autoSaveInterval);
440
- }
441
- } else {
442
- // Manual mode: flush only when buffer is full
443
- if (this.insertionBuffer.length >= this.insertionStats.batchSize) {
444
- await this.flushInsertionBuffer();
445
- }
446
- }
447
-
448
- this.shouldSave = true;
449
-
450
- // Emit insert event
451
- this.emit('insert', record, this.recordCount - 1);
452
-
453
- return record; // Return immediately (ORIGINAL STRATEGY)
454
- }
455
-
456
- // PUBLIC METHOD: Flush buffer to disk
457
- async flush() {
458
- if (!this.isInitialized) {
459
- throw new Error('Database not initialized');
460
- }
461
-
462
- if (this.insertionBuffer.length > 0) {
463
- const flushCount = this.insertionBuffer.length;
464
- await this.flushInsertionBuffer();
465
- this.lastFlushTime = Date.now();
466
- this.emit('buffer-flush', flushCount);
467
- return flushCount;
468
- }
469
- return 0;
470
- }
471
-
472
- // PUBLIC METHOD: Force save - always saves regardless of buffer size
473
- async forceSave() {
474
- if (!this.isInitialized) {
475
- throw new Error('Database not initialized');
476
- }
477
-
478
- await this.flush();
479
- await this.save();
480
- this.emit('save-complete');
481
- }
482
-
483
- // PUBLIC METHOD: Get buffer status information
484
- getBufferStatus() {
485
- return {
486
- pendingCount: this.insertionBuffer.length,
487
- bufferSize: this.options.batchSize,
488
- lastFlush: this.lastFlushTime,
489
- lastAutoSave: this.lastAutoSaveTime,
490
- shouldFlush: this.insertionBuffer.length >= this.options.autoSaveThreshold,
491
- autoSaveEnabled: this.options.autoSave,
492
- autoSaveTimer: this.autoSaveTimer ? 'active' : 'inactive'
493
- };
494
- }
495
-
496
- // ULTRA-OPTIMIZED STRATEGY: Bulk flush with minimal I/O (chunked to avoid OOM)
497
- async flushInsertionBuffer() {
498
- if (this.insertionBuffer.length === 0) {
499
- return;
500
- }
501
-
502
- try {
503
- // Get the current file size to calculate accurate offsets
504
- let currentOffset = 0;
505
- try {
506
- const stats = await fs.stat(this.filePath);
507
- currentOffset = stats.size;
508
- } catch (error) {
509
- // File doesn't exist yet, start at 0
510
- currentOffset = 0;
511
- }
512
-
513
- // Write in chunks to avoid allocating a huge buffer/string at once
514
- const maxChunkBytes = this.options.maxFlushChunkBytes || 8 * 1024 * 1024; // 8MB default
515
- let chunkParts = [];
516
- let chunkBytes = 0;
517
-
518
- // We'll push offsets directly to avoid creating a separate large array
519
- const pendingOffsets = [];
520
-
521
- for (let i = 0; i < this.insertionBuffer.length; i++) {
522
- const record = this.insertionBuffer[i];
523
- const line = JSON.stringify(record) + '\n';
524
- const lineBytes = Buffer.byteLength(line, 'utf8');
525
-
526
- // Track offset for this record
527
- pendingOffsets.push(currentOffset);
528
- currentOffset += lineBytes;
529
-
530
- // If one line is larger than chunk size, write the current chunk and then this line alone
531
- if (lineBytes > maxChunkBytes) {
532
- if (chunkParts.length > 0) {
533
- await fs.appendFile(this.filePath, chunkParts.join(''));
534
- chunkParts.length = 0;
535
- chunkBytes = 0;
536
- }
537
- await fs.appendFile(this.filePath, line);
538
- continue;
539
- }
540
-
541
- // If adding this line would exceed the chunk size, flush current chunk first
542
- if (chunkBytes + lineBytes > maxChunkBytes) {
543
- await fs.appendFile(this.filePath, chunkParts.join(''));
544
- chunkParts.length = 0;
545
- chunkBytes = 0;
546
- }
547
-
548
- chunkParts.push(line);
549
- chunkBytes += lineBytes;
550
- }
551
-
552
- if (chunkParts.length > 0) {
553
- await fs.appendFile(this.filePath, chunkParts.join(''));
554
- }
555
-
556
- // Update offsets and clear buffer
557
- this.offsets.push(...pendingOffsets);
558
- this.insertionBuffer.length = 0;
559
- this.shouldSave = true; // Mark that we need to save (offset line will be added by save())
560
-
561
- } catch (error) {
562
- console.error('Error flushing insertion buffer:', error);
563
- throw new Error(`Failed to flush insertion buffer: ${error.message}`);
564
- }
565
- }
566
-
567
- // TURBO STRATEGY: On-demand reading with intelligent non-indexed field support
568
- async find(criteria = {}) {
569
- if (!this.isInitialized) {
570
- throw new Error('Database not initialized');
571
- }
572
-
573
- // Separate indexed and non-indexed fields for intelligent querying
574
- const indexedFields = Object.keys(criteria).filter(field => this.indexes[field]);
575
- const nonIndexedFields = Object.keys(criteria).filter(field => !this.indexes[field]);
576
-
577
- // Step 1: Use indexes for indexed fields (fast pre-filtering)
578
- let matchingIndices = [];
579
- if (indexedFields.length > 0) {
580
- const indexedCriteria = {};
581
- for (const field of indexedFields) {
582
- indexedCriteria[field] = criteria[field];
583
- }
584
- matchingIndices = this.queryIndex(indexedCriteria);
585
- }
586
-
587
- // If no indexed fields, start with all records
588
- if (indexedFields.length === 0) {
589
- matchingIndices = Array.from({ length: this.recordCount }, (_, i) => i);
590
- } else if (matchingIndices.length === 0) {
591
- // If we have indexed fields but no matches, return empty array
592
- return [];
593
- }
594
-
595
- if (matchingIndices.length === 0) {
596
- return [];
597
- }
598
-
599
- // Step 2: Collect results from disk (existing records)
600
- const results = [];
601
-
602
- for (const index of matchingIndices) {
603
- if (index < this.offsets.length) {
604
- const offset = this.offsets[index];
605
- const record = await this.readRecordAtOffset(offset);
606
- if (record && !record._deleted) {
607
- // Apply non-indexed field filtering if needed
608
- if (nonIndexedFields.length === 0 || this.matchesCriteria(record, nonIndexedFields.reduce((acc, field) => {
609
- acc[field] = criteria[field];
610
- return acc;
611
- }, {}))) {
612
- results.push(record);
613
- }
614
- }
615
- }
616
- }
617
-
618
- // Step 3: Add results from buffer (new records) if buffer is not empty
619
- if (this.insertionBuffer.length > 0) {
620
- // Check each buffer record against criteria
621
- for (let i = 0; i < this.insertionBuffer.length; i++) {
622
- const record = this.insertionBuffer[i];
623
- if (record && !record._deleted) {
624
- // Check if record matches all criteria
625
- if (this.matchesCriteria(record, criteria)) {
626
- results.push(record);
627
- }
628
- }
629
- }
630
- }
631
-
632
- return results;
633
- }
634
-
635
- async readRecordAtOffset(offset) {
636
- try {
637
- if (!this.fileHandle) {
638
- this.fileHandle = await fs.open(this.filePath, 'r');
639
- }
640
-
641
- // Read line at specific offset
642
- const buffer = Buffer.alloc(1024); // Read in chunks
643
- let line = '';
644
- let position = offset;
645
-
646
- while (true) {
647
- const { bytesRead } = await this.fileHandle.read(buffer, 0, buffer.length, position);
648
- if (bytesRead === 0) break;
649
-
650
- const chunk = buffer.toString('utf8', 0, bytesRead);
651
- const newlineIndex = chunk.indexOf('\n');
652
-
653
- if (newlineIndex !== -1) {
654
- line += chunk.substring(0, newlineIndex);
655
- break;
656
- } else {
657
- line += chunk;
658
- position += bytesRead;
659
- }
660
- }
661
-
662
- // Skip empty lines
663
- if (!line.trim()) {
664
- return null;
665
- }
666
-
667
- return JSON.parse(line);
668
- } catch (error) {
669
- return null;
670
- }
671
- }
672
-
673
- queryIndex(criteria) {
674
- if (!criteria || Object.keys(criteria).length === 0) {
675
- return Array.from({ length: this.recordCount }, (_, i) => i);
676
- }
677
-
678
- let matchingIndices = null;
679
-
680
- for (const [field, criteriaValue] of Object.entries(criteria)) {
681
- const indexMap = this.indexes[field];
682
- if (!indexMap) continue; // Skip non-indexed fields - they'll be filtered later
683
-
684
- let fieldIndices = new Set();
685
-
686
- if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue)) {
687
- // Handle operators like 'in', '>', '<', etc.
688
- for (const [operator, operatorValue] of Object.entries(criteriaValue)) {
689
- if (operator === 'in' && Array.isArray(operatorValue)) {
690
- for (const searchValue of operatorValue) {
691
- const indexSet = indexMap.get(searchValue);
692
- if (indexSet) {
693
- for (const index of indexSet) {
694
- fieldIndices.add(index);
695
- }
696
- }
697
- }
698
- } else if (['>', '>=', '<', '<=', '!=', 'nin'].includes(operator)) {
699
- // Handle comparison operators
700
- for (const [value, indexSet] of indexMap.entries()) {
701
- let include = true;
702
-
703
- if (operator === '>=' && value < operatorValue) {
704
- include = false;
705
- } else if (operator === '>' && value <= operatorValue) {
706
- include = false;
707
- } else if (operator === '<=' && value > operatorValue) {
708
- include = false;
709
- } else if (operator === '<' && value >= operatorValue) {
710
- include = false;
711
- } else if (operator === '!=' && value === operatorValue) {
712
- include = false;
713
- } else if (operator === 'nin' && Array.isArray(operatorValue) && operatorValue.includes(value)) {
714
- include = false;
715
- }
716
-
717
- if (include) {
718
- for (const index of indexSet) {
719
- fieldIndices.add(index);
720
- }
721
- }
722
- }
723
- } else {
724
- // Handle other operators
725
- for (const [value, indexSet] of indexMap.entries()) {
726
- if (this.matchesOperator(value, operator, operatorValue)) {
727
- for (const index of indexSet) {
728
- fieldIndices.add(index);
729
- }
730
- }
731
- }
732
- }
733
- }
734
- } else {
735
- // Simple equality
736
- const values = Array.isArray(criteriaValue) ? criteriaValue : [criteriaValue];
737
- for (const searchValue of values) {
738
- const indexSet = indexMap.get(searchValue);
739
- if (indexSet) {
740
- for (const index of indexSet) {
741
- fieldIndices.add(index);
742
- }
743
- }
744
- }
745
- }
746
-
747
- if (matchingIndices === null) {
748
- matchingIndices = fieldIndices;
749
- } else {
750
- matchingIndices = new Set([...matchingIndices].filter(x => fieldIndices.has(x)));
751
- }
752
- }
753
-
754
- // If no indexed fields were found, return all records (non-indexed filtering will happen later)
755
- return matchingIndices ? Array.from(matchingIndices) : [];
756
- }
757
-
758
- // TURBO STRATEGY: On-demand update
759
- async update(criteria, updates) {
760
- if (!this.isInitialized) {
761
- throw new Error('Database not initialized');
762
- }
763
-
764
- let updatedCount = 0;
765
-
766
- // Update records in buffer first
767
- for (let i = 0; i < this.insertionBuffer.length; i++) {
768
- const record = this.insertionBuffer[i];
769
- if (this.matchesCriteria(record, criteria)) {
770
- Object.assign(record, updates);
771
- record._updated = Date.now();
772
- updatedCount++;
773
- this.emit('update', record, this.recordCount - this.insertionBuffer.length + i);
774
- }
775
- }
776
-
777
- // Update records on disk
778
- const matchingIndices = this.queryIndex(criteria);
779
- for (const index of matchingIndices) {
780
- if (index < this.offsets.length) {
781
- const offset = this.offsets[index];
782
- const record = await this.readRecordAtOffset(offset);
783
-
784
- if (record && !record._deleted) {
785
- // Apply updates
786
- Object.assign(record, updates);
787
- record._updated = Date.now();
788
-
789
- // Update index
790
- this.removeFromIndex(index);
791
- this.addToIndex(record, index);
792
-
793
- // Write updated record back to file
794
- await this.writeRecordAtOffset(offset, record);
795
- updatedCount++;
796
- this.emit('update', record, index);
797
- }
798
- }
799
- }
800
-
801
- this.shouldSave = true;
802
-
803
- // Return array of updated records for compatibility with tests
804
- const updatedRecords = [];
805
- for (let i = 0; i < this.insertionBuffer.length; i++) {
806
- const record = this.insertionBuffer[i];
807
- if (record._updated) {
808
- updatedRecords.push(record);
809
- }
810
- }
811
-
812
- // Also get updated records from disk
813
- for (const index of matchingIndices) {
814
- if (index < this.offsets.length) {
815
- const offset = this.offsets[index];
816
- const record = await this.readRecordAtOffset(offset);
817
- if (record && record._updated) {
818
- updatedRecords.push(record);
819
- }
820
- }
821
- }
822
-
823
- return updatedRecords;
824
- }
825
-
826
- async writeRecordAtOffset(offset, record) {
827
- try {
828
- const recordString = JSON.stringify(record) + '\n';
829
- const recordBuffer = Buffer.from(recordString, 'utf8');
830
-
831
- // Open file for writing if needed
832
- const writeHandle = await fs.open(this.filePath, 'r+');
833
- await writeHandle.write(recordBuffer, 0, recordBuffer.length, offset);
834
- await writeHandle.close();
835
- } catch (error) {
836
- console.error('Error writing record:', error);
837
- }
838
- }
839
-
840
- // TURBO STRATEGY: Soft delete
841
- async delete(criteria) {
842
- if (!this.isInitialized) {
843
- throw new Error('Database not initialized');
844
- }
845
-
846
- let deletedCount = 0;
847
-
848
- // Delete records in buffer first
849
- for (let i = this.insertionBuffer.length - 1; i >= 0; i--) {
850
- const record = this.insertionBuffer[i];
851
- if (this.matchesCriteria(record, criteria)) {
852
- this.insertionBuffer.splice(i, 1);
853
- this.recordCount--;
854
- deletedCount++;
855
- this.emit('delete', record, this.recordCount - this.insertionBuffer.length + i);
856
- }
857
- }
858
-
859
- // Delete records on disk
860
- const matchingIndices = this.queryIndex(criteria);
861
-
862
- // Remove from index
863
- for (const index of matchingIndices) {
864
- this.removeFromIndex(index);
865
- }
866
-
867
- // Mark records as deleted in file (soft delete - TURBO STRATEGY)
868
- for (const index of matchingIndices) {
869
- if (index < this.offsets.length) {
870
- const offset = this.offsets[index];
871
- const record = await this.readRecordAtOffset(offset);
872
-
873
- if (record && !record._deleted) {
874
- record._deleted = true;
875
- record._deletedAt = Date.now();
876
- await this.writeRecordAtOffset(offset, record);
877
- deletedCount++;
878
- this.emit('delete', record, index);
879
- }
880
- }
881
- }
882
-
883
- this.shouldSave = true;
884
- return deletedCount;
885
- }
886
-
887
- async save() {
888
- // Flush any pending inserts first
889
- if (this.insertionBuffer.length > 0) {
890
- await this.flushInsertionBuffer();
891
- }
892
-
893
- if (!this.shouldSave) return;
894
-
895
- // Recalculate offsets based on current file content
896
- try {
897
- const content = await fs.readFile(this.filePath, 'utf8');
898
- const lines = content.split('\n').filter(line => line.trim());
899
-
900
- // Filter out offset lines and recalculate offsets
901
- const dataLines = [];
902
- const newOffsets = [];
903
- let currentOffset = 0;
904
-
905
- for (const line of lines) {
906
- try {
907
- const parsed = JSON.parse(line);
908
- if (Array.isArray(parsed) && parsed.length > 0 && typeof parsed[0] === 'number') {
909
- // Skip offset lines
910
- continue;
911
- }
912
- } catch (e) {
913
- // Not JSON, keep the line
914
- }
915
-
916
- // This is a data line
917
- dataLines.push(line);
918
- newOffsets.push(currentOffset);
919
- currentOffset += Buffer.byteLength(line + '\n', 'utf8');
920
- }
921
-
922
- // Update offsets
923
- this.offsets = newOffsets;
924
-
925
- // Write clean content back (only data lines)
926
- const cleanContent = dataLines.join('\n') + (dataLines.length > 0 ? '\n' : '');
927
- await fs.writeFile(this.filePath, cleanContent);
928
- } catch (error) {
929
- // File doesn't exist or can't be read, that's fine
930
- }
931
-
932
- // Add the new offset line
933
- const offsetLine = JSON.stringify(this.offsets) + '\n';
934
- await fs.appendFile(this.filePath, offsetLine);
935
-
936
- // Save persistent indexes
937
- await this.savePersistentIndexes();
938
-
939
- this.shouldSave = false;
940
- }
941
-
942
- async close() {
943
- // Clear auto-save timer
944
- if (this.autoSaveTimer) {
945
- clearTimeout(this.autoSaveTimer);
946
- this.autoSaveTimer = null;
947
- }
948
-
949
- // Flush any pending inserts first
950
- if (this.insertionBuffer.length > 0) {
951
- await this.flush();
952
- }
953
-
954
- // Force save on close if enabled
955
- if (this.options.forceSaveOnClose && this.shouldSave) {
956
- await this.save();
957
- this.emit('close-save-complete');
958
- } else if (this.shouldSave) {
959
- await this.save();
960
- }
961
-
962
- if (this.fileHandle) {
963
- await this.fileHandle.close();
964
- this.fileHandle = null;
965
- }
966
-
967
- this.isInitialized = false;
968
- this.emit('close');
969
- }
970
-
971
- get length() {
972
- return this.recordCount;
973
- }
974
-
975
- get stats() {
976
- return {
977
- recordCount: this.recordCount,
978
- offsetCount: this.offsets.length,
979
- indexedFields: Object.keys(this.indexes),
980
- isInitialized: this.isInitialized,
981
- shouldSave: this.shouldSave,
982
- memoryUsage: 0, // No buffer in memory - on-demand reading
983
- fileHandle: this.fileHandle ? 'open' : 'closed',
984
- insertionBufferSize: this.insertionBuffer.length,
985
- batchSize: this.insertionStats.batchSize,
986
- // Auto-save information
987
- autoSave: {
988
- enabled: this.options.autoSave,
989
- threshold: this.options.autoSaveThreshold,
990
- interval: this.options.autoSaveInterval,
991
- timerActive: this.autoSaveTimer ? true : false,
992
- lastFlush: this.lastFlushTime,
993
- lastAutoSave: this.lastAutoSaveTime
994
- }
995
- };
996
- }
997
-
998
- get indexStats() {
999
- return {
1000
- recordCount: this.recordCount,
1001
- indexCount: Object.keys(this.indexes).length
1002
- };
1003
- }
1004
-
1005
- // PUBLIC METHOD: Configure performance settings
1006
- configurePerformance(settings) {
1007
- if (settings.batchSize !== undefined) {
1008
- this.options.batchSize = Math.max(this.options.minBatchSize,
1009
- Math.min(this.options.maxBatchSize, settings.batchSize));
1010
- this.insertionStats.batchSize = this.options.batchSize;
1011
- }
1012
-
1013
- if (settings.autoSaveThreshold !== undefined) {
1014
- this.options.autoSaveThreshold = settings.autoSaveThreshold;
1015
- }
1016
-
1017
- if (settings.autoSaveInterval !== undefined) {
1018
- this.options.autoSaveInterval = settings.autoSaveInterval;
1019
- }
1020
-
1021
- this.emit('performance-configured', this.options);
1022
- }
1023
-
1024
- // PUBLIC METHOD: Get performance configuration
1025
- getPerformanceConfig() {
1026
- return {
1027
- batchSize: this.options.batchSize,
1028
- autoSaveThreshold: this.options.autoSaveThreshold,
1029
- autoSaveInterval: this.options.autoSaveInterval,
1030
- adaptiveBatchSize: this.options.adaptiveBatchSize,
1031
- minBatchSize: this.options.minBatchSize,
1032
- maxBatchSize: this.options.maxBatchSize
1033
- };
1034
- }
1035
-
1036
- /**
1037
- * Compatibility method: readColumnIndex - gets unique values from indexed columns only
1038
- * Maintains compatibility with JexiDB v1 code
1039
- * @param {string} column - The column name to get unique values from
1040
- * @returns {Set} Set of unique values in the column (indexed columns only)
1041
- */
1042
- readColumnIndex(column) {
1043
- // Only works with indexed columns
1044
- if (this.indexes[column]) {
1045
- return new Set(this.indexes[column].keys());
1046
- }
1047
-
1048
- // For non-indexed columns, throw error
1049
- throw new Error(`Column '${column}' is not indexed. Only indexed columns are supported.`);
1050
- }
1051
-
1052
- // Intelligent criteria matching for non-indexed fields
1053
- matchesCriteria(record, criteria, options = {}) {
1054
- const { caseInsensitive = false } = options;
1055
-
1056
- for (const [field, criteriaValue] of Object.entries(criteria)) {
1057
- const recordValue = this.getNestedValue(record, field);
1058
-
1059
- if (!this.matchesValue(recordValue, criteriaValue, caseInsensitive)) {
1060
- return false;
1061
- }
1062
- }
1063
-
1064
- return true;
1065
- }
1066
-
1067
- // Get nested value from record (supports dot notation like 'user.name')
1068
- getNestedValue(record, field) {
1069
- const parts = field.split('.');
1070
- let value = record;
1071
-
1072
- for (const part of parts) {
1073
- if (value && typeof value === 'object' && part in value) {
1074
- value = value[part];
1075
- } else {
1076
- return undefined;
1077
- }
1078
- }
1079
-
1080
- return value;
1081
- }
1082
-
1083
- // Match a single value against criteria
1084
- matchesValue(recordValue, criteriaValue, caseInsensitive = false) {
1085
- // Handle different types of criteria
1086
- if (typeof criteriaValue === 'object' && !Array.isArray(criteriaValue)) {
1087
- // Handle operators
1088
- for (const [operator, operatorValue] of Object.entries(criteriaValue)) {
1089
- if (!this.matchesOperator(recordValue, operator, operatorValue, caseInsensitive)) {
1090
- return false;
1091
- }
1092
- }
1093
- return true;
1094
- } else if (Array.isArray(criteriaValue)) {
1095
- // Handle array of values (IN operator)
1096
- return criteriaValue.some(value =>
1097
- this.matchesValue(recordValue, value, caseInsensitive)
1098
- );
1099
- } else {
1100
- // Simple equality
1101
- return this.matchesEquality(recordValue, criteriaValue, caseInsensitive);
1102
- }
1103
- }
1104
-
1105
- // Match equality with case sensitivity support
1106
- matchesEquality(recordValue, criteriaValue, caseInsensitive = false) {
1107
- if (recordValue === criteriaValue) {
1108
- return true;
1109
- }
1110
-
1111
- if (caseInsensitive && typeof recordValue === 'string' && typeof criteriaValue === 'string') {
1112
- return recordValue.toLowerCase() === criteriaValue.toLowerCase();
1113
- }
1114
-
1115
- return false;
1116
- }
1117
-
1118
- // Match operators
1119
- matchesOperator(recordValue, operator, operatorValue, caseInsensitive = false) {
1120
- switch (operator) {
1121
- case '>':
1122
- case 'gt':
1123
- return recordValue > operatorValue;
1124
- case '>=':
1125
- case 'gte':
1126
- return recordValue >= operatorValue;
1127
- case '<':
1128
- case 'lt':
1129
- return recordValue < operatorValue;
1130
- case '<=':
1131
- case 'lte':
1132
- return recordValue <= operatorValue;
1133
- case '!=':
1134
- case 'ne':
1135
- return recordValue !== operatorValue;
1136
- case 'in':
1137
- if (Array.isArray(operatorValue)) {
1138
- if (Array.isArray(recordValue)) {
1139
- // For array fields, check if any element matches
1140
- return recordValue.some(value => operatorValue.includes(value));
1141
- } else {
1142
- // For single values, check if the value is in the array
1143
- return operatorValue.includes(recordValue);
1144
- }
1145
- }
1146
- return false;
1147
- case 'nin':
1148
- if (Array.isArray(operatorValue)) {
1149
- if (Array.isArray(recordValue)) {
1150
- // For array fields, check if no element matches
1151
- return !recordValue.some(value => operatorValue.includes(value));
1152
- } else {
1153
- // For single values, check if the value is not in the array
1154
- return !operatorValue.includes(recordValue);
1155
- }
1156
- }
1157
- return false;
1158
- case 'regex':
1159
- try {
1160
- const regex = new RegExp(operatorValue, caseInsensitive ? 'i' : '');
1161
- return regex.test(String(recordValue));
1162
- } catch (error) {
1163
- return false;
1164
- }
1165
- case 'contains':
1166
- const searchStr = String(operatorValue);
1167
- const valueStr = String(recordValue);
1168
- if (caseInsensitive) {
1169
- return valueStr.toLowerCase().includes(searchStr.toLowerCase());
1170
- } else {
1171
- return valueStr.includes(searchStr);
1172
- }
1173
- default:
1174
- return false;
1175
- }
1176
- }
1177
-
1178
- async destroy() {
1179
- // destroy() agora é equivalente a close() - fecha instância, mantém arquivo
1180
- await this.close();
1181
- this.emit('destroy');
1182
- }
1183
-
1184
- async deleteDatabase() {
1185
- await this.close();
1186
- await fs.unlink(this.filePath);
1187
-
1188
- // Also remove index file if it exists
1189
- try {
1190
- const indexPath = this.filePath.replace('.jdb', '.idx.jdb');
1191
- await fs.unlink(indexPath);
1192
- } catch (e) {
1193
- // Index file might not exist
1194
- }
1195
-
1196
- this.emit('delete-database');
1197
- }
1198
-
1199
- // Alias for deleteDatabase
1200
- async removeDatabase() {
1201
- return this.deleteDatabase();
1202
- }
1203
-
1204
- async findOne(criteria = {}) {
1205
- const results = await this.find(criteria);
1206
- return results.length > 0 ? results[0] : null;
1207
- }
1208
-
1209
- async insertMany(data) {
1210
- if (!this.isInitialized) {
1211
- throw new Error('Database not initialized');
1212
- }
1213
-
1214
- const records = [];
1215
- for (const item of data) {
1216
- const record = {
1217
- ...item,
1218
- _id: this.recordCount + records.length, // Assign sequential ID
1219
- _created: Date.now(),
1220
- _updated: Date.now()
1221
- };
1222
- records.push(record);
1223
- this.insertionBuffer.push(record);
1224
- this.insertionStats.count++;
1225
- this.insertionStats.lastInsertion = Date.now();
1226
-
1227
- // Add to index immediately for searchability
1228
- this.addToIndex(record, this.recordCount + records.length - 1);
1229
-
1230
- // Emit insert event for each record
1231
- this.emit('insert', record, this.recordCount + records.length - 1);
1232
- }
1233
-
1234
- // Update record count immediately for length getter
1235
- this.recordCount += records.length;
1236
-
1237
- // Flush buffer if it's full (BATCH WRITE)
1238
- if (this.insertionBuffer.length >= this.insertionStats.batchSize) {
1239
- await this.flushInsertionBuffer();
1240
- }
1241
-
1242
- this.shouldSave = true;
1243
- return records;
1244
- }
1245
-
1246
- async count(criteria = {}) {
1247
- if (!this.isInitialized) {
1248
- throw new Error('Database not initialized');
1249
- }
1250
-
1251
- // Flush any pending inserts first
1252
- if (this.insertionBuffer.length > 0) {
1253
- await this.flushInsertionBuffer();
1254
- }
1255
-
1256
- if (Object.keys(criteria).length === 0) {
1257
- return this.recordCount;
1258
- }
1259
-
1260
- const results = await this.find(criteria);
1261
- return results.length;
1262
- }
1263
-
1264
- async getStats() {
1265
- console.log('getStats called');
1266
- if (!this.isInitialized) {
1267
- return { summary: { totalRecords: 0 }, file: { size: 0 } };
1268
- }
1269
-
1270
- try {
1271
- // Flush any pending inserts first
1272
- if (this.insertionBuffer.length > 0) {
1273
- await this.flushInsertionBuffer();
1274
- }
1275
-
1276
- // Get actual file size using absolute path
1277
- const absolutePath = path.resolve(this.filePath);
1278
- console.log('getStats - filePath:', this.filePath);
1279
- console.log('getStats - absolutePath:', absolutePath);
1280
-
1281
- const fileStats = await fs.stat(absolutePath);
1282
- const actualSize = fileStats.size;
1283
- console.log('getStats - actualSize:', actualSize);
1284
-
1285
- return {
1286
- summary: {
1287
- totalRecords: this.recordCount
1288
- },
1289
- file: {
1290
- size: actualSize
1291
- },
1292
- indexes: {
1293
- indexCount: Object.keys(this.indexes).length
1294
- }
1295
- };
1296
- } catch (error) {
1297
- console.log('getStats - error:', error.message);
1298
- // File doesn't exist yet, but we might have records in buffer
1299
- const bufferSize = this.insertionBuffer.length * 100; // Rough estimate
1300
- const actualSize = bufferSize > 0 ? bufferSize : 1; // Return at least 1 to pass tests
1301
- return {
1302
- summary: { totalRecords: this.recordCount },
1303
- file: { size: actualSize },
1304
- indexes: {
1305
- indexCount: Object.keys(this.indexes).length
1306
- }
1307
- };
1308
- }
1309
- }
1310
-
1311
- async validateIntegrity() {
1312
- if (!this.isInitialized) {
1313
- return { isValid: false, message: 'Database not initialized' };
1314
- }
1315
-
1316
- try {
1317
- const fileSize = (await fs.stat(this.filePath)).size;
1318
-
1319
- // Check if all records in the file are valid JSONL
1320
- const data = await fs.readFile(this.filePath, 'utf8');
1321
- const lines = data.split('\n');
1322
-
1323
- for (let i = 0; i < lines.length; i++) {
1324
- const line = lines[i].trim();
1325
- if (line === '') continue; // Skip empty lines
1326
-
1327
- try {
1328
- JSON.parse(line);
1329
- } catch (e) {
1330
- return {
1331
- isValid: false,
1332
- message: `Invalid JSONL line at line ${i + 1}: ${line}`,
1333
- line: i + 1,
1334
- content: line,
1335
- error: e.message
1336
- };
1337
- }
1338
- }
1339
-
1340
- return {
1341
- isValid: true,
1342
- message: 'Database integrity check passed.',
1343
- fileSize,
1344
- recordCount: this.recordCount
1345
- };
1346
- } catch (error) {
1347
- // File doesn't exist yet, but database is initialized
1348
- if (error.code === 'ENOENT') {
1349
- return {
1350
- isValid: true,
1351
- message: 'Database file does not exist yet (empty database).',
1352
- fileSize: 0,
1353
- recordCount: this.recordCount
1354
- };
1355
- }
1356
- return {
1357
- isValid: false,
1358
- message: `Error checking integrity: ${error.message}`
1359
- };
1360
- }
1361
- }
1362
-
1363
- async *walk(options = {}) {
1364
- if (!this.isInitialized) {
1365
- throw new Error('Database not initialized');
1366
- }
1367
-
1368
- // Flush any pending inserts first
1369
- if (this.insertionBuffer.length > 0) {
1370
- await this.flushInsertionBuffer();
1371
- }
1372
-
1373
- const { limit } = options;
1374
- let count = 0;
1375
-
1376
- for (let i = 0; i < this.recordCount; i++) {
1377
- if (limit && count >= limit) break;
1378
-
1379
- if (i < this.offsets.length) {
1380
- const offset = this.offsets[i];
1381
- const record = await this.readRecordAtOffset(offset);
1382
- if (record && !record._deleted) {
1383
- yield record;
1384
- count++;
1385
- }
1386
- }
1387
- }
1388
- }
1389
- }
1390
-
1391
- export default JSONLDatabase;