jexidb 2.1.6 → 2.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Database.cjs +1061 -240
- package/package.json +3 -2
- package/src/Database.mjs +380 -117
- package/src/FileHandler.mjs +130 -33
- package/src/managers/QueryManager.mjs +3 -4
package/dist/Database.cjs
CHANGED
|
@@ -1,7 +1,6 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
3
|
var events = require('events');
|
|
4
|
-
var asyncMutex = require('async-mutex');
|
|
5
4
|
var fs = require('fs');
|
|
6
5
|
var readline = require('readline');
|
|
7
6
|
var path = require('path');
|
|
@@ -124,6 +123,182 @@ AsyncGenerator.prototype["function" == typeof Symbol && Symbol.asyncIterator ||
|
|
|
124
123
|
return this._invoke("return", e);
|
|
125
124
|
};
|
|
126
125
|
|
|
126
|
+
// Sentinel error used to reject queued lock requests when a Semaphore/Mutex
// acquisition is cancelled via cancel(). (Vendored from async-mutex.)
const E_CANCELED = new Error('request for lock canceled');

// TypeScript-emitted __awaiter helper (vendored): drives a generator-based
// coroutine to completion, wrapping each yielded value in a promise of
// constructor P (defaults to the global Promise).
var __awaiter$2 = function (thisArg, _arguments, P, generator) {
    // Wrap non-promise values so .then() is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until done, chaining each yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
137
|
+
/**
 * Counting semaphore supporting weighted acquisition and priorities.
 * (Vendored from the async-mutex package.)
 *
 * acquire() resolves to a [valueBeforeAcquire, release] pair; pending
 * requests are kept in a priority queue (higher priority first, FIFO among
 * equals). cancel() rejects all queued acquirers with _cancelError.
 */
class Semaphore {
    constructor(_value, _cancelError = E_CANCELED) {
        // Remaining capacity; acquire() subtracts the requested weight.
        this._value = _value;
        // Error used to reject pending acquirers when cancel() is called.
        this._cancelError = _cancelError;
        // Pending acquire() tasks, ordered by descending priority.
        this._queue = [];
        // waitForUnlock() waiters, bucketed by (weight - 1).
        this._weightedWaiters = [];
    }
    // Requests `weight` units of capacity (must be positive).
    acquire(weight = 1, priority = 0) {
        if (weight <= 0)
            throw new Error(`invalid weight ${weight}: must be positive`);
        return new Promise((resolve, reject) => {
            const task = { resolve, reject, weight, priority };
            const i = findIndexFromEnd(this._queue, (other) => priority <= other.priority);
            if (i === -1 && weight <= this._value) {
                // Needs immediate dispatch, skip the queue
                this._dispatchItem(task);
            }
            else {
                this._queue.splice(i + 1, 0, task);
            }
        });
    }
    // Acquires, runs `callback` with the pre-acquire value, and always releases.
    runExclusive(callback_1) {
        return __awaiter$2(this, arguments, void 0, function* (callback, weight = 1, priority = 0) {
            const [value, release] = yield this.acquire(weight, priority);
            try {
                return yield callback(value);
            }
            finally {
                release();
            }
        });
    }
    // Resolves once `weight` units could be acquired at the given priority
    // (does not actually acquire anything).
    waitForUnlock(weight = 1, priority = 0) {
        if (weight <= 0)
            throw new Error(`invalid weight ${weight}: must be positive`);
        if (this._couldLockImmediately(weight, priority)) {
            return Promise.resolve();
        }
        else {
            return new Promise((resolve) => {
                if (!this._weightedWaiters[weight - 1])
                    this._weightedWaiters[weight - 1] = [];
                insertSorted(this._weightedWaiters[weight - 1], { resolve, priority });
            });
        }
    }
    isLocked() {
        return this._value <= 0;
    }
    getValue() {
        return this._value;
    }
    // Replaces the current capacity and dispatches whatever now fits.
    setValue(value) {
        this._value = value;
        this._dispatchQueue();
    }
    // Returns `weight` units of capacity and dispatches pending tasks.
    release(weight = 1) {
        if (weight <= 0)
            throw new Error(`invalid weight ${weight}: must be positive`);
        this._value += weight;
        this._dispatchQueue();
    }
    // Rejects every queued acquirer with the cancel error.
    cancel() {
        this._queue.forEach((entry) => entry.reject(this._cancelError));
        this._queue = [];
    }
    // Dispatches queued tasks (in priority order) while capacity allows,
    // waking waitForUnlock() waiters between dispatches.
    _dispatchQueue() {
        this._drainUnlockWaiters();
        while (this._queue.length > 0 && this._queue[0].weight <= this._value) {
            this._dispatchItem(this._queue.shift());
            this._drainUnlockWaiters();
        }
    }
    // Consumes the task's weight and resolves it with [previousValue, releaser].
    _dispatchItem(item) {
        const previousValue = this._value;
        this._value -= item.weight;
        item.resolve([previousValue, this._newReleaser(item.weight)]);
    }
    // Builds an idempotent release function for `weight` units.
    _newReleaser(weight) {
        let called = false;
        return () => {
            if (called)
                return;
            called = true;
            this.release(weight);
        };
    }
    // Wakes waitForUnlock() waiters whose weight currently fits; while tasks
    // are still queued, only waiters outranking the queue head are woken.
    _drainUnlockWaiters() {
        if (this._queue.length === 0) {
            for (let weight = this._value; weight > 0; weight--) {
                const waiters = this._weightedWaiters[weight - 1];
                if (!waiters)
                    continue;
                waiters.forEach((waiter) => waiter.resolve());
                this._weightedWaiters[weight - 1] = [];
            }
        }
        else {
            const queuedPriority = this._queue[0].priority;
            for (let weight = this._value; weight > 0; weight--) {
                const waiters = this._weightedWaiters[weight - 1];
                if (!waiters)
                    continue;
                const i = waiters.findIndex((waiter) => waiter.priority <= queuedPriority);
                (i === -1 ? waiters : waiters.splice(0, i))
                    .forEach((waiter => waiter.resolve()));
            }
        }
    }
    // True when an acquire(weight, priority) would skip the queue right now.
    _couldLockImmediately(weight, priority) {
        return (this._queue.length === 0 || this._queue[0].priority < priority) &&
            weight <= this._value;
    }
}
|
|
252
|
+
/**
 * Inserts `item` into `list`, keeping it ordered by descending priority;
 * among equal priorities the new item goes after existing ones (FIFO).
 */
function insertSorted(list, item) {
    const anchor = findIndexFromEnd(list, (existing) => item.priority <= existing.priority);
    list.splice(anchor + 1, 0, item);
}

/**
 * Returns the index of the LAST element satisfying `predicate`, or -1.
 */
function findIndexFromEnd(list, predicate) {
    let index = list.length;
    while (index-- > 0) {
        if (predicate(list[index])) {
            return index;
        }
    }
    return -1;
}
|
|
264
|
+
|
|
265
|
+
// TypeScript-emitted __awaiter helper — an exact duplicate of __awaiter$2
// (the bundler emits one copy per vendored module). Drives a generator-based
// coroutine to completion, wrapping yielded values in promises.
var __awaiter$1 = function (thisArg, _arguments, P, generator) {
    // Wrap non-promise values so .then() is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Advance the generator until done, chaining each yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
274
|
+
/**
 * Mutual-exclusion lock: a thin wrapper over a Semaphore of capacity 1.
 * (Vendored from the async-mutex package.)
 */
class Mutex {
    constructor(cancelError) {
        this._semaphore = new Semaphore(1, cancelError);
    }
    // Resolves to a release function once the lock is held.
    acquire() {
        return __awaiter$1(this, arguments, void 0, function* (priority = 0) {
            const [, releaser] = yield this._semaphore.acquire(1, priority);
            return releaser;
        });
    }
    // Runs `callback` while holding the lock; always releases afterwards.
    runExclusive(callback, priority = 0) {
        return this._semaphore.runExclusive(() => callback(), 1, priority);
    }
    isLocked() {
        return this._semaphore.isLocked();
    }
    waitForUnlock(priority = 0) {
        return this._semaphore.waitForUnlock(1, priority);
    }
    // No-op when not locked, so a double release is harmless.
    release() {
        if (this._semaphore.isLocked())
            this._semaphore.release();
    }
    // Rejects all pending acquire() calls with the cancel error.
    cancel() {
        return this._semaphore.cancel();
    }
}
|
|
301
|
+
|
|
127
302
|
const aliasToCanonical = {
|
|
128
303
|
'>': '$gt',
|
|
129
304
|
'>=': '$gte',
|
|
@@ -246,7 +421,7 @@ class IndexManager {
|
|
|
246
421
|
|
|
247
422
|
// CRITICAL: Use database mutex to prevent deadlocks
|
|
248
423
|
// If no database mutex provided, create a local one (for backward compatibility)
|
|
249
|
-
this.mutex = databaseMutex || new
|
|
424
|
+
this.mutex = databaseMutex || new Mutex();
|
|
250
425
|
this.indexedFields = [];
|
|
251
426
|
this.setIndexesConfig(this.opts.indexes);
|
|
252
427
|
}
|
|
@@ -3249,6 +3424,266 @@ class Serializer {
|
|
|
3249
3424
|
}
|
|
3250
3425
|
}
|
|
3251
3426
|
|
|
3427
|
+
// Cached Object.prototype.toString for realm-safe error brand checks.
const objectToString = Object.prototype.toString;

// Brand check that works across realms, unlike `value instanceof Error`.
const isError = value => objectToString.call(value) === '[object Error]';

// Known fetch network-failure messages per runtime. (Vendored from is-network-error.)
const errorMessages = new Set([
	'network error', // Chrome
	'Failed to fetch', // Chrome
	'NetworkError when attempting to fetch resource.', // Firefox
	'The Internet connection appears to be offline.', // Safari 16
	'Network request failed', // `cross-fetch`
	'fetch failed', // Undici (Node.js)
	'terminated', // Undici (Node.js)
	' A network error occurred.', // Bun (WebKit)
	'Network connection lost', // Cloudflare Workers (fetch)
]);

/**
 * Returns true when `error` is a fetch-style network failure:
 * a TypeError whose message matches a known runtime-specific pattern.
 */
function isNetworkError(error) {
	if (!error || !isError(error)) {
		return false;
	}
	if (error.name !== 'TypeError' || typeof error.message !== 'string') {
		return false;
	}

	const {message, stack} = error;

	// Safari 17+ has generic message but no stack for network errors
	if (message === 'Load failed') {
		// Sentry adds its own stack trace to the fetch error, so also check for that
		return stack === undefined || '__sentry_captured__' in error;
	}

	// Deno network errors start with specific text
	if (message.startsWith('error sending request for url')) {
		return true;
	}

	// Standard network error messages
	return errorMessages.has(message);
}
|
|
3470
|
+
|
|
3471
|
+
/**
 * Validates the `retries` option: must be undefined or a non-negative
 * number (Infinity allowed). Throws TypeError otherwise.
 */
function validateRetries(retries) {
	if (retries === undefined) {
		return; // option not supplied — defaults apply later
	}

	if (typeof retries !== 'number') {
		throw new TypeError('Expected `retries` to be a number or Infinity.');
	}

	if (retries < 0) {
		throw new TypeError('Expected `retries` to be a non-negative number.');
	}

	if (Number.isNaN(retries)) {
		throw new TypeError('Expected `retries` to be a valid number or Infinity, got NaN.');
	}
}
|
|
3484
|
+
|
|
3485
|
+
/**
 * Validates a numeric option: when present it must be a real number,
 * at least `min`, and finite unless `allowInfinity` is set.
 * Throws TypeError with an option-specific message on violation.
 */
function validateNumberOption(name, value, {min = 0, allowInfinity = false} = {}) {
	if (value === undefined) {
		return; // option not supplied — nothing to validate
	}

	const isNumeric = typeof value === 'number' && !Number.isNaN(value);
	if (!isNumeric) {
		throw new TypeError(`Expected \`${name}\` to be a number${allowInfinity ? ' or Infinity' : ''}.`);
	}

	if (!allowInfinity && !Number.isFinite(value)) {
		throw new TypeError(`Expected \`${name}\` to be a finite number.`);
	}

	if (value < min) {
		throw new TypeError(`Expected \`${name}\` to be \u2265 ${min}.`);
	}
}
|
|
3502
|
+
|
|
3503
|
+
/**
 * Explicit "stop retrying" signal for pRetry. Throwing an AbortError from
 * the retried function makes onAttemptFailure rethrow `originalError` to
 * the caller instead of scheduling another attempt.
 * Accepts either a message string or an Error to wrap.
 */
class AbortError extends Error {
	constructor(message) {
		super();

		if (message instanceof Error) {
			this.originalError = message;
			// Unwrap: reuse the wrapped error's message string below.
			({message} = message);
		} else {
			this.originalError = new Error(message);
			this.originalError.stack = this.stack;
		}

		this.name = 'AbortError';
		this.message = message;
	}
}
|
|
3519
|
+
|
|
3520
|
+
/**
 * Computes the exponential-backoff delay (ms) for the next retry:
 * round(jitter * minTimeout * factor^retriesConsumed), capped at maxTimeout,
 * where jitter is drawn from [1, 2) when options.randomize is set, else 1.
 */
function calculateDelay(retriesConsumed, options) {
	const exponent = Math.max(1, retriesConsumed + 1) - 1;
	const jitter = options.randomize ? Math.random() + 1 : 1;
	const backoff = Math.round(jitter * options.minTimeout * (options.factor ** exponent));
	return Math.min(backoff, options.maxTimeout);
}
|
|
3529
|
+
|
|
3530
|
+
/**
 * Returns how much of the `max` retry-time budget (ms) remains since
 * `start` (a performance.now() timestamp). Infinite budgets are returned
 * unchanged so elapsed time never erodes them.
 */
function calculateRemainingTime(start, max) {
	if (Number.isFinite(max)) {
		const elapsed = performance.now() - start;
		return max - elapsed;
	}

	return max;
}
|
|
3537
|
+
|
|
3538
|
+
/**
 * Shared failure path for each pRetry attempt (vendored p-retry internals).
 * Normalizes the thrown value, notifies the user hooks, enforces the retry
 * count and maxRetryTime budgets, then sleeps the backoff delay (abortable
 * via options.signal).
 *
 * Returns true when this retry consumed the retry budget, false for a
 * "free" retry (shouldConsumeRetry declined). Throws — ending the retry
 * loop — when retrying is not allowed.
 */
async function onAttemptFailure({error, attemptNumber, retriesConsumed, startTime, options}) {
	const normalizedError = error instanceof Error
		? error
		: new TypeError(`Non-error was thrown: "${error}". You should only throw errors.`);

	// AbortError is the explicit "stop retrying" signal: surface the wrapped error.
	if (normalizedError instanceof AbortError) {
		throw normalizedError.originalError;
	}

	const retriesLeft = Number.isFinite(options.retries)
		? Math.max(0, options.retries - retriesConsumed)
		: options.retries;

	const maxRetryTime = options.maxRetryTime ?? Number.POSITIVE_INFINITY;

	// Frozen snapshot handed to every user hook for this attempt.
	const context = Object.freeze({
		error: normalizedError,
		attemptNumber,
		retriesLeft,
		retriesConsumed,
	});

	await options.onFailedAttempt(context);

	// Overall time budget exhausted — give up before consulting further hooks.
	if (calculateRemainingTime(startTime, maxRetryTime) <= 0) {
		throw normalizedError;
	}

	const consumeRetry = await options.shouldConsumeRetry(context);

	// Re-check the budget: the hooks above may themselves have taken time.
	const remainingTime = calculateRemainingTime(startTime, maxRetryTime);

	if (remainingTime <= 0 || retriesLeft <= 0) {
		throw normalizedError;
	}

	// Plain TypeErrors that are not network failures look like programming
	// errors: only retried for free; consuming a retry on them aborts instead.
	if (normalizedError instanceof TypeError && !isNetworkError(normalizedError)) {
		if (consumeRetry) {
			throw normalizedError;
		}

		options.signal?.throwIfAborted();
		return false;
	}

	if (!await options.shouldRetry(context)) {
		throw normalizedError;
	}

	if (!consumeRetry) {
		options.signal?.throwIfAborted();
		return false;
	}

	// Exponential backoff, clamped so we never sleep past the time budget.
	const delayTime = calculateDelay(retriesConsumed, options);
	const finalDelay = Math.min(delayTime, remainingTime);

	options.signal?.throwIfAborted();

	if (finalDelay > 0) {
		// Abortable sleep: the abort listener cancels the timer, and the
		// timer removes the abort listener — whichever fires first cleans up.
		await new Promise((resolve, reject) => {
			const onAbort = () => {
				clearTimeout(timeoutToken);
				options.signal?.removeEventListener('abort', onAbort);
				reject(options.signal.reason);
			};

			const timeoutToken = setTimeout(() => {
				options.signal?.removeEventListener('abort', onAbort);
				resolve();
			}, finalDelay);

			if (options.unref) {
				timeoutToken.unref?.();
			}

			options.signal?.addEventListener('abort', onAbort, {once: true});
		});
	}

	options.signal?.throwIfAborted();

	return true;
}
|
|
3622
|
+
|
|
3623
|
+
/**
 * Retries `input` (called with the 1-based attempt number) until it
 * resolves, the retry/time budget is exhausted, an AbortError is thrown,
 * or options.signal aborts. (Vendored p-retry implementation.)
 *
 * Options (all optional): retries (default 10, Infinity allowed), factor,
 * minTimeout, maxTimeout, maxRetryTime, randomize, signal, unref, and the
 * hooks onFailedAttempt / shouldRetry / shouldConsumeRetry.
 * Resolves with input's result; rejects with the last normalized error.
 */
async function pRetry(input, options = {}) {
	// Shallow-copy so defaulting below never mutates the caller's object.
	options = {...options};

	validateRetries(options.retries);

	if (Object.hasOwn(options, 'forever')) {
		throw new Error('The `forever` option is no longer supported. For many use-cases, you can set `retries: Infinity` instead.');
	}

	options.retries ??= 10;
	options.factor ??= 2;
	options.minTimeout ??= 1000;
	options.maxTimeout ??= Number.POSITIVE_INFINITY;
	options.maxRetryTime ??= Number.POSITIVE_INFINITY;
	options.randomize ??= false;
	options.onFailedAttempt ??= () => {};
	options.shouldRetry ??= () => true;
	options.shouldConsumeRetry ??= () => true;

	// Validate numeric options and normalize edge cases
	validateNumberOption('factor', options.factor, {min: 0, allowInfinity: false});
	validateNumberOption('minTimeout', options.minTimeout, {min: 0, allowInfinity: false});
	validateNumberOption('maxTimeout', options.maxTimeout, {min: 0, allowInfinity: true});
	validateNumberOption('maxRetryTime', options.maxRetryTime, {min: 0, allowInfinity: true});

	// Treat non-positive factor as 1 to avoid zero backoff or negative behavior
	if (!(options.factor > 0)) {
		options.factor = 1;
	}

	options.signal?.throwIfAborted();

	let attemptNumber = 0;
	let retriesConsumed = 0;
	const startTime = performance.now();

	while (Number.isFinite(options.retries) ? retriesConsumed <= options.retries : true) {
		attemptNumber++;

		try {
			options.signal?.throwIfAborted();

			const result = await input(attemptNumber);

			options.signal?.throwIfAborted();

			return result;
		} catch (error) {
			// onAttemptFailure throws to stop retrying; it returns whether
			// this failure counted against the retry budget.
			if (await onAttemptFailure({
				error,
				attemptNumber,
				retriesConsumed,
				startTime,
				options,
			})) {
				retriesConsumed++;
			}
		}
	}

	// Should not reach here, but in case it does, throw an error
	throw new Error('Retry attempts exhausted without throwing an error.');
}
|
|
3686
|
+
|
|
3252
3687
|
/**
|
|
3253
3688
|
* OperationQueue - Queue system for database operations
|
|
3254
3689
|
* Resolves race conditions between concurrent operations
|
|
@@ -3691,6 +4126,37 @@ class FileHandler {
|
|
|
3691
4126
|
// Global I/O limiter to prevent file descriptor exhaustion in concurrent operations
|
|
3692
4127
|
this.readLimiter = pLimit(opts.maxConcurrentReads || 4);
|
|
3693
4128
|
}
|
|
4129
|
+
_getIoTimeoutMs(override) {
|
|
4130
|
+
if (typeof override === 'number') return override;
|
|
4131
|
+
if (typeof this.opts.ioTimeoutMs === 'number') return this.opts.ioTimeoutMs;
|
|
4132
|
+
return 0;
|
|
4133
|
+
}
|
|
4134
|
+
  /**
   * Races `fn()` against a timeout. When timeoutMs is falsy or <= 0 the
   * timeout machinery is skipped entirely and fn() runs plain.
   * On timeout, the optional `onTimeout` callback fires first (its errors
   * are swallowed — it is best-effort cleanup, e.g. closing a hung fd),
   * then the race rejects with an Error carrying code 'ETIMEDOUT'.
   * The timer is always cleared, whichever side wins.
   */
  async _withIoTimeout(fn, timeoutMs, onTimeout) {
    if (!timeoutMs || timeoutMs <= 0) {
      return fn();
    }
    let timeoutId;
    const timeoutPromise = new Promise((_, reject) => {
      timeoutId = setTimeout(() => {
        if (onTimeout) {
          try {
            onTimeout();
          } catch {}
        }
        const err = new Error(`I/O timeout after ${timeoutMs}ms`);
        err.code = 'ETIMEDOUT';
        reject(err);
      }, timeoutMs);
    });
    try {
      return await Promise.race([fn(), timeoutPromise]);
    } finally {
      // Prevents a late rejection from the losing timeout branch.
      if (timeoutId) clearTimeout(timeoutId);
    }
  }
|
|
4157
|
+
async _readWithTimeout(fd, buffer, offset, length, position, timeoutMs) {
|
|
4158
|
+
return this._withIoTimeout(() => fd.read(buffer, offset, length, position), timeoutMs, () => fd.close().catch(() => {}));
|
|
4159
|
+
}
|
|
3694
4160
|
async truncate(offset) {
|
|
3695
4161
|
try {
|
|
3696
4162
|
await fs.promises.access(this.file, fs.constants.F_OK);
|
|
@@ -3795,6 +4261,7 @@ class FileHandler {
|
|
|
3795
4261
|
if (!(await this.exists())) {
|
|
3796
4262
|
return Buffer.alloc(0); // Return empty buffer if file doesn't exist
|
|
3797
4263
|
}
|
|
4264
|
+
const timeoutMs = this._getIoTimeoutMs();
|
|
3798
4265
|
let fd = await fs.promises.open(this.file, 'r');
|
|
3799
4266
|
try {
|
|
3800
4267
|
// CRITICAL FIX: Check file size before attempting to read
|
|
@@ -3819,7 +4286,7 @@ class FileHandler {
|
|
|
3819
4286
|
let buffer = Buffer.alloc(length);
|
|
3820
4287
|
const {
|
|
3821
4288
|
bytesRead
|
|
3822
|
-
} = await
|
|
4289
|
+
} = await this._readWithTimeout(fd, buffer, 0, length, start, timeoutMs);
|
|
3823
4290
|
await fd.close();
|
|
3824
4291
|
|
|
3825
4292
|
// CRITICAL FIX: Ensure we read the expected amount of data
|
|
@@ -4521,14 +4988,50 @@ class FileHandler {
|
|
|
4521
4988
|
// Add a small delay to ensure any pending operations complete
|
|
4522
4989
|
await new Promise(resolve => setTimeout(resolve, 5));
|
|
4523
4990
|
// Use global read limiter to prevent file descriptor exhaustion
|
|
4524
|
-
return this.readLimiter(() => this.
|
|
4991
|
+
return this.readLimiter(() => this._readWithStreamingRetry(criteria, options, matchesCriteria, serializer));
|
|
4525
4992
|
});
|
|
4526
4993
|
} else {
|
|
4527
4994
|
// Use global read limiter to prevent file descriptor exhaustion
|
|
4528
|
-
return this.readLimiter(() => this.
|
|
4995
|
+
return this.readLimiter(() => this._readWithStreamingRetry(criteria, options, matchesCriteria, serializer));
|
|
4529
4996
|
}
|
|
4530
4997
|
}
|
|
4531
|
-
async
|
|
4998
|
+
async _readWithStreamingRetry(criteria, options = {}, matchesCriteria, serializer = null) {
|
|
4999
|
+
// If no timeout configured, use original implementation without retry
|
|
5000
|
+
if (!options.ioTimeoutMs) {
|
|
5001
|
+
return this._readWithStreamingInternal(criteria, options, matchesCriteria, serializer);
|
|
5002
|
+
}
|
|
5003
|
+
const timeoutMs = options.ioTimeoutMs || 5000; // Default 5s timeout per attempt
|
|
5004
|
+
const maxRetries = options.maxRetries || 3;
|
|
5005
|
+
return pRetry(async attempt => {
|
|
5006
|
+
const controller = new AbortController();
|
|
5007
|
+
const timeout = setTimeout(() => controller.abort(), timeoutMs);
|
|
5008
|
+
try {
|
|
5009
|
+
const results = await this._readWithStreamingInternal(criteria, options, matchesCriteria, serializer, controller.signal);
|
|
5010
|
+
return results;
|
|
5011
|
+
} catch (error) {
|
|
5012
|
+
if (error.name === 'AbortError' || error.code === 'ETIMEDOUT') {
|
|
5013
|
+
if (this.opts.debugMode) {
|
|
5014
|
+
console.log(`⚠️ Streaming read attempt ${attempt} timed out, retrying...`);
|
|
5015
|
+
}
|
|
5016
|
+
throw error; // p-retry will retry
|
|
5017
|
+
}
|
|
5018
|
+
// For other errors, don't retry
|
|
5019
|
+
throw new pRetry.AbortError(error);
|
|
5020
|
+
} finally {
|
|
5021
|
+
clearTimeout(timeout);
|
|
5022
|
+
}
|
|
5023
|
+
}, {
|
|
5024
|
+
retries: maxRetries,
|
|
5025
|
+
minTimeout: 100,
|
|
5026
|
+
maxTimeout: 1000,
|
|
5027
|
+
onFailedAttempt: error => {
|
|
5028
|
+
if (this.opts.debugMode) {
|
|
5029
|
+
console.log(`Streaming read failed (attempt ${error.attemptNumber}), ${error.retriesLeft} retries left`);
|
|
5030
|
+
}
|
|
5031
|
+
}
|
|
5032
|
+
});
|
|
5033
|
+
}
|
|
5034
|
+
async _readWithStreamingInternal(criteria, options = {}, matchesCriteria, serializer = null, signal = null) {
|
|
4532
5035
|
const {
|
|
4533
5036
|
limit,
|
|
4534
5037
|
skip = 0
|
|
@@ -4558,6 +5061,14 @@ class FileHandler {
|
|
|
4558
5061
|
crlfDelay: Infinity // Better performance
|
|
4559
5062
|
});
|
|
4560
5063
|
|
|
5064
|
+
// Handle abort signal
|
|
5065
|
+
if (signal) {
|
|
5066
|
+
signal.addEventListener('abort', () => {
|
|
5067
|
+
stream.destroy();
|
|
5068
|
+
rl.close();
|
|
5069
|
+
});
|
|
5070
|
+
}
|
|
5071
|
+
|
|
4561
5072
|
// Process line by line
|
|
4562
5073
|
var _iteratorAbruptCompletion3 = false;
|
|
4563
5074
|
var _didIteratorError3 = false;
|
|
@@ -4566,41 +5077,46 @@ class FileHandler {
|
|
|
4566
5077
|
for (var _iterator3 = _asyncIterator(rl), _step3; _iteratorAbruptCompletion3 = !(_step3 = await _iterator3.next()).done; _iteratorAbruptCompletion3 = false) {
|
|
4567
5078
|
const line = _step3.value;
|
|
4568
5079
|
{
|
|
4569
|
-
if (
|
|
4570
|
-
|
|
4571
|
-
|
|
4572
|
-
|
|
4573
|
-
// Use serializer for deserialization
|
|
4574
|
-
record = serializer.deserialize(line);
|
|
4575
|
-
} else {
|
|
4576
|
-
// Fallback to JSON.parse for backward compatibility
|
|
4577
|
-
record = JSON.parse(line);
|
|
4578
|
-
}
|
|
4579
|
-
if (record && matchesCriteria(record, criteria)) {
|
|
4580
|
-
// Return raw data - term mapping will be handled by Database layer
|
|
4581
|
-
results.push({
|
|
4582
|
-
...record,
|
|
4583
|
-
_: lineNumber
|
|
4584
|
-
});
|
|
4585
|
-
matched++;
|
|
5080
|
+
if (signal && signal.aborted) {
|
|
5081
|
+
break; // Stop if aborted
|
|
5082
|
+
}
|
|
5083
|
+
lineNumber++;
|
|
4586
5084
|
|
|
4587
|
-
|
|
4588
|
-
|
|
4589
|
-
|
|
4590
|
-
|
|
4591
|
-
|
|
4592
|
-
|
|
4593
|
-
|
|
4594
|
-
|
|
4595
|
-
|
|
4596
|
-
|
|
5085
|
+
// Skip lines that were already processed in previous attempts
|
|
5086
|
+
if (lineNumber <= skip) {
|
|
5087
|
+
skipped++;
|
|
5088
|
+
continue;
|
|
5089
|
+
}
|
|
5090
|
+
try {
|
|
5091
|
+
let record;
|
|
5092
|
+
if (serializer && typeof serializer.deserialize === 'function') {
|
|
5093
|
+
// Use serializer for deserialization
|
|
5094
|
+
record = serializer.deserialize(line);
|
|
5095
|
+
} else {
|
|
5096
|
+
// Fallback to JSON.parse for backward compatibility
|
|
5097
|
+
record = JSON.parse(line);
|
|
5098
|
+
}
|
|
5099
|
+
if (record && matchesCriteria(record, criteria)) {
|
|
5100
|
+
// Return raw data - term mapping will be handled by Database layer
|
|
5101
|
+
results.push({
|
|
5102
|
+
...record,
|
|
5103
|
+
_: lineNumber
|
|
5104
|
+
});
|
|
5105
|
+
matched++;
|
|
5106
|
+
|
|
5107
|
+
// Check if we've reached the limit
|
|
5108
|
+
if (results.length >= limit) {
|
|
5109
|
+
break;
|
|
4597
5110
|
}
|
|
4598
|
-
// Ignore invalid lines - they may be partial writes
|
|
4599
5111
|
}
|
|
4600
|
-
}
|
|
4601
|
-
|
|
5112
|
+
} catch (error) {
|
|
5113
|
+
// CRITICAL FIX: Only log errors if they're not expected during concurrent operations
|
|
5114
|
+
// Don't log JSON parsing errors that occur during file writes
|
|
5115
|
+
if (this.opts && this.opts.debugMode && !error.message.includes('Unexpected')) {
|
|
5116
|
+
console.log(`Error reading line ${lineNumber}:`, error.message);
|
|
5117
|
+
}
|
|
5118
|
+
// Ignore invalid lines - they may be partial writes
|
|
4602
5119
|
}
|
|
4603
|
-
lineNumber++;
|
|
4604
5120
|
processed++;
|
|
4605
5121
|
}
|
|
4606
5122
|
}
|
|
@@ -4623,6 +5139,10 @@ class FileHandler {
|
|
|
4623
5139
|
}
|
|
4624
5140
|
return results;
|
|
4625
5141
|
} catch (error) {
|
|
5142
|
+
if (error.message === 'AbortError') {
|
|
5143
|
+
// Return partial results if aborted
|
|
5144
|
+
return results;
|
|
5145
|
+
}
|
|
4626
5146
|
console.error('Error in readWithStreaming:', error);
|
|
4627
5147
|
throw error;
|
|
4628
5148
|
}
|
|
@@ -5513,7 +6033,6 @@ class QueryManager {
|
|
|
5513
6033
|
// OPTIMIZATION: Use ranges instead of reading entire file
|
|
5514
6034
|
const ranges = this.database.getRanges(batch);
|
|
5515
6035
|
const groupedRanges = await this.fileHandler.groupedRanges(ranges);
|
|
5516
|
-
const fs = await import('fs');
|
|
5517
6036
|
const fd = await fs.promises.open(this.fileHandler.file, 'r');
|
|
5518
6037
|
try {
|
|
5519
6038
|
for (const groupedRange of groupedRanges) {
|
|
@@ -5768,7 +6287,6 @@ class QueryManager {
|
|
|
5768
6287
|
const ranges = this.database.getRanges(fileLineNumbers);
|
|
5769
6288
|
if (ranges.length > 0) {
|
|
5770
6289
|
const groupedRanges = await this.database.fileHandler.groupedRanges(ranges);
|
|
5771
|
-
const fs = await import('fs');
|
|
5772
6290
|
const fd = await fs.promises.open(this.database.fileHandler.file, 'r');
|
|
5773
6291
|
try {
|
|
5774
6292
|
for (const groupedRange of groupedRanges) {
|
|
@@ -8106,7 +8624,7 @@ class Database extends events.EventEmitter {
|
|
|
8106
8624
|
this.initializeManagers();
|
|
8107
8625
|
|
|
8108
8626
|
// Initialize file mutex for thread safety
|
|
8109
|
-
this.fileMutex = new
|
|
8627
|
+
this.fileMutex = new Mutex();
|
|
8110
8628
|
|
|
8111
8629
|
// Initialize performance tracking
|
|
8112
8630
|
this.performanceStats = {
|
|
@@ -10703,94 +11221,147 @@ class Database extends events.EventEmitter {
|
|
|
10703
11221
|
let count = 0;
|
|
10704
11222
|
const startTime = Date.now();
|
|
10705
11223
|
|
|
10706
|
-
//
|
|
10707
|
-
if (
|
|
10708
|
-
|
|
10709
|
-
|
|
10710
|
-
|
|
10711
|
-
|
|
10712
|
-
|
|
10713
|
-
|
|
10714
|
-
|
|
10715
|
-
|
|
10716
|
-
|
|
10717
|
-
|
|
10718
|
-
|
|
10719
|
-
|
|
10720
|
-
|
|
10721
|
-
|
|
10722
|
-
|
|
10723
|
-
|
|
10724
|
-
|
|
10725
|
-
|
|
10726
|
-
|
|
10727
|
-
|
|
10728
|
-
|
|
10729
|
-
|
|
10730
|
-
|
|
10731
|
-
|
|
10732
|
-
|
|
10733
|
-
|
|
10734
|
-
|
|
10735
|
-
|
|
10736
|
-
|
|
10737
|
-
|
|
11224
|
+
// Use retry for the streaming rebuild only if timeout is configured
|
|
11225
|
+
if (this.opts.ioTimeoutMs && this.opts.ioTimeoutMs > 0) {
|
|
11226
|
+
count = await this._rebuildIndexesWithRetry();
|
|
11227
|
+
} else {
|
|
11228
|
+
// Use original logic without retry for backward compatibility
|
|
11229
|
+
count = await this._rebuildIndexesOriginal();
|
|
11230
|
+
}
|
|
11231
|
+
|
|
11232
|
+
// Update indexManager totalLines
|
|
11233
|
+
if (this.indexManager) {
|
|
11234
|
+
this.indexManager.setTotalLines(this.offsets.length);
|
|
11235
|
+
}
|
|
11236
|
+
this._indexRebuildNeeded = false;
|
|
11237
|
+
if (this.opts.debugMode) {
|
|
11238
|
+
console.log(`✅ Index rebuilt from ${count} records in ${Date.now() - startTime}ms`);
|
|
11239
|
+
}
|
|
11240
|
+
|
|
11241
|
+
// Save the rebuilt index
|
|
11242
|
+
await this._saveIndexDataToFile();
|
|
11243
|
+
} catch (error) {
|
|
11244
|
+
if (this.opts.debugMode) {
|
|
11245
|
+
console.error('❌ Failed to rebuild indexes:', error.message);
|
|
11246
|
+
}
|
|
11247
|
+
// Don't throw - queries will fall back to streaming
|
|
11248
|
+
}
|
|
11249
|
+
}
|
|
11250
|
+
|
|
11251
|
+
/**
|
|
11252
|
+
* Rebuild indexes with retry logic to handle I/O hangs
|
|
11253
|
+
* @private
|
|
11254
|
+
*/
|
|
11255
|
+
async _rebuildIndexesWithRetry() {
|
|
11256
|
+
// If no timeout configured, use original implementation without retry
|
|
11257
|
+
if (!this.opts.ioTimeoutMs) {
|
|
11258
|
+
return this._rebuildIndexesOriginal();
|
|
11259
|
+
}
|
|
11260
|
+
const timeoutMs = this.opts.ioTimeoutMs || 10000; // Longer timeout for rebuild
|
|
11261
|
+
const maxRetries = this.opts.maxRetries || 3;
|
|
11262
|
+
let count = 0;
|
|
11263
|
+
await pRetry(async attempt => {
|
|
11264
|
+
const controller = new AbortController();
|
|
11265
|
+
const timeout = setTimeout(() => controller.abort(), timeoutMs);
|
|
11266
|
+
try {
|
|
11267
|
+
// Auto-detect schema from first line if not initialized
|
|
11268
|
+
if (!this.serializer.schemaManager.isInitialized) {
|
|
11269
|
+
const stream = fs.createReadStream(this.fileHandler.file, {
|
|
11270
|
+
highWaterMark: 64 * 1024,
|
|
11271
|
+
encoding: 'utf8'
|
|
11272
|
+
});
|
|
11273
|
+
const rl = readline.createInterface({
|
|
11274
|
+
input: stream,
|
|
11275
|
+
crlfDelay: Infinity
|
|
11276
|
+
});
|
|
11277
|
+
|
|
11278
|
+
// Handle abort
|
|
11279
|
+
controller.signal.addEventListener('abort', () => {
|
|
11280
|
+
stream.destroy(new Error('AbortError'));
|
|
11281
|
+
rl.close();
|
|
11282
|
+
});
|
|
11283
|
+
var _iteratorAbruptCompletion = false;
|
|
11284
|
+
var _didIteratorError = false;
|
|
11285
|
+
var _iteratorError;
|
|
11286
|
+
try {
|
|
11287
|
+
for (var _iterator = _asyncIterator(rl), _step; _iteratorAbruptCompletion = !(_step = await _iterator.next()).done; _iteratorAbruptCompletion = false) {
|
|
11288
|
+
const line = _step.value;
|
|
11289
|
+
{
|
|
11290
|
+
if (controller.signal.aborted) break;
|
|
11291
|
+
if (line && line.trim()) {
|
|
11292
|
+
try {
|
|
11293
|
+
const firstRecord = JSON.parse(line);
|
|
11294
|
+
if (Array.isArray(firstRecord)) {
|
|
11295
|
+
// Try to infer schema from opts.fields if available
|
|
11296
|
+
if (this.opts.fields && typeof this.opts.fields === 'object') {
|
|
11297
|
+
const fieldNames = Object.keys(this.opts.fields);
|
|
11298
|
+
if (fieldNames.length >= firstRecord.length) {
|
|
11299
|
+
// Use first N fields from opts.fields to match array length
|
|
11300
|
+
const schema = fieldNames.slice(0, firstRecord.length);
|
|
11301
|
+
this.serializer.initializeSchema(schema);
|
|
11302
|
+
if (this.opts.debugMode) {
|
|
11303
|
+
console.log(`🔍 Inferred schema from opts.fields: ${schema.join(', ')}`);
|
|
11304
|
+
}
|
|
11305
|
+
} else {
|
|
11306
|
+
throw new Error(`Cannot rebuild index: array has ${firstRecord.length} elements but opts.fields only defines ${fieldNames.length} fields. Schema must be explicitly provided.`);
|
|
10738
11307
|
}
|
|
10739
11308
|
} else {
|
|
10740
|
-
throw new Error(
|
|
11309
|
+
throw new Error('Cannot rebuild index: schema missing, file uses array format, and opts.fields not provided. The .idx.jdb file is corrupted.');
|
|
10741
11310
|
}
|
|
10742
11311
|
} else {
|
|
10743
|
-
|
|
11312
|
+
// Object format, initialize from object keys
|
|
11313
|
+
this.serializer.initializeSchema(firstRecord, true);
|
|
11314
|
+
if (this.opts.debugMode) {
|
|
11315
|
+
console.log(`🔍 Auto-detected schema from object: ${Object.keys(firstRecord).join(', ')}`);
|
|
11316
|
+
}
|
|
10744
11317
|
}
|
|
10745
|
-
|
|
10746
|
-
|
|
10747
|
-
this.serializer.initializeSchema(firstRecord, true);
|
|
11318
|
+
break;
|
|
11319
|
+
} catch (error) {
|
|
10748
11320
|
if (this.opts.debugMode) {
|
|
10749
|
-
console.
|
|
11321
|
+
console.error('❌ Failed to auto-detect schema:', error.message);
|
|
10750
11322
|
}
|
|
11323
|
+
throw error;
|
|
10751
11324
|
}
|
|
10752
|
-
break;
|
|
10753
|
-
} catch (error) {
|
|
10754
|
-
if (this.opts.debugMode) {
|
|
10755
|
-
console.error('❌ Failed to auto-detect schema:', error.message);
|
|
10756
|
-
}
|
|
10757
|
-
throw error;
|
|
10758
11325
|
}
|
|
10759
11326
|
}
|
|
10760
11327
|
}
|
|
10761
|
-
}
|
|
10762
|
-
|
|
10763
|
-
|
|
10764
|
-
_iteratorError = err;
|
|
10765
|
-
} finally {
|
|
10766
|
-
try {
|
|
10767
|
-
if (_iteratorAbruptCompletion && _iterator.return != null) {
|
|
10768
|
-
await _iterator.return();
|
|
10769
|
-
}
|
|
11328
|
+
} catch (err) {
|
|
11329
|
+
_didIteratorError = true;
|
|
11330
|
+
_iteratorError = err;
|
|
10770
11331
|
} finally {
|
|
10771
|
-
|
|
10772
|
-
|
|
11332
|
+
try {
|
|
11333
|
+
if (_iteratorAbruptCompletion && _iterator.return != null) {
|
|
11334
|
+
await _iterator.return();
|
|
11335
|
+
}
|
|
11336
|
+
} finally {
|
|
11337
|
+
if (_didIteratorError) {
|
|
11338
|
+
throw _iteratorError;
|
|
11339
|
+
}
|
|
10773
11340
|
}
|
|
10774
11341
|
}
|
|
11342
|
+
stream.destroy();
|
|
10775
11343
|
}
|
|
10776
|
-
stream.destroy();
|
|
10777
|
-
}
|
|
10778
11344
|
|
|
10779
|
-
|
|
10780
|
-
|
|
10781
|
-
|
|
10782
|
-
|
|
10783
|
-
|
|
10784
|
-
|
|
10785
|
-
|
|
10786
|
-
|
|
10787
|
-
|
|
10788
|
-
|
|
10789
|
-
|
|
10790
|
-
|
|
10791
|
-
|
|
10792
|
-
|
|
10793
|
-
|
|
11345
|
+
// Use streaming to read records without loading everything into memory
|
|
11346
|
+
// Also rebuild offsets while we're at it
|
|
11347
|
+
|
|
11348
|
+
this.offsets = [];
|
|
11349
|
+
let currentOffset = 0;
|
|
11350
|
+
const stream = fs.createReadStream(this.fileHandler.file, {
|
|
11351
|
+
highWaterMark: 64 * 1024,
|
|
11352
|
+
encoding: 'utf8'
|
|
11353
|
+
});
|
|
11354
|
+
const rl = readline.createInterface({
|
|
11355
|
+
input: stream,
|
|
11356
|
+
crlfDelay: Infinity
|
|
11357
|
+
});
|
|
11358
|
+
|
|
11359
|
+
// Handle abort
|
|
11360
|
+
controller.signal.addEventListener('abort', () => {
|
|
11361
|
+
stream.destroy(new Error('AbortError'));
|
|
11362
|
+
rl.close();
|
|
11363
|
+
});
|
|
11364
|
+
let localCount = 0;
|
|
10794
11365
|
var _iteratorAbruptCompletion2 = false;
|
|
10795
11366
|
var _didIteratorError2 = false;
|
|
10796
11367
|
var _iteratorError2;
|
|
@@ -10798,18 +11369,19 @@ class Database extends events.EventEmitter {
|
|
|
10798
11369
|
for (var _iterator2 = _asyncIterator(rl), _step2; _iteratorAbruptCompletion2 = !(_step2 = await _iterator2.next()).done; _iteratorAbruptCompletion2 = false) {
|
|
10799
11370
|
const line = _step2.value;
|
|
10800
11371
|
{
|
|
11372
|
+
if (controller.signal.aborted) break;
|
|
10801
11373
|
if (line && line.trim()) {
|
|
10802
11374
|
try {
|
|
10803
11375
|
// Record the offset for this line
|
|
10804
11376
|
this.offsets.push(currentOffset);
|
|
10805
11377
|
const record = this.serializer.deserialize(line);
|
|
10806
11378
|
const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
|
|
10807
|
-
await this.indexManager.add(recordWithTerms, count);
|
|
10808
|
-
|
|
11379
|
+
await this.indexManager.add(recordWithTerms, count + localCount);
|
|
11380
|
+
localCount++;
|
|
10809
11381
|
} catch (error) {
|
|
10810
11382
|
// Skip invalid lines
|
|
10811
11383
|
if (this.opts.debugMode) {
|
|
10812
|
-
console.log(`⚠️ Rebuild: Failed to deserialize line ${count}:`, error.message);
|
|
11384
|
+
console.log(`⚠️ Rebuild: Failed to deserialize line ${count + localCount}:`, error.message);
|
|
10813
11385
|
}
|
|
10814
11386
|
}
|
|
10815
11387
|
}
|
|
@@ -10831,27 +11403,31 @@ class Database extends events.EventEmitter {
|
|
|
10831
11403
|
}
|
|
10832
11404
|
}
|
|
10833
11405
|
}
|
|
10834
|
-
|
|
11406
|
+
count += localCount;
|
|
10835
11407
|
stream.destroy();
|
|
11408
|
+
} catch (error) {
|
|
11409
|
+
if (error.name === 'AbortError' || error.code === 'ETIMEDOUT') {
|
|
11410
|
+
if (this.opts.debugMode) {
|
|
11411
|
+
console.log(`⚠️ Index rebuild attempt ${attempt} timed out, retrying...`);
|
|
11412
|
+
}
|
|
11413
|
+
throw error; // p-retry will retry
|
|
11414
|
+
}
|
|
11415
|
+
// For other errors, don't retry
|
|
11416
|
+
throw new pRetry.AbortError(error);
|
|
11417
|
+
} finally {
|
|
11418
|
+
clearTimeout(timeout);
|
|
10836
11419
|
}
|
|
10837
|
-
|
|
10838
|
-
|
|
10839
|
-
|
|
10840
|
-
|
|
10841
|
-
|
|
10842
|
-
|
|
10843
|
-
|
|
10844
|
-
|
|
10845
|
-
}
|
|
10846
|
-
|
|
10847
|
-
// Save the rebuilt index
|
|
10848
|
-
await this._saveIndexDataToFile();
|
|
10849
|
-
} catch (error) {
|
|
10850
|
-
if (this.opts.debugMode) {
|
|
10851
|
-
console.error('❌ Failed to rebuild indexes:', error.message);
|
|
11420
|
+
}, {
|
|
11421
|
+
retries: maxRetries,
|
|
11422
|
+
minTimeout: 200,
|
|
11423
|
+
maxTimeout: 2000,
|
|
11424
|
+
onFailedAttempt: error => {
|
|
11425
|
+
if (this.opts.debugMode) {
|
|
11426
|
+
console.log(`Index rebuild failed (attempt ${error.attemptNumber}), ${error.retriesLeft} retries left`);
|
|
11427
|
+
}
|
|
10852
11428
|
}
|
|
10853
|
-
|
|
10854
|
-
|
|
11429
|
+
});
|
|
11430
|
+
return count;
|
|
10855
11431
|
}
|
|
10856
11432
|
|
|
10857
11433
|
/**
|
|
@@ -11632,56 +12208,11 @@ class Database extends events.EventEmitter {
|
|
|
11632
12208
|
}
|
|
11633
12209
|
}
|
|
11634
12210
|
const groupedRanges = await this.fileHandler.groupedRanges(ranges);
|
|
11635
|
-
const fs = await import('fs');
|
|
11636
12211
|
const fd = await fs.promises.open(this.fileHandler.file, 'r');
|
|
11637
12212
|
try {
|
|
11638
12213
|
for (const groupedRange of groupedRanges) {
|
|
11639
|
-
|
|
11640
|
-
|
|
11641
|
-
var _iteratorError3;
|
|
11642
|
-
try {
|
|
11643
|
-
for (var _iterator3 = _asyncIterator(this.fileHandler.readGroupedRange(groupedRange, fd)), _step3; _iteratorAbruptCompletion3 = !(_step3 = await _iterator3.next()).done; _iteratorAbruptCompletion3 = false) {
|
|
11644
|
-
const row = _step3.value;
|
|
11645
|
-
{
|
|
11646
|
-
try {
|
|
11647
|
-
const record = this.serializer.deserialize(row.line);
|
|
11648
|
-
|
|
11649
|
-
// Get line number from the row, fallback to start offset mapping
|
|
11650
|
-
let lineNumber = row._ !== null && row._ !== undefined ? row._ : startToLineNumber.get(row.start) ?? 0;
|
|
11651
|
-
|
|
11652
|
-
// Restore term IDs to terms
|
|
11653
|
-
const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
|
|
11654
|
-
|
|
11655
|
-
// Add line number
|
|
11656
|
-
recordWithTerms._ = lineNumber;
|
|
11657
|
-
|
|
11658
|
-
// Add score if includeScore is true (default is true)
|
|
11659
|
-
if (opts.includeScore !== false) {
|
|
11660
|
-
recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
|
|
11661
|
-
}
|
|
11662
|
-
results.push(recordWithTerms);
|
|
11663
|
-
} catch (error) {
|
|
11664
|
-
// Skip invalid lines
|
|
11665
|
-
if (this.opts.debugMode) {
|
|
11666
|
-
console.error('Error deserializing record in score():', error);
|
|
11667
|
-
}
|
|
11668
|
-
}
|
|
11669
|
-
}
|
|
11670
|
-
}
|
|
11671
|
-
} catch (err) {
|
|
11672
|
-
_didIteratorError3 = true;
|
|
11673
|
-
_iteratorError3 = err;
|
|
11674
|
-
} finally {
|
|
11675
|
-
try {
|
|
11676
|
-
if (_iteratorAbruptCompletion3 && _iterator3.return != null) {
|
|
11677
|
-
await _iterator3.return();
|
|
11678
|
-
}
|
|
11679
|
-
} finally {
|
|
11680
|
-
if (_didIteratorError3) {
|
|
11681
|
-
throw _iteratorError3;
|
|
11682
|
-
}
|
|
11683
|
-
}
|
|
11684
|
-
}
|
|
12214
|
+
const rangeResults = await this._readGroupedRangeWithRetry(groupedRange, fd, startToLineNumber, scoresByLineNumber, opts);
|
|
12215
|
+
results.push(...rangeResults);
|
|
11685
12216
|
}
|
|
11686
12217
|
} finally {
|
|
11687
12218
|
await fd.close();
|
|
@@ -11721,6 +12252,300 @@ class Database extends events.EventEmitter {
|
|
|
11721
12252
|
return results;
|
|
11722
12253
|
}
|
|
11723
12254
|
|
|
12255
|
+
/**
|
|
12256
|
+
* Read a grouped range with retry logic to handle I/O hangs
|
|
12257
|
+
* @private
|
|
12258
|
+
*/
|
|
12259
|
+
async _readGroupedRangeWithRetry(groupedRange, fd, startToLineNumber, scoresByLineNumber, opts) {
|
|
12260
|
+
// If no timeout configured, use original implementation without retry
|
|
12261
|
+
if (!this.opts.ioTimeoutMs) {
|
|
12262
|
+
return this._readGroupedRangeOriginal(groupedRange, fd, startToLineNumber, scoresByLineNumber, opts);
|
|
12263
|
+
}
|
|
12264
|
+
const timeoutMs = this.opts.ioTimeoutMs || 3000; // Shorter timeout for range reads
|
|
12265
|
+
const maxRetries = this.opts.maxRetries || 3;
|
|
12266
|
+
const results = [];
|
|
12267
|
+
await pRetry(async attempt => {
|
|
12268
|
+
const controller = new AbortController();
|
|
12269
|
+
const timeout = setTimeout(() => controller.abort(), timeoutMs);
|
|
12270
|
+
try {
|
|
12271
|
+
// Collect results from the generator
|
|
12272
|
+
const rangeResults = [];
|
|
12273
|
+
const generator = this.fileHandler.readGroupedRange(groupedRange, fd);
|
|
12274
|
+
|
|
12275
|
+
// Handle abort
|
|
12276
|
+
controller.signal.addEventListener('abort', () => {
|
|
12277
|
+
generator.return(); // Close the generator
|
|
12278
|
+
});
|
|
12279
|
+
var _iteratorAbruptCompletion3 = false;
|
|
12280
|
+
var _didIteratorError3 = false;
|
|
12281
|
+
var _iteratorError3;
|
|
12282
|
+
try {
|
|
12283
|
+
for (var _iterator3 = _asyncIterator(generator), _step3; _iteratorAbruptCompletion3 = !(_step3 = await _iterator3.next()).done; _iteratorAbruptCompletion3 = false) {
|
|
12284
|
+
const row = _step3.value;
|
|
12285
|
+
{
|
|
12286
|
+
if (controller.signal.aborted) break;
|
|
12287
|
+
try {
|
|
12288
|
+
const record = this.serializer.deserialize(row.line);
|
|
12289
|
+
|
|
12290
|
+
// Get line number from the row, fallback to start offset mapping
|
|
12291
|
+
let lineNumber = row._ !== null && row._ !== undefined ? row._ : startToLineNumber.get(row.start) ?? 0;
|
|
12292
|
+
|
|
12293
|
+
// Restore term IDs to terms
|
|
12294
|
+
const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
|
|
12295
|
+
|
|
12296
|
+
// Add line number
|
|
12297
|
+
recordWithTerms._ = lineNumber;
|
|
12298
|
+
|
|
12299
|
+
// Add score if includeScore is true (default is true)
|
|
12300
|
+
if (opts.includeScore !== false) {
|
|
12301
|
+
recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
|
|
12302
|
+
}
|
|
12303
|
+
rangeResults.push(recordWithTerms);
|
|
12304
|
+
} catch (error) {
|
|
12305
|
+
// Skip invalid lines
|
|
12306
|
+
if (this.opts.debugMode) {
|
|
12307
|
+
console.error('Error deserializing record in score():', error);
|
|
12308
|
+
}
|
|
12309
|
+
}
|
|
12310
|
+
}
|
|
12311
|
+
}
|
|
12312
|
+
} catch (err) {
|
|
12313
|
+
_didIteratorError3 = true;
|
|
12314
|
+
_iteratorError3 = err;
|
|
12315
|
+
} finally {
|
|
12316
|
+
try {
|
|
12317
|
+
if (_iteratorAbruptCompletion3 && _iterator3.return != null) {
|
|
12318
|
+
await _iterator3.return();
|
|
12319
|
+
}
|
|
12320
|
+
} finally {
|
|
12321
|
+
if (_didIteratorError3) {
|
|
12322
|
+
throw _iteratorError3;
|
|
12323
|
+
}
|
|
12324
|
+
}
|
|
12325
|
+
}
|
|
12326
|
+
results.push(...rangeResults);
|
|
12327
|
+
} catch (error) {
|
|
12328
|
+
if (error.name === 'AbortError' || error.code === 'ETIMEDOUT') {
|
|
12329
|
+
if (this.opts.debugMode) {
|
|
12330
|
+
console.log(`⚠️ Score range read attempt ${attempt} timed out, retrying...`);
|
|
12331
|
+
}
|
|
12332
|
+
throw error; // p-retry will retry
|
|
12333
|
+
}
|
|
12334
|
+
// For other errors, don't retry
|
|
12335
|
+
throw new pRetry.AbortError(error);
|
|
12336
|
+
} finally {
|
|
12337
|
+
clearTimeout(timeout);
|
|
12338
|
+
}
|
|
12339
|
+
}, {
|
|
12340
|
+
retries: maxRetries,
|
|
12341
|
+
minTimeout: 100,
|
|
12342
|
+
maxTimeout: 500,
|
|
12343
|
+
onFailedAttempt: error => {
|
|
12344
|
+
if (this.opts.debugMode) {
|
|
12345
|
+
console.log(`Score range read failed (attempt ${error.attemptNumber}), ${error.retriesLeft} retries left`);
|
|
12346
|
+
}
|
|
12347
|
+
}
|
|
12348
|
+
});
|
|
12349
|
+
return results;
|
|
12350
|
+
}
|
|
12351
|
+
|
|
12352
|
+
/**
|
|
12353
|
+
* Original read grouped range logic without retry (for backward compatibility)
|
|
12354
|
+
* @private
|
|
12355
|
+
*/
|
|
12356
|
+
async _readGroupedRangeOriginal(groupedRange, fd, startToLineNumber, scoresByLineNumber, opts) {
|
|
12357
|
+
const results = [];
|
|
12358
|
+
|
|
12359
|
+
// Collect results from the generator
|
|
12360
|
+
const rangeResults = [];
|
|
12361
|
+
const generator = this.fileHandler.readGroupedRange(groupedRange, fd);
|
|
12362
|
+
var _iteratorAbruptCompletion4 = false;
|
|
12363
|
+
var _didIteratorError4 = false;
|
|
12364
|
+
var _iteratorError4;
|
|
12365
|
+
try {
|
|
12366
|
+
for (var _iterator4 = _asyncIterator(generator), _step4; _iteratorAbruptCompletion4 = !(_step4 = await _iterator4.next()).done; _iteratorAbruptCompletion4 = false) {
|
|
12367
|
+
const row = _step4.value;
|
|
12368
|
+
{
|
|
12369
|
+
try {
|
|
12370
|
+
const record = this.serializer.deserialize(row.line);
|
|
12371
|
+
|
|
12372
|
+
// Get line number from the row, fallback to start offset mapping
|
|
12373
|
+
let lineNumber = row._ !== null && row._ !== undefined ? row._ : startToLineNumber.get(row.start) ?? 0;
|
|
12374
|
+
|
|
12375
|
+
// Restore term IDs to terms
|
|
12376
|
+
const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
|
|
12377
|
+
|
|
12378
|
+
// Add line number
|
|
12379
|
+
recordWithTerms._ = lineNumber;
|
|
12380
|
+
|
|
12381
|
+
// Add score if includeScore is true (default is true)
|
|
12382
|
+
if (opts.includeScore !== false) {
|
|
12383
|
+
recordWithTerms.score = scoresByLineNumber.get(lineNumber) || 0;
|
|
12384
|
+
}
|
|
12385
|
+
rangeResults.push(recordWithTerms);
|
|
12386
|
+
} catch (error) {
|
|
12387
|
+
// Skip invalid lines
|
|
12388
|
+
if (this.opts.debugMode) {
|
|
12389
|
+
console.error('Error deserializing record in score():', error);
|
|
12390
|
+
}
|
|
12391
|
+
}
|
|
12392
|
+
}
|
|
12393
|
+
}
|
|
12394
|
+
} catch (err) {
|
|
12395
|
+
_didIteratorError4 = true;
|
|
12396
|
+
_iteratorError4 = err;
|
|
12397
|
+
} finally {
|
|
12398
|
+
try {
|
|
12399
|
+
if (_iteratorAbruptCompletion4 && _iterator4.return != null) {
|
|
12400
|
+
await _iterator4.return();
|
|
12401
|
+
}
|
|
12402
|
+
} finally {
|
|
12403
|
+
if (_didIteratorError4) {
|
|
12404
|
+
throw _iteratorError4;
|
|
12405
|
+
}
|
|
12406
|
+
}
|
|
12407
|
+
}
|
|
12408
|
+
results.push(...rangeResults);
|
|
12409
|
+
return results;
|
|
12410
|
+
}
|
|
12411
|
+
|
|
12412
|
+
/**
|
|
12413
|
+
* Original rebuild indexes logic without retry (for backward compatibility)
|
|
12414
|
+
* @private
|
|
12415
|
+
*/
|
|
12416
|
+
async _rebuildIndexesOriginal() {
|
|
12417
|
+
let count = 0;
|
|
12418
|
+
|
|
12419
|
+
// Auto-detect schema from first line if not initialized
|
|
12420
|
+
if (!this.serializer.schemaManager.isInitialized) {
|
|
12421
|
+
const stream = fs.createReadStream(this.fileHandler.file, {
|
|
12422
|
+
highWaterMark: 64 * 1024,
|
|
12423
|
+
encoding: 'utf8'
|
|
12424
|
+
});
|
|
12425
|
+
const rl = readline.createInterface({
|
|
12426
|
+
input: stream,
|
|
12427
|
+
crlfDelay: Infinity
|
|
12428
|
+
});
|
|
12429
|
+
var _iteratorAbruptCompletion5 = false;
|
|
12430
|
+
var _didIteratorError5 = false;
|
|
12431
|
+
var _iteratorError5;
|
|
12432
|
+
try {
|
|
12433
|
+
for (var _iterator5 = _asyncIterator(rl), _step5; _iteratorAbruptCompletion5 = !(_step5 = await _iterator5.next()).done; _iteratorAbruptCompletion5 = false) {
|
|
12434
|
+
const line = _step5.value;
|
|
12435
|
+
{
|
|
12436
|
+
if (line && line.trim()) {
|
|
12437
|
+
try {
|
|
12438
|
+
const firstRecord = JSON.parse(line);
|
|
12439
|
+
if (Array.isArray(firstRecord)) {
|
|
12440
|
+
// Try to infer schema from opts.fields if available
|
|
12441
|
+
if (this.opts.fields && typeof this.opts.fields === 'object') {
|
|
12442
|
+
const fieldNames = Object.keys(this.opts.fields);
|
|
12443
|
+
if (fieldNames.length >= firstRecord.length) {
|
|
12444
|
+
// Use first N fields from opts.fields to match array length
|
|
12445
|
+
const schema = fieldNames.slice(0, firstRecord.length);
|
|
12446
|
+
this.serializer.initializeSchema(schema);
|
|
12447
|
+
if (this.opts.debugMode) {
|
|
12448
|
+
console.log(`🔍 Inferred schema from opts.fields: ${schema.join(', ')}`);
|
|
12449
|
+
}
|
|
12450
|
+
} else {
|
|
12451
|
+
throw new Error(`Cannot rebuild index: array has ${firstRecord.length} elements but opts.fields only defines ${fieldNames.length} fields. Schema must be explicitly provided.`);
|
|
12452
|
+
}
|
|
12453
|
+
} else {
|
|
12454
|
+
throw new Error('Cannot rebuild index: schema missing, file uses array format, and opts.fields not provided. The .idx.jdb file is corrupted.');
|
|
12455
|
+
}
|
|
12456
|
+
} else {
|
|
12457
|
+
// Object format, initialize from object keys
|
|
12458
|
+
this.serializer.initializeSchema(firstRecord, true);
|
|
12459
|
+
if (this.opts.debugMode) {
|
|
12460
|
+
console.log(`🔍 Auto-detected schema from object: ${Object.keys(firstRecord).join(', ')}`);
|
|
12461
|
+
}
|
|
12462
|
+
}
|
|
12463
|
+
break;
|
|
12464
|
+
} catch (error) {
|
|
12465
|
+
if (this.opts.debugMode) {
|
|
12466
|
+
console.error('❌ Failed to auto-detect schema:', error.message);
|
|
12467
|
+
}
|
|
12468
|
+
throw error;
|
|
12469
|
+
}
|
|
12470
|
+
}
|
|
12471
|
+
}
|
|
12472
|
+
}
|
|
12473
|
+
} catch (err) {
|
|
12474
|
+
_didIteratorError5 = true;
|
|
12475
|
+
_iteratorError5 = err;
|
|
12476
|
+
} finally {
|
|
12477
|
+
try {
|
|
12478
|
+
if (_iteratorAbruptCompletion5 && _iterator5.return != null) {
|
|
12479
|
+
await _iterator5.return();
|
|
12480
|
+
}
|
|
12481
|
+
} finally {
|
|
12482
|
+
if (_didIteratorError5) {
|
|
12483
|
+
throw _iteratorError5;
|
|
12484
|
+
}
|
|
12485
|
+
}
|
|
12486
|
+
}
|
|
12487
|
+
stream.destroy();
|
|
12488
|
+
}
|
|
12489
|
+
|
|
12490
|
+
// Use streaming to read records without loading everything into memory
|
|
12491
|
+
// Also rebuild offsets while we're at it
|
|
12492
|
+
this.offsets = [];
|
|
12493
|
+
let currentOffset = 0;
|
|
12494
|
+
const stream = fs.createReadStream(this.fileHandler.file, {
|
|
12495
|
+
highWaterMark: 64 * 1024,
|
|
12496
|
+
encoding: 'utf8'
|
|
12497
|
+
});
|
|
12498
|
+
const rl = readline.createInterface({
|
|
12499
|
+
input: stream,
|
|
12500
|
+
crlfDelay: Infinity
|
|
12501
|
+
});
|
|
12502
|
+
try {
|
|
12503
|
+
var _iteratorAbruptCompletion6 = false;
|
|
12504
|
+
var _didIteratorError6 = false;
|
|
12505
|
+
var _iteratorError6;
|
|
12506
|
+
try {
|
|
12507
|
+
for (var _iterator6 = _asyncIterator(rl), _step6; _iteratorAbruptCompletion6 = !(_step6 = await _iterator6.next()).done; _iteratorAbruptCompletion6 = false) {
|
|
12508
|
+
const line = _step6.value;
|
|
12509
|
+
{
|
|
12510
|
+
if (line && line.trim()) {
|
|
12511
|
+
try {
|
|
12512
|
+
// Record the offset for this line
|
|
12513
|
+
this.offsets.push(currentOffset);
|
|
12514
|
+
const record = this.serializer.deserialize(line);
|
|
12515
|
+
const recordWithTerms = this.restoreTermIdsAfterDeserialization(record);
|
|
12516
|
+
await this.indexManager.add(recordWithTerms, count);
|
|
12517
|
+
count++;
|
|
12518
|
+
} catch (error) {
|
|
12519
|
+
// Skip invalid lines
|
|
12520
|
+
if (this.opts.debugMode) {
|
|
12521
|
+
console.log(`⚠️ Rebuild: Failed to deserialize line ${count}:`, error.message);
|
|
12522
|
+
}
|
|
12523
|
+
}
|
|
12524
|
+
}
|
|
12525
|
+
// Update offset for next line (including newline character)
|
|
12526
|
+
currentOffset += Buffer.byteLength(line, 'utf8') + 1;
|
|
12527
|
+
}
|
|
12528
|
+
}
|
|
12529
|
+
} catch (err) {
|
|
12530
|
+
_didIteratorError6 = true;
|
|
12531
|
+
_iteratorError6 = err;
|
|
12532
|
+
} finally {
|
|
12533
|
+
try {
|
|
12534
|
+
if (_iteratorAbruptCompletion6 && _iterator6.return != null) {
|
|
12535
|
+
await _iterator6.return();
|
|
12536
|
+
}
|
|
12537
|
+
} finally {
|
|
12538
|
+
if (_didIteratorError6) {
|
|
12539
|
+
throw _iteratorError6;
|
|
12540
|
+
}
|
|
12541
|
+
}
|
|
12542
|
+
}
|
|
12543
|
+
} finally {
|
|
12544
|
+
stream.destroy();
|
|
12545
|
+
}
|
|
12546
|
+
return count;
|
|
12547
|
+
}
|
|
12548
|
+
|
|
11724
12549
|
/**
|
|
11725
12550
|
* Wait for all pending operations to complete
|
|
11726
12551
|
*/
|
|
@@ -12030,7 +12855,6 @@ class Database extends events.EventEmitter {
|
|
|
12030
12855
|
|
|
12031
12856
|
// Method 1: Try to read the entire file and filter
|
|
12032
12857
|
if (this.fileHandler.exists()) {
|
|
12033
|
-
const fs = await import('fs');
|
|
12034
12858
|
const fileContent = await fs.promises.readFile(this.normalizedFile, 'utf8');
|
|
12035
12859
|
const lines = fileContent.split('\n').filter(line => line.trim());
|
|
12036
12860
|
for (let i = 0; i < lines.length && i < this.offsets.length; i++) {
|
|
@@ -12730,10 +13554,9 @@ class Database extends events.EventEmitter {
|
|
|
12730
13554
|
return;
|
|
12731
13555
|
}
|
|
12732
13556
|
_this._offsetRecoveryInProgress = true;
|
|
12733
|
-
const fsModule = _this._fsModule || (_this._fsModule = yield _awaitAsyncGenerator(import('fs')));
|
|
12734
13557
|
let fd;
|
|
12735
13558
|
try {
|
|
12736
|
-
fd = yield _awaitAsyncGenerator(
|
|
13559
|
+
fd = yield _awaitAsyncGenerator(fs.promises.open(_this.fileHandler.file, 'r'));
|
|
12737
13560
|
} catch (error) {
|
|
12738
13561
|
_this._offsetRecoveryInProgress = false;
|
|
12739
13562
|
if (_this.opts.debugMode) {
|
|
@@ -13036,16 +13859,15 @@ class Database extends events.EventEmitter {
|
|
|
13036
13859
|
// OPTIMIZATION: Use ranges instead of reading entire file
|
|
13037
13860
|
const ranges = _this2.getRanges(map);
|
|
13038
13861
|
const groupedRanges = yield _awaitAsyncGenerator(_this2.fileHandler.groupedRanges(ranges));
|
|
13039
|
-
const fs = yield _awaitAsyncGenerator(import('fs'));
|
|
13040
13862
|
const fd = yield _awaitAsyncGenerator(fs.promises.open(_this2.fileHandler.file, 'r'));
|
|
13041
13863
|
try {
|
|
13042
13864
|
for (const groupedRange of groupedRanges) {
|
|
13043
|
-
var
|
|
13044
|
-
var
|
|
13045
|
-
var
|
|
13865
|
+
var _iteratorAbruptCompletion7 = false;
|
|
13866
|
+
var _didIteratorError7 = false;
|
|
13867
|
+
var _iteratorError7;
|
|
13046
13868
|
try {
|
|
13047
|
-
for (var
|
|
13048
|
-
const row =
|
|
13869
|
+
for (var _iterator7 = _asyncIterator(_this2.fileHandler.readGroupedRange(groupedRange, fd)), _step7; _iteratorAbruptCompletion7 = !(_step7 = yield _awaitAsyncGenerator(_iterator7.next())).done; _iteratorAbruptCompletion7 = false) {
|
|
13870
|
+
const row = _step7.value;
|
|
13049
13871
|
{
|
|
13050
13872
|
if (options.limit && count >= options.limit) {
|
|
13051
13873
|
break;
|
|
@@ -13119,28 +13941,28 @@ class Database extends events.EventEmitter {
|
|
|
13119
13941
|
}
|
|
13120
13942
|
}
|
|
13121
13943
|
if (!_this2._offsetRecoveryInProgress) {
|
|
13122
|
-
var
|
|
13123
|
-
var
|
|
13124
|
-
var
|
|
13944
|
+
var _iteratorAbruptCompletion8 = false;
|
|
13945
|
+
var _didIteratorError8 = false;
|
|
13946
|
+
var _iteratorError8;
|
|
13125
13947
|
try {
|
|
13126
|
-
for (var
|
|
13127
|
-
const recoveredEntry =
|
|
13948
|
+
for (var _iterator8 = _asyncIterator(_this2._streamingRecoveryGenerator(criteria, options, count, map, remainingSkip)), _step8; _iteratorAbruptCompletion8 = !(_step8 = yield _awaitAsyncGenerator(_iterator8.next())).done; _iteratorAbruptCompletion8 = false) {
|
|
13949
|
+
const recoveredEntry = _step8.value;
|
|
13128
13950
|
{
|
|
13129
13951
|
yield recoveredEntry;
|
|
13130
13952
|
count++;
|
|
13131
13953
|
}
|
|
13132
13954
|
}
|
|
13133
13955
|
} catch (err) {
|
|
13134
|
-
|
|
13135
|
-
|
|
13956
|
+
_didIteratorError8 = true;
|
|
13957
|
+
_iteratorError8 = err;
|
|
13136
13958
|
} finally {
|
|
13137
13959
|
try {
|
|
13138
|
-
if (
|
|
13139
|
-
yield _awaitAsyncGenerator(
|
|
13960
|
+
if (_iteratorAbruptCompletion8 && _iterator8.return != null) {
|
|
13961
|
+
yield _awaitAsyncGenerator(_iterator8.return());
|
|
13140
13962
|
}
|
|
13141
13963
|
} finally {
|
|
13142
|
-
if (
|
|
13143
|
-
throw
|
|
13964
|
+
if (_didIteratorError8) {
|
|
13965
|
+
throw _iteratorError8;
|
|
13144
13966
|
}
|
|
13145
13967
|
}
|
|
13146
13968
|
}
|
|
@@ -13152,16 +13974,16 @@ class Database extends events.EventEmitter {
|
|
|
13152
13974
|
}
|
|
13153
13975
|
}
|
|
13154
13976
|
} catch (err) {
|
|
13155
|
-
|
|
13156
|
-
|
|
13977
|
+
_didIteratorError7 = true;
|
|
13978
|
+
_iteratorError7 = err;
|
|
13157
13979
|
} finally {
|
|
13158
13980
|
try {
|
|
13159
|
-
if (
|
|
13160
|
-
yield _awaitAsyncGenerator(
|
|
13981
|
+
if (_iteratorAbruptCompletion7 && _iterator7.return != null) {
|
|
13982
|
+
yield _awaitAsyncGenerator(_iterator7.return());
|
|
13161
13983
|
}
|
|
13162
13984
|
} finally {
|
|
13163
|
-
if (
|
|
13164
|
-
throw
|
|
13985
|
+
if (_didIteratorError7) {
|
|
13986
|
+
throw _iteratorError7;
|
|
13165
13987
|
}
|
|
13166
13988
|
}
|
|
13167
13989
|
}
|
|
@@ -13227,12 +14049,12 @@ class Database extends events.EventEmitter {
|
|
|
13227
14049
|
if (options.limit && count >= options.limit) {
|
|
13228
14050
|
break;
|
|
13229
14051
|
}
|
|
13230
|
-
var
|
|
13231
|
-
var
|
|
13232
|
-
var
|
|
14052
|
+
var _iteratorAbruptCompletion9 = false;
|
|
14053
|
+
var _didIteratorError9 = false;
|
|
14054
|
+
var _iteratorError9;
|
|
13233
14055
|
try {
|
|
13234
|
-
for (var
|
|
13235
|
-
const row =
|
|
14056
|
+
for (var _iterator9 = _asyncIterator(_this2.fileHandler.readGroupedRange(groupedRange, fd)), _step9; _iteratorAbruptCompletion9 = !(_step9 = yield _awaitAsyncGenerator(_iterator9.next())).done; _iteratorAbruptCompletion9 = false) {
|
|
14057
|
+
const row = _step9.value;
|
|
13236
14058
|
{
|
|
13237
14059
|
if (options.limit && count >= options.limit) {
|
|
13238
14060
|
break;
|
|
@@ -13313,28 +14135,28 @@ class Database extends events.EventEmitter {
|
|
|
13313
14135
|
}
|
|
13314
14136
|
}
|
|
13315
14137
|
if (!_this2._offsetRecoveryInProgress) {
|
|
13316
|
-
var
|
|
13317
|
-
var
|
|
13318
|
-
var
|
|
14138
|
+
var _iteratorAbruptCompletion0 = false;
|
|
14139
|
+
var _didIteratorError0 = false;
|
|
14140
|
+
var _iteratorError0;
|
|
13319
14141
|
try {
|
|
13320
|
-
for (var
|
|
13321
|
-
const recoveredEntry =
|
|
14142
|
+
for (var _iterator0 = _asyncIterator(_this2._streamingRecoveryGenerator(criteria, options, count, map, remainingSkip)), _step0; _iteratorAbruptCompletion0 = !(_step0 = yield _awaitAsyncGenerator(_iterator0.next())).done; _iteratorAbruptCompletion0 = false) {
|
|
14143
|
+
const recoveredEntry = _step0.value;
|
|
13322
14144
|
{
|
|
13323
14145
|
yield recoveredEntry;
|
|
13324
14146
|
count++;
|
|
13325
14147
|
}
|
|
13326
14148
|
}
|
|
13327
14149
|
} catch (err) {
|
|
13328
|
-
|
|
13329
|
-
|
|
14150
|
+
_didIteratorError0 = true;
|
|
14151
|
+
_iteratorError0 = err;
|
|
13330
14152
|
} finally {
|
|
13331
14153
|
try {
|
|
13332
|
-
if (
|
|
13333
|
-
yield _awaitAsyncGenerator(
|
|
14154
|
+
if (_iteratorAbruptCompletion0 && _iterator0.return != null) {
|
|
14155
|
+
yield _awaitAsyncGenerator(_iterator0.return());
|
|
13334
14156
|
}
|
|
13335
14157
|
} finally {
|
|
13336
|
-
if (
|
|
13337
|
-
throw
|
|
14158
|
+
if (_didIteratorError0) {
|
|
14159
|
+
throw _iteratorError0;
|
|
13338
14160
|
}
|
|
13339
14161
|
}
|
|
13340
14162
|
}
|
|
@@ -13346,16 +14168,16 @@ class Database extends events.EventEmitter {
|
|
|
13346
14168
|
}
|
|
13347
14169
|
}
|
|
13348
14170
|
} catch (err) {
|
|
13349
|
-
|
|
13350
|
-
|
|
14171
|
+
_didIteratorError9 = true;
|
|
14172
|
+
_iteratorError9 = err;
|
|
13351
14173
|
} finally {
|
|
13352
14174
|
try {
|
|
13353
|
-
if (
|
|
13354
|
-
yield _awaitAsyncGenerator(
|
|
14175
|
+
if (_iteratorAbruptCompletion9 && _iterator9.return != null) {
|
|
14176
|
+
yield _awaitAsyncGenerator(_iterator9.return());
|
|
13355
14177
|
}
|
|
13356
14178
|
} finally {
|
|
13357
|
-
if (
|
|
13358
|
-
throw
|
|
14179
|
+
if (_didIteratorError9) {
|
|
14180
|
+
throw _iteratorError9;
|
|
13359
14181
|
}
|
|
13360
14182
|
}
|
|
13361
14183
|
}
|
|
@@ -13410,12 +14232,12 @@ class Database extends events.EventEmitter {
|
|
|
13410
14232
|
|
|
13411
14233
|
try {
|
|
13412
14234
|
// Always use walk() now that the bug is fixed - it works for both small and large datasets
|
|
13413
|
-
var
|
|
13414
|
-
var
|
|
13415
|
-
var
|
|
14235
|
+
var _iteratorAbruptCompletion1 = false;
|
|
14236
|
+
var _didIteratorError1 = false;
|
|
14237
|
+
var _iteratorError1;
|
|
13416
14238
|
try {
|
|
13417
|
-
for (var
|
|
13418
|
-
const entry =
|
|
14239
|
+
for (var _iterator1 = _asyncIterator(_this3.walk(criteria, options)), _step1; _iteratorAbruptCompletion1 = !(_step1 = yield _awaitAsyncGenerator(_iterator1.next())).done; _iteratorAbruptCompletion1 = false) {
|
|
14240
|
+
const entry = _step1.value;
|
|
13419
14241
|
{
|
|
13420
14242
|
processedCount++;
|
|
13421
14243
|
|
|
@@ -13475,16 +14297,16 @@ class Database extends events.EventEmitter {
|
|
|
13475
14297
|
|
|
13476
14298
|
// Process remaining records in buffers
|
|
13477
14299
|
} catch (err) {
|
|
13478
|
-
|
|
13479
|
-
|
|
14300
|
+
_didIteratorError1 = true;
|
|
14301
|
+
_iteratorError1 = err;
|
|
13480
14302
|
} finally {
|
|
13481
14303
|
try {
|
|
13482
|
-
if (
|
|
13483
|
-
yield _awaitAsyncGenerator(
|
|
14304
|
+
if (_iteratorAbruptCompletion1 && _iterator1.return != null) {
|
|
14305
|
+
yield _awaitAsyncGenerator(_iterator1.return());
|
|
13484
14306
|
}
|
|
13485
14307
|
} finally {
|
|
13486
|
-
if (
|
|
13487
|
-
throw
|
|
14308
|
+
if (_didIteratorError1) {
|
|
14309
|
+
throw _iteratorError1;
|
|
13488
14310
|
}
|
|
13489
14311
|
}
|
|
13490
14312
|
}
|
|
@@ -13677,7 +14499,6 @@ class Database extends events.EventEmitter {
|
|
|
13677
14499
|
// If the .idx.jdb file exists and has data, and we're trying to save empty index,
|
|
13678
14500
|
// skip the save to prevent corruption
|
|
13679
14501
|
if (isEmpty && !this.offsets?.length) {
|
|
13680
|
-
const fs = await import('fs');
|
|
13681
14502
|
if (fs.existsSync(idxPath)) {
|
|
13682
14503
|
try {
|
|
13683
14504
|
const existingData = JSON.parse(await fs.promises.readFile(idxPath, 'utf8'));
|