@joystick.js/db-canary 0.0.0-canary.2268 → 0.0.0-canary.2270
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/server/lib/batched_write_queue.js +1 -0
- package/dist/server/lib/processing_lane.js +1 -0
- package/dist/server/lib/write_queue.js +1 -1
- package/package.json +2 -2
- package/src/server/lib/batched_write_queue.js +331 -0
- package/src/server/lib/processing_lane.js +417 -0
- package/src/server/lib/write_queue.js +137 -2
- package/tests/server/lib/batched_write_queue.test.js +402 -0
- package/tests/server/lib/write_queue_integration.test.js +186 -0
|
@@ -0,0 +1,417 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Processing lane for batched write operations.
|
|
3
|
+
* Each lane processes operations independently to enable parallel processing
|
|
4
|
+
* while maintaining operation ordering within each lane.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import create_logger from './logger.js';
|
|
8
|
+
|
|
9
|
+
const { create_context_logger } = create_logger('processing_lane');
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Processing lane that batches and processes write operations independently.
|
|
13
|
+
* Provides batching, timeout handling, and transaction management per lane.
|
|
14
|
+
*/
|
|
15
|
+
class ProcessingLane {
  /**
   * Creates a new ProcessingLane instance.
   * @param {Object} options - Configuration options
   * @param {number} [options.batch_size=100] - Maximum operations per batch
   * @param {number} [options.batch_timeout=10] - Maximum wait time in milliseconds before a partial batch is processed
   * @param {number} [options.lane_id=0] - Unique identifier for this lane
   */
  constructor(options = {}) {
    this.batch_size = options.batch_size || 100;
    this.batch_timeout = options.batch_timeout || 10;
    this.lane_id = options.lane_id || 0;

    /** @type {Array<Object>} Current batch of pending operations */
    this.current_batch = [];

    /** @type {boolean} Whether lane is currently processing a batch */
    this.processing = false;

    /** @type {boolean} Whether lane is shutting down (new operations are rejected) */
    this.shutting_down = false;

    /** @type {NodeJS.Timeout|null} Timeout handle for batch processing */
    this.batch_timeout_handle = null;

    /** @type {Object} Lane-specific statistics */
    this.stats = {
      total_operations: 0,
      completed_operations: 0,
      failed_operations: 0,
      batches_processed: 0,
      current_batch_size: 0,
      max_batch_size: 0,
      total_batch_wait_time_ms: 0,
      total_batch_processing_time_ms: 0
    };

    this.log = create_context_logger(`lane_${this.lane_id}`);
  }

  /**
   * Adds an operation to this lane's batch queue.
   * @param {Object} operation - Operation to add to batch
   * @param {function} operation.operation_fn - Async function that performs the write operation
   * @param {Object} [operation.context={}] - Additional context for logging and debugging
   * @returns {Promise<*>} Promise that resolves with the operation result
   * @throws {Error} When lane is shutting down
   */
  async add_operation(operation) {
    if (this.shutting_down) {
      throw new Error('Processing lane shutting down');
    }

    // NOTE: the executor runs synchronously, so a second shutting_down check
    // inside it (as in earlier revisions) was dead code and has been removed.
    return new Promise((resolve, reject) => {
      const batch_item = {
        ...operation,
        resolve,
        reject,
        enqueued_at: Date.now(),
        id: this.generate_operation_id()
      };

      this.current_batch.push(batch_item);
      this.stats.total_operations++;
      this.stats.current_batch_size = this.current_batch.length;

      if (this.stats.current_batch_size > this.stats.max_batch_size) {
        this.stats.max_batch_size = this.stats.current_batch_size;
      }

      this.log.debug('Operation added to batch', {
        lane_id: this.lane_id,
        operation_id: batch_item.id,
        batch_size: this.stats.current_batch_size,
        context: operation.context
      });

      // Process batch if it reaches the configured size
      if (this.current_batch.length >= this.batch_size) {
        // Fire-and-forget: results are delivered via each item's resolve/reject.
        this.process_current_batch();
      } else if (this.current_batch.length === 1) {
        // Start timeout for first operation in batch
        this.start_batch_timeout();
      }
    });
  }

  /**
   * Starts the batch timeout to ensure batches are processed within time limit.
   */
  start_batch_timeout() {
    if (this.batch_timeout_handle) {
      clearTimeout(this.batch_timeout_handle);
    }

    this.batch_timeout_handle = setTimeout(() => {
      if (this.current_batch.length > 0 && !this.processing) {
        this.log.debug('Batch timeout triggered', {
          lane_id: this.lane_id,
          batch_size: this.current_batch.length
        });
        this.process_current_batch();
      }
    }, this.batch_timeout);
  }

  /**
   * Processes the current batch of operations in a single transaction.
   * Intentionally still runs while shutting down so that operations accepted
   * before shutdown are drained rather than dropped (previously the guard
   * checked shutting_down and silently skipped the final drain, so shutdown()
   * rejected operations it claimed to process).
   * @returns {Promise<void>} Promise that resolves when batch processing is complete
   */
  async process_current_batch() {
    if (this.processing || this.current_batch.length === 0) {
      return;
    }

    // Clear timeout since we're processing now
    if (this.batch_timeout_handle) {
      clearTimeout(this.batch_timeout_handle);
      this.batch_timeout_handle = null;
    }

    this.processing = true;
    const batch_to_process = [...this.current_batch];
    this.current_batch = [];
    this.stats.current_batch_size = 0;

    const batch_start_time = Date.now();
    const oldest_operation_time = Math.min(...batch_to_process.map(op => op.enqueued_at));
    const batch_wait_time_ms = batch_start_time - oldest_operation_time;

    this.stats.total_batch_wait_time_ms += batch_wait_time_ms;
    this.stats.batches_processed++;

    this.log.debug('Processing batch', {
      lane_id: this.lane_id,
      batch_size: batch_to_process.length,
      batch_wait_time_ms
    });

    try {
      // Execute all operations in the batch within a single transaction context
      const results = await this.execute_batch_transaction(batch_to_process);

      const batch_processing_time_ms = Date.now() - batch_start_time;
      this.stats.total_batch_processing_time_ms += batch_processing_time_ms;
      this.stats.completed_operations += batch_to_process.length;

      // Resolve all operations with their results
      batch_to_process.forEach((operation, index) => {
        operation.resolve(results[index]);
      });

      this.log.debug('Batch completed successfully', {
        lane_id: this.lane_id,
        batch_size: batch_to_process.length,
        batch_wait_time_ms,
        batch_processing_time_ms
      });

    } catch (error) {
      const batch_processing_time_ms = Date.now() - batch_start_time;
      this.stats.total_batch_processing_time_ms += batch_processing_time_ms;
      this.stats.failed_operations += batch_to_process.length;

      // Reject all operations with the batch error
      batch_to_process.forEach(operation => {
        operation.reject(error);
      });

      this.log.error('Batch processing failed', {
        lane_id: this.lane_id,
        batch_size: batch_to_process.length,
        batch_wait_time_ms,
        batch_processing_time_ms,
        error: error.message
      });
    }

    this.processing = false;

    // Process next batch if operations are waiting. During shutdown, drain
    // immediately instead of waiting on a new timer.
    if (this.current_batch.length > 0) {
      if (this.current_batch.length >= this.batch_size || this.shutting_down) {
        setImmediate(() => this.process_current_batch());
      } else {
        this.start_batch_timeout();
      }
    }
  }

  /**
   * Executes all operations in a batch within a single transaction context.
   * The first failure aborts the whole batch; the error propagates to the
   * caller, which rejects every operation in the batch.
   * @param {Array<Object>} batch_operations - Operations to execute in batch
   * @returns {Promise<Array<*>>} Promise that resolves with array of operation results
   */
  async execute_batch_transaction(batch_operations) {
    const results = [];

    // Execute each operation sequentially and collect results with retry logic.
    // (Earlier revisions wrapped this in a try/catch that only rethrew.)
    for (const operation of batch_operations) {
      results.push(await this.execute_with_retry(operation.operation_fn, operation.context));
    }

    return results;
  }

  /**
   * Executes an operation with retry logic and exponential backoff.
   * @param {function} operation_fn - Async function to execute
   * @param {Object} context - Context for logging
   * @param {number} [max_retries=3] - Maximum number of retry attempts
   * @returns {Promise<*>} Promise that resolves with operation result
   * @throws {Error} When all retry attempts are exhausted or the error is not retryable
   */
  async execute_with_retry(operation_fn, context, max_retries = 3) {
    let last_error = null;

    for (let attempt = 1; attempt <= max_retries; attempt++) {
      try {
        return await operation_fn();
      } catch (error) {
        last_error = error;

        if (this.is_retryable_error(error) && attempt < max_retries) {
          const delay_ms = this.calculate_backoff_delay(attempt);

          this.log.warn('Operation failed, retrying', {
            lane_id: this.lane_id,
            attempt,
            max_retries,
            delay_ms,
            error: error.message,
            context
          });

          await this.sleep(delay_ms);
          continue;
        }

        break;
      }
    }

    throw last_error;
  }

  /**
   * Determines if an error is retryable based on error patterns
   * (LMDB capacity/contention codes and POSIX EAGAIN/EBUSY).
   * @param {Error} error - Error to check
   * @returns {boolean} True if error is retryable, false otherwise
   */
  is_retryable_error(error) {
    const retryable_patterns = [
      'MDB_MAP_FULL',
      'MDB_TXN_FULL',
      'MDB_READERS_FULL',
      'EAGAIN',
      'EBUSY'
    ];

    // Guard against thrown values whose message is not a string.
    return retryable_patterns.some(pattern =>
      (typeof error.message === 'string' && error.message.includes(pattern)) ||
      error.code === pattern
    );
  }

  /**
   * Calculates exponential backoff delay with jitter for retry attempts.
   * @param {number} attempt - Current attempt number (1-based)
   * @returns {number} Delay in milliseconds (100ms base, doubling, capped at 5000ms, up to 10% jitter)
   */
  calculate_backoff_delay(attempt) {
    const base_delay = 100;
    const max_delay = 5000;
    const exponential_delay = base_delay * Math.pow(2, attempt - 1);
    const jitter = Math.random() * 0.1 * exponential_delay;

    return Math.min(exponential_delay + jitter, max_delay);
  }

  /**
   * Utility function to sleep for specified milliseconds.
   * @param {number} ms - Milliseconds to sleep
   * @returns {Promise<void>} Promise that resolves after delay
   */
  sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
  }

  /**
   * Generates a unique operation ID for tracking.
   * @returns {string} Unique operation identifier
   */
  generate_operation_id() {
    // String.prototype.substr is deprecated; slice(2, 11) is equivalent here.
    return `lane_${this.lane_id}_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  /**
   * Gets comprehensive lane statistics including calculated averages.
   * @returns {Object} Statistics object with performance metrics
   */
  get_stats() {
    const avg_batch_wait_time = this.stats.batches_processed > 0
      ? Math.round(this.stats.total_batch_wait_time_ms / this.stats.batches_processed)
      : 0;

    const avg_batch_processing_time = this.stats.batches_processed > 0
      ? Math.round(this.stats.total_batch_processing_time_ms / this.stats.batches_processed)
      : 0;

    const avg_batch_size = this.stats.batches_processed > 0
      ? Math.round(this.stats.completed_operations / this.stats.batches_processed)
      : 0;

    return {
      lane_id: this.lane_id,
      ...this.stats,
      avg_batch_wait_time_ms: avg_batch_wait_time,
      avg_batch_processing_time_ms: avg_batch_processing_time,
      avg_batch_size,
      success_rate: this.stats.total_operations > 0
        ? Math.round((this.stats.completed_operations / this.stats.total_operations) * 100)
        : 100
    };
  }

  /**
   * Clears all statistics while preserving current batch size.
   */
  clear_stats() {
    this.stats = {
      total_operations: 0,
      completed_operations: 0,
      failed_operations: 0,
      batches_processed: 0,
      current_batch_size: this.current_batch.length,
      max_batch_size: 0,
      total_batch_wait_time_ms: 0,
      total_batch_processing_time_ms: 0
    };
  }

  /**
   * Forces processing of current batch regardless of size or timeout.
   * @returns {Promise<void>} Promise that resolves when batch is processed
   */
  async flush_batch() {
    if (this.current_batch.length > 0 && !this.processing) {
      await this.process_current_batch();
    }
  }

  /**
   * Gracefully shuts down the processing lane.
   * Drains any remaining accepted operations, then rejects new ones.
   * @returns {Promise<void>} Promise that resolves when shutdown is complete
   */
  async shutdown() {
    this.log.info('Shutting down processing lane', {
      lane_id: this.lane_id,
      pending_operations: this.current_batch.length,
      currently_processing: this.processing
    });

    this.shutting_down = true;

    // Clear timeout
    if (this.batch_timeout_handle) {
      clearTimeout(this.batch_timeout_handle);
      this.batch_timeout_handle = null;
    }

    // Drain operations accepted before shutdown. This works now because
    // process_current_batch no longer bails out while shutting_down is set.
    if (this.current_batch.length > 0 && !this.processing) {
      await this.process_current_batch();
    }

    // Wait for current processing to complete
    while (this.processing) {
      await new Promise(resolve => setTimeout(resolve, 10));
    }

    // Safety net: reject anything that is still queued after draining
    this.current_batch.forEach(operation => {
      operation.reject(new Error('Processing lane shutting down'));
    });

    this.current_batch = [];
    this.processing = false;
  }
}
|
|
416
|
+
|
|
417
|
+
export default ProcessingLane;
|
|
@@ -2,9 +2,13 @@
|
|
|
2
2
|
* @fileoverview Write queue system for JoystickDB providing serialized write operations.
|
|
3
3
|
* Ensures write operations are processed sequentially to maintain data consistency and ACID properties.
|
|
4
4
|
* Includes retry logic, backoff strategies, performance monitoring, and graceful shutdown capabilities.
|
|
5
|
+
*
|
|
6
|
+
* Now supports both traditional sequential processing and high-performance batched processing
|
|
7
|
+
* with automatic fallback and transparent integration.
|
|
5
8
|
*/
|
|
6
9
|
|
|
7
10
|
import create_logger from './logger.js';
|
|
11
|
+
import { get_batched_write_queue, shutdown_batched_write_queue } from './batched_write_queue.js';
|
|
8
12
|
|
|
9
13
|
const { create_context_logger } = create_logger('write_queue');
|
|
10
14
|
|
|
@@ -312,13 +316,26 @@ class WriteQueue {
|
|
|
312
316
|
/** @type {WriteQueue|null} Singleton instance of the write queue */
let write_queue_instance = null;

/**
 * @type {boolean} Whether to use batched write queue for improved performance.
 * Consulted only when the singleton is first created, so toggling it after
 * creation has no effect until the instance is shut down and recreated.
 */
let use_batched_queue = true;
|
|
321
|
+
|
|
315
322
|
/**
 * Gets the singleton write queue instance, creating it lazily on first use.
 * Automatically uses the batched write queue for improved performance while
 * maintaining complete backward compatibility with the sequential queue API.
 * @param {Object} [options] - Configuration options for batched queue
 * @returns {WriteQueue} The write queue instance
 */
export const get_write_queue = (options) => {
  if (write_queue_instance) {
    return write_queue_instance;
  }

  // Wrap the batched queue so callers see the classic WriteQueue API when
  // batching is enabled; otherwise fall back to the traditional queue.
  write_queue_instance = use_batched_queue
    ? new WriteQueueWrapper(get_batched_write_queue(options))
    : new WriteQueue();

  return write_queue_instance;
};
|
|
@@ -332,4 +349,122 @@ export const shutdown_write_queue = async () => {
|
|
|
332
349
|
await write_queue_instance.shutdown();
|
|
333
350
|
write_queue_instance = null;
|
|
334
351
|
}
|
|
352
|
+
|
|
353
|
+
// Also shutdown batched queue if it was used
|
|
354
|
+
if (use_batched_queue) {
|
|
355
|
+
await shutdown_batched_write_queue();
|
|
356
|
+
}
|
|
335
357
|
};
|
|
358
|
+
|
|
359
|
+
/**
 * Enables or disables batched write queue usage for subsequently created
 * queue instances. Note: get_write_queue only consults this flag when the
 * singleton does not yet exist, so toggling it does not affect an
 * already-created instance.
 * @param {boolean} enabled - Whether to use batched queue
 */
export const set_batched_queue_enabled = (enabled) => {
  // Coerce to a strict boolean so truthy/falsy inputs behave predictably,
  // matching the documented boolean contract.
  use_batched_queue = Boolean(enabled);
};
|
|
366
|
+
|
|
367
|
+
/**
|
|
368
|
+
* Wrapper class that provides WriteQueue-compatible API while using BatchedWriteQueue internally.
|
|
369
|
+
* Ensures complete backward compatibility with existing code.
|
|
370
|
+
*/
|
|
371
|
+
class WriteQueueWrapper {
  /**
   * Creates a new WriteQueueWrapper instance.
   * @param {BatchedWriteQueue} batched_queue - The batched write queue instance to wrap
   */
  constructor(batched_queue) {
    this.batched_queue = batched_queue;
    this.log = create_context_logger('write_queue_wrapper');
  }

  /**
   * Enqueues a write operation using the batched queue.
   * Maintains identical API to original WriteQueue.
   * @param {function} operation_fn - Async function that performs the write operation
   * @param {Object} [context={}] - Additional context for logging and debugging
   * @returns {Promise<*>} Promise that resolves with the operation result
   */
  async enqueue_write_operation(operation_fn, context = {}) {
    return this.batched_queue.enqueue_write_operation(operation_fn, context);
  }

  /**
   * Gets queue statistics with backward-compatible format.
   * Projects only the fields the original WriteQueue exposed so existing
   * consumers never see batching-specific keys.
   * @returns {Object} Statistics object matching original WriteQueue format
   */
  get_stats() {
    const batched_stats = this.batched_queue.get_stats();

    return {
      total_operations: batched_stats.total_operations,
      completed_operations: batched_stats.completed_operations,
      failed_operations: batched_stats.failed_operations,
      current_queue_depth: batched_stats.current_queue_depth,
      max_queue_depth: batched_stats.max_queue_depth,
      avg_wait_time_ms: batched_stats.avg_wait_time_ms,
      avg_processing_time_ms: batched_stats.avg_processing_time_ms,
      success_rate: batched_stats.success_rate
    };
  }

  /**
   * Clears all statistics on the underlying batched queue.
   */
  clear_stats() {
    this.batched_queue.clear_stats();
  }

  /**
   * Gracefully shuts down the wrapper and underlying batched queue.
   * @returns {Promise<void>} Promise that resolves when shutdown is complete
   */
  async shutdown() {
    await this.batched_queue.shutdown();
  }

  /**
   * Determines if an error is retryable based on error patterns
   * (LMDB capacity/contention codes and POSIX EAGAIN/EBUSY).
   * Exposed for backward compatibility with existing tests.
   * @param {Error} error - Error to check
   * @returns {boolean} True if error is retryable, false otherwise
   */
  is_retryable_error(error) {
    const retryable_patterns = [
      'MDB_MAP_FULL',
      'MDB_TXN_FULL',
      'MDB_READERS_FULL',
      'EAGAIN',
      'EBUSY'
    ];

    // Guard against thrown values whose message is not a string.
    return retryable_patterns.some(pattern =>
      (typeof error.message === 'string' && error.message.includes(pattern)) ||
      error.code === pattern
    );
  }

  /**
   * Calculates exponential backoff delay with jitter for retry attempts.
   * Exposed for backward compatibility with existing tests.
   * @param {number} attempt - Current attempt number (1-based)
   * @returns {number} Delay in milliseconds (100ms base, doubling, capped at 5000ms, up to 10% jitter)
   */
  calculate_backoff_delay(attempt) {
    const base_delay = 100;
    const max_delay = 5000;
    const exponential_delay = base_delay * Math.pow(2, attempt - 1);
    const jitter = Math.random() * 0.1 * exponential_delay;

    return Math.min(exponential_delay + jitter, max_delay);
  }

  /**
   * Generates a unique operation ID for tracking.
   * Exposed for backward compatibility with existing tests.
   * @returns {string} Unique operation identifier
   */
  generate_operation_id() {
    // String.prototype.substr is deprecated; slice(2, 11) is equivalent here.
    return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
  }
}
|