@depup/p-queue 9.1.1-depup.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,790 @@
1
+ import { EventEmitter } from 'eventemitter3';
2
+ import pTimeout from 'p-timeout';
3
+ import PriorityQueue from './priority-queue.js';
4
+ /**
5
+ Promise queue with concurrency control.
6
+ */
7
+ export default class PQueue extends EventEmitter {
8
// --- Rate-limiting configuration and state ---

// Whether unfinished (pending) tasks count against the next interval window.
#carryoverIntervalCount;
// True when `intervalCap` is infinite or `interval` is 0 — rate limiting disabled.
#isIntervalIgnored;
// Tasks started in the current fixed interval window (non-strict mode only).
#intervalCount = 0;
// Maximum number of task starts allowed per interval.
#intervalCap;
// Last rate-limit state reported via 'rateLimit'/'rateLimitCleared' events.
#rateLimitedInInterval = false;
// Guards against scheduling more than one rate-limit microtask flush at a time.
#rateLimitFlushScheduled = false;
// Interval window length in milliseconds.
#interval;
// Timestamp (ms) when the current fixed interval window ends (non-strict mode).
#intervalEnd = 0;
// Timestamp (ms) of the most recent task start; used to keep spacing after idle.
#lastExecutionTime = 0;
// Handle for the repeating fixed-window timer (non-strict mode).
#intervalId;
// Handle for the one-shot "resume when capacity frees up" timer.
#timeoutId;
// Strict mode: sliding-window rate limiting via per-start timestamps.
#strict;
// Circular buffer implementation for better performance
// (consumed entries are skipped via #strictTicksStartIndex instead of shift()).
#strictTicks = [];
#strictTicksStartIndex = 0;
// Queue of waiting tasks, and the class used to (re)create it in clear().
#queue;
#queueClass;
// Number of currently running tasks.
#pending = 0;
// The `!` is needed because of https://github.com/microsoft/TypeScript/issues/32194
#concurrency;
// Whether task starts are currently suspended (autoStart: false or pause()).
#isPaused;
// Use to assign a unique identifier to a promise function, if not explicitly specified
#idAssigner = 1n;
// Track currently running tasks for debugging
#runningTasks = new Map();
// Cleanup callbacks for abort listeners attached to tasks still waiting in the queue.
#queueAbortListenerCleanupFunctions = new Set();
/**
Get or set the default timeout for all tasks. Can be changed at runtime.

Operations will throw a `TimeoutError` if they don't complete within the specified time.

The timeout begins when the operation is dequeued and starts execution, not while it's waiting in the queue.

@example
```
const queue = new PQueue({timeout: 5000});

// Change timeout for all future tasks
queue.timeout = 10000;
```
*/
timeout;
50
+ constructor(options) {
51
+ super();
52
+ // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
53
+ options = {
54
+ carryoverIntervalCount: false,
55
+ intervalCap: Number.POSITIVE_INFINITY,
56
+ interval: 0,
57
+ concurrency: Number.POSITIVE_INFINITY,
58
+ autoStart: true,
59
+ queueClass: PriorityQueue,
60
+ strict: false,
61
+ ...options,
62
+ };
63
+ if (!(typeof options.intervalCap === 'number' && options.intervalCap >= 1)) {
64
+ throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${options.intervalCap?.toString() ?? ''}\` (${typeof options.intervalCap})`);
65
+ }
66
+ if (options.interval === undefined || !(Number.isFinite(options.interval) && options.interval >= 0)) {
67
+ throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${options.interval?.toString() ?? ''}\` (${typeof options.interval})`);
68
+ }
69
+ if (options.strict && options.interval === 0) {
70
+ throw new TypeError('The `strict` option requires a non-zero `interval`');
71
+ }
72
+ if (options.strict && options.intervalCap === Number.POSITIVE_INFINITY) {
73
+ throw new TypeError('The `strict` option requires a finite `intervalCap`');
74
+ }
75
+ // TODO: Remove this fallback in the next major version
76
+ // eslint-disable-next-line @typescript-eslint/no-deprecated
77
+ this.#carryoverIntervalCount = options.carryoverIntervalCount ?? options.carryoverConcurrencyCount ?? false;
78
+ this.#isIntervalIgnored = options.intervalCap === Number.POSITIVE_INFINITY || options.interval === 0;
79
+ this.#intervalCap = options.intervalCap;
80
+ this.#interval = options.interval;
81
+ this.#strict = options.strict;
82
+ this.#queue = new options.queueClass();
83
+ this.#queueClass = options.queueClass;
84
+ this.concurrency = options.concurrency;
85
+ if (options.timeout !== undefined && !(Number.isFinite(options.timeout) && options.timeout > 0)) {
86
+ throw new TypeError(`Expected \`timeout\` to be a positive finite number, got \`${options.timeout}\` (${typeof options.timeout})`);
87
+ }
88
+ this.timeout = options.timeout;
89
+ this.#isPaused = options.autoStart === false;
90
+ this.#setupRateLimitTracking();
91
+ }
92
// Drops strict-mode start timestamps that have aged out of the sliding
// interval window ending at `now` (milliseconds).
#cleanupStrictTicks(now) {
    // Remove ticks outside the current interval window using circular buffer approach
    // (advance the start index instead of shift() — O(1) per expired tick).
    while (this.#strictTicksStartIndex < this.#strictTicks.length) {
        const oldestTick = this.#strictTicks[this.#strictTicksStartIndex];
        if (oldestTick !== undefined && now - oldestTick >= this.#interval) {
            this.#strictTicksStartIndex++;
        }
        else {
            // Ticks are appended in chronological order, so the rest are still live.
            break;
        }
    }
    // Compact the array when it becomes inefficient or fully consumed
    // Compact when: (start index is large AND more than half wasted) OR all ticks expired
    const shouldCompact = (this.#strictTicksStartIndex > 100 && this.#strictTicksStartIndex > this.#strictTicks.length / 2)
        || this.#strictTicksStartIndex === this.#strictTicks.length;
    if (shouldCompact) {
        this.#strictTicks = this.#strictTicks.slice(this.#strictTicksStartIndex);
        this.#strictTicksStartIndex = 0;
    }
}
112
+ // Helper methods for interval consumption
113
+ #consumeIntervalSlot(now) {
114
+ if (this.#strict) {
115
+ this.#strictTicks.push(now);
116
+ }
117
+ else {
118
+ this.#intervalCount++;
119
+ }
120
+ }
121
+ #rollbackIntervalSlot() {
122
+ if (this.#strict) {
123
+ // Pop from the end of the actual data (not from start index)
124
+ if (this.#strictTicks.length > this.#strictTicksStartIndex) {
125
+ this.#strictTicks.pop();
126
+ }
127
+ }
128
+ else if (this.#intervalCount > 0) {
129
+ this.#intervalCount--;
130
+ }
131
+ }
132
+ #getActiveTicksCount() {
133
+ return this.#strictTicks.length - this.#strictTicksStartIndex;
134
+ }
135
+ get #doesIntervalAllowAnother() {
136
+ if (this.#isIntervalIgnored) {
137
+ return true;
138
+ }
139
+ if (this.#strict) {
140
+ // Cleanup already done by #isIntervalPausedAt before this is called
141
+ return this.#getActiveTicksCount() < this.#intervalCap;
142
+ }
143
+ return this.#intervalCount < this.#intervalCap;
144
+ }
145
+ get #doesConcurrentAllowAnother() {
146
+ return this.#pending < this.#concurrency;
147
+ }
148
+ #next() {
149
+ this.#pending--;
150
+ if (this.#pending === 0) {
151
+ this.emit('pendingZero');
152
+ }
153
+ this.#tryToStartAnother();
154
+ this.emit('next');
155
+ }
156
// Fires when the one-shot "resume" timeout elapses and processing may continue.
#onResumeInterval() {
    // Clear timeout ID before processing to prevent race condition
    // Must clear before #onInterval to allow new timeouts to be scheduled
    this.#timeoutId = undefined;
    this.#onInterval();
    this.#initializeIntervalIfNeeded();
}
163
// Returns true when rate limiting forbids starting a task at timestamp `now`,
// scheduling a one-shot resume timeout for when capacity frees up.
// Returns false when a task may start (and, in fixed-window mode, may reset
// the window counter as a side effect).
#isIntervalPausedAt(now) {
    // Strict mode: check if we need to wait for oldest tick to age out
    if (this.#strict) {
        this.#cleanupStrictTicks(now);
        // If at capacity, need to wait for oldest tick to age out
        const activeTicksCount = this.#getActiveTicksCount();
        if (activeTicksCount >= this.#intervalCap) {
            const oldestTick = this.#strictTicks[this.#strictTicksStartIndex];
            // After cleanup, remaining ticks are within interval, so delay is always > 0
            const delay = this.#interval - (now - oldestTick);
            this.#createIntervalTimeout(delay);
            return true;
        }
        return false;
    }
    // Fixed window mode (original logic)
    if (this.#intervalId === undefined) {
        const delay = this.#intervalEnd - now;
        if (delay < 0) {
            // If the interval has expired while idle, check if we should enforce the interval
            // from the last task execution. This ensures proper spacing between tasks even
            // when the queue becomes empty and then new tasks are added.
            if (this.#lastExecutionTime > 0) {
                const timeSinceLastExecution = now - this.#lastExecutionTime;
                if (timeSinceLastExecution < this.#interval) {
                    // Not enough time has passed since the last task execution
                    this.#createIntervalTimeout(this.#interval - timeSinceLastExecution);
                    return true;
                }
            }
            // Enough time has passed or no previous execution, allow execution
            this.#intervalCount = (this.#carryoverIntervalCount) ? this.#pending : 0;
        }
        else {
            // Act as the interval is pending
            this.#createIntervalTimeout(delay);
            return true;
        }
    }
    return false;
}
204
+ #createIntervalTimeout(delay) {
205
+ if (this.#timeoutId !== undefined) {
206
+ return;
207
+ }
208
+ this.#timeoutId = setTimeout(() => {
209
+ this.#onResumeInterval();
210
+ }, delay);
211
+ }
212
+ #clearIntervalTimer() {
213
+ if (this.#intervalId) {
214
+ clearInterval(this.#intervalId);
215
+ this.#intervalId = undefined;
216
+ }
217
+ }
218
+ #clearTimeoutTimer() {
219
+ if (this.#timeoutId) {
220
+ clearTimeout(this.#timeoutId);
221
+ this.#timeoutId = undefined;
222
+ }
223
+ }
224
// Attempts to dequeue and start one task.
// Returns true if a task was started; false when the queue is empty, the
// queue is paused, or the concurrency/interval limits forbid another start.
// Also emits 'empty'/'idle' when the queue drains and 'active' on each start.
#tryToStartAnother() {
    if (this.#queue.size === 0) {
        // We can clear the interval ("pause")
        // Because we can redo it later ("resume")
        this.#clearIntervalTimer();
        this.emit('empty');
        if (this.#pending === 0) {
            // Clear timeout as well when completely idle
            this.#clearTimeoutTimer();
            // Compact strict ticks when idle to free memory
            if (this.#strict && this.#strictTicksStartIndex > 0) {
                const now = Date.now();
                this.#cleanupStrictTicks(now);
            }
            this.emit('idle');
        }
        return false;
    }
    let taskStarted = false;
    if (!this.#isPaused) {
        const now = Date.now();
        // #isIntervalPausedAt may schedule a resume timeout as a side effect;
        // when it reports "not paused" we may (re)start the interval timer below.
        const canInitializeInterval = !this.#isIntervalPausedAt(now);
        if (this.#doesIntervalAllowAnother && this.#doesConcurrentAllowAnother) {
            const job = this.#queue.dequeue();
            if (!this.#isIntervalIgnored) {
                // Count this start against the rate limit before running the job.
                this.#consumeIntervalSlot(now);
                this.#scheduleRateLimitUpdate();
            }
            this.emit('active');
            job();
            if (canInitializeInterval) {
                this.#initializeIntervalIfNeeded();
            }
            taskStarted = true;
        }
    }
    return taskStarted;
}
262
+ #initializeIntervalIfNeeded() {
263
+ if (this.#isIntervalIgnored || this.#intervalId !== undefined) {
264
+ return;
265
+ }
266
+ // Strict mode uses timeouts instead of interval timers
267
+ if (this.#strict) {
268
+ return;
269
+ }
270
+ this.#intervalId = setInterval(() => {
271
+ this.#onInterval();
272
+ }, this.#interval);
273
+ this.#intervalEnd = Date.now() + this.#interval;
274
+ }
275
+ #onInterval() {
276
+ // Non-strict mode uses interval timers and intervalCount
277
+ if (!this.#strict) {
278
+ if (this.#intervalCount === 0 && this.#pending === 0 && this.#intervalId) {
279
+ this.#clearIntervalTimer();
280
+ }
281
+ this.#intervalCount = this.#carryoverIntervalCount ? this.#pending : 0;
282
+ }
283
+ this.#processQueue();
284
+ this.#scheduleRateLimitUpdate();
285
+ }
286
+ /**
287
+ Executes all queued functions until it reaches the limit.
288
+ */
289
+ #processQueue() {
290
+ // eslint-disable-next-line no-empty
291
+ while (this.#tryToStartAnother()) { }
292
+ }
293
+ get concurrency() {
294
+ return this.#concurrency;
295
+ }
296
+ set concurrency(newConcurrency) {
297
+ if (!(typeof newConcurrency === 'number' && newConcurrency >= 1)) {
298
+ throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`);
299
+ }
300
+ this.#concurrency = newConcurrency;
301
+ this.#processQueue();
302
+ }
303
+ /**
304
+ Updates the priority of a promise function by its id, affecting its execution order. Requires a defined concurrency limit to take effect.
305
+
306
+ For example, this can be used to prioritize a promise function to run earlier.
307
+
308
+ ```js
309
+ import PQueue from 'p-queue';
310
+
311
+ const queue = new PQueue({concurrency: 1});
312
+
313
+ queue.add(async () => '🦄', {priority: 1});
314
+ queue.add(async () => '🦀', {priority: 0, id: '🦀'});
315
+ queue.add(async () => '🦄', {priority: 1});
316
+ queue.add(async () => '🦄', {priority: 1});
317
+
318
+ queue.setPriority('🦀', 2);
319
+ ```
320
+
321
+ In this case, the promise function with `id: '🦀'` runs second.
322
+
323
+ You can also deprioritize a promise function to delay its execution:
324
+
325
+ ```js
326
+ import PQueue from 'p-queue';
327
+
328
+ const queue = new PQueue({concurrency: 1});
329
+
330
+ queue.add(async () => '🦄', {priority: 1});
331
+ queue.add(async () => '🦀', {priority: 1, id: '🦀'});
332
+ queue.add(async () => '🦄');
333
+ queue.add(async () => '🦄', {priority: 0});
334
+
335
+ queue.setPriority('🦀', -1);
336
+ ```
337
+ Here, the promise function with `id: '🦀'` executes last.
338
+ */
339
+ setPriority(id, priority) {
340
+ if (typeof priority !== 'number' || !Number.isFinite(priority)) {
341
+ throw new TypeError(`Expected \`priority\` to be a finite number, got \`${priority}\` (${typeof priority})`);
342
+ }
343
+ this.#queue.setPriority(id, priority);
344
+ }
345
/**
Adds a sync or async task to the queue.

@param function_ - The task; receives `{signal}` (the caller's AbortSignal, if any).
@param options - `priority`, `id`, `signal`, and a per-task `timeout` overriding the default.
@returns A promise that settles with the task's result, or rejects with the task's
error, a `TimeoutError`, or the abort reason (whether aborted while queued or running).
*/
async add(function_, options = {}) {
    // Create a copy to avoid mutating the original options object
    options = {
        timeout: this.timeout,
        ...options,
        // Assign unique ID if not provided
        id: options.id ?? (this.#idAssigner++).toString(),
    };
    return new Promise((resolve, reject) => {
        // Create a unique symbol for tracking this task
        const taskSymbol = Symbol(`task-${options.id}`);
        // Replaced below with a real cleanup function when a signal is provided.
        let cleanupQueueAbortHandler = () => undefined;
        const run = async () => {
            // Task is now running — remove the queued-state abort listener
            cleanupQueueAbortHandler();
            this.#pending++;
            // Track this running task
            this.#runningTasks.set(taskSymbol, {
                id: options.id,
                priority: options.priority ?? 0, // Match priority-queue default
                startTime: Date.now(),
                timeout: options.timeout,
            });
            let eventListener;
            try {
                // Check abort signal - if aborted, need to decrement the counter
                // that was incremented in tryToStartAnother
                try {
                    options.signal?.throwIfAborted();
                }
                catch (error) {
                    this.#rollbackIntervalConsumption();
                    // Clean up tracking before throwing
                    this.#runningTasks.delete(taskSymbol);
                    throw error;
                }
                this.#lastExecutionTime = Date.now();
                let operation = function_({ signal: options.signal });
                if (options.timeout) {
                    // Wrap so the task rejects with TimeoutError after `timeout` ms.
                    operation = pTimeout(Promise.resolve(operation), {
                        milliseconds: options.timeout,
                        message: `Task timed out after ${options.timeout}ms (queue has ${this.#pending} running, ${this.#queue.size} waiting)`,
                    });
                }
                if (options.signal) {
                    const { signal } = options;
                    // Race the task against an abort while it is running.
                    operation = Promise.race([operation, new Promise((_resolve, reject) => {
                        eventListener = () => {
                            reject(signal.reason);
                        };
                        signal.addEventListener('abort', eventListener, { once: true });
                    })]);
                }
                const result = await operation;
                resolve(result);
                this.emit('completed', result);
            }
            catch (error) {
                reject(error);
                this.emit('error', error);
            }
            finally {
                // Clean up abort event listener
                if (eventListener) {
                    options.signal?.removeEventListener('abort', eventListener);
                }
                // Remove from running tasks
                this.#runningTasks.delete(taskSymbol);
                // Use queueMicrotask to prevent deep recursion while maintaining timing
                queueMicrotask(() => {
                    this.#next();
                });
            }
        };
        this.#queue.enqueue(run, options);
        const removeQueuedTask = () => {
            if (this.#queue instanceof PriorityQueue) {
                this.#queue.remove(run);
                return;
            }
            this.#queue.remove?.(options.id); // Intentionally best-effort: queued abort removal is only supported for queue classes that implement `.remove()`.
        };
        // Handle abort while task is waiting in the queue
        if (options.signal) {
            const { signal } = options;
            const queueAbortHandler = () => {
                cleanupQueueAbortHandler();
                removeQueuedTask();
                reject(signal.reason);
                this.#tryToStartAnother();
                this.emit('next');
            };
            cleanupQueueAbortHandler = () => {
                signal.removeEventListener('abort', queueAbortHandler);
                this.#queueAbortListenerCleanupFunctions.delete(cleanupQueueAbortHandler);
            };
            // Signal may already be aborted at add() time — reject immediately.
            if (signal.aborted) {
                queueAbortHandler();
                return;
            }
            signal.addEventListener('abort', queueAbortHandler, { once: true });
            this.#queueAbortListenerCleanupFunctions.add(cleanupQueueAbortHandler);
        }
        this.emit('add');
        this.#tryToStartAnother();
    });
}
452
+ async addAll(functions, options) {
453
+ return Promise.all(functions.map(async (function_) => this.add(function_, options)));
454
+ }
455
+ /**
456
+ Start (or resume) executing enqueued tasks within concurrency limit. No need to call this if queue is not paused (via `options.autoStart = false` or by `.pause()` method.)
457
+ */
458
+ start() {
459
+ if (!this.#isPaused) {
460
+ return this;
461
+ }
462
+ this.#isPaused = false;
463
+ this.#processQueue();
464
+ return this;
465
+ }
466
+ /**
467
+ Put queue execution on hold.
468
+ */
469
pause() {
    // Already-running tasks continue; only new task starts are blocked until start().
    this.#isPaused = true;
}
472
+ /**
473
+ Clear the queue.
474
+ */
475
clear() {
    // Detach abort listeners that were watching tasks still waiting in the queue.
    for (const cleanupQueueAbortHandler of this.#queueAbortListenerCleanupFunctions) {
        cleanupQueueAbortHandler();
    }
    // Discard all queued tasks by replacing the queue wholesale.
    this.#queue = new this.#queueClass();
    // Clear interval timer since queue is now empty (consistent with #tryToStartAnother)
    this.#clearIntervalTimer();
    // Note: We preserve strict mode rate-limiting state (ticks and timeout)
    // because clear() only clears queued tasks, not rate limit history.
    // This ensures that rate limits are still enforced after clearing the queue.
    // Note: We don't clear #runningTasks as those tasks are still running
    // They will be removed when they complete in the finally block
    // Force synchronous update since clear() should have immediate effect
    this.#updateRateLimitState();
    // Emit events so waiters (onEmpty, onIdle, onSizeLessThan) can resolve
    this.emit('empty');
    if (this.#pending === 0) {
        this.#clearTimeoutTimer();
        this.emit('idle');
    }
    this.emit('next');
}
497
+ /**
498
+ Can be called multiple times. Useful if you for example add additional items at a later time.
499
+
500
+ @returns A promise that settles when the queue becomes empty.
501
+ */
502
+ async onEmpty() {
503
+ // Instantly resolve if the queue is empty
504
+ if (this.#queue.size === 0) {
505
+ return;
506
+ }
507
+ await this.#onEvent('empty');
508
+ }
509
+ /**
510
+ @returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`.
511
+
512
+ If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item.
513
+
514
+ Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation.
515
+ */
516
+ async onSizeLessThan(limit) {
517
+ // Instantly resolve if the queue is empty.
518
+ if (this.#queue.size < limit) {
519
+ return;
520
+ }
521
+ await this.#onEvent('next', () => this.#queue.size < limit);
522
+ }
523
+ /**
524
+ The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet.
525
+
526
+ @returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`.
527
+ */
528
+ async onIdle() {
529
+ // Instantly resolve if none pending and if nothing else is queued
530
+ if (this.#pending === 0 && this.#queue.size === 0) {
531
+ return;
532
+ }
533
+ await this.#onEvent('idle');
534
+ }
535
+ /**
536
+ The difference with `.onIdle` is that `.onPendingZero` only waits for currently running tasks to finish, ignoring queued tasks.
537
+
538
+ @returns A promise that settles when all currently running tasks have completed; `queue.pending === 0`.
539
+ */
540
+ async onPendingZero() {
541
+ if (this.#pending === 0) {
542
+ return;
543
+ }
544
+ await this.#onEvent('pendingZero');
545
+ }
546
+ /**
547
+ @returns A promise that settles when the queue becomes rate-limited due to intervalCap.
548
+ */
549
+ async onRateLimit() {
550
+ if (this.isRateLimited) {
551
+ return;
552
+ }
553
+ await this.#onEvent('rateLimit');
554
+ }
555
+ /**
556
+ @returns A promise that settles when the queue is no longer rate-limited.
557
+ */
558
+ async onRateLimitCleared() {
559
+ if (!this.isRateLimited) {
560
+ return;
561
+ }
562
+ await this.#onEvent('rateLimitCleared');
563
+ }
564
+ /**
565
+ @returns A promise that rejects when any task in the queue errors.
566
+
567
+ Use with `Promise.race([queue.onError(), queue.onIdle()])` to fail fast on the first error while still resolving normally when the queue goes idle.
568
+
569
+ Important: The promise returned by `add()` still rejects. You must handle each `add()` promise (for example, `.catch(() => {})`) to avoid unhandled rejections.
570
+
571
+ @example
572
+ ```
573
+ import PQueue from 'p-queue';
574
+
575
+ const queue = new PQueue({concurrency: 2});
576
+
577
+ queue.add(() => fetchData(1)).catch(() => {});
578
+ queue.add(() => fetchData(2)).catch(() => {});
579
+ queue.add(() => fetchData(3)).catch(() => {});
580
+
581
+ // Stop processing on first error
582
+ try {
583
+ await Promise.race([
584
+ queue.onError(),
585
+ queue.onIdle()
586
+ ]);
587
+ } catch (error) {
588
+ queue.pause(); // Stop processing remaining tasks
589
+ console.error('Queue failed:', error);
590
+ }
591
+ ```
592
+ */
593
+ // eslint-disable-next-line @typescript-eslint/promise-function-async
594
+ onError() {
595
+ return new Promise((_resolve, reject) => {
596
+ const handleError = (error) => {
597
+ this.off('error', handleError);
598
+ reject(error);
599
+ };
600
+ this.on('error', handleError);
601
+ });
602
+ }
603
+ async #onEvent(event, filter) {
604
+ return new Promise(resolve => {
605
+ const listener = () => {
606
+ if (filter && !filter()) {
607
+ return;
608
+ }
609
+ this.off(event, listener);
610
+ resolve();
611
+ };
612
+ this.on(event, listener);
613
+ });
614
+ }
615
/**
Size of the queue: the number of queued items waiting to run (does not include running tasks).
*/
get size() {
    return this.#queue.size;
}
621
+ /**
622
+ Size of the queue, filtered by the given options.
623
+
624
+ For example, this can be used to find the number of items remaining in the queue with a specific priority level.
625
+ */
626
+ sizeBy(options) {
627
+ // eslint-disable-next-line unicorn/no-array-callback-reference
628
+ return this.#queue.filter(options).length;
629
+ }
630
/**
Number of currently running items (tasks that have been dequeued and started; no longer in the queue).
*/
get pending() {
    return this.#pending;
}
636
/**
Whether the queue is currently paused (via `autoStart: false` or `.pause()`).
*/
get isPaused() {
    return this.#isPaused;
}
642
+ #setupRateLimitTracking() {
643
+ // Only schedule updates when rate limiting is enabled
644
+ if (this.#isIntervalIgnored) {
645
+ return;
646
+ }
647
+ // Wire up to lifecycle events that affect rate limit state
648
+ // Only 'add' and 'next' can actually change rate limit state
649
+ this.on('add', () => {
650
+ if (this.#queue.size > 0) {
651
+ this.#scheduleRateLimitUpdate();
652
+ }
653
+ });
654
+ this.on('next', () => {
655
+ this.#scheduleRateLimitUpdate();
656
+ });
657
+ }
658
+ #scheduleRateLimitUpdate() {
659
+ // Skip if rate limiting is not enabled or already scheduled
660
+ if (this.#isIntervalIgnored || this.#rateLimitFlushScheduled) {
661
+ return;
662
+ }
663
+ this.#rateLimitFlushScheduled = true;
664
+ queueMicrotask(() => {
665
+ this.#rateLimitFlushScheduled = false;
666
+ this.#updateRateLimitState();
667
+ });
668
+ }
669
+ #rollbackIntervalConsumption() {
670
+ if (this.#isIntervalIgnored) {
671
+ return;
672
+ }
673
+ this.#rollbackIntervalSlot();
674
+ this.#scheduleRateLimitUpdate();
675
+ }
676
// Recomputes whether the queue is rate-limited and emits
// 'rateLimit'/'rateLimitCleared' whenever the state flips.
#updateRateLimitState() {
    const previous = this.#rateLimitedInInterval;
    // Early exit if rate limiting is disabled or queue is empty
    // (an empty queue is never reported as rate-limited).
    if (this.#isIntervalIgnored || this.#queue.size === 0) {
        if (previous) {
            this.#rateLimitedInInterval = false;
            this.emit('rateLimitCleared');
        }
        return;
    }
    // Get the current count based on mode
    let count;
    if (this.#strict) {
        // Sliding window: expire stale ticks before counting.
        const now = Date.now();
        this.#cleanupStrictTicks(now);
        count = this.#getActiveTicksCount();
    }
    else {
        count = this.#intervalCount;
    }
    const shouldBeRateLimited = count >= this.#intervalCap;
    if (shouldBeRateLimited !== previous) {
        this.#rateLimitedInInterval = shouldBeRateLimited;
        this.emit(shouldBeRateLimited ? 'rateLimit' : 'rateLimitCleared');
    }
}
702
/**
Whether the queue is currently rate-limited due to `intervalCap` (as last computed by the internal rate-limit tracker).
*/
get isRateLimited() {
    return this.#rateLimitedInInterval;
}
708
+ /**
709
+ Whether the queue is saturated. Returns `true` when:
710
+ - All concurrency slots are occupied and tasks are waiting, OR
711
+ - The queue is rate-limited and tasks are waiting
712
+
713
+ Useful for detecting backpressure and potential hanging tasks.
714
+
715
+ ```js
716
+ import PQueue from 'p-queue';
717
+
718
+ const queue = new PQueue({concurrency: 2});
719
+
720
+ // Backpressure handling
721
+ if (queue.isSaturated) {
722
+ console.log('Queue is saturated, waiting for capacity...');
723
+ await queue.onSizeLessThan(queue.concurrency);
724
+ }
725
+
726
+ // Monitoring for stuck tasks
727
+ setInterval(() => {
728
+ if (queue.isSaturated) {
729
+ console.warn(`Queue saturated: ${queue.pending} running, ${queue.size} waiting`);
730
+ }
731
+ }, 60000);
732
+ ```
733
+ */
734
+ get isSaturated() {
735
+ return (this.#pending === this.#concurrency && this.#queue.size > 0)
736
+ || (this.isRateLimited && this.#queue.size > 0);
737
+ }
738
+ /**
739
+ The tasks currently being executed. Each task includes its `id`, `priority`, `startTime`, and `timeout` (if set).
740
+
741
+ Returns an array of task info objects.
742
+
743
+ ```js
744
+ import PQueue from 'p-queue';
745
+
746
+ const queue = new PQueue({concurrency: 2});
747
+
748
+ // Add tasks with IDs for better debugging
749
+ queue.add(() => fetchUser(123), {id: 'user-123'});
750
+ queue.add(() => fetchPosts(456), {id: 'posts-456', priority: 1});
751
+
752
+ // Check what's running
753
+ console.log(queue.runningTasks);
754
+ // => [{
755
+ // id: 'user-123',
756
+ // priority: 0,
757
+ // startTime: 1759253001716,
758
+ // timeout: undefined
759
+ // }, {
760
+ // id: 'posts-456',
761
+ // priority: 1,
762
+ // startTime: 1759253001916,
763
+ // timeout: undefined
764
+ // }]
765
+ ```
766
+ */
767
+ get runningTasks() {
768
+ // Return fresh array with fresh objects to prevent mutations
769
+ return [...this.#runningTasks.values()].map(task => ({ ...task }));
770
+ }
771
+ }
772
+ /**
773
+ Error thrown when a task times out.
774
+
775
+ @example
776
+ ```
777
+ import PQueue, {TimeoutError} from 'p-queue';
778
+
779
+ const queue = new PQueue({timeout: 1000});
780
+
781
+ try {
782
+ await queue.add(() => someTask());
783
+ } catch (error) {
784
+ if (error instanceof TimeoutError) {
785
+ console.log('Task timed out');
786
+ }
787
+ }
788
+ ```
789
+ */
790
+ export { TimeoutError } from 'p-timeout';