@nocobase/plugin-workflow 0.13.0-alpha.9 → 0.14.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,807 @@
1
+ /**
2
+ * @module LRUCache
3
+ */
4
+ declare const TYPE: unique symbol;
5
+ type Index = number & {
6
+ [TYPE]: 'LRUCache Index';
7
+ };
8
+ type UintArray = Uint8Array | Uint16Array | Uint32Array;
9
+ type NumberArray = UintArray | number[];
10
+ declare class ZeroArray extends Array<number> {
11
+ constructor(size: number);
12
+ }
13
+ type StackLike = Stack | Index[];
14
+ declare class Stack {
15
+ #private;
16
+ heap: NumberArray;
17
+ length: number;
18
+ static create(max: number): StackLike;
19
+ constructor(max: number, HeapCls: {
20
+ new (n: number): NumberArray;
21
+ });
22
+ push(n: Index): void;
23
+ pop(): Index;
24
+ }
25
+ /**
26
+ * Promise representing an in-progress {@link LRUCache#fetch} call
27
+ */
28
+ export type BackgroundFetch<V> = Promise<V | undefined | void> & {
29
+ __returned: BackgroundFetch<V> | undefined;
30
+ __abortController: AbortController;
31
+ __staleWhileFetching: V | undefined;
32
+ };
33
+ export declare namespace LRUCache {
34
+ /**
35
+ * An integer greater than 0, reflecting the calculated size of items
36
+ */
37
+ type Size = number;
38
+ /**
39
+ * Integer greater than 0, representing some number of milliseconds, or the
40
+ * time at which a TTL started counting from.
41
+ */
42
+ type Milliseconds = number;
43
+ /**
44
+ * An integer greater than 0, reflecting a number of items
45
+ */
46
+ type Count = number;
47
+ /**
48
+ * The reason why an item was removed from the cache, passed
49
+ * to the {@link Disposer} methods.
50
+ */
51
+ type DisposeReason = 'evict' | 'set' | 'delete';
52
+ /**
53
+ * A method called upon item removal, passed as the
54
+ * {@link OptionsBase.dispose} and/or
55
+ * {@link OptionsBase.disposeAfter} options.
56
+ */
57
+ type Disposer<K, V> = (value: V, key: K, reason: DisposeReason) => void;
58
+ /**
59
+ * A function that returns the effective calculated size
60
+ * of an entry in the cache.
61
+ */
62
+ type SizeCalculator<K, V> = (value: V, key: K) => Size;
63
+ /**
64
+ * Options provided to the
65
+ * {@link OptionsBase.fetchMethod} function.
66
+ */
67
+ interface FetcherOptions<K, V, FC = unknown> {
68
+ signal: AbortSignal;
69
+ options: FetcherFetchOptions<K, V, FC>;
70
+ /**
71
+ * Object provided in the {@link FetchOptions.context} option to
72
+ * {@link LRUCache#fetch}
73
+ */
74
+ context: FC;
75
+ }
76
+ /**
77
+ * Status object that may be passed to {@link LRUCache#fetch},
78
+ * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.
79
+ */
80
+ interface Status<V> {
81
+ /**
82
+ * The status of a set() operation.
83
+ *
84
+ * - add: the item was not found in the cache, and was added
85
+ * - update: the item was in the cache, with the same value provided
86
+ * - replace: the item was in the cache, and replaced
87
+ * - miss: the item was not added to the cache for some reason
88
+ */
89
+ set?: 'add' | 'update' | 'replace' | 'miss';
90
+ /**
91
+ * the ttl stored for the item, or undefined if ttls are not used.
92
+ */
93
+ ttl?: Milliseconds;
94
+ /**
95
+ * the start time for the item, or undefined if ttls are not used.
96
+ */
97
+ start?: Milliseconds;
98
+ /**
99
+ * The timestamp used for TTL calculation
100
+ */
101
+ now?: Milliseconds;
102
+ /**
103
+ * the remaining ttl for the item, or undefined if ttls are not used.
104
+ */
105
+ remainingTTL?: Milliseconds;
106
+ /**
107
+ * The calculated size for the item, if sizes are used.
108
+ */
109
+ entrySize?: Size;
110
+ /**
111
+ * The total calculated size of the cache, if sizes are used.
112
+ */
113
+ totalCalculatedSize?: Size;
114
+ /**
115
+ * A flag indicating that the item was not stored, due to exceeding the
116
+ * {@link OptionsBase.maxEntrySize}
117
+ */
118
+ maxEntrySizeExceeded?: true;
119
+ /**
120
+ * The old value, specified in the case of `set:'update'` or
121
+ * `set:'replace'`
122
+ */
123
+ oldValue?: V;
124
+ /**
125
+ * The results of a {@link LRUCache#has} operation
126
+ *
127
+ * - hit: the item was found in the cache
128
+ * - stale: the item was found in the cache, but is stale
129
+ * - miss: the item was not found in the cache
130
+ */
131
+ has?: 'hit' | 'stale' | 'miss';
132
+ /**
133
+ * The status of a {@link LRUCache#fetch} operation.
134
+ * Note that this can change as the underlying fetch() moves through
135
+ * various states.
136
+ *
137
+ * - inflight: there is another fetch() for this key which is in process
138
+ * - get: there is no fetchMethod, so {@link LRUCache#get} was called.
139
+ * - miss: the item is not in cache, and will be fetched.
140
+ * - hit: the item is in the cache, and was resolved immediately.
141
+ * - stale: the item is in the cache, but stale.
142
+ * - refresh: the item is in the cache, and not stale, but
143
+ * {@link FetchOptions.forceRefresh} was specified.
144
+ */
145
+ fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh';
146
+ /**
147
+ * The {@link OptionsBase.fetchMethod} was called
148
+ */
149
+ fetchDispatched?: true;
150
+ /**
151
+ * The cached value was updated after a successful call to
152
+ * {@link OptionsBase.fetchMethod}
153
+ */
154
+ fetchUpdated?: true;
155
+ /**
156
+ * The reason for a fetch() rejection. Either the error raised by the
157
+ * {@link OptionsBase.fetchMethod}, or the reason for an
158
+ * AbortSignal.
159
+ */
160
+ fetchError?: Error;
161
+ /**
162
+ * The fetch received an abort signal
163
+ */
164
+ fetchAborted?: true;
165
+ /**
166
+ * The abort signal received was ignored, and the fetch was allowed to
167
+ * continue.
168
+ */
169
+ fetchAbortIgnored?: true;
170
+ /**
171
+ * The fetchMethod promise resolved successfully
172
+ */
173
+ fetchResolved?: true;
174
+ /**
175
+ * The fetchMethod promise was rejected
176
+ */
177
+ fetchRejected?: true;
178
+ /**
179
+ * The status of a {@link LRUCache#get} operation.
180
+ *
181
+ * - fetching: The item is currently being fetched. If a previous value
182
+ * is present and allowed, that will be returned.
183
+ * - stale: The item is in the cache, and is stale.
184
+ * - hit: the item is in the cache
185
+ * - miss: the item is not in the cache
186
+ */
187
+ get?: 'stale' | 'hit' | 'miss';
188
+ /**
189
+ * A fetch or get operation returned a stale value.
190
+ */
191
+ returnedStale?: true;
192
+ }
193
+ /**
194
+ * options which override the options set in the LRUCache constructor
195
+ * when calling {@link LRUCache#fetch}.
196
+ *
197
+ * This is the union of {@link GetOptions} and {@link SetOptions}, plus
198
+ * {@link OptionsBase.noDeleteOnFetchRejection},
199
+ * {@link OptionsBase.allowStaleOnFetchRejection},
200
+ * {@link FetchOptions.forceRefresh}, and
201
+ * {@link OptionsBase.context}
202
+ *
203
+ * Any of these may be modified in the {@link OptionsBase.fetchMethod}
204
+ * function, but the {@link GetOptions} fields will of course have no
205
+ * effect, as the {@link LRUCache#get} call already happened by the time
206
+ * the fetchMethod is called.
207
+ */
208
+ interface FetcherFetchOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> {
209
+ status?: Status<V>;
210
+ size?: Size;
211
+ }
212
+ /**
213
+ * Options that may be passed to the {@link LRUCache#fetch} method.
214
+ */
215
+ interface FetchOptions<K, V, FC> extends FetcherFetchOptions<K, V, FC> {
216
+ /**
217
+ * Set to true to force a re-load of the existing data, even if it
218
+ * is not yet stale.
219
+ */
220
+ forceRefresh?: boolean;
221
+ /**
222
+ * Context provided to the {@link OptionsBase.fetchMethod} as
223
+ * the {@link FetcherOptions.context} param.
224
+ *
225
+ * If the FC type is specified as unknown (the default),
226
+ * undefined or void, then this is optional. Otherwise, it will
227
+ * be required.
228
+ */
229
+ context?: FC;
230
+ signal?: AbortSignal;
231
+ status?: Status<V>;
232
+ }
233
+ /**
234
+ * Options provided to {@link LRUCache#fetch} when the FC type is something
235
+ * other than `unknown`, `undefined`, or `void`
236
+ */
237
+ interface FetchOptionsWithContext<K, V, FC> extends FetchOptions<K, V, FC> {
238
+ context: FC;
239
+ }
240
+ /**
241
+ * Options provided to {@link LRUCache#fetch} when the FC type is
242
+ * `undefined` or `void`
243
+ */
244
+ interface FetchOptionsNoContext<K, V, FC> extends FetchOptions<K, V, FC> {
245
+ context?: undefined;
246
+ }
247
+ /**
248
+ * Options that may be passed to the {@link LRUCache#has} method.
249
+ */
250
+ interface HasOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'updateAgeOnHas'> {
251
+ status?: Status<V>;
252
+ }
253
+ /**
254
+ * Options that may be passed to the {@link LRUCache#get} method.
255
+ */
256
+ interface GetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'> {
257
+ status?: Status<V>;
258
+ }
259
+ /**
260
+ * Options that may be passed to the {@link LRUCache#peek} method.
261
+ */
262
+ interface PeekOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale'> {
263
+ }
264
+ /**
265
+ * Options that may be passed to the {@link LRUCache#set} method.
266
+ */
267
+ interface SetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> {
268
+ /**
269
+ * If size tracking is enabled, then setting an explicit size
270
+ * in the {@link LRUCache#set} call will prevent calling the
271
+ * {@link OptionsBase.sizeCalculation} function.
272
+ */
273
+ size?: Size;
274
+ /**
275
+ * If TTL tracking is enabled, then setting an explicit start
276
+ * time in the {@link LRUCache#set} call will override the
277
+ * default time from `performance.now()` or `Date.now()`.
278
+ *
279
+ * Note that it must be a valid value for whichever time-tracking
280
+ * method is in use.
281
+ */
282
+ start?: Milliseconds;
283
+ status?: Status<V>;
284
+ }
285
+ /**
286
+ * The type signature for the {@link OptionsBase.fetchMethod} option.
287
+ */
288
+ type Fetcher<K, V, FC = unknown> = (key: K, staleValue: V | undefined, options: FetcherOptions<K, V, FC>) => Promise<V | void | undefined> | V | void | undefined;
289
+ /**
290
+ * Options which may be passed to the {@link LRUCache} constructor.
291
+ *
292
+ * Most of these may be overridden in the various options that use
293
+ * them.
294
+ *
295
+ * Despite all being technically optional, the constructor requires that
296
+ * a cache is at minimum limited by one or more of {@link OptionsBase.max},
297
+ * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.
298
+ *
299
+ * If {@link OptionsBase.ttl} is used alone, then it is strongly advised
300
+ * (and in fact required by the type definitions here) that the cache
301
+ * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially
302
+ * unbounded storage.
303
+ */
304
+ interface OptionsBase<K, V, FC> {
305
+ /**
306
+ * The maximum number of items to store in the cache before evicting
307
+ * old entries. This is read-only on the {@link LRUCache} instance,
308
+ * and may not be overridden.
309
+ *
310
+ * If set, then storage space will be pre-allocated at construction
311
+ * time, and the cache will perform significantly faster.
312
+ *
313
+ * Note that significantly fewer items may be stored, if
314
+ * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also
315
+ * set.
316
+ */
317
+ max?: Count;
318
+ /**
319
+ * Max time in milliseconds for items to live in cache before they are
320
+ * considered stale. Note that stale items are NOT preemptively removed
321
+ * by default, and MAY live in the cache long after they have expired.
322
+ *
323
+ * Also, as this cache is optimized for LRU/MRU operations, some of
324
+ * the staleness/TTL checks will reduce performance, as they will incur
325
+ * overhead by deleting items.
326
+ *
327
+ * Must be an integer number of ms. If set to 0, this indicates "no TTL"
328
+ *
329
+ * @default 0
330
+ */
331
+ ttl?: Milliseconds;
332
+ /**
333
+ * Minimum amount of time in ms in which to check for staleness.
334
+ * Defaults to 1, which means that the current time is checked
335
+ * at most once per millisecond.
336
+ *
337
+ * Set to 0 to check the current time every time staleness is tested.
338
+ * (This reduces performance, and is theoretically unnecessary.)
339
+ *
340
+ * Setting this to a higher value will improve performance somewhat
341
+ * while using ttl tracking, albeit at the expense of keeping stale
342
+ * items around a bit longer than their TTLs would indicate.
343
+ *
344
+ * @default 1
345
+ */
346
+ ttlResolution?: Milliseconds;
347
+ /**
348
+ * Preemptively remove stale items from the cache.
349
+ * Note that this may significantly degrade performance,
350
+ * especially if the cache is storing a large number of items.
351
+ * It is almost always best to just leave the stale items in
352
+ * the cache, and let them fall out as new items are added.
353
+ *
354
+ * Note that this means that {@link OptionsBase.allowStale} is a bit
355
+ * pointless, as stale items will be deleted almost as soon as they
356
+ * expire.
357
+ *
358
+ * @default false
359
+ */
360
+ ttlAutopurge?: boolean;
361
+ /**
362
+ * Update the age of items on {@link LRUCache#get}, renewing their TTL
363
+ *
364
+ * Has no effect if {@link OptionsBase.ttl} is not set.
365
+ *
366
+ * @default false
367
+ */
368
+ updateAgeOnGet?: boolean;
369
+ /**
370
+ * Update the age of items on {@link LRUCache#has}, renewing their TTL
371
+ *
372
+ * Has no effect if {@link OptionsBase.ttl} is not set.
373
+ *
374
+ * @default false
375
+ */
376
+ updateAgeOnHas?: boolean;
377
+ /**
378
+ * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return
379
+ * stale data, if available.
380
+ */
381
+ allowStale?: boolean;
382
+ /**
383
+ * Function that is called on items when they are dropped from the cache.
384
+ * This can be handy if you want to close file descriptors or do other
385
+ * cleanup tasks when items are no longer accessible. Called with `key,
386
+ * value`. It's called before actually removing the item from the
387
+ * internal cache, so it is *NOT* safe to re-add them.
388
+ *
389
+ * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after
390
+ * they have been fully removed, when it is safe to add them back to the
391
+ * cache.
392
+ */
393
+ dispose?: Disposer<K, V>;
394
+ /**
395
+ * The same as {@link OptionsBase.dispose}, but called *after* the entry
396
+ * is completely removed and the cache is once again in a clean state.
397
+ * It is safe to add an item right back into the cache at this point.
398
+ * However, note that it is *very* easy to inadvertently create infinite
399
+ * recursion this way.
400
+ */
401
+ disposeAfter?: Disposer<K, V>;
402
+ /**
403
+ * Set to true to suppress calling the
404
+ * {@link OptionsBase.dispose} function if the entry key is
405
+ * still accessible within the cache.
406
+ * This may be overridden by passing an options object to
407
+ * {@link LRUCache#set}.
408
+ */
409
+ noDisposeOnSet?: boolean;
410
+ /**
411
+ * Boolean flag to tell the cache to not update the TTL when
412
+ * setting a new value for an existing key (ie, when updating a value
413
+ * rather than inserting a new value). Note that the TTL value is
414
+ * _always_ set (if provided) when adding a new entry into the cache.
415
+ *
416
+ * Has no effect if a {@link OptionsBase.ttl} is not set.
417
+ */
418
+ noUpdateTTL?: boolean;
419
+ /**
420
+ * If you wish to track item size, you must provide a maxSize
421
+ * note that we still will only keep up to max *actual items*,
422
+ * if max is set, so size tracking may cause fewer than max items
423
+ * to be stored. At the extreme, a single item of maxSize size
424
+ * will cause everything else in the cache to be dropped when it
425
+ * is added. Use with caution!
426
+ *
427
+ * Note also that size tracking can negatively impact performance,
428
+ * though for most cases, only minimally.
429
+ */
430
+ maxSize?: Size;
431
+ /**
432
+ * The maximum allowed size for any single item in the cache.
433
+ *
434
+ * If a larger item is passed to {@link LRUCache#set} or returned by a
435
+ * {@link OptionsBase.fetchMethod}, then it will not be stored in the
436
+ * cache.
437
+ */
438
+ maxEntrySize?: Size;
439
+ /**
440
+ * A function that returns a number indicating the item's size.
441
+ *
442
+ * If not provided, and {@link OptionsBase.maxSize} or
443
+ * {@link OptionsBase.maxEntrySize} are set, then all
444
+ * {@link LRUCache#set} calls **must** provide an explicit
445
+ * {@link SetOptions.size} or sizeCalculation param.
446
+ */
447
+ sizeCalculation?: SizeCalculator<K, V>;
448
+ /**
449
+ * Method that provides the implementation for {@link LRUCache#fetch}
450
+ */
451
+ fetchMethod?: Fetcher<K, V, FC>;
452
+ /**
453
+ * Set to true to suppress the deletion of stale data when a
454
+ * {@link OptionsBase.fetchMethod} returns a rejected promise.
455
+ */
456
+ noDeleteOnFetchRejection?: boolean;
457
+ /**
458
+ * Do not delete stale items when they are retrieved with
459
+ * {@link LRUCache#get}.
460
+ *
461
+ * Note that the `get` return value will still be `undefined`
462
+ * unless {@link OptionsBase.allowStale} is true.
463
+ */
464
+ noDeleteOnStaleGet?: boolean;
465
+ /**
466
+ * Set to true to allow returning stale data when a
467
+ * {@link OptionsBase.fetchMethod} throws an error or returns a rejected
468
+ * promise.
469
+ *
470
+ * This differs from using {@link OptionsBase.allowStale} in that stale
471
+ * data will ONLY be returned in the case that the
472
+ * {@link LRUCache#fetch} fails, not any other times.
473
+ */
474
+ allowStaleOnFetchRejection?: boolean;
475
+ /**
476
+ * Set to true to return a stale value from the cache when the
477
+ * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`
478
+ * event, whether user-triggered, or due to internal cache behavior.
479
+ *
480
+ * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying
481
+ * {@link OptionsBase.fetchMethod} will still be considered canceled, and its return
482
+ * value will be ignored and not cached.
483
+ */
484
+ allowStaleOnFetchAbort?: boolean;
485
+ /**
486
+ * Set to true to ignore the `abort` event emitted by the `AbortSignal`
487
+ * object passed to {@link OptionsBase.fetchMethod}, and still cache the
488
+ * resulting resolution value, as long as it is not `undefined`.
489
+ *
490
+ * When used on its own, this means aborted {@link LRUCache#fetch} calls are not
491
+ * immediately resolved or rejected when they are aborted, and instead
492
+ * take the full time to await.
493
+ *
494
+ * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted
495
+ * {@link LRUCache#fetch} calls will resolve immediately to their stale
496
+ * cached value or `undefined`, and will continue to process and eventually
497
+ * update the cache when they resolve, as long as the resulting value is
498
+ * not `undefined`, thus supporting a "return stale on timeout while
499
+ * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal.
500
+ *
501
+ * **Note**: regardless of this setting, an `abort` event _is still
502
+ * emitted on the `AbortSignal` object_, so may result in invalid results
503
+ * when passed to other underlying APIs that use AbortSignals.
504
+ *
505
+ * This may be overridden in the {@link OptionsBase.fetchMethod} or the
506
+ * call to {@link LRUCache#fetch}.
507
+ */
508
+ ignoreFetchAbort?: boolean;
509
+ }
510
+ interface OptionsMaxLimit<K, V, FC> extends OptionsBase<K, V, FC> {
511
+ max: Count;
512
+ }
513
+ interface OptionsTTLLimit<K, V, FC> extends OptionsBase<K, V, FC> {
514
+ ttl: Milliseconds;
515
+ ttlAutopurge: boolean;
516
+ }
517
+ interface OptionsSizeLimit<K, V, FC> extends OptionsBase<K, V, FC> {
518
+ maxSize: Size;
519
+ }
520
+ /**
521
+ * The valid safe options for the {@link LRUCache} constructor
522
+ */
523
+ type Options<K, V, FC> = OptionsMaxLimit<K, V, FC> | OptionsSizeLimit<K, V, FC> | OptionsTTLLimit<K, V, FC>;
524
+ /**
525
+ * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump}
526
+ */
527
+ interface Entry<V> {
528
+ value: V;
529
+ ttl?: Milliseconds;
530
+ size?: Size;
531
+ start?: Milliseconds;
532
+ }
533
+ }
534
+ /**
535
+ * Default export, the thing you're using this module to get.
536
+ *
537
+ * All properties from the options object (with the exception of
538
+ * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
539
+ * normal public members. (`max` and `maxSize` are read-only getters.)
540
+ * Changing any of these will alter the defaults for subsequent method calls,
541
+ * but is otherwise safe.
542
+ */
543
+ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> {
544
+ #private;
545
+ /**
546
+ * {@link LRUCache.OptionsBase.ttl}
547
+ */
548
+ ttl: LRUCache.Milliseconds;
549
+ /**
550
+ * {@link LRUCache.OptionsBase.ttlResolution}
551
+ */
552
+ ttlResolution: LRUCache.Milliseconds;
553
+ /**
554
+ * {@link LRUCache.OptionsBase.ttlAutopurge}
555
+ */
556
+ ttlAutopurge: boolean;
557
+ /**
558
+ * {@link LRUCache.OptionsBase.updateAgeOnGet}
559
+ */
560
+ updateAgeOnGet: boolean;
561
+ /**
562
+ * {@link LRUCache.OptionsBase.updateAgeOnHas}
563
+ */
564
+ updateAgeOnHas: boolean;
565
+ /**
566
+ * {@link LRUCache.OptionsBase.allowStale}
567
+ */
568
+ allowStale: boolean;
569
+ /**
570
+ * {@link LRUCache.OptionsBase.noDisposeOnSet}
571
+ */
572
+ noDisposeOnSet: boolean;
573
+ /**
574
+ * {@link LRUCache.OptionsBase.noUpdateTTL}
575
+ */
576
+ noUpdateTTL: boolean;
577
+ /**
578
+ * {@link LRUCache.OptionsBase.maxEntrySize}
579
+ */
580
+ maxEntrySize: LRUCache.Size;
581
+ /**
582
+ * {@link LRUCache.OptionsBase.sizeCalculation}
583
+ */
584
+ sizeCalculation?: LRUCache.SizeCalculator<K, V>;
585
+ /**
586
+ * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
587
+ */
588
+ noDeleteOnFetchRejection: boolean;
589
+ /**
590
+ * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
591
+ */
592
+ noDeleteOnStaleGet: boolean;
593
+ /**
594
+ * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
595
+ */
596
+ allowStaleOnFetchAbort: boolean;
597
+ /**
598
+ * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
599
+ */
600
+ allowStaleOnFetchRejection: boolean;
601
+ /**
602
+ * {@link LRUCache.OptionsBase.ignoreFetchAbort}
603
+ */
604
+ ignoreFetchAbort: boolean;
605
+ /**
606
+ * Do not call this method unless you need to inspect the
607
+ * inner workings of the cache. If anything returned by this
608
+ * object is modified in any way, strange breakage may occur.
609
+ *
610
+ * These fields are private for a reason!
611
+ *
612
+ * @internal
613
+ */
614
+ static unsafeExposeInternals<K extends {}, V extends {}, FC extends unknown = unknown>(c: LRUCache<K, V, FC>): {
615
+ starts: ZeroArray | undefined;
616
+ ttls: ZeroArray | undefined;
617
+ sizes: ZeroArray | undefined;
618
+ keyMap: Map<K, number>;
619
+ keyList: (K | undefined)[];
620
+ valList: (V | BackgroundFetch<V> | undefined)[];
621
+ next: NumberArray;
622
+ prev: NumberArray;
623
+ readonly head: Index;
624
+ readonly tail: Index;
625
+ free: StackLike;
626
+ isBackgroundFetch: (p: any) => boolean;
627
+ backgroundFetch: (k: K, index: number | undefined, options: LRUCache.FetchOptions<K, V, FC>, context: any) => BackgroundFetch<V>;
628
+ moveToTail: (index: number) => void;
629
+ indexes: (options?: {
630
+ allowStale: boolean;
631
+ }) => Generator<Index, void, unknown>;
632
+ rindexes: (options?: {
633
+ allowStale: boolean;
634
+ }) => Generator<Index, void, unknown>;
635
+ isStale: (index: number | undefined) => boolean;
636
+ };
637
+ /**
638
+ * {@link LRUCache.OptionsBase.max} (read-only)
639
+ */
640
+ get max(): LRUCache.Count;
641
+ /**
642
+ * {@link LRUCache.OptionsBase.maxSize} (read-only)
643
+ */
644
+ get maxSize(): LRUCache.Count;
645
+ /**
646
+ * The total computed size of items in the cache (read-only)
647
+ */
648
+ get calculatedSize(): LRUCache.Size;
649
+ /**
650
+ * The number of items stored in the cache (read-only)
651
+ */
652
+ get size(): LRUCache.Count;
653
+ /**
654
+ * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
655
+ */
656
+ get fetchMethod(): LRUCache.Fetcher<K, V, FC> | undefined;
657
+ /**
658
+ * {@link LRUCache.OptionsBase.dispose} (read-only)
659
+ */
660
+ get dispose(): LRUCache.Disposer<K, V> | undefined;
661
+ /**
662
+ * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
663
+ */
664
+ get disposeAfter(): LRUCache.Disposer<K, V> | undefined;
665
+ constructor(options: LRUCache.Options<K, V, FC> | LRUCache<K, V, FC>);
666
+ /**
667
+ * Return the remaining TTL time for a given entry key
668
+ */
669
+ getRemainingTTL(key: K): number;
670
+ /**
671
+ * Return a generator yielding `[key, value]` pairs,
672
+ * in order from most recently used to least recently used.
673
+ */
674
+ entries(): Generator<(K | V | BackgroundFetch<V> | undefined)[], void, unknown>;
675
+ /**
676
+ * Inverse order version of {@link LRUCache.entries}
677
+ *
678
+ * Return a generator yielding `[key, value]` pairs,
679
+ * in order from least recently used to most recently used.
680
+ */
681
+ rentries(): Generator<(K | V | BackgroundFetch<V> | undefined)[], void, unknown>;
682
+ /**
683
+ * Return a generator yielding the keys in the cache,
684
+ * in order from most recently used to least recently used.
685
+ */
686
+ keys(): Generator<K, void, unknown>;
687
+ /**
688
+ * Inverse order version of {@link LRUCache.keys}
689
+ *
690
+ * Return a generator yielding the keys in the cache,
691
+ * in order from least recently used to most recently used.
692
+ */
693
+ rkeys(): Generator<K, void, unknown>;
694
+ /**
695
+ * Return a generator yielding the values in the cache,
696
+ * in order from most recently used to least recently used.
697
+ */
698
+ values(): Generator<V | BackgroundFetch<V> | undefined, void, unknown>;
699
+ /**
700
+ * Inverse order version of {@link LRUCache.values}
701
+ *
702
+ * Return a generator yielding the values in the cache,
703
+ * in order from least recently used to most recently used.
704
+ */
705
+ rvalues(): Generator<V | BackgroundFetch<V> | undefined, void, unknown>;
706
+ /**
707
+ * Iterating over the cache itself yields the same results as
708
+ * {@link LRUCache.entries}
709
+ */
710
+ [Symbol.iterator](): Generator<(K | V | BackgroundFetch<V> | undefined)[], void, unknown>;
711
+ /**
712
+ * Find a value for which the supplied fn method returns a truthy value,
713
+ * similar to Array.find(). fn is called as fn(value, key, cache).
714
+ */
715
+ find(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => boolean, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
716
+ /**
717
+ * Call the supplied function on each item in the cache, in order from
718
+ * most recently used to least recently used. fn is called as
719
+ * fn(value, key, cache). Does not update age or recency of use.
720
+ * Does not iterate over stale values.
721
+ */
722
+ forEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
723
+ /**
724
+ * The same as {@link LRUCache.forEach} but items are iterated over in
725
+ * reverse order. (ie, less recently used items are iterated over first.)
726
+ */
727
+ rforEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
728
+ /**
729
+ * Delete any stale entries. Returns true if anything was removed,
730
+ * false otherwise.
731
+ */
732
+ purgeStale(): boolean;
733
+ /**
734
+ * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
735
+ * passed to cache.load()
736
+ */
737
+ dump(): [K, LRUCache.Entry<V>][];
738
+ /**
739
+ * Reset the cache and load in the items in entries in the order listed.
740
+ * Note that the shape of the resulting cache may be different if the
741
+ * same options are not used in both caches.
742
+ */
743
+ load(arr: [K, LRUCache.Entry<V>][]): void;
744
+ /**
745
+ * Add a value to the cache.
746
+ */
747
+ set(k: K, v: V | BackgroundFetch<V>, setOptions?: LRUCache.SetOptions<K, V, FC>): this;
748
+ /**
749
+ * Evict the least recently used item, returning its value or
750
+ * `undefined` if cache is empty.
751
+ */
752
+ pop(): V | undefined;
753
+ /**
754
+ * Check if a key is in the cache, without updating the recency of use.
755
+ * Will return false if the item is stale, even though it is technically
756
+ * in the cache.
757
+ *
758
+ * Will not update item age unless
759
+ * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
760
+ */
761
+ has(k: K, hasOptions?: LRUCache.HasOptions<K, V, FC>): boolean;
762
+ /**
763
+ * Like {@link LRUCache#get} but doesn't update recency or delete stale
764
+ * items.
765
+ *
766
+ * Returns `undefined` if the item is stale, unless
767
+ * {@link LRUCache.OptionsBase.allowStale} is set.
768
+ */
769
+ peek(k: K, peekOptions?: LRUCache.PeekOptions<K, V, FC>): V | undefined;
770
+ /**
771
+ * Make an asynchronous cached fetch using the
772
+ * {@link LRUCache.OptionsBase.fetchMethod} function.
773
+ *
774
+ * If multiple fetches for the same key are issued, then they will all be
775
+ * coalesced into a single call to fetchMethod.
776
+ *
777
+ * Note that this means that handling options such as
778
+ * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},
779
+ * {@link LRUCache.FetchOptions.signal},
780
+ * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be
781
+ * determined by the FIRST fetch() call for a given key.
782
+ *
783
+ * This is a known (fixable) shortcoming which will be addressed when
784
+ * someone complains about it, as the fix would involve added complexity and
785
+ * may not be worth the costs for this edge case.
786
+ */
787
+ fetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V, FC> : LRUCache.FetchOptionsWithContext<K, V, FC>): Promise<void | V>;
788
+ fetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V, FC> : never): Promise<void | V>;
789
+ /**
790
+ * Return a value from the cache. Will update the recency of the cache
791
+ * entry found.
792
+ *
793
+ * If the key is not found, get() will return `undefined`.
794
+ */
795
+ get(k: K, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
796
+ /**
797
+ * Deletes a key out of the cache.
798
+ * Returns true if the key was deleted, false otherwise.
799
+ */
800
+ delete(k: K): boolean;
801
+ /**
802
+ * Clear the cache entirely, throwing away all values.
803
+ */
804
+ clear(): void;
805
+ }
806
+ export default LRUCache;
807
+ //# sourceMappingURL=index.d.ts.map