@adaptic/utils 0.0.366 → 0.0.368
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +2110 -0
- package/dist/index.cjs.map +1 -1
- package/dist/index.mjs +2108 -1
- package/dist/index.mjs.map +1 -1
- package/dist/types/cache/stampede-protected-cache.d.ts +565 -0
- package/dist/types/cache/stampede-protected-cache.d.ts.map +1 -0
- package/dist/types/index.d.ts +1 -0
- package/dist/types/index.d.ts.map +1 -1
- package/package.json +4 -2
package/dist/index.cjs
CHANGED
|
@@ -13497,6 +13497,2113 @@ class AlpacaTradingAPI {
|
|
|
13497
13497
|
}
|
|
13498
13498
|
}
|
|
13499
13499
|
|
|
13500
|
+
/**
|
|
13501
|
+
* @module LRUCache
|
|
13502
|
+
*/
|
|
13503
|
+
const defaultPerf = (typeof performance === 'object' &&
|
|
13504
|
+
performance &&
|
|
13505
|
+
typeof performance.now === 'function') ?
|
|
13506
|
+
performance
|
|
13507
|
+
: Date;
|
|
13508
|
+
const warned = new Set();
|
|
13509
|
+
/* c8 ignore start */
|
|
13510
|
+
const PROCESS = (typeof process === 'object' && !!process ?
|
|
13511
|
+
process
|
|
13512
|
+
: {});
|
|
13513
|
+
/* c8 ignore start */
|
|
13514
|
+
const emitWarning = (msg, type, code, fn) => {
|
|
13515
|
+
typeof PROCESS.emitWarning === 'function' ?
|
|
13516
|
+
PROCESS.emitWarning(msg, type, code, fn)
|
|
13517
|
+
: console.error(`[${code}] ${type}: ${msg}`);
|
|
13518
|
+
};
|
|
13519
|
+
let AC = globalThis.AbortController;
|
|
13520
|
+
let AS = globalThis.AbortSignal;
|
|
13521
|
+
/* c8 ignore start */
|
|
13522
|
+
if (typeof AC === 'undefined') {
|
|
13523
|
+
//@ts-ignore
|
|
13524
|
+
AS = class AbortSignal {
|
|
13525
|
+
onabort;
|
|
13526
|
+
_onabort = [];
|
|
13527
|
+
reason;
|
|
13528
|
+
aborted = false;
|
|
13529
|
+
addEventListener(_, fn) {
|
|
13530
|
+
this._onabort.push(fn);
|
|
13531
|
+
}
|
|
13532
|
+
};
|
|
13533
|
+
//@ts-ignore
|
|
13534
|
+
AC = class AbortController {
|
|
13535
|
+
constructor() {
|
|
13536
|
+
warnACPolyfill();
|
|
13537
|
+
}
|
|
13538
|
+
signal = new AS();
|
|
13539
|
+
abort(reason) {
|
|
13540
|
+
if (this.signal.aborted)
|
|
13541
|
+
return;
|
|
13542
|
+
//@ts-ignore
|
|
13543
|
+
this.signal.reason = reason;
|
|
13544
|
+
//@ts-ignore
|
|
13545
|
+
this.signal.aborted = true;
|
|
13546
|
+
//@ts-ignore
|
|
13547
|
+
for (const fn of this.signal._onabort) {
|
|
13548
|
+
fn(reason);
|
|
13549
|
+
}
|
|
13550
|
+
this.signal.onabort?.(reason);
|
|
13551
|
+
}
|
|
13552
|
+
};
|
|
13553
|
+
let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
|
|
13554
|
+
const warnACPolyfill = () => {
|
|
13555
|
+
if (!printACPolyfillWarning)
|
|
13556
|
+
return;
|
|
13557
|
+
printACPolyfillWarning = false;
|
|
13558
|
+
emitWarning('AbortController is not defined. If using lru-cache in ' +
|
|
13559
|
+
'node 14, load an AbortController polyfill from the ' +
|
|
13560
|
+
'`node-abort-controller` package. A minimal polyfill is ' +
|
|
13561
|
+
'provided for use by LRUCache.fetch(), but it should not be ' +
|
|
13562
|
+
'relied upon in other contexts (eg, passing it to other APIs that ' +
|
|
13563
|
+
'use AbortController/AbortSignal might have undesirable effects). ' +
|
|
13564
|
+
'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
|
|
13565
|
+
};
|
|
13566
|
+
}
|
|
13567
|
+
/* c8 ignore stop */
|
|
13568
|
+
const shouldWarn = (code) => !warned.has(code);
|
|
13569
|
+
const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
|
|
13570
|
+
/* c8 ignore start */
|
|
13571
|
+
// This is a little bit ridiculous, tbh.
|
|
13572
|
+
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
|
|
13573
|
+
// And well before that point, you're caching the entire world, I mean,
|
|
13574
|
+
// that's ~32GB of just integers for the next/prev links, plus whatever
|
|
13575
|
+
// else to hold that many keys and values. Just filling the memory with
|
|
13576
|
+
// zeroes at init time is brutal when you get that big.
|
|
13577
|
+
// But why not be complete?
|
|
13578
|
+
// Maybe in the future, these limits will have expanded.
|
|
13579
|
+
const getUintArray = (max) => !isPosInt(max) ? null
|
|
13580
|
+
: max <= Math.pow(2, 8) ? Uint8Array
|
|
13581
|
+
: max <= Math.pow(2, 16) ? Uint16Array
|
|
13582
|
+
: max <= Math.pow(2, 32) ? Uint32Array
|
|
13583
|
+
: max <= Number.MAX_SAFE_INTEGER ? ZeroArray
|
|
13584
|
+
: null;
|
|
13585
|
+
/* c8 ignore stop */
|
|
13586
|
+
class ZeroArray extends Array {
|
|
13587
|
+
constructor(size) {
|
|
13588
|
+
super(size);
|
|
13589
|
+
this.fill(0);
|
|
13590
|
+
}
|
|
13591
|
+
}
|
|
13592
|
+
class Stack {
|
|
13593
|
+
heap;
|
|
13594
|
+
length;
|
|
13595
|
+
// private constructor
|
|
13596
|
+
static #constructing = false;
|
|
13597
|
+
static create(max) {
|
|
13598
|
+
const HeapCls = getUintArray(max);
|
|
13599
|
+
if (!HeapCls)
|
|
13600
|
+
return [];
|
|
13601
|
+
Stack.#constructing = true;
|
|
13602
|
+
const s = new Stack(max, HeapCls);
|
|
13603
|
+
Stack.#constructing = false;
|
|
13604
|
+
return s;
|
|
13605
|
+
}
|
|
13606
|
+
constructor(max, HeapCls) {
|
|
13607
|
+
/* c8 ignore start */
|
|
13608
|
+
if (!Stack.#constructing) {
|
|
13609
|
+
throw new TypeError('instantiate Stack using Stack.create(n)');
|
|
13610
|
+
}
|
|
13611
|
+
/* c8 ignore stop */
|
|
13612
|
+
this.heap = new HeapCls(max);
|
|
13613
|
+
this.length = 0;
|
|
13614
|
+
}
|
|
13615
|
+
push(n) {
|
|
13616
|
+
this.heap[this.length++] = n;
|
|
13617
|
+
}
|
|
13618
|
+
pop() {
|
|
13619
|
+
return this.heap[--this.length];
|
|
13620
|
+
}
|
|
13621
|
+
}
|
|
13622
|
+
/**
|
|
13623
|
+
* Default export, the thing you're using this module to get.
|
|
13624
|
+
*
|
|
13625
|
+
* The `K` and `V` types define the key and value types, respectively. The
|
|
13626
|
+
* optional `FC` type defines the type of the `context` object passed to
|
|
13627
|
+
* `cache.fetch()` and `cache.memo()`.
|
|
13628
|
+
*
|
|
13629
|
+
* Keys and values **must not** be `null` or `undefined`.
|
|
13630
|
+
*
|
|
13631
|
+
* All properties from the options object (with the exception of `max`,
|
|
13632
|
+
* `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
|
|
13633
|
+
* added as normal public members. (The listed options are read-only getters.)
|
|
13634
|
+
*
|
|
13635
|
+
* Changing any of these will alter the defaults for subsequent method calls.
|
|
13636
|
+
*/
|
|
13637
|
+
class LRUCache {
|
|
13638
|
+
// options that cannot be changed without disaster
|
|
13639
|
+
#max;
|
|
13640
|
+
#maxSize;
|
|
13641
|
+
#dispose;
|
|
13642
|
+
#onInsert;
|
|
13643
|
+
#disposeAfter;
|
|
13644
|
+
#fetchMethod;
|
|
13645
|
+
#memoMethod;
|
|
13646
|
+
#perf;
|
|
13647
|
+
/**
|
|
13648
|
+
* {@link LRUCache.OptionsBase.perf}
|
|
13649
|
+
*/
|
|
13650
|
+
get perf() {
|
|
13651
|
+
return this.#perf;
|
|
13652
|
+
}
|
|
13653
|
+
/**
|
|
13654
|
+
* {@link LRUCache.OptionsBase.ttl}
|
|
13655
|
+
*/
|
|
13656
|
+
ttl;
|
|
13657
|
+
/**
|
|
13658
|
+
* {@link LRUCache.OptionsBase.ttlResolution}
|
|
13659
|
+
*/
|
|
13660
|
+
ttlResolution;
|
|
13661
|
+
/**
|
|
13662
|
+
* {@link LRUCache.OptionsBase.ttlAutopurge}
|
|
13663
|
+
*/
|
|
13664
|
+
ttlAutopurge;
|
|
13665
|
+
/**
|
|
13666
|
+
* {@link LRUCache.OptionsBase.updateAgeOnGet}
|
|
13667
|
+
*/
|
|
13668
|
+
updateAgeOnGet;
|
|
13669
|
+
/**
|
|
13670
|
+
* {@link LRUCache.OptionsBase.updateAgeOnHas}
|
|
13671
|
+
*/
|
|
13672
|
+
updateAgeOnHas;
|
|
13673
|
+
/**
|
|
13674
|
+
* {@link LRUCache.OptionsBase.allowStale}
|
|
13675
|
+
*/
|
|
13676
|
+
allowStale;
|
|
13677
|
+
/**
|
|
13678
|
+
* {@link LRUCache.OptionsBase.noDisposeOnSet}
|
|
13679
|
+
*/
|
|
13680
|
+
noDisposeOnSet;
|
|
13681
|
+
/**
|
|
13682
|
+
* {@link LRUCache.OptionsBase.noUpdateTTL}
|
|
13683
|
+
*/
|
|
13684
|
+
noUpdateTTL;
|
|
13685
|
+
/**
|
|
13686
|
+
* {@link LRUCache.OptionsBase.maxEntrySize}
|
|
13687
|
+
*/
|
|
13688
|
+
maxEntrySize;
|
|
13689
|
+
/**
|
|
13690
|
+
* {@link LRUCache.OptionsBase.sizeCalculation}
|
|
13691
|
+
*/
|
|
13692
|
+
sizeCalculation;
|
|
13693
|
+
/**
|
|
13694
|
+
* {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
|
|
13695
|
+
*/
|
|
13696
|
+
noDeleteOnFetchRejection;
|
|
13697
|
+
/**
|
|
13698
|
+
* {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
|
|
13699
|
+
*/
|
|
13700
|
+
noDeleteOnStaleGet;
|
|
13701
|
+
/**
|
|
13702
|
+
* {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
|
|
13703
|
+
*/
|
|
13704
|
+
allowStaleOnFetchAbort;
|
|
13705
|
+
/**
|
|
13706
|
+
* {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
|
|
13707
|
+
*/
|
|
13708
|
+
allowStaleOnFetchRejection;
|
|
13709
|
+
/**
|
|
13710
|
+
* {@link LRUCache.OptionsBase.ignoreFetchAbort}
|
|
13711
|
+
*/
|
|
13712
|
+
ignoreFetchAbort;
|
|
13713
|
+
// computed properties
|
|
13714
|
+
#size;
|
|
13715
|
+
#calculatedSize;
|
|
13716
|
+
#keyMap;
|
|
13717
|
+
#keyList;
|
|
13718
|
+
#valList;
|
|
13719
|
+
#next;
|
|
13720
|
+
#prev;
|
|
13721
|
+
#head;
|
|
13722
|
+
#tail;
|
|
13723
|
+
#free;
|
|
13724
|
+
#disposed;
|
|
13725
|
+
#sizes;
|
|
13726
|
+
#starts;
|
|
13727
|
+
#ttls;
|
|
13728
|
+
#hasDispose;
|
|
13729
|
+
#hasFetchMethod;
|
|
13730
|
+
#hasDisposeAfter;
|
|
13731
|
+
#hasOnInsert;
|
|
13732
|
+
/**
|
|
13733
|
+
* Do not call this method unless you need to inspect the
|
|
13734
|
+
* inner workings of the cache. If anything returned by this
|
|
13735
|
+
* object is modified in any way, strange breakage may occur.
|
|
13736
|
+
*
|
|
13737
|
+
* These fields are private for a reason!
|
|
13738
|
+
*
|
|
13739
|
+
* @internal
|
|
13740
|
+
*/
|
|
13741
|
+
static unsafeExposeInternals(c) {
|
|
13742
|
+
return {
|
|
13743
|
+
// properties
|
|
13744
|
+
starts: c.#starts,
|
|
13745
|
+
ttls: c.#ttls,
|
|
13746
|
+
sizes: c.#sizes,
|
|
13747
|
+
keyMap: c.#keyMap,
|
|
13748
|
+
keyList: c.#keyList,
|
|
13749
|
+
valList: c.#valList,
|
|
13750
|
+
next: c.#next,
|
|
13751
|
+
prev: c.#prev,
|
|
13752
|
+
get head() {
|
|
13753
|
+
return c.#head;
|
|
13754
|
+
},
|
|
13755
|
+
get tail() {
|
|
13756
|
+
return c.#tail;
|
|
13757
|
+
},
|
|
13758
|
+
free: c.#free,
|
|
13759
|
+
// methods
|
|
13760
|
+
isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
|
|
13761
|
+
backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
|
|
13762
|
+
moveToTail: (index) => c.#moveToTail(index),
|
|
13763
|
+
indexes: (options) => c.#indexes(options),
|
|
13764
|
+
rindexes: (options) => c.#rindexes(options),
|
|
13765
|
+
isStale: (index) => c.#isStale(index),
|
|
13766
|
+
};
|
|
13767
|
+
}
|
|
13768
|
+
// Protected read-only members
|
|
13769
|
+
/**
|
|
13770
|
+
* {@link LRUCache.OptionsBase.max} (read-only)
|
|
13771
|
+
*/
|
|
13772
|
+
get max() {
|
|
13773
|
+
return this.#max;
|
|
13774
|
+
}
|
|
13775
|
+
/**
|
|
13776
|
+
* {@link LRUCache.OptionsBase.maxSize} (read-only)
|
|
13777
|
+
*/
|
|
13778
|
+
get maxSize() {
|
|
13779
|
+
return this.#maxSize;
|
|
13780
|
+
}
|
|
13781
|
+
/**
|
|
13782
|
+
* The total computed size of items in the cache (read-only)
|
|
13783
|
+
*/
|
|
13784
|
+
get calculatedSize() {
|
|
13785
|
+
return this.#calculatedSize;
|
|
13786
|
+
}
|
|
13787
|
+
/**
|
|
13788
|
+
* The number of items stored in the cache (read-only)
|
|
13789
|
+
*/
|
|
13790
|
+
get size() {
|
|
13791
|
+
return this.#size;
|
|
13792
|
+
}
|
|
13793
|
+
/**
|
|
13794
|
+
* {@link LRUCache.OptionsBase.fetchMethod} (read-only)
|
|
13795
|
+
*/
|
|
13796
|
+
get fetchMethod() {
|
|
13797
|
+
return this.#fetchMethod;
|
|
13798
|
+
}
|
|
13799
|
+
get memoMethod() {
|
|
13800
|
+
return this.#memoMethod;
|
|
13801
|
+
}
|
|
13802
|
+
/**
|
|
13803
|
+
* {@link LRUCache.OptionsBase.dispose} (read-only)
|
|
13804
|
+
*/
|
|
13805
|
+
get dispose() {
|
|
13806
|
+
return this.#dispose;
|
|
13807
|
+
}
|
|
13808
|
+
/**
|
|
13809
|
+
* {@link LRUCache.OptionsBase.onInsert} (read-only)
|
|
13810
|
+
*/
|
|
13811
|
+
get onInsert() {
|
|
13812
|
+
return this.#onInsert;
|
|
13813
|
+
}
|
|
13814
|
+
/**
|
|
13815
|
+
* {@link LRUCache.OptionsBase.disposeAfter} (read-only)
|
|
13816
|
+
*/
|
|
13817
|
+
get disposeAfter() {
|
|
13818
|
+
return this.#disposeAfter;
|
|
13819
|
+
}
|
|
13820
|
+
constructor(options) {
|
|
13821
|
+
const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
|
|
13822
|
+
if (perf !== undefined) {
|
|
13823
|
+
if (typeof perf?.now !== 'function') {
|
|
13824
|
+
throw new TypeError('perf option must have a now() method if specified');
|
|
13825
|
+
}
|
|
13826
|
+
}
|
|
13827
|
+
this.#perf = perf ?? defaultPerf;
|
|
13828
|
+
if (max !== 0 && !isPosInt(max)) {
|
|
13829
|
+
throw new TypeError('max option must be a nonnegative integer');
|
|
13830
|
+
}
|
|
13831
|
+
const UintArray = max ? getUintArray(max) : Array;
|
|
13832
|
+
if (!UintArray) {
|
|
13833
|
+
throw new Error('invalid max value: ' + max);
|
|
13834
|
+
}
|
|
13835
|
+
this.#max = max;
|
|
13836
|
+
this.#maxSize = maxSize;
|
|
13837
|
+
this.maxEntrySize = maxEntrySize || this.#maxSize;
|
|
13838
|
+
this.sizeCalculation = sizeCalculation;
|
|
13839
|
+
if (this.sizeCalculation) {
|
|
13840
|
+
if (!this.#maxSize && !this.maxEntrySize) {
|
|
13841
|
+
throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
|
|
13842
|
+
}
|
|
13843
|
+
if (typeof this.sizeCalculation !== 'function') {
|
|
13844
|
+
throw new TypeError('sizeCalculation set to non-function');
|
|
13845
|
+
}
|
|
13846
|
+
}
|
|
13847
|
+
if (memoMethod !== undefined &&
|
|
13848
|
+
typeof memoMethod !== 'function') {
|
|
13849
|
+
throw new TypeError('memoMethod must be a function if defined');
|
|
13850
|
+
}
|
|
13851
|
+
this.#memoMethod = memoMethod;
|
|
13852
|
+
if (fetchMethod !== undefined &&
|
|
13853
|
+
typeof fetchMethod !== 'function') {
|
|
13854
|
+
throw new TypeError('fetchMethod must be a function if specified');
|
|
13855
|
+
}
|
|
13856
|
+
this.#fetchMethod = fetchMethod;
|
|
13857
|
+
this.#hasFetchMethod = !!fetchMethod;
|
|
13858
|
+
this.#keyMap = new Map();
|
|
13859
|
+
this.#keyList = new Array(max).fill(undefined);
|
|
13860
|
+
this.#valList = new Array(max).fill(undefined);
|
|
13861
|
+
this.#next = new UintArray(max);
|
|
13862
|
+
this.#prev = new UintArray(max);
|
|
13863
|
+
this.#head = 0;
|
|
13864
|
+
this.#tail = 0;
|
|
13865
|
+
this.#free = Stack.create(max);
|
|
13866
|
+
this.#size = 0;
|
|
13867
|
+
this.#calculatedSize = 0;
|
|
13868
|
+
if (typeof dispose === 'function') {
|
|
13869
|
+
this.#dispose = dispose;
|
|
13870
|
+
}
|
|
13871
|
+
if (typeof onInsert === 'function') {
|
|
13872
|
+
this.#onInsert = onInsert;
|
|
13873
|
+
}
|
|
13874
|
+
if (typeof disposeAfter === 'function') {
|
|
13875
|
+
this.#disposeAfter = disposeAfter;
|
|
13876
|
+
this.#disposed = [];
|
|
13877
|
+
}
|
|
13878
|
+
else {
|
|
13879
|
+
this.#disposeAfter = undefined;
|
|
13880
|
+
this.#disposed = undefined;
|
|
13881
|
+
}
|
|
13882
|
+
this.#hasDispose = !!this.#dispose;
|
|
13883
|
+
this.#hasOnInsert = !!this.#onInsert;
|
|
13884
|
+
this.#hasDisposeAfter = !!this.#disposeAfter;
|
|
13885
|
+
this.noDisposeOnSet = !!noDisposeOnSet;
|
|
13886
|
+
this.noUpdateTTL = !!noUpdateTTL;
|
|
13887
|
+
this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
|
|
13888
|
+
this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
|
|
13889
|
+
this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
|
|
13890
|
+
this.ignoreFetchAbort = !!ignoreFetchAbort;
|
|
13891
|
+
// NB: maxEntrySize is set to maxSize if it's set
|
|
13892
|
+
if (this.maxEntrySize !== 0) {
|
|
13893
|
+
if (this.#maxSize !== 0) {
|
|
13894
|
+
if (!isPosInt(this.#maxSize)) {
|
|
13895
|
+
throw new TypeError('maxSize must be a positive integer if specified');
|
|
13896
|
+
}
|
|
13897
|
+
}
|
|
13898
|
+
if (!isPosInt(this.maxEntrySize)) {
|
|
13899
|
+
throw new TypeError('maxEntrySize must be a positive integer if specified');
|
|
13900
|
+
}
|
|
13901
|
+
this.#initializeSizeTracking();
|
|
13902
|
+
}
|
|
13903
|
+
this.allowStale = !!allowStale;
|
|
13904
|
+
this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
|
|
13905
|
+
this.updateAgeOnGet = !!updateAgeOnGet;
|
|
13906
|
+
this.updateAgeOnHas = !!updateAgeOnHas;
|
|
13907
|
+
this.ttlResolution =
|
|
13908
|
+
isPosInt(ttlResolution) || ttlResolution === 0 ?
|
|
13909
|
+
ttlResolution
|
|
13910
|
+
: 1;
|
|
13911
|
+
this.ttlAutopurge = !!ttlAutopurge;
|
|
13912
|
+
this.ttl = ttl || 0;
|
|
13913
|
+
if (this.ttl) {
|
|
13914
|
+
if (!isPosInt(this.ttl)) {
|
|
13915
|
+
throw new TypeError('ttl must be a positive integer if specified');
|
|
13916
|
+
}
|
|
13917
|
+
this.#initializeTTLTracking();
|
|
13918
|
+
}
|
|
13919
|
+
// do not allow completely unbounded caches
|
|
13920
|
+
if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
|
|
13921
|
+
throw new TypeError('At least one of max, maxSize, or ttl is required');
|
|
13922
|
+
}
|
|
13923
|
+
if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
|
|
13924
|
+
const code = 'LRU_CACHE_UNBOUNDED';
|
|
13925
|
+
if (shouldWarn(code)) {
|
|
13926
|
+
warned.add(code);
|
|
13927
|
+
const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
|
|
13928
|
+
'result in unbounded memory consumption.';
|
|
13929
|
+
emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
|
|
13930
|
+
}
|
|
13931
|
+
}
|
|
13932
|
+
}
|
|
13933
|
+
/**
|
|
13934
|
+
* Return the number of ms left in the item's TTL. If item is not in cache,
|
|
13935
|
+
* returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
|
|
13936
|
+
*/
|
|
13937
|
+
getRemainingTTL(key) {
|
|
13938
|
+
return this.#keyMap.has(key) ? Infinity : 0;
|
|
13939
|
+
}
|
|
13940
|
+
#initializeTTLTracking() {
|
|
13941
|
+
const ttls = new ZeroArray(this.#max);
|
|
13942
|
+
const starts = new ZeroArray(this.#max);
|
|
13943
|
+
this.#ttls = ttls;
|
|
13944
|
+
this.#starts = starts;
|
|
13945
|
+
this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
|
|
13946
|
+
starts[index] = ttl !== 0 ? start : 0;
|
|
13947
|
+
ttls[index] = ttl;
|
|
13948
|
+
if (ttl !== 0 && this.ttlAutopurge) {
|
|
13949
|
+
const t = setTimeout(() => {
|
|
13950
|
+
if (this.#isStale(index)) {
|
|
13951
|
+
this.#delete(this.#keyList[index], 'expire');
|
|
13952
|
+
}
|
|
13953
|
+
}, ttl + 1);
|
|
13954
|
+
// unref() not supported on all platforms
|
|
13955
|
+
/* c8 ignore start */
|
|
13956
|
+
if (t.unref) {
|
|
13957
|
+
t.unref();
|
|
13958
|
+
}
|
|
13959
|
+
/* c8 ignore stop */
|
|
13960
|
+
}
|
|
13961
|
+
};
|
|
13962
|
+
this.#updateItemAge = index => {
|
|
13963
|
+
starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
|
|
13964
|
+
};
|
|
13965
|
+
this.#statusTTL = (status, index) => {
|
|
13966
|
+
if (ttls[index]) {
|
|
13967
|
+
const ttl = ttls[index];
|
|
13968
|
+
const start = starts[index];
|
|
13969
|
+
/* c8 ignore next */
|
|
13970
|
+
if (!ttl || !start)
|
|
13971
|
+
return;
|
|
13972
|
+
status.ttl = ttl;
|
|
13973
|
+
status.start = start;
|
|
13974
|
+
status.now = cachedNow || getNow();
|
|
13975
|
+
const age = status.now - start;
|
|
13976
|
+
status.remainingTTL = ttl - age;
|
|
13977
|
+
}
|
|
13978
|
+
};
|
|
13979
|
+
// debounce calls to perf.now() to 1s so we're not hitting
|
|
13980
|
+
// that costly call repeatedly.
|
|
13981
|
+
let cachedNow = 0;
|
|
13982
|
+
const getNow = () => {
|
|
13983
|
+
const n = this.#perf.now();
|
|
13984
|
+
if (this.ttlResolution > 0) {
|
|
13985
|
+
cachedNow = n;
|
|
13986
|
+
const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
|
|
13987
|
+
// not available on all platforms
|
|
13988
|
+
/* c8 ignore start */
|
|
13989
|
+
if (t.unref) {
|
|
13990
|
+
t.unref();
|
|
13991
|
+
}
|
|
13992
|
+
/* c8 ignore stop */
|
|
13993
|
+
}
|
|
13994
|
+
return n;
|
|
13995
|
+
};
|
|
13996
|
+
this.getRemainingTTL = key => {
|
|
13997
|
+
const index = this.#keyMap.get(key);
|
|
13998
|
+
if (index === undefined) {
|
|
13999
|
+
return 0;
|
|
14000
|
+
}
|
|
14001
|
+
const ttl = ttls[index];
|
|
14002
|
+
const start = starts[index];
|
|
14003
|
+
if (!ttl || !start) {
|
|
14004
|
+
return Infinity;
|
|
14005
|
+
}
|
|
14006
|
+
const age = (cachedNow || getNow()) - start;
|
|
14007
|
+
return ttl - age;
|
|
14008
|
+
};
|
|
14009
|
+
this.#isStale = index => {
|
|
14010
|
+
const s = starts[index];
|
|
14011
|
+
const t = ttls[index];
|
|
14012
|
+
return !!t && !!s && (cachedNow || getNow()) - s > t;
|
|
14013
|
+
};
|
|
14014
|
+
}
|
|
14015
|
+
// conditionally set private methods related to TTL
|
|
14016
|
+
#updateItemAge = () => { };
|
|
14017
|
+
#statusTTL = () => { };
|
|
14018
|
+
#setItemTTL = () => { };
|
|
14019
|
+
/* c8 ignore stop */
|
|
14020
|
+
#isStale = () => false;
|
|
14021
|
+
#initializeSizeTracking() {
|
|
14022
|
+
const sizes = new ZeroArray(this.#max);
|
|
14023
|
+
this.#calculatedSize = 0;
|
|
14024
|
+
this.#sizes = sizes;
|
|
14025
|
+
this.#removeItemSize = index => {
|
|
14026
|
+
this.#calculatedSize -= sizes[index];
|
|
14027
|
+
sizes[index] = 0;
|
|
14028
|
+
};
|
|
14029
|
+
this.#requireSize = (k, v, size, sizeCalculation) => {
|
|
14030
|
+
// provisionally accept background fetches.
|
|
14031
|
+
// actual value size will be checked when they return.
|
|
14032
|
+
if (this.#isBackgroundFetch(v)) {
|
|
14033
|
+
return 0;
|
|
14034
|
+
}
|
|
14035
|
+
if (!isPosInt(size)) {
|
|
14036
|
+
if (sizeCalculation) {
|
|
14037
|
+
if (typeof sizeCalculation !== 'function') {
|
|
14038
|
+
throw new TypeError('sizeCalculation must be a function');
|
|
14039
|
+
}
|
|
14040
|
+
size = sizeCalculation(v, k);
|
|
14041
|
+
if (!isPosInt(size)) {
|
|
14042
|
+
throw new TypeError('sizeCalculation return invalid (expect positive integer)');
|
|
14043
|
+
}
|
|
14044
|
+
}
|
|
14045
|
+
else {
|
|
14046
|
+
throw new TypeError('invalid size value (must be positive integer). ' +
|
|
14047
|
+
'When maxSize or maxEntrySize is used, sizeCalculation ' +
|
|
14048
|
+
'or size must be set.');
|
|
14049
|
+
}
|
|
14050
|
+
}
|
|
14051
|
+
return size;
|
|
14052
|
+
};
|
|
14053
|
+
this.#addItemSize = (index, size, status) => {
|
|
14054
|
+
sizes[index] = size;
|
|
14055
|
+
if (this.#maxSize) {
|
|
14056
|
+
const maxSize = this.#maxSize - sizes[index];
|
|
14057
|
+
while (this.#calculatedSize > maxSize) {
|
|
14058
|
+
this.#evict(true);
|
|
14059
|
+
}
|
|
14060
|
+
}
|
|
14061
|
+
this.#calculatedSize += sizes[index];
|
|
14062
|
+
if (status) {
|
|
14063
|
+
status.entrySize = size;
|
|
14064
|
+
status.totalCalculatedSize = this.#calculatedSize;
|
|
14065
|
+
}
|
|
14066
|
+
};
|
|
14067
|
+
}
|
|
14068
|
+
#removeItemSize = _i => { };
|
|
14069
|
+
#addItemSize = (_i, _s, _st) => { };
|
|
14070
|
+
#requireSize = (_k, _v, size, sizeCalculation) => {
|
|
14071
|
+
if (size || sizeCalculation) {
|
|
14072
|
+
throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
|
|
14073
|
+
}
|
|
14074
|
+
return 0;
|
|
14075
|
+
};
|
|
14076
|
+
*#indexes({ allowStale = this.allowStale } = {}) {
|
|
14077
|
+
if (this.#size) {
|
|
14078
|
+
for (let i = this.#tail; true;) {
|
|
14079
|
+
if (!this.#isValidIndex(i)) {
|
|
14080
|
+
break;
|
|
14081
|
+
}
|
|
14082
|
+
if (allowStale || !this.#isStale(i)) {
|
|
14083
|
+
yield i;
|
|
14084
|
+
}
|
|
14085
|
+
if (i === this.#head) {
|
|
14086
|
+
break;
|
|
14087
|
+
}
|
|
14088
|
+
else {
|
|
14089
|
+
i = this.#prev[i];
|
|
14090
|
+
}
|
|
14091
|
+
}
|
|
14092
|
+
}
|
|
14093
|
+
}
|
|
14094
|
+
*#rindexes({ allowStale = this.allowStale } = {}) {
|
|
14095
|
+
if (this.#size) {
|
|
14096
|
+
for (let i = this.#head; true;) {
|
|
14097
|
+
if (!this.#isValidIndex(i)) {
|
|
14098
|
+
break;
|
|
14099
|
+
}
|
|
14100
|
+
if (allowStale || !this.#isStale(i)) {
|
|
14101
|
+
yield i;
|
|
14102
|
+
}
|
|
14103
|
+
if (i === this.#tail) {
|
|
14104
|
+
break;
|
|
14105
|
+
}
|
|
14106
|
+
else {
|
|
14107
|
+
i = this.#next[i];
|
|
14108
|
+
}
|
|
14109
|
+
}
|
|
14110
|
+
}
|
|
14111
|
+
}
|
|
14112
|
+
#isValidIndex(index) {
|
|
14113
|
+
return (index !== undefined &&
|
|
14114
|
+
this.#keyMap.get(this.#keyList[index]) === index);
|
|
14115
|
+
}
|
|
14116
|
+
/**
|
|
14117
|
+
* Return a generator yielding `[key, value]` pairs,
|
|
14118
|
+
* in order from most recently used to least recently used.
|
|
14119
|
+
*/
|
|
14120
|
+
*entries() {
|
|
14121
|
+
for (const i of this.#indexes()) {
|
|
14122
|
+
if (this.#valList[i] !== undefined &&
|
|
14123
|
+
this.#keyList[i] !== undefined &&
|
|
14124
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
14125
|
+
yield [this.#keyList[i], this.#valList[i]];
|
|
14126
|
+
}
|
|
14127
|
+
}
|
|
14128
|
+
}
|
|
14129
|
+
/**
|
|
14130
|
+
* Inverse order version of {@link LRUCache.entries}
|
|
14131
|
+
*
|
|
14132
|
+
* Return a generator yielding `[key, value]` pairs,
|
|
14133
|
+
* in order from least recently used to most recently used.
|
|
14134
|
+
*/
|
|
14135
|
+
*rentries() {
|
|
14136
|
+
for (const i of this.#rindexes()) {
|
|
14137
|
+
if (this.#valList[i] !== undefined &&
|
|
14138
|
+
this.#keyList[i] !== undefined &&
|
|
14139
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
14140
|
+
yield [this.#keyList[i], this.#valList[i]];
|
|
14141
|
+
}
|
|
14142
|
+
}
|
|
14143
|
+
}
|
|
14144
|
+
/**
|
|
14145
|
+
* Return a generator yielding the keys in the cache,
|
|
14146
|
+
* in order from most recently used to least recently used.
|
|
14147
|
+
*/
|
|
14148
|
+
*keys() {
|
|
14149
|
+
for (const i of this.#indexes()) {
|
|
14150
|
+
const k = this.#keyList[i];
|
|
14151
|
+
if (k !== undefined &&
|
|
14152
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
14153
|
+
yield k;
|
|
14154
|
+
}
|
|
14155
|
+
}
|
|
14156
|
+
}
|
|
14157
|
+
/**
|
|
14158
|
+
* Inverse order version of {@link LRUCache.keys}
|
|
14159
|
+
*
|
|
14160
|
+
* Return a generator yielding the keys in the cache,
|
|
14161
|
+
* in order from least recently used to most recently used.
|
|
14162
|
+
*/
|
|
14163
|
+
*rkeys() {
|
|
14164
|
+
for (const i of this.#rindexes()) {
|
|
14165
|
+
const k = this.#keyList[i];
|
|
14166
|
+
if (k !== undefined &&
|
|
14167
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
14168
|
+
yield k;
|
|
14169
|
+
}
|
|
14170
|
+
}
|
|
14171
|
+
}
|
|
14172
|
+
/**
|
|
14173
|
+
* Return a generator yielding the values in the cache,
|
|
14174
|
+
* in order from most recently used to least recently used.
|
|
14175
|
+
*/
|
|
14176
|
+
*values() {
|
|
14177
|
+
for (const i of this.#indexes()) {
|
|
14178
|
+
const v = this.#valList[i];
|
|
14179
|
+
if (v !== undefined &&
|
|
14180
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
14181
|
+
yield this.#valList[i];
|
|
14182
|
+
}
|
|
14183
|
+
}
|
|
14184
|
+
}
|
|
14185
|
+
/**
|
|
14186
|
+
* Inverse order version of {@link LRUCache.values}
|
|
14187
|
+
*
|
|
14188
|
+
* Return a generator yielding the values in the cache,
|
|
14189
|
+
* in order from least recently used to most recently used.
|
|
14190
|
+
*/
|
|
14191
|
+
*rvalues() {
|
|
14192
|
+
for (const i of this.#rindexes()) {
|
|
14193
|
+
const v = this.#valList[i];
|
|
14194
|
+
if (v !== undefined &&
|
|
14195
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
14196
|
+
yield this.#valList[i];
|
|
14197
|
+
}
|
|
14198
|
+
}
|
|
14199
|
+
}
|
|
14200
|
+
/**
|
|
14201
|
+
* Iterating over the cache itself yields the same results as
|
|
14202
|
+
* {@link LRUCache.entries}
|
|
14203
|
+
*/
|
|
14204
|
+
[Symbol.iterator]() {
|
|
14205
|
+
return this.entries();
|
|
14206
|
+
}
|
|
14207
|
+
/**
|
|
14208
|
+
* A String value that is used in the creation of the default string
|
|
14209
|
+
* description of an object. Called by the built-in method
|
|
14210
|
+
* `Object.prototype.toString`.
|
|
14211
|
+
*/
|
|
14212
|
+
[Symbol.toStringTag] = 'LRUCache';
|
|
14213
|
+
/**
|
|
14214
|
+
* Find a value for which the supplied fn method returns a truthy value,
|
|
14215
|
+
* similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
|
|
14216
|
+
*/
|
|
14217
|
+
find(fn, getOptions = {}) {
|
|
14218
|
+
for (const i of this.#indexes()) {
|
|
14219
|
+
const v = this.#valList[i];
|
|
14220
|
+
const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
|
14221
|
+
if (value === undefined)
|
|
14222
|
+
continue;
|
|
14223
|
+
if (fn(value, this.#keyList[i], this)) {
|
|
14224
|
+
return this.get(this.#keyList[i], getOptions);
|
|
14225
|
+
}
|
|
14226
|
+
}
|
|
14227
|
+
}
|
|
14228
|
+
/**
|
|
14229
|
+
* Call the supplied function on each item in the cache, in order from most
|
|
14230
|
+
* recently used to least recently used.
|
|
14231
|
+
*
|
|
14232
|
+
* `fn` is called as `fn(value, key, cache)`.
|
|
14233
|
+
*
|
|
14234
|
+
* If `thisp` is provided, function will be called in the `this`-context of
|
|
14235
|
+
* the provided object, or the cache if no `thisp` object is provided.
|
|
14236
|
+
*
|
|
14237
|
+
* Does not update age or recenty of use, or iterate over stale values.
|
|
14238
|
+
*/
|
|
14239
|
+
forEach(fn, thisp = this) {
|
|
14240
|
+
for (const i of this.#indexes()) {
|
|
14241
|
+
const v = this.#valList[i];
|
|
14242
|
+
const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
|
14243
|
+
if (value === undefined)
|
|
14244
|
+
continue;
|
|
14245
|
+
fn.call(thisp, value, this.#keyList[i], this);
|
|
14246
|
+
}
|
|
14247
|
+
}
|
|
14248
|
+
/**
|
|
14249
|
+
* The same as {@link LRUCache.forEach} but items are iterated over in
|
|
14250
|
+
* reverse order. (ie, less recently used items are iterated over first.)
|
|
14251
|
+
*/
|
|
14252
|
+
rforEach(fn, thisp = this) {
|
|
14253
|
+
for (const i of this.#rindexes()) {
|
|
14254
|
+
const v = this.#valList[i];
|
|
14255
|
+
const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
|
14256
|
+
if (value === undefined)
|
|
14257
|
+
continue;
|
|
14258
|
+
fn.call(thisp, value, this.#keyList[i], this);
|
|
14259
|
+
}
|
|
14260
|
+
}
|
|
14261
|
+
/**
|
|
14262
|
+
* Delete any stale entries. Returns true if anything was removed,
|
|
14263
|
+
* false otherwise.
|
|
14264
|
+
*/
|
|
14265
|
+
purgeStale() {
|
|
14266
|
+
let deleted = false;
|
|
14267
|
+
for (const i of this.#rindexes({ allowStale: true })) {
|
|
14268
|
+
if (this.#isStale(i)) {
|
|
14269
|
+
this.#delete(this.#keyList[i], 'expire');
|
|
14270
|
+
deleted = true;
|
|
14271
|
+
}
|
|
14272
|
+
}
|
|
14273
|
+
return deleted;
|
|
14274
|
+
}
|
|
14275
|
+
/**
|
|
14276
|
+
* Get the extended info about a given entry, to get its value, size, and
|
|
14277
|
+
* TTL info simultaneously. Returns `undefined` if the key is not present.
|
|
14278
|
+
*
|
|
14279
|
+
* Unlike {@link LRUCache#dump}, which is designed to be portable and survive
|
|
14280
|
+
* serialization, the `start` value is always the current timestamp, and the
|
|
14281
|
+
* `ttl` is a calculated remaining time to live (negative if expired).
|
|
14282
|
+
*
|
|
14283
|
+
* Always returns stale values, if their info is found in the cache, so be
|
|
14284
|
+
* sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
|
|
14285
|
+
* if relevant.
|
|
14286
|
+
*/
|
|
14287
|
+
// Build an Entry snapshot (value, remaining ttl, size) for `key` without
// touching recency. `start` is reported as the current Date.now(), and
// `ttl` as remaining time (negative when expired) — see JSDoc above.
info(key) {
    const i = this.#keyMap.get(key);
    if (i === undefined)
        return undefined;
    const v = this.#valList[i];
    /* c8 ignore start - this isn't tested for the info function,
     * but it's the same logic as found in other places. */
    const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
    if (value === undefined)
        return undefined;
    /* c8 ignore end */
    const entry = { value };
    if (this.#ttls && this.#starts) {
        const ttl = this.#ttls[i];
        const start = this.#starts[i];
        if (ttl && start) {
            // remaining TTL measured against this cache's clock (#perf),
            // but `start` is normalized to a portable Date.now() value
            const remain = ttl - (this.#perf.now() - start);
            entry.ttl = remain;
            entry.start = Date.now();
        }
    }
    if (this.#sizes) {
        entry.size = this.#sizes[i];
    }
    return entry;
}
|
|
14313
|
+
/**
|
|
14314
|
+
* Return an array of [key, {@link LRUCache.Entry}] tuples which can be
|
|
14315
|
+
* passed to {@link LRUCache#load}.
|
|
14316
|
+
*
|
|
14317
|
+
* The `start` fields are calculated relative to a portable `Date.now()`
|
|
14318
|
+
* timestamp, even if `performance.now()` is available.
|
|
14319
|
+
*
|
|
14320
|
+
* Stale entries are always included in the `dump`, even if
|
|
14321
|
+
* {@link LRUCache.OptionsBase.allowStale} is false.
|
|
14322
|
+
*
|
|
14323
|
+
* Note: this returns an actual array, not a generator, so it can be more
|
|
14324
|
+
* easily passed around.
|
|
14325
|
+
*/
|
|
14326
|
+
// Serialize the cache to an array of [key, entry] tuples suitable for
// load(). Most-recently-used ends up last (entries are unshifted while
// iterating MRU-first), and `start` is normalized to Date.now() terms.
dump() {
    const arr = [];
    for (const i of this.#indexes({ allowStale: true })) {
        const key = this.#keyList[i];
        const v = this.#valList[i];
        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
        if (value === undefined || key === undefined)
            continue;
        const entry = { value };
        if (this.#ttls && this.#starts) {
            entry.ttl = this.#ttls[i];
            // always dump the start relative to a portable timestamp
            // it's ok for this to be a bit slow, it's a rare operation.
            const age = this.#perf.now() - this.#starts[i];
            entry.start = Math.floor(Date.now() - age);
        }
        if (this.#sizes) {
            entry.size = this.#sizes[i];
        }
        arr.unshift([key, entry]);
    }
    return arr;
}
|
|
14349
|
+
/**
|
|
14350
|
+
* Reset the cache and load in the items in entries in the order listed.
|
|
14351
|
+
*
|
|
14352
|
+
* The shape of the resulting cache may be different if the same options are
|
|
14353
|
+
* not used in both caches.
|
|
14354
|
+
*
|
|
14355
|
+
* The `start` fields are assumed to be calculated relative to a portable
|
|
14356
|
+
* `Date.now()` timestamp, even if `performance.now()` is available.
|
|
14357
|
+
*/
|
|
14358
|
+
load(arr) {
|
|
14359
|
+
this.clear();
|
|
14360
|
+
for (const [key, entry] of arr) {
|
|
14361
|
+
if (entry.start) {
|
|
14362
|
+
// entry.start is a portable timestamp, but we may be using
|
|
14363
|
+
// node's performance.now(), so calculate the offset, so that
|
|
14364
|
+
// we get the intended remaining TTL, no matter how long it's
|
|
14365
|
+
// been on ice.
|
|
14366
|
+
//
|
|
14367
|
+
// it's ok for this to be a bit slow, it's a rare operation.
|
|
14368
|
+
const age = Date.now() - entry.start;
|
|
14369
|
+
entry.start = this.#perf.now() - age;
|
|
14370
|
+
}
|
|
14371
|
+
this.set(key, entry.value, entry);
|
|
14372
|
+
}
|
|
14373
|
+
}
|
|
14374
|
+
/**
|
|
14375
|
+
* Add a value to the cache.
|
|
14376
|
+
*
|
|
14377
|
+
* Note: if `undefined` is specified as a value, this is an alias for
|
|
14378
|
+
* {@link LRUCache#delete}
|
|
14379
|
+
*
|
|
14380
|
+
* Fields on the {@link LRUCache.SetOptions} options param will override
|
|
14381
|
+
* their corresponding values in the constructor options for the scope
|
|
14382
|
+
* of this single `set()` operation.
|
|
14383
|
+
*
|
|
14384
|
+
* If `start` is provided, then that will set the effective start
|
|
14385
|
+
* time for the TTL calculation. Note that this must be a previous
|
|
14386
|
+
* value of `performance.now()` if supported, or a previous value of
|
|
14387
|
+
* `Date.now()` if not.
|
|
14388
|
+
*
|
|
14389
|
+
* Options object may also include `size`, which will prevent
|
|
14390
|
+
* calling the `sizeCalculation` function and just use the specified
|
|
14391
|
+
* number if it is a positive integer, and `noDisposeOnSet` which
|
|
14392
|
+
* will prevent calling a `dispose` function in the case of
|
|
14393
|
+
* overwrites.
|
|
14394
|
+
*
|
|
14395
|
+
* If the `size` (or return value of `sizeCalculation`) for a given
|
|
14396
|
+
* entry is greater than `maxEntrySize`, then the item will not be
|
|
14397
|
+
* added to the cache.
|
|
14398
|
+
*
|
|
14399
|
+
* Will update the recency of the entry.
|
|
14400
|
+
*
|
|
14401
|
+
* If the value is `undefined`, then this is an alias for
|
|
14402
|
+
* `cache.delete(key)`. `undefined` is never stored in the cache.
|
|
14403
|
+
*/
|
|
14404
|
+
set(k, v, setOptions = {}) {
|
|
14405
|
+
if (v === undefined) {
|
|
14406
|
+
this.delete(k);
|
|
14407
|
+
return this;
|
|
14408
|
+
}
|
|
14409
|
+
const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
|
|
14410
|
+
let { noUpdateTTL = this.noUpdateTTL } = setOptions;
|
|
14411
|
+
const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
|
|
14412
|
+
// if the item doesn't fit, don't do anything
|
|
14413
|
+
// NB: maxEntrySize set to maxSize by default
|
|
14414
|
+
if (this.maxEntrySize && size > this.maxEntrySize) {
|
|
14415
|
+
if (status) {
|
|
14416
|
+
status.set = 'miss';
|
|
14417
|
+
status.maxEntrySizeExceeded = true;
|
|
14418
|
+
}
|
|
14419
|
+
// have to delete, in case something is there already.
|
|
14420
|
+
this.#delete(k, 'set');
|
|
14421
|
+
return this;
|
|
14422
|
+
}
|
|
14423
|
+
let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
|
|
14424
|
+
if (index === undefined) {
|
|
14425
|
+
// addition
|
|
14426
|
+
index = (this.#size === 0 ? this.#tail
|
|
14427
|
+
: this.#free.length !== 0 ? this.#free.pop()
|
|
14428
|
+
: this.#size === this.#max ? this.#evict(false)
|
|
14429
|
+
: this.#size);
|
|
14430
|
+
this.#keyList[index] = k;
|
|
14431
|
+
this.#valList[index] = v;
|
|
14432
|
+
this.#keyMap.set(k, index);
|
|
14433
|
+
this.#next[this.#tail] = index;
|
|
14434
|
+
this.#prev[index] = this.#tail;
|
|
14435
|
+
this.#tail = index;
|
|
14436
|
+
this.#size++;
|
|
14437
|
+
this.#addItemSize(index, size, status);
|
|
14438
|
+
if (status)
|
|
14439
|
+
status.set = 'add';
|
|
14440
|
+
noUpdateTTL = false;
|
|
14441
|
+
if (this.#hasOnInsert) {
|
|
14442
|
+
this.#onInsert?.(v, k, 'add');
|
|
14443
|
+
}
|
|
14444
|
+
}
|
|
14445
|
+
else {
|
|
14446
|
+
// update
|
|
14447
|
+
this.#moveToTail(index);
|
|
14448
|
+
const oldVal = this.#valList[index];
|
|
14449
|
+
if (v !== oldVal) {
|
|
14450
|
+
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
|
|
14451
|
+
oldVal.__abortController.abort(new Error('replaced'));
|
|
14452
|
+
const { __staleWhileFetching: s } = oldVal;
|
|
14453
|
+
if (s !== undefined && !noDisposeOnSet) {
|
|
14454
|
+
if (this.#hasDispose) {
|
|
14455
|
+
this.#dispose?.(s, k, 'set');
|
|
14456
|
+
}
|
|
14457
|
+
if (this.#hasDisposeAfter) {
|
|
14458
|
+
this.#disposed?.push([s, k, 'set']);
|
|
14459
|
+
}
|
|
14460
|
+
}
|
|
14461
|
+
}
|
|
14462
|
+
else if (!noDisposeOnSet) {
|
|
14463
|
+
if (this.#hasDispose) {
|
|
14464
|
+
this.#dispose?.(oldVal, k, 'set');
|
|
14465
|
+
}
|
|
14466
|
+
if (this.#hasDisposeAfter) {
|
|
14467
|
+
this.#disposed?.push([oldVal, k, 'set']);
|
|
14468
|
+
}
|
|
14469
|
+
}
|
|
14470
|
+
this.#removeItemSize(index);
|
|
14471
|
+
this.#addItemSize(index, size, status);
|
|
14472
|
+
this.#valList[index] = v;
|
|
14473
|
+
if (status) {
|
|
14474
|
+
status.set = 'replace';
|
|
14475
|
+
const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
|
|
14476
|
+
oldVal.__staleWhileFetching
|
|
14477
|
+
: oldVal;
|
|
14478
|
+
if (oldValue !== undefined)
|
|
14479
|
+
status.oldValue = oldValue;
|
|
14480
|
+
}
|
|
14481
|
+
}
|
|
14482
|
+
else if (status) {
|
|
14483
|
+
status.set = 'update';
|
|
14484
|
+
}
|
|
14485
|
+
if (this.#hasOnInsert) {
|
|
14486
|
+
this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
|
|
14487
|
+
}
|
|
14488
|
+
}
|
|
14489
|
+
if (ttl !== 0 && !this.#ttls) {
|
|
14490
|
+
this.#initializeTTLTracking();
|
|
14491
|
+
}
|
|
14492
|
+
if (this.#ttls) {
|
|
14493
|
+
if (!noUpdateTTL) {
|
|
14494
|
+
this.#setItemTTL(index, ttl, start);
|
|
14495
|
+
}
|
|
14496
|
+
if (status)
|
|
14497
|
+
this.#statusTTL(status, index);
|
|
14498
|
+
}
|
|
14499
|
+
if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
|
|
14500
|
+
const dt = this.#disposed;
|
|
14501
|
+
let task;
|
|
14502
|
+
while ((task = dt?.shift())) {
|
|
14503
|
+
this.#disposeAfter?.(...task);
|
|
14504
|
+
}
|
|
14505
|
+
}
|
|
14506
|
+
return this;
|
|
14507
|
+
}
|
|
14508
|
+
/**
|
|
14509
|
+
* Evict the least recently used item, returning its value or
|
|
14510
|
+
* `undefined` if cache is empty.
|
|
14511
|
+
*/
|
|
14512
|
+
// Evict entries from the LRU end until one yields a usable value (or the
// cache empties), returning that value or undefined. The finally block
// drains disposeAfter callbacks queued by the evictions, even when a
// value is returned early.
pop() {
    try {
        while (this.#size) {
            const val = this.#valList[this.#head];
            this.#evict(true);
            if (this.#isBackgroundFetch(val)) {
                if (val.__staleWhileFetching) {
                    // in-flight fetch: its stale value stands in for it
                    return val.__staleWhileFetching;
                }
            }
            else if (val !== undefined) {
                return val;
            }
        }
    }
    finally {
        if (this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
    }
}
|
|
14537
|
+
// Evict the current head (least-recently-used) slot and return its index.
// When `free` is false the caller (set) reuses the slot immediately;
// when true the slot is scrubbed and pushed onto the free list.
#evict(free) {
    const head = this.#head;
    const k = this.#keyList[head];
    const v = this.#valList[head];
    if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
        // cancel an in-flight fetch rather than disposing of a promise
        v.__abortController.abort(new Error('evicted'));
    }
    else if (this.#hasDispose || this.#hasDisposeAfter) {
        if (this.#hasDispose) {
            this.#dispose?.(v, k, 'evict');
        }
        if (this.#hasDisposeAfter) {
            this.#disposed?.push([v, k, 'evict']);
        }
    }
    this.#removeItemSize(head);
    // if we aren't about to use the index, then null these out
    if (free) {
        this.#keyList[head] = undefined;
        this.#valList[head] = undefined;
        this.#free.push(head);
    }
    if (this.#size === 1) {
        // cache is now empty: reset the list pointers entirely
        this.#head = this.#tail = 0;
        this.#free.length = 0;
    }
    else {
        this.#head = this.#next[head];
    }
    this.#keyMap.delete(k);
    this.#size--;
    return head;
}
|
|
14570
|
+
/**
|
|
14571
|
+
* Check if a key is in the cache, without updating the recency of use.
|
|
14572
|
+
* Will return false if the item is stale, even though it is technically
|
|
14573
|
+
* in the cache.
|
|
14574
|
+
*
|
|
14575
|
+
* Check if a key is in the cache, without updating the recency of
|
|
14576
|
+
* use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
|
|
14577
|
+
* to `true` in either the options or the constructor.
|
|
14578
|
+
*
|
|
14579
|
+
* Will return `false` if the item is stale, even though it is technically in
|
|
14580
|
+
* the cache. The difference can be determined (if it matters) by using a
|
|
14581
|
+
* `status` argument, and inspecting the `has` field.
|
|
14582
|
+
*
|
|
14583
|
+
* Will not update item age unless
|
|
14584
|
+
* {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
|
|
14585
|
+
*/
|
|
14586
|
+
// Membership check (see JSDoc above): true only for present, non-stale
// entries; optionally refreshes age and reports hit/stale/miss in status.
has(k, hasOptions = {}) {
    const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
    const index = this.#keyMap.get(k);
    if (index !== undefined) {
        const v = this.#valList[index];
        if (this.#isBackgroundFetch(v) &&
            v.__staleWhileFetching === undefined) {
            // fetch in flight with nothing stale to show: not "in" the cache
            return false;
        }
        if (!this.#isStale(index)) {
            if (updateAgeOnHas) {
                this.#updateItemAge(index);
            }
            if (status) {
                status.has = 'hit';
                this.#statusTTL(status, index);
            }
            return true;
        }
        else if (status) {
            status.has = 'stale';
            this.#statusTTL(status, index);
        }
    }
    else if (status) {
        status.has = 'miss';
    }
    return false;
}
|
|
14615
|
+
/**
|
|
14616
|
+
* Like {@link LRUCache#get} but doesn't update recency or delete stale
|
|
14617
|
+
* items.
|
|
14618
|
+
*
|
|
14619
|
+
* Returns `undefined` if the item is stale, unless
|
|
14620
|
+
* {@link LRUCache.OptionsBase.allowStale} is set.
|
|
14621
|
+
*/
|
|
14622
|
+
peek(k, peekOptions = {}) {
|
|
14623
|
+
const { allowStale = this.allowStale } = peekOptions;
|
|
14624
|
+
const index = this.#keyMap.get(k);
|
|
14625
|
+
if (index === undefined ||
|
|
14626
|
+
(!allowStale && this.#isStale(index))) {
|
|
14627
|
+
return;
|
|
14628
|
+
}
|
|
14629
|
+
const v = this.#valList[index];
|
|
14630
|
+
// either stale and allowed, or forcing a refresh of non-stale value
|
|
14631
|
+
return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
|
14632
|
+
}
|
|
14633
|
+
// Start (or join) a background fetch for key `k` at slot `index`.
// Returns the BackgroundFetch promise stored in #valList, decorated with
// __abortController / __staleWhileFetching / __returned bookkeeping.
#backgroundFetch(k, index, options, context) {
    const v = index === undefined ? undefined : this.#valList[index];
    if (this.#isBackgroundFetch(v)) {
        // a fetch for this key is already in flight; share it
        return v;
    }
    const ac = new AC();
    const { signal } = options;
    // when/if our AC signals, then stop listening to theirs.
    signal?.addEventListener('abort', () => ac.abort(signal.reason), {
        signal: ac.signal,
    });
    const fetchOpts = {
        signal: ac.signal,
        options,
        context,
    };
    // resolution handler: decide whether the fetched value is committed
    // to the cache, discarded, or replaced by the stale value
    const cb = (v, updateCache = false) => {
        const { aborted } = ac.signal;
        const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
        if (options.status) {
            if (aborted && !updateCache) {
                options.status.fetchAborted = true;
                options.status.fetchError = ac.signal.reason;
                if (ignoreAbort)
                    options.status.fetchAbortIgnored = true;
            }
            else {
                options.status.fetchResolved = true;
            }
        }
        if (aborted && !ignoreAbort && !updateCache) {
            return fetchFail(ac.signal.reason);
        }
        // either we didn't abort, and are still here, or we did, and ignored
        const bf = p;
        // if nothing else has been written there but we're set to update the
        // cache and ignore the abort, or if it's still pending on this specific
        // background request, then write it to the cache.
        const vl = this.#valList[index];
        if (vl === p || ignoreAbort && updateCache && vl === undefined) {
            if (v === undefined) {
                if (bf.__staleWhileFetching !== undefined) {
                    // nothing fetched; fall back to the stale value
                    this.#valList[index] = bf.__staleWhileFetching;
                }
                else {
                    this.#delete(k, 'fetch');
                }
            }
            else {
                if (options.status)
                    options.status.fetchUpdated = true;
                this.set(k, v, fetchOpts.options);
            }
        }
        return v;
    };
    // rejection handler: record status, then apply failure policy
    const eb = (er) => {
        if (options.status) {
            options.status.fetchRejected = true;
            options.status.fetchError = er;
        }
        return fetchFail(er);
    };
    // shared failure path for both rejection and abort
    const fetchFail = (er) => {
        const { aborted } = ac.signal;
        const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
        const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
        const noDelete = allowStale || options.noDeleteOnFetchRejection;
        const bf = p;
        if (this.#valList[index] === p) {
            // if we allow stale on fetch rejections, then we need to ensure that
            // the stale value is not removed from the cache when the fetch fails.
            const del = !noDelete || bf.__staleWhileFetching === undefined;
            if (del) {
                this.#delete(k, 'fetch');
            }
            else if (!allowStaleAborted) {
                // still replace the *promise* with the stale value,
                // since we are done with the promise at this point.
                // leave it untouched if we're still waiting for an
                // aborted background fetch that hasn't yet returned.
                this.#valList[index] = bf.__staleWhileFetching;
            }
        }
        if (allowStale) {
            if (options.status && bf.__staleWhileFetching !== undefined) {
                options.status.returnedStale = true;
            }
            return bf.__staleWhileFetching;
        }
        else if (bf.__returned === bf) {
            // only rethrow if the caller actually received this promise
            throw er;
        }
    };
    // executor: invoke the fetchMethod, and wire abort to resolve early
    const pcall = (res, rej) => {
        const fmp = this.#fetchMethod?.(k, v, fetchOpts);
        if (fmp && fmp instanceof Promise) {
            fmp.then(v => res(v === undefined ? undefined : v), rej);
        }
        // ignored, we go until we finish, regardless.
        // defer check until we are actually aborting,
        // so fetchMethod can override.
        ac.signal.addEventListener('abort', () => {
            if (!options.ignoreFetchAbort ||
                options.allowStaleOnFetchAbort) {
                res(undefined);
                // when it eventually resolves, update the cache.
                if (options.allowStaleOnFetchAbort) {
                    res = v => cb(v, true);
                }
            }
        });
    };
    if (options.status)
        options.status.fetchDispatched = true;
    const p = new Promise(pcall).then(cb, eb);
    const bf = Object.assign(p, {
        __abortController: ac,
        __staleWhileFetching: v,
        __returned: undefined,
    });
    if (index === undefined) {
        // internal, don't expose status.
        this.set(k, bf, { ...fetchOpts.options, status: undefined });
        index = this.#keyMap.get(k);
    }
    else {
        this.#valList[index] = bf;
    }
    return bf;
}
|
|
14764
|
+
#isBackgroundFetch(p) {
|
|
14765
|
+
if (!this.#hasFetchMethod)
|
|
14766
|
+
return false;
|
|
14767
|
+
const b = p;
|
|
14768
|
+
return (!!b &&
|
|
14769
|
+
b instanceof Promise &&
|
|
14770
|
+
b.hasOwnProperty('__staleWhileFetching') &&
|
|
14771
|
+
b.__abortController instanceof AC);
|
|
14772
|
+
}
|
|
14773
|
+
// Fetch-through read: behaves like get() when no fetchMethod is
// configured; otherwise coalesces concurrent fetches, serves stale values
// while refreshing, and dispatches a background fetch on miss/stale/
// forceRefresh.
async fetch(k, fetchOptions = {}) {
    const {
    // get options
    allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
    // set options
    ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
    // fetch exclusive options
    noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
    if (!this.#hasFetchMethod) {
        // no fetchMethod: degrade to a plain get()
        if (status)
            status.fetch = 'get';
        return this.get(k, {
            allowStale,
            updateAgeOnGet,
            noDeleteOnStaleGet,
            status,
        });
    }
    const options = {
        allowStale,
        updateAgeOnGet,
        noDeleteOnStaleGet,
        ttl,
        noDisposeOnSet,
        size,
        sizeCalculation,
        noUpdateTTL,
        noDeleteOnFetchRejection,
        allowStaleOnFetchRejection,
        allowStaleOnFetchAbort,
        ignoreFetchAbort,
        status,
        signal,
    };
    let index = this.#keyMap.get(k);
    if (index === undefined) {
        // miss: dispatch a fetch and return its promise
        if (status)
            status.fetch = 'miss';
        const p = this.#backgroundFetch(k, index, options, context);
        return (p.__returned = p);
    }
    else {
        // in cache, maybe already fetching
        const v = this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            const stale = allowStale && v.__staleWhileFetching !== undefined;
            if (status) {
                status.fetch = 'inflight';
                if (stale)
                    status.returnedStale = true;
            }
            // join the in-flight fetch, or serve its stale value
            return stale ? v.__staleWhileFetching : (v.__returned = v);
        }
        // if we force a refresh, that means do NOT serve the cached value,
        // unless we are already in the process of refreshing the cache.
        const isStale = this.#isStale(index);
        if (!forceRefresh && !isStale) {
            // plain hit: update recency (and optionally age) and return
            if (status)
                status.fetch = 'hit';
            this.#moveToTail(index);
            if (updateAgeOnGet) {
                this.#updateItemAge(index);
            }
            if (status)
                this.#statusTTL(status, index);
            return v;
        }
        // ok, it is stale or a forced refresh, and not already fetching.
        // refresh the cache.
        const p = this.#backgroundFetch(k, index, options, context);
        const hasStale = p.__staleWhileFetching !== undefined;
        const staleVal = hasStale && allowStale;
        if (status) {
            status.fetch = isStale ? 'stale' : 'refresh';
            if (staleVal && isStale)
                status.returnedStale = true;
        }
        return staleVal ? p.__staleWhileFetching : (p.__returned = p);
    }
}
|
|
14853
|
+
async forceFetch(k, fetchOptions = {}) {
|
|
14854
|
+
const v = await this.fetch(k, fetchOptions);
|
|
14855
|
+
if (v === undefined)
|
|
14856
|
+
throw new Error('fetch() returned undefined');
|
|
14857
|
+
return v;
|
|
14858
|
+
}
|
|
14859
|
+
memo(k, memoOptions = {}) {
|
|
14860
|
+
const memoMethod = this.#memoMethod;
|
|
14861
|
+
if (!memoMethod) {
|
|
14862
|
+
throw new Error('no memoMethod provided to constructor');
|
|
14863
|
+
}
|
|
14864
|
+
const { context, forceRefresh, ...options } = memoOptions;
|
|
14865
|
+
const v = this.get(k, options);
|
|
14866
|
+
if (!forceRefresh && v !== undefined)
|
|
14867
|
+
return v;
|
|
14868
|
+
const vv = memoMethod(k, v, {
|
|
14869
|
+
options,
|
|
14870
|
+
context,
|
|
14871
|
+
});
|
|
14872
|
+
this.set(k, vv, options);
|
|
14873
|
+
return vv;
|
|
14874
|
+
}
|
|
14875
|
+
/**
|
|
14876
|
+
* Return a value from the cache. Will update the recency of the cache
|
|
14877
|
+
* entry found.
|
|
14878
|
+
*
|
|
14879
|
+
* If the key is not found, get() will return `undefined`.
|
|
14880
|
+
*/
|
|
14881
|
+
// Read an entry, updating recency on hit; stale entries are deleted
// (unless noDeleteOnStaleGet) and only returned when allowStale is set.
get(k, getOptions = {}) {
    const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
    const index = this.#keyMap.get(k);
    if (index !== undefined) {
        const value = this.#valList[index];
        const fetching = this.#isBackgroundFetch(value);
        if (status)
            this.#statusTTL(status, index);
        if (this.#isStale(index)) {
            if (status)
                status.get = 'stale';
            // delete only if not an in-flight background fetch
            if (!fetching) {
                if (!noDeleteOnStaleGet) {
                    this.#delete(k, 'expire');
                }
                if (status && allowStale)
                    status.returnedStale = true;
                return allowStale ? value : undefined;
            }
            else {
                // stale, but a refresh is in flight: serve its stale value
                if (status &&
                    allowStale &&
                    value.__staleWhileFetching !== undefined) {
                    status.returnedStale = true;
                }
                return allowStale ? value.__staleWhileFetching : undefined;
            }
        }
        else {
            if (status)
                status.get = 'hit';
            // if we're currently fetching it, we don't actually have it yet
            // it's not stale, which means this isn't a staleWhileRefetching.
            // If it's not stale, and fetching, AND has a __staleWhileFetching
            // value, then that means the user fetched with {forceRefresh:true},
            // so it's safe to return that value.
            if (fetching) {
                return value.__staleWhileFetching;
            }
            this.#moveToTail(index);
            if (updateAgeOnGet) {
                this.#updateItemAge(index);
            }
            return value;
        }
    }
    else if (status) {
        status.get = 'miss';
    }
}
|
|
14932
|
+
#connect(p, n) {
|
|
14933
|
+
this.#prev[n] = p;
|
|
14934
|
+
this.#next[p] = n;
|
|
14935
|
+
}
|
|
14936
|
+
// Promote `index` to the most-recently-used (tail) position in the
// doubly-linked recency list, relinking its neighbors around it.
#moveToTail(index) {
    // if tail already, nothing to do
    // if head, move head to next[index]
    // else
    //   move next[prev[index]] to next[index] (head has no prev)
    //   move prev[next[index]] to prev[index]
    // prev[index] = tail
    // next[tail] = index
    // tail = index
    if (index !== this.#tail) {
        if (index === this.#head) {
            this.#head = this.#next[index];
        }
        else {
            // unlink index from the middle of the list
            this.#connect(this.#prev[index], this.#next[index]);
        }
        // re-link index after the current tail
        this.#connect(this.#tail, index);
        this.#tail = index;
    }
}
|
|
14956
|
+
/**
|
|
14957
|
+
* Deletes a key out of the cache.
|
|
14958
|
+
*
|
|
14959
|
+
* Returns true if the key was deleted, false otherwise.
|
|
14960
|
+
*/
|
|
14961
|
+
delete(k) {
|
|
14962
|
+
return this.#delete(k, 'delete');
|
|
14963
|
+
}
|
|
14964
|
+
// Internal deletion: unlinks the slot, disposes (or aborts an in-flight
// fetch), recycles the index, and finally drains queued disposeAfter
// callbacks. `reason` is forwarded to dispose handlers.
#delete(k, reason) {
    let deleted = false;
    if (this.#size !== 0) {
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            deleted = true;
            if (this.#size === 1) {
                // removing the only entry: full reset is cheaper/simpler
                this.#clear(reason);
            }
            else {
                this.#removeItemSize(index);
                const v = this.#valList[index];
                if (this.#isBackgroundFetch(v)) {
                    // cancel the in-flight fetch instead of disposing a promise
                    v.__abortController.abort(new Error('deleted'));
                }
                else if (this.#hasDispose || this.#hasDisposeAfter) {
                    if (this.#hasDispose) {
                        this.#dispose?.(v, k, reason);
                    }
                    if (this.#hasDisposeAfter) {
                        this.#disposed?.push([v, k, reason]);
                    }
                }
                this.#keyMap.delete(k);
                this.#keyList[index] = undefined;
                this.#valList[index] = undefined;
                // relink neighbors around the removed slot
                if (index === this.#tail) {
                    this.#tail = this.#prev[index];
                }
                else if (index === this.#head) {
                    this.#head = this.#next[index];
                }
                else {
                    const pi = this.#prev[index];
                    this.#next[pi] = this.#next[index];
                    const ni = this.#next[index];
                    this.#prev[ni] = this.#prev[index];
                }
                this.#size--;
                this.#free.push(index);
            }
        }
    }
    if (this.#hasDisposeAfter && this.#disposed?.length) {
        const dt = this.#disposed;
        let task;
        while ((task = dt?.shift())) {
            this.#disposeAfter?.(...task);
        }
    }
    return deleted;
}
|
|
15016
|
+
/**
|
|
15017
|
+
* Clear the cache entirely, throwing away all values.
|
|
15018
|
+
*/
|
|
15019
|
+
clear() {
|
|
15020
|
+
return this.#clear('delete');
|
|
15021
|
+
}
|
|
15022
|
+
// Internal clear: dispose of (or abort) every entry, zero out all the
// parallel arrays and pointers, then drain queued disposeAfter callbacks.
#clear(reason) {
    for (const index of this.#rindexes({ allowStale: true })) {
        const v = this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            // cancel in-flight fetches rather than disposing of promises
            v.__abortController.abort(new Error('deleted'));
        }
        else {
            const k = this.#keyList[index];
            if (this.#hasDispose) {
                this.#dispose?.(v, k, reason);
            }
            if (this.#hasDisposeAfter) {
                this.#disposed?.push([v, k, reason]);
            }
        }
    }
    // reset all bookkeeping to the empty state
    this.#keyMap.clear();
    this.#valList.fill(undefined);
    this.#keyList.fill(undefined);
    if (this.#ttls && this.#starts) {
        this.#ttls.fill(0);
        this.#starts.fill(0);
    }
    if (this.#sizes) {
        this.#sizes.fill(0);
    }
    this.#head = 0;
    this.#tail = 0;
    this.#free.length = 0;
    this.#calculatedSize = 0;
    this.#size = 0;
    if (this.#hasDisposeAfter && this.#disposed) {
        const dt = this.#disposed;
        let task;
        while ((task = dt?.shift())) {
            this.#disposeAfter?.(...task);
        }
    }
}
|
|
15061
|
+
}
|
|
15062
|
+
|
|
15063
|
+
/**
 * StampedeProtectedCache — three-layer protection against cache stampedes.
 *
 * Protects downstream services (market data APIs, position services) from
 * request bursts during synchronized cache expiration events:
 *  1. Request coalescing — concurrent loads for the same key share one promise.
 *  2. Stale-while-revalidate — stale entries are served while a refresh runs in
 *     the background (within a configurable grace window).
 *  3. Probabilistic early expiration — per-read TTL jitter de-synchronizes
 *     expiry times across callers.
 *
 * @template T Type of cached data (e.g. AlpacaPosition[], MarketQuote).
 *
 * @example
 * ```typescript
 * const positionCache = new StampedeProtectedCache({
 *   maxSize: 1000,
 *   defaultTtl: 30000,               // 30 seconds
 *   staleWhileRevalidateTtl: 60000,  // 60 seconds grace
 *   minJitter: 0.9,
 *   maxJitter: 1.1,
 *   enableBackgroundRefresh: true,
 *   logger: pinoLogger,
 * });
 * const positions = await positionCache.get(accountId, (key) => alpacaApi.getPositions(key));
 * ```
 */
class StampedeProtectedCache {
    /** Underlying LRU store; holds wrapper entries. TTL is managed here, not by LRUCache. */
    cache;
    /** Normalized options with all optional fields resolved to concrete values. */
    options;
    /** key -> in-flight load promise; the mechanism behind request coalescing. */
    pendingRefreshes = new Map();
    /** Running counters exposed via getStats(). */
    stats = {
        totalGets: 0,
        hits: 0,
        misses: 0,
        staleHits: 0,
        coalescedRequests: 0,
        backgroundRefreshes: 0,
        refreshErrors: 0,
    };
    /**
     * @param options - Cache configuration:
     *   - maxSize: maximum number of entries in the LRU store.
     *   - defaultTtl: default freshness window in ms.
     *   - staleWhileRevalidateTtl: grace window in ms (measured from entry
     *     creation) during which stale entries may still be served; defaults
     *     to 2 * defaultTtl.
     *   - minJitter / maxJitter: multiplicative jitter bounds applied to the
     *     TTL on every read (defaults 0.9 / 1.1, i.e. ±10%).
     *   - enableBackgroundRefresh: refresh stale entries asynchronously
     *     instead of blocking the reader (default true).
     *   - logger: object with debug/info/warn/error; defaults to a no-op logger.
     */
    constructor(options) {
        this.options = {
            ...options,
            staleWhileRevalidateTtl: options.staleWhileRevalidateTtl ?? options.defaultTtl * 2,
            minJitter: options.minJitter ?? 0.9,
            maxJitter: options.maxJitter ?? 1.1,
            enableBackgroundRefresh: options.enableBackgroundRefresh ?? true,
            logger: options.logger ?? {
                debug: () => { },
                info: () => { },
                warn: () => { },
                error: () => { },
            },
        };
        // Expiration is handled entirely by this class (jitter + SWR), so the
        // LRU layer only bounds entry count and must never age entries itself.
        this.cache = new LRUCache({
            max: this.options.maxSize,
            ttl: undefined, // We manage TTL ourselves
            allowStale: true,
            updateAgeOnGet: false,
            updateAgeOnHas: false,
        });
        this.options.logger.info('StampedeProtectedCache initialized', {
            maxSize: this.options.maxSize,
            defaultTtl: this.options.defaultTtl,
            staleWhileRevalidateTtl: this.options.staleWhileRevalidateTtl,
            jitterRange: [this.options.minJitter, this.options.maxJitter],
        });
    }
    /**
     * Get a value from the cache, loading it via `loader` if necessary.
     *
     * Behavior:
     *  - fresh entry (now < jittered expiry): returned immediately (HIT);
     *  - stale entry within the grace window and not already refreshing:
     *    returned immediately, background refresh triggered (STALE HIT);
     *  - otherwise: loaded via `loader` with request coalescing (MISS).
     *
     * @param key - Unique cache key (e.g. accountId, "positions:ACCT123").
     * @param loader - Async function invoked with `key` on cache miss.
     * @param ttl - Optional TTL override in ms; defaults to defaultTtl.
     * @returns Promise resolving to cached or freshly loaded data.
     * @throws Whatever `loader` throws, when a load is required and fails.
     */
    async get(key, loader, ttl) {
        this.stats.totalGets++;
        const effectiveTtl = ttl ?? this.options.defaultTtl;
        const now = Date.now();
        // Check if we have a cached entry
        const cached = this.cache.get(key);
        if (cached) {
            cached.accessCount++;
            cached.lastAccessedAt = now;
            // Probabilistic early expiration: jitter the entry's OWN ttl.
            // (Fix: jitter was previously derived from defaultTtl even for
            // entries stored with a custom TTL, skewing their expiry.)
            const jitteredExpiresAt = this.applyJitter(cached.expiresAt, cached.ttl);
            if (now < jitteredExpiresAt) {
                // Fresh hit
                this.stats.hits++;
                this.options.logger.debug('Cache hit (fresh)', { key, age: now - cached.createdAt });
                return cached.value;
            }
            // Check if we can serve stale while revalidating
            const staleExpiresAt = cached.createdAt + this.options.staleWhileRevalidateTtl;
            if (now < staleExpiresAt && !cached.isRefreshing) {
                this.stats.staleHits++;
                this.options.logger.debug('Cache hit (stale-while-revalidate)', {
                    key,
                    age: now - cached.createdAt,
                    staleAge: now - cached.expiresAt
                });
                if (this.options.enableBackgroundRefresh) {
                    this.refreshInBackground(key, loader, effectiveTtl);
                }
                return cached.value;
            }
        }
        // Cache miss or expired beyond grace - need to load (coalesced).
        this.stats.misses++;
        this.options.logger.debug('Cache miss', { key, hadCached: !!cached });
        return this.loadWithCoalescing(key, loader, effectiveTtl);
    }
    /**
     * Manually store a value (pre-warming, computed results).
     *
     * @param key - Unique cache key.
     * @param value - Data to cache.
     * @param ttl - Optional TTL in ms; defaults to defaultTtl.
     */
    set(key, value, ttl) {
        const effectiveTtl = ttl ?? this.options.defaultTtl;
        const now = Date.now();
        const entry = {
            value,
            createdAt: now,
            ttl: effectiveTtl,
            expiresAt: now + effectiveTtl,
            accessCount: 0,
            lastAccessedAt: now,
            isRefreshing: false,
        };
        this.cache.set(key, entry);
        this.options.logger.debug('Cache set', { key, ttl: effectiveTtl });
    }
    /**
     * Check if a key exists in the cache (fresh OR stale).
     * Does not update access statistics or timestamps.
     *
     * @param key - Cache key to check.
     * @returns true if an entry exists, false otherwise.
     */
    has(key) {
        return this.cache.has(key);
    }
    /**
     * Delete a specific key from the cache and drop any pending refresh for it.
     * Useful for cache invalidation when source data changes.
     *
     * @param key - Cache key to delete.
     * @returns true if an entry was deleted, false if the key did not exist.
     */
    delete(key) {
        // Drop the coalesced in-flight load (if any) so a subsequent get()
        // starts a fresh load instead of joining the pre-invalidation one.
        // This matches the documented contract of removing pending refreshes.
        this.pendingRefreshes.delete(key);
        const deleted = this.cache.delete(key);
        if (deleted) {
            this.options.logger.debug('Cache entry deleted', { key });
        }
        return deleted;
    }
    /**
     * Invalidate a key (semantic alias for delete()).
     *
     * @param key - Cache key to invalidate.
     * @returns true if an entry was invalidated, false if the key did not exist.
     */
    invalidate(key) {
        return this.delete(key);
    }
    /**
     * Clear all entries and pending refreshes. Use during system resets or
     * configuration changes requiring fresh data.
     */
    clear() {
        const sizeBefore = this.cache.size;
        this.cache.clear();
        this.pendingRefreshes.clear();
        this.options.logger.info('Cache cleared', { entriesRemoved: sizeBefore });
    }
    /**
     * Get cache statistics for monitoring and analysis.
     *
     * @returns Snapshot of counters, hit ratio, size, and active refresh count.
     */
    getStats() {
        return {
            totalGets: this.stats.totalGets,
            hits: this.stats.hits,
            misses: this.stats.misses,
            staleHits: this.stats.staleHits,
            hitRatio: this.stats.totalGets > 0 ? this.stats.hits / this.stats.totalGets : 0,
            size: this.cache.size,
            maxSize: this.options.maxSize,
            activeRefreshes: this.pendingRefreshes.size,
            coalescedRequests: this.stats.coalescedRequests,
            backgroundRefreshes: this.stats.backgroundRefreshes,
            refreshErrors: this.stats.refreshErrors,
        };
    }
    /**
     * Get all cached keys (fresh or stale). Useful for debugging/inspection.
     *
     * @returns Array of cache keys.
     */
    keys() {
        return Array.from(this.cache.keys());
    }
    /**
     * Current number of cached entries; compare to maxSize for utilization.
     */
    get size() {
        return this.cache.size;
    }
    /**
     * Load data with request coalescing: all concurrent callers for the same
     * key await a single loader invocation.
     */
    async loadWithCoalescing(key, loader, ttl) {
        // Join an already-pending load for this key, if any.
        const existingPromise = this.pendingRefreshes.get(key);
        if (existingPromise) {
            this.stats.coalescedRequests++;
            this.options.logger.debug('Request coalesced', { key });
            return existingPromise;
        }
        // Register our load BEFORE awaiting so concurrent callers can join it.
        const promise = this.loadAndCache(key, loader, ttl);
        this.pendingRefreshes.set(key, promise);
        try {
            const result = await promise;
            return result;
        }
        finally {
            // Only remove OUR promise: delete()/invalidate() may have cleared
            // the slot and a newer load may already be registered under this
            // key; unconditionally deleting would break its coalescing.
            if (this.pendingRefreshes.get(key) === promise) {
                this.pendingRefreshes.delete(key);
            }
        }
    }
    /**
     * Load data via `loader` and cache the result. On failure, records the
     * error on any existing entry and rethrows.
     */
    async loadAndCache(key, loader, ttl) {
        const startTime = Date.now();
        try {
            this.options.logger.debug('Loading data', { key });
            const value = await loader(key);
            // Cache the loaded value
            this.set(key, value, ttl);
            const loadTime = Date.now() - startTime;
            this.options.logger.debug('Data loaded and cached', { key, loadTime });
            return value;
        }
        catch (error) {
            this.stats.refreshErrors++;
            const loadTime = Date.now() - startTime;
            this.options.logger.error('Failed to load data', { key, error, loadTime });
            // Annotate the (possibly stale) cached entry with the failure.
            const cached = this.cache.get(key);
            if (cached) {
                cached.lastError = error;
                cached.isRefreshing = false;
            }
            throw error;
        }
    }
    /**
     * Refresh data in the background without blocking the caller.
     * Failures are logged (and counted by loadAndCache), never thrown.
     */
    refreshInBackground(key, loader, ttl) {
        // Mark the entry as refreshing so concurrent stale reads don't pile up.
        const cached = this.cache.get(key);
        if (cached) {
            cached.isRefreshing = true;
        }
        // Deliberately floating promise: caller must not wait on the refresh.
        this.loadWithCoalescing(key, loader, ttl)
            .then(() => {
            this.stats.backgroundRefreshes++;
            this.options.logger.debug('Background refresh completed', { key });
        })
            .catch((error) => {
            this.options.logger.warn('Background refresh failed', { key, error });
        })
            .finally(() => {
            // Mark as no longer refreshing (the entry may have been replaced).
            const entry = this.cache.get(key);
            if (entry) {
                entry.isRefreshing = false;
            }
        });
    }
    /**
     * Apply probabilistic jitter to an expiration timestamp.
     *
     * @param originalExpiresAt - Unjittered expiry (createdAt + ttl).
     * @param ttl - The TTL that produced `originalExpiresAt`. Defaults to
     *   defaultTtl for backward compatibility with existing callers, but
     *   internal callers pass the entry's own ttl so custom-TTL entries
     *   jitter correctly.
     * @returns Jittered expiry timestamp in ms.
     */
    applyJitter(originalExpiresAt, ttl = this.options.defaultTtl) {
        const range = this.options.maxJitter - this.options.minJitter;
        const jitter = this.options.minJitter + (Math.random() * range);
        const createdAt = originalExpiresAt - ttl;
        const jitteredTtl = ttl * jitter;
        return createdAt + jitteredTtl;
    }
    /**
     * Reset all statistics counters to zero (useful for testing or starting
     * a fresh metrics collection period).
     */
    resetStats() {
        this.stats.totalGets = 0;
        this.stats.hits = 0;
        this.stats.misses = 0;
        this.stats.staleHits = 0;
        this.stats.coalescedRequests = 0;
        this.stats.backgroundRefreshes = 0;
        this.stats.refreshErrors = 0;
    }
}
|
|
15547
|
+
/**
 * Factory for StampedeProtectedCache instances.
 *
 * Convenience alternative to `new StampedeProtectedCache<T>(options)` that
 * lets TypeScript callers rely on type inference.
 *
 * @template T - Type of cached data.
 * @param options - Cache configuration options.
 * @returns A freshly constructed StampedeProtectedCache.
 *
 * @example
 * ```typescript
 * const cache = createStampedeProtectedCache<AlpacaPosition[]>({
 *   maxSize: 1000,
 *   defaultTtl: 30000
 * });
 * ```
 */
function createStampedeProtectedCache(options) {
    const instance = new StampedeProtectedCache(options);
    return instance;
}
|
|
15570
|
+
/**
 * Default cache options for common use cases.
 *
 * @description Baseline configuration for position and market data caching.
 * Tuned for: 30-60s acceptable staleness of position data, API rate-limit
 * protection, bounded memory (1000 entries), and ±10% jitter to prevent
 * synchronized expiration.
 *
 * @example
 * ```typescript
 * // Use defaults, overriding what you need
 * const realtimeCache = new StampedeProtectedCache({
 *   ...DEFAULT_CACHE_OPTIONS,
 *   defaultTtl: 5000, // 5s for real-time quotes
 *   maxSize: 10000
 * });
 * ```
 */
const DEFAULT_CACHE_OPTIONS = {
    maxSize: 1000,
    defaultTtl: 60000, // 1 minute
    staleWhileRevalidateTtl: 120000, // 2 minutes (grace window from creation)
    minJitter: 0.9, // 90%
    maxJitter: 1.1, // 110%
    enableBackgroundRefresh: true,
};
|
|
15606
|
+
|
|
13500
15607
|
// Export factory functions for easier instantiation
|
|
13501
15608
|
const createAlpacaTradingAPI = (credentials) => {
|
|
13502
15609
|
return new AlpacaTradingAPI(credentials);
|
|
@@ -13643,8 +15750,11 @@ const adptc = adaptic;
|
|
|
13643
15750
|
|
|
13644
15751
|
// Public API surface of the bundle (CommonJS named exports).
exports.AlpacaMarketDataAPI = AlpacaMarketDataAPI;
exports.AlpacaTradingAPI = AlpacaTradingAPI;
exports.DEFAULT_CACHE_OPTIONS = DEFAULT_CACHE_OPTIONS;
exports.StampedeProtectedCache = StampedeProtectedCache;
exports.adaptic = adaptic;
exports.adptc = adptc;
exports.createAlpacaMarketDataAPI = createAlpacaMarketDataAPI;
exports.createAlpacaTradingAPI = createAlpacaTradingAPI;
exports.createStampedeProtectedCache = createStampedeProtectedCache;
//# sourceMappingURL=index.cjs.map
|