@adaptic/utils 0.0.366 → 0.0.368

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -13475,6 +13475,2113 @@ class AlpacaTradingAPI {
13475
13475
  }
13476
13476
  }
13477
13477
 
13478
/**
 * @module LRUCache
 */
// Clock source for TTL bookkeeping: use the high-resolution
// `performance.now()` when the global `performance` object provides it,
// otherwise fall back to `Date` (millisecond-resolution `Date.now()`).
const defaultPerf =
    typeof performance === 'object' &&
    performance !== null &&
    typeof performance.now === 'function'
        ? performance
        : Date;
13486
+ const warned = new Set();
13487
/* c8 ignore start */
// Safe handle on Node's `process` global; an empty object stands in when
// running outside Node, so later property reads never throw.
const PROCESS =
    typeof process === 'object' && process !== null ? process : {};
13491
/* c8 ignore start */
// Route warnings through process.emitWarning when available (Node);
// elsewhere, log the same information via console.error.
const emitWarning = (msg, type, code, fn) => {
    if (typeof PROCESS.emitWarning === 'function') {
        PROCESS.emitWarning(msg, type, code, fn);
    } else {
        console.error(`[${code}] ${type}: ${msg}`);
    }
};
13497
// Use the platform AbortController/AbortSignal when present; otherwise
// install a minimal polyfill sufficient for LRUCache.fetch()'s internal
// cancellation needs. The polyfill warns once on first use (see
// warnACPolyfill below) unless suppressed via the environment.
let AC = globalThis.AbortController;
let AS = globalThis.AbortSignal;
/* c8 ignore start */
if (typeof AC === 'undefined') {
    // Minimal AbortSignal stand-in: tracks aborted state and listeners,
    // but does not implement the full EventTarget interface.
    //@ts-ignore
    AS = class AbortSignal {
        onabort;
        _onabort = [];
        reason;
        aborted = false;
        addEventListener(_, fn) {
            this._onabort.push(fn);
        }
    };
    //@ts-ignore
    AC = class AbortController {
        constructor() {
            // warnACPolyfill is declared below but only called at
            // construction time, after module evaluation completes.
            warnACPolyfill();
        }
        signal = new AS();
        abort(reason) {
            // Aborting is idempotent: only the first call takes effect.
            if (this.signal.aborted)
                return;
            //@ts-ignore
            this.signal.reason = reason;
            //@ts-ignore
            this.signal.aborted = true;
            //@ts-ignore
            for (const fn of this.signal._onabort) {
                fn(reason);
            }
            this.signal.onabort?.(reason);
        }
    };
    // One-shot warning flag, suppressible via the environment.
    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
    const warnACPolyfill = () => {
        if (!printACPolyfillWarning)
            return;
        printACPolyfillWarning = false;
        emitWarning('AbortController is not defined. If using lru-cache in ' +
            'node 14, load an AbortController polyfill from the ' +
            '`node-abort-controller` package. A minimal polyfill is ' +
            'provided for use by LRUCache.fetch(), but it should not be ' +
            'relied upon in other contexts (eg, passing it to other APIs that ' +
            'use AbortController/AbortSignal might have undesirable effects). ' +
            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
    };
}
/* c8 ignore stop */
13546
+ const shouldWarn = (code) => !warned.has(code);
13547
+ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
13548
/* c8 ignore start */
// Choose the smallest array class able to hold indexes up to `max`
// (used for the next/prev links and the free-index stack):
// Uint8/16/32Array when the index fits, a zero-filled plain Array
// (ZeroArray) for anything up to Number.MAX_SAFE_INTEGER, and null for
// invalid sizes. Caches beyond 2^32 entries are absurd (the links alone
// would need ~32GB), but completeness here is cheap.
const getUintArray = (max) => {
    if (!isPosInt(max))
        return null;
    if (max <= 2 ** 8)
        return Uint8Array;
    if (max <= 2 ** 16)
        return Uint16Array;
    if (max <= 2 ** 32)
        return Uint32Array;
    if (max <= Number.MAX_SAFE_INTEGER)
        return ZeroArray;
    return null;
};
/* c8 ignore stop */
13564
// A plain Array pre-initialized to all zeroes, standing in for a typed
// array when `max` is too large for Uint32Array indexing.
class ZeroArray extends Array {
    constructor(size) {
        super(size);
        for (let i = 0; i < size; i++) {
            this[i] = 0;
        }
    }
}
13570
// LIFO stack of free entry indexes, backed by the smallest array class
// able to hold them. Instances are built only through the static
// create() factory, which picks a heap class sized to `max`; direct
// construction is rejected.
class Stack {
    heap;
    length;
    // Flipped by create() around construction to permit `new Stack()`.
    static #constructing = false;
    static create(max) {
        const HeapCls = getUintArray(max);
        if (!HeapCls)
            return [];
        Stack.#constructing = true;
        const stack = new Stack(max, HeapCls);
        Stack.#constructing = false;
        return stack;
    }
    constructor(max, HeapCls) {
        /* c8 ignore start */
        if (!Stack.#constructing) {
            throw new TypeError('instantiate Stack using Stack.create(n)');
        }
        /* c8 ignore stop */
        this.heap = new HeapCls(max);
        this.length = 0;
    }
    push(n) {
        const top = this.length;
        this.length = top + 1;
        this.heap[top] = n;
    }
    pop() {
        this.length -= 1;
        return this.heap[this.length];
    }
}
13600
+ /**
13601
+ * Default export, the thing you're using this module to get.
13602
+ *
13603
+ * The `K` and `V` types define the key and value types, respectively. The
13604
+ * optional `FC` type defines the type of the `context` object passed to
13605
+ * `cache.fetch()` and `cache.memo()`.
13606
+ *
13607
+ * Keys and values **must not** be `null` or `undefined`.
13608
+ *
13609
+ * All properties from the options object (with the exception of `max`,
13610
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
13611
+ * added as normal public members. (The listed options are read-only getters.)
13612
+ *
13613
+ * Changing any of these will alter the defaults for subsequent method calls.
13614
+ */
13615
+ class LRUCache {
13616
+ // options that cannot be changed without disaster
13617
+ #max;
13618
+ #maxSize;
13619
+ #dispose;
13620
+ #onInsert;
13621
+ #disposeAfter;
13622
+ #fetchMethod;
13623
+ #memoMethod;
13624
+ #perf;
13625
+ /**
13626
+ * {@link LRUCache.OptionsBase.perf}
13627
+ */
13628
+ get perf() {
13629
+ return this.#perf;
13630
+ }
13631
+ /**
13632
+ * {@link LRUCache.OptionsBase.ttl}
13633
+ */
13634
+ ttl;
13635
+ /**
13636
+ * {@link LRUCache.OptionsBase.ttlResolution}
13637
+ */
13638
+ ttlResolution;
13639
+ /**
13640
+ * {@link LRUCache.OptionsBase.ttlAutopurge}
13641
+ */
13642
+ ttlAutopurge;
13643
+ /**
13644
+ * {@link LRUCache.OptionsBase.updateAgeOnGet}
13645
+ */
13646
+ updateAgeOnGet;
13647
+ /**
13648
+ * {@link LRUCache.OptionsBase.updateAgeOnHas}
13649
+ */
13650
+ updateAgeOnHas;
13651
+ /**
13652
+ * {@link LRUCache.OptionsBase.allowStale}
13653
+ */
13654
+ allowStale;
13655
+ /**
13656
+ * {@link LRUCache.OptionsBase.noDisposeOnSet}
13657
+ */
13658
+ noDisposeOnSet;
13659
+ /**
13660
+ * {@link LRUCache.OptionsBase.noUpdateTTL}
13661
+ */
13662
+ noUpdateTTL;
13663
+ /**
13664
+ * {@link LRUCache.OptionsBase.maxEntrySize}
13665
+ */
13666
+ maxEntrySize;
13667
+ /**
13668
+ * {@link LRUCache.OptionsBase.sizeCalculation}
13669
+ */
13670
+ sizeCalculation;
13671
+ /**
13672
+ * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
13673
+ */
13674
+ noDeleteOnFetchRejection;
13675
+ /**
13676
+ * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
13677
+ */
13678
+ noDeleteOnStaleGet;
13679
+ /**
13680
+ * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
13681
+ */
13682
+ allowStaleOnFetchAbort;
13683
+ /**
13684
+ * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
13685
+ */
13686
+ allowStaleOnFetchRejection;
13687
+ /**
13688
+ * {@link LRUCache.OptionsBase.ignoreFetchAbort}
13689
+ */
13690
+ ignoreFetchAbort;
13691
+ // computed properties
13692
+ #size;
13693
+ #calculatedSize;
13694
+ #keyMap;
13695
+ #keyList;
13696
+ #valList;
13697
+ #next;
13698
+ #prev;
13699
+ #head;
13700
+ #tail;
13701
+ #free;
13702
+ #disposed;
13703
+ #sizes;
13704
+ #starts;
13705
+ #ttls;
13706
+ #hasDispose;
13707
+ #hasFetchMethod;
13708
+ #hasDisposeAfter;
13709
+ #hasOnInsert;
13710
+ /**
13711
+ * Do not call this method unless you need to inspect the
13712
+ * inner workings of the cache. If anything returned by this
13713
+ * object is modified in any way, strange breakage may occur.
13714
+ *
13715
+ * These fields are private for a reason!
13716
+ *
13717
+ * @internal
13718
+ */
13719
+ static unsafeExposeInternals(c) {
13720
+ return {
13721
+ // properties
13722
+ starts: c.#starts,
13723
+ ttls: c.#ttls,
13724
+ sizes: c.#sizes,
13725
+ keyMap: c.#keyMap,
13726
+ keyList: c.#keyList,
13727
+ valList: c.#valList,
13728
+ next: c.#next,
13729
+ prev: c.#prev,
13730
+ get head() {
13731
+ return c.#head;
13732
+ },
13733
+ get tail() {
13734
+ return c.#tail;
13735
+ },
13736
+ free: c.#free,
13737
+ // methods
13738
+ isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
13739
+ backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
13740
+ moveToTail: (index) => c.#moveToTail(index),
13741
+ indexes: (options) => c.#indexes(options),
13742
+ rindexes: (options) => c.#rindexes(options),
13743
+ isStale: (index) => c.#isStale(index),
13744
+ };
13745
+ }
13746
+ // Protected read-only members
13747
+ /**
13748
+ * {@link LRUCache.OptionsBase.max} (read-only)
13749
+ */
13750
+ get max() {
13751
+ return this.#max;
13752
+ }
13753
+ /**
13754
+ * {@link LRUCache.OptionsBase.maxSize} (read-only)
13755
+ */
13756
+ get maxSize() {
13757
+ return this.#maxSize;
13758
+ }
13759
+ /**
13760
+ * The total computed size of items in the cache (read-only)
13761
+ */
13762
+ get calculatedSize() {
13763
+ return this.#calculatedSize;
13764
+ }
13765
+ /**
13766
+ * The number of items stored in the cache (read-only)
13767
+ */
13768
+ get size() {
13769
+ return this.#size;
13770
+ }
13771
+ /**
13772
+ * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
13773
+ */
13774
+ get fetchMethod() {
13775
+ return this.#fetchMethod;
13776
+ }
13777
+ get memoMethod() {
13778
+ return this.#memoMethod;
13779
+ }
13780
+ /**
13781
+ * {@link LRUCache.OptionsBase.dispose} (read-only)
13782
+ */
13783
+ get dispose() {
13784
+ return this.#dispose;
13785
+ }
13786
+ /**
13787
+ * {@link LRUCache.OptionsBase.onInsert} (read-only)
13788
+ */
13789
+ get onInsert() {
13790
+ return this.#onInsert;
13791
+ }
13792
+ /**
13793
+ * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
13794
+ */
13795
+ get disposeAfter() {
13796
+ return this.#disposeAfter;
13797
+ }
13798
+ constructor(options) {
13799
+ const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
13800
+ if (perf !== undefined) {
13801
+ if (typeof perf?.now !== 'function') {
13802
+ throw new TypeError('perf option must have a now() method if specified');
13803
+ }
13804
+ }
13805
+ this.#perf = perf ?? defaultPerf;
13806
+ if (max !== 0 && !isPosInt(max)) {
13807
+ throw new TypeError('max option must be a nonnegative integer');
13808
+ }
13809
+ const UintArray = max ? getUintArray(max) : Array;
13810
+ if (!UintArray) {
13811
+ throw new Error('invalid max value: ' + max);
13812
+ }
13813
+ this.#max = max;
13814
+ this.#maxSize = maxSize;
13815
+ this.maxEntrySize = maxEntrySize || this.#maxSize;
13816
+ this.sizeCalculation = sizeCalculation;
13817
+ if (this.sizeCalculation) {
13818
+ if (!this.#maxSize && !this.maxEntrySize) {
13819
+ throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
13820
+ }
13821
+ if (typeof this.sizeCalculation !== 'function') {
13822
+ throw new TypeError('sizeCalculation set to non-function');
13823
+ }
13824
+ }
13825
+ if (memoMethod !== undefined &&
13826
+ typeof memoMethod !== 'function') {
13827
+ throw new TypeError('memoMethod must be a function if defined');
13828
+ }
13829
+ this.#memoMethod = memoMethod;
13830
+ if (fetchMethod !== undefined &&
13831
+ typeof fetchMethod !== 'function') {
13832
+ throw new TypeError('fetchMethod must be a function if specified');
13833
+ }
13834
+ this.#fetchMethod = fetchMethod;
13835
+ this.#hasFetchMethod = !!fetchMethod;
13836
+ this.#keyMap = new Map();
13837
+ this.#keyList = new Array(max).fill(undefined);
13838
+ this.#valList = new Array(max).fill(undefined);
13839
+ this.#next = new UintArray(max);
13840
+ this.#prev = new UintArray(max);
13841
+ this.#head = 0;
13842
+ this.#tail = 0;
13843
+ this.#free = Stack.create(max);
13844
+ this.#size = 0;
13845
+ this.#calculatedSize = 0;
13846
+ if (typeof dispose === 'function') {
13847
+ this.#dispose = dispose;
13848
+ }
13849
+ if (typeof onInsert === 'function') {
13850
+ this.#onInsert = onInsert;
13851
+ }
13852
+ if (typeof disposeAfter === 'function') {
13853
+ this.#disposeAfter = disposeAfter;
13854
+ this.#disposed = [];
13855
+ }
13856
+ else {
13857
+ this.#disposeAfter = undefined;
13858
+ this.#disposed = undefined;
13859
+ }
13860
+ this.#hasDispose = !!this.#dispose;
13861
+ this.#hasOnInsert = !!this.#onInsert;
13862
+ this.#hasDisposeAfter = !!this.#disposeAfter;
13863
+ this.noDisposeOnSet = !!noDisposeOnSet;
13864
+ this.noUpdateTTL = !!noUpdateTTL;
13865
+ this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
13866
+ this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
13867
+ this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
13868
+ this.ignoreFetchAbort = !!ignoreFetchAbort;
13869
+ // NB: maxEntrySize is set to maxSize if it's set
13870
+ if (this.maxEntrySize !== 0) {
13871
+ if (this.#maxSize !== 0) {
13872
+ if (!isPosInt(this.#maxSize)) {
13873
+ throw new TypeError('maxSize must be a positive integer if specified');
13874
+ }
13875
+ }
13876
+ if (!isPosInt(this.maxEntrySize)) {
13877
+ throw new TypeError('maxEntrySize must be a positive integer if specified');
13878
+ }
13879
+ this.#initializeSizeTracking();
13880
+ }
13881
+ this.allowStale = !!allowStale;
13882
+ this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
13883
+ this.updateAgeOnGet = !!updateAgeOnGet;
13884
+ this.updateAgeOnHas = !!updateAgeOnHas;
13885
+ this.ttlResolution =
13886
+ isPosInt(ttlResolution) || ttlResolution === 0 ?
13887
+ ttlResolution
13888
+ : 1;
13889
+ this.ttlAutopurge = !!ttlAutopurge;
13890
+ this.ttl = ttl || 0;
13891
+ if (this.ttl) {
13892
+ if (!isPosInt(this.ttl)) {
13893
+ throw new TypeError('ttl must be a positive integer if specified');
13894
+ }
13895
+ this.#initializeTTLTracking();
13896
+ }
13897
+ // do not allow completely unbounded caches
13898
+ if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
13899
+ throw new TypeError('At least one of max, maxSize, or ttl is required');
13900
+ }
13901
+ if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
13902
+ const code = 'LRU_CACHE_UNBOUNDED';
13903
+ if (shouldWarn(code)) {
13904
+ warned.add(code);
13905
+ const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
13906
+ 'result in unbounded memory consumption.';
13907
+ emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
13908
+ }
13909
+ }
13910
+ }
13911
+ /**
13912
+ * Return the number of ms left in the item's TTL. If item is not in cache,
13913
+ * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
13914
+ */
13915
+ getRemainingTTL(key) {
13916
+ return this.#keyMap.has(key) ? Infinity : 0;
13917
+ }
13918
+ #initializeTTLTracking() {
13919
+ const ttls = new ZeroArray(this.#max);
13920
+ const starts = new ZeroArray(this.#max);
13921
+ this.#ttls = ttls;
13922
+ this.#starts = starts;
13923
+ this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
13924
+ starts[index] = ttl !== 0 ? start : 0;
13925
+ ttls[index] = ttl;
13926
+ if (ttl !== 0 && this.ttlAutopurge) {
13927
+ const t = setTimeout(() => {
13928
+ if (this.#isStale(index)) {
13929
+ this.#delete(this.#keyList[index], 'expire');
13930
+ }
13931
+ }, ttl + 1);
13932
+ // unref() not supported on all platforms
13933
+ /* c8 ignore start */
13934
+ if (t.unref) {
13935
+ t.unref();
13936
+ }
13937
+ /* c8 ignore stop */
13938
+ }
13939
+ };
13940
+ this.#updateItemAge = index => {
13941
+ starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
13942
+ };
13943
+ this.#statusTTL = (status, index) => {
13944
+ if (ttls[index]) {
13945
+ const ttl = ttls[index];
13946
+ const start = starts[index];
13947
+ /* c8 ignore next */
13948
+ if (!ttl || !start)
13949
+ return;
13950
+ status.ttl = ttl;
13951
+ status.start = start;
13952
+ status.now = cachedNow || getNow();
13953
+ const age = status.now - start;
13954
+ status.remainingTTL = ttl - age;
13955
+ }
13956
+ };
13957
+ // debounce calls to perf.now() to 1s so we're not hitting
13958
+ // that costly call repeatedly.
13959
+ let cachedNow = 0;
13960
+ const getNow = () => {
13961
+ const n = this.#perf.now();
13962
+ if (this.ttlResolution > 0) {
13963
+ cachedNow = n;
13964
+ const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
13965
+ // not available on all platforms
13966
+ /* c8 ignore start */
13967
+ if (t.unref) {
13968
+ t.unref();
13969
+ }
13970
+ /* c8 ignore stop */
13971
+ }
13972
+ return n;
13973
+ };
13974
+ this.getRemainingTTL = key => {
13975
+ const index = this.#keyMap.get(key);
13976
+ if (index === undefined) {
13977
+ return 0;
13978
+ }
13979
+ const ttl = ttls[index];
13980
+ const start = starts[index];
13981
+ if (!ttl || !start) {
13982
+ return Infinity;
13983
+ }
13984
+ const age = (cachedNow || getNow()) - start;
13985
+ return ttl - age;
13986
+ };
13987
+ this.#isStale = index => {
13988
+ const s = starts[index];
13989
+ const t = ttls[index];
13990
+ return !!t && !!s && (cachedNow || getNow()) - s > t;
13991
+ };
13992
+ }
13993
+ // conditionally set private methods related to TTL
13994
+ #updateItemAge = () => { };
13995
+ #statusTTL = () => { };
13996
+ #setItemTTL = () => { };
13997
+ /* c8 ignore stop */
13998
+ #isStale = () => false;
13999
+ #initializeSizeTracking() {
14000
+ const sizes = new ZeroArray(this.#max);
14001
+ this.#calculatedSize = 0;
14002
+ this.#sizes = sizes;
14003
+ this.#removeItemSize = index => {
14004
+ this.#calculatedSize -= sizes[index];
14005
+ sizes[index] = 0;
14006
+ };
14007
+ this.#requireSize = (k, v, size, sizeCalculation) => {
14008
+ // provisionally accept background fetches.
14009
+ // actual value size will be checked when they return.
14010
+ if (this.#isBackgroundFetch(v)) {
14011
+ return 0;
14012
+ }
14013
+ if (!isPosInt(size)) {
14014
+ if (sizeCalculation) {
14015
+ if (typeof sizeCalculation !== 'function') {
14016
+ throw new TypeError('sizeCalculation must be a function');
14017
+ }
14018
+ size = sizeCalculation(v, k);
14019
+ if (!isPosInt(size)) {
14020
+ throw new TypeError('sizeCalculation return invalid (expect positive integer)');
14021
+ }
14022
+ }
14023
+ else {
14024
+ throw new TypeError('invalid size value (must be positive integer). ' +
14025
+ 'When maxSize or maxEntrySize is used, sizeCalculation ' +
14026
+ 'or size must be set.');
14027
+ }
14028
+ }
14029
+ return size;
14030
+ };
14031
+ this.#addItemSize = (index, size, status) => {
14032
+ sizes[index] = size;
14033
+ if (this.#maxSize) {
14034
+ const maxSize = this.#maxSize - sizes[index];
14035
+ while (this.#calculatedSize > maxSize) {
14036
+ this.#evict(true);
14037
+ }
14038
+ }
14039
+ this.#calculatedSize += sizes[index];
14040
+ if (status) {
14041
+ status.entrySize = size;
14042
+ status.totalCalculatedSize = this.#calculatedSize;
14043
+ }
14044
+ };
14045
+ }
14046
+ #removeItemSize = _i => { };
14047
+ #addItemSize = (_i, _s, _st) => { };
14048
+ #requireSize = (_k, _v, size, sizeCalculation) => {
14049
+ if (size || sizeCalculation) {
14050
+ throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
14051
+ }
14052
+ return 0;
14053
+ };
14054
+ *#indexes({ allowStale = this.allowStale } = {}) {
14055
+ if (this.#size) {
14056
+ for (let i = this.#tail; true;) {
14057
+ if (!this.#isValidIndex(i)) {
14058
+ break;
14059
+ }
14060
+ if (allowStale || !this.#isStale(i)) {
14061
+ yield i;
14062
+ }
14063
+ if (i === this.#head) {
14064
+ break;
14065
+ }
14066
+ else {
14067
+ i = this.#prev[i];
14068
+ }
14069
+ }
14070
+ }
14071
+ }
14072
+ *#rindexes({ allowStale = this.allowStale } = {}) {
14073
+ if (this.#size) {
14074
+ for (let i = this.#head; true;) {
14075
+ if (!this.#isValidIndex(i)) {
14076
+ break;
14077
+ }
14078
+ if (allowStale || !this.#isStale(i)) {
14079
+ yield i;
14080
+ }
14081
+ if (i === this.#tail) {
14082
+ break;
14083
+ }
14084
+ else {
14085
+ i = this.#next[i];
14086
+ }
14087
+ }
14088
+ }
14089
+ }
14090
+ #isValidIndex(index) {
14091
+ return (index !== undefined &&
14092
+ this.#keyMap.get(this.#keyList[index]) === index);
14093
+ }
14094
+ /**
14095
+ * Return a generator yielding `[key, value]` pairs,
14096
+ * in order from most recently used to least recently used.
14097
+ */
14098
+ *entries() {
14099
+ for (const i of this.#indexes()) {
14100
+ if (this.#valList[i] !== undefined &&
14101
+ this.#keyList[i] !== undefined &&
14102
+ !this.#isBackgroundFetch(this.#valList[i])) {
14103
+ yield [this.#keyList[i], this.#valList[i]];
14104
+ }
14105
+ }
14106
+ }
14107
+ /**
14108
+ * Inverse order version of {@link LRUCache.entries}
14109
+ *
14110
+ * Return a generator yielding `[key, value]` pairs,
14111
+ * in order from least recently used to most recently used.
14112
+ */
14113
+ *rentries() {
14114
+ for (const i of this.#rindexes()) {
14115
+ if (this.#valList[i] !== undefined &&
14116
+ this.#keyList[i] !== undefined &&
14117
+ !this.#isBackgroundFetch(this.#valList[i])) {
14118
+ yield [this.#keyList[i], this.#valList[i]];
14119
+ }
14120
+ }
14121
+ }
14122
+ /**
14123
+ * Return a generator yielding the keys in the cache,
14124
+ * in order from most recently used to least recently used.
14125
+ */
14126
+ *keys() {
14127
+ for (const i of this.#indexes()) {
14128
+ const k = this.#keyList[i];
14129
+ if (k !== undefined &&
14130
+ !this.#isBackgroundFetch(this.#valList[i])) {
14131
+ yield k;
14132
+ }
14133
+ }
14134
+ }
14135
+ /**
14136
+ * Inverse order version of {@link LRUCache.keys}
14137
+ *
14138
+ * Return a generator yielding the keys in the cache,
14139
+ * in order from least recently used to most recently used.
14140
+ */
14141
+ *rkeys() {
14142
+ for (const i of this.#rindexes()) {
14143
+ const k = this.#keyList[i];
14144
+ if (k !== undefined &&
14145
+ !this.#isBackgroundFetch(this.#valList[i])) {
14146
+ yield k;
14147
+ }
14148
+ }
14149
+ }
14150
+ /**
14151
+ * Return a generator yielding the values in the cache,
14152
+ * in order from most recently used to least recently used.
14153
+ */
14154
+ *values() {
14155
+ for (const i of this.#indexes()) {
14156
+ const v = this.#valList[i];
14157
+ if (v !== undefined &&
14158
+ !this.#isBackgroundFetch(this.#valList[i])) {
14159
+ yield this.#valList[i];
14160
+ }
14161
+ }
14162
+ }
14163
+ /**
14164
+ * Inverse order version of {@link LRUCache.values}
14165
+ *
14166
+ * Return a generator yielding the values in the cache,
14167
+ * in order from least recently used to most recently used.
14168
+ */
14169
+ *rvalues() {
14170
+ for (const i of this.#rindexes()) {
14171
+ const v = this.#valList[i];
14172
+ if (v !== undefined &&
14173
+ !this.#isBackgroundFetch(this.#valList[i])) {
14174
+ yield this.#valList[i];
14175
+ }
14176
+ }
14177
+ }
14178
+ /**
14179
+ * Iterating over the cache itself yields the same results as
14180
+ * {@link LRUCache.entries}
14181
+ */
14182
+ [Symbol.iterator]() {
14183
+ return this.entries();
14184
+ }
14185
+ /**
14186
+ * A String value that is used in the creation of the default string
14187
+ * description of an object. Called by the built-in method
14188
+ * `Object.prototype.toString`.
14189
+ */
14190
+ [Symbol.toStringTag] = 'LRUCache';
14191
+ /**
14192
+ * Find a value for which the supplied fn method returns a truthy value,
14193
+ * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
14194
+ */
14195
+ find(fn, getOptions = {}) {
14196
+ for (const i of this.#indexes()) {
14197
+ const v = this.#valList[i];
14198
+ const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
14199
+ if (value === undefined)
14200
+ continue;
14201
+ if (fn(value, this.#keyList[i], this)) {
14202
+ return this.get(this.#keyList[i], getOptions);
14203
+ }
14204
+ }
14205
+ }
14206
+ /**
14207
+ * Call the supplied function on each item in the cache, in order from most
14208
+ * recently used to least recently used.
14209
+ *
14210
+ * `fn` is called as `fn(value, key, cache)`.
14211
+ *
14212
+ * If `thisp` is provided, function will be called in the `this`-context of
14213
+ * the provided object, or the cache if no `thisp` object is provided.
14214
+ *
14215
+ * Does not update age or recenty of use, or iterate over stale values.
14216
+ */
14217
+ forEach(fn, thisp = this) {
14218
+ for (const i of this.#indexes()) {
14219
+ const v = this.#valList[i];
14220
+ const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
14221
+ if (value === undefined)
14222
+ continue;
14223
+ fn.call(thisp, value, this.#keyList[i], this);
14224
+ }
14225
+ }
14226
+ /**
14227
+ * The same as {@link LRUCache.forEach} but items are iterated over in
14228
+ * reverse order. (ie, less recently used items are iterated over first.)
14229
+ */
14230
+ rforEach(fn, thisp = this) {
14231
+ for (const i of this.#rindexes()) {
14232
+ const v = this.#valList[i];
14233
+ const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
14234
+ if (value === undefined)
14235
+ continue;
14236
+ fn.call(thisp, value, this.#keyList[i], this);
14237
+ }
14238
+ }
14239
+ /**
14240
+ * Delete any stale entries. Returns true if anything was removed,
14241
+ * false otherwise.
14242
+ */
14243
+ purgeStale() {
14244
+ let deleted = false;
14245
+ for (const i of this.#rindexes({ allowStale: true })) {
14246
+ if (this.#isStale(i)) {
14247
+ this.#delete(this.#keyList[i], 'expire');
14248
+ deleted = true;
14249
+ }
14250
+ }
14251
+ return deleted;
14252
+ }
14253
+ /**
14254
+ * Get the extended info about a given entry, to get its value, size, and
14255
+ * TTL info simultaneously. Returns `undefined` if the key is not present.
14256
+ *
14257
+ * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
14258
+ * serialization, the `start` value is always the current timestamp, and the
14259
+ * `ttl` is a calculated remaining time to live (negative if expired).
14260
+ *
14261
+ * Always returns stale values, if their info is found in the cache, so be
14262
+ * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
14263
+ * if relevant.
14264
+ */
14265
+ info(key) {
14266
+ const i = this.#keyMap.get(key);
14267
+ if (i === undefined)
14268
+ return undefined;
14269
+ const v = this.#valList[i];
14270
+ /* c8 ignore start - this isn't tested for the info function,
14271
+ * but it's the same logic as found in other places. */
14272
+ const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
14273
+ if (value === undefined)
14274
+ return undefined;
14275
+ /* c8 ignore end */
14276
+ const entry = { value };
14277
+ if (this.#ttls && this.#starts) {
14278
+ const ttl = this.#ttls[i];
14279
+ const start = this.#starts[i];
14280
+ if (ttl && start) {
14281
+ const remain = ttl - (this.#perf.now() - start);
14282
+ entry.ttl = remain;
14283
+ entry.start = Date.now();
14284
+ }
14285
+ }
14286
+ if (this.#sizes) {
14287
+ entry.size = this.#sizes[i];
14288
+ }
14289
+ return entry;
14290
+ }
14291
+ /**
14292
+ * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
14293
+ * passed to {@link LRUCache#load}.
14294
+ *
14295
+ * The `start` fields are calculated relative to a portable `Date.now()`
14296
+ * timestamp, even if `performance.now()` is available.
14297
+ *
14298
+ * Stale entries are always included in the `dump`, even if
14299
+ * {@link LRUCache.OptionsBase.allowStale} is false.
14300
+ *
14301
+ * Note: this returns an actual array, not a generator, so it can be more
14302
+ * easily passed around.
14303
+ */
14304
+ dump() {
14305
+ const arr = [];
14306
+ for (const i of this.#indexes({ allowStale: true })) {
14307
+ const key = this.#keyList[i];
14308
+ const v = this.#valList[i];
14309
+ const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
14310
+ if (value === undefined || key === undefined)
14311
+ continue;
14312
+ const entry = { value };
14313
+ if (this.#ttls && this.#starts) {
14314
+ entry.ttl = this.#ttls[i];
14315
+ // always dump the start relative to a portable timestamp
14316
+ // it's ok for this to be a bit slow, it's a rare operation.
14317
+ const age = this.#perf.now() - this.#starts[i];
14318
+ entry.start = Math.floor(Date.now() - age);
14319
+ }
14320
+ if (this.#sizes) {
14321
+ entry.size = this.#sizes[i];
14322
+ }
14323
+ arr.unshift([key, entry]);
14324
+ }
14325
+ return arr;
14326
+ }
14327
+ /**
14328
+ * Reset the cache and load in the items in entries in the order listed.
14329
+ *
14330
+ * The shape of the resulting cache may be different if the same options are
14331
+ * not used in both caches.
14332
+ *
14333
+ * The `start` fields are assumed to be calculated relative to a portable
14334
+ * `Date.now()` timestamp, even if `performance.now()` is available.
14335
+ */
14336
+ load(arr) {
14337
+ this.clear();
14338
+ for (const [key, entry] of arr) {
14339
+ if (entry.start) {
14340
+ // entry.start is a portable timestamp, but we may be using
14341
+ // node's performance.now(), so calculate the offset, so that
14342
+ // we get the intended remaining TTL, no matter how long it's
14343
+ // been on ice.
14344
+ //
14345
+ // it's ok for this to be a bit slow, it's a rare operation.
14346
+ const age = Date.now() - entry.start;
14347
+ entry.start = this.#perf.now() - age;
14348
+ }
14349
+ this.set(key, entry.value, entry);
14350
+ }
14351
+ }
14352
+ /**
14353
+ * Add a value to the cache.
14354
+ *
14355
+ * Note: if `undefined` is specified as a value, this is an alias for
14356
+ * {@link LRUCache#delete}
14357
+ *
14358
+ * Fields on the {@link LRUCache.SetOptions} options param will override
14359
+ * their corresponding values in the constructor options for the scope
14360
+ * of this single `set()` operation.
14361
+ *
14362
+ * If `start` is provided, then that will set the effective start
14363
+ * time for the TTL calculation. Note that this must be a previous
14364
+ * value of `performance.now()` if supported, or a previous value of
14365
+ * `Date.now()` if not.
14366
+ *
14367
+ * Options object may also include `size`, which will prevent
14368
+ * calling the `sizeCalculation` function and just use the specified
14369
+ * number if it is a positive integer, and `noDisposeOnSet` which
14370
+ * will prevent calling a `dispose` function in the case of
14371
+ * overwrites.
14372
+ *
14373
+ * If the `size` (or return value of `sizeCalculation`) for a given
14374
+ * entry is greater than `maxEntrySize`, then the item will not be
14375
+ * added to the cache.
14376
+ *
14377
+ * Will update the recency of the entry.
14378
+ *
14379
+ * If the value is `undefined`, then this is an alias for
14380
+ * `cache.delete(key)`. `undefined` is never stored in the cache.
14381
+ */
14382
+ set(k, v, setOptions = {}) {
14383
+ if (v === undefined) {
14384
+ this.delete(k);
14385
+ return this;
14386
+ }
14387
+ const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
14388
+ let { noUpdateTTL = this.noUpdateTTL } = setOptions;
14389
+ const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
14390
+ // if the item doesn't fit, don't do anything
14391
+ // NB: maxEntrySize set to maxSize by default
14392
+ if (this.maxEntrySize && size > this.maxEntrySize) {
14393
+ if (status) {
14394
+ status.set = 'miss';
14395
+ status.maxEntrySizeExceeded = true;
14396
+ }
14397
+ // have to delete, in case something is there already.
14398
+ this.#delete(k, 'set');
14399
+ return this;
14400
+ }
14401
+ let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
14402
+ if (index === undefined) {
14403
+ // addition
14404
+ index = (this.#size === 0 ? this.#tail
14405
+ : this.#free.length !== 0 ? this.#free.pop()
14406
+ : this.#size === this.#max ? this.#evict(false)
14407
+ : this.#size);
14408
+ this.#keyList[index] = k;
14409
+ this.#valList[index] = v;
14410
+ this.#keyMap.set(k, index);
14411
+ this.#next[this.#tail] = index;
14412
+ this.#prev[index] = this.#tail;
14413
+ this.#tail = index;
14414
+ this.#size++;
14415
+ this.#addItemSize(index, size, status);
14416
+ if (status)
14417
+ status.set = 'add';
14418
+ noUpdateTTL = false;
14419
+ if (this.#hasOnInsert) {
14420
+ this.#onInsert?.(v, k, 'add');
14421
+ }
14422
+ }
14423
+ else {
14424
+ // update
14425
+ this.#moveToTail(index);
14426
+ const oldVal = this.#valList[index];
14427
+ if (v !== oldVal) {
14428
+ if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
14429
+ oldVal.__abortController.abort(new Error('replaced'));
14430
+ const { __staleWhileFetching: s } = oldVal;
14431
+ if (s !== undefined && !noDisposeOnSet) {
14432
+ if (this.#hasDispose) {
14433
+ this.#dispose?.(s, k, 'set');
14434
+ }
14435
+ if (this.#hasDisposeAfter) {
14436
+ this.#disposed?.push([s, k, 'set']);
14437
+ }
14438
+ }
14439
+ }
14440
+ else if (!noDisposeOnSet) {
14441
+ if (this.#hasDispose) {
14442
+ this.#dispose?.(oldVal, k, 'set');
14443
+ }
14444
+ if (this.#hasDisposeAfter) {
14445
+ this.#disposed?.push([oldVal, k, 'set']);
14446
+ }
14447
+ }
14448
+ this.#removeItemSize(index);
14449
+ this.#addItemSize(index, size, status);
14450
+ this.#valList[index] = v;
14451
+ if (status) {
14452
+ status.set = 'replace';
14453
+ const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
14454
+ oldVal.__staleWhileFetching
14455
+ : oldVal;
14456
+ if (oldValue !== undefined)
14457
+ status.oldValue = oldValue;
14458
+ }
14459
+ }
14460
+ else if (status) {
14461
+ status.set = 'update';
14462
+ }
14463
+ if (this.#hasOnInsert) {
14464
+ this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
14465
+ }
14466
+ }
14467
+ if (ttl !== 0 && !this.#ttls) {
14468
+ this.#initializeTTLTracking();
14469
+ }
14470
+ if (this.#ttls) {
14471
+ if (!noUpdateTTL) {
14472
+ this.#setItemTTL(index, ttl, start);
14473
+ }
14474
+ if (status)
14475
+ this.#statusTTL(status, index);
14476
+ }
14477
+ if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
14478
+ const dt = this.#disposed;
14479
+ let task;
14480
+ while ((task = dt?.shift())) {
14481
+ this.#disposeAfter?.(...task);
14482
+ }
14483
+ }
14484
+ return this;
14485
+ }
14486
    /**
     * Evict the least recently used item, returning its value or
     * `undefined` if cache is empty.
     */
    pop() {
        try {
            // Loop: an evicted slot may hold an in-flight background fetch
            // with no usable stale value, in which case keep evicting.
            while (this.#size) {
                const val = this.#valList[this.#head];
                this.#evict(true);
                if (this.#isBackgroundFetch(val)) {
                    if (val.__staleWhileFetching) {
                        // return the captured stale value, not the promise
                        return val.__staleWhileFetching;
                    }
                }
                else if (val !== undefined) {
                    return val;
                }
            }
        }
        finally {
            // Always flush deferred disposeAfter callbacks, even on early return.
            if (this.#hasDisposeAfter && this.#disposed) {
                const dt = this.#disposed;
                let task;
                while ((task = dt?.shift())) {
                    this.#disposeAfter?.(...task);
                }
            }
        }
    }
14515
    // Evict the current head (least recently used) entry.
    // `free` true: push the slot index onto the free list for reuse later;
    // `free` false: the caller (set) will reuse the returned index directly.
    // Returns the evicted slot index.
    #evict(free) {
        const head = this.#head;
        const k = this.#keyList[head];
        const v = this.#valList[head];
        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
            // an in-flight fetch is aborted rather than disposed
            v.__abortController.abort(new Error('evicted'));
        }
        else if (this.#hasDispose || this.#hasDisposeAfter) {
            if (this.#hasDispose) {
                this.#dispose?.(v, k, 'evict');
            }
            if (this.#hasDisposeAfter) {
                this.#disposed?.push([v, k, 'evict']);
            }
        }
        this.#removeItemSize(head);
        // if we aren't about to use the index, then null these out
        if (free) {
            this.#keyList[head] = undefined;
            this.#valList[head] = undefined;
            this.#free.push(head);
        }
        if (this.#size === 1) {
            // cache is now empty: reset pointers and the free list entirely
            this.#head = this.#tail = 0;
            this.#free.length = 0;
        }
        else {
            this.#head = this.#next[head];
        }
        this.#keyMap.delete(k);
        this.#size--;
        return head;
    }
14548
    /**
     * Check if a key is in the cache, without updating the recency of use.
     * Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
     * to `true` in either the options or the constructor.
     *
     * Will return `false` if the item is stale, even though it is technically in
     * the cache. The difference can be determined (if it matters) by using a
     * `status` argument, and inspecting the `has` field.
     */
    has(k, hasOptions = {}) {
        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            const v = this.#valList[index];
            // an unresolved background fetch with no stale value is "not here yet"
            if (this.#isBackgroundFetch(v) &&
                v.__staleWhileFetching === undefined) {
                return false;
            }
            if (!this.#isStale(index)) {
                if (updateAgeOnHas) {
                    this.#updateItemAge(index);
                }
                if (status) {
                    status.has = 'hit';
                    this.#statusTTL(status, index);
                }
                return true;
            }
            else if (status) {
                status.has = 'stale';
                this.#statusTTL(status, index);
            }
        }
        else if (status) {
            status.has = 'miss';
        }
        return false;
    }
14593
+ /**
14594
+ * Like {@link LRUCache#get} but doesn't update recency or delete stale
14595
+ * items.
14596
+ *
14597
+ * Returns `undefined` if the item is stale, unless
14598
+ * {@link LRUCache.OptionsBase.allowStale} is set.
14599
+ */
14600
+ peek(k, peekOptions = {}) {
14601
+ const { allowStale = this.allowStale } = peekOptions;
14602
+ const index = this.#keyMap.get(k);
14603
+ if (index === undefined ||
14604
+ (!allowStale && this.#isStale(index))) {
14605
+ return;
14606
+ }
14607
+ const v = this.#valList[index];
14608
+ // either stale and allowed, or forcing a refresh of non-stale value
14609
+ return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
14610
+ }
14611
    // Start (or join) a background fetch for key `k`. Returns the promise
    // that is stored in the value list, decorated with:
    //   __abortController   - aborts this fetch (eviction/replacement/deletion)
    //   __staleWhileFetching - the previous value, servable while pending
    //   __returned           - set once the promise has been handed to a caller
    #backgroundFetch(k, index, options, context) {
        const v = index === undefined ? undefined : this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            // already fetching this key: share the in-flight promise
            return v;
        }
        const ac = new AC();
        const { signal } = options;
        // when/if our AC signals, then stop listening to theirs.
        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
            signal: ac.signal,
        });
        const fetchOpts = {
            signal: ac.signal,
            options,
            context,
        };
        // resolution handler: decides whether the result is written to the cache
        const cb = (v, updateCache = false) => {
            const { aborted } = ac.signal;
            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
            if (options.status) {
                if (aborted && !updateCache) {
                    options.status.fetchAborted = true;
                    options.status.fetchError = ac.signal.reason;
                    if (ignoreAbort)
                        options.status.fetchAbortIgnored = true;
                }
                else {
                    options.status.fetchResolved = true;
                }
            }
            if (aborted && !ignoreAbort && !updateCache) {
                return fetchFail(ac.signal.reason);
            }
            // either we didn't abort, and are still here, or we did, and ignored
            const bf = p;
            // if nothing else has been written there but we're set to update the
            // cache and ignore the abort, or if it's still pending on this specific
            // background request, then write it to the cache.
            const vl = this.#valList[index];
            if (vl === p || ignoreAbort && updateCache && vl === undefined) {
                if (v === undefined) {
                    // fetch produced nothing: restore the stale value or drop the key
                    if (bf.__staleWhileFetching !== undefined) {
                        this.#valList[index] = bf.__staleWhileFetching;
                    }
                    else {
                        this.#delete(k, 'fetch');
                    }
                }
                else {
                    if (options.status)
                        options.status.fetchUpdated = true;
                    this.set(k, v, fetchOpts.options);
                }
            }
            return v;
        };
        // rejection handler: record status, then apply the failure policy
        const eb = (er) => {
            if (options.status) {
                options.status.fetchRejected = true;
                options.status.fetchError = er;
            }
            return fetchFail(er);
        };
        const fetchFail = (er) => {
            const { aborted } = ac.signal;
            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
            const noDelete = allowStale || options.noDeleteOnFetchRejection;
            const bf = p;
            if (this.#valList[index] === p) {
                // if we allow stale on fetch rejections, then we need to ensure that
                // the stale value is not removed from the cache when the fetch fails.
                const del = !noDelete || bf.__staleWhileFetching === undefined;
                if (del) {
                    this.#delete(k, 'fetch');
                }
                else if (!allowStaleAborted) {
                    // still replace the *promise* with the stale value,
                    // since we are done with the promise at this point.
                    // leave it untouched if we're still waiting for an
                    // aborted background fetch that hasn't yet returned.
                    this.#valList[index] = bf.__staleWhileFetching;
                }
            }
            if (allowStale) {
                if (options.status && bf.__staleWhileFetching !== undefined) {
                    options.status.returnedStale = true;
                }
                return bf.__staleWhileFetching;
            }
            else if (bf.__returned === bf) {
                // only rethrow if a caller actually holds this promise
                throw er;
            }
        };
        const pcall = (res, rej) => {
            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
            if (fmp && fmp instanceof Promise) {
                fmp.then(v => res(v === undefined ? undefined : v), rej);
            }
            // ignored, we go until we finish, regardless.
            // defer check until we are actually aborting,
            // so fetchMethod can override.
            ac.signal.addEventListener('abort', () => {
                if (!options.ignoreFetchAbort ||
                    options.allowStaleOnFetchAbort) {
                    res(undefined);
                    // when it eventually resolves, update the cache.
                    if (options.allowStaleOnFetchAbort) {
                        res = v => cb(v, true);
                    }
                }
            });
        };
        if (options.status)
            options.status.fetchDispatched = true;
        const p = new Promise(pcall).then(cb, eb);
        const bf = Object.assign(p, {
            __abortController: ac,
            __staleWhileFetching: v,
            __returned: undefined,
        });
        if (index === undefined) {
            // internal, don't expose status.
            this.set(k, bf, { ...fetchOpts.options, status: undefined });
            index = this.#keyMap.get(k);
        }
        else {
            this.#valList[index] = bf;
        }
        return bf;
    }
14742
+ #isBackgroundFetch(p) {
14743
+ if (!this.#hasFetchMethod)
14744
+ return false;
14745
+ const b = p;
14746
+ return (!!b &&
14747
+ b instanceof Promise &&
14748
+ b.hasOwnProperty('__staleWhileFetching') &&
14749
+ b.__abortController instanceof AC);
14750
+ }
14751
    /**
     * Fetch a value using the configured `fetchMethod`, loading it if it is
     * missing or stale. With no fetchMethod configured this degrades to a
     * plain get(). Concurrent fetches of the same key share one in-flight
     * promise; stale values may be served while the refresh runs, per options.
     */
    async fetch(k, fetchOptions = {}) {
        const {
        // get options
        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
        // set options
        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
        // fetch exclusive options
        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
        if (!this.#hasFetchMethod) {
            // no fetchMethod: behave exactly like get()
            if (status)
                status.fetch = 'get';
            return this.get(k, {
                allowStale,
                updateAgeOnGet,
                noDeleteOnStaleGet,
                status,
            });
        }
        const options = {
            allowStale,
            updateAgeOnGet,
            noDeleteOnStaleGet,
            ttl,
            noDisposeOnSet,
            size,
            sizeCalculation,
            noUpdateTTL,
            noDeleteOnFetchRejection,
            allowStaleOnFetchRejection,
            allowStaleOnFetchAbort,
            ignoreFetchAbort,
            status,
            signal,
        };
        let index = this.#keyMap.get(k);
        if (index === undefined) {
            if (status)
                status.fetch = 'miss';
            const p = this.#backgroundFetch(k, index, options, context);
            return (p.__returned = p);
        }
        else {
            // in cache, maybe already fetching
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                // join the in-flight fetch; serve its stale value if allowed
                const stale = allowStale && v.__staleWhileFetching !== undefined;
                if (status) {
                    status.fetch = 'inflight';
                    if (stale)
                        status.returnedStale = true;
                }
                return stale ? v.__staleWhileFetching : (v.__returned = v);
            }
            // if we force a refresh, that means do NOT serve the cached value,
            // unless we are already in the process of refreshing the cache.
            const isStale = this.#isStale(index);
            if (!forceRefresh && !isStale) {
                // plain hit
                if (status)
                    status.fetch = 'hit';
                this.#moveToTail(index);
                if (updateAgeOnGet) {
                    this.#updateItemAge(index);
                }
                if (status)
                    this.#statusTTL(status, index);
                return v;
            }
            // ok, it is stale or a forced refresh, and not already fetching.
            // refresh the cache.
            const p = this.#backgroundFetch(k, index, options, context);
            const hasStale = p.__staleWhileFetching !== undefined;
            const staleVal = hasStale && allowStale;
            if (status) {
                status.fetch = isStale ? 'stale' : 'refresh';
                if (staleVal && isStale)
                    status.returnedStale = true;
            }
            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
        }
    }
14831
+ async forceFetch(k, fetchOptions = {}) {
14832
+ const v = await this.fetch(k, fetchOptions);
14833
+ if (v === undefined)
14834
+ throw new Error('fetch() returned undefined');
14835
+ return v;
14836
+ }
14837
+ memo(k, memoOptions = {}) {
14838
+ const memoMethod = this.#memoMethod;
14839
+ if (!memoMethod) {
14840
+ throw new Error('no memoMethod provided to constructor');
14841
+ }
14842
+ const { context, forceRefresh, ...options } = memoOptions;
14843
+ const v = this.get(k, options);
14844
+ if (!forceRefresh && v !== undefined)
14845
+ return v;
14846
+ const vv = memoMethod(k, v, {
14847
+ options,
14848
+ context,
14849
+ });
14850
+ this.set(k, vv, options);
14851
+ return vv;
14852
+ }
14853
    /**
     * Return a value from the cache. Will update the recency of the cache
     * entry found.
     *
     * If the key is not found, get() will return `undefined`.
     */
    get(k, getOptions = {}) {
        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            const value = this.#valList[index];
            const fetching = this.#isBackgroundFetch(value);
            if (status)
                this.#statusTTL(status, index);
            if (this.#isStale(index)) {
                if (status)
                    status.get = 'stale';
                // delete only if not an in-flight background fetch
                if (!fetching) {
                    if (!noDeleteOnStaleGet) {
                        this.#delete(k, 'expire');
                    }
                    if (status && allowStale)
                        status.returnedStale = true;
                    return allowStale ? value : undefined;
                }
                else {
                    // stale, but a refresh is already pending: optionally serve
                    // the stale value captured on the in-flight promise
                    if (status &&
                        allowStale &&
                        value.__staleWhileFetching !== undefined) {
                        status.returnedStale = true;
                    }
                    return allowStale ? value.__staleWhileFetching : undefined;
                }
            }
            else {
                if (status)
                    status.get = 'hit';
                // if we're currently fetching it, we don't actually have it yet
                // it's not stale, which means this isn't a staleWhileRefetching.
                // If it's not stale, and fetching, AND has a __staleWhileFetching
                // value, then that means the user fetched with {forceRefresh:true},
                // so it's safe to return that value.
                if (fetching) {
                    return value.__staleWhileFetching;
                }
                this.#moveToTail(index);
                if (updateAgeOnGet) {
                    this.#updateItemAge(index);
                }
                return value;
            }
        }
        else if (status) {
            status.get = 'miss';
        }
    }
14910
+ #connect(p, n) {
14911
+ this.#prev[n] = p;
14912
+ this.#next[p] = n;
14913
+ }
14914
    // Move an existing slot to the tail (most-recently-used) position.
    #moveToTail(index) {
        // if tail already, nothing to do
        // if head, move head to next[index]
        // else
        //   move next[prev[index]] to next[index] (head has no prev)
        //   move prev[next[index]] to prev[index]
        //   prev[index] = tail
        //   next[tail] = index
        //   tail = index
        if (index !== this.#tail) {
            if (index === this.#head) {
                this.#head = this.#next[index];
            }
            else {
                // splice the node out of the middle of the list
                this.#connect(this.#prev[index], this.#next[index]);
            }
            // re-attach it after the current tail
            this.#connect(this.#tail, index);
            this.#tail = index;
        }
    }
14934
    /**
     * Deletes a key out of the cache.
     *
     * Returns true if the key was deleted, false otherwise.
     */
    delete(k) {
        // delegate with reason 'delete' so dispose callbacks see why
        return this.#delete(k, 'delete');
    }
14942
    // Internal delete; `reason` is forwarded to dispose/disposeAfter callbacks.
    // Returns true if an entry was actually removed.
    #delete(k, reason) {
        let deleted = false;
        if (this.#size !== 0) {
            const index = this.#keyMap.get(k);
            if (index !== undefined) {
                deleted = true;
                if (this.#size === 1) {
                    // last entry: a full reset is simpler than unlinking
                    this.#clear(reason);
                }
                else {
                    this.#removeItemSize(index);
                    const v = this.#valList[index];
                    if (this.#isBackgroundFetch(v)) {
                        // an in-flight fetch is aborted rather than disposed
                        v.__abortController.abort(new Error('deleted'));
                    }
                    else if (this.#hasDispose || this.#hasDisposeAfter) {
                        if (this.#hasDispose) {
                            this.#dispose?.(v, k, reason);
                        }
                        if (this.#hasDisposeAfter) {
                            this.#disposed?.push([v, k, reason]);
                        }
                    }
                    this.#keyMap.delete(k);
                    this.#keyList[index] = undefined;
                    this.#valList[index] = undefined;
                    // unlink the slot from the doubly-linked recency list
                    if (index === this.#tail) {
                        this.#tail = this.#prev[index];
                    }
                    else if (index === this.#head) {
                        this.#head = this.#next[index];
                    }
                    else {
                        const pi = this.#prev[index];
                        this.#next[pi] = this.#next[index];
                        const ni = this.#next[index];
                        this.#prev[ni] = this.#prev[index];
                    }
                    this.#size--;
                    this.#free.push(index);
                }
            }
        }
        // flush disposeAfter callbacks queued above (or by #clear)
        if (this.#hasDisposeAfter && this.#disposed?.length) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
        return deleted;
    }
14994
    /**
     * Clear the cache entirely, throwing away all values.
     */
    clear() {
        // reason 'delete' is reported to dispose callbacks for every entry
        return this.#clear('delete');
    }
15000
    // Internal clear; `reason` is forwarded to dispose/disposeAfter callbacks.
    #clear(reason) {
        // visit every occupied slot (allowStale so stale entries are included)
        for (const index of this.#rindexes({ allowStale: true })) {
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                // in-flight fetches are aborted rather than disposed
                v.__abortController.abort(new Error('deleted'));
            }
            else {
                const k = this.#keyList[index];
                if (this.#hasDispose) {
                    this.#dispose?.(v, k, reason);
                }
                if (this.#hasDisposeAfter) {
                    this.#disposed?.push([v, k, reason]);
                }
            }
        }
        // reset all bookkeeping structures to their empty state
        this.#keyMap.clear();
        this.#valList.fill(undefined);
        this.#keyList.fill(undefined);
        if (this.#ttls && this.#starts) {
            this.#ttls.fill(0);
            this.#starts.fill(0);
        }
        if (this.#sizes) {
            this.#sizes.fill(0);
        }
        this.#head = 0;
        this.#tail = 0;
        this.#free.length = 0;
        this.#calculatedSize = 0;
        this.#size = 0;
        // run deferred disposeAfter callbacks now that state is consistent
        if (this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
    }
15039
+ }
15040
+
15041
+ /**
15042
+ * StampedeProtectedCache provides three-layer protection against cache stampedes
15043
+ *
15044
+ * @description High-performance caching system implementing multiple stampede prevention
15045
+ * strategies to protect downstream services (market data APIs, position services) from
15046
+ * request bursts during synchronized cache expiration events.
15047
+ *
15048
+ * @rationale In algorithmic trading, cache stampedes can:
15049
+ * - Overwhelm market data APIs (Alpaca, Polygon) causing rate limiting (200 req/min limits)
15050
+ * - Introduce latency spikes during critical trading windows (market open/close)
15051
+ * - Trigger cascading failures when position data becomes unavailable
15052
+ * - Cause missed trading opportunities due to stale or unavailable data
15053
+ *
15054
+ * Three-layer protection:
15055
+ * 1. Request coalescing - Multiple concurrent requests for the same key share a single promise
15056
+ * 2. Stale-while-revalidate - Serve stale data while refreshing in background
15057
+ * 3. Probabilistic early expiration - Add jitter to prevent synchronized expiration
15058
+ *
15059
+ * @template T - Type of cached data (e.g., AlpacaPosition[], MarketQuote, AccountInfo)
15060
+ *
15061
+ * @example
15062
+ * ```typescript
15063
+ * // Initialize cache for position data
15064
+ * const positionCache = new StampedeProtectedCache<AlpacaPosition[]>({
15065
+ * maxSize: 1000,
15066
+ * defaultTtl: 30000, // 30 seconds
15067
+ * staleWhileRevalidateTtl: 60000, // 60 seconds grace
15068
+ * minJitter: 0.9,
15069
+ * maxJitter: 1.1,
15070
+ * enableBackgroundRefresh: true,
15071
+ * logger: pinoLogger
15072
+ * });
15073
+ *
15074
+ * // Fetch with automatic caching and stampede protection
15075
+ * const positions = await positionCache.get(
15076
+ * accountId,
15077
+ * async (key) => await alpacaApi.getPositions(key)
15078
+ * );
15079
+ * ```
15080
+ *
15081
+ * @businessLogic
15082
+ * 1. On cache.get(), check for existing entry
15083
+ * 2. If found and fresh (< TTL with jitter): return cached value (HIT)
15084
+ * 3. If found but stale (< staleWhileRevalidateTtl): return stale value, trigger background refresh (STALE HIT)
15085
+ * 4. If not found or expired beyond grace period: fetch from source (MISS)
15086
+ * 5. During fetch, coalesce duplicate concurrent requests to single API call
15087
+ * 6. After successful fetch, cache result with jittered TTL to prevent synchronized expiration
15088
+ *
15089
+ * @auditTrail
15090
+ * - All cache operations logged with timestamps and metadata
15091
+ * - Statistics tracked: hits, misses, stale hits, coalesced requests, refresh errors
15092
+ * - Performance metrics exposed via getStats() for monitoring dashboards
15093
+ */
15094
+ class StampedeProtectedCache {
15095
    // Underlying LRU store; holds entry wrappers ({value, createdAt, ...}),
    // not raw values. TTL is managed by this class, not by the LRU.
    cache;
    // Fully-resolved configuration (optional fields defaulted in constructor).
    options;
    // In-flight loader/refresh promises keyed by cache key (request coalescing).
    pendingRefreshes = new Map();
    // Running counters exposed (with derived fields) via getStats().
    stats = {
        totalGets: 0,
        hits: 0,
        misses: 0,
        staleHits: 0,
        coalescedRequests: 0,
        backgroundRefreshes: 0,
        refreshErrors: 0,
    };
15107
    /**
     * @param options - Cache configuration. Optional fields are defaulted:
     * staleWhileRevalidateTtl = 2 * defaultTtl, jitter range [0.9, 1.1],
     * background refresh enabled, and a no-op logger.
     */
    constructor(options) {
        this.options = {
            ...options,
            staleWhileRevalidateTtl: options.staleWhileRevalidateTtl ?? options.defaultTtl * 2,
            minJitter: options.minJitter ?? 0.9,
            maxJitter: options.maxJitter ?? 1.1,
            enableBackgroundRefresh: options.enableBackgroundRefresh ?? true,
            logger: options.logger ?? {
                debug: () => { },
                info: () => { },
                warn: () => { },
                error: () => { },
            },
        };
        // The LRU only bounds entry count; freshness/staleness decisions are
        // made in get() from the entry's own timestamps.
        this.cache = new LRUCache({
            max: this.options.maxSize,
            ttl: undefined, // We manage TTL ourselves
            allowStale: true,
            updateAgeOnGet: false,
            updateAgeOnHas: false,
        });
        this.options.logger.info('StampedeProtectedCache initialized', {
            maxSize: this.options.maxSize,
            defaultTtl: this.options.defaultTtl,
            staleWhileRevalidateTtl: this.options.staleWhileRevalidateTtl,
            jitterRange: [this.options.minJitter, this.options.maxJitter],
        });
    }
15135
+ /**
15136
+ * Get a value from the cache, loading it if necessary
15137
+ *
15138
+ * @description Primary cache access method implementing three-layer stampede protection.
15139
+ * Returns cached data if fresh, serves stale data while refreshing if within grace period,
15140
+ * or fetches fresh data with request coalescing if expired.
15141
+ *
15142
+ * @param key - Unique cache key (e.g., accountId, symbol, "positions:ACCT123")
15143
+ * @param loader - Async function to load data on cache miss
15144
+ * @param ttl - Optional TTL override in milliseconds. If not provided, uses defaultTtl from config
15145
+ *
15146
+ * @returns Promise resolving to cached or freshly loaded data
15147
+ *
15148
+ * @throws Error if loader function fails and no stale data is available
15149
+ *
15150
+ * @example
15151
+ * ```typescript
15152
+ * // Get positions with default TTL
15153
+ * const positions = await cache.get(
15154
+ * accountId,
15155
+ * async (key) => await alpacaApi.getPositions(key)
15156
+ * );
15157
+ *
15158
+ * // Get market quote with custom TTL (5 seconds for real-time data)
15159
+ * const quote = await cache.get(
15160
+ * `quote:${symbol}`,
15161
+ * async (key) => await polygonApi.getQuote(symbol),
15162
+ * 5000
15163
+ * );
15164
+ * ```
15165
+ *
15166
+ * @businessLogic
15167
+ * 1. Increment totalGets counter for statistics
15168
+ * 2. Calculate effective TTL (custom or default)
15169
+ * 3. Attempt cache lookup by key
15170
+ * 4. If entry exists:
15171
+ * a. Increment access count and update lastAccessedAt
15172
+ * b. Apply probabilistic jitter to expiration time
15173
+ * c. If still fresh (now < jitteredExpiresAt): return cached value (HIT)
15174
+ * d. If stale but within grace period (now < staleExpiresAt) and not already refreshing:
15175
+ * - Serve stale value immediately
15176
+ * - Trigger background refresh if enabled
15177
+ * - Return stale value (STALE HIT)
15178
+ * 5. If entry not found or expired beyond grace: load fresh data with coalescing (MISS)
15179
+ */
15180
    async get(key, loader, ttl) {
        this.stats.totalGets++;
        const effectiveTtl = ttl ?? this.options.defaultTtl;
        const now = Date.now();
        // Check if we have a cached entry
        const cached = this.cache.get(key);
        if (cached) {
            cached.accessCount++;
            cached.lastAccessedAt = now;
            // Check if entry is still fresh (considering probabilistic expiration)
            const jitteredExpiresAt = this.applyJitter(cached.expiresAt);
            if (now < jitteredExpiresAt) {
                // Fresh hit
                this.stats.hits++;
                this.options.logger.debug('Cache hit (fresh)', { key, age: now - cached.createdAt });
                return cached.value;
            }
            // Check if we can serve stale while revalidating
            const staleExpiresAt = cached.createdAt + this.options.staleWhileRevalidateTtl;
            if (now < staleExpiresAt && !cached.isRefreshing) {
                // Serve stale and trigger background refresh
                this.stats.staleHits++;
                this.options.logger.debug('Cache hit (stale-while-revalidate)', {
                    key,
                    age: now - cached.createdAt,
                    staleAge: now - cached.expiresAt
                });
                if (this.options.enableBackgroundRefresh) {
                    // NOTE(review): intentionally not awaited; assumes
                    // refreshInBackground handles its own rejections (see
                    // stats.refreshErrors) — confirm it never rejects unhandled.
                    this.refreshInBackground(key, loader, effectiveTtl);
                }
                return cached.value;
            }
        }
        // Cache miss or expired - need to load
        this.stats.misses++;
        this.options.logger.debug('Cache miss', { key, hadCached: !!cached });
        return this.loadWithCoalescing(key, loader, effectiveTtl);
    }
15218
+ /**
15219
+ * Set a value in the cache
15220
+ *
15221
+ * @description Manually store a value in the cache with optional custom TTL.
15222
+ * Useful for pre-warming cache or storing computed results.
15223
+ *
15224
+ * @param key - Unique cache key
15225
+ * @param value - Data to cache
15226
+ * @param ttl - Optional TTL in milliseconds. If not provided, uses defaultTtl
15227
+ *
15228
+ * @returns void
15229
+ *
15230
+ * @example
15231
+ * ```typescript
15232
+ * // Pre-warm cache with known data
15233
+ * cache.set('positions:ACCT123', positions, 30000);
15234
+ *
15235
+ * // Cache computed result
15236
+ * const aggregatedData = computeAggregation(positions);
15237
+ * cache.set('aggregated:ACCT123', aggregatedData, 60000);
15238
+ * ```
15239
+ */
15240
+ set(key, value, ttl) {
15241
+ const effectiveTtl = ttl ?? this.options.defaultTtl;
15242
+ const now = Date.now();
15243
+ const entry = {
15244
+ value,
15245
+ createdAt: now,
15246
+ ttl: effectiveTtl,
15247
+ expiresAt: now + effectiveTtl,
15248
+ accessCount: 0,
15249
+ lastAccessedAt: now,
15250
+ isRefreshing: false,
15251
+ };
15252
+ this.cache.set(key, entry);
15253
+ this.options.logger.debug('Cache set', { key, ttl: effectiveTtl });
15254
+ }
15255
+ /**
15256
+ * Check if a key exists in the cache (regardless of expiration)
15257
+ *
15258
+ * @description Checks for cache entry existence without considering TTL or freshness.
15259
+ * Does not update access statistics or timestamps.
15260
+ *
15261
+ * @param key - Cache key to check
15262
+ *
15263
+ * @returns true if entry exists (fresh or stale), false otherwise
15264
+ *
15265
+ * @example
15266
+ * ```typescript
15267
+ * if (cache.has(accountId)) {
15268
+ * // Entry exists, may be fresh or stale
15269
+ * }
15270
+ * ```
15271
+ */
15272
    has(key) {
        // Existence check only: does not consult this class's TTL metadata,
        // and does not touch stats or access timestamps.
        return this.cache.has(key);
    }
15275
+ /**
15276
+ * Delete a specific key from the cache
15277
+ *
15278
+ * @description Immediately removes cache entry and any pending refreshes for the key.
15279
+ * Useful for cache invalidation when source data changes.
15280
+ *
15281
+ * @param key - Cache key to delete
15282
+ *
15283
+ * @returns true if entry was deleted, false if key did not exist
15284
+ *
15285
+ * @example
15286
+ * ```typescript
15287
+ * // Invalidate after position update
15288
+ * await alpacaApi.submitOrder(order);
15289
+ * cache.delete(`positions:${accountId}`);
15290
+ * ```
15291
+ */
15292
+ delete(key) {
15293
+ const deleted = this.cache.delete(key);
15294
+ if (deleted) {
15295
+ this.options.logger.debug('Cache entry deleted', { key });
15296
+ }
15297
+ return deleted;
15298
+ }
15299
+ /**
15300
+ * Invalidate a key (alias for delete)
15301
+ *
15302
+ * @description Semantic alias for delete() method. Use for clarity when invalidating
15303
+ * cache after data mutations.
15304
+ *
15305
+ * @param key - Cache key to invalidate
15306
+ *
15307
+ * @returns true if entry was invalidated, false if key did not exist
15308
+ *
15309
+ * @example
15310
+ * ```typescript
15311
+ * // Invalidate after trade execution
15312
+ * cache.invalidate(`positions:${accountId}`);
15313
+ * ```
15314
+ */
15315
+ invalidate(key) {
15316
+ return this.delete(key);
15317
+ }
15318
+ /**
15319
+ * Clear all entries from the cache
15320
+ *
15321
+ * @description Removes all cached entries and pending refreshes. Use during system
15322
+ * resets or configuration changes requiring fresh data.
15323
+ *
15324
+ * @returns void
15325
+ *
15326
+ * @example
15327
+ * ```typescript
15328
+ * // Clear cache during market hours transition
15329
+ * if (marketJustOpened) {
15330
+ * cache.clear();
15331
+ * }
15332
+ * ```
15333
+ */
15334
+ clear() {
15335
+ const sizeBefore = this.cache.size;
15336
+ this.cache.clear();
15337
+ this.pendingRefreshes.clear();
15338
+ this.options.logger.info('Cache cleared', { entriesRemoved: sizeBefore });
15339
+ }
15340
+ /**
15341
+ * Get cache statistics
15342
+ *
15343
+ * @description Returns comprehensive performance metrics for monitoring and analysis.
15344
+ * Statistics include hit/miss ratios, active refreshes, coalesced requests, and errors.
15345
+ *
15346
+ * @returns CacheStats object with current performance metrics
15347
+ *
15348
+ * @example
15349
+ * ```typescript
15350
+ * const stats = cache.getStats();
15351
+ * logger.info('Cache performance', {
15352
+ * hitRatio: stats.hitRatio,
15353
+ * size: stats.size,
15354
+ * activeRefreshes: stats.activeRefreshes
15355
+ * });
15356
+ *
15357
+ * // Alert on poor performance
15358
+ * if (stats.hitRatio < 0.7) {
15359
+ * alerting.send('Low cache hit ratio', stats);
15360
+ * }
15361
+ * ```
15362
+ */
15363
+ getStats() {
15364
+ return {
15365
+ totalGets: this.stats.totalGets,
15366
+ hits: this.stats.hits,
15367
+ misses: this.stats.misses,
15368
+ staleHits: this.stats.staleHits,
15369
+ hitRatio: this.stats.totalGets > 0 ? this.stats.hits / this.stats.totalGets : 0,
15370
+ size: this.cache.size,
15371
+ maxSize: this.options.maxSize,
15372
+ activeRefreshes: this.pendingRefreshes.size,
15373
+ coalescedRequests: this.stats.coalescedRequests,
15374
+ backgroundRefreshes: this.stats.backgroundRefreshes,
15375
+ refreshErrors: this.stats.refreshErrors,
15376
+ };
15377
+ }
15378
+ /**
15379
+ * Get all cached keys
15380
+ *
15381
+ * @description Returns array of all cache keys currently stored, regardless of freshness.
15382
+ * Useful for debugging and cache inspection.
15383
+ *
15384
+ * @returns Array of cache keys
15385
+ *
15386
+ * @example
15387
+ * ```typescript
15388
+ * const keys = cache.keys();
15389
+ * console.log('Cached accounts:', keys);
15390
+ * // ['positions:ACCT123', 'positions:ACCT456', 'quote:AAPL']
15391
+ * ```
15392
+ */
15393
+ keys() {
15394
+ return Array.from(this.cache.keys());
15395
+ }
15396
+ /**
15397
+ * Get the size of the cache
15398
+ *
15399
+ * @description Returns current number of entries in cache. Compare to maxSize to
15400
+ * monitor capacity utilization.
15401
+ *
15402
+ * @returns Number of cached entries
15403
+ *
15404
+ * @example
15405
+ * ```typescript
15406
+ * const utilizationPct = (cache.size / cache.getStats().maxSize) * 100;
15407
+ * if (utilizationPct > 90) {
15408
+ * logger.warn('Cache near capacity', { size: cache.size });
15409
+ * }
15410
+ * ```
15411
+ */
15412
+ get size() {
15413
+ return this.cache.size;
15414
+ }
15415
+ /**
15416
+ * Load data with request coalescing to prevent duplicate requests
15417
+ */
15418
+ async loadWithCoalescing(key, loader, ttl) {
15419
+ // Check if there's already a pending refresh for this key
15420
+ const existingPromise = this.pendingRefreshes.get(key);
15421
+ if (existingPromise) {
15422
+ this.stats.coalescedRequests++;
15423
+ this.options.logger.debug('Request coalesced', { key });
15424
+ return existingPromise;
15425
+ }
15426
+ // Create new promise and store it
15427
+ const promise = this.loadAndCache(key, loader, ttl);
15428
+ this.pendingRefreshes.set(key, promise);
15429
+ try {
15430
+ const result = await promise;
15431
+ return result;
15432
+ }
15433
+ finally {
15434
+ // Clean up the pending promise
15435
+ this.pendingRefreshes.delete(key);
15436
+ }
15437
+ }
15438
+ /**
15439
+ * Load data and cache it
15440
+ */
15441
+ async loadAndCache(key, loader, ttl) {
15442
+ const startTime = Date.now();
15443
+ try {
15444
+ this.options.logger.debug('Loading data', { key });
15445
+ const value = await loader(key);
15446
+ // Cache the loaded value
15447
+ this.set(key, value, ttl);
15448
+ const loadTime = Date.now() - startTime;
15449
+ this.options.logger.debug('Data loaded and cached', { key, loadTime });
15450
+ return value;
15451
+ }
15452
+ catch (error) {
15453
+ this.stats.refreshErrors++;
15454
+ const loadTime = Date.now() - startTime;
15455
+ this.options.logger.error('Failed to load data', { key, error, loadTime });
15456
+ // Update cached entry with error if it exists
15457
+ const cached = this.cache.get(key);
15458
+ if (cached) {
15459
+ cached.lastError = error;
15460
+ cached.isRefreshing = false;
15461
+ }
15462
+ throw error;
15463
+ }
15464
+ }
15465
+ /**
15466
+ * Refresh data in the background
15467
+ */
15468
+ refreshInBackground(key, loader, ttl) {
15469
+ // Mark the entry as refreshing
15470
+ const cached = this.cache.get(key);
15471
+ if (cached) {
15472
+ cached.isRefreshing = true;
15473
+ }
15474
+ // Don't wait for the refresh to complete
15475
+ this.loadWithCoalescing(key, loader, ttl)
15476
+ .then(() => {
15477
+ this.stats.backgroundRefreshes++;
15478
+ this.options.logger.debug('Background refresh completed', { key });
15479
+ })
15480
+ .catch((error) => {
15481
+ this.options.logger.warn('Background refresh failed', { key, error });
15482
+ })
15483
+ .finally(() => {
15484
+ // Mark as no longer refreshing
15485
+ const entry = this.cache.get(key);
15486
+ if (entry) {
15487
+ entry.isRefreshing = false;
15488
+ }
15489
+ });
15490
+ }
15491
+ /**
15492
+ * Apply probabilistic jitter to expiration time
15493
+ */
15494
+ applyJitter(originalExpiresAt) {
15495
+ const range = this.options.maxJitter - this.options.minJitter;
15496
+ const jitter = this.options.minJitter + (Math.random() * range);
15497
+ const createdAt = originalExpiresAt - this.options.defaultTtl;
15498
+ const jitteredTtl = this.options.defaultTtl * jitter;
15499
+ return createdAt + jitteredTtl;
15500
+ }
15501
+ /**
15502
+ * Reset statistics (useful for testing)
15503
+ *
15504
+ * @description Clears all performance counters to zero. Use for testing or when starting
15505
+ * fresh metrics collection period.
15506
+ *
15507
+ * @returns void
15508
+ *
15509
+ * @example
15510
+ * ```typescript
15511
+ * // Reset stats at start of trading day
15512
+ * cache.resetStats();
15513
+ * ```
15514
+ */
15515
+ resetStats() {
15516
+ this.stats.totalGets = 0;
15517
+ this.stats.hits = 0;
15518
+ this.stats.misses = 0;
15519
+ this.stats.staleHits = 0;
15520
+ this.stats.coalescedRequests = 0;
15521
+ this.stats.backgroundRefreshes = 0;
15522
+ this.stats.refreshErrors = 0;
15523
+ }
15524
+ }
15525
/**
 * Factory for StampedeProtectedCache instances.
 *
 * @description Convenience alternative to `new StampedeProtectedCache<T>()`
 * that lets TypeScript callers lean on type inference.
 *
 * @template T - Type of cached data
 * @param options - Cache configuration options
 *
 * @returns New StampedeProtectedCache instance
 *
 * @example
 * ```typescript
 * // Type is automatically inferred
 * const cache = createStampedeProtectedCache<AlpacaPosition[]>({
 *   maxSize: 1000,
 *   defaultTtl: 30000
 * });
 * ```
 */
function createStampedeProtectedCache(options) {
    const instance = new StampedeProtectedCache(options);
    return instance;
}
15548
/**
 * Production-tested default cache configuration.
 *
 * @description Balanced settings for position and market-data caching:
 * - 1 minute freshness with 2 minutes stale-while-revalidate headroom
 *   (30–60s staleness is acceptable for position data)
 * - 1000-entry cap for memory efficiency (≈10MB of typical position data)
 * - ±10% expiry jitter to prevent synchronized expiration stampedes
 * - background refresh enabled to protect API rate limits
 *   (Alpaca: 200 req/min)
 *
 * @example
 * ```typescript
 * // Use defaults for quick setup
 * const cache = new StampedeProtectedCache({
 *   ...DEFAULT_CACHE_OPTIONS,
 *   logger: customLogger
 * });
 *
 * // Override specific settings
 * const realtimeCache = new StampedeProtectedCache({
 *   ...DEFAULT_CACHE_OPTIONS,
 *   defaultTtl: 5000, // 5s for real-time quotes
 *   maxSize: 10000
 * });
 * ```
 */
const DEFAULT_CACHE_OPTIONS = {
    maxSize: 1000,
    defaultTtl: 60 * 1000, // fresh for 1 minute
    staleWhileRevalidateTtl: 2 * 60 * 1000, // serve stale up to 2 minutes while refreshing
    minJitter: 0.9, // expire as early as 90% of TTL…
    maxJitter: 1.1, // …or as late as 110% of TTL
    enableBackgroundRefresh: true,
};
15584
+
13478
15585
  // Export factory functions for easier instantiation
13479
15586
  const createAlpacaTradingAPI = (credentials) => {
13480
15587
  return new AlpacaTradingAPI(credentials);
@@ -13619,5 +15726,5 @@ const adaptic = {
13619
15726
  };
13620
15727
  const adptc = adaptic;
13621
15728
 
13622
- export { AlpacaMarketDataAPI, AlpacaTradingAPI, adaptic, adptc, createAlpacaMarketDataAPI, createAlpacaTradingAPI };
15729
+ export { AlpacaMarketDataAPI, AlpacaTradingAPI, DEFAULT_CACHE_OPTIONS, StampedeProtectedCache, adaptic, adptc, createAlpacaMarketDataAPI, createAlpacaTradingAPI, createStampedeProtectedCache };
13623
15730
  //# sourceMappingURL=index.mjs.map