@nocobase/plugin-workflow 0.13.0-alpha.9 → 0.14.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/index.js +16 -16
- package/dist/externalVersion.js +24 -0
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/LICENSE +15 -0
- package/dist/node_modules/lru-cache/dist/cjs/index-cjs.js +1 -0
- package/dist/node_modules/lru-cache/dist/cjs/package.json +3 -0
- package/dist/node_modules/lru-cache/dist/mjs/index.d.ts +807 -0
- package/dist/node_modules/lru-cache/dist/mjs/index.js +1330 -0
- package/dist/node_modules/lru-cache/dist/mjs/index.min.js +2 -0
- package/dist/node_modules/lru-cache/dist/mjs/package.json +3 -0
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/server/Plugin.d.ts +1 -0
- package/dist/server/Plugin.js +37 -29
- package/package.json +2 -2
- package/dist/node_modules/lru-cache/index-cjs.js +0 -1
- /package/dist/node_modules/lru-cache/{index-cjs.d.ts → dist/cjs/index-cjs.d.ts} +0 -0
- /package/dist/node_modules/lru-cache/{index.d.ts → dist/cjs/index.d.ts} +0 -0
- /package/dist/node_modules/lru-cache/{index.js → dist/cjs/index.js} +0 -0
- /package/dist/node_modules/lru-cache/{index.min.js → dist/cjs/index.min.js} +0 -0
|
@@ -0,0 +1,1330 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module LRUCache
|
|
3
|
+
*/
|
|
4
|
+
const perf = typeof performance === 'object' &&
|
|
5
|
+
performance &&
|
|
6
|
+
typeof performance.now === 'function'
|
|
7
|
+
? performance
|
|
8
|
+
: Date;
|
|
9
|
+
const warned = new Set();
|
|
10
|
+
const emitWarning = (msg, type, code, fn) => {
|
|
11
|
+
typeof process === 'object' &&
|
|
12
|
+
process &&
|
|
13
|
+
typeof process.emitWarning === 'function'
|
|
14
|
+
? process.emitWarning(msg, type, code, fn)
|
|
15
|
+
: console.error(`[${code}] ${type}: ${msg}`);
|
|
16
|
+
};
|
|
17
|
+
const shouldWarn = (code) => !warned.has(code);
|
|
18
|
+
const TYPE = Symbol('type');
|
|
19
|
+
const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
|
|
20
|
+
/* c8 ignore start */
|
|
21
|
+
// This is a little bit ridiculous, tbh.
|
|
22
|
+
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
|
|
23
|
+
// And well before that point, you're caching the entire world, I mean,
|
|
24
|
+
// that's ~32GB of just integers for the next/prev links, plus whatever
|
|
25
|
+
// else to hold that many keys and values. Just filling the memory with
|
|
26
|
+
// zeroes at init time is brutal when you get that big.
|
|
27
|
+
// But why not be complete?
|
|
28
|
+
// Maybe in the future, these limits will have expanded.
|
|
29
|
+
const getUintArray = (max) => !isPosInt(max)
|
|
30
|
+
? null
|
|
31
|
+
: max <= Math.pow(2, 8)
|
|
32
|
+
? Uint8Array
|
|
33
|
+
: max <= Math.pow(2, 16)
|
|
34
|
+
? Uint16Array
|
|
35
|
+
: max <= Math.pow(2, 32)
|
|
36
|
+
? Uint32Array
|
|
37
|
+
: max <= Number.MAX_SAFE_INTEGER
|
|
38
|
+
? ZeroArray
|
|
39
|
+
: null;
|
|
40
|
+
/* c8 ignore stop */
|
|
41
|
+
class ZeroArray extends Array {
|
|
42
|
+
constructor(size) {
|
|
43
|
+
super(size);
|
|
44
|
+
this.fill(0);
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
class Stack {
|
|
48
|
+
heap;
|
|
49
|
+
length;
|
|
50
|
+
// private constructor
|
|
51
|
+
static #constructing = false;
|
|
52
|
+
static create(max) {
|
|
53
|
+
const HeapCls = getUintArray(max);
|
|
54
|
+
if (!HeapCls)
|
|
55
|
+
return [];
|
|
56
|
+
Stack.#constructing = true;
|
|
57
|
+
const s = new Stack(max, HeapCls);
|
|
58
|
+
Stack.#constructing = false;
|
|
59
|
+
return s;
|
|
60
|
+
}
|
|
61
|
+
constructor(max, HeapCls) {
|
|
62
|
+
/* c8 ignore start */
|
|
63
|
+
if (!Stack.#constructing) {
|
|
64
|
+
throw new TypeError('instantiate Stack using Stack.create(n)');
|
|
65
|
+
}
|
|
66
|
+
/* c8 ignore stop */
|
|
67
|
+
this.heap = new HeapCls(max);
|
|
68
|
+
this.length = 0;
|
|
69
|
+
}
|
|
70
|
+
push(n) {
|
|
71
|
+
this.heap[this.length++] = n;
|
|
72
|
+
}
|
|
73
|
+
pop() {
|
|
74
|
+
return this.heap[--this.length];
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
/**
|
|
78
|
+
* Default export, the thing you're using this module to get.
|
|
79
|
+
*
|
|
80
|
+
* All properties from the options object (with the exception of
|
|
81
|
+
* {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
|
|
82
|
+
* normal public members. (`max` and `maxBase` are read-only getters.)
|
|
83
|
+
* Changing any of these will alter the defaults for subsequent method calls,
|
|
84
|
+
* but is otherwise safe.
|
|
85
|
+
*/
|
|
86
|
+
export class LRUCache {
|
|
87
|
+
// properties coming in from the options of these, only max and maxSize
|
|
88
|
+
// really *need* to be protected. The rest can be modified, as they just
|
|
89
|
+
// set defaults for various methods.
|
|
90
|
+
#max;
|
|
91
|
+
#maxSize;
|
|
92
|
+
#dispose;
|
|
93
|
+
#disposeAfter;
|
|
94
|
+
#fetchMethod;
|
|
95
|
+
/**
|
|
96
|
+
* {@link LRUCache.OptionsBase.ttl}
|
|
97
|
+
*/
|
|
98
|
+
ttl;
|
|
99
|
+
/**
|
|
100
|
+
* {@link LRUCache.OptionsBase.ttlResolution}
|
|
101
|
+
*/
|
|
102
|
+
ttlResolution;
|
|
103
|
+
/**
|
|
104
|
+
* {@link LRUCache.OptionsBase.ttlAutopurge}
|
|
105
|
+
*/
|
|
106
|
+
ttlAutopurge;
|
|
107
|
+
/**
|
|
108
|
+
* {@link LRUCache.OptionsBase.updateAgeOnGet}
|
|
109
|
+
*/
|
|
110
|
+
updateAgeOnGet;
|
|
111
|
+
/**
|
|
112
|
+
* {@link LRUCache.OptionsBase.updateAgeOnHas}
|
|
113
|
+
*/
|
|
114
|
+
updateAgeOnHas;
|
|
115
|
+
/**
|
|
116
|
+
* {@link LRUCache.OptionsBase.allowStale}
|
|
117
|
+
*/
|
|
118
|
+
allowStale;
|
|
119
|
+
/**
|
|
120
|
+
* {@link LRUCache.OptionsBase.noDisposeOnSet}
|
|
121
|
+
*/
|
|
122
|
+
noDisposeOnSet;
|
|
123
|
+
/**
|
|
124
|
+
* {@link LRUCache.OptionsBase.noUpdateTTL}
|
|
125
|
+
*/
|
|
126
|
+
noUpdateTTL;
|
|
127
|
+
/**
|
|
128
|
+
* {@link LRUCache.OptionsBase.maxEntrySize}
|
|
129
|
+
*/
|
|
130
|
+
maxEntrySize;
|
|
131
|
+
/**
|
|
132
|
+
* {@link LRUCache.OptionsBase.sizeCalculation}
|
|
133
|
+
*/
|
|
134
|
+
sizeCalculation;
|
|
135
|
+
/**
|
|
136
|
+
* {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
|
|
137
|
+
*/
|
|
138
|
+
noDeleteOnFetchRejection;
|
|
139
|
+
/**
|
|
140
|
+
* {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
|
|
141
|
+
*/
|
|
142
|
+
noDeleteOnStaleGet;
|
|
143
|
+
/**
|
|
144
|
+
* {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
|
|
145
|
+
*/
|
|
146
|
+
allowStaleOnFetchAbort;
|
|
147
|
+
/**
|
|
148
|
+
* {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
|
|
149
|
+
*/
|
|
150
|
+
allowStaleOnFetchRejection;
|
|
151
|
+
/**
|
|
152
|
+
* {@link LRUCache.OptionsBase.ignoreFetchAbort}
|
|
153
|
+
*/
|
|
154
|
+
ignoreFetchAbort;
|
|
155
|
+
// computed properties
|
|
156
|
+
#size;
|
|
157
|
+
#calculatedSize;
|
|
158
|
+
#keyMap;
|
|
159
|
+
#keyList;
|
|
160
|
+
#valList;
|
|
161
|
+
#next;
|
|
162
|
+
#prev;
|
|
163
|
+
#head;
|
|
164
|
+
#tail;
|
|
165
|
+
#free;
|
|
166
|
+
#disposed;
|
|
167
|
+
#sizes;
|
|
168
|
+
#starts;
|
|
169
|
+
#ttls;
|
|
170
|
+
#hasDispose;
|
|
171
|
+
#hasFetchMethod;
|
|
172
|
+
#hasDisposeAfter;
|
|
173
|
+
/**
|
|
174
|
+
* Do not call this method unless you need to inspect the
|
|
175
|
+
* inner workings of the cache. If anything returned by this
|
|
176
|
+
* object is modified in any way, strange breakage may occur.
|
|
177
|
+
*
|
|
178
|
+
* These fields are private for a reason!
|
|
179
|
+
*
|
|
180
|
+
* @internal
|
|
181
|
+
*/
|
|
182
|
+
static unsafeExposeInternals(c) {
|
|
183
|
+
return {
|
|
184
|
+
// properties
|
|
185
|
+
starts: c.#starts,
|
|
186
|
+
ttls: c.#ttls,
|
|
187
|
+
sizes: c.#sizes,
|
|
188
|
+
keyMap: c.#keyMap,
|
|
189
|
+
keyList: c.#keyList,
|
|
190
|
+
valList: c.#valList,
|
|
191
|
+
next: c.#next,
|
|
192
|
+
prev: c.#prev,
|
|
193
|
+
get head() {
|
|
194
|
+
return c.#head;
|
|
195
|
+
},
|
|
196
|
+
get tail() {
|
|
197
|
+
return c.#tail;
|
|
198
|
+
},
|
|
199
|
+
free: c.#free,
|
|
200
|
+
// methods
|
|
201
|
+
isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
|
|
202
|
+
backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
|
|
203
|
+
moveToTail: (index) => c.#moveToTail(index),
|
|
204
|
+
indexes: (options) => c.#indexes(options),
|
|
205
|
+
rindexes: (options) => c.#rindexes(options),
|
|
206
|
+
isStale: (index) => c.#isStale(index),
|
|
207
|
+
};
|
|
208
|
+
}
|
|
209
|
+
// Protected read-only members
|
|
210
|
+
/**
|
|
211
|
+
* {@link LRUCache.OptionsBase.max} (read-only)
|
|
212
|
+
*/
|
|
213
|
+
get max() {
|
|
214
|
+
return this.#max;
|
|
215
|
+
}
|
|
216
|
+
/**
|
|
217
|
+
* {@link LRUCache.OptionsBase.maxSize} (read-only)
|
|
218
|
+
*/
|
|
219
|
+
get maxSize() {
|
|
220
|
+
return this.#maxSize;
|
|
221
|
+
}
|
|
222
|
+
/**
|
|
223
|
+
* The total computed size of items in the cache (read-only)
|
|
224
|
+
*/
|
|
225
|
+
get calculatedSize() {
|
|
226
|
+
return this.#calculatedSize;
|
|
227
|
+
}
|
|
228
|
+
/**
|
|
229
|
+
* The number of items stored in the cache (read-only)
|
|
230
|
+
*/
|
|
231
|
+
get size() {
|
|
232
|
+
return this.#size;
|
|
233
|
+
}
|
|
234
|
+
/**
|
|
235
|
+
* {@link LRUCache.OptionsBase.fetchMethod} (read-only)
|
|
236
|
+
*/
|
|
237
|
+
get fetchMethod() {
|
|
238
|
+
return this.#fetchMethod;
|
|
239
|
+
}
|
|
240
|
+
/**
|
|
241
|
+
* {@link LRUCache.OptionsBase.dispose} (read-only)
|
|
242
|
+
*/
|
|
243
|
+
get dispose() {
|
|
244
|
+
return this.#dispose;
|
|
245
|
+
}
|
|
246
|
+
/**
|
|
247
|
+
* {@link LRUCache.OptionsBase.disposeAfter} (read-only)
|
|
248
|
+
*/
|
|
249
|
+
get disposeAfter() {
|
|
250
|
+
return this.#disposeAfter;
|
|
251
|
+
}
|
|
252
|
+
constructor(options) {
|
|
253
|
+
const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
|
|
254
|
+
if (max !== 0 && !isPosInt(max)) {
|
|
255
|
+
throw new TypeError('max option must be a nonnegative integer');
|
|
256
|
+
}
|
|
257
|
+
const UintArray = max ? getUintArray(max) : Array;
|
|
258
|
+
if (!UintArray) {
|
|
259
|
+
throw new Error('invalid max value: ' + max);
|
|
260
|
+
}
|
|
261
|
+
this.#max = max;
|
|
262
|
+
this.#maxSize = maxSize;
|
|
263
|
+
this.maxEntrySize = maxEntrySize || this.#maxSize;
|
|
264
|
+
this.sizeCalculation = sizeCalculation;
|
|
265
|
+
if (this.sizeCalculation) {
|
|
266
|
+
if (!this.#maxSize && !this.maxEntrySize) {
|
|
267
|
+
throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
|
|
268
|
+
}
|
|
269
|
+
if (typeof this.sizeCalculation !== 'function') {
|
|
270
|
+
throw new TypeError('sizeCalculation set to non-function');
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
if (fetchMethod !== undefined &&
|
|
274
|
+
typeof fetchMethod !== 'function') {
|
|
275
|
+
throw new TypeError('fetchMethod must be a function if specified');
|
|
276
|
+
}
|
|
277
|
+
this.#fetchMethod = fetchMethod;
|
|
278
|
+
this.#hasFetchMethod = !!fetchMethod;
|
|
279
|
+
this.#keyMap = new Map();
|
|
280
|
+
this.#keyList = new Array(max).fill(undefined);
|
|
281
|
+
this.#valList = new Array(max).fill(undefined);
|
|
282
|
+
this.#next = new UintArray(max);
|
|
283
|
+
this.#prev = new UintArray(max);
|
|
284
|
+
this.#head = 0;
|
|
285
|
+
this.#tail = 0;
|
|
286
|
+
this.#free = Stack.create(max);
|
|
287
|
+
this.#size = 0;
|
|
288
|
+
this.#calculatedSize = 0;
|
|
289
|
+
if (typeof dispose === 'function') {
|
|
290
|
+
this.#dispose = dispose;
|
|
291
|
+
}
|
|
292
|
+
if (typeof disposeAfter === 'function') {
|
|
293
|
+
this.#disposeAfter = disposeAfter;
|
|
294
|
+
this.#disposed = [];
|
|
295
|
+
}
|
|
296
|
+
else {
|
|
297
|
+
this.#disposeAfter = undefined;
|
|
298
|
+
this.#disposed = undefined;
|
|
299
|
+
}
|
|
300
|
+
this.#hasDispose = !!this.#dispose;
|
|
301
|
+
this.#hasDisposeAfter = !!this.#disposeAfter;
|
|
302
|
+
this.noDisposeOnSet = !!noDisposeOnSet;
|
|
303
|
+
this.noUpdateTTL = !!noUpdateTTL;
|
|
304
|
+
this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
|
|
305
|
+
this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
|
|
306
|
+
this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
|
|
307
|
+
this.ignoreFetchAbort = !!ignoreFetchAbort;
|
|
308
|
+
// NB: maxEntrySize is set to maxSize if it's set
|
|
309
|
+
if (this.maxEntrySize !== 0) {
|
|
310
|
+
if (this.#maxSize !== 0) {
|
|
311
|
+
if (!isPosInt(this.#maxSize)) {
|
|
312
|
+
throw new TypeError('maxSize must be a positive integer if specified');
|
|
313
|
+
}
|
|
314
|
+
}
|
|
315
|
+
if (!isPosInt(this.maxEntrySize)) {
|
|
316
|
+
throw new TypeError('maxEntrySize must be a positive integer if specified');
|
|
317
|
+
}
|
|
318
|
+
this.#initializeSizeTracking();
|
|
319
|
+
}
|
|
320
|
+
this.allowStale = !!allowStale;
|
|
321
|
+
this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
|
|
322
|
+
this.updateAgeOnGet = !!updateAgeOnGet;
|
|
323
|
+
this.updateAgeOnHas = !!updateAgeOnHas;
|
|
324
|
+
this.ttlResolution =
|
|
325
|
+
isPosInt(ttlResolution) || ttlResolution === 0
|
|
326
|
+
? ttlResolution
|
|
327
|
+
: 1;
|
|
328
|
+
this.ttlAutopurge = !!ttlAutopurge;
|
|
329
|
+
this.ttl = ttl || 0;
|
|
330
|
+
if (this.ttl) {
|
|
331
|
+
if (!isPosInt(this.ttl)) {
|
|
332
|
+
throw new TypeError('ttl must be a positive integer if specified');
|
|
333
|
+
}
|
|
334
|
+
this.#initializeTTLTracking();
|
|
335
|
+
}
|
|
336
|
+
// do not allow completely unbounded caches
|
|
337
|
+
if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
|
|
338
|
+
throw new TypeError('At least one of max, maxSize, or ttl is required');
|
|
339
|
+
}
|
|
340
|
+
if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
|
|
341
|
+
const code = 'LRU_CACHE_UNBOUNDED';
|
|
342
|
+
if (shouldWarn(code)) {
|
|
343
|
+
warned.add(code);
|
|
344
|
+
const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
|
|
345
|
+
'result in unbounded memory consumption.';
|
|
346
|
+
emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
}
|
|
350
|
+
/**
|
|
351
|
+
* Return the remaining TTL time for a given entry key
|
|
352
|
+
*/
|
|
353
|
+
getRemainingTTL(key) {
|
|
354
|
+
return this.#keyMap.has(key) ? Infinity : 0;
|
|
355
|
+
}
|
|
356
|
+
#initializeTTLTracking() {
|
|
357
|
+
const ttls = new ZeroArray(this.#max);
|
|
358
|
+
const starts = new ZeroArray(this.#max);
|
|
359
|
+
this.#ttls = ttls;
|
|
360
|
+
this.#starts = starts;
|
|
361
|
+
this.#setItemTTL = (index, ttl, start = perf.now()) => {
|
|
362
|
+
starts[index] = ttl !== 0 ? start : 0;
|
|
363
|
+
ttls[index] = ttl;
|
|
364
|
+
if (ttl !== 0 && this.ttlAutopurge) {
|
|
365
|
+
const t = setTimeout(() => {
|
|
366
|
+
if (this.#isStale(index)) {
|
|
367
|
+
this.delete(this.#keyList[index]);
|
|
368
|
+
}
|
|
369
|
+
}, ttl + 1);
|
|
370
|
+
// unref() not supported on all platforms
|
|
371
|
+
/* c8 ignore start */
|
|
372
|
+
if (t.unref) {
|
|
373
|
+
t.unref();
|
|
374
|
+
}
|
|
375
|
+
/* c8 ignore stop */
|
|
376
|
+
}
|
|
377
|
+
};
|
|
378
|
+
this.#updateItemAge = index => {
|
|
379
|
+
starts[index] = ttls[index] !== 0 ? perf.now() : 0;
|
|
380
|
+
};
|
|
381
|
+
this.#statusTTL = (status, index) => {
|
|
382
|
+
if (ttls[index]) {
|
|
383
|
+
const ttl = ttls[index];
|
|
384
|
+
const start = starts[index];
|
|
385
|
+
status.ttl = ttl;
|
|
386
|
+
status.start = start;
|
|
387
|
+
status.now = cachedNow || getNow();
|
|
388
|
+
status.remainingTTL = status.now + ttl - start;
|
|
389
|
+
}
|
|
390
|
+
};
|
|
391
|
+
// debounce calls to perf.now() to 1s so we're not hitting
|
|
392
|
+
// that costly call repeatedly.
|
|
393
|
+
let cachedNow = 0;
|
|
394
|
+
const getNow = () => {
|
|
395
|
+
const n = perf.now();
|
|
396
|
+
if (this.ttlResolution > 0) {
|
|
397
|
+
cachedNow = n;
|
|
398
|
+
const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
|
|
399
|
+
// not available on all platforms
|
|
400
|
+
/* c8 ignore start */
|
|
401
|
+
if (t.unref) {
|
|
402
|
+
t.unref();
|
|
403
|
+
}
|
|
404
|
+
/* c8 ignore stop */
|
|
405
|
+
}
|
|
406
|
+
return n;
|
|
407
|
+
};
|
|
408
|
+
this.getRemainingTTL = key => {
|
|
409
|
+
const index = this.#keyMap.get(key);
|
|
410
|
+
if (index === undefined) {
|
|
411
|
+
return 0;
|
|
412
|
+
}
|
|
413
|
+
return ttls[index] === 0 || starts[index] === 0
|
|
414
|
+
? Infinity
|
|
415
|
+
: starts[index] + ttls[index] - (cachedNow || getNow());
|
|
416
|
+
};
|
|
417
|
+
this.#isStale = index => {
|
|
418
|
+
return (ttls[index] !== 0 &&
|
|
419
|
+
starts[index] !== 0 &&
|
|
420
|
+
(cachedNow || getNow()) - starts[index] > ttls[index]);
|
|
421
|
+
};
|
|
422
|
+
}
|
|
423
|
+
// conditionally set private methods related to TTL
|
|
424
|
+
#updateItemAge = () => { };
|
|
425
|
+
#statusTTL = () => { };
|
|
426
|
+
#setItemTTL = () => { };
|
|
427
|
+
/* c8 ignore stop */
|
|
428
|
+
#isStale = () => false;
|
|
429
|
+
#initializeSizeTracking() {
|
|
430
|
+
const sizes = new ZeroArray(this.#max);
|
|
431
|
+
this.#calculatedSize = 0;
|
|
432
|
+
this.#sizes = sizes;
|
|
433
|
+
this.#removeItemSize = index => {
|
|
434
|
+
this.#calculatedSize -= sizes[index];
|
|
435
|
+
sizes[index] = 0;
|
|
436
|
+
};
|
|
437
|
+
this.#requireSize = (k, v, size, sizeCalculation) => {
|
|
438
|
+
// provisionally accept background fetches.
|
|
439
|
+
// actual value size will be checked when they return.
|
|
440
|
+
if (this.#isBackgroundFetch(v)) {
|
|
441
|
+
return 0;
|
|
442
|
+
}
|
|
443
|
+
if (!isPosInt(size)) {
|
|
444
|
+
if (sizeCalculation) {
|
|
445
|
+
if (typeof sizeCalculation !== 'function') {
|
|
446
|
+
throw new TypeError('sizeCalculation must be a function');
|
|
447
|
+
}
|
|
448
|
+
size = sizeCalculation(v, k);
|
|
449
|
+
if (!isPosInt(size)) {
|
|
450
|
+
throw new TypeError('sizeCalculation return invalid (expect positive integer)');
|
|
451
|
+
}
|
|
452
|
+
}
|
|
453
|
+
else {
|
|
454
|
+
throw new TypeError('invalid size value (must be positive integer). ' +
|
|
455
|
+
'When maxSize or maxEntrySize is used, sizeCalculation ' +
|
|
456
|
+
'or size must be set.');
|
|
457
|
+
}
|
|
458
|
+
}
|
|
459
|
+
return size;
|
|
460
|
+
};
|
|
461
|
+
this.#addItemSize = (index, size, status) => {
|
|
462
|
+
sizes[index] = size;
|
|
463
|
+
if (this.#maxSize) {
|
|
464
|
+
const maxSize = this.#maxSize - sizes[index];
|
|
465
|
+
while (this.#calculatedSize > maxSize) {
|
|
466
|
+
this.#evict(true);
|
|
467
|
+
}
|
|
468
|
+
}
|
|
469
|
+
this.#calculatedSize += sizes[index];
|
|
470
|
+
if (status) {
|
|
471
|
+
status.entrySize = size;
|
|
472
|
+
status.totalCalculatedSize = this.#calculatedSize;
|
|
473
|
+
}
|
|
474
|
+
};
|
|
475
|
+
}
|
|
476
|
+
#removeItemSize = _i => { };
|
|
477
|
+
#addItemSize = (_i, _s, _st) => { };
|
|
478
|
+
#requireSize = (_k, _v, size, sizeCalculation) => {
|
|
479
|
+
if (size || sizeCalculation) {
|
|
480
|
+
throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
|
|
481
|
+
}
|
|
482
|
+
return 0;
|
|
483
|
+
};
|
|
484
|
+
*#indexes({ allowStale = this.allowStale } = {}) {
|
|
485
|
+
if (this.#size) {
|
|
486
|
+
for (let i = this.#tail; true;) {
|
|
487
|
+
if (!this.#isValidIndex(i)) {
|
|
488
|
+
break;
|
|
489
|
+
}
|
|
490
|
+
if (allowStale || !this.#isStale(i)) {
|
|
491
|
+
yield i;
|
|
492
|
+
}
|
|
493
|
+
if (i === this.#head) {
|
|
494
|
+
break;
|
|
495
|
+
}
|
|
496
|
+
else {
|
|
497
|
+
i = this.#prev[i];
|
|
498
|
+
}
|
|
499
|
+
}
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
*#rindexes({ allowStale = this.allowStale } = {}) {
|
|
503
|
+
if (this.#size) {
|
|
504
|
+
for (let i = this.#head; true;) {
|
|
505
|
+
if (!this.#isValidIndex(i)) {
|
|
506
|
+
break;
|
|
507
|
+
}
|
|
508
|
+
if (allowStale || !this.#isStale(i)) {
|
|
509
|
+
yield i;
|
|
510
|
+
}
|
|
511
|
+
if (i === this.#tail) {
|
|
512
|
+
break;
|
|
513
|
+
}
|
|
514
|
+
else {
|
|
515
|
+
i = this.#next[i];
|
|
516
|
+
}
|
|
517
|
+
}
|
|
518
|
+
}
|
|
519
|
+
}
|
|
520
|
+
#isValidIndex(index) {
|
|
521
|
+
return (index !== undefined &&
|
|
522
|
+
this.#keyMap.get(this.#keyList[index]) === index);
|
|
523
|
+
}
|
|
524
|
+
/**
|
|
525
|
+
* Return a generator yielding `[key, value]` pairs,
|
|
526
|
+
* in order from most recently used to least recently used.
|
|
527
|
+
*/
|
|
528
|
+
*entries() {
|
|
529
|
+
for (const i of this.#indexes()) {
|
|
530
|
+
if (this.#valList[i] !== undefined &&
|
|
531
|
+
this.#keyList[i] !== undefined &&
|
|
532
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
533
|
+
yield [this.#keyList[i], this.#valList[i]];
|
|
534
|
+
}
|
|
535
|
+
}
|
|
536
|
+
}
|
|
537
|
+
/**
|
|
538
|
+
* Inverse order version of {@link LRUCache.entries}
|
|
539
|
+
*
|
|
540
|
+
* Return a generator yielding `[key, value]` pairs,
|
|
541
|
+
* in order from least recently used to most recently used.
|
|
542
|
+
*/
|
|
543
|
+
*rentries() {
|
|
544
|
+
for (const i of this.#rindexes()) {
|
|
545
|
+
if (this.#valList[i] !== undefined &&
|
|
546
|
+
this.#keyList[i] !== undefined &&
|
|
547
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
548
|
+
yield [this.#keyList[i], this.#valList[i]];
|
|
549
|
+
}
|
|
550
|
+
}
|
|
551
|
+
}
|
|
552
|
+
/**
|
|
553
|
+
* Return a generator yielding the keys in the cache,
|
|
554
|
+
* in order from most recently used to least recently used.
|
|
555
|
+
*/
|
|
556
|
+
*keys() {
|
|
557
|
+
for (const i of this.#indexes()) {
|
|
558
|
+
const k = this.#keyList[i];
|
|
559
|
+
if (k !== undefined &&
|
|
560
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
561
|
+
yield k;
|
|
562
|
+
}
|
|
563
|
+
}
|
|
564
|
+
}
|
|
565
|
+
/**
|
|
566
|
+
* Inverse order version of {@link LRUCache.keys}
|
|
567
|
+
*
|
|
568
|
+
* Return a generator yielding the keys in the cache,
|
|
569
|
+
* in order from least recently used to most recently used.
|
|
570
|
+
*/
|
|
571
|
+
*rkeys() {
|
|
572
|
+
for (const i of this.#rindexes()) {
|
|
573
|
+
const k = this.#keyList[i];
|
|
574
|
+
if (k !== undefined &&
|
|
575
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
576
|
+
yield k;
|
|
577
|
+
}
|
|
578
|
+
}
|
|
579
|
+
}
|
|
580
|
+
/**
|
|
581
|
+
* Return a generator yielding the values in the cache,
|
|
582
|
+
* in order from most recently used to least recently used.
|
|
583
|
+
*/
|
|
584
|
+
*values() {
|
|
585
|
+
for (const i of this.#indexes()) {
|
|
586
|
+
const v = this.#valList[i];
|
|
587
|
+
if (v !== undefined &&
|
|
588
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
589
|
+
yield this.#valList[i];
|
|
590
|
+
}
|
|
591
|
+
}
|
|
592
|
+
}
|
|
593
|
+
/**
|
|
594
|
+
* Inverse order version of {@link LRUCache.values}
|
|
595
|
+
*
|
|
596
|
+
* Return a generator yielding the values in the cache,
|
|
597
|
+
* in order from least recently used to most recently used.
|
|
598
|
+
*/
|
|
599
|
+
*rvalues() {
|
|
600
|
+
for (const i of this.#rindexes()) {
|
|
601
|
+
const v = this.#valList[i];
|
|
602
|
+
if (v !== undefined &&
|
|
603
|
+
!this.#isBackgroundFetch(this.#valList[i])) {
|
|
604
|
+
yield this.#valList[i];
|
|
605
|
+
}
|
|
606
|
+
}
|
|
607
|
+
}
|
|
608
|
+
/**
|
|
609
|
+
* Iterating over the cache itself yields the same results as
|
|
610
|
+
* {@link LRUCache.entries}
|
|
611
|
+
*/
|
|
612
|
+
[Symbol.iterator]() {
|
|
613
|
+
return this.entries();
|
|
614
|
+
}
|
|
615
|
+
/**
|
|
616
|
+
* Find a value for which the supplied fn method returns a truthy value,
|
|
617
|
+
* similar to Array.find(). fn is called as fn(value, key, cache).
|
|
618
|
+
*/
|
|
619
|
+
find(fn, getOptions = {}) {
|
|
620
|
+
for (const i of this.#indexes()) {
|
|
621
|
+
const v = this.#valList[i];
|
|
622
|
+
const value = this.#isBackgroundFetch(v)
|
|
623
|
+
? v.__staleWhileFetching
|
|
624
|
+
: v;
|
|
625
|
+
if (value === undefined)
|
|
626
|
+
continue;
|
|
627
|
+
if (fn(value, this.#keyList[i], this)) {
|
|
628
|
+
return this.get(this.#keyList[i], getOptions);
|
|
629
|
+
}
|
|
630
|
+
}
|
|
631
|
+
}
|
|
632
|
+
/**
|
|
633
|
+
* Call the supplied function on each item in the cache, in order from
|
|
634
|
+
* most recently used to least recently used. fn is called as
|
|
635
|
+
* fn(value, key, cache). Does not update age or recenty of use.
|
|
636
|
+
* Does not iterate over stale values.
|
|
637
|
+
*/
|
|
638
|
+
forEach(fn, thisp = this) {
|
|
639
|
+
for (const i of this.#indexes()) {
|
|
640
|
+
const v = this.#valList[i];
|
|
641
|
+
const value = this.#isBackgroundFetch(v)
|
|
642
|
+
? v.__staleWhileFetching
|
|
643
|
+
: v;
|
|
644
|
+
if (value === undefined)
|
|
645
|
+
continue;
|
|
646
|
+
fn.call(thisp, value, this.#keyList[i], this);
|
|
647
|
+
}
|
|
648
|
+
}
|
|
649
|
+
/**
|
|
650
|
+
* The same as {@link LRUCache.forEach} but items are iterated over in
|
|
651
|
+
* reverse order. (ie, less recently used items are iterated over first.)
|
|
652
|
+
*/
|
|
653
|
+
rforEach(fn, thisp = this) {
|
|
654
|
+
for (const i of this.#rindexes()) {
|
|
655
|
+
const v = this.#valList[i];
|
|
656
|
+
const value = this.#isBackgroundFetch(v)
|
|
657
|
+
? v.__staleWhileFetching
|
|
658
|
+
: v;
|
|
659
|
+
if (value === undefined)
|
|
660
|
+
continue;
|
|
661
|
+
fn.call(thisp, value, this.#keyList[i], this);
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
/**
|
|
665
|
+
* Delete any stale entries. Returns true if anything was removed,
|
|
666
|
+
* false otherwise.
|
|
667
|
+
*/
|
|
668
|
+
purgeStale() {
|
|
669
|
+
let deleted = false;
|
|
670
|
+
for (const i of this.#rindexes({ allowStale: true })) {
|
|
671
|
+
if (this.#isStale(i)) {
|
|
672
|
+
this.delete(this.#keyList[i]);
|
|
673
|
+
deleted = true;
|
|
674
|
+
}
|
|
675
|
+
}
|
|
676
|
+
return deleted;
|
|
677
|
+
}
|
|
678
|
+
/**
|
|
679
|
+
* Return an array of [key, {@link LRUCache.Entry}] tuples which can be
|
|
680
|
+
* passed to cache.load()
|
|
681
|
+
*/
|
|
682
|
+
dump() {
|
|
683
|
+
const arr = [];
|
|
684
|
+
for (const i of this.#indexes({ allowStale: true })) {
|
|
685
|
+
const key = this.#keyList[i];
|
|
686
|
+
const v = this.#valList[i];
|
|
687
|
+
const value = this.#isBackgroundFetch(v)
|
|
688
|
+
? v.__staleWhileFetching
|
|
689
|
+
: v;
|
|
690
|
+
if (value === undefined || key === undefined)
|
|
691
|
+
continue;
|
|
692
|
+
const entry = { value };
|
|
693
|
+
if (this.#ttls && this.#starts) {
|
|
694
|
+
entry.ttl = this.#ttls[i];
|
|
695
|
+
// always dump the start relative to a portable timestamp
|
|
696
|
+
// it's ok for this to be a bit slow, it's a rare operation.
|
|
697
|
+
const age = perf.now() - this.#starts[i];
|
|
698
|
+
entry.start = Math.floor(Date.now() - age);
|
|
699
|
+
}
|
|
700
|
+
if (this.#sizes) {
|
|
701
|
+
entry.size = this.#sizes[i];
|
|
702
|
+
}
|
|
703
|
+
arr.unshift([key, entry]);
|
|
704
|
+
}
|
|
705
|
+
return arr;
|
|
706
|
+
}
|
|
707
|
+
/**
|
|
708
|
+
* Reset the cache and load in the items in entries in the order listed.
|
|
709
|
+
* Note that the shape of the resulting cache may be different if the
|
|
710
|
+
* same options are not used in both caches.
|
|
711
|
+
*/
|
|
712
|
+
load(arr) {
|
|
713
|
+
this.clear();
|
|
714
|
+
for (const [key, entry] of arr) {
|
|
715
|
+
if (entry.start) {
|
|
716
|
+
// entry.start is a portable timestamp, but we may be using
|
|
717
|
+
// node's performance.now(), so calculate the offset, so that
|
|
718
|
+
// we get the intended remaining TTL, no matter how long it's
|
|
719
|
+
// been on ice.
|
|
720
|
+
//
|
|
721
|
+
// it's ok for this to be a bit slow, it's a rare operation.
|
|
722
|
+
const age = Date.now() - entry.start;
|
|
723
|
+
entry.start = perf.now() - age;
|
|
724
|
+
}
|
|
725
|
+
this.set(key, entry.value, entry);
|
|
726
|
+
}
|
|
727
|
+
}
|
|
728
|
+
/**
|
|
729
|
+
* Add a value to the cache.
|
|
730
|
+
*/
|
|
731
|
+
set(k, v, setOptions = {}) {
|
|
732
|
+
const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
|
|
733
|
+
let { noUpdateTTL = this.noUpdateTTL } = setOptions;
|
|
734
|
+
const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
|
|
735
|
+
// if the item doesn't fit, don't do anything
|
|
736
|
+
// NB: maxEntrySize set to maxSize by default
|
|
737
|
+
if (this.maxEntrySize && size > this.maxEntrySize) {
|
|
738
|
+
if (status) {
|
|
739
|
+
status.set = 'miss';
|
|
740
|
+
status.maxEntrySizeExceeded = true;
|
|
741
|
+
}
|
|
742
|
+
// have to delete, in case something is there already.
|
|
743
|
+
this.delete(k);
|
|
744
|
+
return this;
|
|
745
|
+
}
|
|
746
|
+
let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
|
|
747
|
+
if (index === undefined) {
|
|
748
|
+
// addition
|
|
749
|
+
index = (this.#size === 0
|
|
750
|
+
? this.#tail
|
|
751
|
+
: this.#free.length !== 0
|
|
752
|
+
? this.#free.pop()
|
|
753
|
+
: this.#size === this.#max
|
|
754
|
+
? this.#evict(false)
|
|
755
|
+
: this.#size);
|
|
756
|
+
this.#keyList[index] = k;
|
|
757
|
+
this.#valList[index] = v;
|
|
758
|
+
this.#keyMap.set(k, index);
|
|
759
|
+
this.#next[this.#tail] = index;
|
|
760
|
+
this.#prev[index] = this.#tail;
|
|
761
|
+
this.#tail = index;
|
|
762
|
+
this.#size++;
|
|
763
|
+
this.#addItemSize(index, size, status);
|
|
764
|
+
if (status)
|
|
765
|
+
status.set = 'add';
|
|
766
|
+
noUpdateTTL = false;
|
|
767
|
+
}
|
|
768
|
+
else {
|
|
769
|
+
// update
|
|
770
|
+
this.#moveToTail(index);
|
|
771
|
+
const oldVal = this.#valList[index];
|
|
772
|
+
if (v !== oldVal) {
|
|
773
|
+
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
|
|
774
|
+
oldVal.__abortController.abort(new Error('replaced'));
|
|
775
|
+
}
|
|
776
|
+
else if (!noDisposeOnSet) {
|
|
777
|
+
if (this.#hasDispose) {
|
|
778
|
+
this.#dispose?.(oldVal, k, 'set');
|
|
779
|
+
}
|
|
780
|
+
if (this.#hasDisposeAfter) {
|
|
781
|
+
this.#disposed?.push([oldVal, k, 'set']);
|
|
782
|
+
}
|
|
783
|
+
}
|
|
784
|
+
this.#removeItemSize(index);
|
|
785
|
+
this.#addItemSize(index, size, status);
|
|
786
|
+
this.#valList[index] = v;
|
|
787
|
+
if (status) {
|
|
788
|
+
status.set = 'replace';
|
|
789
|
+
const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
|
|
790
|
+
? oldVal.__staleWhileFetching
|
|
791
|
+
: oldVal;
|
|
792
|
+
if (oldValue !== undefined)
|
|
793
|
+
status.oldValue = oldValue;
|
|
794
|
+
}
|
|
795
|
+
}
|
|
796
|
+
else if (status) {
|
|
797
|
+
status.set = 'update';
|
|
798
|
+
}
|
|
799
|
+
}
|
|
800
|
+
if (ttl !== 0 && !this.#ttls) {
|
|
801
|
+
this.#initializeTTLTracking();
|
|
802
|
+
}
|
|
803
|
+
if (this.#ttls) {
|
|
804
|
+
if (!noUpdateTTL) {
|
|
805
|
+
this.#setItemTTL(index, ttl, start);
|
|
806
|
+
}
|
|
807
|
+
if (status)
|
|
808
|
+
this.#statusTTL(status, index);
|
|
809
|
+
}
|
|
810
|
+
if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
|
|
811
|
+
const dt = this.#disposed;
|
|
812
|
+
let task;
|
|
813
|
+
while ((task = dt?.shift())) {
|
|
814
|
+
this.#disposeAfter?.(...task);
|
|
815
|
+
}
|
|
816
|
+
}
|
|
817
|
+
return this;
|
|
818
|
+
}
|
|
819
|
+
/**
 * Evict the least recently used item, returning its value or
 * `undefined` if cache is empty.
 */
pop() {
    try {
        // Keep evicting from the head until a usable value turns up or
        // the cache is empty.
        while (this.#size) {
            const val = this.#valList[this.#head];
            this.#evict(true);
            if (this.#isBackgroundFetch(val)) {
                // An in-flight fetch only yields a value when it carries a
                // stale one; otherwise fall through and try the next entry.
                if (val.__staleWhileFetching) {
                    return val.__staleWhileFetching;
                }
            }
            else if (val !== undefined) {
                return val;
            }
        }
    }
    finally {
        // Drain any disposeAfter callbacks queued by the eviction(s)
        // above, even if we returned early out of the loop.
        if (this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
    }
}
|
|
848
|
+
// Remove the entry at the head (least-recently-used) position and
// return its index. When `free` is true the slot is cleared and pushed
// onto the free list; when false, the caller reuses the index directly.
#evict(free) {
    const head = this.#head;
    const k = this.#keyList[head];
    const v = this.#valList[head];
    if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
        // An unresolved background fetch is aborted rather than disposed.
        v.__abortController.abort(new Error('evicted'));
    }
    else if (this.#hasDispose || this.#hasDisposeAfter) {
        if (this.#hasDispose) {
            this.#dispose?.(v, k, 'evict');
        }
        if (this.#hasDisposeAfter) {
            // Deferred: drained by the public method after it finishes.
            this.#disposed?.push([v, k, 'evict']);
        }
    }
    this.#removeItemSize(head);
    // if we aren't about to use the index, then null these out
    if (free) {
        this.#keyList[head] = undefined;
        this.#valList[head] = undefined;
        this.#free.push(head);
    }
    if (this.#size === 1) {
        // List is about to become empty: reset pointers and free list.
        this.#head = this.#tail = 0;
        this.#free.length = 0;
    }
    else {
        this.#head = this.#next[head];
    }
    this.#keyMap.delete(k);
    this.#size--;
    return head;
}
|
|
881
|
+
/**
|
|
882
|
+
* Check if a key is in the cache, without updating the recency of use.
|
|
883
|
+
* Will return false if the item is stale, even though it is technically
|
|
884
|
+
* in the cache.
|
|
885
|
+
*
|
|
886
|
+
* Will not update item age unless
|
|
887
|
+
* {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
|
|
888
|
+
*/
|
|
889
|
+
has(k, hasOptions = {}) {
|
|
890
|
+
const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
|
|
891
|
+
const index = this.#keyMap.get(k);
|
|
892
|
+
if (index !== undefined) {
|
|
893
|
+
const v = this.#valList[index];
|
|
894
|
+
if (this.#isBackgroundFetch(v) &&
|
|
895
|
+
v.__staleWhileFetching === undefined) {
|
|
896
|
+
return false;
|
|
897
|
+
}
|
|
898
|
+
if (!this.#isStale(index)) {
|
|
899
|
+
if (updateAgeOnHas) {
|
|
900
|
+
this.#updateItemAge(index);
|
|
901
|
+
}
|
|
902
|
+
if (status) {
|
|
903
|
+
status.has = 'hit';
|
|
904
|
+
this.#statusTTL(status, index);
|
|
905
|
+
}
|
|
906
|
+
return true;
|
|
907
|
+
}
|
|
908
|
+
else if (status) {
|
|
909
|
+
status.has = 'stale';
|
|
910
|
+
this.#statusTTL(status, index);
|
|
911
|
+
}
|
|
912
|
+
}
|
|
913
|
+
else if (status) {
|
|
914
|
+
status.has = 'miss';
|
|
915
|
+
}
|
|
916
|
+
return false;
|
|
917
|
+
}
|
|
918
|
+
/**
|
|
919
|
+
* Like {@link LRUCache#get} but doesn't update recency or delete stale
|
|
920
|
+
* items.
|
|
921
|
+
*
|
|
922
|
+
* Returns `undefined` if the item is stale, unless
|
|
923
|
+
* {@link LRUCache.OptionsBase.allowStale} is set.
|
|
924
|
+
*/
|
|
925
|
+
peek(k, peekOptions = {}) {
|
|
926
|
+
const { allowStale = this.allowStale } = peekOptions;
|
|
927
|
+
const index = this.#keyMap.get(k);
|
|
928
|
+
if (index !== undefined &&
|
|
929
|
+
(allowStale || !this.#isStale(index))) {
|
|
930
|
+
const v = this.#valList[index];
|
|
931
|
+
// either stale and allowed, or forcing a refresh of non-stale value
|
|
932
|
+
return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
|
933
|
+
}
|
|
934
|
+
}
|
|
935
|
+
// Kick off (or join) a background fetch for key `k`. Returns the
// in-flight promise, decorated with `__abortController`,
// `__staleWhileFetching` (the prior value, if any), and `__returned`.
// `index` is the entry's current slot, or undefined for a miss.
// NOTE: the callbacks below close over `p`, which is assigned after
// they are defined; they only run once the promise settles, so the
// binding is safe — do not reorder.
#backgroundFetch(k, index, options, context) {
    const v = index === undefined ? undefined : this.#valList[index];
    if (this.#isBackgroundFetch(v)) {
        // already fetching this key; join the existing fetch
        return v;
    }
    const ac = new AbortController();
    const { signal } = options;
    // when/if our AC signals, then stop listening to theirs.
    signal?.addEventListener('abort', () => ac.abort(signal.reason), {
        signal: ac.signal,
    });
    const fetchOpts = {
        signal: ac.signal,
        options,
        context,
    };
    // resolution handler: record status, then commit the fetched value
    // into the cache if our promise is still the one occupying the slot
    const cb = (v, updateCache = false) => {
        const { aborted } = ac.signal;
        const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
        if (options.status) {
            if (aborted && !updateCache) {
                options.status.fetchAborted = true;
                options.status.fetchError = ac.signal.reason;
                if (ignoreAbort)
                    options.status.fetchAbortIgnored = true;
            }
            else {
                options.status.fetchResolved = true;
            }
        }
        if (aborted && !ignoreAbort && !updateCache) {
            return fetchFail(ac.signal.reason);
        }
        // either we didn't abort, and are still here, or we did, and ignored
        const bf = p;
        if (this.#valList[index] === p) {
            if (v === undefined) {
                // fetch produced nothing: fall back to the stale value,
                // or drop the entry entirely
                if (bf.__staleWhileFetching) {
                    this.#valList[index] = bf.__staleWhileFetching;
                }
                else {
                    this.delete(k);
                }
            }
            else {
                if (options.status)
                    options.status.fetchUpdated = true;
                this.set(k, v, fetchOpts.options);
            }
        }
        return v;
    };
    // rejection handler: record status and route to the failure path
    const eb = (er) => {
        if (options.status) {
            options.status.fetchRejected = true;
            options.status.fetchError = er;
        }
        return fetchFail(er);
    };
    // shared failure path for aborts and rejections
    const fetchFail = (er) => {
        const { aborted } = ac.signal;
        const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
        const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
        const noDelete = allowStale || options.noDeleteOnFetchRejection;
        const bf = p;
        if (this.#valList[index] === p) {
            // if we allow stale on fetch rejections, then we need to ensure that
            // the stale value is not removed from the cache when the fetch fails.
            const del = !noDelete || bf.__staleWhileFetching === undefined;
            if (del) {
                this.delete(k);
            }
            else if (!allowStaleAborted) {
                // still replace the *promise* with the stale value,
                // since we are done with the promise at this point.
                // leave it untouched if we're still waiting for an
                // aborted background fetch that hasn't yet returned.
                this.#valList[index] = bf.__staleWhileFetching;
            }
        }
        if (allowStale) {
            if (options.status && bf.__staleWhileFetching !== undefined) {
                options.status.returnedStale = true;
            }
            return bf.__staleWhileFetching;
        }
        else if (bf.__returned === bf) {
            // only rethrow if the caller actually received this promise
            throw er;
        }
    };
    // promise executor: invoke the user's fetchMethod and wire up abort
    const pcall = (res, rej) => {
        const fmp = this.#fetchMethod?.(k, v, fetchOpts);
        if (fmp && fmp instanceof Promise) {
            fmp.then(v => res(v), rej);
        }
        // ignored, we go until we finish, regardless.
        // defer check until we are actually aborting,
        // so fetchMethod can override.
        ac.signal.addEventListener('abort', () => {
            if (!options.ignoreFetchAbort ||
                options.allowStaleOnFetchAbort) {
                res();
                // when it eventually resolves, update the cache.
                if (options.allowStaleOnFetchAbort) {
                    res = v => cb(v, true);
                }
            }
        });
    };
    if (options.status)
        options.status.fetchDispatched = true;
    const p = new Promise(pcall).then(cb, eb);
    const bf = Object.assign(p, {
        __abortController: ac,
        __staleWhileFetching: v,
        __returned: undefined,
    });
    if (index === undefined) {
        // internal, don't expose status.
        this.set(k, bf, { ...fetchOpts.options, status: undefined });
        index = this.#keyMap.get(k);
    }
    else {
        this.#valList[index] = bf;
    }
    return bf;
}
|
|
1062
|
+
#isBackgroundFetch(p) {
|
|
1063
|
+
if (!this.#hasFetchMethod)
|
|
1064
|
+
return false;
|
|
1065
|
+
const b = p;
|
|
1066
|
+
return (!!b &&
|
|
1067
|
+
b instanceof Promise &&
|
|
1068
|
+
b.hasOwnProperty('__staleWhileFetching') &&
|
|
1069
|
+
b.__abortController instanceof AbortController);
|
|
1070
|
+
}
|
|
1071
|
+
/**
 * Fetch the value for `k`, using the configured `fetchMethod` to load
 * it when the key is missing or stale. Handles in-flight fetches,
 * stale-while-revalidate returns, and forced refreshes. Falls back to a
 * plain `get()` when no `fetchMethod` was provided.
 */
async fetch(k, fetchOptions = {}) {
    const {
    // get options
    allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
    // set options
    ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
    // fetch exclusive options
    noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
    if (!this.#hasFetchMethod) {
        // no fetchMethod configured: degrade to an ordinary get()
        if (status)
            status.fetch = 'get';
        return this.get(k, {
            allowStale,
            updateAgeOnGet,
            noDeleteOnStaleGet,
            status,
        });
    }
    const options = {
        allowStale,
        updateAgeOnGet,
        noDeleteOnStaleGet,
        ttl,
        noDisposeOnSet,
        size,
        sizeCalculation,
        noUpdateTTL,
        noDeleteOnFetchRejection,
        allowStaleOnFetchRejection,
        allowStaleOnFetchAbort,
        ignoreFetchAbort,
        status,
        signal,
    };
    let index = this.#keyMap.get(k);
    if (index === undefined) {
        // cache miss: start a background fetch and hand back its promise
        if (status)
            status.fetch = 'miss';
        const p = this.#backgroundFetch(k, index, options, context);
        return (p.__returned = p);
    }
    else {
        // in cache, maybe already fetching
        const v = this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            // join the in-flight fetch; serve the stale value if allowed
            const stale = allowStale && v.__staleWhileFetching !== undefined;
            if (status) {
                status.fetch = 'inflight';
                if (stale)
                    status.returnedStale = true;
            }
            return stale ? v.__staleWhileFetching : (v.__returned = v);
        }
        // if we force a refresh, that means do NOT serve the cached value,
        // unless we are already in the process of refreshing the cache.
        const isStale = this.#isStale(index);
        if (!forceRefresh && !isStale) {
            // fresh hit: behave like get() (recency + age bookkeeping)
            if (status)
                status.fetch = 'hit';
            this.#moveToTail(index);
            if (updateAgeOnGet) {
                this.#updateItemAge(index);
            }
            if (status)
                this.#statusTTL(status, index);
            return v;
        }
        // ok, it is stale or a forced refresh, and not already fetching.
        // refresh the cache.
        const p = this.#backgroundFetch(k, index, options, context);
        const hasStale = p.__staleWhileFetching !== undefined;
        const staleVal = hasStale && allowStale;
        if (status) {
            status.fetch = isStale ? 'stale' : 'refresh';
            if (staleVal && isStale)
                status.returnedStale = true;
        }
        return staleVal ? p.__staleWhileFetching : (p.__returned = p);
    }
}
|
|
1151
|
+
/**
 * Return a value from the cache. Will update the recency of the cache
 * entry found.
 *
 * If the key is not found, get() will return `undefined`.
 */
get(k, getOptions = {}) {
    const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
    const index = this.#keyMap.get(k);
    if (index !== undefined) {
        const value = this.#valList[index];
        const fetching = this.#isBackgroundFetch(value);
        if (status)
            this.#statusTTL(status, index);
        if (this.#isStale(index)) {
            if (status)
                status.get = 'stale';
            // delete only if not an in-flight background fetch
            if (!fetching) {
                if (!noDeleteOnStaleGet) {
                    this.delete(k);
                }
                if (status && allowStale)
                    status.returnedStale = true;
                // stale value only returned when allowStale is set
                return allowStale ? value : undefined;
            }
            else {
                // stale + fetching: the fetch promise is internal; only
                // the stale value it carries may be exposed
                if (status &&
                    allowStale &&
                    value.__staleWhileFetching !== undefined) {
                    status.returnedStale = true;
                }
                return allowStale ? value.__staleWhileFetching : undefined;
            }
        }
        else {
            if (status)
                status.get = 'hit';
            // if we're currently fetching it, we don't actually have it yet
            // it's not stale, which means this isn't a staleWhileRefetching.
            // If it's not stale, and fetching, AND has a __staleWhileFetching
            // value, then that means the user fetched with {forceRefresh:true},
            // so it's safe to return that value.
            if (fetching) {
                return value.__staleWhileFetching;
            }
            this.#moveToTail(index);
            if (updateAgeOnGet) {
                this.#updateItemAge(index);
            }
            return value;
        }
    }
    else if (status) {
        status.get = 'miss';
    }
}
|
|
1208
|
+
#connect(p, n) {
|
|
1209
|
+
this.#prev[n] = p;
|
|
1210
|
+
this.#next[p] = n;
|
|
1211
|
+
}
|
|
1212
|
+
#moveToTail(index) {
|
|
1213
|
+
// if tail already, nothing to do
|
|
1214
|
+
// if head, move head to next[index]
|
|
1215
|
+
// else
|
|
1216
|
+
// move next[prev[index]] to next[index] (head has no prev)
|
|
1217
|
+
// move prev[next[index]] to prev[index]
|
|
1218
|
+
// prev[index] = tail
|
|
1219
|
+
// next[tail] = index
|
|
1220
|
+
// tail = index
|
|
1221
|
+
if (index !== this.#tail) {
|
|
1222
|
+
if (index === this.#head) {
|
|
1223
|
+
this.#head = this.#next[index];
|
|
1224
|
+
}
|
|
1225
|
+
else {
|
|
1226
|
+
this.#connect(this.#prev[index], this.#next[index]);
|
|
1227
|
+
}
|
|
1228
|
+
this.#connect(this.#tail, index);
|
|
1229
|
+
this.#tail = index;
|
|
1230
|
+
}
|
|
1231
|
+
}
|
|
1232
|
+
/**
 * Deletes a key out of the cache.
 * Returns true if the key was deleted, false otherwise.
 */
delete(k) {
    let deleted = false;
    if (this.#size !== 0) {
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            deleted = true;
            if (this.#size === 1) {
                // last entry: a full clear() resets all internal state
                this.clear();
            }
            else {
                this.#removeItemSize(index);
                const v = this.#valList[index];
                if (this.#isBackgroundFetch(v)) {
                    // abort rather than dispose an unresolved fetch
                    v.__abortController.abort(new Error('deleted'));
                }
                else if (this.#hasDispose || this.#hasDisposeAfter) {
                    if (this.#hasDispose) {
                        this.#dispose?.(v, k, 'delete');
                    }
                    if (this.#hasDisposeAfter) {
                        this.#disposed?.push([v, k, 'delete']);
                    }
                }
                this.#keyMap.delete(k);
                this.#keyList[index] = undefined;
                this.#valList[index] = undefined;
                // unsplice the slot from the doubly-linked recency list
                if (index === this.#tail) {
                    this.#tail = this.#prev[index];
                }
                else if (index === this.#head) {
                    this.#head = this.#next[index];
                }
                else {
                    this.#next[this.#prev[index]] = this.#next[index];
                    this.#prev[this.#next[index]] = this.#prev[index];
                }
                this.#size--;
                this.#free.push(index);
            }
        }
    }
    // drain deferred disposeAfter callbacks queued above (or by clear())
    if (this.#hasDisposeAfter && this.#disposed?.length) {
        const dt = this.#disposed;
        let task;
        while ((task = dt?.shift())) {
            this.#disposeAfter?.(...task);
        }
    }
    return deleted;
}
|
|
1286
|
+
/**
 * Clear the cache entirely, throwing away all values.
 */
clear() {
    // dispose (or abort) every live entry before wiping the state
    for (const index of this.#rindexes({ allowStale: true })) {
        const v = this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            v.__abortController.abort(new Error('deleted'));
        }
        else {
            const k = this.#keyList[index];
            if (this.#hasDispose) {
                this.#dispose?.(v, k, 'delete');
            }
            if (this.#hasDisposeAfter) {
                this.#disposed?.push([v, k, 'delete']);
            }
        }
    }
    // reset all index structures and bookkeeping to the empty state
    this.#keyMap.clear();
    this.#valList.fill(undefined);
    this.#keyList.fill(undefined);
    if (this.#ttls && this.#starts) {
        this.#ttls.fill(0);
        this.#starts.fill(0);
    }
    if (this.#sizes) {
        this.#sizes.fill(0);
    }
    this.#head = 0;
    this.#tail = 0;
    this.#free.length = 0;
    this.#calculatedSize = 0;
    this.#size = 0;
    // run deferred disposeAfter callbacks queued by the loop above
    if (this.#hasDisposeAfter && this.#disposed) {
        const dt = this.#disposed;
        let task;
        while ((task = dt?.shift())) {
            this.#disposeAfter?.(...task);
        }
    }
}
|
|
1328
|
+
}
|
|
1329
|
+
export default LRUCache;
|
|
1330
|
+
//# sourceMappingURL=index.js.map
|