cloudstructs 0.9.10 → 0.9.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.jsii +3 -3
- package/assets/slack-textract/detect.lambda/index.js +2 -1
- package/lib/codecommit-mirror/index.js +2 -2
- package/lib/dmarc/index.js +1 -1
- package/lib/ecs-service-roller/index.js +2 -2
- package/lib/email-receiver/receiver.js +1 -1
- package/lib/mjml-template/index.js +1 -1
- package/lib/saml-identity-provider/index.js +2 -2
- package/lib/slack-app/manifest.js +1 -1
- package/lib/slack-app/slack-app.js +2 -2
- package/lib/slack-events/index.js +1 -1
- package/lib/slack-textract/index.js +1 -1
- package/lib/ssl-server-test/index.js +1 -1
- package/lib/state-machine-cr-provider/index.js +1 -1
- package/lib/static-website/index.js +1 -1
- package/lib/toolkit-cleaner/index.js +1 -1
- package/lib/url-shortener/index.js +1 -1
- package/node_modules/@babel/runtime/helpers/applyDecoratedDescriptor.js +1 -1
- package/node_modules/@babel/runtime/helpers/esm/applyDecoratedDescriptor.js +1 -1
- package/node_modules/@babel/runtime/helpers/esm/objectWithoutProperties.js +2 -2
- package/node_modules/@babel/runtime/helpers/esm/objectWithoutPropertiesLoose.js +1 -1
- package/node_modules/@babel/runtime/helpers/esm/superPropGet.js +9 -0
- package/node_modules/@babel/runtime/helpers/esm/superPropSet.js +6 -0
- package/node_modules/@babel/runtime/helpers/esm/usingCtx.js +36 -24
- package/node_modules/@babel/runtime/helpers/objectWithoutProperties.js +2 -2
- package/node_modules/@babel/runtime/helpers/objectWithoutPropertiesLoose.js +1 -1
- package/node_modules/@babel/runtime/helpers/superPropGet.js +9 -0
- package/node_modules/@babel/runtime/helpers/superPropSet.js +6 -0
- package/node_modules/@babel/runtime/helpers/usingCtx.js +36 -24
- package/node_modules/@babel/runtime/package.json +19 -1
- package/node_modules/@slack/logger/node_modules/@types/node/README.md +1 -1
- package/node_modules/@slack/logger/node_modules/@types/node/buffer.d.ts +0 -81
- package/node_modules/@slack/logger/node_modules/@types/node/child_process.d.ts +2 -2
- package/node_modules/@slack/logger/node_modules/@types/node/crypto.d.ts +5 -4
- package/node_modules/@slack/logger/node_modules/@types/node/dns/promises.d.ts +25 -23
- package/node_modules/@slack/logger/node_modules/@types/node/dns.d.ts +23 -23
- package/node_modules/@slack/logger/node_modules/@types/node/events.d.ts +11 -27
- package/node_modules/@slack/logger/node_modules/@types/node/package.json +3 -3
- package/node_modules/@slack/logger/node_modules/@types/node/test.d.ts +13 -2
- package/node_modules/@slack/web-api/node_modules/@types/node/README.md +1 -1
- package/node_modules/@slack/web-api/node_modules/@types/node/buffer.d.ts +0 -81
- package/node_modules/@slack/web-api/node_modules/@types/node/child_process.d.ts +2 -2
- package/node_modules/@slack/web-api/node_modules/@types/node/crypto.d.ts +5 -4
- package/node_modules/@slack/web-api/node_modules/@types/node/dns/promises.d.ts +25 -23
- package/node_modules/@slack/web-api/node_modules/@types/node/dns.d.ts +23 -23
- package/node_modules/@slack/web-api/node_modules/@types/node/events.d.ts +11 -27
- package/node_modules/@slack/web-api/node_modules/@types/node/package.json +3 -3
- package/node_modules/@slack/web-api/node_modules/@types/node/test.d.ts +13 -2
- package/node_modules/@types/cacheable-request/node_modules/@types/node/README.md +1 -1
- package/node_modules/@types/cacheable-request/node_modules/@types/node/buffer.d.ts +0 -81
- package/node_modules/@types/cacheable-request/node_modules/@types/node/child_process.d.ts +2 -2
- package/node_modules/@types/cacheable-request/node_modules/@types/node/crypto.d.ts +5 -4
- package/node_modules/@types/cacheable-request/node_modules/@types/node/dns/promises.d.ts +25 -23
- package/node_modules/@types/cacheable-request/node_modules/@types/node/dns.d.ts +23 -23
- package/node_modules/@types/cacheable-request/node_modules/@types/node/events.d.ts +11 -27
- package/node_modules/@types/cacheable-request/node_modules/@types/node/package.json +3 -3
- package/node_modules/@types/cacheable-request/node_modules/@types/node/test.d.ts +13 -2
- package/node_modules/@types/is-stream/node_modules/@types/node/README.md +1 -1
- package/node_modules/@types/is-stream/node_modules/@types/node/buffer.d.ts +0 -81
- package/node_modules/@types/is-stream/node_modules/@types/node/child_process.d.ts +2 -2
- package/node_modules/@types/is-stream/node_modules/@types/node/crypto.d.ts +5 -4
- package/node_modules/@types/is-stream/node_modules/@types/node/dns/promises.d.ts +25 -23
- package/node_modules/@types/is-stream/node_modules/@types/node/dns.d.ts +23 -23
- package/node_modules/@types/is-stream/node_modules/@types/node/events.d.ts +11 -27
- package/node_modules/@types/is-stream/node_modules/@types/node/package.json +3 -3
- package/node_modules/@types/is-stream/node_modules/@types/node/test.d.ts +13 -2
- package/node_modules/@types/keyv/node_modules/@types/node/README.md +1 -1
- package/node_modules/@types/keyv/node_modules/@types/node/buffer.d.ts +0 -81
- package/node_modules/@types/keyv/node_modules/@types/node/child_process.d.ts +2 -2
- package/node_modules/@types/keyv/node_modules/@types/node/crypto.d.ts +5 -4
- package/node_modules/@types/keyv/node_modules/@types/node/dns/promises.d.ts +25 -23
- package/node_modules/@types/keyv/node_modules/@types/node/dns.d.ts +23 -23
- package/node_modules/@types/keyv/node_modules/@types/node/events.d.ts +11 -27
- package/node_modules/@types/keyv/node_modules/@types/node/package.json +3 -3
- package/node_modules/@types/keyv/node_modules/@types/node/test.d.ts +13 -2
- package/node_modules/@types/responselike/node_modules/@types/node/README.md +1 -1
- package/node_modules/@types/responselike/node_modules/@types/node/buffer.d.ts +0 -81
- package/node_modules/@types/responselike/node_modules/@types/node/child_process.d.ts +2 -2
- package/node_modules/@types/responselike/node_modules/@types/node/crypto.d.ts +5 -4
- package/node_modules/@types/responselike/node_modules/@types/node/dns/promises.d.ts +25 -23
- package/node_modules/@types/responselike/node_modules/@types/node/dns.d.ts +23 -23
- package/node_modules/@types/responselike/node_modules/@types/node/events.d.ts +11 -27
- package/node_modules/@types/responselike/node_modules/@types/node/package.json +3 -3
- package/node_modules/@types/responselike/node_modules/@types/node/test.d.ts +13 -2
- package/node_modules/jackspeak/dist/commonjs/index.js +2 -2
- package/node_modules/jackspeak/dist/commonjs/index.js.map +1 -1
- package/node_modules/jackspeak/dist/esm/index.js +2 -2
- package/node_modules/jackspeak/dist/esm/index.js.map +1 -1
- package/node_modules/jackspeak/package.json +4 -4
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/glob.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/glob.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/ignore.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/ignore.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/index.d.ts +2 -2
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/index.js +6 -6
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/index.js.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/pattern.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/pattern.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/walker.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/commonjs/walker.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/glob.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/glob.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/ignore.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/ignore.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/index.d.ts +2 -2
- package/node_modules/js-beautify/node_modules/glob/dist/esm/pattern.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/pattern.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/walker.d.ts +0 -1
- package/node_modules/js-beautify/node_modules/glob/dist/esm/walker.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/glob/package.json +5 -4
- package/node_modules/js-beautify/node_modules/minimatch/README.md +1 -1
- package/node_modules/js-beautify/node_modules/minimatch/dist/commonjs/index.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/minimatch/dist/commonjs/index.js +5 -4
- package/node_modules/js-beautify/node_modules/minimatch/dist/commonjs/index.js.map +1 -1
- package/node_modules/js-beautify/node_modules/minimatch/dist/esm/index.d.ts.map +1 -1
- package/node_modules/js-beautify/node_modules/minimatch/dist/esm/index.js +5 -4
- package/node_modules/js-beautify/node_modules/minimatch/dist/esm/index.js.map +1 -1
- package/node_modules/js-beautify/node_modules/minimatch/package.json +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/glob.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/glob.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/ignore.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/ignore.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/index.d.ts +2 -2
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/index.js +6 -6
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/index.js.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/pattern.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/pattern.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/walker.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/commonjs/walker.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/glob.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/glob.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/ignore.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/ignore.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/index.d.ts +2 -2
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/pattern.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/pattern.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/walker.d.ts +0 -1
- package/node_modules/mjml-cli/node_modules/glob/dist/esm/walker.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/glob/package.json +5 -4
- package/node_modules/mjml-cli/node_modules/minimatch/README.md +1 -1
- package/node_modules/mjml-cli/node_modules/minimatch/dist/commonjs/index.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/minimatch/dist/commonjs/index.js +5 -4
- package/node_modules/mjml-cli/node_modules/minimatch/dist/commonjs/index.js.map +1 -1
- package/node_modules/mjml-cli/node_modules/minimatch/dist/esm/index.d.ts.map +1 -1
- package/node_modules/mjml-cli/node_modules/minimatch/dist/esm/index.js +5 -4
- package/node_modules/mjml-cli/node_modules/minimatch/dist/esm/index.js.map +1 -1
- package/node_modules/mjml-cli/node_modules/minimatch/package.json +1 -1
- package/node_modules/path-scurry/node_modules/lru-cache/README.md +11 -884
- package/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.d.ts +486 -65
- package/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.d.ts.map +1 -1
- package/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js +134 -34
- package/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js.map +1 -1
- package/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js +1 -1
- package/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js.map +3 -3
- package/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.d.ts +486 -65
- package/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.d.ts.map +1 -1
- package/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js +134 -34
- package/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js.map +1 -1
- package/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js +1 -1
- package/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js.map +3 -3
- package/node_modules/path-scurry/node_modules/lru-cache/package.json +8 -9
- package/node_modules/semver/README.md +1 -1
- package/node_modules/semver/classes/range.js +24 -10
- package/node_modules/semver/package.json +1 -1
- package/node_modules/uglify-js/README.md +5 -5
- package/node_modules/uglify-js/lib/ast.js +5 -4
- package/node_modules/uglify-js/lib/compress.js +199 -84
- package/node_modules/uglify-js/lib/output.js +17 -4
- package/node_modules/uglify-js/package.json +1 -1
- package/package.json +20 -20
@@ -57,8 +57,18 @@ export declare namespace LRUCache {
  /**
   * The reason why an item was removed from the cache, passed
   * to the {@link Disposer} methods.
- */
- type DisposeReason = 'evict' | 'set' | 'delete';
+ *
+ * - `evict`: The item was evicted because it is the least recently used,
+ * and the cache is full.
+ * - `set`: A new value was set, overwriting the old value being disposed.
+ * - `delete`: The item was explicitly deleted, either by calling
+ * {@link LRUCache#delete}, {@link LRUCache#clear}, or
+ * {@link LRUCache#set} with an undefined value.
+ * - `expire`: The item was removed due to exceeding its TTL.
+ * - `fetch`: A {@link OptionsBase#fetchMethod} operation returned
+ * `undefined` or was aborted, causing the item to be deleted.
+ */
+ type DisposeReason = 'evict' | 'set' | 'delete' | 'expire' | 'fetch';
  /**
   * A method called upon item removal, passed as the
   * {@link OptionsBase.dispose} and/or
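A minimal sketch of how the expanded `DisposeReason` union above can be consumed, assuming the `lru-cache` import path; the resource type and cleanup logic are illustrative, and the `'expire'` and `'fetch'` reasons are the ones this version adds.

```ts
import { LRUCache } from 'lru-cache'

// Sketch only: a dispose callback that branches on the documented reasons.
const cache = new LRUCache<string, { close(): void }>({
  max: 100,
  ttl: 60_000,
  dispose: (value, _key, reason) => {
    // reason: 'evict' | 'set' | 'delete' | 'expire' | 'fetch'
    if (reason === 'evict' || reason === 'expire') {
      value.close() // release the resource when it falls out of the cache
    }
  },
})
```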
@@ -84,8 +94,14 @@ export declare namespace LRUCache {
  context: FC;
  }
  /**
- *
- *
+ * Occasionally, it may be useful to track the internal behavior of the
+ * cache, particularly for logging, debugging, or for behavior within the
+ * `fetchMethod`. To do this, you can pass a `status` object to the
+ * {@link LRUCache#fetch}, {@link LRUCache#get}, {@link LRUCache#set},
+ * {@link LRUCache#memo}, and {@link LRUCache#has} methods.
+ *
+ * The `status` option should be a plain JavaScript object. The following
+ * fields will be set on it appropriately, depending on the situation.
   */
  interface Status<V> {
  /**
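A small sketch of the `status` tracking described above, assuming the `lru-cache` import path; the exact fields populated depend on the operation, so the commented values are expectations rather than guarantees.

```ts
import { LRUCache } from 'lru-cache'

const cache = new LRUCache<string, number>({ max: 10, ttl: 1000 })
cache.set('a', 1)

// Pass a plain object; the cache fills in fields describing what happened.
const status: LRUCache.Status<number> = {}
cache.get('a', { status })
console.log(status.get) // expected 'hit' for a fresh entry; 'miss' or 'stale' otherwise
```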
@@ -145,7 +161,8 @@ export declare namespace LRUCache {
   * various states.
   *
   * - inflight: there is another fetch() for this key which is in process
- * - get: there is no
+ * - get: there is no {@link OptionsBase.fetchMethod}, so
+ * {@link LRUCache#get} was called.
   * - miss: the item is not in cache, and will be fetched.
   * - hit: the item is in the cache, and was resolved immediately.
   * - stale: the item is in the cache, but stale.
@@ -254,6 +271,67 @@ export declare namespace LRUCache {
  interface FetchOptionsNoContext<K, V> extends FetchOptions<K, V, undefined> {
  context?: undefined;
  }
+ interface MemoOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> {
+ /**
+ * Set to true to force a re-load of the existing data, even if it
+ * is not yet stale.
+ */
+ forceRefresh?: boolean;
+ /**
+ * Context provided to the {@link OptionsBase.memoMethod} as
+ * the {@link MemoizerOptions.context} param.
+ *
+ * If the FC type is specified as unknown (the default),
+ * undefined or void, then this is optional. Otherwise, it will
+ * be required.
+ */
+ context?: FC;
+ status?: Status<V>;
+ }
+ /**
+ * Options provided to {@link LRUCache#memo} when the FC type is something
+ * other than `unknown`, `undefined`, or `void`
+ */
+ interface MemoOptionsWithContext<K, V, FC> extends MemoOptions<K, V, FC> {
+ context: FC;
+ }
+ /**
+ * Options provided to {@link LRUCache#memo} when the FC type is
+ * `undefined` or `void`
+ */
+ interface MemoOptionsNoContext<K, V> extends MemoOptions<K, V, undefined> {
+ context?: undefined;
+ }
+ /**
+ * Options provided to the
+ * {@link OptionsBase.memoMethod} function.
+ */
+ interface MemoizerOptions<K, V, FC = unknown> {
+ options: MemoizerMemoOptions<K, V, FC>;
+ /**
+ * Object provided in the {@link MemoOptions.context} option to
+ * {@link LRUCache#memo}
+ */
+ context: FC;
+ }
+ /**
+ * options which override the options set in the LRUCache constructor
+ * when calling {@link LRUCache#memo}.
+ *
+ * This is the union of {@link GetOptions} and {@link SetOptions}, plus
+ * {@link MemoOptions.forceRefresh}, and
+ * {@link MemoerOptions.context}
+ *
+ * Any of these may be modified in the {@link OptionsBase.memoMethod}
+ * function, but the {@link GetOptions} fields will of course have no
+ * effect, as the {@link LRUCache#get} call already happened by the time
+ * the memoMethod is called.
+ */
+ interface MemoizerMemoOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> {
+ status?: Status<V>;
+ size?: Size;
+ start?: Milliseconds;
+ }
  /**
   * Options that may be passed to the {@link LRUCache#has} method.
   */
@@ -296,6 +374,10 @@ export declare namespace LRUCache {
   * The type signature for the {@link OptionsBase.fetchMethod} option.
   */
  type Fetcher<K, V, FC = unknown> = (key: K, staleValue: V | undefined, options: FetcherOptions<K, V, FC>) => Promise<V | undefined | void> | V | undefined | void;
+ /**
+ * the type signature for the {@link OptionsBase.memoMethod} option.
+ */
+ type Memoizer<K, V, FC = unknown> = (key: K, staleValue: V | undefined, options: MemoizerOptions<K, V, FC>) => V;
  /**
   * Options which may be passed to the {@link LRUCache} constructor.
   *
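A sketch of the new `memoMethod`/`memo()` pair declared above, assuming the `lru-cache` import path; the recursive Fibonacci memoizer is an illustration of the synchronous `Memoizer` signature, not an example from the package itself.

```ts
import { LRUCache } from 'lru-cache'

// memoMethod computes a value synchronously; memo() returns the cached value
// or computes it via memoMethod and stores it.
const fib = new LRUCache<number, number>({
  max: 1000,
  memoMethod: (key) => (key < 2 ? key : fib.memo(key - 1) + fib.memo(key - 2)),
})

console.log(fib.memo(40)) // computed once per key, then served from cache
```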
@@ -310,6 +392,14 @@ export declare namespace LRUCache {
   * (and in fact required by the type definitions here) that the cache
   * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially
   * unbounded storage.
+ *
+ * All options are also available on the {@link LRUCache} instance, making
+ * it safe to pass an LRUCache instance as the options argumemnt to
+ * make another empty cache of the same type.
+ *
+ * Some options are marked as read-only, because changing them after
+ * instantiation is not safe. Changing any of the other options will of
+ * course only have an effect on subsequent method calls.
   */
  interface OptionsBase<K, V, FC> {
  /**
@@ -323,20 +413,44 @@ export declare namespace LRUCache {
   * Note that significantly fewer items may be stored, if
   * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also
   * set.
+ *
+ * **It is strongly recommended to set a `max` to prevent unbounded growth
+ * of the cache.**
   */
  max?: Count;
  /**
   * Max time in milliseconds for items to live in cache before they are
- * considered stale. Note that stale items are NOT preemptively removed
- *
+ * considered stale. Note that stale items are NOT preemptively removed by
+ * default, and MAY live in the cache, contributing to its LRU max, long
+ * after they have expired, unless {@link OptionsBase.ttlAutopurge} is
+ * set.
+ *
+ * If set to `0` (the default value), then that means "do not track
+ * TTL", not "expire immediately".
   *
   * Also, as this cache is optimized for LRU/MRU operations, some of
   * the staleness/TTL checks will reduce performance, as they will incur
   * overhead by deleting items.
   *
- *
+ * This is not primarily a TTL cache, and does not make strong TTL
+ * guarantees. There is no pre-emptive pruning of expired items, but you
+ * _may_ set a TTL on the cache, and it will treat expired items as missing
+ * when they are fetched, and delete them.
+ *
+ * Optional, but must be a non-negative integer in ms if specified.
+ *
+ * This may be overridden by passing an options object to `cache.set()`.
+ *
+ * At least one of `max`, `maxSize`, or `TTL` is required. This must be a
+ * positive integer if set.
+ *
+ * Even if ttl tracking is enabled, **it is strongly recommended to set a
+ * `max` to prevent unbounded growth of the cache.**
   *
- *
+ * If ttl tracking is enabled, and `max` and `maxSize` are not set,
+ * and `ttlAutopurge` is not set, then a warning will be emitted
+ * cautioning about the potential for unbounded memory consumption.
+ * (The TypeScript definitions will also discourage this.)
   */
  ttl?: Milliseconds;
  /**
@@ -356,54 +470,95 @@ export declare namespace LRUCache {
  ttlResolution?: Milliseconds;
  /**
   * Preemptively remove stale items from the cache.
- *
- *
- *
- * the cache, and let them fall out as new
+ *
+ * Note that this may *significantly* degrade performance, especially if
+ * the cache is storing a large number of items. It is almost always best
+ * to just leave the stale items in the cache, and let them fall out as new
+ * items are added.
   *
   * Note that this means that {@link OptionsBase.allowStale} is a bit
   * pointless, as stale items will be deleted almost as soon as they
   * expire.
   *
- *
+ * Use with caution!
   */
  ttlAutopurge?: boolean;
  /**
- *
+ * When using time-expiring entries with `ttl`, setting this to `true` will
+ * make each item's age reset to 0 whenever it is retrieved from cache with
+ * {@link LRUCache#get}, causing it to not expire. (It can still fall out
+ * of cache based on recency of use, of course.)
   *
   * Has no effect if {@link OptionsBase.ttl} is not set.
   *
- *
+ * This may be overridden by passing an options object to `cache.get()`.
   */
  updateAgeOnGet?: boolean;
  /**
- *
+ * When using time-expiring entries with `ttl`, setting this to `true` will
+ * make each item's age reset to 0 whenever its presence in the cache is
+ * checked with {@link LRUCache#has}, causing it to not expire. (It can
+ * still fall out of cache based on recency of use, of course.)
   *
   * Has no effect if {@link OptionsBase.ttl} is not set.
- *
- * @default false
   */
  updateAgeOnHas?: boolean;
  /**
   * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return
   * stale data, if available.
+ *
+ * By default, if you set `ttl`, stale items will only be deleted from the
+ * cache when you `get(key)`. That is, it's not preemptively pruning items,
+ * unless {@link OptionsBase.ttlAutopurge} is set.
+ *
+ * If you set `allowStale:true`, it'll return the stale value *as well as*
+ * deleting it. If you don't set this, then it'll return `undefined` when
+ * you try to get a stale entry.
+ *
+ * Note that when a stale entry is fetched, _even if it is returned due to
+ * `allowStale` being set_, it is removed from the cache immediately. You
+ * can suppress this behavior by setting
+ * {@link OptionsBase.noDeleteOnStaleGet}, either in the constructor, or in
+ * the options provided to {@link LRUCache#get}.
+ *
+ * This may be overridden by passing an options object to `cache.get()`.
+ * The `cache.has()` method will always return `false` for stale items.
+ *
+ * Only relevant if a ttl is set.
   */
  allowStale?: boolean;
  /**
- * Function that is called on items when they are dropped from the
- *
- *
- *
- *
+ * Function that is called on items when they are dropped from the
+ * cache, as `dispose(value, key, reason)`.
+ *
+ * This can be handy if you want to close file descriptors or do
+ * other cleanup tasks when items are no longer stored in the cache.
+ *
+ * **NOTE**: It is called _before_ the item has been fully removed
+ * from the cache, so if you want to put it right back in, you need
+ * to wait until the next tick. If you try to add it back in during
+ * the `dispose()` function call, it will break things in subtle and
+ * weird ways.
   *
- *
- *
- *
+ * Unlike several other options, this may _not_ be overridden by
+ * passing an option to `set()`, for performance reasons.
+ *
+ * The `reason` will be one of the following strings, corresponding
+ * to the reason for the item's deletion:
+ *
+ * - `evict` Item was evicted to make space for a new addition
+ * - `set` Item was overwritten by a new value
+ * - `expire` Item expired its TTL
+ * - `fetch` Item was deleted due to a failed or aborted fetch, or a
+ * fetchMethod returning `undefined.
+ * - `delete` Item was removed by explicit `cache.delete(key)`,
+ * `cache.clear()`, or `cache.set(key, undefined)`.
   */
  dispose?: Disposer<K, V>;
  /**
   * The same as {@link OptionsBase.dispose}, but called *after* the entry
   * is completely removed and the cache is once again in a clean state.
+ *
   * It is safe to add an item right back into the cache at this point.
   * However, note that it is *very* easy to inadvertently create infinite
   * recursion this way.
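A configuration sketch pulling together the TTL-related options documented above, assuming the `lru-cache` import path; the numbers are illustrative.

```ts
import { LRUCache } from 'lru-cache'

const cache = new LRUCache<string, string>({
  max: 500,               // strongly recommended alongside ttl
  ttl: 5 * 60 * 1000,     // entries become stale after 5 minutes
  allowStale: true,       // get() may return a stale entry one last time
  updateAgeOnGet: false,  // reads do not reset an entry's TTL
  ttlAutopurge: false,    // no preemptive pruning; stale entries fall out lazily
})

cache.set('greeting', 'hello')
cache.get('greeting') // 'hello' while fresh; possibly the stale value once expired
```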
@@ -413,26 +568,43 @@ export declare namespace LRUCache {
   * Set to true to suppress calling the
   * {@link OptionsBase.dispose} function if the entry key is
   * still accessible within the cache.
+ *
   * This may be overridden by passing an options object to
   * {@link LRUCache#set}.
+ *
+ * Only relevant if `dispose` or `disposeAfter` are set.
   */
  noDisposeOnSet?: boolean;
  /**
- * Boolean flag to tell the cache to not update the TTL when
- *
- *
- *
+ * Boolean flag to tell the cache to not update the TTL when setting a new
+ * value for an existing key (ie, when updating a value rather than
+ * inserting a new value). Note that the TTL value is _always_ set (if
+ * provided) when adding a new entry into the cache.
   *
   * Has no effect if a {@link OptionsBase.ttl} is not set.
+ *
+ * May be passed as an option to {@link LRUCache#set}.
   */
  noUpdateTTL?: boolean;
  /**
- *
- *
- *
- *
- *
- *
+ * Set to a positive integer to track the sizes of items added to the
+ * cache, and automatically evict items in order to stay below this size.
+ * Note that this may result in fewer than `max` items being stored.
+ *
+ * Attempting to add an item to the cache whose calculated size is greater
+ * that this amount will be a no-op. The item will not be cached, and no
+ * other items will be evicted.
+ *
+ * Optional, must be a positive integer if provided.
+ *
+ * Sets `maxEntrySize` to the same value, unless a different value is
+ * provided for `maxEntrySize`.
+ *
+ * At least one of `max`, `maxSize`, or `TTL` is required. This must be a
+ * positive integer if set.
+ *
+ * Even if size tracking is enabled, **it is strongly recommended to set a
+ * `max` to prevent unbounded growth of the cache.**
   *
   * Note also that size tracking can negatively impact performance,
   * though for most cases, only minimally.
@@ -442,13 +614,22 @@ export declare namespace LRUCache {
   * The maximum allowed size for any single item in the cache.
   *
   * If a larger item is passed to {@link LRUCache#set} or returned by a
- * {@link OptionsBase.fetchMethod}
- * cache.
+ * {@link OptionsBase.fetchMethod} or {@link OptionsBase.memoMethod}, then
+ * it will not be stored in the cache.
+ *
+ * Attempting to add an item whose calculated size is greater than
+ * this amount will not cache the item or evict any old items, but
+ * WILL delete an existing value if one is already present.
+ *
+ * Optional, must be a positive integer if provided. Defaults to
+ * the value of `maxSize` if provided.
   */
  maxEntrySize?: Size;
  /**
   * A function that returns a number indicating the item's size.
   *
+ * Requires {@link OptionsBase.maxSize} to be set.
+ *
   * If not provided, and {@link OptionsBase.maxSize} or
   * {@link OptionsBase.maxEntrySize} are set, then all
   * {@link LRUCache#set} calls **must** provide an explicit
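A sketch of the size-tracking options documented above (`maxSize`, `maxEntrySize`, `sizeCalculation`), assuming the `lru-cache` import path; the budgets are illustrative.

```ts
import { LRUCache } from 'lru-cache'

const pages = new LRUCache<string, string>({
  max: 10_000,
  maxSize: 5_000_000,                       // total tracked size budget
  maxEntrySize: 50_000,                     // single entries above this are not stored
  sizeCalculation: (value) => value.length, // per-item size in characters
})

pages.set('/home', '<html>…</html>')
```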
@@ -457,8 +638,41 @@ export declare namespace LRUCache {
  sizeCalculation?: SizeCalculator<K, V>;
  /**
   * Method that provides the implementation for {@link LRUCache#fetch}
+ *
+ * ```ts
+ * fetchMethod(key, staleValue, { signal, options, context })
+ * ```
+ *
+ * If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent
+ * to `Promise.resolve(cache.get(key))`.
+ *
+ * If at any time, `signal.aborted` is set to `true`, or if the
+ * `signal.onabort` method is called, or if it emits an `'abort'` event
+ * which you can listen to with `addEventListener`, then that means that
+ * the fetch should be abandoned. This may be passed along to async
+ * functions aware of AbortController/AbortSignal behavior.
+ *
+ * The `fetchMethod` should **only** return `undefined` or a Promise
+ * resolving to `undefined` if the AbortController signaled an `abort`
+ * event. In all other cases, it should return or resolve to a value
+ * suitable for adding to the cache.
+ *
+ * The `options` object is a union of the options that may be provided to
+ * `set()` and `get()`. If they are modified, then that will result in
+ * modifying the settings to `cache.set()` when the value is resolved, and
+ * in the case of
+ * {@link OptionsBase.noDeleteOnFetchRejection} and
+ * {@link OptionsBase.allowStaleOnFetchRejection}, the handling of
+ * `fetchMethod` failures.
+ *
+ * For example, a DNS cache may update the TTL based on the value returned
+ * from a remote DNS server by changing `options.ttl` in the `fetchMethod`.
   */
  fetchMethod?: Fetcher<K, V, FC>;
+ /**
+ * Method that provides the implementation for {@link LRUCache#memo}
+ */
+ memoMethod?: Memoizer<K, V, FC>;
  /**
   * Set to true to suppress the deletion of stale data when a
   * {@link OptionsBase.fetchMethod} returns a rejected promise.
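A sketch of a `fetchMethod` implementation matching the signature documented above, assuming the `lru-cache` import path; the URL and response shape are hypothetical.

```ts
import { LRUCache } from 'lru-cache'

const users = new LRUCache<string, { name: string }>({
  max: 100,
  ttl: 30_000,
  fetchMethod: async (id, _staleValue, { signal }) => {
    // Propagate the AbortSignal so an evicted or aborted fetch is cancelled.
    const res = await fetch(`https://example.com/users/${id}`, { signal })
    if (!res.ok) throw new Error(`HTTP ${res.status}`)
    return (await res.json()) as { name: string }
  },
})

// Concurrent fetch() calls for the same key coalesce into one fetchMethod call.
const user = await users.fetch('123')
```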
@@ -470,6 +684,18 @@ export declare namespace LRUCache {
   *
   * Note that the `get` return value will still be `undefined`
   * unless {@link OptionsBase.allowStale} is true.
+ *
+ * When using time-expiring entries with `ttl`, by default stale
+ * items will be removed from the cache when the key is accessed
+ * with `cache.get()`.
+ *
+ * Setting this option will cause stale items to remain in the cache, until
+ * they are explicitly deleted with `cache.delete(key)`, or retrieved with
+ * `noDeleteOnStaleGet` set to `false`.
+ *
+ * This may be overridden by passing an options object to `cache.get()`.
+ *
+ * Only relevant if a ttl is used.
   */
  noDeleteOnStaleGet?: boolean;
  /**
@@ -478,14 +704,24 @@ export declare namespace LRUCache {
   * promise.
   *
   * This differs from using {@link OptionsBase.allowStale} in that stale
- * data will ONLY be returned in the case that the
- *
+ * data will ONLY be returned in the case that the {@link LRUCache#fetch}
+ * fails, not any other times.
+ *
+ * If a `fetchMethod` fails, and there is no stale value available, the
+ * `fetch()` will resolve to `undefined`. Ie, all `fetchMethod` errors are
+ * suppressed.
+ *
+ * Implies `noDeleteOnFetchRejection`.
+ *
+ * This may be set in calls to `fetch()`, or defaulted on the constructor,
+ * or overridden by modifying the options object in the `fetchMethod`.
   */
  allowStaleOnFetchRejection?: boolean;
  /**
   * Set to true to return a stale value from the cache when the
- * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches
- * event, whether user-triggered, or due to internal cache
+ * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches
+ * an `'abort'` event, whether user-triggered, or due to internal cache
+ * behavior.
   *
   * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying
   * {@link OptionsBase.fetchMethod} will still be considered canceled, and
@@ -521,9 +757,9 @@ export declare namespace LRUCache {
   * object passed to {@link OptionsBase.fetchMethod}, and still cache the
   * resulting resolution value, as long as it is not `undefined`.
   *
- * When used on its own, this means aborted {@link LRUCache#fetch} calls
- * immediately resolved or rejected when they are aborted, and
- * take the full time to await.
+ * When used on its own, this means aborted {@link LRUCache#fetch} calls
+ * are not immediately resolved or rejected when they are aborted, and
+ * instead take the full time to await.
   *
   * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted
   * {@link LRUCache#fetch} calls will resolve immediately to their stale
@@ -532,6 +768,26 @@ export declare namespace LRUCache {
   * not `undefined`, thus supporting a "return stale on timeout while
   * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal.
   *
+ * For example:
+ *
+ * ```ts
+ * const c = new LRUCache({
+ * ttl: 100,
+ * ignoreFetchAbort: true,
+ * allowStaleOnFetchAbort: true,
+ * fetchMethod: async (key, oldValue, { signal }) => {
+ * // note: do NOT pass the signal to fetch()!
+ * // let's say this fetch can take a long time.
+ * const res = await fetch(`https://slow-backend-server/${key}`)
+ * return await res.json()
+ * },
+ * })
+ *
+ * // this will return the stale value after 100ms, while still
+ * // updating in the background for next time.
+ * const val = await c.fetch('key', { signal: AbortSignal.timeout(100) })
+ * ```
+ *
   * **Note**: regardless of this setting, an `abort` event _is still
   * emitted on the `AbortSignal` object_, so may result in invalid results
   * when passed to other underlying APIs that use AbortSignals.
@@ -569,11 +825,17 @@ export declare namespace LRUCache {
  /**
   * Default export, the thing you're using this module to get.
   *
- *
- *
- *
- *
- *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
   */
  export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implements Map<K, V> {
  #private;
@@ -689,6 +951,7 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
   * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
   */
  get fetchMethod(): LRUCache.Fetcher<K, V, FC> | undefined;
+ get memoMethod(): LRUCache.Memoizer<K, V, FC> | undefined;
  /**
   * {@link LRUCache.OptionsBase.dispose} (read-only)
   */
@@ -699,7 +962,8 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
  get disposeAfter(): LRUCache.Disposer<K, V> | undefined;
  constructor(options: LRUCache.Options<K, V, FC> | LRUCache<K, V, FC>);
  /**
- * Return the
+ * Return the number of ms left in the item's TTL. If item is not in cache,
+ * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
   */
  getRemainingTTL(key: K): number;
  /**
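A sketch of `getRemainingTTL()` per the doc comment above, assuming the `lru-cache` import path; the values in the comments restate what that doc promises rather than measured output.

```ts
import { LRUCache } from 'lru-cache'

const cache = new LRUCache<string, number>({ max: 10, ttl: 60_000 })
cache.set('short', 1, { ttl: 5_000 }) // per-entry TTL override
cache.set('forever', 2, { ttl: 0 })   // 0 = do not track TTL for this entry

cache.getRemainingTTL('short')   // roughly 5000, counting down
cache.getRemainingTTL('forever') // Infinity (entry has no defined TTL)
cache.getRemainingTTL('missing') // 0 (not in the cache)
```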
@@ -744,20 +1008,26 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
   */
  [Symbol.iterator](): Generator<[K, V], void, unknown>;
  /**
- * A String value that is used in the creation of the default string
- * Called by the built-in method
+ * A String value that is used in the creation of the default string
+ * description of an object. Called by the built-in method
+ * `Object.prototype.toString`.
   */
  [Symbol.toStringTag]: string;
  /**
   * Find a value for which the supplied fn method returns a truthy value,
- * similar to Array.find()
+ * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
   */
  find(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => boolean, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
  /**
- * Call the supplied function on each item in the cache, in order from
- *
- *
- *
+ * Call the supplied function on each item in the cache, in order from most
+ * recently used to least recently used.
+ *
+ * `fn` is called as `fn(value, key, cache)`.
+ *
+ * If `thisp` is provided, function will be called in the `this`-context of
+ * the provided object, or the cache if no `thisp` object is provided.
+ *
+ * Does not update age or recenty of use, or iterate over stale values.
   */
  forEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
  /**
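A sketch of `find()` and `forEach()` as documented above, assuming the `lru-cache` import path; both callbacks receive `(value, key, cache)`.

```ts
import { LRUCache } from 'lru-cache'

const cache = new LRUCache<string, { n: number }>({ max: 10 })
cache.set('a', { n: 1 })
cache.set('b', { n: 2 })

const firstBig = cache.find((v) => v.n > 1)  // { n: 2 }
cache.forEach((v, k) => console.log(k, v.n)) // iterates most- to least-recently used
```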
@@ -772,20 +1042,39 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
  purgeStale(): boolean;
  /**
   * Get the extended info about a given entry, to get its value, size, and
- * TTL info simultaneously.
- *
- *
+ * TTL info simultaneously. Returns `undefined` if the key is not present.
+ *
+ * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+ * serialization, the `start` value is always the current timestamp, and the
+ * `ttl` is a calculated remaining time to live (negative if expired).
+ *
+ * Always returns stale values, if their info is found in the cache, so be
+ * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+ * if relevant.
   */
  info(key: K): LRUCache.Entry<V> | undefined;
  /**
   * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
- * passed to
+ * passed to {@link LRLUCache#load}.
+ *
+ * The `start` fields are calculated relative to a portable `Date.now()`
+ * timestamp, even if `performance.now()` is available.
+ *
+ * Stale entries are always included in the `dump`, even if
+ * {@link LRUCache.OptionsBase.allowStale} is false.
+ *
+ * Note: this returns an actual array, not a generator, so it can be more
+ * easily passed around.
   */
  dump(): [K, LRUCache.Entry<V>][];
  /**
   * Reset the cache and load in the items in entries in the order listed.
- *
- *
+ *
+ * The shape of the resulting cache may be different if the same options are
+ * not used in both caches.
+ *
+ * The `start` fields are assumed to be calculated relative to a portable
+ * `Date.now()` timestamp, even if `performance.now()` is available.
   */
  load(arr: [K, LRUCache.Entry<V>][]): void;
  /**
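A round-trip sketch of `dump()`/`load()` per the docs above, assuming the `lru-cache` import path; the snapshot uses portable `Date.now()`-based `start` fields, so it can be serialized between processes.

```ts
import { LRUCache } from 'lru-cache'

const a = new LRUCache<string, number>({ max: 10, ttl: 60_000 })
a.set('x', 1)
a.set('y', 2)

const snapshot = a.dump() // [key, entry][] — JSON-serializable

const b = new LRUCache<string, number>({ max: 10, ttl: 60_000 }) // same options
b.load(snapshot)
console.log(b.get('y')) // 2
```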
@@ -793,6 +1082,30 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
   *
   * Note: if `undefined` is specified as a value, this is an alias for
   * {@link LRUCache#delete}
+ *
+ * Fields on the {@link LRUCache.SetOptions} options param will override
+ * their corresponding values in the constructor options for the scope
+ * of this single `set()` operation.
+ *
+ * If `start` is provided, then that will set the effective start
+ * time for the TTL calculation. Note that this must be a previous
+ * value of `performance.now()` if supported, or a previous value of
+ * `Date.now()` if not.
+ *
+ * Options object may also include `size`, which will prevent
+ * calling the `sizeCalculation` function and just use the specified
+ * number if it is a positive integer, and `noDisposeOnSet` which
+ * will prevent calling a `dispose` function in the case of
+ * overwrites.
+ *
+ * If the `size` (or return value of `sizeCalculation`) for a given
+ * entry is greater than `maxEntrySize`, then the item will not be
+ * added to the cache.
+ *
+ * Will update the recency of the entry.
+ *
+ * If the value is `undefined`, then this is an alias for
+ * `cache.delete(key)`. `undefined` is never stored in the cache.
   */
  set(k: K, v: V | BackgroundFetch<V> | undefined, setOptions?: LRUCache.SetOptions<K, V, FC>): this;
  /**
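A sketch of the per-call `set()` options described above, assuming the `lru-cache` import path; the values are illustrative.

```ts
import { LRUCache } from 'lru-cache'

const cache = new LRUCache<string, string>({
  max: 50,
  maxSize: 1_000_000,
  sizeCalculation: (v) => v.length,
  ttl: 60_000,
})

const body = 'x'.repeat(2048)
cache.set('report', body, {
  ttl: 10_000,          // override the constructor ttl for this entry only
  size: body.length,    // skip sizeCalculation and use this size directly
  noDisposeOnSet: true, // do not call dispose() when overwriting an existing entry
})
```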
@@ -805,6 +1118,14 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
   * Will return false if the item is stale, even though it is technically
   * in the cache.
   *
+ * Check if a key is in the cache, without updating the recency of
+ * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+ * to `true` in either the options or the constructor.
+ *
+ * Will return `false` if the item is stale, even though it is technically in
+ * the cache. The difference can be determined (if it matters) by using a
+ * `status` argument, and inspecting the `has` field.
+ *
   * Will not update item age unless
   * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
   */
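A sketch of `has()` with a `status` object, per the doc above, assuming the `lru-cache` import path; `status.has` is the field that doc points to for telling a stale entry apart from a missing one.

```ts
import { LRUCache } from 'lru-cache'

const cache = new LRUCache<string, number>({ max: 10, ttl: 1000 })
cache.set('k', 1)

const status: LRUCache.Status<number> = {}
cache.has('k', { status }) // true while fresh, false once stale or evicted
console.log(status.has)    // expected to describe the entry state (e.g. hit vs. stale vs. miss)
```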
@@ -821,6 +1142,25 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
   * Make an asynchronous cached fetch using the
   * {@link LRUCache.OptionsBase.fetchMethod} function.
   *
+ * If the value is in the cache and not stale, then the returned
+ * Promise resolves to the value.
+ *
+ * If not in the cache, or beyond its TTL staleness, then
+ * `fetchMethod(key, staleValue, { options, signal, context })` is
+ * called, and the value returned will be added to the cache once
+ * resolved.
+ *
+ * If called with `allowStale`, and an asynchronous fetch is
+ * currently in progress to reload a stale value, then the former
+ * stale value will be returned.
+ *
+ * If called with `forceRefresh`, then the cached item will be
+ * re-fetched, even if it is not stale. However, if `allowStale` is also
+ * set, then the old value will still be returned. This is useful
+ * in cases where you want to force a reload of a cached value. If
+ * a background fetch is already in progress, then `forceRefresh`
+ * has no effect.
+ *
   * If multiple fetches for the same key are issued, then they will all be
   * coalesced into a single call to fetchMethod.
   *
@@ -833,9 +1173,89 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
   * This is a known (fixable) shortcoming which will be addresed on when
   * someone complains about it, as the fix would involve added complexity and
   * may not be worth the costs for this edge case.
+ *
+ * If {@link LRUCache.OptionsBase.fetchMethod} is not specified, then this is
+ * effectively an alias for `Promise.resolve(cache.get(key))`.
+ *
+ * When the fetch method resolves to a value, if the fetch has not
+ * been aborted due to deletion, eviction, or being overwritten,
+ * then it is added to the cache using the options provided.
+ *
+ * If the key is evicted or deleted before the `fetchMethod`
+ * resolves, then the AbortSignal passed to the `fetchMethod` will
+ * receive an `abort` event, and the promise returned by `fetch()`
+ * will reject with the reason for the abort.
+ *
+ * If a `signal` is passed to the `fetch()` call, then aborting the
+ * signal will abort the fetch and cause the `fetch()` promise to
+ * reject with the reason provided.
+ *
+ * **Setting `context`**
+ *
+ * If an `FC` type is set to a type other than `unknown`, `void`, or
+ * `undefined` in the {@link LRUCache} constructor, then all
+ * calls to `cache.fetch()` _must_ provide a `context` option. If
+ * set to `undefined` or `void`, then calls to fetch _must not_
+ * provide a `context` option.
+ *
+ * The `context` param allows you to provide arbitrary data that
+ * might be relevant in the course of fetching the data. It is only
+ * relevant for the course of a single `fetch()` operation, and
+ * discarded afterwards.
+ *
+ * **Note: `fetch()` calls are inflight-unique**
+ *
+ * If you call `fetch()` multiple times with the same key value,
+ * then every call after the first will resolve on the same
+ * promise<sup>1</sup>,
+ * _even if they have different settings that would otherwise change
+ * the behavior of the fetch_, such as `noDeleteOnFetchRejection`
+ * or `ignoreFetchAbort`.
+ *
+ * In most cases, this is not a problem (in fact, only fetching
+ * something once is what you probably want, if you're caching in
+ * the first place). If you are changing the fetch() options
+ * dramatically between runs, there's a good chance that you might
+ * be trying to fit divergent semantics into a single object, and
+ * would be better off with multiple cache instances.
+ *
+ * **1**: Ie, they're not the "same Promise", but they resolve at
+ * the same time, because they're both waiting on the same
+ * underlying fetchMethod response.
   */
  fetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : LRUCache.FetchOptionsWithContext<K, V, FC>): Promise<undefined | V>;
  fetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : never): Promise<undefined | V>;
+ /**
+ * In some cases, `cache.fetch()` may resolve to `undefined`, either because
+ * a {@link LRUCache.OptionsBase#fetchMethod} was not provided (turning
+ * `cache.fetch(k)` into just an async wrapper around `cache.get(k)`) or
+ * because `ignoreFetchAbort` was specified (either to the constructor or
+ * in the {@link LRUCache.FetchOptions}). Also, the
+ * {@link OptionsBase.fetchMethod} may return `undefined` or `void`, making
+ * the test even more complicated.
+ *
+ * Because inferring the cases where `undefined` might be returned are so
+ * cumbersome, but testing for `undefined` can also be annoying, this method
+ * can be used, which will reject if `this.fetch()` resolves to undefined.
+ */
+ forceFetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : LRUCache.FetchOptionsWithContext<K, V, FC>): Promise<V>;
+ forceFetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : never): Promise<V>;
+ /**
+ * If the key is found in the cache, then this is equivalent to
+ * {@link LRUCache#get}. If not, in the cache, then calculate the value using
+ * the {@link LRUCache.OptionsBase.memoMethod}, and add it to the cache.
+ *
+ * If an `FC` type is set to a type other than `unknown`, `void`, or
+ * `undefined` in the LRUCache constructor, then all calls to `cache.memo()`
+ * _must_ provide a `context` option. If set to `undefined` or `void`, then
+ * calls to memo _must not_ provide a `context` option.
+ *
+ * The `context` param allows you to provide arbitrary data that might be
+ * relevant in the course of fetching the data. It is only relevant for the
+ * course of a single `memo()` operation, and discarded afterwards.
+ */
+ memo(k: K, memoOptions: unknown extends FC ? LRUCache.MemoOptions<K, V, FC> : FC extends undefined | void ? LRUCache.MemoOptionsNoContext<K, V> : LRUCache.MemoOptionsWithContext<K, V, FC>): V;
+ memo(k: unknown extends FC ? K : FC extends undefined | void ? K : never, memoOptions?: unknown extends FC ? LRUCache.MemoOptions<K, V, FC> : FC extends undefined | void ? LRUCache.MemoOptionsNoContext<K, V> : never): V;
  /**
   * Return a value from the cache. Will update the recency of the cache
   * entry found.
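A sketch of the context-typed `fetch()` and the new `forceFetch()` declared above, assuming the `lru-cache` import path; the endpoint and token are hypothetical.

```ts
import { LRUCache } from 'lru-cache'

type Ctx = { authToken: string }

const docs = new LRUCache<string, string, Ctx>({
  max: 100,
  fetchMethod: async (key, _stale, { context, signal }) => {
    // `context` is only available for this single fetch() call.
    const res = await fetch(`https://example.com/docs/${key}`, {
      headers: { authorization: context.authToken },
      signal,
    })
    return res.text()
  },
})

// With FC set to a concrete type, `context` is required on every call.
const doc = await docs.fetch('readme', { context: { authToken: 'token-123' } })

// forceFetch() rejects instead of resolving to undefined.
const doc2 = await docs.forceFetch('readme', { context: { authToken: 'token-123' } })
```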
@@ -845,6 +1265,7 @@ export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implemen
  get(k: K, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
  /**
   * Deletes a key out of the cache.
+ *
   * Returns true if the key was deleted, false otherwise.
   */
  delete(k: K): boolean;