cachette 2.1.9 → 4.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2,7 +2,8 @@
2
2
 
3
3
  Resilient cache library supporting concurrent requests through local cache or Redis.
4
4
 
5
- > **This repo is a work-in-progress and is not ready for general use.**
5
+ > **This library is undocumented, and only meant for internal Unito use.**
6
+ > **It remains licensed as MIT; TS source code is bundled in the npm tarball.**
6
7
 
7
8
  ## Installation
8
9
 
package/package.json CHANGED
@@ -1,14 +1,13 @@
1
1
  {
2
2
  "name": "cachette",
3
- "version": "2.1.9",
3
+ "version": "4.0.1",
4
4
  "engines": {
5
- "node": ">=18",
6
- "npm": ">=9.5.0"
5
+ "node": ">=20",
6
+ "npm": ">=10"
7
7
  },
8
8
  "description": "Resilient cache library supporting concurrent requests through local cache or Redis.",
9
9
  "main": "dist/src/index.js",
10
10
  "typings": "dist/src/index.d.ts",
11
- "repository": "https://github.com/unitoio/cachette",
12
11
  "author": {
13
12
  "name": "Unito",
14
13
  "email": "hello@unito.io"
@@ -31,7 +30,7 @@
31
30
  "test:ci": "npm run compile && mkdir -p ci_output/testresults && mocha --config test/mocharc-ci.js 'dist/test/**/*.js'",
32
31
  "relock": "rm -rf node_modules package-lock.json; npm install --package-lock; npm out; true",
33
32
  "postprepare": "npm run --silent githook-install",
34
- "githook-install": "mkdir -p .git/hooks/ && echo '#!/usr/bin/env sh\necho \"⚠️ Reminder that cachette is a *public* repo! ⚠️\"\necho \"No private info in: branch name, commit message, PR title & description & comments!\"\necho \"Ctrl+C to abort git push, Enter to proceed.\"\nread REPLY < /dev/tty' > '.git/hooks/pre-push' && chmod +x '.git/hooks/pre-push'"
33
+ "githook-install": "mkdir -p .git/hooks/ && echo '#!/usr/bin/env sh\necho \"⚠️ Reminder that cachette is a *source-public* package! ⚠️\"\necho \"No private info in comments!!!\"\necho \"Ctrl+C to abort git push, Enter to proceed.\"\nread REPLY < /dev/tty' > '.git/hooks/pre-push' && chmod +x '.git/hooks/pre-push'"
35
34
  },
36
35
  "nyc": {
37
36
  "cache": false,
@@ -57,7 +56,7 @@
57
56
  "@types/chai": "4.x",
58
57
  "@types/eslint__js": "8.x",
59
58
  "@types/mocha": "10.x",
60
- "@types/node": "18.x",
59
+ "@types/node": "20.x",
61
60
  "@types/redlock": "4.x",
62
61
  "@types/sinon": "17.x",
63
62
  "chai": "4.x",
@@ -72,7 +71,7 @@
72
71
  },
73
72
  "dependencies": {
74
73
  "ioredis": "5.x",
75
- "lru-cache": "10.x",
74
+ "lru-cache": "11.x",
76
75
  "redlock": "4.x"
77
76
  }
78
77
  }
package/src/index.ts ADDED
@@ -0,0 +1,5 @@
1
// Barrel file: the library's public API surface.
// Re-exports every cache flavor plus the decorator-based client base class.
export * from './lib/CacheClient';
export * from './lib/CacheInstance';
export * from './lib/LocalCache';
export * from './lib/RedisCache';
export * from './lib/WriteThroughCache';
@@ -0,0 +1,145 @@
1
+ import { CacheInstance, CachableValue } from './CacheInstance';
2
+
3
+ export abstract class CacheClient {
4
+
5
+ protected cacheInstance: CacheInstance;
6
+ protected buildCacheKey(propertyKey: string, args: any[]): string {
7
+
8
+ const buildKeyArgs = (args: any[]) => args
9
+ .filter(x =>
10
+ typeof x !== 'object' ||
11
+ // If the arg is an object, we check that it's not an instance of a class
12
+ (typeof x === 'object' && (x?.constructor.name === 'Object' || x?.constructor.name === 'Array')) ||
13
+ // typeof null === object, then we need to have another condition to accept null as well
14
+ x === null
15
+ ).map(x => {
16
+ if (typeof x === 'object' && !Array.isArray(x) && x) {
17
+ // Check if we have a circular reference in the plain object
18
+ JSON.stringify(x);
19
+
20
+ return Object.entries(x).sort().map(([key, value]) => {
21
+ if (typeof value === 'object') {
22
+ const nestedObjectKeys = buildKeyArgs([value])
23
+ return `${key}-${nestedObjectKeys}`
24
+ }
25
+ return `${key}-${value}`
26
+ }).join('-');
27
+ }
28
+
29
+ if (Array.isArray(x)) {
30
+ const builtKey = buildKeyArgs(x.sort());
31
+ return builtKey.join('-');
32
+ }
33
+ return new String(x).valueOf();
34
+ });
35
+
36
+ const builtKey = [
37
+ propertyKey,
38
+ ...buildKeyArgs(args),
39
+ ].join('-');
40
+
41
+ const maxKeyLength = process.env.UNITO_CACHE_MAX_KEY_LENGTH && parseInt(process.env.UNITO_CACHE_MAX_KEY_LENGTH, 10) || 1000;
42
+ if (builtKey.length > maxKeyLength) {
43
+ throw new Error(`Built key is bigger than ${maxKeyLength} chars`);
44
+ }
45
+
46
+ return builtKey;
47
+ }
48
+
49
+ /**
50
+ * Decorator to cache the calls to a function.
51
+ *
52
+ * @param ttl How long the cache should last, in seconds
53
+ * @param shouldCacheError How the error-caching function (accessible by calling
54
+ * `getErrorCachingFunction`) should decide which errors to cache.
55
+ * Defaults to caching *all* errors. *Again, to insist*: this does not
56
+ * mean the _decorated function_ will cache all errors, it means that
57
+ * _the error-caching function_ will. They live apart and each honors
58
+ * its behavior (decorated function *never* caches errors, the other does)
59
+ */
60
+ public static cached(
61
+ ttl = 0,
62
+ shouldCacheError = (err: Error) => true,
63
+ ): any {
64
+ return function (target: any, propertyKey: string, descriptor: PropertyDescriptor): PropertyDescriptor {
65
+ const origFunction = descriptor.value;
66
+
67
+ // don't use an => function here, or you lose access to 'this'
68
+ const functionCachingResults = function (...args): Promise<CachableValue> {
69
+ const key = this.buildCacheKey(propertyKey, args);
70
+ const fetchFunction = origFunction.bind(this, ...args);
71
+ return this.cacheInstance.getOrFetchValue(
72
+ key,
73
+ ttl,
74
+ fetchFunction,
75
+ undefined,
76
+ );
77
+ };
78
+ const functionCachingResultsAndErrors = function (...args): Promise<CachableValue> {
79
+ const key = this.buildCacheKey(propertyKey, args);
80
+ const fetchFunction = origFunction.bind(this, ...args);
81
+ return this.cacheInstance.getOrFetchValue(
82
+ key,
83
+ ttl,
84
+ fetchFunction,
85
+ undefined,
86
+ shouldCacheError,
87
+ );
88
+ };
89
+
90
+ target[`${propertyKey}NoCache`] = origFunction;
91
+ target[`${propertyKey}ErrorCaching`] = functionCachingResultsAndErrors;
92
+
93
+ descriptor.value = functionCachingResults;
94
+ return descriptor;
95
+ };
96
+ }
97
+
98
+ // We *do* want a loosely-typed `Function` here, by nature of the library
99
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
100
+ public getUncachedFunction(functionName: string): Function {
101
+ if (this[`${functionName}NoCache`]) {
102
+ return this[`${functionName}NoCache`].bind(this);
103
+ }
104
+ return this[functionName].bind(this);
105
+ }
106
+
107
+ public getErrorCachingFunction(functionName: string): (...args: any) => Promise<any> {
108
+ if (this[`${functionName}ErrorCaching`]) {
109
+ return this[`${functionName}ErrorCaching`].bind(this);
110
+ }
111
+ return this[functionName].bind(this);
112
+ }
113
+
114
+ /**
115
+ * Clears the valued returned from a cached function call,
116
+ * using the CacheClient.cached.
117
+ */
118
+ public async clearCachedFunctionCall(functionName: string, ...args: any[]): Promise<void> {
119
+ const key = this.buildCacheKey(functionName, args);
120
+ await this.cacheInstance.delValue(key);
121
+ }
122
+
123
+ /**
124
+ * Wait for the write commands to be acknowledged by the replicas.
125
+ * This is useful when you want to ensure data is freshness on all nodes of the cluster.
126
+ * We're defaulting to 5 replicas because it is the maximum number of read-only replica nodes
127
+ * that you can have for each shard in AWS-Elastic cache (https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Replication.Redis.Groups.html)
128
+ *
129
+ * /!\ If the number of replicas asked for acknowledgment is greater than the number of replicas in the cluster, the function will always block
130
+ * /!\ until the timeout is reached. Make sure you know the number of replicas in your cluster when calling this function.
131
+ */
132
+ public async waitForReplication(replicas: number = 5, timeout: number = 50): Promise<number> {
133
+ return this.cacheInstance.waitForReplication(replicas, timeout);
134
+ }
135
+
136
+ /**
137
+ * Gets the valued returned from a cached function call,
138
+ * using the CacheClient.cached.
139
+ */
140
+ public async getCachedFunctionCall(functionName: string, ...args: any[]): Promise<CachableValue> {
141
+ const key = this.buildCacheKey(functionName, args);
142
+ return this.cacheInstance.getValue(key);
143
+ }
144
+
145
+ }
@@ -0,0 +1,222 @@
1
import { EventEmitter } from 'node:events';

// Any value may be cached; loosely typed by design of the library.
export type CachableValue = any;
// Zero-argument async producer of the value to cache on a miss.
export type FetchingFunction = () => Promise<CachableValue>;


/**
 * Abstract contract shared by all cache backends.
 * Extends EventEmitter so implementations can emit diagnostic events
 * (see the concrete implementations for the events they emit).
 */
export abstract class CacheInstance extends EventEmitter {

  /**
   * Will resolve when the cache instance connection is ready.
   */
  public abstract isReady(): Promise<void>;

  /**
   * Get the number of items in the cache.
   */
  public abstract itemCount(): Promise<number>;

  /**
   * Get a value from the cache.
   *
   * @param key The key of the value to get.
   *
   * @return The value associated with the key, or undefined if
   *         no such value exists.
   *
   */
  public abstract getValue(key: string): Promise<CachableValue>;

  /**
   * Get the TTL of an entry, in ms.
   *
   * @param key The key of the entry whose ttl to retrieve
   *
   * @return The remaining TTL on the entry, in ms.
   *         undefined if the entry does not exist.
   *         0 if the entry does not expire.
   */
  public abstract getTtl(key: string): Promise<number | undefined>;

  /**
   * Set a value in the cache.
   *
   * @param key The key of the value to set.
   * @param value The value to set.
   * @param ttl The time to live of the value in seconds.
   *            By default, the value will not expire.
   *
   * @return true if the value was stored, false otherwise.
   */
  public abstract setValue(key: string, value: CachableValue, ttl?: number): Promise<boolean>;

  /**
   * Delete a value from the cache.
   *
   * @param key The key of the value to delete.
   *
   */
  public abstract delValue(key: string): Promise<void>;

  /**
   * This command blocks the current client until all the previous write commands are
   * successfully transferred and acknowledged by at least the specified number of replicas.
   *
   * @param replicas The number of replicas that should acknowledge write operations.
   * @param timeout The maximum amount of time to wait in milliseconds.
   *
   */
  public abstract waitForReplication(replicas: number, timeout: number): Promise<number>;


  /**
   * Clear the whole cache.
   */
  public abstract clear(): Promise<void>;

  /**
   * Clear any in-memory cache item.
   */
  public abstract clearMemory(): Promise<void>;

  /**
   * Determines if locking is supported in the cache implementation.
   * Defaults to false; implementations supporting locks override this.
   */
  public isLockingSupported(): boolean {
    return false;
  }

  /**
   * Globally lock a named resource.
   *
   * @param resource The name of the resource to lock
   * @param ttlMs The time to live of the lock in ms
   * @param retry Whether or not to retry attempts to lock
   *
   * @returns The lock, an opaque object that must be passed to unlock()
   * @throws Error('unsupported') unless overridden by the implementation
   */
  public lock(resource: string, ttlMs: number, retry?: boolean): Promise<any> {
    throw new Error('unsupported');
  }

  /**
   * Unlock a named resource acquired with lock().
   *
   * @param lock The lock object
   * @throws Error('unsupported') unless overridden by the implementation
   */
  public unlock(lock: any): Promise<void> {
    throw new Error('unsupported');
  }

  /**
   * Determine whether *at least one non-expired lock* starts with the given pattern.
   *
   * @throws Error('unsupported') unless overridden by the implementation
   */
  public hasLock(prefix: string): Promise<boolean> {
    throw new Error('unsupported');
  }

  /**
   * Terminate / exit / quit the instance.
   * No-op by default; implementations holding connections override this.
   */
  public quit(): Promise<void> {
    return new Promise((resolve) => { resolve() });
  }


  /**
   * Keep track of active fetches to prevent
   * simultaneous requests to the same resource in parallel.
   */
  private activeFetches: { [key: string]: Promise<CachableValue> } = {};

  /**
   * Get or fetch a value.
   *
   * @param key The key of the value to get
   * @param ttl The time to live of the value in seconds.
   * @param fetchFunction The function that can retrieve the original value
   * @param lockTtl Global distributed lock TTL (in seconds) protecting fetching.
   *                If undefined, 0 or falsy, locking is not performed
   * @param shouldCacheError A callback being passed errors, controlling whether
   *                         or not to cache errors. Defaults to never caching them.
   *
   * @returns The cached or fetched value
   */
  public async getOrFetchValue<F extends FetchingFunction = FetchingFunction>(
    key: string,
    ttl: number,
    fetchFunction: F,
    lockTtl?: number,
    shouldCacheError?: (err: Error) => boolean,
  ): Promise<ReturnType<F>> {

    // already cached?
    let cached = await this.getValue(key);
    if (cached instanceof Error) {
      // A cached Error is only honored when error-caching is enabled;
      // otherwise treat it as a cache miss.
      if (shouldCacheError) {
        throw cached;
      } else {
        cached = undefined;
      }
    }
    if (cached !== undefined) {
      return cached;
    }

    // already fetching? share the in-flight promise instead of re-fetching.
    const currentFetch = this.activeFetches[key];
    if (currentFetch) {
      return currentFetch;
    }

    // I'm the one fetching.
    let lock: any;
    try {
      // get the lock if needed
      const lockName = `lock__${key}`;
      if (lockTtl && this.isLockingSupported()) {
        lock = await this.lock(lockName, lockTtl * 1000);
        // check if the value has been populated while we were locking
        let cachedValue = await this.getValue(key);
        if (cachedValue instanceof Error) {
          if (shouldCacheError) {
            throw cachedValue;
          } else {
            cachedValue = undefined;
          }
        }
        if (cachedValue !== undefined) {
          return cachedValue;
        }
      }

      // fetch! Register the promise so concurrent callers can piggyback on it.
      let error: Error | undefined;
      let result: any;
      try {
        const fetchPromise = this.activeFetches[key] = fetchFunction();
        result = await fetchPromise;
      } catch (err) {
        error = err;
      }

      // cache! results: always, errors: only if satisfying user assertion
      if (error && shouldCacheError && shouldCacheError(error)) {
        await this.setValue(key, error, ttl);
      } else if (result !== undefined) {
        await this.setValue(key, result, ttl);
      }

      if (error) {
        throw error;
      }
      return result;
    } finally {
      // Always clear the in-flight marker and release the lock, even on throw.
      delete this.activeFetches[key];
      if (lock) {
        await this.unlock(lock);
      }
    }
  }

}
@@ -0,0 +1,178 @@
1
+ import { LRUCache } from 'lru-cache';
2
+
3
+ import { CachableValue, CacheInstance } from './CacheInstance';
4
+
5
+ async function sleep(ms: number): Promise<void> {
6
+ return new Promise(resolve => setTimeout(resolve, ms));
7
+ }
8
+
9
+ export class LocalCache extends CacheInstance {
10
+
11
+ public static DEFAULT_MAX_ITEMS = 5000;
12
+ // Default maximum age for the items, in MS.
13
+ public static DEFAULT_MAX_AGE: number = 30 * 60 * 1000;
14
+
15
+ public static LOCK_ACQUIRE_TIMEOUT = 2000;
16
+
17
+ // See https://github.com/isaacs/node-lru-cache#options
18
+ // for options.
19
+ private cache = new LRUCache<string, any>({
20
+ max: Number.parseInt(process.env.CACHETTE_LC_MAX_ITEMS as string, 10) || LocalCache.DEFAULT_MAX_ITEMS,
21
+ ttl: Number.parseInt(process.env.CACHETTE_LC_MAX_AGE as string, 10) || LocalCache.DEFAULT_MAX_AGE,
22
+ });
23
+
24
+ /**
25
+ * @inheritdoc
26
+ */
27
+ public async isReady(): Promise<void> {
28
+ return;
29
+ }
30
+
31
+ /**
32
+ * @inheritdoc
33
+ */
34
+ public async itemCount(): Promise<number> {
35
+ return this.cache.size;
36
+ }
37
+
38
+ /**
39
+ * @inheritdoc
40
+ */
41
+ public async setValue(key: string, value: CachableValue, ttl = 0): Promise<boolean> {
42
+ this.emit('set', key, value);
43
+
44
+ if (value === undefined) {
45
+ this.emit('warn', `Cannot set ${key} to undefined!`);
46
+ return false;
47
+ }
48
+
49
+ // The lru cache interprets 0 as no expiration date.
50
+ if (ttl === 0) {
51
+ this.cache.set(key, value);
52
+ } else {
53
+ this.cache.set(key, value, { ttl: ttl * 1000 });
54
+ }
55
+ return true;
56
+ }
57
+
58
+ /**
59
+ * @inheritdoc
60
+ */
61
+ public async getValue(key: string): Promise<any> {
62
+ const value = await this.cache.get(key);
63
+ this.emit('get', key, value);
64
+ return value;
65
+ }
66
+
67
+ /**
68
+ * @inheritdoc
69
+ * Return the number of ms left in the item's TTL.
70
+ * If item is not in cache, returns 0.
71
+ * Returns a very large number (e.g. 1799999.9158420563) if item is in cache without a defined TTL.
72
+ * Docs: https://github.com/isaacs/node-lru-cache#getremainingttlkey
73
+ */
74
+ public async getTtl(key: string): Promise<number | undefined> {
75
+ const remainingTtl = await this.cache.getRemainingTTL(key);
76
+ /** If entry is not cached, return undefined */
77
+ if (remainingTtl === 0) {
78
+ return undefined;
79
+ }
80
+ /** If entry does not expire, return 0 */
81
+ if (remainingTtl > 1799999) {
82
+ return 0;
83
+ }
84
+
85
+ return remainingTtl;
86
+ }
87
+
88
+ /**
89
+ * @inheritdoc
90
+ */
91
+ public async delValue(key: string): Promise<void> {
92
+ this.cache.delete(key);
93
+ }
94
+
95
+ /**
96
+ * @inheritdoc
97
+ */
98
+ public async waitForReplication(replicas: number, timeout: number): Promise<number> {
99
+ return 0;
100
+ }
101
+
102
+ /**
103
+ * @inheritdoc
104
+ */
105
+ public async clear(): Promise<void> {
106
+ this.cache.clear();
107
+ }
108
+
109
+ /**
110
+ * @inheritdoc
111
+ */
112
+ public async clearMemory(): Promise<void> {
113
+ this.cache.clear();
114
+ }
115
+
116
+ /**
117
+ * @inheritdoc
118
+ * Dumb locking is supported for local development work
119
+ */
120
+ public isLockingSupported(): boolean {
121
+ return true;
122
+ }
123
+
124
+ /**
125
+ * @inheritdoc
126
+ */
127
+ public async lock(resource: string, ttlMs: number): Promise<any> {
128
+ let isLocked = true;
129
+ const startTimestamp = Date.now()
130
+ while(isLocked) {
131
+ if (Date.now() - startTimestamp > LocalCache.LOCK_ACQUIRE_TIMEOUT) {
132
+ throw new Error(`Abandoning locking ${resource} , as timed out while waiting for other lock to be released.`)
133
+ }
134
+ this.cache.purgeStale()
135
+ if (!this.cache.has(resource)) {
136
+ isLocked = false;
137
+ } else {
138
+ // LRU keeps its TTL information private, so we don't know how long to wait.
139
+ // Whatever, we just loop on waiting a bit and retrying.
140
+ await sleep(10);
141
+ }
142
+ }
143
+ this.cache.set(resource, 1, { ttl: ttlMs });
144
+ return new Promise(resolve => { resolve(resource) });
145
+ }
146
+
147
+ /**
148
+ * @inheritdoc
149
+ */
150
+ public async unlock(lock: any): Promise<void> {
151
+ this.cache.delete(lock);
152
+ return new Promise(resolve => { resolve() });
153
+ }
154
+
155
+ /**
156
+ * @inheritdoc
157
+ *
158
+ * Note that this specific implementation in `LocalCache` is not very efficient.
159
+ * Use RedisCache for a performant implementation.
160
+ */
161
+ public async hasLock(prefix: string): Promise<boolean> {
162
+ const startsWithPattern = prefix.replace(/\*$/, '');
163
+ let found = false;
164
+ this.cache.purgeStale();
165
+ this.cache.forEach((value, key) => {
166
+ // Doing a full CPU-inefficient traversal because `lru-cache.LRU` doesn't
167
+ // provide a `some` function or a way to exit this `forEach`. An alternative
168
+ // would be to work on `keys()`, which then would be RAM-inefficient.
169
+ // Neither is a big deal, this cache is meant to be used for small local/dev
170
+ // If this needs fixing, TODO move away from LRU.lru-cache.
171
+ if (key.startsWith(startsWithPattern)) {
172
+ found = true;
173
+ }
174
+ });
175
+ return new Promise((resolve) => { resolve(found) });
176
+ }
177
+
178
+ }