@trieb.work/nextjs-turbo-redis-cache 1.0.0 → 1.1.0
This diff shows the contents of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- package/.github/workflows/ci.yml +9 -0
- package/CHANGELOG.md +26 -0
- package/README.md +31 -1
- package/package.json +24 -3
- package/src/RedisStringsHandler.ts +0 -4
- package/dist/CachedHandler.d.ts +0 -9
- package/dist/CachedHandler.js +0 -28
- package/dist/DeduplicatedRequestHandler.d.ts +0 -9
- package/dist/DeduplicatedRequestHandler.js +0 -48
- package/dist/RedisStringsHandler.d.ts +0 -48
- package/dist/RedisStringsHandler.js +0 -215
- package/dist/SyncedMap.d.ts +0 -51
- package/dist/SyncedMap.js +0 -203
- package/dist/index.d.ts +0 -2
- package/dist/index.js +0 -7
- package/dist/index.test.d.ts +0 -1
- package/dist/index.test.js +0 -8
package/.github/workflows/ci.yml
CHANGED
package/CHANGELOG.md
CHANGED
@@ -0,0 +1,26 @@
+# [1.1.0](https://github.com/trieb-work/nextjs-turbo-redis-cache/compare/v1.0.0...v1.1.0) (2025-03-28)
+
+
+### Features
+
+* Update README.md ([10b474b](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/10b474b456803be924bf4170b6cda662827202c4))
+
+# 1.0.0 (2025-03-28)
+
+
+### Bug Fixes
+
+* double sync on key expiration ([14afef6](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/14afef6b08e3399a2aa7d6cf42a4b9b7b5ea5d33))
+* lint errors ([2b9b138](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/2b9b138759f5754577205b58a998cc034b3b0db5))
+* rEADME ([9e4fab1](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/9e4fab163002c34e8077285064c24ee05ba92bac))
+
+
+### Features
+
+* add handler code ([72251f5](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/72251f58446ec6fb3819ea0bdd67fc012e8a5c38))
+* add handler code ([f674f26](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/f674f262f292e47fd228a827590e8dc10391e5cb))
+* add handler code ([24c497f](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/24c497f1d67898e64528105c61a90b00f55ba02a))
+* improve readme and remove logs ([79408fb](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/79408fbd488db11fcc7472b690f1fff237816da8))
+* improve readme and remove logs ([de7a6aa](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/de7a6aa735d6295299d3a5d41d0fd00d64ac6f89))
+* rename package and extend readme ([e16bcdc](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/e16bcdc6329ee913e1794f2bce05e1e88a08d91b))
+* update to next 15 types + feat: add delete sync to deduplication cache ([832c28f](https://github.com/trieb-work/nextjs-turbo-redis-cache/commit/832c28f1fe0831b87790c2d60e33b314be0adf58))
package/README.md
CHANGED
@@ -17,7 +17,32 @@ TODO
 
 ## Getting started
 
-
+```bash
+pnpm install @trieb.work/nextjs-turbo-redis-cache
+```
+
+extend `next.config.js` with:
+```
+const nextConfig = {
+  cacheHandler:
+    process.env.NODE_ENV === "production" || process.env.DEV_REDIS_CACHE
+      ? new CachedHandler({
+          // Default Options:
+          // maxMemoryCacheSize, // deprecated
+          // database = process.env.VERCEL_ENV === 'production' ? 0 : 1,
+          // keyPrefix = process.env.VERCEL_URL || 'UNDEFINED_URL_',
+          // sharedTagsKey = '__sharedTags__',
+          // timeoutMs = 5000,
+          // revalidateTagQuerySize = 250,
+          // avgResyncIntervalMs = 60 * 60 * 1000,
+          // redisGetDeduplication = true,
+          // inMemoryCachingTime = 10_000,
+          // defaultStaleAge = 60 * 60 * 24 * 14,
+          // estimateExpireAge = (staleAge) =>
+          //   process.env.VERCEL_ENV === 'preview' ? staleAge * 1.2 : staleAge * 2,
+        })
+      : undefined,
+```
 
 ## Consistency
 
@@ -61,3 +86,8 @@ Since all caching calls in one api/page/server action request is always served b
 ## License
 
 This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
+
+
+## Sponsor
+
+This project is created and maintained by the Next.js & Payload CMS agency [trieb.work](https://trieb.work)
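For orientation, the commented defaults above translate into concrete Redis TTLs. A minimal sketch of the documented default `estimateExpireAge` behaviour (ages in seconds; this is not code imported from the package):

```ts
// Mirrors the defaults documented in the README snippet above; illustrative only.
const defaultStaleAge = 60 * 60 * 24 * 14; // 14 days, in seconds
const estimateExpireAge = (staleAge: number): number =>
  process.env.VERCEL_ENV === 'preview' ? staleAge * 1.2 : staleAge * 2;

// Outside preview deployments an entry therefore expires after 28 days,
// in preview deployments after roughly 16.8 days.
console.log(estimateExpireAge(defaultStaleAge) / (60 * 60 * 24));
```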
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@trieb.work/nextjs-turbo-redis-cache",
-  "version": "1.0.0",
+  "version": "1.1.0",
   "scripts": {
     "dev": "pnpm test",
     "build": "tsc",
@@ -13,8 +13,29 @@
   "main": "dist/index.js",
   "type": "module",
   "types": "dist/index.d.ts",
-  "keywords": [
-
+  "keywords": [
+    "nextjs",
+    "redis",
+    "cache",
+    "nextjs-cache",
+    "redis-cache",
+    "turbo-cache",
+    "production",
+    "performance",
+    "optimized",
+    "fast",
+    "large-scale",
+    "scalable",
+    "caching",
+    "nextjs-redis",
+    "nodejs",
+    "in-memory-cache",
+    "revalidate",
+    "deduplication",
+    "batch-invalidation",
+    "tags-cache"
+  ],
+  "author": "Designed for speed, scalability, and optimized performance, nextjs-turbo-redis-cache is your go-to solution for Next.js caching in demanding production environments.",
   "license": "ISC",
   "description": "",
   "publishConfig": {
package/src/RedisStringsHandler.ts
CHANGED
@@ -24,7 +24,6 @@ export type CreateRedisStringsHandlerOptions = {
   inMemoryCachingTime?: number;
   defaultStaleAge?: number;
   estimateExpireAge?: (staleAge: number) => number;
-  maxMemoryCacheSize?: number;
 };
 
 const NEXT_CACHE_IMPLICIT_TAG_ID = '_N_T_';
@@ -41,7 +40,6 @@ export function getTimeoutRedisCommandOptions(
 }
 
 export default class RedisStringsHandler implements CacheHandler {
-  private maxMemoryCacheSize: undefined | number;
   private client: Client;
   private sharedTagsMap: SyncedMap<string[]>;
   private revalidatedTagsMap: SyncedMap<number>;
@@ -62,7 +60,6 @@ export default class RedisStringsHandler implements CacheHandler {
   private estimateExpireAge: (staleAge: number) => number;
 
   constructor({
-    maxMemoryCacheSize,
     database = process.env.VERCEL_ENV === 'production' ? 0 : 1,
     keyPrefix = process.env.VERCEL_URL || 'UNDEFINED_URL_',
     sharedTagsKey = '__sharedTags__',
@@ -75,7 +72,6 @@ export default class RedisStringsHandler implements CacheHandler {
     estimateExpireAge = (staleAge) =>
       process.env.VERCEL_ENV === 'preview' ? staleAge * 1.2 : staleAge * 2,
   }: CreateRedisStringsHandlerOptions) {
-    this.maxMemoryCacheSize = maxMemoryCacheSize;
     this.keyPrefix = keyPrefix;
     this.timeoutMs = timeoutMs;
     this.redisGetDeduplication = redisGetDeduplication;
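The only change to the TypeScript source in this release is the removal of the deprecated `maxMemoryCacheSize` option from `CreateRedisStringsHandlerOptions`. A hedged migration sketch for consumer code (the default export is `CachedHandler`, as shown in `dist/index.js` below; the option values are illustrative):

```ts
import CachedHandler from '@trieb.work/nextjs-turbo-redis-cache';

// 1.0.0: the option existed but was already marked deprecated in the README.
// const handler = new CachedHandler({ maxMemoryCacheSize: 1000, timeoutMs: 5000 });

// 1.1.0: drop the field; all other options keep their documented defaults.
const handler = new CachedHandler({ timeoutMs: 5000 });
```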
package/dist/CachedHandler.d.ts
DELETED
@@ -1,9 +0,0 @@
-import { CacheHandler } from "next/dist/server/lib/incremental-cache";
-import RedisStringsHandler, { CreateRedisStringsHandlerOptions } from "./RedisStringsHandler";
-export default class CachedHandler implements CacheHandler {
-    constructor(options: CreateRedisStringsHandlerOptions);
-    get(...args: Parameters<RedisStringsHandler["get"]>): ReturnType<RedisStringsHandler["get"]>;
-    set(...args: Parameters<RedisStringsHandler["set"]>): ReturnType<RedisStringsHandler["set"]>;
-    revalidateTag(...args: Parameters<RedisStringsHandler["revalidateTag"]>): ReturnType<RedisStringsHandler["revalidateTag"]>;
-    resetRequestCache(...args: Parameters<RedisStringsHandler["resetRequestCache"]>): ReturnType<RedisStringsHandler["resetRequestCache"]>;
-}
package/dist/CachedHandler.js
DELETED
@@ -1,28 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const RedisStringsHandler_1 = __importDefault(require("./RedisStringsHandler"));
-let cachedHandler;
-class CachedHandler {
-    constructor(options) {
-        if (!cachedHandler) {
-            console.log("created cached handler");
-            cachedHandler = new RedisStringsHandler_1.default(options);
-        }
-    }
-    get(...args) {
-        return cachedHandler.get(...args);
-    }
-    set(...args) {
-        return cachedHandler.set(...args);
-    }
-    revalidateTag(...args) {
-        return cachedHandler.revalidateTag(...args);
-    }
-    resetRequestCache(...args) {
-        return cachedHandler.resetRequestCache(...args);
-    }
-}
-exports.default = CachedHandler;
package/dist/DeduplicatedRequestHandler.d.ts
DELETED
@@ -1,9 +0,0 @@
-import { SyncedMap } from './SyncedMap';
-export declare class DeduplicatedRequestHandler<T extends (...args: [never, never]) => Promise<K>, K> {
-    private inMemoryDeduplicationCache;
-    private cachingTimeMs;
-    private fn;
-    constructor(fn: T, cachingTimeMs: number, inMemoryDeduplicationCache: SyncedMap<Promise<K>>);
-    seedRequestReturn(key: string, value: K): void;
-    deduplicatedFunction: (key: string) => T;
-}
package/dist/DeduplicatedRequestHandler.js
DELETED
@@ -1,48 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DeduplicatedRequestHandler = void 0;
-class DeduplicatedRequestHandler {
-    constructor(fn, cachingTimeMs, inMemoryDeduplicationCache) {
-        // Method to handle deduplicated requests
-        this.deduplicatedFunction = (key) => {
-            //eslint-disable-next-line @typescript-eslint/no-this-alias
-            const self = this;
-            const dedupedFn = async (...args) => {
-                // If there's already a pending request with the same key, return it
-                if (self.inMemoryDeduplicationCache &&
-                    self.inMemoryDeduplicationCache.has(key)) {
-                    const res = await self.inMemoryDeduplicationCache
-                        .get(key)
-                        .then((v) => structuredClone(v));
-                    return res;
-                }
-                // If no pending request, call the original function and store the promise
-                const promise = self.fn(...args);
-                self.inMemoryDeduplicationCache.set(key, promise);
-                try {
-                    const result = await promise;
-                    return structuredClone(result);
-                }
-                finally {
-                    // Once the promise is resolved/rejected, remove it from the map
-                    setTimeout(() => {
-                        self.inMemoryDeduplicationCache.delete(key);
-                    }, self.cachingTimeMs);
-                }
-            };
-            return dedupedFn;
-        };
-        this.fn = fn;
-        this.cachingTimeMs = cachingTimeMs;
-        this.inMemoryDeduplicationCache = inMemoryDeduplicationCache;
-    }
-    // Method to manually seed a result into the cache
-    seedRequestReturn(key, value) {
-        const resultPromise = new Promise((res) => res(value));
-        this.inMemoryDeduplicationCache.set(key, resultPromise);
-        setTimeout(() => {
-            this.inMemoryDeduplicationCache.delete(key);
-        }, this.cachingTimeMs);
-    }
-}
-exports.DeduplicatedRequestHandler = DeduplicatedRequestHandler;
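For context, the deleted build output above deduplicates concurrent Redis GETs by caching in-flight promises per key. A simplified, self-contained sketch of the same idea, using a plain Map instead of the package's SyncedMap (the function names and the 10-second window are illustrative):

```ts
// Promise-deduplication sketch: concurrent callers for the same key share one request.
const inFlight = new Map<string, Promise<string | null>>();

async function dedupedGet(
  key: string,
  fetcher: (key: string) => Promise<string | null>,
): Promise<string | null> {
  const pending = inFlight.get(key);
  if (pending) return pending; // reuse the request that is already running

  const promise = fetcher(key);
  inFlight.set(key, promise);
  try {
    return await promise;
  } finally {
    // Keep the settled promise around briefly, analogous to inMemoryCachingTime.
    setTimeout(() => inFlight.delete(key), 10_000);
  }
}
```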
package/dist/RedisStringsHandler.d.ts
DELETED
@@ -1,48 +0,0 @@
-import { commandOptions, createClient } from 'redis';
-import { CacheHandler, CacheHandlerValue, IncrementalCache } from 'next/dist/server/lib/incremental-cache';
-export type CommandOptions = ReturnType<typeof commandOptions>;
-type GetParams = Parameters<IncrementalCache['get']>;
-type SetParams = Parameters<IncrementalCache['set']>;
-type RevalidateParams = Parameters<IncrementalCache['revalidateTag']>;
-export type Client = ReturnType<typeof createClient>;
-export type CreateRedisStringsHandlerOptions = {
-    database?: number;
-    keyPrefix?: string;
-    timeoutMs?: number;
-    revalidateTagQuerySize?: number;
-    sharedTagsKey?: string;
-    avgResyncIntervalMs?: number;
-    redisGetDeduplication?: boolean;
-    inMemoryCachingTime?: number;
-    defaultStaleAge?: number;
-    estimateExpireAge?: (staleAge: number) => number;
-    maxMemoryCacheSize?: number;
-};
-export declare function getTimeoutRedisCommandOptions(timeoutMs: number): CommandOptions;
-export default class RedisStringsHandler implements CacheHandler {
-    private maxMemoryCacheSize;
-    private client;
-    private sharedTagsMap;
-    private revalidatedTagsMap;
-    private inMemoryDeduplicationCache;
-    private redisGet;
-    private redisDeduplicationHandler;
-    private deduplicatedRedisGet;
-    private timeoutMs;
-    private keyPrefix;
-    private redisGetDeduplication;
-    private inMemoryCachingTime;
-    private defaultStaleAge;
-    private estimateExpireAge;
-    constructor({ maxMemoryCacheSize, database, keyPrefix, sharedTagsKey, timeoutMs, revalidateTagQuerySize, avgResyncIntervalMs, redisGetDeduplication, inMemoryCachingTime, defaultStaleAge, estimateExpireAge, }: CreateRedisStringsHandlerOptions);
-    resetRequestCache(...args: never[]): void;
-    private assertClientIsReady;
-    get(key: GetParams[0], ctx: GetParams[1]): Promise<(CacheHandlerValue & {
-        lastModified: number;
-    }) | null>;
-    set(key: SetParams[0], data: SetParams[1] & {
-        lastModified: number;
-    }, ctx: SetParams[2]): Promise<void>;
-    revalidateTag(tagOrTags: RevalidateParams[0]): Promise<void>;
-}
-export {};
package/dist/RedisStringsHandler.js
DELETED
@@ -1,215 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getTimeoutRedisCommandOptions = getTimeoutRedisCommandOptions;
-const redis_1 = require("redis");
-const SyncedMap_1 = require("./SyncedMap");
-const DeduplicatedRequestHandler_1 = require("./DeduplicatedRequestHandler");
-const NEXT_CACHE_IMPLICIT_TAG_ID = '_N_T_';
-const REVALIDATED_TAGS_KEY = '__revalidated_tags__';
-function isImplicitTag(tag) {
-    return tag.startsWith(NEXT_CACHE_IMPLICIT_TAG_ID);
-}
-function getTimeoutRedisCommandOptions(timeoutMs) {
-    return (0, redis_1.commandOptions)({ signal: AbortSignal.timeout(timeoutMs) });
-}
-class RedisStringsHandler {
-    constructor({ maxMemoryCacheSize, database = process.env.VERCEL_ENV === 'production' ? 0 : 1, keyPrefix = process.env.VERCEL_URL || 'UNDEFINED_URL_', sharedTagsKey = '__sharedTags__', timeoutMs = 5000, revalidateTagQuerySize = 250, avgResyncIntervalMs = 60 * 60 * 1000, redisGetDeduplication = true, inMemoryCachingTime = 10000, defaultStaleAge = 60 * 60 * 24 * 14, estimateExpireAge = (staleAge) => process.env.VERCEL_ENV === 'preview' ? staleAge * 1.2 : staleAge * 2, }) {
-        this.maxMemoryCacheSize = maxMemoryCacheSize;
-        this.keyPrefix = keyPrefix;
-        this.timeoutMs = timeoutMs;
-        this.redisGetDeduplication = redisGetDeduplication;
-        this.inMemoryCachingTime = inMemoryCachingTime;
-        this.defaultStaleAge = defaultStaleAge;
-        this.estimateExpireAge = estimateExpireAge;
-        try {
-            this.client = (0, redis_1.createClient)({
-                ...(database !== 0 ? { database } : {}),
-                url: process.env.REDISHOST
-                    ? `redis://${process.env.REDISHOST}:${process.env.REDISPORT}`
-                    : 'redis://localhost:6379',
-            });
-            this.client.on('error', (error) => {
-                console.error('Redis client error', error);
-            });
-            this.client
-                .connect()
-                .then(() => {
-                console.info('Redis client connected.');
-            })
-                .catch((error) => {
-                console.error('Failed to connect Redis client:', error);
-                this.client.disconnect();
-            });
-        }
-        catch (error) {
-            console.error('Failed to initialize Redis client');
-            throw error;
-        }
-        const filterKeys = (key) => key !== REVALIDATED_TAGS_KEY && key !== sharedTagsKey;
-        this.sharedTagsMap = new SyncedMap_1.SyncedMap({
-            client: this.client,
-            keyPrefix,
-            redisKey: sharedTagsKey,
-            database,
-            timeoutMs,
-            querySize: revalidateTagQuerySize,
-            filterKeys,
-            resyncIntervalMs: avgResyncIntervalMs -
-                avgResyncIntervalMs / 10 +
-                Math.random() * (avgResyncIntervalMs / 10),
-        });
-        this.revalidatedTagsMap = new SyncedMap_1.SyncedMap({
-            client: this.client,
-            keyPrefix,
-            redisKey: REVALIDATED_TAGS_KEY,
-            database,
-            timeoutMs,
-            querySize: revalidateTagQuerySize,
-            filterKeys,
-            resyncIntervalMs: avgResyncIntervalMs +
-                avgResyncIntervalMs / 10 +
-                Math.random() * (avgResyncIntervalMs / 10),
-        });
-        this.inMemoryDeduplicationCache = new SyncedMap_1.SyncedMap({
-            client: this.client,
-            keyPrefix,
-            redisKey: 'inMemoryDeduplicationCache',
-            database,
-            timeoutMs,
-            querySize: revalidateTagQuerySize,
-            filterKeys,
-            customizedSync: {
-                withoutRedisHashmap: true,
-                withoutSetSync: true,
-            },
-        });
-        const redisGet = this.client.get.bind(this.client);
-        this.redisDeduplicationHandler = new DeduplicatedRequestHandler_1.DeduplicatedRequestHandler(redisGet, inMemoryCachingTime, this.inMemoryDeduplicationCache);
-        this.redisGet = redisGet;
-        this.deduplicatedRedisGet =
-            this.redisDeduplicationHandler.deduplicatedFunction;
-    }
-    resetRequestCache(...args) {
-        console.warn('WARNING resetRequestCache() was called', args);
-    }
-    async assertClientIsReady() {
-        await Promise.all([
-            this.sharedTagsMap.waitUntilReady(),
-            this.revalidatedTagsMap.waitUntilReady(),
-        ]);
-        if (!this.client.isReady) {
-            throw new Error('Redis client is not ready yet or connection is lost.');
-        }
-    }
-    async get(key, ctx) {
-        await this.assertClientIsReady();
-        const clientGet = this.redisGetDeduplication
-            ? this.deduplicatedRedisGet(key)
-            : this.redisGet;
-        const result = await clientGet(getTimeoutRedisCommandOptions(this.timeoutMs), this.keyPrefix + key);
-        if (!result) {
-            return null;
-        }
-        const cacheValue = JSON.parse(result);
-        if (!cacheValue) {
-            return null;
-        }
-        if (cacheValue.value?.kind === 'FETCH') {
-            cacheValue.value.data.body = Buffer.from(cacheValue.value.data.body).toString('base64');
-        }
-        const combinedTags = new Set([
-            ...(ctx?.softTags || []),
-            ...(ctx?.tags || []),
-        ]);
-        if (combinedTags.size === 0) {
-            return cacheValue;
-        }
-        for (const tag of combinedTags) {
-            // TODO: check how this revalidatedTagsMap is used or if it can be deleted
-            const revalidationTime = this.revalidatedTagsMap.get(tag);
-            if (revalidationTime && revalidationTime > cacheValue.lastModified) {
-                const redisKey = this.keyPrefix + key;
-                // Do not await here as this can happen in the background while we can already serve the cacheValue
-                this.client
-                    .unlink(getTimeoutRedisCommandOptions(this.timeoutMs), redisKey)
-                    .catch((err) => {
-                    console.error('Error occurred while unlinking stale data. Retrying now. Error was:', err);
-                    this.client.unlink(getTimeoutRedisCommandOptions(this.timeoutMs), redisKey);
-                })
-                    .finally(async () => {
-                    await this.sharedTagsMap.delete(key);
-                    await this.revalidatedTagsMap.delete(tag);
-                });
-                return null;
-            }
-        }
-        return cacheValue;
-    }
-    async set(key, data, ctx) {
-        if (data.kind === 'FETCH') {
-            console.time('encoding' + key);
-            data.data.body = Buffer.from(data.data.body, 'base64').toString();
-            console.timeEnd('encoding' + key);
-        }
-        await this.assertClientIsReady();
-        data.lastModified = Date.now();
-        const value = JSON.stringify(data);
-        // pre seed data into deduplicated get client. This will reduce redis load by not requesting
-        // the same value from redis which was just set.
-        if (this.redisGetDeduplication) {
-            this.redisDeduplicationHandler.seedRequestReturn(key, value);
-        }
-        const expireAt = ctx.revalidate &&
-            Number.isSafeInteger(ctx.revalidate) &&
-            ctx.revalidate > 0
-            ? this.estimateExpireAge(ctx.revalidate)
-            : this.estimateExpireAge(this.defaultStaleAge);
-        const options = getTimeoutRedisCommandOptions(this.timeoutMs);
-        const setOperation = this.client.set(options, this.keyPrefix + key, value, {
-            EX: expireAt,
-        });
-        let setTagsOperation;
-        if (ctx.tags && ctx.tags.length > 0) {
-            const currentTags = this.sharedTagsMap.get(key);
-            const currentIsSameAsNew = currentTags?.length === ctx.tags.length &&
-                currentTags.every((v) => ctx.tags.includes(v)) &&
-                ctx.tags.every((v) => currentTags.includes(v));
-            if (!currentIsSameAsNew) {
-                setTagsOperation = this.sharedTagsMap.set(key, structuredClone(ctx.tags));
-            }
-        }
-        await Promise.all([setOperation, setTagsOperation]);
-    }
-    async revalidateTag(tagOrTags) {
-        const tags = new Set([tagOrTags || []].flat());
-        await this.assertClientIsReady();
-        // TODO: check how this revalidatedTagsMap is used or if it can be deleted
-        for (const tag of tags) {
-            if (isImplicitTag(tag)) {
-                const now = Date.now();
-                await this.revalidatedTagsMap.set(tag, now);
-            }
-        }
-        const keysToDelete = [];
-        for (const [key, sharedTags] of this.sharedTagsMap.entries()) {
-            if (sharedTags.some((tag) => tags.has(tag))) {
-                keysToDelete.push(key);
-            }
-        }
-        if (keysToDelete.length === 0) {
-            return;
-        }
-        const fullRedisKeys = keysToDelete.map((key) => this.keyPrefix + key);
-        const options = getTimeoutRedisCommandOptions(this.timeoutMs);
-        const deleteKeysOperation = this.client.unlink(options, fullRedisKeys);
-        // delete entries from in-memory deduplication cache
-        if (this.redisGetDeduplication && this.inMemoryCachingTime > 0) {
-            for (const key of keysToDelete) {
-                this.inMemoryDeduplicationCache.delete(key);
-            }
-        }
-        const deleteTagsOperation = this.sharedTagsMap.delete(keysToDelete);
-        await Promise.all([deleteKeysOperation, deleteTagsOperation]);
-    }
-}
-exports.default = RedisStringsHandler;
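The compiled handler above also shows the core invalidation design: `sharedTagsMap` maintains a key-to-tags mapping, and `revalidateTag` unlinks every cached key whose tag list intersects the revalidated tags. A condensed sketch of that lookup, with a plain Map standing in for `SyncedMap` (illustrative only, not the package's public API):

```ts
// key -> tags mapping, mirroring the revalidateTag() loop above with simplified types.
const sharedTags = new Map<string, string[]>();

function keysForTags(tags: Set<string>): string[] {
  const keysToDelete: string[] = [];
  for (const [key, tagList] of sharedTags.entries()) {
    if (tagList.some((tag) => tags.has(tag))) keysToDelete.push(key);
  }
  return keysToDelete;
}

// After e.g. revalidateTag('products'), the handler would UNLINK every Redis key
// returned by keysForTags(new Set(['products'])) and drop the mapping entries.
```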
package/dist/SyncedMap.d.ts
DELETED
@@ -1,51 +0,0 @@
-import { Client } from './RedisStringsHandler';
-type CustomizedSync = {
-    withoutRedisHashmap?: boolean;
-    withoutSetSync?: boolean;
-};
-type SyncedMapOptions = {
-    client: Client;
-    keyPrefix: string;
-    redisKey: string;
-    database: number;
-    timeoutMs: number;
-    querySize: number;
-    filterKeys: (key: string) => boolean;
-    resyncIntervalMs?: number;
-    customizedSync?: CustomizedSync;
-};
-export type SyncMessage<V> = {
-    type: 'insert' | 'delete';
-    key?: string;
-    value?: V;
-    keys?: string[];
-};
-export declare class SyncedMap<V> {
-    private client;
-    private subscriberClient;
-    private map;
-    private keyPrefix;
-    private syncChannel;
-    private redisKey;
-    private database;
-    private timeoutMs;
-    private querySize;
-    private filterKeys;
-    private resyncIntervalMs?;
-    private customizedSync?;
-    private setupLock;
-    private setupLockResolve;
-    constructor(options: SyncedMapOptions);
-    private setup;
-    private initialSync;
-    private cleanupKeysNotInRedis;
-    private setupPeriodicResync;
-    private setupPubSub;
-    waitUntilReady(): Promise<void>;
-    get(key: string): V | undefined;
-    set(key: string, value: V): Promise<void>;
-    delete(keys: string[] | string, withoutSyncMessage?: boolean): Promise<void>;
-    has(key: string): boolean;
-    entries(): IterableIterator<[string, V]>;
-}
-export {};
package/dist/SyncedMap.js
DELETED
@@ -1,203 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SyncedMap = void 0;
-// SyncedMap.ts
-const RedisStringsHandler_1 = require("./RedisStringsHandler");
-const SYNC_CHANNEL_SUFFIX = ':sync-channel:';
-class SyncedMap {
-    constructor(options) {
-        this.client = options.client;
-        this.keyPrefix = options.keyPrefix;
-        this.redisKey = options.redisKey;
-        this.syncChannel = `${options.keyPrefix}${SYNC_CHANNEL_SUFFIX}${options.redisKey}`;
-        this.database = options.database;
-        this.timeoutMs = options.timeoutMs;
-        this.querySize = options.querySize;
-        this.filterKeys = options.filterKeys;
-        this.resyncIntervalMs = options.resyncIntervalMs;
-        this.customizedSync = options.customizedSync;
-        this.map = new Map();
-        this.subscriberClient = this.client.duplicate();
-        this.setupLock = new Promise((resolve) => {
-            this.setupLockResolve = resolve;
-        });
-        this.setup().catch((error) => {
-            console.error('Failed to setup SyncedMap:', error);
-            throw error;
-        });
-    }
-    async setup() {
-        let setupPromises = [];
-        if (!this.customizedSync?.withoutRedisHashmap) {
-            setupPromises.push(this.initialSync());
-            this.setupPeriodicResync();
-        }
-        setupPromises.push(this.setupPubSub());
-        await Promise.all(setupPromises);
-        this.setupLockResolve();
-    }
-    async initialSync() {
-        let cursor = 0;
-        const hScanOptions = { COUNT: this.querySize };
-        try {
-            do {
-                const remoteItems = await this.client.hScan((0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs), this.keyPrefix + this.redisKey, cursor, hScanOptions);
-                for (const { field, value } of remoteItems.tuples) {
-                    if (this.filterKeys(field)) {
-                        const parsedValue = JSON.parse(value);
-                        this.map.set(field, parsedValue);
-                    }
-                }
-                cursor = remoteItems.cursor;
-            } while (cursor !== 0);
-            // Clean up keys not in Redis
-            await this.cleanupKeysNotInRedis();
-        }
-        catch (error) {
-            console.error('Error during initial sync:', error);
-            throw error;
-        }
-    }
-    async cleanupKeysNotInRedis() {
-        let cursor = 0;
-        const scanOptions = { COUNT: this.querySize, MATCH: `${this.keyPrefix}*` };
-        let remoteKeys = [];
-        try {
-            do {
-                const remoteKeysPortion = await this.client.scan((0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs), cursor, scanOptions);
-                remoteKeys = remoteKeys.concat(remoteKeysPortion.keys);
-                cursor = remoteKeysPortion.cursor;
-            } while (cursor !== 0);
-            const remoteKeysSet = new Set(remoteKeys.map((key) => key.substring(this.keyPrefix.length)));
-            const keysToDelete = [];
-            for (const key of this.map.keys()) {
-                const keyStr = key;
-                if (!remoteKeysSet.has(keyStr) && this.filterKeys(keyStr)) {
-                    keysToDelete.push(keyStr);
-                }
-            }
-            if (keysToDelete.length > 0) {
-                await this.delete(keysToDelete);
-            }
-        }
-        catch (error) {
-            console.error('Error during cleanup of keys not in Redis:', error);
-            throw error;
-        }
-    }
-    setupPeriodicResync() {
-        if (this.resyncIntervalMs && this.resyncIntervalMs > 0) {
-            setInterval(() => {
-                this.initialSync().catch((error) => {
-                    console.error('Error during periodic resync:', error);
-                });
-            }, this.resyncIntervalMs);
-        }
-    }
-    async setupPubSub() {
-        const syncHandler = async (message) => {
-            const syncMessage = JSON.parse(message);
-            if (syncMessage.type === 'insert') {
-                if (syncMessage.key !== undefined && syncMessage.value !== undefined) {
-                    this.map.set(syncMessage.key, syncMessage.value);
-                }
-            }
-            else if (syncMessage.type === 'delete') {
-                if (syncMessage.keys) {
-                    for (const key of syncMessage.keys) {
-                        this.map.delete(key);
-                    }
-                }
-            }
-        };
-        const keyEventHandler = async (_channel, message) => {
-            const key = message;
-            if (key.startsWith(this.keyPrefix)) {
-                const keyInMap = key.substring(this.keyPrefix.length);
-                if (this.filterKeys(keyInMap)) {
-                    await this.delete(keyInMap, true);
-                }
-            }
-        };
-        try {
-            await this.subscriberClient.connect();
-            await Promise.all([
-                // We use a custom channel for insert/delete For the following reason:
-                // With custom channel we can delete multiple entries in one message. If we would listen to unlink / del we
-                // could get thousands of messages for one revalidateTag (For example revalidateTag("algolia") would send an enormous amount of network packages)
-                // Also we can send the value in the message for insert
-                this.subscriberClient.subscribe(this.syncChannel, syncHandler),
-                // Subscribe to Redis keyspace notifications for evicted and expired keys
-                this.subscriberClient.subscribe(`__keyevent@${this.database}__:evicted`, keyEventHandler),
-                this.subscriberClient.subscribe(`__keyevent@${this.database}__:expired`, keyEventHandler),
-            ]);
-            // Error handling for reconnection
-            this.subscriberClient.on('error', async (err) => {
-                console.error('Subscriber client error:', err);
-                try {
-                    await this.subscriberClient.quit();
-                    this.subscriberClient = this.client.duplicate();
-                    await this.setupPubSub();
-                }
-                catch (reconnectError) {
-                    console.error('Failed to reconnect subscriber client:', reconnectError);
-                }
-            });
-        }
-        catch (error) {
-            console.error('Error setting up pub/sub client:', error);
-            throw error;
-        }
-    }
-    async waitUntilReady() {
-        await this.setupLock;
-    }
-    get(key) {
-        return this.map.get(key);
-    }
-    async set(key, value) {
-        this.map.set(key, value);
-        const operations = [];
-        // This is needed if we only want to sync delete commands. This is especially useful for non serializable data like a promise map
-        if (this.customizedSync?.withoutSetSync) {
-            return;
-        }
-        if (!this.customizedSync?.withoutRedisHashmap) {
-            const options = (0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs);
-            operations.push(this.client.hSet(options, this.keyPrefix + this.redisKey, key, JSON.stringify(value)));
-        }
-        const insertMessage = {
-            type: 'insert',
-            key: key,
-            value,
-        };
-        operations.push(this.client.publish(this.syncChannel, JSON.stringify(insertMessage)));
-        await Promise.all(operations);
-    }
-    async delete(keys, withoutSyncMessage = false) {
-        const keysArray = Array.isArray(keys) ? keys : [keys];
-        const operations = [];
-        for (const key of keysArray) {
-            this.map.delete(key);
-        }
-        if (!this.customizedSync?.withoutRedisHashmap) {
-            const options = (0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs);
-            operations.push(this.client.hDel(options, this.keyPrefix + this.redisKey, keysArray));
-        }
-        if (!withoutSyncMessage) {
-            const deletionMessage = {
-                type: 'delete',
-                keys: keysArray,
-            };
-            operations.push(this.client.publish(this.syncChannel, JSON.stringify(deletionMessage)));
-        }
-        await Promise.all(operations);
-    }
-    has(key) {
-        return this.map.has(key);
-    }
-    entries() {
-        return this.map.entries();
-    }
-}
-exports.SyncedMap = SyncedMap;
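`SyncedMap` above subscribes to the `__keyevent@<db>__:evicted` and `__keyevent@<db>__:expired` channels. Those messages only arrive if the Redis server has keyspace notifications enabled, which is a deployment concern rather than something the package configures for you. A hedged sketch of enabling them with node-redis (flag string assumed: `E` keyevent events, `x` expired, `e` evicted; verify against your Redis setup):

```ts
import { createClient } from 'redis';

// Deployment assumption: check the flag set against your Redis version and eviction policy.
const client = createClient({ url: 'redis://localhost:6379' });
await client.connect();
// Equivalent to: redis-cli CONFIG SET notify-keyspace-events Exe
await client.sendCommand(['CONFIG', 'SET', 'notify-keyspace-events', 'Exe']);
await client.quit();
```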
package/dist/index.d.ts
DELETED
package/dist/index.js
DELETED
@@ -1,7 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const CachedHandler_1 = __importDefault(require("./CachedHandler"));
-exports.default = CachedHandler_1.default;
package/dist/index.test.d.ts
DELETED
@@ -1 +0,0 @@
-export {};
package/dist/index.test.js
DELETED