@trieb.work/nextjs-turbo-redis-cache 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +50 -0
- package/.github/workflows/release.yml +30 -0
- package/.prettierrc.json +6 -0
- package/CHANGELOG.md +0 -0
- package/LICENSE +21 -0
- package/README.md +63 -0
- package/commitlint.config.cjs +4 -0
- package/dist/CachedHandler.d.ts +9 -0
- package/dist/CachedHandler.js +28 -0
- package/dist/DeduplicatedRequestHandler.d.ts +9 -0
- package/dist/DeduplicatedRequestHandler.js +48 -0
- package/dist/RedisStringsHandler.d.ts +48 -0
- package/dist/RedisStringsHandler.js +215 -0
- package/dist/SyncedMap.d.ts +51 -0
- package/dist/SyncedMap.js +203 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +7 -0
- package/dist/index.test.d.ts +1 -0
- package/dist/index.test.js +8 -0
- package/eslint.config.mjs +26 -0
- package/hooks/commit-msg +10 -0
- package/hooks/pre-commit +11 -0
- package/package.json +54 -0
- package/release.config.cjs +31 -0
- package/scripts/prepare.sh +7 -0
- package/scripts/setup-git.sh +8 -0
- package/scripts/setup-repo-name.sh +17 -0
- package/scripts/vitest-run-staged.cjs +44 -0
- package/src/CachedHandler.ts +25 -0
- package/src/DeduplicatedRequestHandler.ts +61 -0
- package/src/RedisStringsHandler.ts +344 -0
- package/src/SyncedMap.ts +298 -0
- package/src/index.test.ts +7 -0
- package/src/index.ts +2 -0
- package/tsconfig.json +14 -0
- package/vite.config.ts +18 -0
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.SyncedMap = void 0;
|
|
4
|
+
// SyncedMap.ts
|
|
5
|
+
const RedisStringsHandler_1 = require("./RedisStringsHandler");
|
|
6
|
+
const SYNC_CHANNEL_SUFFIX = ':sync-channel:';
|
|
7
|
+
class SyncedMap {
|
|
8
|
+
constructor(options) {
|
|
9
|
+
this.client = options.client;
|
|
10
|
+
this.keyPrefix = options.keyPrefix;
|
|
11
|
+
this.redisKey = options.redisKey;
|
|
12
|
+
this.syncChannel = `${options.keyPrefix}${SYNC_CHANNEL_SUFFIX}${options.redisKey}`;
|
|
13
|
+
this.database = options.database;
|
|
14
|
+
this.timeoutMs = options.timeoutMs;
|
|
15
|
+
this.querySize = options.querySize;
|
|
16
|
+
this.filterKeys = options.filterKeys;
|
|
17
|
+
this.resyncIntervalMs = options.resyncIntervalMs;
|
|
18
|
+
this.customizedSync = options.customizedSync;
|
|
19
|
+
this.map = new Map();
|
|
20
|
+
this.subscriberClient = this.client.duplicate();
|
|
21
|
+
this.setupLock = new Promise((resolve) => {
|
|
22
|
+
this.setupLockResolve = resolve;
|
|
23
|
+
});
|
|
24
|
+
this.setup().catch((error) => {
|
|
25
|
+
console.error('Failed to setup SyncedMap:', error);
|
|
26
|
+
throw error;
|
|
27
|
+
});
|
|
28
|
+
}
|
|
29
|
+
async setup() {
|
|
30
|
+
let setupPromises = [];
|
|
31
|
+
if (!this.customizedSync?.withoutRedisHashmap) {
|
|
32
|
+
setupPromises.push(this.initialSync());
|
|
33
|
+
this.setupPeriodicResync();
|
|
34
|
+
}
|
|
35
|
+
setupPromises.push(this.setupPubSub());
|
|
36
|
+
await Promise.all(setupPromises);
|
|
37
|
+
this.setupLockResolve();
|
|
38
|
+
}
|
|
39
|
+
async initialSync() {
|
|
40
|
+
let cursor = 0;
|
|
41
|
+
const hScanOptions = { COUNT: this.querySize };
|
|
42
|
+
try {
|
|
43
|
+
do {
|
|
44
|
+
const remoteItems = await this.client.hScan((0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs), this.keyPrefix + this.redisKey, cursor, hScanOptions);
|
|
45
|
+
for (const { field, value } of remoteItems.tuples) {
|
|
46
|
+
if (this.filterKeys(field)) {
|
|
47
|
+
const parsedValue = JSON.parse(value);
|
|
48
|
+
this.map.set(field, parsedValue);
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
cursor = remoteItems.cursor;
|
|
52
|
+
} while (cursor !== 0);
|
|
53
|
+
// Clean up keys not in Redis
|
|
54
|
+
await this.cleanupKeysNotInRedis();
|
|
55
|
+
}
|
|
56
|
+
catch (error) {
|
|
57
|
+
console.error('Error during initial sync:', error);
|
|
58
|
+
throw error;
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
async cleanupKeysNotInRedis() {
|
|
62
|
+
let cursor = 0;
|
|
63
|
+
const scanOptions = { COUNT: this.querySize, MATCH: `${this.keyPrefix}*` };
|
|
64
|
+
let remoteKeys = [];
|
|
65
|
+
try {
|
|
66
|
+
do {
|
|
67
|
+
const remoteKeysPortion = await this.client.scan((0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs), cursor, scanOptions);
|
|
68
|
+
remoteKeys = remoteKeys.concat(remoteKeysPortion.keys);
|
|
69
|
+
cursor = remoteKeysPortion.cursor;
|
|
70
|
+
} while (cursor !== 0);
|
|
71
|
+
const remoteKeysSet = new Set(remoteKeys.map((key) => key.substring(this.keyPrefix.length)));
|
|
72
|
+
const keysToDelete = [];
|
|
73
|
+
for (const key of this.map.keys()) {
|
|
74
|
+
const keyStr = key;
|
|
75
|
+
if (!remoteKeysSet.has(keyStr) && this.filterKeys(keyStr)) {
|
|
76
|
+
keysToDelete.push(keyStr);
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
if (keysToDelete.length > 0) {
|
|
80
|
+
await this.delete(keysToDelete);
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
catch (error) {
|
|
84
|
+
console.error('Error during cleanup of keys not in Redis:', error);
|
|
85
|
+
throw error;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
setupPeriodicResync() {
|
|
89
|
+
if (this.resyncIntervalMs && this.resyncIntervalMs > 0) {
|
|
90
|
+
setInterval(() => {
|
|
91
|
+
this.initialSync().catch((error) => {
|
|
92
|
+
console.error('Error during periodic resync:', error);
|
|
93
|
+
});
|
|
94
|
+
}, this.resyncIntervalMs);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
async setupPubSub() {
|
|
98
|
+
const syncHandler = async (message) => {
|
|
99
|
+
const syncMessage = JSON.parse(message);
|
|
100
|
+
if (syncMessage.type === 'insert') {
|
|
101
|
+
if (syncMessage.key !== undefined && syncMessage.value !== undefined) {
|
|
102
|
+
this.map.set(syncMessage.key, syncMessage.value);
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
else if (syncMessage.type === 'delete') {
|
|
106
|
+
if (syncMessage.keys) {
|
|
107
|
+
for (const key of syncMessage.keys) {
|
|
108
|
+
this.map.delete(key);
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
};
|
|
113
|
+
const keyEventHandler = async (_channel, message) => {
|
|
114
|
+
const key = message;
|
|
115
|
+
if (key.startsWith(this.keyPrefix)) {
|
|
116
|
+
const keyInMap = key.substring(this.keyPrefix.length);
|
|
117
|
+
if (this.filterKeys(keyInMap)) {
|
|
118
|
+
await this.delete(keyInMap, true);
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
};
|
|
122
|
+
try {
|
|
123
|
+
await this.subscriberClient.connect();
|
|
124
|
+
await Promise.all([
|
|
125
|
+
// We use a custom channel for insert/delete For the following reason:
|
|
126
|
+
// With custom channel we can delete multiple entries in one message. If we would listen to unlink / del we
|
|
127
|
+
// could get thousands of messages for one revalidateTag (For example revalidateTag("algolia") would send an enormous amount of network packages)
|
|
128
|
+
// Also we can send the value in the message for insert
|
|
129
|
+
this.subscriberClient.subscribe(this.syncChannel, syncHandler),
|
|
130
|
+
// Subscribe to Redis keyspace notifications for evicted and expired keys
|
|
131
|
+
this.subscriberClient.subscribe(`__keyevent@${this.database}__:evicted`, keyEventHandler),
|
|
132
|
+
this.subscriberClient.subscribe(`__keyevent@${this.database}__:expired`, keyEventHandler),
|
|
133
|
+
]);
|
|
134
|
+
// Error handling for reconnection
|
|
135
|
+
this.subscriberClient.on('error', async (err) => {
|
|
136
|
+
console.error('Subscriber client error:', err);
|
|
137
|
+
try {
|
|
138
|
+
await this.subscriberClient.quit();
|
|
139
|
+
this.subscriberClient = this.client.duplicate();
|
|
140
|
+
await this.setupPubSub();
|
|
141
|
+
}
|
|
142
|
+
catch (reconnectError) {
|
|
143
|
+
console.error('Failed to reconnect subscriber client:', reconnectError);
|
|
144
|
+
}
|
|
145
|
+
});
|
|
146
|
+
}
|
|
147
|
+
catch (error) {
|
|
148
|
+
console.error('Error setting up pub/sub client:', error);
|
|
149
|
+
throw error;
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
async waitUntilReady() {
|
|
153
|
+
await this.setupLock;
|
|
154
|
+
}
|
|
155
|
+
get(key) {
|
|
156
|
+
return this.map.get(key);
|
|
157
|
+
}
|
|
158
|
+
async set(key, value) {
|
|
159
|
+
this.map.set(key, value);
|
|
160
|
+
const operations = [];
|
|
161
|
+
// This is needed if we only want to sync delete commands. This is especially useful for non serializable data like a promise map
|
|
162
|
+
if (this.customizedSync?.withoutSetSync) {
|
|
163
|
+
return;
|
|
164
|
+
}
|
|
165
|
+
if (!this.customizedSync?.withoutRedisHashmap) {
|
|
166
|
+
const options = (0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs);
|
|
167
|
+
operations.push(this.client.hSet(options, this.keyPrefix + this.redisKey, key, JSON.stringify(value)));
|
|
168
|
+
}
|
|
169
|
+
const insertMessage = {
|
|
170
|
+
type: 'insert',
|
|
171
|
+
key: key,
|
|
172
|
+
value,
|
|
173
|
+
};
|
|
174
|
+
operations.push(this.client.publish(this.syncChannel, JSON.stringify(insertMessage)));
|
|
175
|
+
await Promise.all(operations);
|
|
176
|
+
}
|
|
177
|
+
async delete(keys, withoutSyncMessage = false) {
|
|
178
|
+
const keysArray = Array.isArray(keys) ? keys : [keys];
|
|
179
|
+
const operations = [];
|
|
180
|
+
for (const key of keysArray) {
|
|
181
|
+
this.map.delete(key);
|
|
182
|
+
}
|
|
183
|
+
if (!this.customizedSync?.withoutRedisHashmap) {
|
|
184
|
+
const options = (0, RedisStringsHandler_1.getTimeoutRedisCommandOptions)(this.timeoutMs);
|
|
185
|
+
operations.push(this.client.hDel(options, this.keyPrefix + this.redisKey, keysArray));
|
|
186
|
+
}
|
|
187
|
+
if (!withoutSyncMessage) {
|
|
188
|
+
const deletionMessage = {
|
|
189
|
+
type: 'delete',
|
|
190
|
+
keys: keysArray,
|
|
191
|
+
};
|
|
192
|
+
operations.push(this.client.publish(this.syncChannel, JSON.stringify(deletionMessage)));
|
|
193
|
+
}
|
|
194
|
+
await Promise.all(operations);
|
|
195
|
+
}
|
|
196
|
+
has(key) {
|
|
197
|
+
return this.map.has(key);
|
|
198
|
+
}
|
|
199
|
+
entries() {
|
|
200
|
+
return this.map.entries();
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
exports.SyncedMap = SyncedMap;
|
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const CachedHandler_1 = __importDefault(require("./CachedHandler"));
|
|
7
|
+
exports.default = CachedHandler_1.default;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import tsPlugin from '@typescript-eslint/eslint-plugin'; // Changed to default import
import tsParser from '@typescript-eslint/parser';

// ESLint flat config: lint only the TypeScript sources with the
// typescript-eslint recommended rule set.
const config = [
  {
    ignores: ['dist', 'node_modules'],
  },
  {
    files: ['src/**/*.ts', 'src/**/*.tsx'],
    languageOptions: {
      parser: tsParser,
      ecmaVersion: 2021,
      sourceType: 'module',
    },
    plugins: {
      // Flat config expects the plugin object itself under its namespace.
      // Wrapping it as `{ rules: tsPlugin.rules }` discards the plugin's
      // other properties (meta, processors, configs) that ESLint may read.
      '@typescript-eslint': tsPlugin,
    },
    rules: {
      ...tsPlugin.configs.recommended.rules,
    },
  },
];

export default config;
|
package/hooks/commit-msg
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
#!/bin/sh
# Commit message hook to enforce conventional commits.
# Reject the commit when commitlint finds the message non-conforming.
if ! npx --no-install commitlint --edit "$1"; then
  echo "❌ Commit message does not follow Conventional Commit format."
  echo "👉 Example: 'feat: add user authentication'"
  exit 1
fi
|
package/hooks/pre-commit
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
#!/bin/sh
# Pre-commit hook: block direct commits to main, then lint and test staged files.
# Abort on the first failing command so a lint-staged failure cannot be masked
# by a later command succeeding (without this, only the last command's exit
# status decided whether the hook passed).
set -e

# `|| true` keeps the branch lookup non-fatal on a detached HEAD.
BRANCH_NAME=$(git symbolic-ref --short HEAD || true)
if [ "$BRANCH_NAME" = "main" ]; then
  echo "❌ Direct commits to the main branch are not allowed."
  echo "👉 Please switch to a feature branch and create a pull request."
  exit 1
fi

npx --no-install lint-staged
node ./scripts/vitest-run-staged.cjs
|
package/package.json
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@trieb.work/nextjs-turbo-redis-cache",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"scripts": {
|
|
5
|
+
"dev": "pnpm test",
|
|
6
|
+
"build": "tsc",
|
|
7
|
+
"lint": "eslint -c eslint.config.mjs --fix",
|
|
8
|
+
"fmt": "prettier --write 'src/**/*.ts' 'src/*.ts'",
|
|
9
|
+
"test": "vitest --coverage --config vite.config.ts",
|
|
10
|
+
"test:ui": "vitest --ui --config vite.config.ts",
|
|
11
|
+
"prepare": "./scripts/prepare.sh"
|
|
12
|
+
},
|
|
13
|
+
"main": "dist/index.js",
|
|
14
|
+
"type": "module",
|
|
15
|
+
"types": "dist/index.d.ts",
|
|
16
|
+
"keywords": [],
|
|
17
|
+
"author": "",
|
|
18
|
+
"license": "ISC",
|
|
19
|
+
"description": "",
|
|
20
|
+
"publishConfig": {
|
|
21
|
+
"access": "public"
|
|
22
|
+
},
|
|
23
|
+
"lint-staged": {
|
|
24
|
+
"*.{ts,tsx}": [
|
|
25
|
+
"eslint --fix",
|
|
26
|
+
"prettier --write"
|
|
27
|
+
],
|
|
28
|
+
"*.{js,json,md}": [
|
|
29
|
+
"prettier --write"
|
|
30
|
+
]
|
|
31
|
+
},
|
|
32
|
+
"devDependencies": {
|
|
33
|
+
"@commitlint/cli": "^19.6.0",
|
|
34
|
+
"@commitlint/config-conventional": "^19.6.0",
|
|
35
|
+
"@semantic-release/changelog": "^6.0.3",
|
|
36
|
+
"@semantic-release/git": "^10.0.1",
|
|
37
|
+
"@semantic-release/github": "^11.0.1",
|
|
38
|
+
"@semantic-release/npm": "^12.0.1",
|
|
39
|
+
"@typescript-eslint/eslint-plugin": "^8.15.0",
|
|
40
|
+
"@typescript-eslint/parser": "^8.15.0",
|
|
41
|
+
"@vitest/coverage-v8": "^2.1.5",
|
|
42
|
+
"@vitest/ui": "^2.1.5",
|
|
43
|
+
"eslint": "^9.15.0",
|
|
44
|
+
"lint-staged": "^15.2.10",
|
|
45
|
+
"prettier": "^3.3.3",
|
|
46
|
+
"semantic-release": "^24.2.0",
|
|
47
|
+
"typescript": "^5.6.3",
|
|
48
|
+
"vitest": "^2.1.5"
|
|
49
|
+
},
|
|
50
|
+
"dependencies": {
|
|
51
|
+
"next": "^15.0.3",
|
|
52
|
+
"redis": "^4.7.0"
|
|
53
|
+
}
|
|
54
|
+
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
// semantic-release configuration: releases from `main` (stable) and `beta`
// (prerelease) branches.
module.exports = {
  branches: [
    { name: 'main' }, // Stable releases
    { name: 'beta', prerelease: true }, // Beta releases
  ],
  plugins: [
    '@semantic-release/commit-analyzer', // Analyze commits to determine the release type
    '@semantic-release/release-notes-generator', // Generate release notes
    '@semantic-release/changelog', // Update changelog
    [
      '@semantic-release/npm',
      {
        npmPublish: true, // Publish to NPM
        // NOTE(review): this `tag` is applied unconditionally, so stable
        // releases from `main` would also be dist-tagged "beta"; the npm
        // dist-tag normally comes from the branch `channel` setting —
        // confirm this option is supported and intended.
        tag: 'beta', // Publish beta releases under the "beta" tag
      },
    ],
    [
      '@semantic-release/github',
      {
        assets: ['dist/*.tgz'], // Attach tarballs to GitHub releases
      },
    ],
    [
      '@semantic-release/git',
      {
        // Commit the bumped version/changelog back; [skip ci] prevents a
        // release loop.
        assets: ['package.json', 'package-lock.json', 'CHANGELOG.md'],
        message: 'chore(release): ${nextRelease.version} [skip ci]',
      },
    ],
  ],
};
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
#!/bin/sh
# Rename the starter package to the current repository's name by replacing the
# placeholder string in every regular file at the repo root.

# Get the remote repository URL
REPO_URL=$(git config --get remote.origin.url)

# Extract the repository name from the URL
REPO_NAME=$(basename -s .git "$REPO_URL")
echo "Renaming package to: $REPO_NAME"

# Rename README_FINAL.md to README.md
mv README_FINAL.md README.md

# Replace placeholders in regular files only.
# Portability: `sed -i ''` is BSD/macOS-only syntax; GNU sed would treat the
# empty string as the sed script and fail. Writing to a temp file and moving
# it back works with both implementations.
for file in *; do
  if [ -f "$file" ]; then
    sed "s/public-typescript-npm-package-starter/$REPO_NAME/g" "$file" > "$file.tmp" && mv "$file.tmp" "$file"
  fi
done
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
#!/usr/bin/env node
// Run Vitest for the test files co-aligned with the currently staged sources
// (foo.ts -> foo.test.ts), and warn about staged sources without a test file.
const { execSync } = require('child_process');
const path = require('path');
const fs = require('fs');

// Helper to find the co-aligned test file for a source file, or null.
const findTestFile = (filePath) => {
  const ext = path.extname(filePath);
  // Replace the extension at the END of the path only; String.replace(ext, …)
  // would rewrite the first occurrence of the substring anywhere in the path.
  const base = ext ? filePath.slice(0, -ext.length) : filePath;
  const testFile = `${base}.test${ext}`;
  return fs.existsSync(testFile) ? testFile : null;
};

// Run Vitest for staged test files
(async () => {
  const stagedFilesOutput = execSync('git diff --cached --name-only', {
    encoding: 'utf-8',
  });
  // filter(Boolean) drops the single empty entry produced when nothing is staged.
  const stagedFiles = stagedFilesOutput.trim().split('\n').filter(Boolean);

  const stagedTsFiles = stagedFiles.filter(
    (file) => file.endsWith('.ts') && !file.endsWith('.test.ts'),
  );

  // Probe the filesystem once per file instead of once per list.
  const testFiles = [];
  const nonTestFiles = [];
  for (const file of stagedTsFiles) {
    const testFile = findTestFile(file);
    if (testFile) {
      testFiles.push(testFile);
    } else {
      nonTestFiles.push(file);
    }
  }

  if (testFiles.length > 0) {
    console.log('Running tests for staged files:\n ', testFiles.join('\n '));

    const includeFiles = testFiles.map((testFile) => testFile.replace(/\.test\.ts$/, '.ts'));
    process.env.VITEST_COVERAGE_INCLUDE = includeFiles.join(',');

    // Quote each path so filenames containing spaces stay a single argument.
    const quoted = testFiles.map((f) => JSON.stringify(f)).join(' ');
    execSync(`npm test -- run ${quoted}`, { stdio: 'inherit' });
  } else if (nonTestFiles.length === 0) {
    console.log('No co-aligned test files found for staged files.');
  }

  if (nonTestFiles.length > 0) {
    console.error('Found some staged files without a co-aligned test files:\n\t', nonTestFiles.join('\n\t'));
  }
})();
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { CacheHandler } from "next/dist/server/lib/incremental-cache";
|
|
2
|
+
import RedisStringsHandler, { CreateRedisStringsHandlerOptions } from "./RedisStringsHandler";
|
|
3
|
+
|
|
4
|
+
// Module-level singleton: every CachedHandler instance delegates to one shared
// RedisStringsHandler, so a single set of Redis connections and in-memory maps
// is reused for the whole process.
let cachedHandler: RedisStringsHandler;

// Next.js incremental-cache handler backed by Redis.
// The first construction creates the shared RedisStringsHandler; later
// constructions reuse it and ignore their `options` argument.
// NOTE(review): the delegating methods assume the constructor has run at least
// once — confirm Next.js always instantiates the handler before calling them.
export default class CachedHandler implements CacheHandler {
  constructor(options: CreateRedisStringsHandlerOptions) {
    if (!cachedHandler) {
      console.log("created cached handler");
      cachedHandler = new RedisStringsHandler(options);
    }
  }
  // Each method below forwards verbatim; Parameters/ReturnType keep these
  // signatures in lock-step with RedisStringsHandler.
  get(...args: Parameters<RedisStringsHandler["get"]>): ReturnType<RedisStringsHandler["get"]> {
    return cachedHandler.get(...args);
  }
  set(...args: Parameters<RedisStringsHandler["set"]>): ReturnType<RedisStringsHandler["set"]> {
    return cachedHandler.set(...args);
  }
  revalidateTag(...args: Parameters<RedisStringsHandler["revalidateTag"]>): ReturnType<RedisStringsHandler["revalidateTag"]> {
    return cachedHandler.revalidateTag(...args);
  }
  resetRequestCache(...args: Parameters<RedisStringsHandler["resetRequestCache"]>): ReturnType<RedisStringsHandler["resetRequestCache"]> {
    return cachedHandler.resetRequestCache(...args);
  }
}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import { SyncedMap } from './SyncedMap';
|
|
2
|
+
export class DeduplicatedRequestHandler<
|
|
3
|
+
T extends (...args: [never, never]) => Promise<K>,
|
|
4
|
+
K,
|
|
5
|
+
> {
|
|
6
|
+
private inMemoryDeduplicationCache: SyncedMap<Promise<K>>;
|
|
7
|
+
private cachingTimeMs: number;
|
|
8
|
+
private fn: T;
|
|
9
|
+
|
|
10
|
+
constructor(
|
|
11
|
+
fn: T,
|
|
12
|
+
cachingTimeMs: number,
|
|
13
|
+
inMemoryDeduplicationCache: SyncedMap<Promise<K>>,
|
|
14
|
+
) {
|
|
15
|
+
this.fn = fn;
|
|
16
|
+
this.cachingTimeMs = cachingTimeMs;
|
|
17
|
+
this.inMemoryDeduplicationCache = inMemoryDeduplicationCache;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
// Method to manually seed a result into the cache
|
|
21
|
+
seedRequestReturn(key: string, value: K): void {
|
|
22
|
+
const resultPromise = new Promise<K>((res) => res(value));
|
|
23
|
+
this.inMemoryDeduplicationCache.set(key, resultPromise);
|
|
24
|
+
setTimeout(() => {
|
|
25
|
+
this.inMemoryDeduplicationCache.delete(key);
|
|
26
|
+
}, this.cachingTimeMs);
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
// Method to handle deduplicated requests
|
|
30
|
+
deduplicatedFunction = (key: string): T => {
|
|
31
|
+
//eslint-disable-next-line @typescript-eslint/no-this-alias
|
|
32
|
+
const self = this;
|
|
33
|
+
const dedupedFn = async (...args: [never, never]): Promise<K> => {
|
|
34
|
+
// If there's already a pending request with the same key, return it
|
|
35
|
+
if (
|
|
36
|
+
self.inMemoryDeduplicationCache &&
|
|
37
|
+
self.inMemoryDeduplicationCache.has(key)
|
|
38
|
+
) {
|
|
39
|
+
const res = await self.inMemoryDeduplicationCache
|
|
40
|
+
.get(key)!
|
|
41
|
+
.then((v) => structuredClone(v));
|
|
42
|
+
return res;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// If no pending request, call the original function and store the promise
|
|
46
|
+
const promise = self.fn(...args);
|
|
47
|
+
self.inMemoryDeduplicationCache.set(key, promise);
|
|
48
|
+
|
|
49
|
+
try {
|
|
50
|
+
const result = await promise;
|
|
51
|
+
return structuredClone(result);
|
|
52
|
+
} finally {
|
|
53
|
+
// Once the promise is resolved/rejected, remove it from the map
|
|
54
|
+
setTimeout(() => {
|
|
55
|
+
self.inMemoryDeduplicationCache.delete(key);
|
|
56
|
+
}, self.cachingTimeMs);
|
|
57
|
+
}
|
|
58
|
+
};
|
|
59
|
+
return dedupedFn as T;
|
|
60
|
+
};
|
|
61
|
+
}
|