@akinon/next 1.95.0-rc.54 → 1.95.0-snapshot-ZERO-3586-20250901132537
This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/CHANGELOG.md +70 -0
- package/api/cache.ts +41 -5
- package/data/server/category.ts +4 -2
- package/data/server/flatpage.ts +4 -1
- package/data/server/form.ts +4 -1
- package/data/server/landingpage.ts +4 -1
- package/data/server/list.ts +2 -1
- package/data/server/menu.ts +4 -1
- package/data/server/product.ts +2 -1
- package/data/server/seo.ts +4 -1
- package/data/server/special-page.ts +2 -1
- package/data/server/widget.ts +4 -1
- package/lib/cache-handler.mjs +359 -85
- package/lib/cache.ts +252 -25
- package/middlewares/pretty-url.ts +2 -1
- package/package.json +4 -3
- package/types/index.ts +1 -0
- package/with-pz-config.js +2 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,75 @@
 # @akinon/next
 
+## 1.95.0-snapshot-ZERO-3586-20250901132537
+
+### Minor Changes
+
+- 5dfeea04a: ZERO-2801: Revert ZERO-2801
+- 823d82f9: ZERO-3393: Enhance error handling in checkout middleware to ensure errors are checked for existence before processing
+- 412f0e2: ZERO-3586: Enhance caching functionality by adding support for compressed data storage and retrieval, along with a new method for setting multiple key-value pairs.
+- 28c7ea79: ZERO-3427: Refactor redirect utility to handle undefined URL and improve locale handling
+- e1aa030d: ZERO-3473: Refactor locale handling to prioritize cookie value for matched locale
+- 63774a6a: ZERO-3351: Add commerce redirection ignore list functionality and related utility
+- 2d9b2b2c9: ZERO-2816: Add segment to headers
+- 5e1feca6: Revert "ZERO-3286: Add notFound handling for chunk URLs starting with \_next"
+- 40a46853: ZERO-3182: Optimize basket update mutation with optimistic update
+- 5f7edd6: ZERO-3571: Enhance Jest configuration by adding base directory resolution and module name mapping
+- 68bbcb27: ZERO-3393: Fix error handling in checkout middleware to check for errors array length
+- d8be48fb: ZERO-3422: Update fetch method to use dynamic request method in wallet complete redirection middleware
+- b55acb76: ZERO-2577: Fix pagination bug and update usePagination hook and ensure pagination controls rendering correctly
+- f49bb74f: ZERO-3097: Add setCookie to logging in payment redirection middlewares
+- 0ad91bb: ZERO-3489: Improve error handling in data fetching across multiple pages and server functions
+- 143be2b9: ZERO-3457: Crop styles are customizable and logic improved for rendering similar products modal
+- e9541a13d: ZERO-2816: Add headers to url
+- 9b7d0de6: ZERO-3393: Improve error handling in checkout middleware to support both object and array error formats
+- 72fd4d67: ZERO-3084: Fix URL search parameters encoding in default middleware
+- c53ef7b95: ZERO-2668: The Link component has been updated to improve the logic for handling href values. Previously, if the href was not a string or started with 'http', it would return the href as is. Now, if the href is not provided, it will default to '#' to prevent any potential errors. Additionally, if the href is a string and does not start with 'http', it will be formatted with the locale and pathname, based on the localeUrlStrategy and defaultLocaleValue. This ensures that the correct href is generated based on the localization settings.
+- a8539c8c: ZERO-3439: Enhance locale handling in middleware and redirect utility
+- 16aff543: ZERO-3431: Add test script for redirect utility in package.json
+- 64699d3ff: ZERO-2761: Fix invalid import for plugin module
+- 9f8cd3bc: ZERO-3449: AI Search Active Filters & Crop Style changes have been implemented
+- e974d8e8: ZERO-3406: Fix rc build
+- 89ce46f: ZERO-3493: return 404 status code for pz-not-found pages
+- 8645d90: ZERO-3574: Refactor redirect tests: streamline mock setup, enhance locale handling, and improve URL path resolution logic
+- 7eb51ca9: ZERO-3424: Update package versions
+- 7727ae55: ZERO-3073: Refactor basket page to use server-side data fetching and simplify component structure
+- 8b1d24eb: ZERO-3422: Update fetch method to use dynamic request method in wallet complete redirection middleware
+- d552629f: ZERO-3182: Refactor basketApi to use invalidatesTags and comment out onQueryStarted logic
+- 17f87524e: ZERO-2816: Make the incoming currency lowercase
+- 65d3b862: ZERO-3054: Update headers in appFetch
+- c39c7000: ZERO-3420: Refactor Modal component
+- bbe18b9ff: ZERO-2575: Fix build error
+- 35dfb8f8: ZERO-3363: Refactor URL handling in checkout and redirection middlewares to use url.origin instead of process.env.NEXT_PUBLIC_URL
+- 4920742c2: Disable getCachedTranslations
+- b6e5b624: ZERO-3257: Enhance locale middleware to redirect using existing or default locale and support 303 status for POST requests
+- 0de55738: ZERO-3418: Update remotePatterns hostname to allow all subdomains
+- 7e56d6b6b: ZERO-2841: Update api tagTypes
+- d99a6a7d: ZERO-3457: Fixed the settings prop and made sure everything is customizable.
+- 9dc7298a: ZERO-3416: Refactor Accordion component to enhance props and improve styling flexibility
+- 33377cfd: ZERO-3267: Refactor import statement for ROUTES in error-page component
+- 43c182ee: ZERO-3054: Update Redis variable checks to conditionally include CACHE_SECRET
+- c480272: ZERO-3531: Refactor checkoutApi: Remove unnecessary invalidatesTags property from POST request from sample products
+- b00a90b1: ZERO-3436: Preserve query params on redirect
+- facf1ada: ZERO-3445: Add SameSite and Secure attributes
+- 26b2d0b: ZERO-3571: Remove test script execution from prebuild and simplify Jest module name mapping
+- eeb20bea: Revert "ZERO-3054: Refactor cache handler to use custom Redis handler and implement key hashing"
+- 99b6e7b9: ZERO-3421: Enhance Sentry error handling by adding network error detection logic and refining initialization options
+- 3bf63c8a: ZERO-3286: Add notFound handling for chunk URLs starting with \_next
+- 9be2c081: ZERO-3243: Improve basket update query handling with optimistic updates
+- f7fd459b: ZERO-3445: Refactor setCookie function to include domain handling and improve cookie string construction
+- 4de5303c: ZERO-2504: add cookie filter to api client request
+- dc678c3: ZERO-3523: Enhance redirect tests with dynamic locale handling and settings integration
+- f2c92d5c7: ZERO-2816: Update cookie name
+- a420947d: ZERO-3517: Fix optional chaining for rawData in error logging for category data handlers
+- 7bd3d9928: ZERO-2801: Refactor locale middleware to handle single locale configuration
+- acd2afdf: ZERO-3431: Fix import statement for findBaseDir in next-config test
+- 2d3f1788: ZERO-3417: Enhance FileInput component with additional props for customization
+- fdd255ee: ZERO-3054: Refactor cache handler to use custom Redis handler and implement key hashing
+- 49eeebfa: ZERO-2909: Add deleteCollectionItem query to wishlistApi
+- 3f9b8d7e7: ZERO-2761: Update plugins.js for akinon-next
+- cbdb5c14: ZERO-3448: fix set cookie domain handling for subdomain locale strategy
+- 0e82301: ZERO-3531: Add saveSampleProducts endpoint
+
 ## 1.95.0-rc.54
 
 ### Minor Changes
package/api/cache.ts
CHANGED
@@ -21,20 +21,56 @@ async function handleRequest(...args) {
   }
 
   const formData = await req.formData();
-  const body = {} as {
+  const body = {} as {
+    key: string;
+    value?: string;
+    expire?: number;
+    keyValuePairs?: string;
+    compressed?: string;
+  };
 
   formData.forEach((value, key) => {
     body[key] = value;
   });
 
-  const { key, value, expire } = body;
-  let response:
+  const { key, value, expire, keyValuePairs, compressed } = body;
+  let response: any;
 
   try {
     if (req.method === 'POST') {
-
+      // GET request - check if compressed flag is set
+      if (compressed === 'true') {
+        response = await Cache.getCompressed(key);
+      } else {
+        response = await Cache.get(key);
+      }
     } else if (req.method === 'PUT') {
-
+      if (keyValuePairs) {
+        try {
+          const parsedKeyValuePairs = JSON.parse(keyValuePairs);
+          if (
+            typeof parsedKeyValuePairs !== 'object' ||
+            parsedKeyValuePairs === null ||
+            Array.isArray(parsedKeyValuePairs)
+          ) {
+            throw new Error('Invalid keyValuePairs format - must be an object');
+          }
+          response = await Cache.mset(parsedKeyValuePairs, expire);
+        } catch (error) {
+          logger.error('Invalid keyValuePairs in mset request', { error });
+          return NextResponse.json(
+            { error: 'Invalid keyValuePairs format' },
+            { status: 400 }
+          );
+        }
+      } else {
+        // SET request - check if compressed flag is set
+        if (compressed === 'true') {
+          response = await Cache.setCompressed(key, value, expire);
+        } else {
+          response = await Cache.set(key, value, expire);
+        }
+      }
     }
   } catch (error) {
     logger.error(error);
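For orientation, here is a minimal sketch of how a caller might exercise the new proxy parameters. The route path and authorization header mirror Cache.proxyRequest in package/lib/cache.ts; the helper names (readCompressed, writeMany) and the payloads are hypothetical and not part of the package.

// Hypothetical callers of the /api/cache proxy route shown above.
const PROXY_URL = `${process.env.NEXT_PUBLIC_URL}/api/cache`;

// POST = read. With compressed=true the route answers via Cache.getCompressed.
export const readCompressed = async (key: string) => {
  const body = new URLSearchParams();
  body.append('key', key);
  body.append('compressed', 'true');

  const res = await fetch(PROXY_URL, {
    method: 'POST',
    headers: { authorization: process.env.CACHE_SECRET || '' },
    body
  });
  return res.json();
};

// PUT with keyValuePairs = bulk write via Cache.mset. The value must serialize
// to a plain object; arrays or primitives are rejected with a 400 by the route.
export const writeMany = async (
  pairs: Record<string, unknown>,
  expire?: number
) => {
  const body = new URLSearchParams();
  body.append('keyValuePairs', JSON.stringify(pairs));
  if (expire) body.append('expire', String(expire));

  await fetch(PROXY_URL, {
    method: 'PUT',
    headers: { authorization: process.env.CACHE_SECRET || '' },
    body
  });
};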
package/data/server/category.ts
CHANGED
@@ -118,7 +118,8 @@ export const getCategoryData = ({
     locale,
     getCategoryDataHandler(pk, locale, currency, searchParams, headers),
     {
-      expire: 300
+      expire: 300,
+      compressed: true
     }
   );
 };
@@ -178,7 +179,8 @@ export const getCategoryBySlugData = async ({
     locale,
     getCategoryBySlugDataHandler(slug, locale, currency),
     {
-      expire: 300
+      expire: 300,
+      compressed: true // Compress category data for memory savings
     }
   );
 };
package/data/server/flatpage.ts
CHANGED
package/data/server/form.ts
CHANGED
package/data/server/list.ts
CHANGED
package/data/server/menu.ts
CHANGED
@@ -48,6 +48,9 @@ export const getMenu = async (params?: MenuHandlerParams) => {
   return Cache.wrap(
     CacheKey.Menu(params?.depth ?? DEFAULT_DEPTH, params?.parent),
     params?.locale ?? ServerVariables.locale,
-    getMenuHandler(params)
+    getMenuHandler(params),
+    {
+      compressed: true
+    }
   );
 };
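The pattern in these data handlers is the same everywhere: pass compressed: true in the options argument of Cache.wrap so the value is written through Cache.setCompressed and read back through Cache.getCompressed. A hedged sketch of what a similar handler could look like; getWidgetHandler and CacheKey.Widget stand in for whatever key builder and fetcher a real handler would use and are not identifiers confirmed by this diff.

// Illustrative only - mirrors the getMenu/getCategoryData changes above.
export const getWidgetCached = async (slug: string, locale: string) => {
  return Cache.wrap(
    CacheKey.Widget(slug),   // placeholder key builder
    locale,                  // folded into the formatted cache key
    getWidgetHandler(slug),  // placeholder for the underlying fetch
    {
      expire: 300,           // seconds, as in category.ts
      compressed: true       // read/write through the compressed helpers
    }
  );
};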
package/data/server/product.ts
CHANGED
package/data/server/seo.ts
CHANGED
package/data/server/widget.ts
CHANGED
package/lib/cache-handler.mjs
CHANGED
@@ -2,8 +2,236 @@ import { CacheHandler } from '@neshca/cache-handler';
 import createLruHandler from '@neshca/cache-handler/local-lru';
 import createRedisHandler from '@neshca/cache-handler/redis-strings';
 import { createClient } from 'redis';
+import * as zstdWasm from '@bokuweb/zstd-wasm';
+
+let zstd;
+
+(async () => {
+  try {
+    await zstdWasm.init();
+    zstd = zstdWasm;
+  } catch (error) {
+    zstd = false;
+  }
+})();
+
+const getZstd = () => {
+  return zstd;
+};
+
+const compressValue = async (value) => {
+  try {
+    if (value && typeof value === 'object' && value.value !== undefined) {
+      const nestedValue = value.value;
+      const serializedNestedValue =
+        typeof nestedValue === 'string'
+          ? nestedValue
+          : JSON.stringify(nestedValue);
+      const originalSize = Buffer.byteLength(serializedNestedValue, 'utf8');
+
+      if (originalSize < 1024) {
+        const result = {
+          ...value,
+          tags: Array.isArray(value.tags) ? value.tags : []
+        };
+        return result;
+      }
+
+      const zstdLib = getZstd();
+      let compressed;
+
+      if (zstdLib && zstdLib !== false) {
+        compressed = zstdLib.compress(
+          Buffer.from(serializedNestedValue, 'utf8'),
+          3
+        );
+      } else {
+        return {
+          ...value,
+          tags: Array.isArray(value.tags) ? value.tags : []
+        };
+      }
+
+      const compressedBase64 = Buffer.from(compressed).toString('base64');
+
+      const result = {
+        ...value,
+        tags: Array.isArray(value.tags) ? value.tags : [],
+        lifespan: {
+          ...value.lifespan,
+          expireAge: value.lifespan?.revalidate || value.lifespan?.expireAge,
+          expireAt:
+            value.lifespan?.lastModifiedAt && value.lifespan?.revalidate
+              ? value.lifespan.lastModifiedAt + value.lifespan.revalidate
+              : value.lifespan?.expireAt
+        },
+        value: {
+          __compressed: true,
+          __method: 'zstd',
+          __originalSize: originalSize,
+          __compressedSize: compressed.length,
+          __data: compressedBase64
+        }
+      };
+
+      return result;
+    }
+
+    const serializedValue =
+      typeof value === 'string' ? value : JSON.stringify(value);
+    const originalSize = Buffer.byteLength(serializedValue, 'utf8');
+
+    if (originalSize < 1024) {
+      if (
+        value &&
+        typeof value === 'object' &&
+        value.lastModified === undefined &&
+        value.lifespan === undefined &&
+        value.value === undefined
+      ) {
+        return {
+          ...value,
+          tags: value.tags || [],
+          lastModified: Date.now(),
+          lifespan: {
+            expireAt: Math.floor(Date.now() / 1000) + 3600
+          }
+        };
+      }
+      if (
+        value &&
+        typeof value === 'object' &&
+        value.lifespan &&
+        value.lifespan.revalidate
+      ) {
+        return {
+          ...value,
+          lifespan: {
+            ...value.lifespan,
+            expireAge: value.lifespan.revalidate,
+            expireAt:
+              value.lifespan.lastModifiedAt && value.lifespan.revalidate
+                ? value.lifespan.lastModifiedAt + value.lifespan.revalidate
+                : value.lifespan.expireAt
+          }
+        };
+      }
+      return value;
+    }
+
+    const zstdLib = getZstd();
+    let compressed;
+
+    if (zstdLib && zstdLib !== false) {
+      compressed = zstdLib.compress(Buffer.from(serializedValue, 'utf8'), 3);
+    } else {
+      if (
+        value &&
+        typeof value === 'object' &&
+        value.lastModified === undefined &&
+        value.lifespan === undefined &&
+        value.value === undefined
+      ) {
+        return {
+          ...value,
+          tags: value.tags || [],
+          lastModified: Date.now(),
+          lifespan: {
+            expireAt: Math.floor(Date.now() / 1000) + 3600
+          }
+        };
+      }
+      return value;
+    }
+
+    const compressedBase64 = Buffer.from(compressed).toString('base64');
+
+    const compressedResult = {
+      __compressed: true,
+      __method: 'zstd',
+      __originalSize: originalSize,
+      __compressedSize: compressed.length,
+      __data: compressedBase64,
+      tags: [],
+      lastModified: Date.now(),
+      lifespan: { expireAt: Math.floor(Date.now() / 1000) + 3600 }
+    };
+
+    return compressedResult;
+  } catch (error) {
+    console.warn(
+      '[Cache Handler] Compression failed, storing uncompressed:',
+      error.message
+    );
+    return value;
+  }
+};
+
+const decompressValue = async (compressedData) => {
+  try {
+    if (
+      compressedData &&
+      typeof compressedData === 'object' &&
+      compressedData.value &&
+      typeof compressedData.value === 'object' &&
+      compressedData.value.__compressed
+    ) {
+      const compressedNestedValue = compressedData.value;
+      const compressedBuffer = Buffer.from(
+        compressedNestedValue.__data,
+        'base64'
+      );
+      let decompressed;
+
+      if (compressedNestedValue.__method === 'zstd') {
+        const zstdLib = getZstd();
+        if (zstdLib && zstdLib !== false) {
+          decompressed = zstdLib.decompress(compressedBuffer).toString('utf8');
+        } else {
+          throw new Error('zstd not available for decompression');
+        }
+      } else {
+        throw new Error(
+          'gzip decompression no longer supported - please invalidate cache'
+        );
+      }
+
+      return {
+        ...compressedData,
+        value: JSON.parse(decompressed)
+      };
+    }
+
+    if (
+      compressedData &&
+      typeof compressedData === 'object' &&
+      compressedData.__compressed
+    ) {
+      const compressedBuffer = Buffer.from(compressedData.__data, 'base64');
+      let decompressed;
+
+      if (compressedData.__method === 'zstd') {
+        const zstdLib = getZstd();
+        if (zstdLib && zstdLib !== false) {
+          decompressed = zstdLib.decompress(compressedBuffer).toString('utf8');
+        } else {
+          throw new Error('zstd not available for decompression');
+        }
+      } else {
+        throw new Error(
+          'gzip decompression no longer supported - please invalidate cache'
+        );
+      }
+
+      return JSON.parse(decompressed);
+    }
+
+    return compressedData;
+  } catch (error) {
+    return compressedData;
+  }
+};
 
-// Cache configuration
 const CACHE_CONFIG = {
   lru: {
     maxItemCount: 2000
@@ -22,7 +250,6 @@ const CACHE_CONFIG = {
   version: process.env.ACC_APP_VERSION || ''
 };
 
-// Use global to persist across module reloads in development
 const globalForRedis = global;
 if (!globalForRedis.redisClient) {
   globalForRedis.redisClient = null;
@@ -31,42 +258,22 @@ if (!globalForRedis.redisClient) {
   globalForRedis.connectionAttempts = 0;
 }
 
-// Logging configuration
-const debugValue = process.env.NEXT_PRIVATE_DEBUG_CACHE;
-const debug = debugValue === 'true' || debugValue === '1';
-
-let console_log;
-if (debug) {
-  // eslint-disable-next-line no-console
-  console_log = (...args) => console.log('[Cache Handler]', ...args);
-} else {
-  console_log = () => {};
-}
-
 async function getRedisClient() {
-  // If client exists and is ready, return it
   if (globalForRedis.redisClient?.isReady) {
-    console_log('Reusing existing Redis connection');
     return globalForRedis.redisClient;
   }
 
-  // If we're already connecting, wait a bit and retry
   if (globalForRedis.isConnecting) {
     await new Promise((resolve) => setTimeout(resolve, 100));
     return getRedisClient();
   }
 
-  // Start new connection
   globalForRedis.isConnecting = true;
   globalForRedis.connectionAttempts++;
 
   try {
     const redisUrl = `redis://${CACHE_CONFIG.host}:${CACHE_CONFIG.port}/${CACHE_CONFIG.bucket}`;
 
-    if (globalForRedis.connectionAttempts === 1) {
-      console_log('Creating Redis connection:', redisUrl);
-    }
-
     const redisClient = createClient({
       url: redisUrl,
       socket: {
@@ -87,7 +294,6 @@ async function getRedisClient() {
     });
 
     redisClient.on('error', (error) => {
-      // Only log the first connection error to avoid spam
       if (!globalForRedis.hasLoggedConnectionError) {
         if (error.code === 'ECONNREFUSED') {
           console.warn(
@@ -101,12 +307,10 @@ async function getRedisClient() {
     });
 
     redisClient.on('connect', () => {
-      console_log('Redis connected');
       globalForRedis.hasLoggedConnectionError = false;
     });
 
     redisClient.on('ready', () => {
-      console_log('Redis ready');
      globalForRedis.hasLoggedConnectionError = false;
    });
 
@@ -115,16 +319,6 @@ async function getRedisClient() {
     return redisClient;
   } catch (error) {
     if (!globalForRedis.hasLoggedConnectionError) {
-      if (error.code === 'ECONNREFUSED') {
-        console.warn(
-          '[Cache Handler] Could not connect to Redis - using local cache only'
-        );
-      } else {
-        console.error(
-          '[Cache Handler] Failed to connect to Redis:',
-          error.message
-        );
-      }
       globalForRedis.hasLoggedConnectionError = true;
     }
     globalForRedis.redisClient = null;
@@ -135,13 +329,10 @@ async function getRedisClient() {
 }
 
 CacheHandler.onCreation(async () => {
-  console_log('Initializing cache handlers...');
-
   let client;
   try {
     client = await getRedisClient();
   } catch (error) {
-    // Error already logged in getRedisClient, just return local handler
     return {
       handlers: [createLruHandler(CACHE_CONFIG.lru)]
     };
@@ -150,98 +341,183 @@ CacheHandler.onCreation(async () => {
   const redisHandler = createRedisHandler({
     client,
     timeoutMs: CACHE_CONFIG.redis.timeoutMs,
-    keyExpirationStrategy: '
+    keyExpirationStrategy: 'EXPIREAT'
   });
 
   const localHandler = createLruHandler(CACHE_CONFIG.lru);
 
-
-  const versionPrefix =
+  const CACHE_VERSION = 'v2';
+  const versionPrefix = `${CACHE_VERSION}_`;
 
-
-  const
-
-  :
-
-  const versionKeyObject = versionPrefix
-    ? (key) => ({ ...key, key: `${versionPrefix}${key.key}` })
-    : (key) => key;
+  const versionKeyString = (key) => `${versionPrefix}${key}`;
+  const versionKeyObject = (key) => ({
+    ...key,
+    key: `${versionPrefix}${key.key}`
+  });
 
-  // Main version key function that routes to optimized paths
   const versionKey = (key) => {
     return typeof key === 'string'
       ? versionKeyString(key)
       : versionKeyObject(key);
   };
 
-  // Create a custom handler that checks local first, then Redis
   const customHandler = {
     name: 'custom-local-then-redis',
     get: async (key, context) => {
       const vKey = versionKey(key);
-      console_log(
-        'GET called for key:',
-        typeof vKey === 'string' ? vKey : vKey?.key
-      );
 
-      // Check local cache first
-      console_log('Checking local cache...');
       const localResult = await localHandler.get(vKey, context);
 
       if (localResult) {
-
+        if (
+          localResult &&
+          typeof localResult === 'object' &&
+          (localResult.__compressed ||
+            (localResult.value && localResult.value.__compressed) ||
+            localResult.compressed !== undefined)
+        ) {
+          try {
+            const decompressed = await decompressValue(localResult);
+            return typeof decompressed === 'string'
+              ? JSON.parse(decompressed)
+              : decompressed;
+          } catch (error) {
+            console.warn(
+              '[Cache Handler] Failed to decompress local cache value:',
+              error.message
+            );
+            return localResult;
+          }
+        }
+
         return localResult;
       }
 
-      console_log('Not found in local, checking Redis...');
       try {
         const redisResult = await redisHandler.get(vKey, context);
 
         if (redisResult) {
-
-
+          let finalResult = redisResult;
+
+          if (typeof redisResult === 'string') {
+            try {
+              finalResult = JSON.parse(redisResult);
+            } catch (parseError) {
+              finalResult = redisResult;
+            }
+          }
+
+          if (
+            finalResult &&
+            typeof finalResult === 'object' &&
+            (finalResult.__compressed ||
+              (finalResult.value && finalResult.value.__compressed) ||
+              finalResult.compressed !== undefined)
+          ) {
+            try {
+              const decompressed = await decompressValue(finalResult);
+              finalResult =
+                typeof decompressed === 'string'
+                  ? JSON.parse(decompressed)
+                  : decompressed;
+            } catch (error) {
+              console.warn(
+                '[Cache Handler] Failed to decompress Redis cache value:',
+                error.message
+              );
+            }
+          }
+
           try {
-            await localHandler.set(vKey,
-            console_log('Synced to local cache');
+            await localHandler.set(vKey, finalResult, context);
           } catch (error) {
-
+            console.warn(
+              '[Cache Handler] Failed to sync to local:',
+              error.message
+            );
           }
-          return
+          return finalResult;
         }
       } catch (error) {
-
+        console.warn('[Cache Handler] Redis error:', error.message);
       }
 
-      console_log('Not found in any cache');
       return undefined;
     },
     set: async (key, value, context) => {
       const vKey = versionKey(key);
-
-
-
-
-
-
-
-
-
-
-
+
+      let compressedValue;
+      let shouldUseCompressed = false;
+
+      try {
+        compressedValue = await compressValue(value);
+
+        shouldUseCompressed =
+          compressedValue !== value &&
+          (compressedValue?.__compressed ||
+            compressedValue?.value?.__compressed);
+      } catch (error) {
+        console.warn(
+          '[Cache Handler] Compression failed, using original value:',
+          error.message
+        );
+        compressedValue = value;
+        shouldUseCompressed = false;
+      }
+
+      let redisSetResult;
+
+      if (shouldUseCompressed) {
+        try {
+          await redisHandler.set(vKey, compressedValue, context);
+
+          redisSetResult = { status: 'fulfilled' };
+        } catch (compressionError) {
+          try {
+            await redisHandler.set(vKey, value, context);
+
+            redisSetResult = { status: 'fulfilled' };
+          } catch (fallbackError) {
+            redisSetResult = { status: 'rejected', reason: fallbackError };
+          }
+        }
+      } else {
+        try {
+          await redisHandler.set(vKey, value, context);
+          redisSetResult = { status: 'fulfilled' };
+        } catch (error) {
+          redisSetResult = { status: 'rejected', reason: error };
+        }
+      }
+
+      let localSetResult;
+      try {
+        await localHandler.set(vKey, value, context);
+        localSetResult = { status: 'fulfilled' };
+      } catch (error) {
+        localSetResult = { status: 'rejected', reason: error };
+      }
+
+      const results = [localSetResult, redisSetResult];
+
+      console.warn('SET Results:', {
+        local: results[0].status,
+        redis: results[1].status,
+        localError: results[0].reason?.message,
+        redisError: results[1].reason?.message,
+        compressionUsed: shouldUseCompressed
+      });
     },
     delete: async (key, context) => {
       const vKey = versionKey(key);
-
-        'DELETE called for key:',
-        typeof vKey === 'string' ? vKey : vKey?.key
-      );
+
       await Promise.allSettled([
         localHandler.delete?.(vKey, context),
         redisHandler.delete?.(vKey, context)
       ]);
     },
     revalidateTag: async (tags, context) => {
-      console_log('REVALIDATE_TAG called for tags:', tags);
       await Promise.allSettled([
         localHandler.revalidateTag?.(tags, context),
         redisHandler.revalidateTag?.(tags, context)
@@ -249,8 +525,6 @@ CacheHandler.onCreation(async () => {
     }
   };
 
-  console_log('[Cache Handler] Handlers initialized successfully');
-
   return {
     handlers: [customHandler]
   };
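To make the storage format concrete: entries of 1 KB or more are wrapped in a small envelope whose __data field holds a base64-encoded zstd frame, and decompressValue reverses that. Below is a minimal round-trip sketch with @bokuweb/zstd-wasm, assuming the same compression level (3) as compressValue; the roundTrip helper itself is illustrative and not part of the package.

import { init, compress, decompress } from '@bokuweb/zstd-wasm';

// Shape of the envelope the handler writes for large entries.
type CompressedEnvelope = {
  __compressed: true;
  __method: 'zstd';
  __originalSize: number;
  __compressedSize: number;
  __data: string; // base64-encoded zstd frame
};

export const roundTrip = async (payload: unknown): Promise<unknown> => {
  await init(); // the handler performs this once at module load

  const serialized = JSON.stringify(payload);
  const frame = compress(Buffer.from(serialized, 'utf8'), 3); // level 3, as above

  const envelope: CompressedEnvelope = {
    __compressed: true,
    __method: 'zstd',
    __originalSize: Buffer.byteLength(serialized, 'utf8'),
    __compressedSize: frame.length,
    __data: Buffer.from(frame).toString('base64')
  };

  // decompressValue path: base64-decode, zstd-decompress, JSON.parse
  const restored = Buffer.from(
    decompress(Buffer.from(envelope.__data, 'base64'))
  ).toString('utf8');

  return JSON.parse(restored);
};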
package/lib/cache.ts
CHANGED
@@ -3,6 +3,65 @@ import { RedisClientType } from 'redis';
 import Settings from 'settings';
 import { CacheOptions } from '../types';
 import logger from '../utils/log';
+const CACHE_VERSION = 'v2';
+
+const compressData = async (data: string): Promise<Uint8Array> => {
+  const stream = new CompressionStream('gzip');
+  const writer = stream.writable.getWriter();
+  const reader = stream.readable.getReader();
+
+  writer.write(new TextEncoder().encode(data));
+  writer.close();
+
+  const chunks: Uint8Array[] = [];
+  let done = false;
+
+  while (!done) {
+    const { value, done: readerDone } = await reader.read();
+    done = readerDone;
+    if (value) chunks.push(value);
+  }
+
+  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+  const result = new Uint8Array(totalLength);
+  let offset = 0;
+
+  for (const chunk of chunks) {
+    result.set(chunk, offset);
+    offset += chunk.length;
+  }
+
+  return result;
+};
+
+const decompressData = async (compressed: Uint8Array): Promise<string> => {
+  const stream = new DecompressionStream('gzip');
+  const writer = stream.writable.getWriter();
+  const reader = stream.readable.getReader();
+
+  writer.write(compressed);
+  writer.close();
+
+  const chunks: Uint8Array[] = [];
+  let done = false;
+
+  while (!done) {
+    const { value, done: readerDone } = await reader.read();
+    done = readerDone;
+    if (value) chunks.push(value);
+  }
+
+  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+  const result = new Uint8Array(totalLength);
+  let offset = 0;
+
+  for (const chunk of chunks) {
+    result.set(chunk, offset);
+    offset += chunk.length;
+  }
+
+  return new TextDecoder().decode(result);
+};
 
 const hashCacheKey = (object?: Record<string, string>) => {
   if (!object) {
@@ -60,8 +119,32 @@ export const CacheKey = {
 export class Cache {
   static PROXY_URL = `${process.env.NEXT_PUBLIC_URL}/api/cache`;
 
+  private static serializeValue(value: any): string {
+    return typeof value === 'object' ? JSON.stringify(value) : String(value);
+  }
+
+  private static validateKey(key: string): boolean {
+    return !(!key || key.trim() === '');
+  }
+
+  private static validateKeyValuePairs(keyValuePairs: Record<string, any>): {
+    isValid: boolean;
+    invalidKeys: string[];
+  } {
+    if (!keyValuePairs || Object.keys(keyValuePairs).length === 0) {
+      return { isValid: false, invalidKeys: [] };
+    }
+
+    const invalidKeys = Object.keys(keyValuePairs).filter(
+      (key) => !this.validateKey(key)
+    );
+    return { isValid: invalidKeys.length === 0, invalidKeys };
+  }
+
   static formatKey(key: string, locale: string) {
-    return encodeURIComponent(
+    return encodeURIComponent(
+      `${CACHE_VERSION}_${Settings.commerceUrl}_${locale}_${key}`
+    );
   }
 
   static clientPool: Pool<RedisClientType> = createPool(
@@ -98,9 +181,9 @@
     return await Cache.clientPool.acquire();
   }
 
-  static async get(key: string) {
-    let value;
-    let client;
+  static async get(key: string): Promise<any> {
+    let value: any;
+    let client: RedisClientType | undefined;
 
     try {
       client = await Cache.getClient();
@@ -110,9 +193,7 @@
       } else {
         value = null;
       }
-      logger.debug('Redis get success', { key, value });
     } catch (error) {
-      logger.error('Redis get error', { key, error });
       value = null;
     } finally {
       if (client) {
@@ -123,14 +204,13 @@
     return value;
   }
 
-  static async set(key: string, value: any, expire?: number) {
+  static async set(key: string, value: any, expire?: number): Promise<boolean> {
     let success = false;
-    let client;
+    let client: RedisClientType | undefined;
 
     try {
       client = await Cache.getClient();
-      const serializedValue =
-        typeof value === 'object' ? JSON.stringify(value) : value;
+      const serializedValue = Cache.serializeValue(value);
 
       if (expire) {
         await client.set(key, serializedValue, { EX: expire });
@@ -139,9 +219,7 @@
       }
 
       success = true;
-      logger.debug('Redis set success', { key, value });
     } catch (error) {
-      logger.error('Redis set error', { key, error });
       success = false;
     } finally {
       if (client) {
@@ -170,7 +248,8 @@
 
     const defaultOptions: CacheOptions = {
       cache: true,
-      expire: Settings.redis.defaultExpirationTime
+      expire: Settings.redis.defaultExpirationTime,
+      compressed: process.env.CACHE_COMPRESSION_ENABLED !== 'false'
     };
 
     const _options = Object.assign(defaultOptions, options);
@@ -180,21 +259,22 @@
       _options.expire = 120;
     }
 
-    logger.debug('Cache wrap', { key, formattedKey, _options });
-
     if (_options.cache) {
-      let cachedValue;
+      let cachedValue: any;
 
      if (_options.useProxy) {
        const body = new URLSearchParams();

         body.append('key', formattedKey);
+        if (_options.compressed) {
+          body.append('compressed', 'true');
+        }
 
         cachedValue = await Cache.proxyRequest('POST', body);
-        logger.debug('Cache proxy request success', { key });
-        logger.trace('Cache proxy request', { key, cachedValue });
       } else {
-        cachedValue =
+        cachedValue = _options.compressed
+          ? await Cache.getCompressed(formattedKey)
+          : await Cache.get(formattedKey);
       }
 
       if (cachedValue) {
@@ -202,8 +282,6 @@
       }
     }
 
-    logger.debug('Redis cache miss. Setting new value...', { key });
-
     const data = await handler();
 
     if (data && _options.cache) {
@@ -217,14 +295,19 @@
           'expire',
           String(_options?.expire ?? Settings.redis.defaultExpirationTime)
         );
+        if (_options.compressed) {
+          body.append('compressed', 'true');
+        }
         await Cache.proxyRequest('PUT', body);
-
-        logger.debug('Cache proxy request', { key, body: body.toString() });
       } catch (error) {
         logger.error('Cache proxy error', error);
       }
     } else {
-
+      if (_options.compressed) {
+        await Cache.setCompressed(formattedKey, data, _options?.expire);
+      } else {
+        await Cache.set(formattedKey, JSON.stringify(data), _options?.expire);
+      }
     }
   }
 
@@ -236,7 +319,7 @@
     await fetch(Cache.PROXY_URL, {
       method,
       headers: {
-        authorization: process.env.CACHE_SECRET
+        authorization: process.env.CACHE_SECRET || ''
       },
       body
     })
@@ -244,4 +327,148 @@
 
     return response;
   }
+
+  static async mset(
+    keyValuePairs: Record<string, any>,
+    expire?: number
+  ): Promise<boolean> {
+    const validation = Cache.validateKeyValuePairs(keyValuePairs);
+    if (!validation.isValid) {
+      if (validation.invalidKeys.length > 0) {
+        logger.error('Invalid keys in mset', {
+          invalidKeys: validation.invalidKeys
+        });
+      } else {
+        logger.warn('mset called with empty keyValuePairs');
+      }
+      return false;
+    }
+
+    let success = false;
+    let client: RedisClientType | undefined;
+
+    try {
+      client = await Cache.getClient();
+      const pipeline = client.multi();
+
+      Object.entries(keyValuePairs).forEach(([key, value]) => {
+        const serializedValue = Cache.serializeValue(value);
+        if (expire) {
+          pipeline.set(key, serializedValue, { EX: expire });
+        } else {
+          pipeline.set(key, serializedValue);
+        }
+      });
+
+      const results = await pipeline.exec();
+
+      const failures =
+        results?.filter((result) => result instanceof Error) || [];
+
+      if (failures.length > 0) {
+        success = false;
+      } else {
+        success = true;
+      }
+    } catch (error) {
+      success = false;
+    } finally {
+      if (client) {
+        await Cache.clientPool.release(client);
+      }
+    }
+
+    return success;
+  }
+
+  static async setCompressed(
+    key: string,
+    value: any,
+    expire?: number
+  ): Promise<boolean> {
+    if (!Cache.validateKey(key)) {
+      return false;
+    }
+
+    let success = false;
+    let client: RedisClientType | undefined;
+
+    try {
+      client = await Cache.getClient();
+      const serializedValue = Cache.serializeValue(value);
+
+      try {
+        const compressed = await compressData(serializedValue);
+        const compressedBase64 = Buffer.from(compressed).toString('base64');
+
+        if (expire) {
+          await client.set(key, compressedBase64, { EX: expire });
+        } else {
+          await client.set(key, compressedBase64);
+        }
+
+        success = true;
+      } catch (compressionError) {
+        if (expire) {
+          await client.set(key, serializedValue, { EX: expire });
+        } else {
+          await client.set(key, serializedValue);
+        }
+
+        success = true;
+      }
+    } catch (error) {
+      success = false;
+    } finally {
+      if (client) {
+        await Cache.clientPool.release(client);
+      }
+    }
+
+    return success;
+  }
+
+  static async getCompressed(key: string): Promise<unknown> {
+    if (!Cache.validateKey(key)) {
+      return null;
+    }
+
+    let value: unknown;
+    let client: RedisClientType | undefined;
+
+    try {
+      client = await Cache.getClient();
+      const compressed = await client.get(key);
+
+      if (compressed) {
+        const compressedBuffer = Buffer.from(compressed, 'base64');
+
+        try {
+          const decompressedString = await decompressData(
+            new Uint8Array(compressedBuffer)
+          );
+          value = JSON.parse(decompressedString);
+          return value;
+        } catch (decompressionError) {
+          try {
+            const rawString = compressed;
+            const parsedData = JSON.parse(rawString);
+            return parsedData;
+          } catch (jsonError) {
+            return null;
+          }
+        }
+      } else {
+        value = null;
+      }
+    } catch (error) {
+      value = null;
+    } finally {
+      if (client) {
+        await Cache.clientPool.release(client);
+      }
+    }
+
+    return value;
+  }
 }
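A short, hypothetical illustration of the new Cache surface added above (mset, setCompressed, getCompressed); the keys and payloads are made up for illustration only.

const locale = 'en';
const key = Cache.formatKey('example:homepage', locale); // now prefixed with the v2 cache version

// Write a gzip-compressed entry (falls back to an uncompressed set if compression throws)
await Cache.setCompressed(key, { widgets: [1, 2, 3] }, 300);

// Read it back; returns the parsed object, or null on a miss, bad key, or unreadable payload
const widgets = await Cache.getCompressed(key);

// Write several plain entries in one Redis pipeline; returns false if any key is empty
await Cache.mset(
  {
    [Cache.formatKey('example:a', locale)]: { id: 'a' },
    [Cache.formatKey('example:b', locale)]: { id: 'b' }
  },
  120
);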
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@akinon/next",
   "description": "Core package for Project Zero Next",
-  "version": "1.95.0-
+  "version": "1.95.0-snapshot-ZERO-3586-20250901132537",
   "private": false,
   "license": "MIT",
   "bin": {
@@ -17,13 +17,14 @@
     "test": "jest"
   },
   "dependencies": {
+    "@bokuweb/zstd-wasm": "^0.0.27",
+    "@neshca/cache-handler": "1.9.0",
     "@opentelemetry/exporter-trace-otlp-http": "0.46.0",
     "@opentelemetry/resources": "1.19.0",
     "@opentelemetry/sdk-node": "0.46.0",
     "@opentelemetry/sdk-trace-node": "1.19.0",
     "@opentelemetry/semantic-conventions": "1.19.0",
     "@reduxjs/toolkit": "1.9.7",
-    "@neshca/cache-handler": "1.9.0",
     "@sentry/nextjs": "9.5.0",
     "cross-spawn": "7.0.3",
     "generic-pool": "3.9.0",
@@ -34,7 +35,7 @@
     "set-cookie-parser": "2.6.0"
   },
   "devDependencies": {
-    "@akinon/eslint-plugin-projectzero": "1.95.0-
+    "@akinon/eslint-plugin-projectzero": "1.95.0-snapshot-ZERO-3586-20250901132537",
     "@babel/core": "7.26.10",
     "@babel/preset-env": "7.26.9",
     "@babel/preset-typescript": "7.27.0",
package/types/index.ts
CHANGED
package/with-pz-config.js
CHANGED