@akinon/next 1.93.0-snapshot-ZERO-3586-20250827144743 → 1.93.0-snapshot-ZERO-3586-20250827165044

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,6 @@
  # @akinon/next

- ## 1.93.0-snapshot-ZERO-3586-20250827144743
+ ## 1.93.0-snapshot-ZERO-3586-20250827165044

  ### Minor Changes

@@ -11,7 +11,7 @@
  - e1aa030d: ZERO-3473: Refactor locale handling to prioritize cookie value for matched locale
  - 6e6b0a9e: ZERO-3422: Add pz-flow-payment package
  - 63774a6a: ZERO-3351: Add commerce redirection ignore list functionality and related utility
- - 2d9b2b2c9: ZERO-2816: Add segment to headers
+ - 2d9b2b2c: ZERO-2816: Add segment to headers
  - 5e1feca6: Revert "ZERO-3286: Add notFound handling for chunk URLs starting with \_next"
  - 40a46853: ZERO-3182: Optimize basket update mutation with optimistic update
  - 5f7edd6: ZERO-3571: Enhance Jest configuration by adding base directory resolution and module name mapping
@@ -21,13 +21,13 @@
  - f49bb74f: ZERO-3097: Add setCookie to logging in payment redirection middlewares
  - 0ad91bbd: ZERO-3489: Improve error handling in data fetching across multiple pages and server functions
  - 143be2b9: ZERO-3457: Crop styles are customizable and logic improved for rendering similar products modal
- - e9541a13d: ZERO-2816: Add headers to url
+ - e9541a13: ZERO-2816: Add headers to url
  - 9b7d0de6: ZERO-3393: Improve error handling in checkout middleware to support both object and array error formats
  - 72fd4d67: ZERO-3084: Fix URL search parameters encoding in default middleware
  - c53ef7b95: ZERO-2668: The Link component has been updated to improve the logic for handling href values. Previously, if the href was not a string or started with 'http', it would return the href as is. Now, if the href is not provided, it will default to '#' to prevent any potential errors. Additionally, if the href is a string and does not start with 'http', it will be formatted with the locale and pathname, based on the localeUrlStrategy and defaultLocaleValue. This ensures that the correct href is generated based on the localization settings.
  - a8539c8c: ZERO-3439: Enhance locale handling in middleware and redirect utility
  - 16aff543: ZERO-3431: Add test script for redirect utility in package.json
- - 64699d3ff: ZERO-2761: Fix invalid import for plugin module
+ - 64699d3f: ZERO-2761: Fix invalid import for plugin module
  - 9f8cd3bc: ZERO-3449: AI Search Active Filters & Crop Style changes have been implemented
  - e974d8e8: ZERO-3406: Fix rc build
  - 89ce46fc: ZERO-3493: return 404 status code for pz-not-found pages
@@ -37,7 +37,7 @@
  - 7727ae55: ZERO-3073: Refactor basket page to use server-side data fetching and simplify component structure
  - 8b1d24eb: ZERO-3422: Update fetch method to use dynamic request method in wallet complete redirection middleware
  - d552629f: ZERO-3182: Refactor basketApi to use invalidatesTags and comment out onQueryStarted logic
- - 17f87524e: ZERO-2816: Make the incoming currency lowercase
+ - 17f87524: ZERO-2816: Make the incoming currency lowercase
  - 65d3b862: ZERO-3054: Update headers in appFetch
  - 0abde6bb: ZERO-3422: Update fetch method to use dynamic request method in wallet complete redirection middleware
  - 72ad7bb1: ZERO-3422: Add Flow Payment to the list of available plugins
@@ -46,7 +46,7 @@
  - bbe18b9ff: ZERO-2575: Fix build error
  - 17bfadc4: ZERO-3275: Disable OpenTelemetry monitoring in production environment
  - 35dfb8f8: ZERO-3363: Refactor URL handling in checkout and redirection middlewares to use url.origin instead of process.env.NEXT_PUBLIC_URL
- - 4920742c2: Disable getCachedTranslations
+ - 4920742c: Disable getCachedTranslations
  - b6e5b624: ZERO-3257: Enhance locale middleware to redirect using existing or default locale and support 303 status for POST requests
  - 0de55738: ZERO-3418: Update remotePatterns hostname to allow all subdomains
  - 7e56d6b6: ZERO-2841: Update api tagTypes
@@ -68,18 +68,18 @@
  - f7fd459b: ZERO-3445: Refactor setCookie function to include domain handling and improve cookie string construction
  - 4de5303c: ZERO-2504: add cookie filter to api client request
  - dc678c3: ZERO-3523: Enhance redirect tests with dynamic locale handling and settings integration
- - f2c92d5c7: ZERO-2816: Update cookie name
+ - f2c92d5c: ZERO-2816: Update cookie name
  - a420947d: ZERO-3517: Fix optional chaining for rawData in error logging for category data handlers
- - 7bd3d9928: ZERO-2801: Refactor locale middleware to handle single locale configuration
+ - 7bd3d992: ZERO-2801: Refactor locale middleware to handle single locale configuration
  - acd2afdf: ZERO-3431: Fix import statement for findBaseDir in next-config test
  - 2d3f1788: ZERO-3417: Enhance FileInput component with additional props for customization
  - fdd255ee: ZERO-3054: Refactor cache handler to use custom Redis handler and implement key hashing
  - b434ac8: ZERO-3545: Update fetchCheckout API URL to include page parameter
  - 49eeebfa: ZERO-2909: Add deleteCollectionItem query to wishlistApi
- - 3f9b8d7e7: ZERO-2761: Update plugins.js for akinon-next
+ - 3f9b8d7e: ZERO-2761: Update plugins.js for akinon-next
  - fee608dd: ZERO-3422: Refactor body handling in wallet complete redirection middleware
  - cbdb5c14: ZERO-3448: fix set cookie domain handling for subdomain locale strategy
- - 0e82301: ZERO-3531: Add saveSampleProducts endpoint
+ - 0e823010: ZERO-3531: Add saveSampleProducts endpoint

  ## 1.93.0-rc.52

@@ -3,6 +3,179 @@ import createLruHandler from '@neshca/cache-handler/local-lru';
  import createRedisHandler from '@neshca/cache-handler/redis-strings';
  import { createClient } from 'redis';

+ // Compression utilities for route cache
+ let zstd = null;
+ let isInitialized = false;
+
+ const getZstd = async () => {
+   if (zstd === null) {
+     try {
+       zstd = await import('@bokuweb/zstd-wasm');
+       if (!isInitialized) {
+         await zstd.init();
+         isInitialized = true;
+       }
+     } catch (error) {
+       console.warn('[Cache Handler] ZSTD compression failed to initialize, will use edge runtime fallback');
+       zstd = false;
+     }
+   }
+   return zstd;
+ };
+
+ // Edge runtime compatible compression fallback
+ const edgeCompress = async (data) => {
+   if (typeof CompressionStream !== 'undefined') {
+     const stream = new CompressionStream('gzip');
+     const writer = stream.writable.getWriter();
+     const reader = stream.readable.getReader();
+
+     writer.write(new TextEncoder().encode(data));
+     writer.close();
+
+     const chunks = [];
+     let done = false;
+
+     while (!done) {
+       const { value, done: readerDone } = await reader.read();
+       done = readerDone;
+       if (value) chunks.push(value);
+     }
+
+     const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+     const result = new Uint8Array(totalLength);
+     let offset = 0;
+
+     for (const chunk of chunks) {
+       result.set(chunk, offset);
+       offset += chunk.length;
+     }
+
+     return result;
+   }
+
+   // Node.js fallback
+   const { deflate } = await import('zlib');
+   const { promisify } = await import('util');
+   const deflateAsync = promisify(deflate);
+   const inputBuffer = new TextEncoder().encode(data);
+   const compressed = await deflateAsync(inputBuffer);
+   return new Uint8Array(compressed.buffer, compressed.byteOffset, compressed.byteLength);
+ };
+
+ const edgeDecompress = async (compressed) => {
+   if (typeof DecompressionStream !== 'undefined') {
+     const stream = new DecompressionStream('gzip');
+     const writer = stream.writable.getWriter();
+     const reader = stream.readable.getReader();
+
+     writer.write(compressed);
+     writer.close();
+
+     const chunks = [];
+     let done = false;
+
+     while (!done) {
+       const { value, done: readerDone } = await reader.read();
+       done = readerDone;
+       if (value) chunks.push(value);
+     }
+
+     const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+     const result = new Uint8Array(totalLength);
+     let offset = 0;
+
+     for (const chunk of chunks) {
+       result.set(chunk, offset);
+       offset += chunk.length;
+     }
+
+     return new TextDecoder().decode(result);
+   }
+
+   // Node.js fallback
+   const { inflate } = await import('zlib');
+   const { promisify } = await import('util');
+   const inflateAsync = promisify(inflate);
+   const inputBuffer = new Uint8Array(compressed.buffer, compressed.byteOffset, compressed.byteLength);
+   const decompressed = await inflateAsync(inputBuffer);
+   return decompressed.toString('utf8');
+ };
+
+ // Compression/decompression functions for cache values
+ const compressValue = async (value) => {
+   try {
+     const serializedValue = typeof value === 'string' ? value : JSON.stringify(value);
+     const originalSize = Buffer.byteLength(serializedValue, 'utf8');
+
+     // Only compress if larger than 1KB to avoid overhead for small values
+     if (originalSize < 1024) {
+       return { compressed: false, data: serializedValue };
+     }
+
+     const zstdLib = await getZstd();
+     let compressed;
+     let method = 'none';
+
+     if (zstdLib && zstdLib !== false) {
+       // Use zstd compression in Node.js
+       compressed = zstdLib.compress(Buffer.from(serializedValue, 'utf8'), 3);
+       method = 'zstd';
+     } else {
+       // Use Edge runtime fallback compression
+       compressed = await edgeCompress(serializedValue);
+       method = 'gzip';
+     }
+
+     const compressedBase64 = Buffer.from(compressed).toString('base64');
+     const compressionRatio = ((originalSize - compressed.length) / originalSize * 100);
+
+     console_log(`[Cache Handler] Compressed ${originalSize} → ${compressed.length} bytes (${compressionRatio.toFixed(1)}% reduction, method: ${method})`);
+
+     return {
+       compressed: true,
+       data: compressedBase64,
+       originalSize,
+       compressedSize: compressed.length,
+       method
+     };
+   } catch (error) {
+     console.warn('[Cache Handler] Compression failed, storing uncompressed:', error.message);
+     const serializedValue = typeof value === 'string' ? value : JSON.stringify(value);
+     return { compressed: false, data: serializedValue };
+   }
+ };
+
+ const decompressValue = async (compressedData) => {
+   try {
+     if (!compressedData.compressed) {
+       return typeof compressedData.data === 'string' ? compressedData.data : JSON.stringify(compressedData.data);
+     }
+
+     const compressedBuffer = Buffer.from(compressedData.data, 'base64');
+     let decompressed;
+
+     if (compressedData.method === 'zstd') {
+       const zstdLib = await getZstd();
+       if (zstdLib && zstdLib !== false) {
+         decompressed = zstdLib.decompress(compressedBuffer).toString('utf8');
+       } else {
+         throw new Error('zstd not available for decompression');
+       }
+     } else {
+       // Use edge runtime fallback decompression
+       decompressed = await edgeDecompress(new Uint8Array(compressedBuffer));
+     }
+
+     console_log(`[Cache Handler] Decompressed ${compressedBuffer.length} → ${decompressed.length} bytes`);
+     return decompressed;
+   } catch (error) {
+     console.warn('[Cache Handler] Decompression failed:', error.message);
+     // Return original data if decompression fails
+     return typeof compressedData.data === 'string' ? compressedData.data : JSON.stringify(compressedData.data);
+   }
+ };
+
  // Cache configuration
  const CACHE_CONFIG = {
    lru: {
@@ -188,6 +361,16 @@ CacheHandler.onCreation(async () => {

      if (localResult) {
        console_log('Found in local cache');
+       // Check if it's compressed data
+       if (localResult && typeof localResult === 'object' && localResult.compressed !== undefined) {
+         try {
+           const decompressed = await decompressValue(localResult);
+           return JSON.parse(decompressed);
+         } catch (error) {
+           console.warn('[Cache Handler] Failed to decompress local cache value:', error.message);
+           return localResult;
+         }
+       }
        return localResult;
      }

@@ -197,14 +380,28 @@ CacheHandler.onCreation(async () => {

      if (redisResult) {
        console_log('Found in Redis');
+
+       let finalResult = redisResult;
+
+       // Check if it's compressed data and decompress
+       if (redisResult && typeof redisResult === 'object' && redisResult.compressed !== undefined) {
+         try {
+           const decompressed = await decompressValue(redisResult);
+           finalResult = JSON.parse(decompressed);
+         } catch (error) {
+           console.warn('[Cache Handler] Failed to decompress Redis cache value:', error.message);
+           finalResult = redisResult;
+         }
+       }
+
        // Sync back to local cache for faster future access
        try {
-         await localHandler.set(vKey, redisResult, context);
+         await localHandler.set(vKey, finalResult, context);
          console_log('Synced to local cache');
        } catch (error) {
          console_log('Failed to sync to local:', error.message);
        }
-       return redisResult;
+       return finalResult;
      }
    } catch (error) {
      console_log('Redis error:', error.message);
@@ -219,11 +416,21 @@ CacheHandler.onCreation(async () => {
      'SET called for key:',
      typeof vKey === 'string' ? vKey : vKey?.key
    );
-   // Set to both caches
+
+   // Compress the value before storing in Redis
+   let compressedValue;
+   try {
+     compressedValue = await compressValue(value);
+   } catch (error) {
+     console.warn('[Cache Handler] Compression failed, using original value:', error.message);
+     compressedValue = { compressed: false, data: value };
+   }
+
+   // Set to both caches - local gets original, Redis gets compressed
    await Promise.allSettled([
-     localHandler.set(vKey, value, context),
+     localHandler.set(vKey, value, context), // Local cache uses original value for speed
      redisHandler
-       .set(vKey, value, context)
+       .set(vKey, compressedValue, context)
       .catch((error) => console_log('Redis SET error:', error.message))
    ]);
  },
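The envelope written by `compressValue` is what both read paths sniff for (`typeof result === 'object' && result.compressed !== undefined`). Below is a minimal sketch of that round trip, assuming Node.js and letting zlib's gzip stand in for both the zstd and CompressionStream branches; `pack`, `unpack`, and `Envelope` are illustrative names, not part of the package's API.

```ts
// Sketch only: gzip stands in for both codecs used by the real handler.
import { gzipSync, gunzipSync } from 'zlib';

type Envelope = {
  compressed: boolean;
  data: string; // base64 when compressed, plain JSON string otherwise
  method?: 'zstd' | 'gzip';
  originalSize?: number;
  compressedSize?: number;
};

const pack = (value: unknown): Envelope => {
  const json = typeof value === 'string' ? value : JSON.stringify(value);
  const originalSize = Buffer.byteLength(json, 'utf8');
  // Values under 1KB are stored as-is, mirroring the handler's threshold
  if (originalSize < 1024) {
    return { compressed: false, data: json };
  }
  const compressed = gzipSync(Buffer.from(json, 'utf8'));
  return {
    compressed: true,
    data: compressed.toString('base64'),
    method: 'gzip',
    originalSize,
    compressedSize: compressed.length
  };
};

const unpack = (env: Envelope): string =>
  env.compressed
    ? gunzipSync(Buffer.from(env.data, 'base64')).toString('utf8')
    : env.data;

// Round trip: GET sees `compressed !== undefined` and unwraps before JSON.parse
const env = pack({ route: '/p/some-product', html: 'x'.repeat(4096) });
console.log(env.method, JSON.parse(unpack(env)).route);
```

Note the asymmetry the diff comments call out: only Redis holds envelopes, while the local LRU keeps the original value, so hot entries skip decompression entirely and the unwrap cost is paid only on a local-cache miss.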
package/lib/cache.ts CHANGED
@@ -8,6 +8,85 @@ const CACHE_VERSION = 'v2';
  let zstd: any = null;
  let isInitialized = false;

+ // Edge runtime compatible compression fallback
+ const edgeCompress = async (data: string): Promise<Uint8Array> => {
+   if (typeof CompressionStream !== 'undefined') {
+     const stream = new CompressionStream('gzip');
+     const writer = stream.writable.getWriter();
+     const reader = stream.readable.getReader();
+
+     writer.write(new TextEncoder().encode(data));
+     writer.close();
+
+     const chunks: Uint8Array[] = [];
+     let done = false;
+
+     while (!done) {
+       const { value, done: readerDone } = await reader.read();
+       done = readerDone;
+       if (value) chunks.push(value);
+     }
+
+     const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+     const result = new Uint8Array(totalLength);
+     let offset = 0;
+
+     for (const chunk of chunks) {
+       result.set(chunk, offset);
+       offset += chunk.length;
+     }
+
+     return result;
+   }
+
+   // Node.js fallback
+   const { deflate } = await import('zlib');
+   const { promisify } = await import('util');
+   const deflateAsync = promisify(deflate);
+   const inputBuffer = new TextEncoder().encode(data);
+   const compressed = await deflateAsync(inputBuffer);
+   return new Uint8Array(compressed.buffer, compressed.byteOffset, compressed.byteLength);
+ };
+
+ const edgeDecompress = async (compressed: Uint8Array): Promise<string> => {
+   if (typeof DecompressionStream !== 'undefined') {
+     const stream = new DecompressionStream('gzip');
+     const writer = stream.writable.getWriter();
+     const reader = stream.readable.getReader();
+
+     writer.write(compressed);
+     writer.close();
+
+     const chunks: Uint8Array[] = [];
+     let done = false;
+
+     while (!done) {
+       const { value, done: readerDone } = await reader.read();
+       done = readerDone;
+       if (value) chunks.push(value);
+     }
+
+     const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+     const result = new Uint8Array(totalLength);
+     let offset = 0;
+
+     for (const chunk of chunks) {
+       result.set(chunk, offset);
+       offset += chunk.length;
+     }
+
+     return new TextDecoder().decode(result);
+   }
+
+   // Node.js fallback
+   const { inflate } = await import('zlib');
+   const { promisify } = await import('util');
+   const inflateAsync = promisify(inflate);
+   const inputBuffer = new Uint8Array(compressed.buffer, compressed.byteOffset, compressed.byteLength);
+   const decompressed = await inflateAsync(inputBuffer);
+   return decompressed.toString('utf8');
+ };
+
  const getZstd = async () => {
    if (zstd === null) {
      try {
@@ -18,7 +97,7 @@ const getZstd = async () => {
        logger.debug('ZSTD compression initialized successfully');
      }
    } catch (error) {
-     logger.warn('ZSTD compression failed to initialize', { error });
+     logger.warn('ZSTD compression failed to initialize, will use edge runtime fallback', { error });
      zstd = false;
    }
  }
@@ -403,21 +482,48 @@ export class Cache {
      });

      if (!zstdModule) {
-       // Compression not available, store uncompressed
-       if (expire) {
-         await client.set(key, serializedValue, { EX: expire });
-       } else {
-         await client.set(key, serializedValue);
-       }
+       // Use Edge runtime fallback compression
+       try {
+         const compressed = await edgeCompress(serializedValue);
+         const compressedBase64 = Buffer.from(compressed).toString('base64');

-       success = true;
-       logger.debug(
-         'Redis set success (uncompressed - compression unavailable)',
-         {
+         if (expire) {
+           await client.set(key, compressedBase64, { EX: expire });
+         } else {
+           await client.set(key, compressedBase64);
+         }
+
+         success = true;
+         const compressionRatio = (
+           (1 - compressed.length / serializedValue.length) *
+           100
+         ).toFixed(2);
+         logger.debug('Redis setCompressed success (edge runtime fallback)', {
            key,
-           size: serializedValue.length
+           originalSize: serializedValue.length,
+           compressedSize: compressed.length,
+           compressionRatio: `${compressionRatio}%`,
+           method: 'edge-runtime'
+         });
+       } catch (edgeError) {
+         // If edge compression also fails, store uncompressed
+         logger.warn('Edge runtime compression failed, storing uncompressed', {
+           key,
+           error: edgeError
+         });
+
+         if (expire) {
+           await client.set(key, serializedValue, { EX: expire });
+         } else {
+           await client.set(key, serializedValue);
          }
-       );
+
+         success = true;
+         logger.debug('Redis set success (uncompressed fallback)', {
+           key,
+           size: serializedValue.length
+         });
+       }
      } else {
        try {
          // Use @bokuweb/zstd-wasm API
@@ -501,16 +607,40 @@ export class Cache {
      const zstdModule = await getZstd();

      if (!zstdModule) {
-       // Failed to decompress, try direct JSON parse
-       const rawString = compressed;
-       const parsedData = JSON.parse(rawString);
-       logger.debug(
-         'Data read as uncompressed (no compression available)',
-         {
-           key
+       // Try edge runtime decompression first
+       try {
+         const decompressedString = await edgeDecompress(new Uint8Array(compressedBuffer));
+         value = JSON.parse(decompressedString);
+         logger.debug('Redis getCompressed success (edge runtime fallback)', {
+           key,
+           compressedSize: compressedBuffer.length,
+           decompressedSize: decompressedString.length,
+           method: 'edge-runtime'
+         });
+         return value;
+       } catch (edgeError) {
+         // Failed to decompress with edge runtime, try direct JSON parse
+         logger.debug('Edge runtime decompression failed, trying direct JSON parse', {
+           key,
+           error: edgeError
+         });
+
+         try {
+           const rawString = compressed;
+           const parsedData = JSON.parse(rawString);
+           logger.debug('Data read as uncompressed (no compression available)', {
+             key
+           });
+           return parsedData;
+         } catch (jsonError) {
+           logger.error('Failed to parse data in all formats', {
+             key,
+             edgeError,
+             jsonError
+           });
+           return null;
          }
-       );
-       return parsedData;
+       }
      }

      // Use @bokuweb/zstd-wasm API
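One detail worth flagging about the fallback pair in both files: `CompressionStream('gzip')` emits gzip-framed output (RFC 1952), while zlib's `deflate`/`inflate` use zlib framing (RFC 1950), so bytes written by one branch are not directly readable by the other; the catch-and-reparse paths above are what absorb such mismatches. A small interop sketch, assuming Node 18+ where `CompressionStream` is a global (`gzipViaStream` is an illustrative helper, not from this package): gzip frames produced by the stream API round-trip cleanly through zlib's `gunzipSync`.

```ts
import { gunzipSync } from 'zlib';

// Compress with the WHATWG stream API, decompress with zlib: both speak gzip.
const gzipViaStream = async (text: string): Promise<Buffer> => {
  const stream = new CompressionStream('gzip');
  const writer = stream.writable.getWriter();
  writer.write(new TextEncoder().encode(text));
  writer.close();

  const chunks: Uint8Array[] = [];
  const reader = stream.readable.getReader();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    chunks.push(value);
  }
  return Buffer.concat(chunks);
};

(async () => {
  const original = 'hello '.repeat(512);
  const compressed = await gzipViaStream(original);
  const restored = gunzipSync(compressed).toString('utf8');
  console.log(restored === original); // true — gzip frames are interchangeable
})();
```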
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@akinon/next",
    "description": "Core package for Project Zero Next",
-   "version": "1.93.0-snapshot-ZERO-3586-20250827144743",
+   "version": "1.93.0-snapshot-ZERO-3586-20250827165044",
    "private": false,
    "license": "MIT",
    "bin": {
@@ -35,7 +35,7 @@
      "set-cookie-parser": "2.6.0"
    },
    "devDependencies": {
-     "@akinon/eslint-plugin-projectzero": "1.93.0-snapshot-ZERO-3586-20250827144743",
+     "@akinon/eslint-plugin-projectzero": "1.93.0-snapshot-ZERO-3586-20250827165044",
      "@babel/core": "7.26.10",
      "@babel/preset-env": "7.26.9",
      "@babel/preset-typescript": "7.27.0",