@pioneer-platform/pioneer-cache 1.0.6 → 1.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/__tests__/FIX_VERIFICATION.md +290 -0
- package/__tests__/README.md +219 -0
- package/__tests__/TEST_RESULTS.md +309 -0
- package/__tests__/brpop-issue-reproduction.test.ts +356 -0
- package/__tests__/cache-concurrent-operations.test.ts +393 -0
- package/__tests__/redis-connection-pool.test.ts +374 -0
- package/dist/core/base-cache.js +6 -13
- package/jest.config.js +16 -0
- package/package.json +5 -2
- package/src/core/base-cache.ts +7 -14
|
@@ -0,0 +1,393 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cache Concurrent Operations Stress Tests
|
|
3
|
+
*
|
|
4
|
+
* Tests to reproduce real-world cache usage patterns that cause timeouts:
|
|
5
|
+
* 1. Pioneer cache with 1000ms timeout showing warnings
|
|
6
|
+
* 2. Multiple cache types (price, balance) accessing Redis simultaneously
|
|
7
|
+
* 3. Queue workers + cache operations
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
const { redis, redisQueue } = require('@pioneer-platform/default-redis');
|
|
11
|
+
import { BaseCache } from '../src/core/base-cache';
|
|
12
|
+
import type { CacheConfig, CachedValue, CacheResult } from '../src/types';
|
|
13
|
+
|
|
14
|
+
// Mock cache implementation for testing
|
|
15
|
+
class TestCache extends BaseCache<string> {
|
|
16
|
+
constructor(redis: any, config: Partial<CacheConfig>) {
|
|
17
|
+
const fullConfig: CacheConfig = {
|
|
18
|
+
name: 'test',
|
|
19
|
+
keyPrefix: 'test:',
|
|
20
|
+
ttl: 60000,
|
|
21
|
+
staleThreshold: 30000,
|
|
22
|
+
defaultValue: 'default',
|
|
23
|
+
enableQueue: false,
|
|
24
|
+
queueName: 'test-queue',
|
|
25
|
+
enableLegacyFallback: false,
|
|
26
|
+
blockOnMiss: false,
|
|
27
|
+
enableTTL: true,
|
|
28
|
+
logCacheHits: false,
|
|
29
|
+
logCacheMisses: false,
|
|
30
|
+
logRefreshJobs: false,
|
|
31
|
+
...config
|
|
32
|
+
};
|
|
33
|
+
super(redis, fullConfig);
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
protected buildKey(params: Record<string, any>): string {
|
|
37
|
+
return `${this.config.keyPrefix}${params.id}`;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
protected async fetchFromSource(params: Record<string, any>): Promise<string> {
|
|
41
|
+
// Simulate network delay
|
|
42
|
+
await new Promise(resolve => setTimeout(resolve, 100));
|
|
43
|
+
return `fetched_${params.id}`;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
protected async getLegacyCached(params: Record<string, any>): Promise<string | null> {
|
|
47
|
+
return null;
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
describe('Cache Concurrent Operations Stress Tests', () => {
|
|
52
|
+
let priceCache: TestCache;
|
|
53
|
+
let balanceCache: TestCache;
|
|
54
|
+
|
|
55
|
+
beforeAll(async () => {
|
|
56
|
+
// Initialize multiple cache instances (simulating real usage)
|
|
57
|
+
priceCache = new TestCache(redis, {
|
|
58
|
+
name: 'price',
|
|
59
|
+
keyPrefix: 'test:price:',
|
|
60
|
+
staleThreshold: 10000
|
|
61
|
+
});
|
|
62
|
+
|
|
63
|
+
balanceCache = new TestCache(redis, {
|
|
64
|
+
name: 'balance',
|
|
65
|
+
keyPrefix: 'test:balance:',
|
|
66
|
+
staleThreshold: 5000
|
|
67
|
+
});
|
|
68
|
+
|
|
69
|
+
// Clear test data
|
|
70
|
+
const keys = await redis.keys('test:*');
|
|
71
|
+
if (keys.length > 0) {
|
|
72
|
+
await redis.del(...keys);
|
|
73
|
+
}
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
afterAll(async () => {
|
|
77
|
+
// Cleanup
|
|
78
|
+
const keys = await redis.keys('test:*');
|
|
79
|
+
if (keys.length > 0) {
|
|
80
|
+
await redis.del(...keys);
|
|
81
|
+
}
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
describe('Reproduce Real-World Timeout Scenarios', () => {
  // Reproduces the production warning "Redis timeout after 1000ms" by firing
  // 50 concurrent price lookups and counting any redis.get that exceeds 1s.
  test('should reproduce price cache timeout warnings under load', async () => {
    // Simulate the exact scenario from logs:
    // Multiple concurrent cache.get() calls hitting 1000ms timeout

    const assetIds = Array.from({ length: 50 }, (_, i) => i);
    const startTime = Date.now();
    let timeoutWarnings = 0;

    // Hook into redis.get to detect timeouts.
    // NOTE(review): redis.get is monkey-patched for the duration of this test
    // and restored in the finally block below; this is not safe if other
    // tests run concurrently against the same client.
    const originalGet = redis.get.bind(redis);
    const monitoredGet = async (key: string) => {
      const callStart = Date.now();
      const result = await originalGet(key);
      const duration = Date.now() - callStart;
      if (duration > 1000) {
        timeoutWarnings++;
        console.warn(`⏱️ Redis timeout after ${duration}ms for ${key}`);
      }
      return result;
    };
    redis.get = monitoredGet;

    try {
      // Simulate portfolio loading: 50 concurrent price lookups
      const promises = assetIds.map(id =>
        priceCache.get({ id: `asset_${id}` })
      );

      const results = await Promise.all(promises);
      const totalDuration = Date.now() - startTime;

      console.log(`Portfolio load: ${results.length} prices in ${totalDuration}ms`);
      console.log(`Timeout warnings: ${timeoutWarnings}`);

      expect(results).toHaveLength(50);
      expect(timeoutWarnings).toBe(0); // Should be 0 with proper pooling
    } finally {
      redis.get = originalGet;
    }
  }, 15000);

  // 100 callers request the same uncached key at once; all must resolve
  // within 5s without blocking on the miss.
  test('should handle cache stampede without timeouts', async () => {
    // Reproduce: Multiple users requesting same uncached asset simultaneously
    const assetId = 'popular_asset';
    const concurrentUsers = 100;

    const startTime = Date.now();
    const promises = Array.from({ length: concurrentUsers }, () =>
      priceCache.get({ id: assetId }, false) // Don't block on miss
    );

    const results = await Promise.all(promises);
    const duration = Date.now() - startTime;

    console.log(`Cache stampede: ${concurrentUsers} concurrent requests in ${duration}ms`);

    // All should complete without timeout
    expect(results).toHaveLength(concurrentUsers);
    expect(duration).toBeLessThan(5000);
  }, 10000);

  // Interleaves reads from both cache types with raw Redis list push/pop
  // to mimic mixed production traffic on one connection.
  test('should handle mixed cache types concurrently', async () => {
    // Simulate real usage: price cache + balance cache + queue operations
    const operations = [];

    // 20 price cache operations
    for (let i = 0; i < 20; i++) {
      operations.push(priceCache.get({ id: `asset_${i}` }));
    }

    // 20 balance cache operations
    for (let i = 0; i < 20; i++) {
      operations.push(balanceCache.get({ id: `wallet_${i}` }));
    }

    // 10 queue operations (non-blocking)
    for (let i = 0; i < 10; i++) {
      operations.push(
        redis.lpush(`test:queue:${i}`, `job_${i}`)
          .then(() => redis.rpop(`test:queue:${i}`))
      );
    }

    // NOTE(review): the promises above are already in flight before startTime
    // is sampled, so `duration` slightly under-measures total elapsed time.
    const startTime = Date.now();
    const results = await Promise.all(operations);
    const duration = Date.now() - startTime;

    console.log(`Mixed operations: ${results.length} ops in ${duration}ms`);
    expect(duration).toBeLessThan(5000);
  }, 15000);
});
|
|
176
|
+
|
|
177
|
+
describe('Cache + Queue Worker Interference', () => {
  // Verifies that a blocking BRPOP on the dedicated queue connection
  // (redisQueue) does not stall ordinary cache reads on `redis`.
  test('should not timeout cache operations while queue worker polls', async () => {
    const queueName = 'test:worker:queue';

    // Start simulated queue worker (polls every 5 seconds).
    // BRPOP with a 10s timeout holds the redisQueue connection open —
    // presumably a separate connection from `redis`; that separation is
    // exactly what this test exercises.
    const workerPoll = async () => {
      const result = await redisQueue.brpop(queueName, 10);
      return result;
    };

    // Start worker poll (will block for 10 seconds)
    const workerPromise = workerPoll();

    // Meanwhile, do intensive cache operations
    const cacheOps = [];
    for (let i = 0; i < 100; i++) {
      cacheOps.push(
        priceCache.get({ id: `asset_${i}` })
      );
    }

    const startTime = Date.now();
    const results = await Promise.all(cacheOps);
    const duration = Date.now() - startTime;

    console.log(`Cache ops while worker blocking: ${results.length} in ${duration}ms`);

    // Cleanup: push to queue to unblock worker
    await redis.lpush(queueName, 'cleanup');
    await workerPromise;
    await redis.del(queueName);

    expect(results).toHaveLength(100);
    expect(duration).toBeLessThan(5000); // Should not be blocked by BRPOP
  }, 20000);

  // Drains 10 queued jobs via BRPOP while 50 cache writes proceed in
  // parallel; both sides must finish without interfering.
  test('should handle cache writes while queue processing', async () => {
    const queueName = 'test:process:queue';

    // Fill queue with jobs
    for (let i = 0; i < 10; i++) {
      await redis.lpush(queueName, JSON.stringify({ id: i, type: 'job' }));
    }

    // Process queue in background
    const processQueue = async () => {
      const results = [];
      for (let i = 0; i < 10; i++) {
        const job = await redisQueue.brpop(queueName, 5);
        if (job) {
          results.push(job);
          // Simulate job processing
          await new Promise(resolve => setTimeout(resolve, 50));
        }
      }
      return results;
    };

    const processingPromise = processQueue();

    // While processing, do cache updates
    const cacheUpdates = [];
    for (let i = 0; i < 50; i++) {
      cacheUpdates.push(
        priceCache.updateCache(`test:price:update_${i}`, `value_${i}`)
      );
    }

    const [jobResults, cacheResults] = await Promise.all([
      processingPromise,
      Promise.all(cacheUpdates)
    ]);

    console.log(`Processed ${jobResults.length} jobs while updating ${cacheResults.length} cache entries`);

    expect(jobResults).toHaveLength(10);
    expect(cacheResults).toHaveLength(50);
  }, 15000);
});
|
|
256
|
+
|
|
257
|
+
describe('High Concurrency Stress Tests', () => {
  // 500 simultaneous gets over only 50 distinct keys — a 10x fan-in per key —
  // must all succeed with zero errors inside 10s.
  test('should handle 500 concurrent cache gets', async () => {
    const operations = 500;
    const startTime = Date.now();

    const promises = Array.from({ length: operations }, (_, i) =>
      priceCache.get({ id: `stress_${i % 50}` }) // 50 unique keys, 10x requests each
    );

    const results = await Promise.all(promises);
    const duration = Date.now() - startTime;

    // NOTE(review): assumes CacheResult exposes an `error` field on failure —
    // confirm against src/types.
    const timeoutsOrErrors = results.filter(r => r.error).length;

    console.log(`${operations} concurrent gets in ${duration}ms`);
    console.log(`Errors/timeouts: ${timeoutsOrErrors}`);

    expect(results).toHaveLength(operations);
    expect(timeoutsOrErrors).toBe(0);
    expect(duration).toBeLessThan(10000);
  }, 20000);

  // Issues 100 write+read pairs concurrently and counts completions of each
  // kind via .then side effects.
  test('should handle alternating read/write load', async () => {
    const cycles = 100;
    const startTime = Date.now();
    let writeCount = 0;
    let readCount = 0;

    const operations = [];
    for (let i = 0; i < cycles; i++) {
      // Write
      operations.push(
        priceCache.updateCache(`test:price:rw_${i}`, `value_${i}`)
          .then(() => writeCount++)
      );

      // Read
      operations.push(
        priceCache.get({ id: `rw_${i}` })
          .then(() => readCount++)
      );
    }

    await Promise.all(operations);
    const duration = Date.now() - startTime;

    console.log(`${cycles} read/write cycles: ${writeCount} writes, ${readCount} reads in ${duration}ms`);

    expect(writeCount).toBe(cycles);
    expect(readCount).toBe(cycles);
    expect(duration).toBeLessThan(10000);
  }, 20000);

  // Ten async workers hammer the caches and raw Redis for ~10s; the shared
  // totalOps/errors counters are safe because Node is single-threaded.
  test('should maintain performance under sustained concurrent load', async () => {
    // Run for 10 seconds with constant load
    const testDuration = 10000;
    const startTime = Date.now();
    let totalOps = 0;
    let errors = 0;

    const runConcurrentLoad = async (workerId: number) => {
      while (Date.now() - startTime < testDuration) {
        try {
          // Random operation: 50% price read, 30% balance read, 20% raw set+get
          const op = Math.random();
          if (op < 0.5) {
            // Price cache read
            await priceCache.get({ id: `load_${workerId}_${Math.floor(Math.random() * 10)}` });
          } else if (op < 0.8) {
            // Balance cache read
            await balanceCache.get({ id: `balance_${workerId}` });
          } else {
            // Direct redis operation
            const key = `test:direct:${workerId}`;
            await redis.set(key, `value_${totalOps}`);
            await redis.get(key);
          }
          totalOps++;
        } catch (error) {
          errors++;
        }
      }
    };

    // Run 10 concurrent workers
    const workers = Array.from({ length: 10 }, (_, i) => runConcurrentLoad(i));
    await Promise.all(workers);

    const actualDuration = Date.now() - startTime;
    const opsPerSecond = Math.floor((totalOps / actualDuration) * 1000);

    console.log(`Sustained load test:`);
    console.log(`  Duration: ${actualDuration}ms`);
    console.log(`  Operations: ${totalOps}`);
    console.log(`  Throughput: ${opsPerSecond} ops/sec`);
    console.log(`  Errors: ${errors} (${((errors/totalOps)*100).toFixed(2)}%)`);

    // NOTE(review): the jest timeout (15s) leaves only ~5s of slack over the
    // 10s load window for teardown of in-flight operations.
    expect(errors).toBeLessThan(totalOps * 0.01); // <1% error rate
    expect(opsPerSecond).toBeGreaterThan(100); // At least 100 ops/sec
  }, 15000);
});
|
|
358
|
+
|
|
359
|
+
describe('Cache Timeout Reproduction', () => {
|
|
360
|
+
test('should not timeout with proper connection pooling', async () => {
|
|
361
|
+
// This test specifically reproduces the warning from logs:
|
|
362
|
+
// "Redis timeout after 1000ms, returning cache miss"
|
|
363
|
+
|
|
364
|
+
const iterations = 20;
|
|
365
|
+
let timeouts = 0;
|
|
366
|
+
|
|
367
|
+
for (let batch = 0; batch < iterations; batch++) {
|
|
368
|
+
const batchStart = Date.now();
|
|
369
|
+
|
|
370
|
+
// Simulate cache.get with 1000ms timeout (from base-cache.ts:162)
|
|
371
|
+
const promises = Array.from({ length: 50 }, (_, i) =>
|
|
372
|
+
Promise.race([
|
|
373
|
+
priceCache.get({ id: `timeout_test_${i}` }),
|
|
374
|
+
new Promise((resolve) => setTimeout(() => {
|
|
375
|
+
timeouts++;
|
|
376
|
+
resolve({ timeout: true });
|
|
377
|
+
}, 1000))
|
|
378
|
+
])
|
|
379
|
+
);
|
|
380
|
+
|
|
381
|
+
const results = await Promise.all(promises);
|
|
382
|
+
const batchDuration = Date.now() - batchStart;
|
|
383
|
+
|
|
384
|
+
if (batchDuration > 1000) {
|
|
385
|
+
console.warn(`Batch ${batch} took ${batchDuration}ms`);
|
|
386
|
+
}
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
console.log(`Timeout test: ${timeouts} timeouts in ${iterations * 50} operations`);
|
|
390
|
+
expect(timeouts).toBe(0); // No timeouts expected with proper pool
|
|
391
|
+
}, 30000);
|
|
392
|
+
});
|
|
393
|
+
});
|