@pioneer-platform/pioneer-cache 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +2 -0
- package/README.md +451 -0
- package/dist/core/base-cache.d.ts +75 -0
- package/dist/core/base-cache.js +493 -0
- package/dist/core/cache-manager.d.ts +62 -0
- package/dist/core/cache-manager.js +238 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.js +25 -0
- package/dist/stores/balance-cache.d.ts +47 -0
- package/dist/stores/balance-cache.js +158 -0
- package/dist/stores/price-cache.d.ts +39 -0
- package/dist/stores/price-cache.js +179 -0
- package/dist/stores/transaction-cache.d.ts +42 -0
- package/dist/stores/transaction-cache.js +148 -0
- package/dist/types/index.d.ts +98 -0
- package/dist/types/index.js +5 -0
- package/dist/workers/refresh-worker.d.ts +57 -0
- package/dist/workers/refresh-worker.js +212 -0
- package/package.json +31 -0
- package/src/core/base-cache.ts +595 -0
- package/src/core/cache-manager.ts +293 -0
- package/src/index.ts +36 -0
- package/src/stores/balance-cache.ts +196 -0
- package/src/stores/price-cache.ts +215 -0
- package/src/stores/transaction-cache.ts +172 -0
- package/src/types/index.ts +121 -0
- package/src/workers/refresh-worker.ts +267 -0
- package/tsconfig.json +18 -0
package/src/core/base-cache.ts
@@ -0,0 +1,595 @@
/*
    BaseCache - Abstract base class for all cache implementations

    Contains all shared logic that was duplicated across balance/price caches.
    Specific caches extend this and implement only their unique logic.
*/

import type {
  CacheConfig,
  CachedValue,
  CacheResult,
  HealthCheckResult,
  CacheStats,
  RefreshJob,
  SourceFetcher,
  KeyBuilder,
  LegacyKeyPattern
} from '../types';

const log = require('@pioneer-platform/loggerdog')();

export abstract class BaseCache<T> {
  protected redis: any;
  protected redisQueue: any;
  protected config: CacheConfig;
  protected queueInitialized: boolean = false;
  protected TAG: string;

  // Cache for stats (to avoid expensive SCAN operations on every health check)
  private cachedStats: CacheStats | null = null;
  private cachedStatsTimestamp: number = 0;
  private readonly STATS_CACHE_TTL = 30000; // 30 seconds

  constructor(redis: any, config: CacheConfig) {
    this.redis = redis;
    this.config = config;
    this.TAG = ` | ${config.name}Cache | `;

    // Initialize queue if enabled
    if (config.enableQueue) {
      this.initializeQueue();
    } else {
      log.info(this.TAG, `Queue disabled for ${config.name} cache`);
    }
  }

  /**
   * Initialize Redis queue for background refresh
   */
  private initializeQueue(): void {
    try {
      const redisQueueModule = require('@pioneer-platform/redis-queue');
      redisQueueModule.init(this.config.queueName);
      this.redisQueue = redisQueueModule;
      this.queueInitialized = true;
      log.info(this.TAG, `✅ Queue initialized: ${this.config.queueName}`);
    } catch (error) {
      // CRITICAL FIX #3: Make queue failures VERY visible
      log.error(this.TAG, `❌ CRITICAL: Failed to initialize queue '${this.config.queueName}'!`);
      log.error(this.TAG, 'Background refresh is DISABLED - cache will NOT update automatically!');
      log.error(this.TAG, 'Error:', error);
      this.redisQueue = null;
      this.queueInitialized = false;
    }
  }

  /**
   * Main cache get method
   * Implements stale-while-revalidate pattern with optional blocking
   */
  async get(params: Record<string, any>, waitForFresh?: boolean): Promise<CacheResult<T>> {
    const tag = this.TAG + 'get | ';
    const startTime = Date.now();

    try {
      const key = this.buildKey(params);

      // Step 1: Try new cache format
      const cachedValue = await this.getCached(key);

      if (cachedValue) {
        const age = Date.now() - cachedValue.timestamp;
        const responseTime = Date.now() - startTime;

        if (this.config.logCacheHits) {
          log.debug(tag, `Cache hit: ${key} (${responseTime}ms, age: ${age}ms)`);
        }

        // Check staleness async (don't wait)
        if (this.config.staleThreshold && age > this.config.staleThreshold) {
          this.triggerAsyncRefresh(params, 'normal');
        }

        return {
          success: true,
          value: cachedValue.value,
          cached: true,
          fresh: this.config.staleThreshold ? age <= this.config.staleThreshold : true,
          age
        };
      }

      // Step 2: Try legacy cache fallback
      if (this.config.enableLegacyFallback) {
        const legacyValue = await this.getLegacyCached(params);
        if (legacyValue) {
          const responseTime = Date.now() - startTime;
          log.info(tag, `Legacy cache hit: ${key} (${responseTime}ms)`);

          // Migrate to new format (async)
          this.migrateLegacyValue(key, legacyValue);

          return {
            success: true,
            value: legacyValue,
            cached: true,
            fresh: false, // Legacy data is considered stale
            age: undefined
          };
        }
      }

      // Step 3: Cache miss
      const responseTime = Date.now() - startTime;
      if (this.config.logCacheMisses) {
        log.info(tag, `Cache miss: ${key} (${responseTime}ms)`);
      }

      // FIX #1: Optional blocking on cache miss
      const shouldBlock = waitForFresh !== undefined ? waitForFresh : this.config.blockOnMiss;
      if (shouldBlock) {
        log.info(tag, `Blocking to fetch fresh data: ${key}`);
        const freshValue = await this.fetchFresh(params);
        return {
          success: true,
          value: freshValue,
          cached: false,
          fresh: true,
          age: 0
        };
      }

      // Non-blocking: trigger async refresh and return default
      this.triggerAsyncRefresh(params, 'high');

      return {
        success: true,
        value: this.config.defaultValue,
        cached: false,
        fresh: false,
        age: undefined
      };

    } catch (error) {
      log.error(tag, 'Error getting cache value:', error);
      return {
        success: false,
        value: this.config.defaultValue,
        cached: false,
        fresh: false,
        error: error instanceof Error ? error.message : String(error)
      };
    }
  }

  /**
   * Get value from cache
   */
  protected async getCached(key: string): Promise<CachedValue<T> | null> {
    const tag = this.TAG + 'getCached | ';

    try {
      const cached = await this.redis.get(key);
      if (!cached) {
        return null;
      }

      const parsed: CachedValue<T> = JSON.parse(cached);

      // Validate structure
      if (!parsed.value || typeof parsed.timestamp !== 'number') {
        log.warn(tag, `Invalid cache structure for ${key}, removing`);
        await this.redis.del(key);
        return null;
      }

      return parsed;

    } catch (error) {
      log.error(tag, `Error parsing cached value for ${key}:`, error);
      return null;
    }
  }

  /**
   * Update cache with new value
   * FIX #2: Always includes TTL
   */
  async updateCache(key: string, value: T, metadata?: Record<string, any>): Promise<void> {
    const tag = this.TAG + 'updateCache | ';

    try {
      const cachedValue: CachedValue<T> = {
        value,
        timestamp: Date.now(),
        source: 'network',
        lastUpdated: new Date().toISOString(),
        metadata
      };

      // FIX #2: Always set TTL (unless explicitly disabled)
      if (this.config.enableTTL) {
        const ttlSeconds = Math.floor(this.config.ttl / 1000);
        await this.redis.set(key, JSON.stringify(cachedValue), 'EX', ttlSeconds);
        log.info(tag, `Updated cache: ${key} [TTL: ${ttlSeconds}s]`);
      } else {
        // Permanent caching (for transactions)
        await this.redis.set(key, JSON.stringify(cachedValue));
        log.info(tag, `Updated cache: ${key} [PERMANENT]`);
      }

    } catch (error) {
      log.error(tag, `Error updating cache for ${key}:`, error);
      throw error;
    }
  }

  /**
   * Trigger background refresh job
   * FIX #3: Loud error logging
   * FIX #4: Synchronous fallback for high-priority
   */
  protected triggerAsyncRefresh(params: Record<string, any>, priority: 'high' | 'normal' | 'low' = 'normal'): void {
    const tag = this.TAG + 'triggerAsyncRefresh | ';

    try {
      // Check if queue is enabled
      if (!this.config.enableQueue) {
        return; // Queue disabled, no background refresh
      }

      // FIX #3: Fail loudly if queue not initialized
      if (!this.queueInitialized || !this.redisQueue) {
        const key = this.buildKey(params);
        log.error(tag, `❌ QUEUE NOT INITIALIZED! Cannot refresh ${key}`);
        log.error(tag, `Background refresh is BROKEN - cache will NOT update!`);

        // FIX #4: Synchronous fallback for high-priority
        if (priority === 'high') {
          log.warn(tag, `Using synchronous fallback for high-priority refresh`);
          setImmediate(async () => {
            try {
              await this.fetchFresh(params);
            } catch (error) {
              log.error(tag, `Synchronous fallback failed:`, error);
            }
          });
        }
        return;
      }

      const job: RefreshJob = {
        type: `REFRESH_${this.config.name.toUpperCase()}`,
        key: this.buildKey(params),
        params,
        priority,
        retryCount: 0,
        timestamp: Date.now()
      };

      // Queue job async (don't wait)
      setImmediate(async () => {
        try {
          await this.redisQueue.createWork(this.config.queueName, job);

          if (this.config.logRefreshJobs) {
            log.debug(tag, `Queued refresh job: ${job.key} (priority: ${priority})`);
          }

        } catch (error) {
          log.error(tag, `Error queuing refresh job:`, error);
        }
      });

    } catch (error) {
      log.error(tag, `Error triggering refresh:`, error);
    }
  }

  /**
   * Fetch fresh data and update cache
   * FIX #1 & #4: Used for blocking requests and fallback
   */
  async fetchFresh(params: Record<string, any>): Promise<T> {
    const tag = this.TAG + 'fetchFresh | ';
    const startTime = Date.now();

    try {
      const key = this.buildKey(params);
      log.info(tag, `Fetching fresh data: ${key}`);

      // Call subclass-specific fetch implementation
      const value = await this.fetchFromSource(params);

      // Update cache
      await this.updateCache(key, value);

      const fetchTime = Date.now() - startTime;
      log.info(tag, `✅ Fetched fresh data in ${fetchTime}ms: ${key}`);

      return value;

    } catch (error) {
      const fetchTime = Date.now() - startTime;
      log.error(tag, `Failed to fetch fresh data after ${fetchTime}ms:`, error);
      return this.config.defaultValue;
    }
  }

  /**
   * Migrate legacy cache value to new format
   * FIX #2: Includes TTL
   */
  protected migrateLegacyValue(key: string, value: T): void {
    const tag = this.TAG + 'migrateLegacyValue | ';

    setImmediate(async () => {
      try {
        const cachedValue: CachedValue<T> = {
          value,
          timestamp: Date.now() - (this.config.staleThreshold || this.config.ttl), // Mark as stale
          source: 'legacy',
          lastUpdated: new Date().toISOString()
        };

        // FIX #2: Set TTL on migrated data
        if (this.config.enableTTL) {
          const ttlSeconds = Math.floor(this.config.ttl / 1000);
          await this.redis.set(key, JSON.stringify(cachedValue), 'EX', ttlSeconds);
          log.debug(tag, `Migrated legacy value: ${key} [TTL: ${ttlSeconds}s]`);
        } else {
          await this.redis.set(key, JSON.stringify(cachedValue));
          log.debug(tag, `Migrated legacy value: ${key} [PERMANENT]`);
        }

      } catch (error) {
        log.error(tag, `Error migrating legacy value:`, error);
      }
    });
  }

  /**
   * Get cache statistics
   * @param forceRefresh - Skip cache and fetch fresh stats (for testing/debugging)
   */
  async getCacheStats(forceRefresh: boolean = false): Promise<CacheStats> {
    const tag = this.TAG + 'getCacheStats | ';

    // Return cached stats if fresh (unless forceRefresh requested)
    if (!forceRefresh && this.cachedStats && this.cachedStatsTimestamp) {
      const age = Date.now() - this.cachedStatsTimestamp;
      if (age < this.STATS_CACHE_TTL) {
        log.debug(tag, `Returning cached stats (age: ${age}ms)`);
        return this.cachedStats;
      }
    }

    try {
      const pattern = this.config.keyPrefix + '*';
      const keys: string[] = [];

      // Use SCAN instead of KEYS for non-blocking iteration
      // SCAN is production-safe and doesn't block Redis
      let cursor = '0';
      const maxKeys = 20; // Sample up to 20 keys for stats (performance)

      do {
        // SCAN with MATCH pattern, COUNT 50 per iteration for faster collection
        const result = await this.redis.scan(
          cursor,
          'MATCH', pattern,
          'COUNT', 50
        );

        cursor = result[0]; // Next cursor position
        const foundKeys = result[1]; // Array of matching keys

        keys.push(...foundKeys);

        // Stop if we have enough samples or completed scan
        if (keys.length >= maxKeys || cursor === '0') {
          break;
        }
      } while (cursor !== '0');

      let totalEntries = 0;
      let staleEntries = 0;
      let entriesWithoutTTL = 0;
      const sources: Record<string, number> = {};

      // Analyze sampled keys
      const sampled = keys.slice(0, maxKeys);

      for (const key of sampled) {
        try {
          const cached = await this.redis.get(key);
          if (cached) {
            const parsed: CachedValue<T> = JSON.parse(cached);
            totalEntries++;

            // Check staleness
            if (this.config.staleThreshold) {
              const age = Date.now() - parsed.timestamp;
              if (age > this.config.staleThreshold) {
                staleEntries++;
              }
            }

            // Track sources
            if (parsed.source) {
              sources[parsed.source] = (sources[parsed.source] || 0) + 1;
            }

            // Check TTL
            if (this.config.enableTTL) {
              const ttl = await this.redis.ttl(key);
              if (ttl === -1) {
                entriesWithoutTTL++;
              }
            }
          }
        } catch (parseError) {
          log.warn(tag, `Invalid cache entry: ${key}`);
        }
      }

      const freshEntries = totalEntries - staleEntries;
      const stalenessRate = totalEntries > 0
        ? ((staleEntries / totalEntries) * 100).toFixed(1) + '%'
        : '0%';

      const stats: CacheStats = {
        totalEntries: keys.length, // Sampled keys count (≤20)
        staleEntries,
        freshEntries,
        stalenessRate,
        sources,
        entriesWithoutTTL: this.config.enableTTL ? entriesWithoutTTL : undefined,
        ttl: this.config.ttl,
        staleThreshold: this.config.staleThreshold
      };

      // Cache the stats for future requests
      this.cachedStats = stats;
      this.cachedStatsTimestamp = Date.now();
      log.debug(tag, `Stats refreshed and cached`);

      return stats;

    } catch (error) {
      log.error(tag, 'Error getting cache stats:', error);
      return {
        totalEntries: 0,
        staleEntries: 0,
        freshEntries: 0,
        stalenessRate: '0%'
      };
    }
  }

  /**
   * FIX #5: Health check
   * @param forceRefresh - Force refresh stats (bypasses 30s cache)
   */
  async getHealth(forceRefresh: boolean = false): Promise<HealthCheckResult> {
    const tag = this.TAG + 'getHealth | ';

    try {
      const issues: string[] = [];
      const warnings: string[] = [];

      // Check queue initialization
      if (this.config.enableQueue && !this.queueInitialized) {
        issues.push('Queue not initialized - background refresh disabled');
      }

      // Check Redis connection
      try {
        await this.redis.ping();
      } catch (error) {
        issues.push('Redis connection failed');
      }

      // Get cache stats (with optional force refresh)
      const stats = await this.getCacheStats(forceRefresh);

      // Check for entries without TTL
      if (stats.entriesWithoutTTL && stats.entriesWithoutTTL > 0) {
        issues.push(`${stats.entriesWithoutTTL} cache entries without TTL detected`);
      }

      // Check staleness rate
      if (this.config.staleThreshold) {
        const stalenessRate = parseFloat(String(stats.stalenessRate).replace('%', ''));
        if (stalenessRate > 50) {
          issues.push(`High staleness rate: ${stalenessRate}%`);
        } else if (stalenessRate > 30) {
          warnings.push(`Elevated staleness rate: ${stalenessRate}%`);
        }
      }

      // Determine overall status
      let status: 'healthy' | 'degraded' | 'unhealthy' = 'healthy';
      if (issues.length > 0) {
        status = 'unhealthy';
      } else if (warnings.length > 0) {
        status = 'degraded';
      }

      return {
        status,
        queueInitialized: this.queueInitialized,
        redisConnected: true,
        stats,
        issues,
        warnings,
        timestamp: Date.now(),
        timestampISO: new Date().toISOString()
      };

    } catch (error) {
      log.error(tag, 'Health check failed:', error);
      return {
        status: 'unhealthy',
        queueInitialized: this.queueInitialized,
        redisConnected: false,
        stats: {
          totalEntries: 0,
          staleEntries: 0,
          freshEntries: 0,
          stalenessRate: '0%'
        },
        issues: [`Health check error: ${error instanceof Error ? error.message : String(error)}`],
        warnings: [],
        timestamp: Date.now(),
        timestampISO: new Date().toISOString()
      };
    }
  }

  /**
   * Clear all cache entries (use with caution)
   */
  async clearAll(): Promise<number> {
    const tag = this.TAG + 'clearAll | ';

    try {
      const pattern = this.config.keyPrefix + '*';
      const keys = await this.redis.keys(pattern);

      if (keys.length === 0) {
        log.info(tag, 'No cache entries to clear');
        return 0;
      }

      await this.redis.del(...keys);
      log.info(tag, `Cleared ${keys.length} cache entries`);
      return keys.length;

    } catch (error) {
      log.error(tag, 'Error clearing cache:', error);
      return 0;
    }
  }

  // ========== ABSTRACT METHODS (must be implemented by subclasses) ==========

  /**
   * Build Redis key from parameters
   * Each cache type implements its own key structure
   */
  protected abstract buildKey(params: Record<string, any>): string;

  /**
   * Fetch data from source (blockchain, API, etc.)
   * Each cache type implements its own data fetching
   */
  protected abstract fetchFromSource(params: Record<string, any>): Promise<T>;

  /**
   * Get legacy cached value (optional)
   * Each cache type can implement legacy key migration
   */
  protected abstract getLegacyCached(params: Record<string, any>): Promise<T | null>;
}
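For orientation, the sketch below shows how a concrete store can extend BaseCache by supplying a config and implementing the three abstract methods. The class name, key scheme, config values, and fetch logic are hypothetical illustrations only; the package's real stores live under package/src/stores/ (balance-cache.ts, price-cache.ts, transaction-cache.ts), and the exact CacheConfig shape is defined in package/src/types/index.ts.

import { BaseCache } from '../core/base-cache';
import type { CacheConfig } from '../types';

// Hypothetical value shape - for illustration only.
interface ExamplePrice {
  asset: string;
  priceUsd: number;
}

export class ExamplePriceCache extends BaseCache<ExamplePrice> {
  constructor(redis: any) {
    // Assumed config fields, inferred from how base-cache.ts reads this.config.
    const config = {
      name: 'examplePrice',
      keyPrefix: 'cache:example-price:',
      ttl: 5 * 60 * 1000,            // evict after 5 minutes
      staleThreshold: 60 * 1000,     // background refresh once older than 1 minute
      enableTTL: true,
      enableQueue: true,
      queueName: 'example-price-refresh',
      blockOnMiss: false,
      defaultValue: { asset: '', priceUsd: 0 },
      enableLegacyFallback: false,
      logCacheHits: false,
      logCacheMisses: true,
      logRefreshJobs: true
    } as CacheConfig;
    super(redis, config);
  }

  // Each store defines its own key structure; here one key per asset.
  protected buildKey(params: Record<string, any>): string {
    return `${this.config.keyPrefix}${params.asset}`;
  }

  // Placeholder source fetch; a real store would call an API or node here.
  protected async fetchFromSource(params: Record<string, any>): Promise<ExamplePrice> {
    return { asset: params.asset, priceUsd: 0 };
  }

  // This hypothetical store has no legacy key format to migrate from.
  protected async getLegacyCached(_params: Record<string, any>): Promise<ExamplePrice | null> {
    return null;
  }
}

A non-blocking store like this returns config.defaultValue on a miss and lets the background refresh worker fill the cache; setting blockOnMiss to true (or passing waitForFresh to get()) makes the first read wait for fetchFromSource instead.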