@pioneer-platform/pioneer-cache 1.0.1 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +2 -1
- package/CHANGELOG.md +6 -0
- package/dist/core/base-cache.d.ts +1 -0
- package/dist/core/base-cache.js +73 -9
- package/dist/core/cache-manager.d.ts +6 -1
- package/dist/core/cache-manager.js +26 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +3 -1
- package/dist/stores/balance-cache.d.ts +1 -0
- package/dist/stores/balance-cache.js +63 -10
- package/dist/stores/portfolio-cache.d.ts +79 -0
- package/dist/stores/portfolio-cache.js +189 -0
- package/dist/stores/price-cache.d.ts +1 -1
- package/dist/stores/price-cache.js +35 -15
- package/dist/types/index.d.ts +1 -0
- package/package.json +1 -1
- package/src/core/base-cache.ts +84 -9
- package/src/core/cache-manager.ts +34 -2
- package/src/index.ts +2 -0
- package/src/stores/balance-cache.ts +69 -10
- package/src/stores/portfolio-cache.ts +244 -0
- package/src/stores/price-cache.ts +38 -15
- package/src/types/index.ts +1 -0
- package/test/redis-persistence.test.ts +265 -0
package/.turbo/turbo-build.log
CHANGED

@@ -1 +1,2 @@
-
+
+$ tsc
package/CHANGELOG.md
CHANGED

package/dist/core/base-cache.d.ts
CHANGED

@@ -38,6 +38,7 @@ export declare abstract class BaseCache<T> {
      * Fetch fresh data and update cache
      * FIX #1 & #4: Used for blocking requests and fallback
      * FIX #6: Request deduplication to prevent thundering herd
+     * FIX #8: Return stale cache on fetch failures
      */
     fetchFresh(params: Record<string, any>): Promise<T>;
     /**
package/dist/core/base-cache.js
CHANGED

@@ -57,9 +57,13 @@ class BaseCache {
         const tag = this.TAG + 'get | ';
         const startTime = Date.now();
         try {
+            const t1 = Date.now();
             const key = this.buildKey(params);
+            log.info(tag, `⏱️ buildKey took ${Date.now() - t1}ms`);
             // Step 1: Try new cache format
+            const t2 = Date.now();
             const cachedValue = await this.getCached(key);
+            log.info(tag, `⏱️ getCached took ${Date.now() - t2}ms`);
             if (cachedValue) {
                 const age = Date.now() - cachedValue.timestamp;
                 const responseTime = Date.now() - startTime;
@@ -80,7 +84,9 @@
             }
             // Step 2: Try legacy cache fallback
             if (this.config.enableLegacyFallback) {
+                const t3 = Date.now();
                 const legacyValue = await this.getLegacyCached(params);
+                log.info(tag, `⏱️ getLegacyCached took ${Date.now() - t3}ms`);
                 if (legacyValue) {
                     const responseTime = Date.now() - startTime;
                     log.info(tag, `Legacy cache hit: ${key} (${responseTime}ms)`);
@@ -114,7 +120,10 @@
             };
         }
         // Non-blocking: trigger async refresh and return default
+        const t4 = Date.now();
         this.triggerAsyncRefresh(params, 'high');
+        log.info(tag, `⏱️ triggerAsyncRefresh took ${Date.now() - t4}ms`);
+        log.info(tag, `⏱️ Returning default value after ${Date.now() - startTime}ms TOTAL`);
         return {
             success: true,
             value: this.config.defaultValue,
@@ -139,18 +148,32 @@ class BaseCache {
      */
     async getCached(key) {
         const tag = this.TAG + 'getCached | ';
+        const t0 = Date.now();
         try {
-
+            // Redis timeout for cache reads
+            // Increased from 100ms to 2000ms - 100ms was too aggressive and caused false cache misses
+            // Increased from 2000ms to 10000ms - IPv4/IPv6 DNS resolution can cause delays
+            const timeoutMs = 10000;
+            const cached = await Promise.race([
+                this.redis.get(key),
+                new Promise((resolve) => setTimeout(() => {
+                    log.warn(tag, `⏱️ Redis timeout after ${timeoutMs}ms, returning cache miss`);
+                    resolve(null);
+                }, timeoutMs))
+            ]);
             if (!cached) {
+                log.debug(tag, `Cache miss: ${key}`);
                 return null;
             }
             const parsed = JSON.parse(cached);
-            // Validate structure
-
+            // Validate structure - Check for undefined/null, NOT falsy values!
+            // CRITICAL: Balance "0", empty arrays [], and empty objects {} are VALID!
+            if (parsed.value === undefined || parsed.value === null || typeof parsed.timestamp !== 'number') {
                 log.warn(tag, `Invalid cache structure for ${key}, removing`);
                 await this.redis.del(key);
                 return null;
             }
+            log.debug(tag, `Cache hit: ${key}`);
             return parsed;
         }
         catch (error) {
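
The new getCached body races the Redis read against a timer so that a slow or wedged connection degrades to a cache miss instead of stalling the caller. A minimal standalone sketch of the same pattern, assuming an ioredis-style client (getWithTimeout is an illustrative helper, not part of this package):

    import Redis from 'ioredis';

    // Illustrative helper: resolve to null (a cache miss) if Redis does not answer in time.
    async function getWithTimeout(redis: Redis, key: string, timeoutMs = 10000): Promise<string | null> {
        let timer: NodeJS.Timeout | undefined;
        const timeout = new Promise<null>((resolve) => {
            timer = setTimeout(() => resolve(null), timeoutMs);
        });
        try {
            return await Promise.race([redis.get(key), timeout]);
        } finally {
            clearTimeout(timer); // stop the timer once Redis answers first
        }
    }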
@@ -172,20 +195,29 @@ class BaseCache {
                 lastUpdated: new Date().toISOString(),
                 metadata
             };
+            // DIAGNOSTIC: Log Redis connection details
+            const redisConstructor = this.redis.constructor?.name || 'unknown';
+            const redisOptions = this.redis.options || {};
             // FIX #2: Always set TTL (unless explicitly disabled)
             if (this.config.enableTTL) {
                 const ttlSeconds = Math.floor(this.config.ttl / 1000);
+                // PERF: Reduced logging for production performance
+                log.debug(tag, `📝 Writing to Redis: ${key} [${JSON.stringify(cachedValue).length} bytes, TTL: ${ttlSeconds}s]`);
+                // Write
                 await this.redis.set(key, JSON.stringify(cachedValue), 'EX', ttlSeconds);
-
+                // PERF: Verification disabled for production performance
+                // Only enable for debugging cache issues
+                log.debug(tag, `✅ Updated cache: ${key} [TTL: ${ttlSeconds}s]`);
             }
             else {
                 // Permanent caching (for transactions)
                 await this.redis.set(key, JSON.stringify(cachedValue));
-
+                // PERF: Verification disabled for production performance
+                log.debug(tag, `✅ Updated cache: ${key} [PERMANENT]`);
             }
         }
         catch (error) {
-            log.error(tag,
+            log.error(tag, `❌ Error updating cache for ${key}:`, error);
             throw error;
         }
     }
@@ -206,8 +238,12 @@ class BaseCache {
             const key = this.buildKey(params);
             log.error(tag, `❌ QUEUE NOT INITIALIZED! Cannot refresh ${key}`);
             log.error(tag, `Background refresh is BROKEN - cache will NOT update!`);
-            // FIX #4: Synchronous fallback for high-priority
-
+            // FIX #4: Synchronous fallback for high-priority (only if useSyncFallback is enabled)
+            // Default: use sync fallback only for blocking caches (blockOnMiss=true)
+            const shouldUseSyncFallback = this.config.useSyncFallback !== undefined
+                ? this.config.useSyncFallback
+                : this.config.blockOnMiss;
+            if (priority === 'high' && shouldUseSyncFallback) {
                 log.warn(tag, `Using synchronous fallback for high-priority refresh`);
                 setImmediate(async () => {
                     try {
@@ -249,6 +285,7 @@ class BaseCache {
      * Fetch fresh data and update cache
      * FIX #1 & #4: Used for blocking requests and fallback
      * FIX #6: Request deduplication to prevent thundering herd
+     * FIX #8: Return stale cache on fetch failures
      */
     async fetchFresh(params) {
         const tag = this.TAG + 'fetchFresh | ';
@@ -257,6 +294,12 @@ class BaseCache {
         // FIX #6: Check if there's already a pending fetch for this key
         const existingFetch = this.pendingFetches.get(key);
         if (existingFetch) {
+            // For non-blocking caches, return default value immediately instead of waiting
+            if (!this.config.blockOnMiss) {
+                log.debug(tag, `Non-blocking cache: returning default value while fetch in progress: ${key}`);
+                return this.config.defaultValue;
+            }
+            // For blocking caches, coalesce to prevent thundering herd
             log.debug(tag, `Coalescing request for: ${key} (fetch already in progress)`);
             return existingFetch;
         }
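
The coalescing branch above depends on a map of in-flight fetches keyed by cache key, so concurrent callers for the same key share one upstream request. A reduced sketch of that deduplication idea, separate from this package's BaseCache internals:

    // Illustrative standalone deduplicator: concurrent callers for the same key share one fetch.
    const pendingFetches = new Map<string, Promise<unknown>>();

    async function dedupedFetch<T>(key: string, fetcher: () => Promise<T>): Promise<T> {
        const existing = pendingFetches.get(key);
        if (existing) {
            return existing as Promise<T>; // coalesce onto the in-flight promise
        }
        const inFlight = fetcher().finally(() => pendingFetches.delete(key)); // always clean up
        pendingFetches.set(key, inFlight);
        return inFlight;
    }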
@@ -274,7 +317,28 @@ class BaseCache {
         }
         catch (error) {
             const fetchTime = Date.now() - startTime;
-
+            const errorMsg = error instanceof Error ? error.message : String(error);
+            // FIX #8: Try to return stale cache on fetch failures
+            const cachedValue = await this.getCached(key);
+            if (cachedValue) {
+                log.warn(tag, `Fetch failed after ${fetchTime}ms, returning stale cache: ${key}`);
+                return cachedValue.value;
+            }
+            // Try legacy cache as last resort
+            if (this.config.enableLegacyFallback) {
+                const legacyValue = await this.getLegacyCached(params);
+                if (legacyValue) {
+                    log.warn(tag, `Fetch failed after ${fetchTime}ms, returning legacy cache: ${key}`);
+                    return legacyValue;
+                }
+            }
+            // Log as warning for expected issues, error for unexpected
+            if (errorMsg.includes('rate limit') || errorMsg.includes('timeout') || errorMsg.includes('No valid')) {
+                log.warn(tag, `Expected fetch failure after ${fetchTime}ms: ${errorMsg}`);
+            }
+            else {
+                log.error(tag, `Unexpected fetch failure after ${fetchTime}ms:`, error);
+            }
             return this.config.defaultValue;
         }
         finally {
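
FIX #8 amounts to a serve-stale-on-error policy: when the upstream fetch throws, a previously cached (possibly stale) value is preferred over the configured default. The shape of that fallback, sketched with illustrative names rather than the package's own API:

    // Illustrative only: prefer stale cached data over an empty default when the source fails.
    async function fetchWithStaleFallback<T>(
        fetchSource: () => Promise<T>,
        readCache: () => Promise<T | null>,
        defaultValue: T,
    ): Promise<T> {
        try {
            return await fetchSource();
        } catch {
            const stale = await readCache();
            return stale ?? defaultValue; // stale data beats nothing
        }
    }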
package/dist/core/cache-manager.d.ts
CHANGED

@@ -1,5 +1,6 @@
 import { BalanceCache } from '../stores/balance-cache';
 import { PriceCache } from '../stores/price-cache';
+import { PortfolioCache } from '../stores/portfolio-cache';
 import { TransactionCache } from '../stores/transaction-cache';
 import type { HealthCheckResult } from '../types';
 /**
@@ -11,6 +12,7 @@ export interface CacheManagerConfig {
     markets?: any;
     enableBalanceCache?: boolean;
     enablePriceCache?: boolean;
+    enablePortfolioCache?: boolean;
     enableTransactionCache?: boolean;
     startWorkers?: boolean;
 }
@@ -21,6 +23,7 @@ export declare class CacheManager {
     private redis;
     private balanceCache?;
     private priceCache?;
+    private portfolioCache?;
     private transactionCache?;
     private workers;
     constructor(config: CacheManagerConfig);
@@ -45,18 +48,20 @@ export declare class CacheManager {
     getCaches(): {
         balance: BalanceCache | undefined;
         price: PriceCache | undefined;
+        portfolio: PortfolioCache | undefined;
         transaction: TransactionCache | undefined;
     };
     /**
      * Get specific cache by name
      */
-    getCache(name: 'balance' | 'price' | 'transaction'): BalanceCache | PriceCache | TransactionCache | undefined;
+    getCache(name: 'balance' | 'price' | 'portfolio' | 'transaction'): BalanceCache | PriceCache | PortfolioCache | TransactionCache | undefined;
     /**
      * Clear all caches (use with caution!)
      */
     clearAll(): Promise<{
         balance?: number;
         price?: number;
+        portfolio?: number;
         transaction?: number;
     }>;
 }
package/dist/core/cache-manager.js
CHANGED

@@ -9,6 +9,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.CacheManager = void 0;
 const balance_cache_1 = require("../stores/balance-cache");
 const price_cache_1 = require("../stores/price-cache");
+const portfolio_cache_1 = require("../stores/portfolio-cache");
 const transaction_cache_1 = require("../stores/transaction-cache");
 const refresh_worker_1 = require("../workers/refresh-worker");
 const log = require('@pioneer-platform/loggerdog')();
@@ -30,6 +31,11 @@ class CacheManager {
             this.priceCache = new price_cache_1.PriceCache(this.redis, config.markets);
             log.info(TAG, '✅ Price cache initialized');
         }
+        // Initialize Portfolio Cache
+        if (config.enablePortfolioCache !== false && config.balanceModule && config.markets) {
+            this.portfolioCache = new portfolio_cache_1.PortfolioCache(this.redis, config.balanceModule, config.markets);
+            log.info(TAG, '✅ Portfolio cache initialized');
+        }
         // Initialize Transaction Cache
         if (config.enableTransactionCache !== false) {
             this.transactionCache = new transaction_cache_1.TransactionCache(this.redis);
@@ -58,6 +64,9 @@ class CacheManager {
         if (this.priceCache) {
             cacheRegistry.set('price', this.priceCache);
         }
+        if (this.portfolioCache) {
+            cacheRegistry.set('portfolio', this.portfolioCache);
+        }
         // Start unified worker if we have any caches with queues
         if (cacheRegistry.size > 0) {
             const worker = await (0, refresh_worker_1.startUnifiedWorker)(this.redis, cacheRegistry, 'cache-refresh', // Unified queue name
@@ -125,6 +134,17 @@ class CacheManager {
                 warnings.push(...priceHealth.warnings.map(w => `Price: ${w}`));
             }
         }
+        // Check portfolio cache
+        if (this.portfolioCache) {
+            const portfolioHealth = await this.portfolioCache.getHealth(forceRefresh);
+            checks.portfolio = portfolioHealth;
+            if (portfolioHealth.status === 'unhealthy') {
+                issues.push(...portfolioHealth.issues.map(i => `Portfolio: ${i}`));
+            }
+            else if (portfolioHealth.status === 'degraded') {
+                warnings.push(...portfolioHealth.warnings.map(w => `Portfolio: ${w}`));
+            }
+        }
         // Check transaction cache (simple stats check)
         if (this.transactionCache) {
             const txStats = await this.transactionCache.getStats();
@@ -192,6 +212,7 @@ class CacheManager {
         return {
             balance: this.balanceCache,
             price: this.priceCache,
+            portfolio: this.portfolioCache,
             transaction: this.transactionCache
         };
     }
@@ -204,6 +225,8 @@ class CacheManager {
                 return this.balanceCache;
             case 'price':
                 return this.priceCache;
+            case 'portfolio':
+                return this.portfolioCache;
             case 'transaction':
                 return this.transactionCache;
             default:
@@ -223,6 +246,9 @@ class CacheManager {
         if (this.priceCache) {
             result.price = await this.priceCache.clearAll();
         }
+        if (this.portfolioCache) {
+            result.portfolio = await this.portfolioCache.clearAll();
+        }
         if (this.transactionCache) {
             result.transaction = await this.transactionCache.clearAll();
         }
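
With these changes the portfolio cache follows the same lifecycle as the balance and price caches: it is constructed only when both balanceModule and markets are supplied, registered with the unified refresh worker, included in health checks, and covered by clearAll(). A hedged usage sketch; only enablePortfolioCache, balanceModule, and markets are confirmed by this diff, and the remaining config fields and module shapes are assumptions:

    import Redis from 'ioredis';
    import { CacheManager } from '@pioneer-platform/pioneer-cache';

    declare const balanceModule: any; // app-specific balance module (shape not shown in this diff)
    declare const markets: any;       // app-specific markets/pricing module

    const manager = new CacheManager({
        redis: new Redis(),            // assumed field of CacheManagerConfig (not visible in the hunks above)
        balanceModule,
        markets,
        enablePortfolioCache: true,    // new in 1.0.2; enabled by default when its dependencies exist
        startWorkers: true,
    });

    const portfolioCache = manager.getCache('portfolio'); // 'portfolio' added to the getCache union in 1.0.2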
package/dist/index.d.ts
CHANGED

@@ -2,10 +2,12 @@ export { BaseCache } from './core/base-cache';
 export { CacheManager } from './core/cache-manager';
 export { BalanceCache } from './stores/balance-cache';
 export { PriceCache } from './stores/price-cache';
+export { PortfolioCache } from './stores/portfolio-cache';
 export { TransactionCache } from './stores/transaction-cache';
 export { RefreshWorker, startUnifiedWorker } from './workers/refresh-worker';
 export type { CacheConfig, CachedValue, CacheResult, RefreshJob, HealthCheckResult, CacheStats } from './types';
 export type { BalanceData } from './stores/balance-cache';
 export type { PriceData } from './stores/price-cache';
+export type { PortfolioData, ChartData } from './stores/portfolio-cache';
 export type { CacheManagerConfig } from './core/cache-manager';
 export type { WorkerConfig } from './workers/refresh-worker';
package/dist/index.js
CHANGED

@@ -6,7 +6,7 @@
 Provides stale-while-revalidate caching with TTL, background refresh, and health monitoring.
 */
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.startUnifiedWorker = exports.RefreshWorker = exports.TransactionCache = exports.PriceCache = exports.BalanceCache = exports.CacheManager = exports.BaseCache = void 0;
+exports.startUnifiedWorker = exports.RefreshWorker = exports.TransactionCache = exports.PortfolioCache = exports.PriceCache = exports.BalanceCache = exports.CacheManager = exports.BaseCache = void 0;
 // Core exports
 var base_cache_1 = require("./core/base-cache");
 Object.defineProperty(exports, "BaseCache", { enumerable: true, get: function () { return base_cache_1.BaseCache; } });
@@ -17,6 +17,8 @@ var balance_cache_1 = require("./stores/balance-cache");
 Object.defineProperty(exports, "BalanceCache", { enumerable: true, get: function () { return balance_cache_1.BalanceCache; } });
 var price_cache_1 = require("./stores/price-cache");
 Object.defineProperty(exports, "PriceCache", { enumerable: true, get: function () { return price_cache_1.PriceCache; } });
+var portfolio_cache_1 = require("./stores/portfolio-cache");
+Object.defineProperty(exports, "PortfolioCache", { enumerable: true, get: function () { return portfolio_cache_1.PortfolioCache; } });
 var transaction_cache_1 = require("./stores/transaction-cache");
 Object.defineProperty(exports, "TransactionCache", { enumerable: true, get: function () { return transaction_cache_1.TransactionCache; } });
 // Worker exports
package/dist/stores/balance-cache.d.ts
CHANGED

@@ -39,6 +39,7 @@ export declare class BalanceCache extends BaseCache<BalanceData> {
     getBalance(caip: string, pubkey: string, waitForFresh?: boolean): Promise<BalanceData>;
     /**
      * Get balances for multiple assets (batch operation)
+     * OPTIMIZED: Uses Redis MGET for single round-trip instead of N individual GETs
      */
     getBatchBalances(items: Array<{
         caip: string;
package/dist/stores/balance-cache.js
CHANGED

@@ -17,14 +17,14 @@ class BalanceCache extends base_cache_1.BaseCache {
         const defaultConfig = {
             name: 'balance',
             keyPrefix: 'balance_v2:',
-            ttl:
-            staleThreshold:
-            enableTTL:
-            queueName: '
+            ttl: 0, // Ignored when enableTTL: false
+            staleThreshold: 5 * 60 * 1000, // 5 minutes - triggers background refresh
+            enableTTL: false, // NEVER EXPIRE - data persists forever
+            queueName: 'cache-refresh',
             enableQueue: true,
             maxRetries: 3,
             retryDelay: 10000,
-            blockOnMiss: true, // Wait for fresh data on first request
+            blockOnMiss: true, // Wait for fresh data on first request - users need real balances!
             enableLegacyFallback: true,
             defaultValue: {
                 caip: '',
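
The updated defaults spell out the balance cache's stale-while-revalidate posture: entries never expire (enableTTL: false, so ttl is ignored), while staleThreshold controls when a background refresh is queued for data that is still served. A tiny illustrative staleness check using that 5-minute threshold:

    // Illustrative: with no TTL, "stale" only means "old enough to queue a background refresh".
    const STALE_THRESHOLD_MS = 5 * 60 * 1000;

    function isStale(cachedTimestamp: number, now: number = Date.now()): boolean {
        return now - cachedTimestamp > STALE_THRESHOLD_MS;
    }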
@@ -131,17 +131,70 @@ class BalanceCache extends base_cache_1.BaseCache {
     }
     /**
      * Get balances for multiple assets (batch operation)
+     * OPTIMIZED: Uses Redis MGET for single round-trip instead of N individual GETs
      */
     async getBatchBalances(items, waitForFresh) {
         const tag = this.TAG + 'getBatchBalances | ';
         const startTime = Date.now();
         try {
-            log.info(tag, `Batch request for ${items.length} balances`);
-            //
-            const
-
+            log.info(tag, `Batch request for ${items.length} balances using Redis MGET`);
+            // Build all Redis keys
+            const keys = items.map(item => this.buildKey({ caip: item.caip, pubkey: item.pubkey }));
+            // PERF: Use MGET to fetch all keys in ONE Redis round-trip
+            const cachedValues = await this.redis.mget(...keys);
+            // Process results
+            const results = [];
+            const missedItems = [];
+            for (let i = 0; i < items.length; i++) {
+                const item = items[i];
+                const cached = cachedValues[i];
+                if (cached) {
+                    try {
+                        const parsed = JSON.parse(cached);
+                        if (parsed.value && parsed.value.caip && parsed.value.pubkey) {
+                            results[i] = parsed.value;
+                            continue;
+                        }
+                    }
+                    catch (e) {
+                        log.warn(tag, `Failed to parse cached value for ${keys[i]}`);
+                    }
+                }
+                // Cache miss - record for fetching
+                missedItems.push({ ...item, index: i });
+                results[i] = this.config.defaultValue; // Placeholder
+            }
             const responseTime = Date.now() - startTime;
-
+            const hitRate = ((items.length - missedItems.length) / items.length * 100).toFixed(1);
+            log.info(tag, `MGET completed: ${items.length} keys in ${responseTime}ms (${hitRate}% hit rate)`);
+            // If we have cache misses and blocking is enabled, fetch them
+            if (missedItems.length > 0) {
+                const shouldBlock = waitForFresh !== undefined ? waitForFresh : this.config.blockOnMiss;
+                if (shouldBlock) {
+                    log.info(tag, `Fetching ${missedItems.length} cache misses...`);
+                    const fetchStart = Date.now();
+                    // Fetch all misses in parallel
+                    const fetchPromises = missedItems.map(async (item) => {
+                        try {
+                            // Use fetchFresh to ensure Redis is updated and requests are deduplicated
+                            const freshData = await this.fetchFresh({ caip: item.caip, pubkey: item.pubkey });
+                            results[item.index] = freshData;
+                        }
+                        catch (error) {
+                            log.error(tag, `Failed to fetch ${item.caip}/${item.pubkey}:`, error);
+                            results[item.index] = { caip: item.caip, pubkey: item.pubkey, balance: '0' };
+                        }
+                    });
+                    await Promise.all(fetchPromises);
+                    log.info(tag, `Fetched ${missedItems.length} misses in ${Date.now() - fetchStart}ms`);
+                }
+                else {
+                    // Non-blocking: trigger background refresh for misses
+                    missedItems.forEach(item => {
+                        this.triggerAsyncRefresh({ caip: item.caip, pubkey: item.pubkey }, 'high');
+                    });
+                }
+            }
             return results;
         }
         catch (error) {
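
The rewritten getBatchBalances collapses N sequential GETs into a single MGET and then backfills only the misses. A reduced sketch of that read path, assuming an ioredis client and the {value, timestamp} envelope this package stores:

    import Redis from 'ioredis';

    interface CachedEnvelope<T> { value: T; timestamp: number; }

    // Fetch many keys in one round-trip; return parsed hits plus the indices that missed.
    async function mgetParsed<T>(redis: Redis, keys: string[]): Promise<{ hits: Array<T | null>; missed: number[] }> {
        const raw = await redis.mget(...keys); // single network round-trip for every key
        const hits: Array<T | null> = new Array(keys.length).fill(null);
        const missed: number[] = [];
        raw.forEach((entry, i) => {
            if (!entry) { missed.push(i); return; }
            try {
                hits[i] = (JSON.parse(entry) as CachedEnvelope<T>).value;
            } catch {
                missed.push(i); // a corrupt entry is treated as a miss
            }
        });
        return { hits, missed };
    }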
package/dist/stores/portfolio-cache.d.ts
ADDED

@@ -0,0 +1,79 @@
+import { BaseCache } from '../core/base-cache';
+import type { CacheConfig } from '../types';
+/**
+ * Portfolio chart data structure
+ * Represents a single asset balance with pricing for charts
+ */
+export interface ChartData {
+    caip: string;
+    pubkey: string;
+    networkId: string;
+    symbol: string;
+    name: string;
+    balance: string;
+    priceUsd: number;
+    valueUsd: number;
+    icon?: string;
+    type?: string;
+    decimal?: number;
+}
+/**
+ * Full portfolio data for a pubkey set
+ */
+export interface PortfolioData {
+    pubkeys: Array<{
+        pubkey: string;
+        caip: string;
+    }>;
+    charts: ChartData[];
+    totalValueUsd: number;
+    timestamp: number;
+}
+/**
+ * PortfolioCache - Caches portfolio/chart data
+ *
+ * CRITICAL: This cache is NON-BLOCKING by design
+ * - Returns empty arrays immediately on cache miss
+ * - Never blocks waiting for blockchain APIs
+ * - Background jobs populate cache for next request
+ */
+export declare class PortfolioCache extends BaseCache<PortfolioData> {
+    private balanceModule;
+    private marketsModule;
+    constructor(redis: any, balanceModule: any, marketsModule: any, config?: Partial<CacheConfig>);
+    /**
+     * Build Redis key for portfolio data
+     *
+     * Key strategy: Hash all pubkeys+caips to create a stable identifier
+     * Format: portfolio_v2:hash(pubkeys)
+     *
+     * This allows caching the same portfolio regardless of pubkey order
+     */
+    protected buildKey(params: Record<string, any>): string;
+    /**
+     * Simple hash function for cache keys
+     * Not cryptographic - just needs to be stable and collision-resistant
+     */
+    private simpleHash;
+    /**
+     * Fetch portfolio from blockchain APIs
+     *
+     * This is the SLOW operation that happens in the background
+     * It fetches balances for all pubkeys and enriches with pricing
+     */
+    protected fetchFromSource(params: Record<string, any>): Promise<PortfolioData>;
+    /**
+     * No legacy cache format for portfolios
+     */
+    protected getLegacyCached(params: Record<string, any>): Promise<PortfolioData | null>;
+    /**
+     * Get portfolio for a set of pubkeys
+     * Convenience method that wraps base get()
+     *
+     * RETURNS INSTANTLY - either cached data or empty arrays
+     */
+    getPortfolio(pubkeys: Array<{
+        pubkey: string;
+        caip: string;
+    }>, waitForFresh?: boolean): Promise<PortfolioData>;
+}
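
Combined with the CacheManager wiring above, the new class gives callers a fire-and-forget portfolio read: a cold request returns the default (empty) portfolio immediately while a background job populates the cache for the next call. A hedged usage sketch; the constructor's module arguments are typed any in the declaration, so the placeholders below stand in for application-specific modules:

    import Redis from 'ioredis';
    import { PortfolioCache, type PortfolioData } from '@pioneer-platform/pioneer-cache';

    declare const balanceModule: any;  // placeholder: balance-fetching module expected by the cache
    declare const marketsModule: any;  // placeholder: pricing module expected by the cache

    const portfolioCache = new PortfolioCache(new Redis(), balanceModule, marketsModule);

    // Non-blocking by design: returns cached charts, or an empty portfolio while a refresh is queued.
    const portfolio: PortfolioData = await portfolioCache.getPortfolio([
        { pubkey: 'xpub...', caip: 'bip122:000000000019d6689c085ae165831e93/slip44:0' },
    ]);
    console.log(portfolio.totalValueUsd, portfolio.charts.length);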
|