@pioneer-platform/pioneer-cache 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,13 @@
+ # @pioneer-platform/pioneer-cache
+
+ ## 1.0.2
+
+ ### Patch Changes
+
+ - feed4f1: Increase Redis cache timeout from 100ms to 2000ms to prevent false cache misses and reduce debug logging
+
+ ## 1.0.1
+
+ ### Patch Changes
+
+ - Fix workspace dependencies to use published versions for Docker build compatibility
@@ -8,6 +8,7 @@ export declare abstract class BaseCache<T> {
  private cachedStats;
  private cachedStatsTimestamp;
  private readonly STATS_CACHE_TTL;
+ private pendingFetches;
  constructor(redis: any, config: CacheConfig);
  /**
  * Initialize Redis queue for background refresh
@@ -36,6 +37,8 @@ export declare abstract class BaseCache<T> {
  /**
  * Fetch fresh data and update cache
  * FIX #1 & #4: Used for blocking requests and fallback
+ * FIX #6: Request deduplication to prevent thundering herd
+ * FIX #8: Return stale cache on fetch failures
  */
  fetchFresh(params: Record<string, any>): Promise<T>;
  /**
@@ -15,6 +15,9 @@ class BaseCache {
  this.cachedStats = null;
  this.cachedStatsTimestamp = 0;
  this.STATS_CACHE_TTL = 30000; // 30 seconds
+ // FIX #6: Request deduplication to prevent thundering herd
+ // Tracks in-flight network requests to prevent duplicate API calls
+ this.pendingFetches = new Map();
  this.redis = redis;
  this.config = config;
  this.TAG = ` | ${config.name}Cache | `;
@@ -54,9 +57,13 @@ class BaseCache {
  const tag = this.TAG + 'get | ';
  const startTime = Date.now();
  try {
+ const t1 = Date.now();
  const key = this.buildKey(params);
+ log.info(tag, `⏱️ buildKey took ${Date.now() - t1}ms`);
  // Step 1: Try new cache format
+ const t2 = Date.now();
  const cachedValue = await this.getCached(key);
+ log.info(tag, `⏱️ getCached took ${Date.now() - t2}ms`);
  if (cachedValue) {
  const age = Date.now() - cachedValue.timestamp;
  const responseTime = Date.now() - startTime;
@@ -77,7 +84,9 @@ class BaseCache {
  }
  // Step 2: Try legacy cache fallback
  if (this.config.enableLegacyFallback) {
+ const t3 = Date.now();
  const legacyValue = await this.getLegacyCached(params);
+ log.info(tag, `⏱️ getLegacyCached took ${Date.now() - t3}ms`);
  if (legacyValue) {
  const responseTime = Date.now() - startTime;
  log.info(tag, `Legacy cache hit: ${key} (${responseTime}ms)`);
@@ -111,7 +120,10 @@ class BaseCache {
  };
  }
  // Non-blocking: trigger async refresh and return default
+ const t4 = Date.now();
  this.triggerAsyncRefresh(params, 'high');
+ log.info(tag, `⏱️ triggerAsyncRefresh took ${Date.now() - t4}ms`);
+ log.info(tag, `⏱️ Returning default value after ${Date.now() - startTime}ms TOTAL`);
  return {
  success: true,
  value: this.config.defaultValue,
@@ -136,18 +148,32 @@ class BaseCache {
  */
  async getCached(key) {
  const tag = this.TAG + 'getCached | ';
+ const t0 = Date.now();
  try {
- const cached = await this.redis.get(key);
+ // Redis timeout for cache reads
+ // Increased from 100ms to 2000ms - 100ms was too aggressive and caused false cache misses
+ // Increased from 2000ms to 10000ms - IPv4/IPv6 DNS resolution can cause delays
+ const timeoutMs = 10000;
+ const cached = await Promise.race([
+ this.redis.get(key),
+ new Promise((resolve) => setTimeout(() => {
+ log.warn(tag, `⏱️ Redis timeout after ${timeoutMs}ms, returning cache miss`);
+ resolve(null);
+ }, timeoutMs))
+ ]);
  if (!cached) {
+ log.debug(tag, `Cache miss: ${key}`);
  return null;
  }
  const parsed = JSON.parse(cached);
- // Validate structure
- if (!parsed.value || typeof parsed.timestamp !== 'number') {
+ // Validate structure - Check for undefined/null, NOT falsy values!
+ // CRITICAL: Balance "0", empty arrays [], and empty objects {} are VALID!
+ if (parsed.value === undefined || parsed.value === null || typeof parsed.timestamp !== 'number') {
  log.warn(tag, `Invalid cache structure for ${key}, removing`);
  await this.redis.del(key);
  return null;
  }
+ log.debug(tag, `Cache hit: ${key}`);
  return parsed;
  }
  catch (error) {
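
The getCached change above wraps the Redis GET in a Promise.race so a slow read degrades to a cache miss instead of hanging the request. A minimal standalone sketch of the same pattern, assuming an ioredis-style client (the withTimeout helper name is illustrative, not part of this package):

```ts
// Sketch: race a Redis read against a timer and fall back on timeout.
// Mirrors the pattern added to getCached above; not the package's actual API.
async function withTimeout<T>(work: Promise<T>, timeoutMs: number, fallback: T): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<T>((resolve) => {
    timer = setTimeout(() => resolve(fallback), timeoutMs);
  });
  try {
    return await Promise.race([work, timeout]); // whichever settles first wins
  } finally {
    if (timer) clearTimeout(timer); // avoid a dangling timer on the fast path
  }
}

// const raw = await withTimeout(redis.get(key), 10000, null); // null is treated as a cache miss
```
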
@@ -169,20 +195,29 @@ class BaseCache {
  lastUpdated: new Date().toISOString(),
  metadata
  };
+ // DIAGNOSTIC: Log Redis connection details
+ const redisConstructor = this.redis.constructor?.name || 'unknown';
+ const redisOptions = this.redis.options || {};
  // FIX #2: Always set TTL (unless explicitly disabled)
  if (this.config.enableTTL) {
  const ttlSeconds = Math.floor(this.config.ttl / 1000);
+ // PERF: Reduced logging for production performance
+ log.debug(tag, `📝 Writing to Redis: ${key} [${JSON.stringify(cachedValue).length} bytes, TTL: ${ttlSeconds}s]`);
+ // Write
  await this.redis.set(key, JSON.stringify(cachedValue), 'EX', ttlSeconds);
- log.info(tag, `Updated cache: ${key} [TTL: ${ttlSeconds}s]`);
+ // PERF: Verification disabled for production performance
+ // Only enable for debugging cache issues
+ log.debug(tag, `✅ Updated cache: ${key} [TTL: ${ttlSeconds}s]`);
  }
  else {
  // Permanent caching (for transactions)
  await this.redis.set(key, JSON.stringify(cachedValue));
- log.info(tag, `Updated cache: ${key} [PERMANENT]`);
+ // PERF: Verification disabled for production performance
+ log.debug(tag, `✅ Updated cache: ${key} [PERMANENT]`);
  }
  }
  catch (error) {
- log.error(tag, `Error updating cache for ${key}:`, error);
+ log.error(tag, `❌ Error updating cache for ${key}:`, error);
  throw error;
  }
  }
@@ -203,8 +238,12 @@ class BaseCache {
  const key = this.buildKey(params);
  log.error(tag, `❌ QUEUE NOT INITIALIZED! Cannot refresh ${key}`);
  log.error(tag, `Background refresh is BROKEN - cache will NOT update!`);
- // FIX #4: Synchronous fallback for high-priority
- if (priority === 'high') {
+ // FIX #4: Synchronous fallback for high-priority (only if useSyncFallback is enabled)
+ // Default: use sync fallback only for blocking caches (blockOnMiss=true)
+ const shouldUseSyncFallback = this.config.useSyncFallback !== undefined
+ ? this.config.useSyncFallback
+ : this.config.blockOnMiss;
+ if (priority === 'high' && shouldUseSyncFallback) {
  log.warn(tag, `Using synchronous fallback for high-priority refresh`);
  setImmediate(async () => {
  try {
@@ -245,26 +284,71 @@ class BaseCache {
  /**
  * Fetch fresh data and update cache
  * FIX #1 & #4: Used for blocking requests and fallback
+ * FIX #6: Request deduplication to prevent thundering herd
+ * FIX #8: Return stale cache on fetch failures
  */
  async fetchFresh(params) {
  const tag = this.TAG + 'fetchFresh | ';
  const startTime = Date.now();
- try {
- const key = this.buildKey(params);
- log.info(tag, `Fetching fresh data: ${key}`);
- // Call subclass-specific fetch implementation
- const value = await this.fetchFromSource(params);
- // Update cache
- await this.updateCache(key, value);
- const fetchTime = Date.now() - startTime;
- log.info(tag, `✅ Fetched fresh data in ${fetchTime}ms: ${key}`);
- return value;
- }
- catch (error) {
- const fetchTime = Date.now() - startTime;
- log.error(tag, `Failed to fetch fresh data after ${fetchTime}ms:`, error);
- return this.config.defaultValue;
+ const key = this.buildKey(params);
+ // FIX #6: Check if there's already a pending fetch for this key
+ const existingFetch = this.pendingFetches.get(key);
+ if (existingFetch) {
+ // For non-blocking caches, return default value immediately instead of waiting
+ if (!this.config.blockOnMiss) {
+ log.debug(tag, `Non-blocking cache: returning default value while fetch in progress: ${key}`);
+ return this.config.defaultValue;
+ }
+ // For blocking caches, coalesce to prevent thundering herd
+ log.debug(tag, `Coalescing request for: ${key} (fetch already in progress)`);
+ return existingFetch;
  }
+ // Create the fetch promise
+ const fetchPromise = (async () => {
+ try {
+ log.info(tag, `Fetching fresh data: ${key}`);
+ // Call subclass-specific fetch implementation
+ const value = await this.fetchFromSource(params);
+ // Update cache
+ await this.updateCache(key, value);
+ const fetchTime = Date.now() - startTime;
+ log.info(tag, `✅ Fetched fresh data in ${fetchTime}ms: ${key}`);
+ return value;
+ }
+ catch (error) {
+ const fetchTime = Date.now() - startTime;
+ const errorMsg = error instanceof Error ? error.message : String(error);
+ // FIX #8: Try to return stale cache on fetch failures
+ const cachedValue = await this.getCached(key);
+ if (cachedValue) {
+ log.warn(tag, `Fetch failed after ${fetchTime}ms, returning stale cache: ${key}`);
+ return cachedValue.value;
+ }
+ // Try legacy cache as last resort
+ if (this.config.enableLegacyFallback) {
+ const legacyValue = await this.getLegacyCached(params);
+ if (legacyValue) {
+ log.warn(tag, `Fetch failed after ${fetchTime}ms, returning legacy cache: ${key}`);
+ return legacyValue;
+ }
+ }
+ // Log as warning for expected issues, error for unexpected
+ if (errorMsg.includes('rate limit') || errorMsg.includes('timeout') || errorMsg.includes('No valid')) {
+ log.warn(tag, `Expected fetch failure after ${fetchTime}ms: ${errorMsg}`);
+ }
+ else {
+ log.error(tag, `Unexpected fetch failure after ${fetchTime}ms:`, error);
+ }
+ return this.config.defaultValue;
+ }
+ finally {
+ // Clean up pending fetch
+ this.pendingFetches.delete(key);
+ }
+ })();
+ // Store the promise so concurrent requests can reuse it
+ this.pendingFetches.set(key, fetchPromise);
+ return fetchPromise;
  }
  /**
  * Migrate legacy cache value to new format
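
The rewritten fetchFresh registers each in-flight fetch in pendingFetches keyed by cache key, so concurrent callers for the same key share one upstream request: blocking caches await it, non-blocking caches return the default value immediately. A minimal sketch of the coalescing idea on its own, with a hypothetical fetcher callback standing in for fetchFromSource:

```ts
// Sketch: promise coalescing to prevent a thundering herd.
// Concurrent callers for the same key await a single in-flight promise.
const pendingFetches = new Map<string, Promise<unknown>>();

async function coalesced<T>(key: string, fetcher: () => Promise<T>): Promise<T> {
  const existing = pendingFetches.get(key);
  if (existing) return existing as Promise<T>; // reuse the in-flight fetch

  const fetchPromise = (async () => {
    try {
      return await fetcher();
    } finally {
      pendingFetches.delete(key); // always clean up so later calls fetch again
    }
  })();

  pendingFetches.set(key, fetchPromise);
  return fetchPromise;
}

// e.g. ten concurrent coalesced('balance:xyz', fetchFromUpstream) calls
// issue a single upstream request (fetchFromUpstream is illustrative).
```
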
@@ -1,5 +1,6 @@
  import { BalanceCache } from '../stores/balance-cache';
  import { PriceCache } from '../stores/price-cache';
+ import { PortfolioCache } from '../stores/portfolio-cache';
  import { TransactionCache } from '../stores/transaction-cache';
  import type { HealthCheckResult } from '../types';
  /**
@@ -11,6 +12,7 @@ export interface CacheManagerConfig {
  markets?: any;
  enableBalanceCache?: boolean;
  enablePriceCache?: boolean;
+ enablePortfolioCache?: boolean;
  enableTransactionCache?: boolean;
  startWorkers?: boolean;
  }
@@ -21,6 +23,7 @@ export declare class CacheManager {
  private redis;
  private balanceCache?;
  private priceCache?;
+ private portfolioCache?;
  private transactionCache?;
  private workers;
  constructor(config: CacheManagerConfig);
@@ -45,18 +48,20 @@ export declare class CacheManager {
  getCaches(): {
  balance: BalanceCache | undefined;
  price: PriceCache | undefined;
+ portfolio: PortfolioCache | undefined;
  transaction: TransactionCache | undefined;
  };
  /**
  * Get specific cache by name
  */
- getCache(name: 'balance' | 'price' | 'transaction'): BalanceCache | PriceCache | TransactionCache | undefined;
+ getCache(name: 'balance' | 'price' | 'portfolio' | 'transaction'): BalanceCache | PriceCache | PortfolioCache | TransactionCache | undefined;
  /**
  * Clear all caches (use with caution!)
  */
  clearAll(): Promise<{
  balance?: number;
  price?: number;
+ portfolio?: number;
  transaction?: number;
  }>;
  }
@@ -9,6 +9,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.CacheManager = void 0;
  const balance_cache_1 = require("../stores/balance-cache");
  const price_cache_1 = require("../stores/price-cache");
+ const portfolio_cache_1 = require("../stores/portfolio-cache");
  const transaction_cache_1 = require("../stores/transaction-cache");
  const refresh_worker_1 = require("../workers/refresh-worker");
  const log = require('@pioneer-platform/loggerdog')();
@@ -30,6 +31,11 @@ class CacheManager {
  this.priceCache = new price_cache_1.PriceCache(this.redis, config.markets);
  log.info(TAG, '✅ Price cache initialized');
  }
+ // Initialize Portfolio Cache
+ if (config.enablePortfolioCache !== false && config.balanceModule && config.markets) {
+ this.portfolioCache = new portfolio_cache_1.PortfolioCache(this.redis, config.balanceModule, config.markets);
+ log.info(TAG, '✅ Portfolio cache initialized');
+ }
  // Initialize Transaction Cache
  if (config.enableTransactionCache !== false) {
  this.transactionCache = new transaction_cache_1.TransactionCache(this.redis);
@@ -58,6 +64,9 @@ class CacheManager {
  if (this.priceCache) {
  cacheRegistry.set('price', this.priceCache);
  }
+ if (this.portfolioCache) {
+ cacheRegistry.set('portfolio', this.portfolioCache);
+ }
  // Start unified worker if we have any caches with queues
  if (cacheRegistry.size > 0) {
  const worker = await (0, refresh_worker_1.startUnifiedWorker)(this.redis, cacheRegistry, 'cache-refresh', // Unified queue name
@@ -125,6 +134,17 @@ class CacheManager {
  warnings.push(...priceHealth.warnings.map(w => `Price: ${w}`));
  }
  }
+ // Check portfolio cache
+ if (this.portfolioCache) {
+ const portfolioHealth = await this.portfolioCache.getHealth(forceRefresh);
+ checks.portfolio = portfolioHealth;
+ if (portfolioHealth.status === 'unhealthy') {
+ issues.push(...portfolioHealth.issues.map(i => `Portfolio: ${i}`));
+ }
+ else if (portfolioHealth.status === 'degraded') {
+ warnings.push(...portfolioHealth.warnings.map(w => `Portfolio: ${w}`));
+ }
+ }
  // Check transaction cache (simple stats check)
  if (this.transactionCache) {
  const txStats = await this.transactionCache.getStats();
@@ -192,6 +212,7 @@ class CacheManager {
  return {
  balance: this.balanceCache,
  price: this.priceCache,
+ portfolio: this.portfolioCache,
  transaction: this.transactionCache
  };
  }
@@ -204,6 +225,8 @@ class CacheManager {
  return this.balanceCache;
  case 'price':
  return this.priceCache;
+ case 'portfolio':
+ return this.portfolioCache;
  case 'transaction':
  return this.transactionCache;
  default:
@@ -223,6 +246,9 @@ class CacheManager {
  if (this.priceCache) {
  result.price = await this.priceCache.clearAll();
  }
+ if (this.portfolioCache) {
+ result.portfolio = await this.portfolioCache.clearAll();
+ }
  if (this.transactionCache) {
  result.transaction = await this.transactionCache.clearAll();
  }
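
Taken together, the cache-manager changes register the new portfolio cache in construction, worker registration, health checks, getCaches/getCache, and clearAll. A rough usage sketch based only on the fields visible in this diff; the Redis connection field and the balanceModule/markets shapes are assumptions, not spelled out here:

```ts
import { CacheManager } from '@pioneer-platform/pioneer-cache';

declare const redis: any;          // ioredis-style client (assumed field name)
declare const balanceModule: any;  // module the host app already passes for balance fetching
declare const markets: any;        // pricing module referenced by the constructor

const manager = new CacheManager({
  redis,
  balanceModule,
  markets,
  enablePortfolioCache: true, // new flag; the portfolio cache also needs balanceModule + markets
  startWorkers: true,
} as any); // cast because config fields outside this diff are not shown

const portfolioCache = manager.getCache('portfolio'); // 'portfolio' is a new union member
const { portfolio } = manager.getCaches();            // getCaches() now includes portfolio
```
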
package/dist/index.d.ts CHANGED
@@ -2,10 +2,12 @@ export { BaseCache } from './core/base-cache';
  export { CacheManager } from './core/cache-manager';
  export { BalanceCache } from './stores/balance-cache';
  export { PriceCache } from './stores/price-cache';
+ export { PortfolioCache } from './stores/portfolio-cache';
  export { TransactionCache } from './stores/transaction-cache';
  export { RefreshWorker, startUnifiedWorker } from './workers/refresh-worker';
  export type { CacheConfig, CachedValue, CacheResult, RefreshJob, HealthCheckResult, CacheStats } from './types';
  export type { BalanceData } from './stores/balance-cache';
  export type { PriceData } from './stores/price-cache';
+ export type { PortfolioData, ChartData } from './stores/portfolio-cache';
  export type { CacheManagerConfig } from './core/cache-manager';
  export type { WorkerConfig } from './workers/refresh-worker';
package/dist/index.js CHANGED
@@ -6,7 +6,7 @@
  Provides stale-while-revalidate caching with TTL, background refresh, and health monitoring.
  */
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.startUnifiedWorker = exports.RefreshWorker = exports.TransactionCache = exports.PriceCache = exports.BalanceCache = exports.CacheManager = exports.BaseCache = void 0;
+ exports.startUnifiedWorker = exports.RefreshWorker = exports.TransactionCache = exports.PortfolioCache = exports.PriceCache = exports.BalanceCache = exports.CacheManager = exports.BaseCache = void 0;
  // Core exports
  var base_cache_1 = require("./core/base-cache");
  Object.defineProperty(exports, "BaseCache", { enumerable: true, get: function () { return base_cache_1.BaseCache; } });
@@ -17,6 +17,8 @@ var balance_cache_1 = require("./stores/balance-cache");
  Object.defineProperty(exports, "BalanceCache", { enumerable: true, get: function () { return balance_cache_1.BalanceCache; } });
  var price_cache_1 = require("./stores/price-cache");
  Object.defineProperty(exports, "PriceCache", { enumerable: true, get: function () { return price_cache_1.PriceCache; } });
+ var portfolio_cache_1 = require("./stores/portfolio-cache");
+ Object.defineProperty(exports, "PortfolioCache", { enumerable: true, get: function () { return portfolio_cache_1.PortfolioCache; } });
  var transaction_cache_1 = require("./stores/transaction-cache");
  Object.defineProperty(exports, "TransactionCache", { enumerable: true, get: function () { return transaction_cache_1.TransactionCache; } });
  // Worker exports
@@ -39,6 +39,7 @@ export declare class BalanceCache extends BaseCache<BalanceData> {
  getBalance(caip: string, pubkey: string, waitForFresh?: boolean): Promise<BalanceData>;
  /**
  * Get balances for multiple assets (batch operation)
+ * OPTIMIZED: Uses Redis MGET for single round-trip instead of N individual GETs
  */
  getBatchBalances(items: Array<{
  caip: string;
@@ -17,14 +17,14 @@ class BalanceCache extends base_cache_1.BaseCache {
  const defaultConfig = {
  name: 'balance',
  keyPrefix: 'balance_v2:',
- ttl: 5 * 60 * 1000, // 5 minutes
- staleThreshold: 2 * 60 * 1000, // 2 minutes
- enableTTL: true,
- queueName: 'balance-refresh-v2',
+ ttl: 0, // Ignored when enableTTL: false
+ staleThreshold: 5 * 60 * 1000, // 5 minutes - triggers background refresh
+ enableTTL: false, // NEVER EXPIRE - data persists forever
+ queueName: 'cache-refresh',
  enableQueue: true,
  maxRetries: 3,
  retryDelay: 10000,
- blockOnMiss: true, // Wait for fresh data on first request
+ blockOnMiss: true, // Wait for fresh data on first request - users need real balances!
  enableLegacyFallback: true,
  defaultValue: {
  caip: '',
@@ -131,17 +131,70 @@ class BalanceCache extends base_cache_1.BaseCache {
  }
  /**
  * Get balances for multiple assets (batch operation)
+ * OPTIMIZED: Uses Redis MGET for single round-trip instead of N individual GETs
  */
  async getBatchBalances(items, waitForFresh) {
  const tag = this.TAG + 'getBatchBalances | ';
  const startTime = Date.now();
  try {
- log.info(tag, `Batch request for ${items.length} balances`);
- // Get all balances in parallel
- const promises = items.map(item => this.getBalance(item.caip, item.pubkey, waitForFresh));
- const results = await Promise.all(promises);
+ log.info(tag, `Batch request for ${items.length} balances using Redis MGET`);
+ // Build all Redis keys
+ const keys = items.map(item => this.buildKey({ caip: item.caip, pubkey: item.pubkey }));
+ // PERF: Use MGET to fetch all keys in ONE Redis round-trip
+ const cachedValues = await this.redis.mget(...keys);
+ // Process results
+ const results = [];
+ const missedItems = [];
+ for (let i = 0; i < items.length; i++) {
+ const item = items[i];
+ const cached = cachedValues[i];
+ if (cached) {
+ try {
+ const parsed = JSON.parse(cached);
+ if (parsed.value && parsed.value.caip && parsed.value.pubkey) {
+ results[i] = parsed.value;
+ continue;
+ }
+ }
+ catch (e) {
+ log.warn(tag, `Failed to parse cached value for ${keys[i]}`);
+ }
+ }
+ // Cache miss - record for fetching
+ missedItems.push({ ...item, index: i });
+ results[i] = this.config.defaultValue; // Placeholder
+ }
  const responseTime = Date.now() - startTime;
- log.info(tag, `Batch completed: ${results.length} balances in ${responseTime}ms (${(responseTime / results.length).toFixed(1)}ms avg)`);
+ const hitRate = ((items.length - missedItems.length) / items.length * 100).toFixed(1);
+ log.info(tag, `MGET completed: ${items.length} keys in ${responseTime}ms (${hitRate}% hit rate)`);
+ // If we have cache misses and blocking is enabled, fetch them
+ if (missedItems.length > 0) {
+ const shouldBlock = waitForFresh !== undefined ? waitForFresh : this.config.blockOnMiss;
+ if (shouldBlock) {
+ log.info(tag, `Fetching ${missedItems.length} cache misses...`);
+ const fetchStart = Date.now();
+ // Fetch all misses in parallel
+ const fetchPromises = missedItems.map(async (item) => {
+ try {
+ // Use fetchFresh to ensure Redis is updated and requests are deduplicated
+ const freshData = await this.fetchFresh({ caip: item.caip, pubkey: item.pubkey });
+ results[item.index] = freshData;
+ }
+ catch (error) {
+ log.error(tag, `Failed to fetch ${item.caip}/${item.pubkey}:`, error);
+ results[item.index] = { caip: item.caip, pubkey: item.pubkey, balance: '0' };
+ }
+ });
+ await Promise.all(fetchPromises);
+ log.info(tag, `Fetched ${missedItems.length} misses in ${Date.now() - fetchStart}ms`);
+ }
+ else {
+ // Non-blocking: trigger background refresh for misses
+ missedItems.forEach(item => {
+ this.triggerAsyncRefresh({ caip: item.caip, pubkey: item.pubkey }, 'high');
+ });
+ }
+ }
  return results;
  }
  catch (error) {
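
The batch path above replaces N sequential GETs with a single MGET round trip and only falls back to fetching (or queueing a refresh) for the keys that missed. A standalone sketch of that read pattern against an ioredis-style client, with the parse/validate step simplified:

```ts
// Sketch: fetch many cache entries in one Redis round trip and split hits from misses.
// MGET returns values positionally; null means the key was absent.
async function readBatch(
  redis: { mget(...keys: string[]): Promise<(string | null)[]> },
  keys: string[]
): Promise<{ hits: Map<string, unknown>; misses: string[] }> {
  const raw = await redis.mget(...keys);
  const hits = new Map<string, unknown>();
  const misses: string[] = [];
  raw.forEach((entry, i) => {
    if (entry === null) {
      misses.push(keys[i]);
      return;
    }
    try {
      hits.set(keys[i], JSON.parse(entry)); // payloads are stored as JSON strings
    } catch {
      misses.push(keys[i]); // treat unparsable entries as misses
    }
  });
  return { hits, misses }; // misses can be fetched fresh or queued for background refresh
}
```
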
@@ -0,0 +1,79 @@
+ import { BaseCache } from '../core/base-cache';
+ import type { CacheConfig } from '../types';
+ /**
+ * Portfolio chart data structure
+ * Represents a single asset balance with pricing for charts
+ */
+ export interface ChartData {
+ caip: string;
+ pubkey: string;
+ networkId: string;
+ symbol: string;
+ name: string;
+ balance: string;
+ priceUsd: number;
+ valueUsd: number;
+ icon?: string;
+ type?: string;
+ decimal?: number;
+ }
+ /**
+ * Full portfolio data for a pubkey set
+ */
+ export interface PortfolioData {
+ pubkeys: Array<{
+ pubkey: string;
+ caip: string;
+ }>;
+ charts: ChartData[];
+ totalValueUsd: number;
+ timestamp: number;
+ }
+ /**
+ * PortfolioCache - Caches portfolio/chart data
+ *
+ * CRITICAL: This cache is NON-BLOCKING by design
+ * - Returns empty arrays immediately on cache miss
+ * - Never blocks waiting for blockchain APIs
+ * - Background jobs populate cache for next request
+ */
+ export declare class PortfolioCache extends BaseCache<PortfolioData> {
+ private balanceModule;
+ private marketsModule;
+ constructor(redis: any, balanceModule: any, marketsModule: any, config?: Partial<CacheConfig>);
+ /**
+ * Build Redis key for portfolio data
+ *
+ * Key strategy: Hash all pubkeys+caips to create a stable identifier
+ * Format: portfolio_v2:hash(pubkeys)
+ *
+ * This allows caching the same portfolio regardless of pubkey order
+ */
+ protected buildKey(params: Record<string, any>): string;
+ /**
+ * Simple hash function for cache keys
+ * Not cryptographic - just needs to be stable and collision-resistant
+ */
+ private simpleHash;
+ /**
+ * Fetch portfolio from blockchain APIs
+ *
+ * This is the SLOW operation that happens in the background
+ * It fetches balances for all pubkeys and enriches with pricing
+ */
+ protected fetchFromSource(params: Record<string, any>): Promise<PortfolioData>;
+ /**
+ * No legacy cache format for portfolios
+ */
+ protected getLegacyCached(params: Record<string, any>): Promise<PortfolioData | null>;
+ /**
+ * Get portfolio for a set of pubkeys
+ * Convenience method that wraps base get()
+ *
+ * RETURNS INSTANTLY - either cached data or empty arrays
+ */
+ getPortfolio(pubkeys: Array<{
+ pubkey: string;
+ caip: string;
+ }>, waitForFresh?: boolean): Promise<PortfolioData>;
+ }
+ }