@pioneer-platform/pioneer-cache 1.0.3 → 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
1
1
  # @pioneer-platform/pioneer-cache
2
2
 
3
+ ## 1.0.5
4
+
5
+ ### Patch Changes
6
+
7
+ - cache work
8
+ - Updated dependencies
9
+ - @pioneer-platform/redis-queue@8.11.3
10
+
11
+ ## 1.0.4
12
+
13
+ ### Patch Changes
14
+
15
+ - cache work
16
+ - Updated dependencies
17
+ - @pioneer-platform/redis-queue@8.11.2
18
+
3
19
  ## 1.0.3
4
20
 
5
21
  ### Patch Changes
@@ -151,10 +151,11 @@ class BaseCache {
151
151
  const t0 = Date.now();
152
152
  try {
153
153
  // Redis timeout for cache reads
154
- // PERFORMANCE FIX: Reduced from 10000ms to 100ms
155
- // Local Redis should respond in <10ms. 100ms timeout prevents cascading failures
156
- // when Redis is overloaded while still catching actual connection issues.
157
- const timeoutMs = 100;
154
+ // PERFORMANCE: Generous timeout for connection pool under concurrent load
155
+ // - 1000ms accommodates worst-case scenarios with connection pool
156
+ // - Prevents false cache misses while still failing reasonably fast
157
+ // - Redis itself averages <1ms, but ioredis queuing can add latency
158
+ const timeoutMs = 1000;
158
159
  const cached = await Promise.race([
159
160
  this.redis.get(key),
160
161
  new Promise((resolve) => setTimeout(() => {
@@ -257,20 +258,34 @@ class BaseCache {
257
258
  }
258
259
  return;
259
260
  }
260
- const job = {
261
- type: `REFRESH_${this.config.name.toUpperCase()}`,
262
- key: this.buildKey(params),
263
- params,
264
- priority,
265
- retryCount: 0,
266
- timestamp: Date.now()
267
- };
268
- // Queue job async (don't wait)
261
+ const key = this.buildKey(params);
262
+ // RATE LIMITING: Prevent duplicate refresh jobs within time window
263
+ // Use Redis SET NX PX for atomic rate limiting
264
+ const rateLimitKey = `refresh_lock:${key}`;
265
+ const rateLimitWindow = 5000; // 5 seconds - don't queue same refresh more than once per 5s
266
+ // Queue job async (don't wait) with rate limiting
269
267
  setImmediate(async () => {
270
268
  try {
269
+ // Try to acquire rate limit lock (atomic operation)
270
+ const acquired = await this.redis.set(rateLimitKey, '1', 'PX', rateLimitWindow, // Expire in milliseconds
271
+ 'NX' // Only set if not exists
272
+ );
273
+ if (!acquired) {
274
+ // Rate limit hit - job already queued recently
275
+ log.debug(tag, `⏱️ Rate limit: Skip queuing ${key} (already queued in last ${rateLimitWindow}ms)`);
276
+ return;
277
+ }
278
+ const job = {
279
+ type: `REFRESH_${this.config.name.toUpperCase()}`,
280
+ key,
281
+ params,
282
+ priority,
283
+ retryCount: 0,
284
+ timestamp: Date.now()
285
+ };
271
286
  await this.redisQueue.createWork(this.config.queueName, job);
272
287
  if (this.config.logRefreshJobs) {
273
- log.debug(tag, `Queued refresh job: ${job.key} (priority: ${priority})`);
288
+ log.debug(tag, `✅ Queued refresh job: ${job.key} (priority: ${priority})`);
274
289
  }
275
290
  }
276
291
  catch (error) {
@@ -8,6 +8,7 @@ import type { HealthCheckResult } from '../types';
8
8
  */
9
9
  export interface CacheManagerConfig {
10
10
  redis: any;
11
+ redisQueue?: any;
11
12
  balanceModule?: any;
12
13
  markets?: any;
13
14
  enableBalanceCache?: boolean;
@@ -21,6 +22,7 @@ export interface CacheManagerConfig {
21
22
  */
22
23
  export declare class CacheManager {
23
24
  private redis;
25
+ private redisQueue;
24
26
  private balanceCache?;
25
27
  private priceCache?;
26
28
  private portfolioCache?;
@@ -21,6 +21,7 @@ class CacheManager {
21
21
  constructor(config) {
22
22
  this.workers = [];
23
23
  this.redis = config.redis;
24
+ this.redisQueue = config.redisQueue || config.redis; // Fallback to main redis if not provided
24
25
  // Initialize Balance Cache
25
26
  if (config.enableBalanceCache !== false && config.balanceModule) {
26
27
  this.balanceCache = new balance_cache_1.BalanceCache(this.redis, config.balanceModule);
@@ -69,7 +70,8 @@ class CacheManager {
69
70
  }
70
71
  // Start unified worker if we have any caches with queues
71
72
  if (cacheRegistry.size > 0) {
72
- const worker = await (0, refresh_worker_1.startUnifiedWorker)(this.redis, cacheRegistry, 'cache-refresh', // Unified queue name
73
+ const worker = await (0, refresh_worker_1.startUnifiedWorker)(this.redisQueue, // Use dedicated queue client for blocking operations
74
+ cacheRegistry, 'cache-refresh', // Unified queue name
73
75
  {
74
76
  maxRetries: 3,
75
77
  retryDelay: 5000,
@@ -22,6 +22,7 @@ export declare class PriceCache extends BaseCache<PriceData> {
22
22
  /**
23
23
  * Fetch price from markets API using CAIP-first approach
24
24
  * FIX #7: Graceful handling of zero prices to prevent cache disruption
25
+ * FIX #8: Cache zero prices for unpriceable tokens to prevent infinite retry loops
25
26
  */
26
27
  protected fetchFromSource(params: Record<string, any>): Promise<PriceData>;
27
28
  /**
@@ -55,6 +55,7 @@ class PriceCache extends base_cache_1.BaseCache {
55
55
  /**
56
56
  * Fetch price from markets API using CAIP-first approach
57
57
  * FIX #7: Graceful handling of zero prices to prevent cache disruption
58
+ * FIX #8: Cache zero prices for unpriceable tokens to prevent infinite retry loops
58
59
  */
59
60
  async fetchFromSource(params) {
60
61
  const tag = this.TAG + 'fetchFromSource | ';
@@ -63,32 +64,36 @@ class PriceCache extends base_cache_1.BaseCache {
63
64
  // Use CAIP-first API (no symbol conversion needed!)
64
65
  // This directly queries the markets module with CAIP identifiers
65
66
  const price = await this.markets.getAssetPriceByCaip(caip);
66
- // FIX #7: Gracefully handle zero prices without throwing
67
- // This prevents disrupting batch operations during API rate limits
68
- if (isNaN(price) || price <= 0) {
69
- log.warn(tag, `Price fetch returned $${price} for ${caip} (likely API timeout or rate limit) - returning stale cache if available`);
67
+ // FIX #8: Accept zero prices - they are VALID for unpriceable tokens
68
+ // Zero means "this token has no market value and we should stop trying to price it"
69
+ // Changed from: price <= 0 to: price < 0
70
+ if (isNaN(price) || price < 0) {
71
+ log.warn(tag, `Price fetch returned invalid price $${price} for ${caip} - returning stale cache if available`);
70
72
  // Try to get stale cached value instead of failing
71
73
  const key = this.buildKey(params);
72
74
  const cachedValue = await this.getCached(key);
73
- if (cachedValue && cachedValue.value.price > 0) {
75
+ if (cachedValue && cachedValue.value.price >= 0) {
74
76
  log.info(tag, `Returning stale cached price for ${caip}: $${cachedValue.value.price}`);
75
77
  return cachedValue.value;
76
78
  }
77
79
  // Try legacy cache as fallback
78
80
  const legacyValue = await this.getLegacyCached(params);
79
- if (legacyValue && legacyValue.price > 0) {
81
+ if (legacyValue && legacyValue.price >= 0) {
80
82
  log.info(tag, `Returning legacy cached price for ${caip}: $${legacyValue.price}`);
81
83
  return legacyValue;
82
84
  }
83
- // Last resort: return zero price but don't cache it
84
- log.warn(tag, `No cached price available for ${caip}, returning zero`);
85
+ // Last resort: throw error for truly invalid prices
86
+ log.warn(tag, `No cached price available for ${caip}, invalid price: $${price}`);
85
87
  throw new Error(`No valid price available for ${caip}`);
86
88
  }
87
89
  log.debug(tag, `Fetched price for ${caip}: $${price}`);
90
+ // FIX #8: Mark zero prices with special source to indicate they are unpriceable tokens
91
+ // This allows us to track and monitor unpriceable token caching
92
+ const source = price === 0 ? 'unpriceable' : 'markets-caip';
88
93
  return {
89
94
  caip,
90
95
  price,
91
- source: 'markets-caip'
96
+ source
92
97
  };
93
98
  }
94
99
  catch (error) {
@@ -35,6 +35,7 @@ export declare class RefreshWorker {
35
35
  stop(): Promise<void>;
36
36
  /**
37
37
  * Poll for next job from the queue
38
+ * FIX #3: Atomic flag check and job processing to prevent race conditions
38
39
  */
39
40
  private poll;
40
41
  /**
@@ -72,6 +72,7 @@ class RefreshWorker {
72
72
  }
73
73
  /**
74
74
  * Poll for next job from the queue
75
+ * FIX #3: Atomic flag check and job processing to prevent race conditions
75
76
  */
76
77
  async poll() {
77
78
  const tag = TAG + 'poll | ';
@@ -79,6 +80,7 @@ class RefreshWorker {
79
80
  return;
80
81
  }
81
82
  try {
83
+ // FIX #3: Atomic check-and-set to prevent race conditions
82
84
  // Don't poll if already processing
83
85
  if (this.isProcessing) {
84
86
  this.schedulePoll();
@@ -87,14 +89,20 @@ class RefreshWorker {
87
89
  // Get next job from queue
88
90
  const work = await this.redisQueue.getWork(this.config.queueName, 1);
89
91
  if (work) {
92
+ // Set processing flag BEFORE processing to prevent double execution
90
93
  this.isProcessing = true;
91
- await this.processJob(work);
92
- this.isProcessing = false;
94
+ try {
95
+ await this.processJob(work);
96
+ }
97
+ finally {
98
+ // Always clear flag, even if processJob throws
99
+ this.isProcessing = false;
100
+ }
93
101
  }
94
102
  }
95
103
  catch (error) {
96
104
  log.error(tag, 'Error in poll loop:', error.message);
97
- this.isProcessing = false;
105
+ // Flag already cleared in inner finally block
98
106
  }
99
107
  finally {
100
108
  // Schedule next poll
@@ -138,7 +146,18 @@ class RefreshWorker {
138
146
  catch (error) {
139
147
  const processingTime = Date.now() - startTime;
140
148
  log.error(tag, `❌ Failed to process ${job.type} after ${processingTime}ms:`, error);
141
- // Retry logic
149
+ // FIX #1: Detect permanent failures (don't retry unpriceable tokens)
150
+ const errorMsg = error instanceof Error ? error.message : String(error);
151
+ const isPermanentFailure = (errorMsg.includes('No valid price available') ||
152
+ errorMsg.includes('unpriceable') ||
153
+ errorMsg.includes('not found') ||
154
+ errorMsg.includes('404'));
155
+ if (isPermanentFailure) {
156
+ log.warn(tag, `Permanent failure detected for ${job.type}, will not retry: ${errorMsg}`);
157
+ // Don't retry - job complete (failed permanently)
158
+ return;
159
+ }
160
+ // Retry logic (only for transient failures)
142
161
  if ((job.retryCount || 0) < this.config.maxRetries) {
143
162
  const newRetryCount = (job.retryCount || 0) + 1;
144
163
  log.info(tag, `Retrying job (attempt ${newRetryCount}/${this.config.maxRetries})`);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@pioneer-platform/pioneer-cache",
3
- "version": "1.0.3",
3
+ "version": "1.0.5",
4
4
  "description": "Unified caching system for Pioneer platform with Redis backend",
5
5
  "main": "./dist/index.js",
6
6
  "types": "./dist/index.d.ts",
@@ -21,7 +21,7 @@
21
21
  "license": "MIT",
22
22
  "dependencies": {
23
23
  "@pioneer-platform/loggerdog": "^8.11.0",
24
- "@pioneer-platform/redis-queue": "^8.11.1",
24
+ "@pioneer-platform/redis-queue": "^8.11.3",
25
25
  "@pioneer-platform/default-redis": "^8.11.7"
26
26
  },
27
27
  "devDependencies": {
@@ -185,10 +185,11 @@ export abstract class BaseCache<T> {
185
185
 
186
186
  try {
187
187
  // Redis timeout for cache reads
188
- // PERFORMANCE FIX: Reduced from 10000ms to 100ms
189
- // Local Redis should respond in <10ms. 100ms timeout prevents cascading failures
190
- // when Redis is overloaded while still catching actual connection issues.
191
- const timeoutMs = 100;
188
+ // PERFORMANCE: Generous timeout for connection pool under concurrent load
189
+ // - 1000ms accommodates worst-case scenarios with connection pool
190
+ // - Prevents false cache misses while still failing reasonably fast
191
+ // - Redis itself averages <1ms, but ioredis queuing can add latency
192
+ const timeoutMs = 1000;
192
193
  const cached = await Promise.race([
193
194
  this.redis.get(key),
194
195
  new Promise<null>((resolve) => setTimeout(() => {
@@ -307,22 +308,43 @@ export abstract class BaseCache<T> {
307
308
  return;
308
309
  }
309
310
 
310
- const job: RefreshJob = {
311
- type: `REFRESH_${this.config.name.toUpperCase()}`,
312
- key: this.buildKey(params),
313
- params,
314
- priority,
315
- retryCount: 0,
316
- timestamp: Date.now()
317
- };
311
+ const key = this.buildKey(params);
318
312
 
319
- // Queue job async (don't wait)
313
+ // RATE LIMITING: Prevent duplicate refresh jobs within time window
314
+ // Use Redis SET NX EX for atomic rate limiting
315
+ const rateLimitKey = `refresh_lock:${key}`;
316
+ const rateLimitWindow = 5000; // 5 seconds - don't queue same refresh more than once per 5s
317
+
318
+ // Queue job async (don't wait) with rate limiting
320
319
  setImmediate(async () => {
321
320
  try {
321
+ // Try to acquire rate limit lock (atomic operation)
322
+ const acquired = await this.redis.set(
323
+ rateLimitKey,
324
+ '1',
325
+ 'PX', rateLimitWindow, // Expire in milliseconds
326
+ 'NX' // Only set if not exists
327
+ );
328
+
329
+ if (!acquired) {
330
+ // Rate limit hit - job already queued recently
331
+ log.debug(tag, `⏱️ Rate limit: Skip queuing ${key} (already queued in last ${rateLimitWindow}ms)`);
332
+ return;
333
+ }
334
+
335
+ const job: RefreshJob = {
336
+ type: `REFRESH_${this.config.name.toUpperCase()}`,
337
+ key,
338
+ params,
339
+ priority,
340
+ retryCount: 0,
341
+ timestamp: Date.now()
342
+ };
343
+
322
344
  await this.redisQueue.createWork(this.config.queueName, job);
323
345
 
324
346
  if (this.config.logRefreshJobs) {
325
- log.debug(tag, `Queued refresh job: ${job.key} (priority: ${priority})`);
347
+ log.debug(tag, `✅ Queued refresh job: ${job.key} (priority: ${priority})`);
326
348
  }
327
349
 
328
350
  } catch (error) {
@@ -21,6 +21,7 @@ const TAG = ' | CacheManager | ';
21
21
  */
22
22
  export interface CacheManagerConfig {
23
23
  redis: any;
24
+ redisQueue?: any; // Dedicated Redis client for blocking queue operations (brpop, etc.)
24
25
  balanceModule?: any; // Optional: if not provided, balance cache won't be initialized
25
26
  markets?: any; // Optional: if not provided, price cache won't be initialized
26
27
  enableBalanceCache?: boolean;
@@ -35,6 +36,7 @@ export interface CacheManagerConfig {
35
36
  */
36
37
  export class CacheManager {
37
38
  private redis: any;
39
+ private redisQueue: any; // Dedicated client for blocking operations
38
40
  private balanceCache?: BalanceCache;
39
41
  private priceCache?: PriceCache;
40
42
  private portfolioCache?: PortfolioCache;
@@ -43,6 +45,7 @@ export class CacheManager {
43
45
 
44
46
  constructor(config: CacheManagerConfig) {
45
47
  this.redis = config.redis;
48
+ this.redisQueue = config.redisQueue || config.redis; // Fallback to main redis if not provided
46
49
 
47
50
  // Initialize Balance Cache
48
51
  if (config.enableBalanceCache !== false && config.balanceModule) {
@@ -105,7 +108,7 @@ export class CacheManager {
105
108
  // Start unified worker if we have any caches with queues
106
109
  if (cacheRegistry.size > 0) {
107
110
  const worker = await startUnifiedWorker(
108
- this.redis,
111
+ this.redisQueue, // Use dedicated queue client for blocking operations
109
112
  cacheRegistry,
110
113
  'cache-refresh', // Unified queue name
111
114
  {
@@ -71,6 +71,7 @@ export class PriceCache extends BaseCache<PriceData> {
71
71
  /**
72
72
  * Fetch price from markets API using CAIP-first approach
73
73
  * FIX #7: Graceful handling of zero prices to prevent cache disruption
74
+ * FIX #8: Cache zero prices for unpriceable tokens to prevent infinite retry loops
74
75
  */
75
76
  protected async fetchFromSource(params: Record<string, any>): Promise<PriceData> {
76
77
  const tag = this.TAG + 'fetchFromSource | ';
@@ -82,38 +83,43 @@ export class PriceCache extends BaseCache<PriceData> {
82
83
  // This directly queries the markets module with CAIP identifiers
83
84
  const price = await this.markets.getAssetPriceByCaip(caip);
84
85
 
85
- // FIX #7: Gracefully handle zero prices without throwing
86
- // This prevents disrupting batch operations during API rate limits
87
- if (isNaN(price) || price <= 0) {
88
- log.warn(tag, `Price fetch returned $${price} for ${caip} (likely API timeout or rate limit) - returning stale cache if available`);
86
+ // FIX #8: Accept zero prices - they are VALID for unpriceable tokens
87
+ // Zero means "this token has no market value and we should stop trying to price it"
88
+ // Changed from: price <= 0 to: price < 0
89
+ if (isNaN(price) || price < 0) {
90
+ log.warn(tag, `Price fetch returned invalid price $${price} for ${caip} - returning stale cache if available`);
89
91
 
90
92
  // Try to get stale cached value instead of failing
91
93
  const key = this.buildKey(params);
92
94
  const cachedValue = await this.getCached(key);
93
95
 
94
- if (cachedValue && cachedValue.value.price > 0) {
96
+ if (cachedValue && cachedValue.value.price >= 0) {
95
97
  log.info(tag, `Returning stale cached price for ${caip}: $${cachedValue.value.price}`);
96
98
  return cachedValue.value;
97
99
  }
98
100
 
99
101
  // Try legacy cache as fallback
100
102
  const legacyValue = await this.getLegacyCached(params);
101
- if (legacyValue && legacyValue.price > 0) {
103
+ if (legacyValue && legacyValue.price >= 0) {
102
104
  log.info(tag, `Returning legacy cached price for ${caip}: $${legacyValue.price}`);
103
105
  return legacyValue;
104
106
  }
105
107
 
106
- // Last resort: return zero price but don't cache it
107
- log.warn(tag, `No cached price available for ${caip}, returning zero`);
108
+ // Last resort: throw error for truly invalid prices
109
+ log.warn(tag, `No cached price available for ${caip}, invalid price: $${price}`);
108
110
  throw new Error(`No valid price available for ${caip}`);
109
111
  }
110
112
 
111
113
  log.debug(tag, `Fetched price for ${caip}: $${price}`);
112
114
 
115
+ // FIX #8: Mark zero prices with special source to indicate they are unpriceable tokens
116
+ // This allows us to track and monitor unpriceable token caching
117
+ const source = price === 0 ? 'unpriceable' : 'markets-caip';
118
+
113
119
  return {
114
120
  caip,
115
121
  price,
116
- source: 'markets-caip'
122
+ source
117
123
  };
118
124
 
119
125
  } catch (error) {
@@ -100,6 +100,7 @@ export class RefreshWorker {
100
100
 
101
101
  /**
102
102
  * Poll for next job from the queue
103
+ * FIX #3: Atomic flag check and job processing to prevent race conditions
103
104
  */
104
105
  private async poll(): Promise<void> {
105
106
  const tag = TAG + 'poll | ';
@@ -109,6 +110,7 @@ export class RefreshWorker {
109
110
  }
110
111
 
111
112
  try {
113
+ // FIX #3: Atomic check-and-set to prevent race conditions
112
114
  // Don't poll if already processing
113
115
  if (this.isProcessing) {
114
116
  this.schedulePoll();
@@ -119,14 +121,19 @@ export class RefreshWorker {
119
121
  const work = await this.redisQueue.getWork(this.config.queueName, 1);
120
122
 
121
123
  if (work) {
124
+ // Set processing flag BEFORE processing to prevent double execution
122
125
  this.isProcessing = true;
123
- await this.processJob(work);
124
- this.isProcessing = false;
126
+ try {
127
+ await this.processJob(work);
128
+ } finally {
129
+ // Always clear flag, even if processJob throws
130
+ this.isProcessing = false;
131
+ }
125
132
  }
126
133
 
127
134
  } catch (error: any) {
128
135
  log.error(tag, 'Error in poll loop:', error.message);
129
- this.isProcessing = false;
136
+ // Flag already cleared in inner finally block
130
137
  } finally {
131
138
  // Schedule next poll
132
139
  this.schedulePoll();
@@ -180,7 +187,22 @@ export class RefreshWorker {
180
187
  const processingTime = Date.now() - startTime;
181
188
  log.error(tag, `❌ Failed to process ${job.type} after ${processingTime}ms:`, error);
182
189
 
183
- // Retry logic
190
+ // FIX #1: Detect permanent failures (don't retry unpriceable tokens)
191
+ const errorMsg = error instanceof Error ? error.message : String(error);
192
+ const isPermanentFailure = (
193
+ errorMsg.includes('No valid price available') ||
194
+ errorMsg.includes('unpriceable') ||
195
+ errorMsg.includes('not found') ||
196
+ errorMsg.includes('404')
197
+ );
198
+
199
+ if (isPermanentFailure) {
200
+ log.warn(tag, `Permanent failure detected for ${job.type}, will not retry: ${errorMsg}`);
201
+ // Don't retry - job complete (failed permanently)
202
+ return;
203
+ }
204
+
205
+ // Retry logic (only for transient failures)
184
206
  if ((job.retryCount || 0) < this.config.maxRetries) {
185
207
  const newRetryCount = (job.retryCount || 0) + 1;
186
208
  log.info(tag, `Retrying job (attempt ${newRetryCount}/${this.config.maxRetries})`);