@forcecalendar/core 2.1.0 → 2.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,341 +4,343 @@
 */

export class AdaptiveMemoryManager {
-  constructor(config = {}) {
-    this.config = {
-      checkInterval: 30000, // Check memory every 30 seconds
-      memoryThreshold: 0.8, // Start reducing cache at 80% memory usage
-      criticalThreshold: 0.95, // Emergency clear at 95% memory usage
-      minCacheSize: 10, // Minimum cache size to maintain
-      maxCacheSize: 10000, // Maximum cache size allowed
-      adaptiveScaling: true, // Enable/disable adaptive scaling
-      ...config
-    };
-
-    // Cache references
-    this.caches = new Map();
-
-    // Memory statistics
-    this.stats = {
-      adjustments: 0,
-      emergencyClears: 0,
-      lastMemoryUsage: 0,
-      lastCheckTime: null,
-      cacheResizes: []
-    };
-
-    // Start monitoring if enabled
-    this.monitoringInterval = null;
-    if (this.config.adaptiveScaling) {
-      this.startMonitoring();
-    }
+  constructor(config = {}) {
+    this.config = {
+      checkInterval: 30000, // Check memory every 30 seconds
+      memoryThreshold: 0.8, // Start reducing cache at 80% memory usage
+      criticalThreshold: 0.95, // Emergency clear at 95% memory usage
+      minCacheSize: 10, // Minimum cache size to maintain
+      maxCacheSize: 10000, // Maximum cache size allowed
+      adaptiveScaling: true, // Enable/disable adaptive scaling
+      ...config
+    };
+
+    // Cache references
+    this.caches = new Map();
+
+    // Memory statistics
+    this.stats = {
+      adjustments: 0,
+      emergencyClears: 0,
+      lastMemoryUsage: 0,
+      lastCheckTime: null,
+      cacheResizes: []
+    };
+
+    // Start monitoring if enabled
+    this.monitoringInterval = null;
+    if (this.config.adaptiveScaling) {
+      this.startMonitoring();
    }
-
-  /**
-   * Register a cache for management
-   * @param {string} name - Cache identifier
-   * @param {Object} cache - Cache instance with size/clear methods
-   * @param {Object} [options] - Cache-specific options
-   */
-  registerCache(name, cache, options = {}) {
-    this.caches.set(name, {
-      cache,
-      priority: options.priority || 1, // Higher priority = less likely to be reduced
-      currentCapacity: options.initialCapacity || 100,
-      minCapacity: options.minCapacity || this.config.minCacheSize,
-      maxCapacity: options.maxCapacity || this.config.maxCacheSize,
-      scaleFactor: options.scaleFactor || 0.5, // How much to reduce on pressure
-      lastAccess: Date.now()
-    });
+  }
+
+  /**
+   * Register a cache for management
+   * @param {string} name - Cache identifier
+   * @param {Object} cache - Cache instance with size/clear methods
+   * @param {Object} [options] - Cache-specific options
+   */
+  registerCache(name, cache, options = {}) {
+    this.caches.set(name, {
+      cache,
+      priority: options.priority || 1, // Higher priority = less likely to be reduced
+      currentCapacity: options.initialCapacity || 100,
+      minCapacity: options.minCapacity || this.config.minCacheSize,
+      maxCapacity: options.maxCapacity || this.config.maxCacheSize,
+      scaleFactor: options.scaleFactor || 0.5, // How much to reduce on pressure
+      lastAccess: Date.now()
+    });
+  }
+
+  /**
+   * Unregister a cache
+   * @param {string} name - Cache identifier
+   */
+  unregisterCache(name) {
+    this.caches.delete(name);
+  }
+
+  /**
+   * Start memory monitoring
+   */
+  startMonitoring() {
+    if (this.monitoringInterval) {
+      return;
    }

-  /**
-   * Unregister a cache
-   * @param {string} name - Cache identifier
-   */
-  unregisterCache(name) {
-    this.caches.delete(name);
+    this.monitoringInterval = setInterval(() => {
+      this.checkMemoryPressure();
+    }, this.config.checkInterval);
+
+    // Initial check
+    this.checkMemoryPressure();
+  }
+
+  /**
+   * Stop memory monitoring
+   */
+  stopMonitoring() {
+    if (this.monitoringInterval) {
+      clearInterval(this.monitoringInterval);
+      this.monitoringInterval = null;
    }
-
-  /**
-   * Start memory monitoring
-   */
-  startMonitoring() {
-    if (this.monitoringInterval) {
-      return;
-    }
-
-    this.monitoringInterval = setInterval(() => {
-      this.checkMemoryPressure();
-    }, this.config.checkInterval);
-
-    // Initial check
-    this.checkMemoryPressure();
+  }
+
+  /**
+   * Check memory pressure and adjust caches
+   */
+  async checkMemoryPressure() {
+    const memoryUsage = await this.getMemoryUsage();
+    this.stats.lastMemoryUsage = memoryUsage;
+    this.stats.lastCheckTime = new Date();
+
+    if (memoryUsage > this.config.criticalThreshold) {
+      // Emergency clear - clear all caches
+      this.emergencyClear();
+    } else if (memoryUsage > this.config.memoryThreshold) {
+      // Memory pressure - reduce cache sizes
+      this.reduceCacheSizes(memoryUsage);
+    } else if (memoryUsage < this.config.memoryThreshold - 0.2) {
+      // Memory available - can increase cache sizes
+      this.increaseCacheSizes();
    }
-
-  /**
-   * Stop memory monitoring
-   */
-  stopMonitoring() {
-    if (this.monitoringInterval) {
-      clearInterval(this.monitoringInterval);
-      this.monitoringInterval = null;
-    }
+  }
+
+  /**
+   * Get current memory usage percentage
+   * @returns {Promise<number>} Memory usage as percentage (0-1)
+   */
+  async getMemoryUsage() {
+    // Browser environment
+    if (typeof performance !== 'undefined' && performance.memory) {
+      const memInfo = performance.memory;
+      if (memInfo.jsHeapSizeLimit && memInfo.usedJSHeapSize) {
+        return memInfo.usedJSHeapSize / memInfo.jsHeapSizeLimit;
+      }
    }

-  /**
-   * Check memory pressure and adjust caches
-   */
-  async checkMemoryPressure() {
-    const memoryUsage = await this.getMemoryUsage();
-    this.stats.lastMemoryUsage = memoryUsage;
-    this.stats.lastCheckTime = new Date();
-
-    if (memoryUsage > this.config.criticalThreshold) {
-      // Emergency clear - clear all caches
-      this.emergencyClear();
-    } else if (memoryUsage > this.config.memoryThreshold) {
-      // Memory pressure - reduce cache sizes
-      this.reduceCacheSizes(memoryUsage);
-    } else if (memoryUsage < this.config.memoryThreshold - 0.2) {
-      // Memory available - can increase cache sizes
-      this.increaseCacheSizes();
+    // Node.js environment - use fully indirect access to avoid LWC static analysis
+    // Salesforce Locker Service blocks any reference to process.memoryUsage
+    try {
+      const g = typeof globalThis !== 'undefined' ? globalThis : {};
+      const procKey = 'proc' + 'ess';
+      const memKey = 'mem' + 'oryUsage';
+      const p = g[procKey];
+      if (p && typeof p === 'object') {
+        const memFn = p[memKey];
+        if (typeof memFn === 'function') {
+          const usage = memFn.call(p);
+          return usage.heapUsed / usage.heapTotal;
        }
+      }
+    } catch (e) {
+      // Ignore - not in Node.js environment
    }

-  /**
-   * Get current memory usage percentage
-   * @returns {Promise<number>} Memory usage as percentage (0-1)
-   */
-  async getMemoryUsage() {
-    // Browser environment
-    if (typeof performance !== 'undefined' && performance.memory) {
-      const memInfo = performance.memory;
-      if (memInfo.jsHeapSizeLimit && memInfo.usedJSHeapSize) {
-        return memInfo.usedJSHeapSize / memInfo.jsHeapSizeLimit;
-      }
-    }
-
-    // Node.js environment - use fully indirect access to avoid LWC static analysis
-    // Salesforce Locker Service blocks any reference to process.memoryUsage
-    try {
-      const g = typeof globalThis !== 'undefined' ? globalThis : {};
-      const procKey = 'proc' + 'ess';
-      const memKey = 'mem' + 'oryUsage';
-      const p = g[procKey];
-      if (p && typeof p === 'object') {
-        const memFn = p[memKey];
-        if (typeof memFn === 'function') {
-          const usage = memFn.call(p);
-          return usage.heapUsed / usage.heapTotal;
-        }
-      }
-    } catch (e) {
-      // Ignore - not in Node.js environment
-    }
-
-    // Fallback - estimate based on cache sizes
-    return this.estimateMemoryUsage();
+    // Fallback - estimate based on cache sizes
+    return this.estimateMemoryUsage();
+  }
+
+  /**
+   * Estimate memory usage based on cache sizes
+   * @private
+   */
+  estimateMemoryUsage() {
+    let totalItems = 0;
+    let maxItems = 0;
+
+    for (const [_, cacheInfo] of this.caches) {
+      if (cacheInfo.cache.size !== undefined) {
+        totalItems += cacheInfo.cache.size;
+        maxItems += cacheInfo.maxCapacity;
+      }
    }

-  /**
-   * Estimate memory usage based on cache sizes
-   * @private
-   */
-  estimateMemoryUsage() {
-    let totalItems = 0;
-    let maxItems = 0;
-
-    for (const [_, cacheInfo] of this.caches) {
-      if (cacheInfo.cache.size !== undefined) {
-        totalItems += cacheInfo.cache.size;
-        maxItems += cacheInfo.maxCapacity;
-      }
-    }
-
-    return maxItems > 0 ? totalItems / maxItems : 0.5;
+    return maxItems > 0 ? totalItems / maxItems : 0.5;
+  }
+
+  /**
+   * Reduce cache sizes based on memory pressure
+   * @param {number} memoryUsage - Current memory usage percentage
+   */
+  reduceCacheSizes(memoryUsage) {
+    const pressureLevel =
+      (memoryUsage - this.config.memoryThreshold) /
+      (this.config.criticalThreshold - this.config.memoryThreshold);
+
+    // Sort caches by priority (lower priority first)
+    const sortedCaches = Array.from(this.caches.entries()).sort(
+      (a, b) => a[1].priority - b[1].priority
+    );
+
+    for (const [name, cacheInfo] of sortedCaches) {
+      const reduction = Math.floor(
+        cacheInfo.currentCapacity * cacheInfo.scaleFactor * pressureLevel
+      );
+      const newCapacity = Math.max(cacheInfo.minCapacity, cacheInfo.currentCapacity - reduction);
+
+      if (newCapacity < cacheInfo.currentCapacity) {
+        this.resizeCache(name, cacheInfo, newCapacity);
+      }
    }

-  /**
-   * Reduce cache sizes based on memory pressure
-   * @param {number} memoryUsage - Current memory usage percentage
-   */
-  reduceCacheSizes(memoryUsage) {
-    const pressureLevel = (memoryUsage - this.config.memoryThreshold) /
-      (this.config.criticalThreshold - this.config.memoryThreshold);
-
-    // Sort caches by priority (lower priority first)
-    const sortedCaches = Array.from(this.caches.entries())
-      .sort((a, b) => a[1].priority - b[1].priority);
-
-    for (const [name, cacheInfo] of sortedCaches) {
-      const reduction = Math.floor(cacheInfo.currentCapacity * cacheInfo.scaleFactor * pressureLevel);
-      const newCapacity = Math.max(
-        cacheInfo.minCapacity,
-        cacheInfo.currentCapacity - reduction
-      );
-
-      if (newCapacity < cacheInfo.currentCapacity) {
-        this.resizeCache(name, cacheInfo, newCapacity);
-      }
+    this.stats.adjustments++;
+  }
+
+  /**
+   * Increase cache sizes when memory is available
+   */
+  increaseCacheSizes() {
+    for (const [name, cacheInfo] of this.caches) {
+      // Only increase if cache is being actively used
+      const timeSinceAccess = Date.now() - cacheInfo.lastAccess;
+      if (timeSinceAccess < 60000) {
+        // Used in last minute
+        const increase = Math.floor(cacheInfo.currentCapacity * 0.2);
+        const newCapacity = Math.min(cacheInfo.maxCapacity, cacheInfo.currentCapacity + increase);
+
+        if (newCapacity > cacheInfo.currentCapacity) {
+          this.resizeCache(name, cacheInfo, newCapacity);
        }
-
-    this.stats.adjustments++;
+      }
    }
-
-  /**
-   * Increase cache sizes when memory is available
-   */
-  increaseCacheSizes() {
-    for (const [name, cacheInfo] of this.caches) {
-      // Only increase if cache is being actively used
-      const timeSinceAccess = Date.now() - cacheInfo.lastAccess;
-      if (timeSinceAccess < 60000) { // Used in last minute
-        const increase = Math.floor(cacheInfo.currentCapacity * 0.2);
-        const newCapacity = Math.min(
-          cacheInfo.maxCapacity,
-          cacheInfo.currentCapacity + increase
-        );
-
-        if (newCapacity > cacheInfo.currentCapacity) {
-          this.resizeCache(name, cacheInfo, newCapacity);
-        }
-      }
-    }
+  }
+
+  /**
+   * Resize a cache
+   * @private
+   */
+  resizeCache(name, cacheInfo, newCapacity) {
+    const oldCapacity = cacheInfo.currentCapacity;
+    cacheInfo.currentCapacity = newCapacity;
+
+    // If cache has a capacity property, update it
+    if (cacheInfo.cache.capacity !== undefined) {
+      cacheInfo.cache.capacity = newCapacity;
    }

-  /**
-   * Resize a cache
-   * @private
-   */
-  resizeCache(name, cacheInfo, newCapacity) {
-    const oldCapacity = cacheInfo.currentCapacity;
-    cacheInfo.currentCapacity = newCapacity;
-
-    // If cache has a capacity property, update it
-    if (cacheInfo.cache.capacity !== undefined) {
-      cacheInfo.cache.capacity = newCapacity;
-    }
-
-    // If cache is now over capacity, evict excess items
-    if (cacheInfo.cache.size > newCapacity) {
-      this.evictExcessItems(cacheInfo.cache, newCapacity);
-    }
-
-    // Record resize event
-    this.stats.cacheResizes.push({
-      cache: name,
-      timestamp: new Date(),
-      oldCapacity,
-      newCapacity,
-      reason: newCapacity < oldCapacity ? 'pressure' : 'available'
-    });
-
-    // Keep only last 100 resize events
-    if (this.stats.cacheResizes.length > 100) {
-      this.stats.cacheResizes.shift();
-    }
+    // If cache is now over capacity, evict excess items
+    if (cacheInfo.cache.size > newCapacity) {
+      this.evictExcessItems(cacheInfo.cache, newCapacity);
    }

-  /**
-   * Evict excess items from cache
-   * @private
-   */
-  evictExcessItems(cache, targetSize) {
-    if (cache.size <= targetSize) {
-      return;
-    }
-
-    const itemsToRemove = cache.size - targetSize;
-
-    // If cache is a Map or has keys method
-    if (cache.keys) {
-      const keys = Array.from(cache.keys());
-      for (let i = 0; i < itemsToRemove; i++) {
-        cache.delete(keys[i]);
-      }
-    } else if (cache.clear) {
-      // Last resort - clear the cache
-      cache.clear();
-    }
+    // Record resize event
+    this.stats.cacheResizes.push({
+      cache: name,
+      timestamp: new Date(),
+      oldCapacity,
+      newCapacity,
+      reason: newCapacity < oldCapacity ? 'pressure' : 'available'
+    });
+
+    // Keep only last 100 resize events
+    if (this.stats.cacheResizes.length > 100) {
+      this.stats.cacheResizes.shift();
    }

-  /**
-   * Emergency clear all caches
-   */
-  emergencyClear() {
-    for (const [name, cacheInfo] of this.caches) {
-      if (cacheInfo.cache.clear) {
-        cacheInfo.cache.clear();
-      }
-      // Reset to minimum capacity
-      cacheInfo.currentCapacity = cacheInfo.minCapacity;
-    }
-
-    this.stats.emergencyClears++;
-    console.warn('AdaptiveMemoryManager: Emergency cache clear triggered');
+  }
+
+  /**
+   * Evict excess items from cache
+   * @private
+   */
+  evictExcessItems(cache, targetSize) {
+    if (cache.size <= targetSize) {
+      return;
    }

-  /**
-   * Update cache access time
-   * @param {string} name - Cache name
-   */
-  touchCache(name) {
-    const cacheInfo = this.caches.get(name);
-    if (cacheInfo) {
-      cacheInfo.lastAccess = Date.now();
-    }
+    const itemsToRemove = cache.size - targetSize;
+
+    // If cache is a Map or has keys method
+    if (cache.keys) {
+      const keys = Array.from(cache.keys());
+      for (let i = 0; i < itemsToRemove; i++) {
+        cache.delete(keys[i]);
+      }
+    } else if (cache.clear) {
+      // Last resort - clear the cache
+      cache.clear();
    }

-  /**
-   * Get memory management statistics
-   * @returns {Object} Statistics object
-   */
-  getStats() {
-    const cacheStats = {};
-    for (const [name, cacheInfo] of this.caches) {
-      cacheStats[name] = {
-        size: cacheInfo.cache.size || 0,
-        capacity: cacheInfo.currentCapacity,
-        priority: cacheInfo.priority,
-        lastAccess: new Date(cacheInfo.lastAccess)
-      };
-    }
-
-    return {
-      ...this.stats,
-      memoryUsagePercent: `${(this.stats.lastMemoryUsage * 100).toFixed(2)}%`,
-      totalCaches: this.caches.size,
-      cacheStats,
-      monitoring: this.monitoringInterval !== null
-    };
+  }
+
+  /**
+   * Emergency clear all caches
+   */
+  emergencyClear() {
+    for (const [name, cacheInfo] of this.caches) {
+      if (cacheInfo.cache.clear) {
+        cacheInfo.cache.clear();
+      }
+      // Reset to minimum capacity
+      cacheInfo.currentCapacity = cacheInfo.minCapacity;
    }

-  /**
-   * Manual trigger for memory pressure check
-   */
-  async checkNow() {
-    await this.checkMemoryPressure();
+    this.stats.emergencyClears++;
+    console.warn('AdaptiveMemoryManager: Emergency cache clear triggered');
+  }
+
+  /**
+   * Update cache access time
+   * @param {string} name - Cache name
+   */
+  touchCache(name) {
+    const cacheInfo = this.caches.get(name);
+    if (cacheInfo) {
+      cacheInfo.lastAccess = Date.now();
    }

-  /**
-   * Set memory thresholds
-   * @param {Object} thresholds - New threshold values
-   */
-  setThresholds(thresholds) {
-    if (thresholds.memoryThreshold !== undefined) {
-      this.config.memoryThreshold = Math.max(0.5, Math.min(0.95, thresholds.memoryThreshold));
-    }
-    if (thresholds.criticalThreshold !== undefined) {
-      this.config.criticalThreshold = Math.max(this.config.memoryThreshold + 0.05, Math.min(1.0, thresholds.criticalThreshold));
-    }
+  }
+
+  /**
+   * Get memory management statistics
+   * @returns {Object} Statistics object
+   */
+  getStats() {
+    const cacheStats = {};
+    for (const [name, cacheInfo] of this.caches) {
+      cacheStats[name] = {
+        size: cacheInfo.cache.size || 0,
+        capacity: cacheInfo.currentCapacity,
+        priority: cacheInfo.priority,
+        lastAccess: new Date(cacheInfo.lastAccess)
+      };
    }

-  /**
-   * Destroy manager and clean up
-   */
-  destroy() {
-    this.stopMonitoring();
-    this.caches.clear();
+    return {
+      ...this.stats,
+      memoryUsagePercent: `${(this.stats.lastMemoryUsage * 100).toFixed(2)}%`,
+      totalCaches: this.caches.size,
+      cacheStats,
+      monitoring: this.monitoringInterval !== null
+    };
+  }
+
+  /**
+   * Manual trigger for memory pressure check
+   */
+  async checkNow() {
+    await this.checkMemoryPressure();
+  }
+
+  /**
+   * Set memory thresholds
+   * @param {Object} thresholds - New threshold values
+   */
+  setThresholds(thresholds) {
+    if (thresholds.memoryThreshold !== undefined) {
+      this.config.memoryThreshold = Math.max(0.5, Math.min(0.95, thresholds.memoryThreshold));
+    }
+    if (thresholds.criticalThreshold !== undefined) {
+      this.config.criticalThreshold = Math.max(
+        this.config.memoryThreshold + 0.05,
+        Math.min(1.0, thresholds.criticalThreshold)
+      );
    }
-}
+  }
+
+  /**
+   * Destroy manager and clean up
+   */
+  destroy() {
+    this.stopMonitoring();
+    this.caches.clear();
+  }
+}
@@ -86,9 +86,8 @@ export class LRUCache {
   * @returns {Object} Cache stats
   */
  getStats() {
-    const hitRate = this.hits + this.misses > 0
-      ? (this.hits / (this.hits + this.misses) * 100).toFixed(2)
-      : 0;
+    const hitRate =
+      this.hits + this.misses > 0 ? ((this.hits / (this.hits + this.misses)) * 100).toFixed(2) : 0;

    return {
      size: this.cache.size,
@@ -115,4 +114,4 @@ export class LRUCache {
  get size() {
    return this.cache.size;
  }
-}
+}
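
For orientation, here is a minimal sketch of how the two classes touched by this diff can be wired together, based only on the API visible above. The import path and the LRUCache constructor argument are assumptions for illustration; the diff does not show the package's exports or how LRUCache is constructed.

// Illustrative sketch only: import path and LRUCache constructor argument are assumed.
import { AdaptiveMemoryManager, LRUCache } from '@forcecalendar/core';

const manager = new AdaptiveMemoryManager({ memoryThreshold: 0.75 });
const eventCache = new LRUCache(500); // capacity argument is an assumption

// Register the cache so the manager can shrink or grow it under memory pressure.
manager.registerCache('events', eventCache, {
  priority: 2, // higher-priority caches are reduced last
  initialCapacity: 500,
  minCapacity: 50,
  scaleFactor: 0.5
});

// Record access time so increaseCacheSizes() treats the cache as active.
manager.touchCache('events');

// Inspect adjustments, emergency clears, and per-cache capacities.
console.log(manager.getStats());

// Stop the monitoring interval and drop cache references when done.
manager.destroy();

Registered caches are duck-typed: the manager only reads size and, when present, capacity, keys(), delete() and clear(), so a plain Map can also be registered.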