@soulcraft/brainy 0.40.0 → 0.43.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152)
  1. package/README.md +605 -194
  2. package/dist/augmentations/conduitAugmentations.js +1158 -0
  3. package/dist/augmentations/conduitAugmentations.js.map +1 -0
  4. package/dist/augmentations/memoryAugmentations.d.ts +2 -0
  5. package/dist/augmentations/memoryAugmentations.d.ts.map +1 -1
  6. package/dist/augmentations/memoryAugmentations.js +270 -0
  7. package/dist/augmentations/memoryAugmentations.js.map +1 -0
  8. package/dist/augmentations/serverSearchAugmentations.js +531 -0
  9. package/dist/augmentations/serverSearchAugmentations.js.map +1 -0
  10. package/dist/browserFramework.d.ts +15 -0
  11. package/dist/demo.d.ts +106 -0
  12. package/dist/examples/basicUsage.js +118 -0
  13. package/dist/examples/basicUsage.js.map +1 -0
  14. package/dist/hnsw/distributedSearch.js +452 -0
  15. package/dist/hnsw/distributedSearch.js.map +1 -0
  16. package/dist/hnsw/hnswIndex.js +602 -0
  17. package/dist/hnsw/hnswIndex.js.map +1 -0
  18. package/dist/hnsw/hnswIndexOptimized.js +471 -0
  19. package/dist/hnsw/hnswIndexOptimized.js.map +1 -0
  20. package/dist/hnsw/optimizedHNSWIndex.js +313 -0
  21. package/dist/hnsw/optimizedHNSWIndex.js.map +1 -0
  22. package/dist/hnsw/partitionedHNSWIndex.js +304 -0
  23. package/dist/hnsw/partitionedHNSWIndex.js.map +1 -0
  24. package/dist/hnsw/scaledHNSWSystem.js +559 -0
  25. package/dist/hnsw/scaledHNSWSystem.js.map +1 -0
  26. package/dist/index.d.ts +5 -3
  27. package/dist/index.js +81 -0
  28. package/dist/mcp/brainyMCPAdapter.js +142 -0
  29. package/dist/mcp/brainyMCPAdapter.js.map +1 -0
  30. package/dist/mcp/brainyMCPService.js +248 -0
  31. package/dist/mcp/brainyMCPService.js.map +1 -0
  32. package/dist/mcp/index.js +17 -0
  33. package/dist/mcp/index.js.map +1 -0
  34. package/dist/mcp/mcpAugmentationToolset.js +180 -0
  35. package/dist/mcp/mcpAugmentationToolset.js.map +1 -0
  36. package/dist/storage/adapters/baseStorageAdapter.js +349 -0
  37. package/dist/storage/adapters/baseStorageAdapter.js.map +1 -0
  38. package/dist/storage/adapters/batchS3Operations.js +287 -0
  39. package/dist/storage/adapters/batchS3Operations.js.map +1 -0
  40. package/dist/storage/adapters/fileSystemStorage.js +846 -0
  41. package/dist/storage/adapters/fileSystemStorage.js.map +1 -0
  42. package/dist/storage/adapters/memoryStorage.js +532 -0
  43. package/dist/storage/adapters/memoryStorage.js.map +1 -0
  44. package/dist/storage/adapters/opfsStorage.d.ts.map +1 -1
  45. package/dist/storage/adapters/opfsStorage.js +1118 -0
  46. package/dist/storage/adapters/opfsStorage.js.map +1 -0
  47. package/dist/storage/adapters/optimizedS3Search.d.ts +79 -0
  48. package/dist/storage/adapters/optimizedS3Search.d.ts.map +1 -0
  49. package/dist/storage/adapters/optimizedS3Search.js +248 -0
  50. package/dist/storage/adapters/optimizedS3Search.js.map +1 -0
  51. package/dist/storage/adapters/s3CompatibleStorage.d.ts +21 -0
  52. package/dist/storage/adapters/s3CompatibleStorage.d.ts.map +1 -1
  53. package/dist/storage/adapters/s3CompatibleStorage.js +2026 -0
  54. package/dist/storage/adapters/s3CompatibleStorage.js.map +1 -0
  55. package/dist/storage/baseStorage.d.ts +1 -0
  56. package/dist/storage/baseStorage.d.ts.map +1 -1
  57. package/dist/storage/baseStorage.js +603 -0
  58. package/dist/storage/baseStorage.js.map +1 -0
  59. package/dist/storage/cacheManager.js +1306 -0
  60. package/dist/storage/cacheManager.js.map +1 -0
  61. package/dist/storage/enhancedCacheManager.js +520 -0
  62. package/dist/storage/enhancedCacheManager.js.map +1 -0
  63. package/dist/storage/readOnlyOptimizations.js +425 -0
  64. package/dist/storage/readOnlyOptimizations.js.map +1 -0
  65. package/dist/storage/storageFactory.d.ts +0 -1
  66. package/dist/storage/storageFactory.d.ts.map +1 -1
  67. package/dist/storage/storageFactory.js +227 -0
  68. package/dist/storage/storageFactory.js.map +1 -0
  69. package/dist/types/augmentations.js +16 -0
  70. package/dist/types/augmentations.js.map +1 -0
  71. package/dist/types/brainyDataInterface.js +8 -0
  72. package/dist/types/brainyDataInterface.js.map +1 -0
  73. package/dist/types/distributedTypes.js +6 -0
  74. package/dist/types/distributedTypes.js.map +1 -0
  75. package/dist/types/fileSystemTypes.js +8 -0
  76. package/dist/types/fileSystemTypes.js.map +1 -0
  77. package/dist/types/graphTypes.js +247 -0
  78. package/dist/types/graphTypes.js.map +1 -0
  79. package/dist/types/mcpTypes.js +22 -0
  80. package/dist/types/mcpTypes.js.map +1 -0
  81. package/dist/types/paginationTypes.js +5 -0
  82. package/dist/types/paginationTypes.js.map +1 -0
  83. package/dist/types/pipelineTypes.js +7 -0
  84. package/dist/types/pipelineTypes.js.map +1 -0
  85. package/dist/types/tensorflowTypes.js +6 -0
  86. package/dist/types/tensorflowTypes.js.map +1 -0
  87. package/dist/unified.js +52 -128048
  88. package/dist/utils/autoConfiguration.js +341 -0
  89. package/dist/utils/autoConfiguration.js.map +1 -0
  90. package/dist/utils/cacheAutoConfig.js +261 -0
  91. package/dist/utils/cacheAutoConfig.js.map +1 -0
  92. package/dist/utils/crypto.js +45 -0
  93. package/dist/utils/crypto.js.map +1 -0
  94. package/dist/utils/distance.js +239 -0
  95. package/dist/utils/distance.js.map +1 -0
  96. package/dist/utils/embedding.d.ts.map +1 -1
  97. package/dist/utils/embedding.js +702 -0
  98. package/dist/utils/embedding.js.map +1 -0
  99. package/dist/utils/environment.js +75 -0
  100. package/dist/utils/environment.js.map +1 -0
  101. package/dist/utils/fieldNameTracking.js +90 -0
  102. package/dist/utils/fieldNameTracking.js.map +1 -0
  103. package/dist/utils/index.d.ts +1 -0
  104. package/dist/utils/index.d.ts.map +1 -1
  105. package/dist/utils/index.js +8 -0
  106. package/dist/utils/index.js.map +1 -0
  107. package/dist/utils/jsonProcessing.js +179 -0
  108. package/dist/utils/jsonProcessing.js.map +1 -0
  109. package/dist/utils/logger.d.ts +45 -92
  110. package/dist/utils/logger.d.ts.map +1 -1
  111. package/dist/utils/logger.js +129 -0
  112. package/dist/utils/logger.js.map +1 -0
  113. package/dist/utils/operationUtils.js +126 -0
  114. package/dist/utils/operationUtils.js.map +1 -0
  115. package/dist/utils/robustModelLoader.d.ts +14 -0
  116. package/dist/utils/robustModelLoader.d.ts.map +1 -1
  117. package/dist/utils/robustModelLoader.js +537 -0
  118. package/dist/utils/robustModelLoader.js.map +1 -0
  119. package/dist/utils/searchCache.js +248 -0
  120. package/dist/utils/searchCache.js.map +1 -0
  121. package/dist/utils/statistics.js +25 -0
  122. package/dist/utils/statistics.js.map +1 -0
  123. package/dist/utils/statisticsCollector.js +224 -0
  124. package/dist/utils/statisticsCollector.js.map +1 -0
  125. package/dist/utils/textEncoding.js +309 -0
  126. package/dist/utils/textEncoding.js.map +1 -0
  127. package/dist/utils/typeUtils.js +40 -0
  128. package/dist/utils/typeUtils.js.map +1 -0
  129. package/dist/utils/version.d.ts +15 -3
  130. package/dist/utils/version.d.ts.map +1 -1
  131. package/dist/utils/version.js +24 -0
  132. package/dist/utils/version.js.map +1 -0
  133. package/dist/utils/workerUtils.js +458 -0
  134. package/dist/utils/workerUtils.js.map +1 -0
  135. package/package.json +23 -15
  136. package/dist/brainy.js +0 -90220
  137. package/dist/brainy.min.js +0 -12511
  138. package/dist/patched-platform-node.d.ts +0 -17
  139. package/dist/statistics/statisticsManager.d.ts +0 -121
  140. package/dist/storage/fileSystemStorage.d.ts +0 -73
  141. package/dist/storage/fileSystemStorage.d.ts.map +0 -1
  142. package/dist/storage/opfsStorage.d.ts +0 -236
  143. package/dist/storage/opfsStorage.d.ts.map +0 -1
  144. package/dist/storage/s3CompatibleStorage.d.ts +0 -157
  145. package/dist/storage/s3CompatibleStorage.d.ts.map +0 -1
  146. package/dist/testing/prettyReporter.d.ts +0 -23
  147. package/dist/testing/prettySummaryReporter.d.ts +0 -22
  148. package/dist/unified.min.js +0 -16153
  149. package/dist/utils/environmentDetection.d.ts +0 -47
  150. package/dist/utils/environmentDetection.d.ts.map +0 -1
  151. package/dist/utils/tensorflowUtils.d.ts +0 -17
  152. package/dist/utils/tensorflowUtils.d.ts.map +0 -1
@@ -0,0 +1,1306 @@
1
+ /**
2
+ * Multi-level Cache Manager
3
+ *
4
+ * Implements a three-level caching strategy:
5
+ * - Level 1: Hot cache (most accessed nodes) - RAM (automatically detecting and adjusting in each environment)
6
+ * - Level 2: Warm cache (recent nodes) - OPFS, Filesystem or S3 depending on environment
7
+ * - Level 3: Cold storage (all nodes) - OPFS, Filesystem or S3 depending on environment
8
+ */
9
+ // Environment detection for storage selection
10
+ var Environment;
11
+ (function (Environment) {
12
+ Environment[Environment["BROWSER"] = 0] = "BROWSER";
13
+ Environment[Environment["NODE"] = 1] = "NODE";
14
+ Environment[Environment["WORKER"] = 2] = "WORKER";
15
+ })(Environment || (Environment = {}));
16
+ // Storage type for warm and cold caches
17
+ var StorageType;
18
+ (function (StorageType) {
19
+ StorageType[StorageType["MEMORY"] = 0] = "MEMORY";
20
+ StorageType[StorageType["OPFS"] = 1] = "OPFS";
21
+ StorageType[StorageType["FILESYSTEM"] = 2] = "FILESYSTEM";
22
+ StorageType[StorageType["S3"] = 3] = "S3";
23
+ StorageType[StorageType["REMOTE_API"] = 4] = "REMOTE_API";
24
+ })(StorageType || (StorageType = {}));
25
+ /**
26
+ * Multi-level cache manager for efficient data access
27
+ */
28
+ export class CacheManager {
29
+ /**
30
+ * Initialize the cache manager
31
+ * @param options Configuration options
32
+ */
33
+ constructor(options = {}) {
34
+ // Hot cache (RAM)
35
+ this.hotCache = new Map();
36
+ // Cache statistics
37
+ this.stats = {
38
+ hits: 0,
39
+ misses: 0,
40
+ evictions: 0,
41
+ size: 0,
42
+ maxSize: 0,
43
+ hotCacheSize: 0,
44
+ warmCacheSize: 0,
45
+ hotCacheHits: 0,
46
+ hotCacheMisses: 0,
47
+ warmCacheHits: 0,
48
+ warmCacheMisses: 0
49
+ };
50
+ this.lastAutoTuneTime = 0;
51
+ this.autoTuneInterval = 5 * 60 * 1000; // 5 minutes
52
+ this.storageStatistics = null;
53
+ // Store options for later reference
54
+ this.options = options;
55
+ // Detect environment
56
+ this.environment = this.detectEnvironment();
57
+ // Set storage types based on environment
58
+ this.warmStorageType = this.detectWarmStorageType();
59
+ this.coldStorageType = this.detectColdStorageType();
60
+ // Initialize storage adapters
61
+ this.warmStorage = options.warmStorage || this.initializeWarmStorage();
62
+ this.coldStorage = options.coldStorage || this.initializeColdStorage();
63
+ // Set auto-tuning flag
64
+ this.autoTune = options.autoTune !== undefined ? options.autoTune : true;
65
+ // Get environment-specific configuration if available
66
+ const envConfig = options.environmentConfig?.[Environment[this.environment].toLowerCase()];
67
+ // Set default values or use environment-specific values or global values
68
+ this.hotCacheMaxSize = envConfig?.hotCacheMaxSize || options.hotCacheMaxSize || this.detectOptimalCacheSize();
69
+ this.hotCacheEvictionThreshold = envConfig?.hotCacheEvictionThreshold || options.hotCacheEvictionThreshold || 0.8;
70
+ this.warmCacheTTL = envConfig?.warmCacheTTL || options.warmCacheTTL || 24 * 60 * 60 * 1000; // 24 hours
71
+ this.batchSize = envConfig?.batchSize || options.batchSize || 10;
72
+ // If auto-tuning is enabled, perform initial tuning
73
+ if (this.autoTune) {
74
+ this.tuneParameters();
75
+ }
76
+ // Log configuration
77
+ if (process.env.DEBUG) {
78
+ console.log('Cache Manager initialized with configuration:', {
79
+ environment: Environment[this.environment],
80
+ hotCacheMaxSize: this.hotCacheMaxSize,
81
+ hotCacheEvictionThreshold: this.hotCacheEvictionThreshold,
82
+ warmCacheTTL: this.warmCacheTTL,
83
+ batchSize: this.batchSize,
84
+ autoTune: this.autoTune,
85
+ warmStorageType: StorageType[this.warmStorageType],
86
+ coldStorageType: StorageType[this.coldStorageType]
87
+ });
88
+ }
89
+ }
90
+ /**
91
+ * Detect the current environment
92
+ */
93
+ detectEnvironment() {
94
+ if (typeof window !== 'undefined' && typeof document !== 'undefined') {
95
+ return Environment.BROWSER;
96
+ }
97
+ else if (typeof self !== 'undefined' && typeof window === 'undefined') {
98
+ // In a worker environment, self is defined but window is not
99
+ return Environment.WORKER;
100
+ }
101
+ else {
102
+ return Environment.NODE;
103
+ }
104
+ }
105
+ /**
106
+ * Detect the optimal cache size based on available memory and operating mode
107
+ *
108
+ * Enhanced to better handle large datasets in S3 or other storage:
109
+ * - Increases cache size for read-only mode
110
+ * - Adjusts based on total dataset size when available
111
+ * - Provides more aggressive caching for large datasets
112
+ * - Optimizes memory usage based on environment
113
+ */
114
+ detectOptimalCacheSize() {
115
+ try {
116
+ // Default to a conservative value
117
+ const defaultSize = 1000;
118
+ // Get the total dataset size if available
119
+ const totalItems = this.storageStatistics ?
120
+ (this.storageStatistics.totalNodes || 0) + (this.storageStatistics.totalEdges || 0) : 0;
121
+ // Determine if we're dealing with a large dataset (>100K items)
122
+ const isLargeDataset = totalItems > 100000;
123
+ // Check if we're in read-only mode (from parent BrainyData instance)
124
+ const isReadOnly = this.options?.readOnly || false;
125
+ // In Node.js, use available system memory with enhanced allocation
126
+ if (this.environment === Environment.NODE) {
127
+ try {
128
+ // For ES module compatibility, we'll use a fixed default value
129
+ // since we can't use dynamic imports in a synchronous function
130
+ // Use conservative defaults that don't require OS module
131
+ // These values are reasonable for most systems
132
+ const estimatedTotalMemory = 8 * 1024 * 1024 * 1024; // Assume 8GB total
133
+ const estimatedFreeMemory = 4 * 1024 * 1024 * 1024; // Assume 4GB free
134
+ // Estimate average entry size (in bytes)
135
+ // This is a conservative estimate for complex objects with vectors
136
+ const ESTIMATED_BYTES_PER_ENTRY = 1024; // 1KB per entry
137
+ // Base memory percentage - 10% by default
138
+ let memoryPercentage = 0.1;
139
+ // Adjust based on operating mode and dataset size
140
+ if (isReadOnly) {
141
+ // In read-only mode, we can use more memory for caching
142
+ memoryPercentage = 0.25; // 25% of free memory
143
+ // For large datasets in read-only mode, be even more aggressive
144
+ if (isLargeDataset) {
145
+ memoryPercentage = 0.4; // 40% of free memory
146
+ }
147
+ }
148
+ else if (isLargeDataset) {
149
+ // For large datasets in normal mode, increase slightly
150
+ memoryPercentage = 0.15; // 15% of free memory
151
+ }
152
+ // Calculate optimal size based on adjusted percentage
153
+ const optimalSize = Math.max(Math.floor(estimatedFreeMemory * memoryPercentage / ESTIMATED_BYTES_PER_ENTRY), 1000);
154
+ // If we know the total dataset size, cap at a reasonable percentage
155
+ if (totalItems > 0) {
156
+ // In read-only mode, we can cache a larger percentage
157
+ const maxPercentage = isReadOnly ? 0.5 : 0.3;
158
+ const maxItems = Math.ceil(totalItems * maxPercentage);
159
+ // Return the smaller of the two to avoid excessive memory usage
160
+ return Math.min(optimalSize, maxItems);
161
+ }
162
+ return optimalSize;
163
+ }
164
+ catch (error) {
165
+ console.warn('Failed to detect optimal cache size:', error);
166
+ return defaultSize;
167
+ }
168
+ }
169
+ // In browser, use navigator.deviceMemory with enhanced allocation
170
+ if (this.environment === Environment.BROWSER && navigator.deviceMemory) {
171
+ // Base entries per GB
172
+ let entriesPerGB = 500;
173
+ // Adjust based on operating mode and dataset size
174
+ if (isReadOnly) {
175
+ entriesPerGB = 800; // More aggressive caching in read-only mode
176
+ if (isLargeDataset) {
177
+ entriesPerGB = 1000; // Even more aggressive for large datasets
178
+ }
179
+ }
180
+ else if (isLargeDataset) {
181
+ entriesPerGB = 600; // Slightly more aggressive for large datasets
182
+ }
183
+ // Calculate based on device memory
184
+ const browserCacheSize = Math.max(navigator.deviceMemory * entriesPerGB, 1000);
185
+ // If we know the total dataset size, cap at a reasonable percentage
186
+ if (totalItems > 0) {
187
+ // In read-only mode, we can cache a larger percentage
188
+ const maxPercentage = isReadOnly ? 0.4 : 0.25;
189
+ const maxItems = Math.ceil(totalItems * maxPercentage);
190
+ // Return the smaller of the two to avoid excessive memory usage
191
+ return Math.min(browserCacheSize, maxItems);
192
+ }
193
+ return browserCacheSize;
194
+ }
195
+ // For worker environments or when memory detection fails
196
+ if (this.environment === Environment.WORKER) {
197
+ // Workers typically have limited memory, be conservative
198
+ return isReadOnly ? 2000 : 1000;
199
+ }
200
+ return defaultSize;
201
+ }
202
+ catch (error) {
203
+ console.warn('Error detecting optimal cache size:', error);
204
+ return 1000; // Conservative default
205
+ }
206
+ }
207
+ /**
208
+ * Async version of detectOptimalCacheSize that uses dynamic imports
209
+ * to access system information in Node.js environments
210
+ *
211
+ * This method provides more accurate memory detection by using
212
+ * the OS module's dynamic import in Node.js environments
213
+ */
214
+ async detectOptimalCacheSizeAsync() {
215
+ try {
216
+ // Default to a conservative value
217
+ const defaultSize = 1000;
218
+ // Get the total dataset size if available
219
+ const totalItems = this.storageStatistics ?
220
+ (this.storageStatistics.totalNodes || 0) + (this.storageStatistics.totalEdges || 0) : 0;
221
+ // Determine if we're dealing with a large dataset (>100K items)
222
+ const isLargeDataset = totalItems > 100000;
223
+ // Check if we're in read-only mode (from parent BrainyData instance)
224
+ const isReadOnly = this.options?.readOnly || false;
225
+ // Get memory information based on environment
226
+ const memoryInfo = await this.detectAvailableMemory();
227
+ // If memory detection failed, use the synchronous method
228
+ if (!memoryInfo) {
229
+ return this.detectOptimalCacheSize();
230
+ }
231
+ // Estimate average entry size (in bytes)
232
+ // This is a conservative estimate for complex objects with vectors
233
+ const ESTIMATED_BYTES_PER_ENTRY = 1024; // 1KB per entry
234
+ // Base memory percentage - 10% by default
235
+ let memoryPercentage = 0.1;
236
+ // Adjust based on operating mode and dataset size
237
+ if (isReadOnly) {
238
+ // In read-only mode, we can use more memory for caching
239
+ memoryPercentage = 0.25; // 25% of free memory
240
+ // For large datasets in read-only mode, be even more aggressive
241
+ if (isLargeDataset) {
242
+ memoryPercentage = 0.4; // 40% of free memory
243
+ }
244
+ }
245
+ else if (isLargeDataset) {
246
+ // For large datasets in normal mode, increase slightly
247
+ memoryPercentage = 0.15; // 15% of free memory
248
+ }
249
+ // Calculate optimal size based on adjusted percentage
250
+ const optimalSize = Math.max(Math.floor(memoryInfo.freeMemory * memoryPercentage / ESTIMATED_BYTES_PER_ENTRY), 1000);
251
+ // If we know the total dataset size, cap at a reasonable percentage
252
+ if (totalItems > 0) {
253
+ // In read-only mode, we can cache a larger percentage
254
+ const maxPercentage = isReadOnly ? 0.5 : 0.3;
255
+ const maxItems = Math.ceil(totalItems * maxPercentage);
256
+ // Return the smaller of the two to avoid excessive memory usage
257
+ return Math.min(optimalSize, maxItems);
258
+ }
259
+ return optimalSize;
260
+ }
261
+ catch (error) {
262
+ console.warn('Error detecting optimal cache size asynchronously:', error);
263
+ return 1000; // Conservative default
264
+ }
265
+ }
266
+ /**
267
+ * Detects available memory across different environments
268
+ *
269
+ * This method uses different techniques to detect memory in:
270
+ * - Node.js: Uses the OS module with dynamic import
271
+ * - Browser: Uses performance.memory or navigator.deviceMemory
272
+ * - Worker: Uses performance.memory if available
273
+ *
274
+ * @returns An object with totalMemory and freeMemory in bytes, or null if detection fails
275
+ */
276
+ async detectAvailableMemory() {
277
+ try {
278
+ // Node.js environment
279
+ if (this.environment === Environment.NODE) {
280
+ try {
281
+ // Use dynamic import for OS module
282
+ const os = await import('os');
283
+ // Get actual system memory information
284
+ const totalMemory = os.totalmem();
285
+ const freeMemory = os.freemem();
286
+ return { totalMemory, freeMemory };
287
+ }
288
+ catch (error) {
289
+ console.warn('Failed to detect memory in Node.js environment:', error);
290
+ }
291
+ }
292
+ // Browser environment
293
+ if (this.environment === Environment.BROWSER) {
294
+ // Try using performance.memory (Chrome only)
295
+ if (performance && performance.memory) {
296
+ const memoryInfo = performance.memory;
297
+ // jsHeapSizeLimit is the maximum size of the heap
298
+ // totalJSHeapSize is the currently allocated heap size
299
+ // usedJSHeapSize is the amount of heap currently being used
300
+ const totalMemory = memoryInfo.jsHeapSizeLimit || 0;
301
+ const usedMemory = memoryInfo.usedJSHeapSize || 0;
302
+ const freeMemory = Math.max(totalMemory - usedMemory, 0);
303
+ return { totalMemory, freeMemory };
304
+ }
305
+ // Try using navigator.deviceMemory as fallback
306
+ if (navigator.deviceMemory) {
307
+ // deviceMemory is in GB, convert to bytes
308
+ const totalMemory = navigator.deviceMemory * 1024 * 1024 * 1024;
309
+ // Assume 50% is free
310
+ const freeMemory = totalMemory * 0.5;
311
+ return { totalMemory, freeMemory };
312
+ }
313
+ }
314
+ // Worker environment
315
+ if (this.environment === Environment.WORKER) {
316
+ // Try using performance.memory if available (Chrome workers)
317
+ if (performance && performance.memory) {
318
+ const memoryInfo = performance.memory;
319
+ const totalMemory = memoryInfo.jsHeapSizeLimit || 0;
320
+ const usedMemory = memoryInfo.usedJSHeapSize || 0;
321
+ const freeMemory = Math.max(totalMemory - usedMemory, 0);
322
+ return { totalMemory, freeMemory };
323
+ }
324
+ // For workers, use a conservative estimate
325
+ // Assume 2GB total memory with 1GB free
326
+ return {
327
+ totalMemory: 2 * 1024 * 1024 * 1024,
328
+ freeMemory: 1 * 1024 * 1024 * 1024
329
+ };
330
+ }
331
+ // If all detection methods fail, use conservative defaults
332
+ return {
333
+ totalMemory: 8 * 1024 * 1024 * 1024, // Assume 8GB total
334
+ freeMemory: 4 * 1024 * 1024 * 1024 // Assume 4GB free
335
+ };
336
+ }
337
+ catch (error) {
338
+ console.warn('Memory detection failed:', error);
339
+ return null;
340
+ }
341
+ }
342
+ /**
343
+ * Tune cache parameters based on statistics and environment
344
+ * This method is called periodically if auto-tuning is enabled
345
+ *
346
+ * The auto-tuning process:
347
+ * 1. Retrieves storage statistics if available
348
+ * 2. Tunes each parameter based on statistics and environment
349
+ * 3. Logs the tuned parameters if debug is enabled
350
+ *
351
+ * Auto-tuning helps optimize cache performance by adapting to:
352
+ * - The current environment (Node.js, browser, worker)
353
+ * - Available system resources (memory, CPU)
354
+ * - Usage patterns (read-heavy vs. write-heavy workloads)
355
+ * - Cache efficiency (hit/miss ratios)
356
+ */
357
+ async tuneParameters() {
358
+ // Skip if auto-tuning is disabled
359
+ if (!this.autoTune)
360
+ return;
361
+ // Check if it's time to tune parameters
362
+ const now = Date.now();
363
+ if (now - this.lastAutoTuneTime < this.autoTuneInterval)
364
+ return;
365
+ // Update last tune time
366
+ this.lastAutoTuneTime = now;
367
+ try {
368
+ // Get storage statistics if available
369
+ if (this.coldStorage && typeof this.coldStorage.getStatistics === 'function') {
370
+ this.storageStatistics = await this.coldStorage.getStatistics();
371
+ }
372
+ // Get cache statistics for adaptive tuning
373
+ const cacheStats = this.getStats();
374
+ // Use the async version of tuneHotCacheSize which uses detectOptimalCacheSizeAsync
375
+ await this.tuneHotCacheSize();
376
+ // Tune eviction threshold based on hit/miss ratio
377
+ this.tuneEvictionThreshold(cacheStats);
378
+ // Tune warm cache TTL based on access patterns
379
+ this.tuneWarmCacheTTL(cacheStats);
380
+ // Tune batch size based on access patterns and storage type
381
+ this.tuneBatchSize(cacheStats);
382
+ // Log tuned parameters if debug is enabled
383
+ if (process.env.DEBUG) {
384
+ console.log('Cache parameters auto-tuned:', {
385
+ hotCacheMaxSize: this.hotCacheMaxSize,
386
+ hotCacheEvictionThreshold: this.hotCacheEvictionThreshold,
387
+ warmCacheTTL: this.warmCacheTTL,
388
+ batchSize: this.batchSize,
389
+ cacheStats: {
390
+ hotCacheSize: cacheStats.hotCacheSize,
391
+ warmCacheSize: cacheStats.warmCacheSize,
392
+ hotCacheHits: cacheStats.hotCacheHits,
393
+ hotCacheMisses: cacheStats.hotCacheMisses,
394
+ warmCacheHits: cacheStats.warmCacheHits,
395
+ warmCacheMisses: cacheStats.warmCacheMisses
396
+ }
397
+ });
398
+ }
399
+ }
400
+ catch (error) {
401
+ console.warn('Error during cache parameter auto-tuning:', error);
402
+ }
403
+ }
404
+ /**
405
+ * Tune hot cache size based on statistics, environment, and operating mode
406
+ *
407
+ * The hot cache size is tuned based on:
408
+ * 1. Available memory in the current environment
409
+ * 2. Total number of nodes and edges in the system
410
+ * 3. Cache hit/miss ratio
411
+ * 4. Operating mode (read-only vs. read-write)
412
+ * 5. Storage type (S3, filesystem, memory)
413
+ *
414
+ * Enhanced algorithm:
415
+ * - Start with a size based on available memory and operating mode
416
+ * - For large datasets in S3 or other remote storage, use more aggressive caching
417
+ * - Adjust based on access patterns (read-heavy vs. write-heavy)
418
+ * - For read-only mode, prioritize cache size over eviction speed
419
+ * - Dynamically adjust based on hit/miss ratio and query patterns
420
+ */
421
+ async tuneHotCacheSize() {
422
+ // Use the async version to get more accurate memory information
423
+ let optimalSize = await this.detectOptimalCacheSizeAsync();
424
+ // Check if we're in read-only mode
425
+ const isReadOnly = this.options?.readOnly || false;
426
+ // Check if we're using S3 or other remote storage
427
+ const isRemoteStorage = this.coldStorageType === StorageType.S3 ||
428
+ this.coldStorageType === StorageType.REMOTE_API;
429
+ // If we have storage statistics, adjust based on total nodes/edges
430
+ if (this.storageStatistics) {
431
+ const totalItems = (this.storageStatistics.totalNodes || 0) +
432
+ (this.storageStatistics.totalEdges || 0);
433
+ // If total items is significant, adjust cache size
434
+ if (totalItems > 0) {
435
+ // Base percentage to cache - adjusted based on mode and storage
436
+ let percentageToCache = 0.2; // Cache 20% of items by default
437
+ // For read-only mode, increase cache percentage
438
+ if (isReadOnly) {
439
+ percentageToCache = 0.3; // 30% for read-only mode
440
+ // For remote storage in read-only mode, be even more aggressive
441
+ if (isRemoteStorage) {
442
+ percentageToCache = 0.4; // 40% for remote storage in read-only mode
443
+ }
444
+ }
445
+ // For remote storage in normal mode, increase slightly
446
+ else if (isRemoteStorage) {
447
+ percentageToCache = 0.25; // 25% for remote storage
448
+ }
449
+ // For large datasets, cap the percentage to avoid excessive memory usage
450
+ if (totalItems > 1000000) { // Over 1 million items
451
+ percentageToCache = Math.min(percentageToCache, 0.15);
452
+ }
453
+ else if (totalItems > 100000) { // Over 100K items
454
+ percentageToCache = Math.min(percentageToCache, 0.25);
455
+ }
456
+ const statisticsBasedSize = Math.ceil(totalItems * percentageToCache);
457
+ // Use the smaller of the two to avoid memory issues
458
+ optimalSize = Math.min(optimalSize, statisticsBasedSize);
459
+ }
460
+ }
461
+ // Adjust based on hit/miss ratio if we have enough data
462
+ const totalAccesses = this.stats.hits + this.stats.misses;
463
+ if (totalAccesses > 100) {
464
+ const hitRatio = this.stats.hits / totalAccesses;
465
+ // Base adjustment factor
466
+ let hitRatioFactor = 1.0;
467
+ // If hit ratio is low, we might need a larger cache
468
+ if (hitRatio < 0.5) {
469
+ // Calculate adjustment factor based on hit ratio
470
+ const baseAdjustment = 0.5 - hitRatio;
471
+ // For read-only mode or remote storage, be more aggressive
472
+ if (isReadOnly || isRemoteStorage) {
473
+ hitRatioFactor = 1 + (baseAdjustment * 1.5); // Up to 75% increase
474
+ }
475
+ else {
476
+ hitRatioFactor = 1 + baseAdjustment; // Up to 50% increase
477
+ }
478
+ optimalSize = Math.ceil(optimalSize * hitRatioFactor);
479
+ }
480
+ // If hit ratio is very high, we might be able to reduce cache size slightly
481
+ else if (hitRatio > 0.9 && !isReadOnly && !isRemoteStorage) {
482
+ // Only reduce cache size in normal mode with local storage
483
+ // and only if hit ratio is very high
484
+ hitRatioFactor = 0.9; // 10% reduction
485
+ optimalSize = Math.ceil(optimalSize * hitRatioFactor);
486
+ }
487
+ }
488
+ // Check for operation patterns if available
489
+ if (this.storageStatistics?.operations) {
490
+ const ops = this.storageStatistics.operations;
491
+ const totalOps = ops.total || 1;
492
+ // Calculate read/write ratio
493
+ const readOps = (ops.search || 0) + (ops.get || 0);
494
+ const writeOps = (ops.add || 0) + (ops.update || 0) + (ops.delete || 0);
495
+ if (totalOps > 100) {
496
+ const readRatio = readOps / totalOps;
497
+ // For read-heavy workloads, increase cache size
498
+ if (readRatio > 0.8) {
499
+ // More aggressive for remote storage
500
+ const readAdjustment = isRemoteStorage ? 1.3 : 1.2;
501
+ optimalSize = Math.ceil(optimalSize * readAdjustment);
502
+ }
503
+ }
504
+ }
505
+ // Ensure we have a reasonable minimum size based on environment and mode
506
+ let minSize = 1000; // Default minimum
507
+ // For read-only mode, use a higher minimum
508
+ if (isReadOnly) {
509
+ minSize = 2000;
510
+ }
511
+ // For remote storage, use an even higher minimum
512
+ if (isRemoteStorage) {
513
+ minSize = isReadOnly ? 3000 : 2000;
514
+ }
515
+ optimalSize = Math.max(optimalSize, minSize);
516
+ // Update the hot cache max size
517
+ this.hotCacheMaxSize = optimalSize;
518
+ this.stats.maxSize = optimalSize;
519
+ }
520
+ /**
521
+ * Tune eviction threshold based on statistics
522
+ *
523
+ * The eviction threshold determines when items start being evicted from the hot cache.
524
+ * It is tuned based on:
525
+ * 1. Cache hit/miss ratio
526
+ * 2. Operation patterns (read-heavy vs. write-heavy workloads)
527
+ * 3. Memory pressure and available resources
528
+ *
529
+ * Algorithm:
530
+ * - Start with a default threshold of 0.8 (80% of max size)
531
+ * - For high hit ratios, increase the threshold to keep more items in cache
532
+ * - For low hit ratios, decrease the threshold to evict items more aggressively
533
+ * - For read-heavy workloads, use a higher threshold
534
+ * - For write-heavy workloads, use a lower threshold
535
+ * - Under memory pressure, use a lower threshold to conserve resources
536
+ *
537
+ * @param cacheStats Optional cache statistics for more adaptive tuning
538
+ */
539
+ tuneEvictionThreshold(cacheStats) {
540
+ // Default threshold
541
+ let threshold = 0.8;
542
+ // Use provided cache stats or internal stats
543
+ const stats = cacheStats || this.getStats();
544
+ // Adjust based on hit/miss ratio if we have enough data
545
+ const totalHotAccesses = stats.hotCacheHits + stats.hotCacheMisses;
546
+ if (totalHotAccesses > 100) {
547
+ const hotHitRatio = stats.hotCacheHits / totalHotAccesses;
548
+ // If hit ratio is high, we can use a higher threshold
549
+ // If hit ratio is low, we should use a lower threshold to evict more aggressively
550
+ if (hotHitRatio > 0.8) {
551
+ // High hit ratio, increase threshold (up to 0.9)
552
+ threshold = Math.min(0.9, 0.8 + (hotHitRatio - 0.8) * 0.5);
553
+ }
554
+ else if (hotHitRatio < 0.5) {
555
+ // Low hit ratio, decrease threshold (down to 0.6)
556
+ threshold = Math.max(0.6, 0.8 - (0.5 - hotHitRatio) * 0.5);
557
+ }
558
+ }
559
+ // If we have storage statistics with operation counts, adjust based on operation patterns
560
+ if (this.storageStatistics && this.storageStatistics.operations) {
561
+ const ops = this.storageStatistics.operations;
562
+ const totalOps = ops.total || 1;
563
+ // Calculate read/write ratio
564
+ const readOps = ops.search || 0;
565
+ const writeOps = (ops.add || 0) + (ops.update || 0) + (ops.delete || 0);
566
+ if (totalOps > 100) {
567
+ const readRatio = readOps / totalOps;
568
+ const writeRatio = writeOps / totalOps;
569
+ // For read-heavy workloads, use higher threshold
570
+ // For write-heavy workloads, use lower threshold
571
+ if (readRatio > 0.8) {
572
+ // Read-heavy, increase threshold slightly
573
+ threshold = Math.min(0.9, threshold + 0.05);
574
+ }
575
+ else if (writeRatio > 0.5) {
576
+ // Write-heavy, decrease threshold
577
+ threshold = Math.max(0.6, threshold - 0.1);
578
+ }
579
+ }
580
+ }
581
+ // Check memory pressure - if hot cache is growing too fast relative to hits,
582
+ // reduce the threshold to conserve memory
583
+ if (stats.hotCacheSize > 0 && totalHotAccesses > 0) {
584
+ const sizeToAccessRatio = stats.hotCacheSize / totalHotAccesses;
585
+ // If the ratio is high, it means we're caching a lot but not getting many hits
586
+ if (sizeToAccessRatio > 10) {
587
+ // Reduce threshold more aggressively under high memory pressure
588
+ threshold = Math.max(0.5, threshold - 0.1);
589
+ }
590
+ }
591
+ // If we're in read-only mode, we can be more aggressive with caching
592
+ const isReadOnly = this.options?.readOnly || false;
593
+ if (isReadOnly) {
594
+ threshold = Math.min(0.95, threshold + 0.05);
595
+ }
596
+ // Update the eviction threshold
597
+ this.hotCacheEvictionThreshold = threshold;
598
+ }
599
+ /**
600
+ * Tune warm cache TTL based on statistics
601
+ *
602
+ * The warm cache TTL determines how long items remain in the warm cache.
603
+ * It is tuned based on:
604
+ * 1. Update frequency from operation statistics
605
+ * 2. Warm cache hit/miss ratio
606
+ * 3. Access patterns and frequency
607
+ * 4. Available storage resources
608
+ *
609
+ * Algorithm:
610
+ * - Start with a default TTL of 24 hours
611
+ * - For frequently updated data, use a shorter TTL
612
+ * - For rarely updated data, use a longer TTL
613
+ * - For frequently accessed data, use a longer TTL
614
+ * - For rarely accessed data, use a shorter TTL
615
+ * - Under storage pressure, use a shorter TTL
616
+ *
617
+ * @param cacheStats Optional cache statistics for more adaptive tuning
618
+ */
619
+ tuneWarmCacheTTL(cacheStats) {
620
+ // Default TTL (24 hours)
621
+ let ttl = 24 * 60 * 60 * 1000;
622
+ // Use provided cache stats or internal stats
623
+ const stats = cacheStats || this.getStats();
624
+ // Adjust based on warm cache hit/miss ratio if we have enough data
625
+ const totalWarmAccesses = stats.warmCacheHits + stats.warmCacheMisses;
626
+ if (totalWarmAccesses > 50) {
627
+ const warmHitRatio = stats.warmCacheHits / totalWarmAccesses;
628
+ // If warm cache hit ratio is high, items in warm cache are useful
629
+ // so we should keep them longer
630
+ if (warmHitRatio > 0.7) {
631
+ // High hit ratio, increase TTL (up to 36 hours)
632
+ ttl = Math.min(36 * 60 * 60 * 1000, ttl * (1 + (warmHitRatio - 0.7)));
633
+ }
634
+ else if (warmHitRatio < 0.3) {
635
+ // Low hit ratio, decrease TTL (down to 12 hours)
636
+ ttl = Math.max(12 * 60 * 60 * 1000, ttl * (0.8 - (0.3 - warmHitRatio)));
637
+ }
638
+ }
639
+ // If we have storage statistics with operation counts, adjust based on update frequency
640
+ if (this.storageStatistics && this.storageStatistics.operations) {
641
+ const ops = this.storageStatistics.operations;
642
+ const totalOps = ops.total || 1;
643
+ const updateOps = (ops.update || 0);
644
+ if (totalOps > 100) {
645
+ const updateRatio = updateOps / totalOps;
646
+ // For frequently updated data, use shorter TTL
647
+ // For rarely updated data, use longer TTL
648
+ if (updateRatio > 0.3) {
649
+ // Frequently updated, decrease TTL (down to 6 hours)
650
+ ttl = Math.max(6 * 60 * 60 * 1000, ttl * (1 - updateRatio * 0.5));
651
+ }
652
+ else if (updateRatio < 0.1) {
653
+ // Rarely updated, increase TTL (up to 48 hours)
654
+ ttl = Math.min(48 * 60 * 60 * 1000, ttl * (1.2 - updateRatio));
655
+ }
656
+ }
657
+ }
658
+ // Check warm cache size relative to hot cache size
659
+ // If warm cache is much larger than hot cache, reduce TTL to prevent excessive storage use
660
+ if (stats.warmCacheSize > 0 && stats.hotCacheSize > 0) {
661
+ const warmToHotRatio = stats.warmCacheSize / stats.hotCacheSize;
662
+ if (warmToHotRatio > 5) {
663
+ // Warm cache is much larger than hot cache, reduce TTL
664
+ ttl = Math.max(6 * 60 * 60 * 1000, ttl * (0.9 - Math.min(0.3, (warmToHotRatio - 5) / 20)));
665
+ }
666
+ }
667
+ // If we're in read-only mode, we can use a longer TTL
668
+ const isReadOnly = this.options?.readOnly || false;
669
+ if (isReadOnly) {
670
+ ttl = Math.min(72 * 60 * 60 * 1000, ttl * 1.5);
671
+ }
672
+ // Update the warm cache TTL
673
+ this.warmCacheTTL = ttl;
674
+ }
675
/**
 * Tune batch size based on environment, statistics, and operating mode
 *
 * The batch size determines how many items are processed in a single batch
 * for operations like prefetching. It is tuned based on:
 * 1. Current environment (Node.js, browser, worker)
 * 2. Available memory
 * 3. Operation patterns
 * 4. Cache hit/miss ratio
 * 5. Operating mode (read-only vs. read-write)
 * 6. Storage type (S3, filesystem, memory)
 * 7. Dataset size
 * 8. Cache efficiency and access patterns
 *
 * Enhanced algorithm:
 * - Start with a default based on the environment
 * - For large datasets in S3 or other remote storage, use larger batches
 * - For read-only mode, use larger batches to improve throughput
 * - Dynamically adjust based on network latency and throughput
 * - Balance between memory usage and performance
 * - Adapt to cache hit/miss patterns
 *
 * NOTE: the adjustments below are applied in order and are multiplicative,
 * so the final value depends on the exact sequence of these steps. The
 * running value may be fractional; it is rounded once at the very end.
 *
 * @param cacheStats Optional cache statistics for more adaptive tuning
 */
tuneBatchSize(cacheStats) {
    // Default batch size
    let batchSize = 10;
    // Use provided cache stats or internal stats
    const stats = cacheStats || this.getStats();
    // Check if we're in read-only mode
    const isReadOnly = this.options?.readOnly || false;
    // Check if we're using S3 or other remote storage
    const isRemoteStorage = this.coldStorageType === StorageType.S3 ||
        this.coldStorageType === StorageType.REMOTE_API;
    // Get the total dataset size if available
    const totalItems = this.storageStatistics ?
        (this.storageStatistics.totalNodes || 0) + (this.storageStatistics.totalEdges || 0) : 0;
    // Determine if we're dealing with a large dataset
    const isLargeDataset = totalItems > 100000;
    const isVeryLargeDataset = totalItems > 1000000;
    // Base batch size adjustment based on environment
    if (this.environment === Environment.NODE) {
        // Node.js can handle larger batches
        batchSize = isReadOnly ? 30 : 20;
        // For remote storage, increase batch size (amortizes network round-trips)
        if (isRemoteStorage) {
            batchSize = isReadOnly ? 50 : 30;
        }
        // For large datasets, adjust batch size
        if (isLargeDataset) {
            batchSize = Math.min(100, batchSize * 1.5);
        }
        // For very large datasets, adjust even more
        if (isVeryLargeDataset) {
            batchSize = Math.min(200, batchSize * 2);
        }
    }
    else if (this.environment === Environment.BROWSER) {
        // Browsers might need smaller batches
        batchSize = isReadOnly ? 15 : 10;
        // If we have memory information, adjust accordingly.
        // NOTE(review): navigator.deviceMemory is a non-standard/Chromium-only
        // API and is undefined elsewhere — this branch then simply keeps the
        // default above, which appears intentional.
        if (navigator.deviceMemory) {
            // Scale batch size with available memory (deviceMemory is in GiB)
            const memoryFactor = isReadOnly ? 3 : 2;
            batchSize = Math.max(5, Math.min(30, Math.floor(navigator.deviceMemory * memoryFactor)));
            // For large datasets on higher-memory devices, allow bigger batches
            if (isLargeDataset && navigator.deviceMemory > 4) {
                batchSize = Math.min(50, batchSize * 1.5);
            }
        }
    }
    else if (this.environment === Environment.WORKER) {
        // Workers can handle moderate batch sizes
        batchSize = isReadOnly ? 20 : 15;
    }
    // Adjust based on cache hit/miss ratios
    const totalHotAccesses = stats.hotCacheHits + stats.hotCacheMisses;
    const totalWarmAccesses = stats.warmCacheHits + stats.warmCacheMisses;
    if (totalHotAccesses > 100) {
        const hotHitRatio = stats.hotCacheHits / totalHotAccesses;
        // If hot cache hit ratio is high, we're effectively using the cache
        // so we can use larger batches for better throughput
        if (hotHitRatio > 0.8) {
            // High hit ratio, increase batch size
            batchSize = Math.min(batchSize * 1.5, isRemoteStorage ? 250 : 150);
        }
        else if (hotHitRatio < 0.4) {
            // Low hit ratio, we might be fetching too much at once
            // Reduce batch size to be more selective
            batchSize = Math.max(5, batchSize * 0.8);
        }
    }
    if (totalWarmAccesses > 50) {
        const warmHitRatio = stats.warmCacheHits / totalWarmAccesses;
        // If warm cache hit ratio is high, prefetching is effective
        // so we can use larger batches
        if (warmHitRatio > 0.7) {
            // High warm hit ratio, increase batch size
            batchSize = Math.min(batchSize * 1.3, isRemoteStorage ? 200 : 120);
        }
        else if (warmHitRatio < 0.3) {
            // Low warm hit ratio, reduce batch size
            batchSize = Math.max(5, batchSize * 0.9);
        }
    }
    // If we have storage statistics with operation counts, adjust based on operation patterns
    if (this.storageStatistics && this.storageStatistics.operations) {
        const ops = this.storageStatistics.operations;
        const totalOps = ops.total || 1;
        const searchOps = (ops.search || 0);
        const getOps = (ops.get || 0);
        if (totalOps > 100) {
            // Calculate search and get ratios
            const searchRatio = searchOps / totalOps;
            const getRatio = getOps / totalOps;
            // For search-heavy workloads, use larger batch size
            if (searchRatio > 0.6) {
                // Search-heavy, increase batch size
                const searchFactor = isRemoteStorage ? 1.8 : 1.5;
                batchSize = Math.min(isRemoteStorage ? 200 : 100, Math.ceil(batchSize * searchFactor));
            }
            // For get-heavy workloads, adjust batch size
            if (getRatio > 0.6) {
                // Get-heavy, adjust batch size based on storage type
                if (isRemoteStorage) {
                    // For remote storage, larger batches reduce network overhead
                    batchSize = Math.min(150, Math.ceil(batchSize * 1.5));
                }
                else {
                    // For local storage, smaller batches might be more efficient
                    batchSize = Math.max(10, Math.ceil(batchSize * 0.9));
                }
            }
        }
    }
    // Check if we're experiencing memory pressure
    if (stats.hotCacheSize > 0 && this.hotCacheMaxSize > 0) {
        const cacheUtilization = stats.hotCacheSize / this.hotCacheMaxSize;
        // If cache utilization is high, reduce batch size to avoid memory pressure
        if (cacheUtilization > 0.85) {
            batchSize = Math.max(5, Math.floor(batchSize * 0.8));
        }
    }
    // Adjust based on overall hit/miss ratio if we have enough data
    const totalAccesses = stats.hotCacheHits + stats.hotCacheMisses + stats.warmCacheHits + stats.warmCacheMisses;
    if (totalAccesses > 100) {
        const hitRatio = (stats.hotCacheHits + stats.warmCacheHits) / totalAccesses;
        // Base adjustment factors
        let increaseFactorForLowHitRatio = isRemoteStorage ? 1.5 : 1.2;
        let decreaseFactorForHighHitRatio = 0.8;
        // In read-only mode, be more aggressive with batch size adjustments
        if (isReadOnly) {
            increaseFactorForLowHitRatio = isRemoteStorage ? 2.0 : 1.5;
            decreaseFactorForHighHitRatio = 0.9; // Less reduction in read-only mode
        }
        // If hit ratio is high, we can use smaller batches
        if (hitRatio > 0.8 && !isVeryLargeDataset) {
            // High hit ratio, decrease batch size slightly
            // But don't decrease too much for large datasets or remote storage
            if (!(isLargeDataset && isRemoteStorage)) {
                batchSize = Math.max(isReadOnly ? 10 : 5, Math.floor(batchSize * decreaseFactorForHighHitRatio));
            }
        }
        // If hit ratio is low, we need larger batches
        else if (hitRatio < 0.5) {
            // Low hit ratio, increase batch size
            const maxBatchSize = isRemoteStorage ?
                (isVeryLargeDataset ? 300 : 200) :
                (isVeryLargeDataset ? 150 : 100);
            batchSize = Math.min(maxBatchSize, Math.ceil(batchSize * increaseFactorForLowHitRatio));
        }
    }
    // Set minimum batch sizes based on storage type and mode
    let minBatchSize = 5;
    if (isRemoteStorage) {
        minBatchSize = isReadOnly ? 20 : 10;
    }
    else if (isReadOnly) {
        minBatchSize = 10;
    }
    // Ensure batch size is within reasonable limits
    batchSize = Math.max(minBatchSize, batchSize);
    // Cap maximum batch size based on environment and storage
    const maxBatchSize = isRemoteStorage ?
        (this.environment === Environment.NODE ? 300 : 150) :
        (this.environment === Environment.NODE ? 150 : 75);
    batchSize = Math.min(maxBatchSize, batchSize);
    // Update the batch size with the adaptively tuned value
    // (rounded here because intermediate adjustments may be fractional)
    this.batchSize = Math.round(batchSize);
}
865
+ /**
866
+ * Detect the appropriate warm storage type based on environment
867
+ */
868
+ detectWarmStorageType() {
869
+ if (this.environment === Environment.BROWSER) {
870
+ // Use OPFS if available, otherwise use memory
871
+ if ('storage' in navigator && 'getDirectory' in navigator.storage) {
872
+ return StorageType.OPFS;
873
+ }
874
+ return StorageType.MEMORY;
875
+ }
876
+ else if (this.environment === Environment.WORKER) {
877
+ // Use OPFS if available, otherwise use memory
878
+ if ('storage' in self && 'getDirectory' in self.storage) {
879
+ return StorageType.OPFS;
880
+ }
881
+ return StorageType.MEMORY;
882
+ }
883
+ else {
884
+ // In Node.js, use filesystem
885
+ return StorageType.FILESYSTEM;
886
+ }
887
+ }
888
+ /**
889
+ * Detect the appropriate cold storage type based on environment
890
+ */
891
+ detectColdStorageType() {
892
+ if (this.environment === Environment.BROWSER) {
893
+ // Use OPFS if available, otherwise use memory
894
+ if ('storage' in navigator && 'getDirectory' in navigator.storage) {
895
+ return StorageType.OPFS;
896
+ }
897
+ return StorageType.MEMORY;
898
+ }
899
+ else if (this.environment === Environment.WORKER) {
900
+ // Use OPFS if available, otherwise use memory
901
+ if ('storage' in self && 'getDirectory' in self.storage) {
902
+ return StorageType.OPFS;
903
+ }
904
+ return StorageType.MEMORY;
905
+ }
906
+ else {
907
+ // In Node.js, use S3 if configured, otherwise filesystem
908
+ return StorageType.S3;
909
+ }
910
+ }
911
/**
 * Initialize warm storage adapter.
 *
 * Intentionally a stub: the concrete adapter is injected later via
 * setStorageAdapters(), so this always returns null as a placeholder.
 * Until an adapter is set, warm-tier operations are no-ops.
 *
 * @returns {null} Always null; the adapter is provided externally.
 */
initializeWarmStorage() {
    // Implementation depends on the detected storage type
    // For now, return null as this will be provided by the storage adapter
    return null;
}
919
/**
 * Initialize cold storage adapter.
 *
 * Intentionally a stub: the concrete adapter is injected later via
 * setStorageAdapters(), so this always returns null as a placeholder.
 * Until an adapter is set, cold-tier operations are no-ops.
 *
 * @returns {null} Always null; the adapter is provided externally.
 */
initializeColdStorage() {
    // Implementation depends on the detected storage type
    // For now, return null as this will be provided by the storage adapter
    return null;
}
927
+ /**
928
+ * Get an item from cache, trying each level in order
929
+ * @param id The item ID
930
+ * @returns The cached item or null if not found
931
+ */
932
+ async get(id) {
933
+ // Check if it's time to tune parameters
934
+ await this.checkAndTuneParameters();
935
+ // Try hot cache first (fastest)
936
+ const hotCacheEntry = this.hotCache.get(id);
937
+ if (hotCacheEntry) {
938
+ // Update access metadata
939
+ hotCacheEntry.lastAccessed = Date.now();
940
+ hotCacheEntry.accessCount++;
941
+ // Update stats
942
+ this.stats.hits++;
943
+ return hotCacheEntry.data;
944
+ }
945
+ // Try warm cache next
946
+ try {
947
+ const warmCacheItem = await this.getFromWarmCache(id);
948
+ if (warmCacheItem) {
949
+ // Promote to hot cache
950
+ this.addToHotCache(id, warmCacheItem);
951
+ // Update stats
952
+ this.stats.hits++;
953
+ return warmCacheItem;
954
+ }
955
+ }
956
+ catch (error) {
957
+ console.warn(`Error accessing warm cache for ${id}:`, error);
958
+ }
959
+ // Finally, try cold storage
960
+ try {
961
+ const coldStorageItem = await this.getFromColdStorage(id);
962
+ if (coldStorageItem) {
963
+ // Promote to hot and warm caches
964
+ this.addToHotCache(id, coldStorageItem);
965
+ await this.addToWarmCache(id, coldStorageItem);
966
+ // Update stats
967
+ this.stats.misses++;
968
+ return coldStorageItem;
969
+ }
970
+ }
971
+ catch (error) {
972
+ console.warn(`Error accessing cold storage for ${id}:`, error);
973
+ }
974
+ // Item not found in any cache level
975
+ this.stats.misses++;
976
+ return null;
977
+ }
978
+ /**
979
+ * Get an item from warm cache
980
+ * @param id The item ID
981
+ * @returns The cached item or null if not found
982
+ */
983
+ async getFromWarmCache(id) {
984
+ if (!this.warmStorage)
985
+ return null;
986
+ try {
987
+ return await this.warmStorage.get(id);
988
+ }
989
+ catch (error) {
990
+ console.warn(`Error getting item ${id} from warm cache:`, error);
991
+ return null;
992
+ }
993
+ }
994
+ /**
995
+ * Get an item from cold storage
996
+ * @param id The item ID
997
+ * @returns The item or null if not found
998
+ */
999
+ async getFromColdStorage(id) {
1000
+ if (!this.coldStorage)
1001
+ return null;
1002
+ try {
1003
+ return await this.coldStorage.get(id);
1004
+ }
1005
+ catch (error) {
1006
+ console.warn(`Error getting item ${id} from cold storage:`, error);
1007
+ return null;
1008
+ }
1009
+ }
1010
+ /**
1011
+ * Add an item to hot cache
1012
+ * @param id The item ID
1013
+ * @param item The item to cache
1014
+ */
1015
+ addToHotCache(id, item) {
1016
+ // Check if we need to evict items
1017
+ if (this.hotCache.size >= this.hotCacheMaxSize * this.hotCacheEvictionThreshold) {
1018
+ this.evictFromHotCache();
1019
+ }
1020
+ // Add to hot cache
1021
+ this.hotCache.set(id, {
1022
+ data: item,
1023
+ lastAccessed: Date.now(),
1024
+ accessCount: 1,
1025
+ expiresAt: null // Hot cache items don't expire
1026
+ });
1027
+ // Update stats
1028
+ this.stats.size = this.hotCache.size;
1029
+ }
1030
+ /**
1031
+ * Add an item to warm cache
1032
+ * @param id The item ID
1033
+ * @param item The item to cache
1034
+ */
1035
+ async addToWarmCache(id, item) {
1036
+ if (!this.warmStorage)
1037
+ return;
1038
+ try {
1039
+ // Add to warm cache with TTL
1040
+ await this.warmStorage.set(id, item, {
1041
+ ttl: this.warmCacheTTL
1042
+ });
1043
+ }
1044
+ catch (error) {
1045
+ console.warn(`Error adding item ${id} to warm cache:`, error);
1046
+ }
1047
+ }
1048
+ /**
1049
+ * Evict items from hot cache based on LRU policy
1050
+ */
1051
+ evictFromHotCache() {
1052
+ // Find the least recently used items
1053
+ const entries = Array.from(this.hotCache.entries());
1054
+ // Sort by last accessed time (oldest first)
1055
+ entries.sort((a, b) => a[1].lastAccessed - b[1].lastAccessed);
1056
+ // Remove the oldest 20% of items
1057
+ const itemsToRemove = Math.ceil(this.hotCache.size * 0.2);
1058
+ for (let i = 0; i < itemsToRemove && i < entries.length; i++) {
1059
+ this.hotCache.delete(entries[i][0]);
1060
+ this.stats.evictions++;
1061
+ }
1062
+ // Update stats
1063
+ this.stats.size = this.hotCache.size;
1064
+ if (process.env.DEBUG) {
1065
+ console.log(`Evicted ${itemsToRemove} items from hot cache, new size: ${this.hotCache.size}`);
1066
+ }
1067
+ }
1068
+ /**
1069
+ * Set an item in all cache levels
1070
+ * @param id The item ID
1071
+ * @param item The item to cache
1072
+ */
1073
+ async set(id, item) {
1074
+ // Add to hot cache
1075
+ this.addToHotCache(id, item);
1076
+ // Add to warm cache
1077
+ await this.addToWarmCache(id, item);
1078
+ // Add to cold storage
1079
+ if (this.coldStorage) {
1080
+ try {
1081
+ await this.coldStorage.set(id, item);
1082
+ }
1083
+ catch (error) {
1084
+ console.warn(`Error adding item ${id} to cold storage:`, error);
1085
+ }
1086
+ }
1087
+ }
1088
+ /**
1089
+ * Delete an item from all cache levels
1090
+ * @param id The item ID to delete
1091
+ */
1092
+ async delete(id) {
1093
+ // Remove from hot cache
1094
+ this.hotCache.delete(id);
1095
+ // Remove from warm cache
1096
+ if (this.warmStorage) {
1097
+ try {
1098
+ await this.warmStorage.delete(id);
1099
+ }
1100
+ catch (error) {
1101
+ console.warn(`Error deleting item ${id} from warm cache:`, error);
1102
+ }
1103
+ }
1104
+ // Remove from cold storage
1105
+ if (this.coldStorage) {
1106
+ try {
1107
+ await this.coldStorage.delete(id);
1108
+ }
1109
+ catch (error) {
1110
+ console.warn(`Error deleting item ${id} from cold storage:`, error);
1111
+ }
1112
+ }
1113
+ // Update stats
1114
+ this.stats.size = this.hotCache.size;
1115
+ }
1116
+ /**
1117
+ * Clear all cache levels
1118
+ */
1119
+ async clear() {
1120
+ // Clear hot cache
1121
+ this.hotCache.clear();
1122
+ // Clear warm cache
1123
+ if (this.warmStorage) {
1124
+ try {
1125
+ await this.warmStorage.clear();
1126
+ }
1127
+ catch (error) {
1128
+ console.warn('Error clearing warm cache:', error);
1129
+ }
1130
+ }
1131
+ // Clear cold storage
1132
+ if (this.coldStorage) {
1133
+ try {
1134
+ await this.coldStorage.clear();
1135
+ }
1136
+ catch (error) {
1137
+ console.warn('Error clearing cold storage:', error);
1138
+ }
1139
+ }
1140
+ // Reset stats
1141
+ this.stats = {
1142
+ hits: 0,
1143
+ misses: 0,
1144
+ evictions: 0,
1145
+ size: 0,
1146
+ maxSize: this.hotCacheMaxSize,
1147
+ hotCacheSize: 0,
1148
+ warmCacheSize: 0,
1149
+ hotCacheHits: 0,
1150
+ hotCacheMisses: 0,
1151
+ warmCacheHits: 0,
1152
+ warmCacheMisses: 0
1153
+ };
1154
+ }
1155
+ /**
1156
+ * Get cache statistics
1157
+ * @returns Cache statistics
1158
+ */
1159
+ getStats() {
1160
+ return { ...this.stats };
1161
+ }
1162
+ /**
1163
+ * Prefetch items based on ID patterns or relationships
1164
+ * @param ids Array of IDs to prefetch
1165
+ */
1166
+ async prefetch(ids) {
1167
+ // Check if it's time to tune parameters
1168
+ await this.checkAndTuneParameters();
1169
+ // Prefetch in batches to avoid overwhelming the system
1170
+ const batches = [];
1171
+ // Split into batches using the configurable batch size
1172
+ for (let i = 0; i < ids.length; i += this.batchSize) {
1173
+ const batch = ids.slice(i, i + this.batchSize);
1174
+ batches.push(batch);
1175
+ }
1176
+ // Process each batch
1177
+ for (const batch of batches) {
1178
+ await Promise.all(batch.map(async (id) => {
1179
+ // Skip if already in hot cache
1180
+ if (this.hotCache.has(id))
1181
+ return;
1182
+ try {
1183
+ // Try to get from any cache level
1184
+ await this.get(id);
1185
+ }
1186
+ catch (error) {
1187
+ // Ignore errors during prefetching
1188
+ if (process.env.DEBUG) {
1189
+ console.warn(`Error prefetching ${id}:`, error);
1190
+ }
1191
+ }
1192
+ }));
1193
+ }
1194
+ }
1195
+ /**
1196
+ * Check if it's time to tune parameters and do so if needed
1197
+ * This is called before operations that might benefit from tuned parameters
1198
+ *
1199
+ * This method serves as a checkpoint for auto-tuning, ensuring that:
1200
+ * 1. Parameters are tuned periodically based on the auto-tune interval
1201
+ * 2. Tuning happens before critical operations that would benefit from optimized parameters
1202
+ * 3. Tuning doesn't happen too frequently, which could impact performance
1203
+ *
1204
+ * By calling this method before get(), getMany(), and prefetch() operations,
1205
+ * we ensure that the cache parameters are optimized for the current workload
1206
+ * without adding unnecessary overhead to every operation.
1207
+ */
1208
+ async checkAndTuneParameters() {
1209
+ // Skip if auto-tuning is disabled
1210
+ if (!this.autoTune)
1211
+ return;
1212
+ // Check if it's time to tune parameters
1213
+ const now = Date.now();
1214
+ if (now - this.lastAutoTuneTime >= this.autoTuneInterval) {
1215
+ await this.tuneParameters();
1216
+ }
1217
+ }
1218
+ /**
1219
+ * Get multiple items at once, optimizing for batch retrieval
1220
+ * @param ids Array of IDs to get
1221
+ * @returns Map of ID to item
1222
+ */
1223
+ async getMany(ids) {
1224
+ // Check if it's time to tune parameters
1225
+ await this.checkAndTuneParameters();
1226
+ const result = new Map();
1227
+ // First check hot cache for all IDs
1228
+ const missingIds = [];
1229
+ for (const id of ids) {
1230
+ const hotCacheEntry = this.hotCache.get(id);
1231
+ if (hotCacheEntry) {
1232
+ // Update access metadata
1233
+ hotCacheEntry.lastAccessed = Date.now();
1234
+ hotCacheEntry.accessCount++;
1235
+ // Add to result
1236
+ result.set(id, hotCacheEntry.data);
1237
+ // Update stats
1238
+ this.stats.hits++;
1239
+ }
1240
+ else {
1241
+ missingIds.push(id);
1242
+ }
1243
+ }
1244
+ if (missingIds.length === 0) {
1245
+ return result;
1246
+ }
1247
+ // Try to get missing items from warm cache
1248
+ if (this.warmStorage) {
1249
+ try {
1250
+ const warmCacheItems = await this.warmStorage.getMany(missingIds);
1251
+ for (const [id, item] of warmCacheItems.entries()) {
1252
+ if (item) {
1253
+ // Promote to hot cache
1254
+ this.addToHotCache(id, item);
1255
+ // Add to result
1256
+ result.set(id, item);
1257
+ // Update stats
1258
+ this.stats.hits++;
1259
+ // Remove from missing IDs
1260
+ const index = missingIds.indexOf(id);
1261
+ if (index !== -1) {
1262
+ missingIds.splice(index, 1);
1263
+ }
1264
+ }
1265
+ }
1266
+ }
1267
+ catch (error) {
1268
+ console.warn('Error accessing warm cache for batch:', error);
1269
+ }
1270
+ }
1271
+ if (missingIds.length === 0) {
1272
+ return result;
1273
+ }
1274
+ // Try to get remaining missing items from cold storage
1275
+ if (this.coldStorage) {
1276
+ try {
1277
+ const coldStorageItems = await this.coldStorage.getMany(missingIds);
1278
+ for (const [id, item] of coldStorageItems.entries()) {
1279
+ if (item) {
1280
+ // Promote to hot and warm caches
1281
+ this.addToHotCache(id, item);
1282
+ await this.addToWarmCache(id, item);
1283
+ // Add to result
1284
+ result.set(id, item);
1285
+ // Update stats
1286
+ this.stats.misses++;
1287
+ }
1288
+ }
1289
+ }
1290
+ catch (error) {
1291
+ console.warn('Error accessing cold storage for batch:', error);
1292
+ }
1293
+ }
1294
+ return result;
1295
+ }
1296
/**
 * Inject the storage adapters backing the warm and cold tiers.
 *
 * Until this is called the tier fields are null and warm/cold
 * operations are skipped. Adapters are expected to expose
 * get/getMany/set/delete/clear (the methods invoked elsewhere in this
 * class).
 *
 * @param warmStorage Warm cache storage adapter
 * @param coldStorage Cold storage adapter
 */
setStorageAdapters(warmStorage, coldStorage) {
    this.warmStorage = warmStorage;
    this.coldStorage = coldStorage;
}
1305
+ }
1306
+ //# sourceMappingURL=cacheManager.js.map