@soulcraft/brainy 0.59.3 → 0.61.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. package/README.md +7 -5
  2. package/bin/brainy.js +62 -76
  3. package/dist/augmentations/memoryAugmentations.js +32 -22
  4. package/dist/brainyData.d.ts +18 -8
  5. package/dist/brainyData.js +140 -74
  6. package/dist/coreTypes.d.ts +0 -12
  7. package/dist/storage/adapters/baseStorageAdapter.d.ts +0 -12
  8. package/dist/storage/adapters/fileSystemStorage.js +2 -0
  9. package/dist/storage/adapters/opfsStorage.js +26 -20
  10. package/dist/storage/adapters/s3CompatibleStorage.d.ts +0 -12
  11. package/dist/storage/adapters/s3CompatibleStorage.js +70 -40
  12. package/dist/storage/baseStorage.d.ts +6 -8
  13. package/dist/storage/baseStorage.js +12 -20
  14. package/package.json +4 -4
  15. package/dist/augmentationFactory.js.map +0 -1
  16. package/dist/augmentationPipeline.js.map +0 -1
  17. package/dist/augmentationRegistry.js.map +0 -1
  18. package/dist/augmentationRegistryLoader.js.map +0 -1
  19. package/dist/augmentations/conduitAugmentations.js.map +0 -1
  20. package/dist/augmentations/cortexSense.js.map +0 -1
  21. package/dist/augmentations/intelligentVerbScoring.js.map +0 -1
  22. package/dist/augmentations/memoryAugmentations.js.map +0 -1
  23. package/dist/augmentations/serverSearchAugmentations.js.map +0 -1
  24. package/dist/brainyData.js.map +0 -1
  25. package/dist/browserFramework.js.map +0 -1
  26. package/dist/browserFramework.minimal.js.map +0 -1
  27. package/dist/chat/brainyChat.js.map +0 -1
  28. package/dist/connectors/interfaces/IConnector.js.map +0 -1
  29. package/dist/coreTypes.js.map +0 -1
  30. package/dist/cortex/backupRestore.js.map +0 -1
  31. package/dist/cortex/cortex.js.map +0 -1
  32. package/dist/cortex/healthCheck.js.map +0 -1
  33. package/dist/cortex/neuralImport.js.map +0 -1
  34. package/dist/cortex/performanceMonitor.js.map +0 -1
  35. package/dist/cortex/serviceIntegration.js.map +0 -1
  36. package/dist/demo.js.map +0 -1
  37. package/dist/distributed/configManager.js.map +0 -1
  38. package/dist/distributed/domainDetector.js.map +0 -1
  39. package/dist/distributed/hashPartitioner.js.map +0 -1
  40. package/dist/distributed/healthMonitor.js.map +0 -1
  41. package/dist/distributed/index.js.map +0 -1
  42. package/dist/distributed/operationalModes.js.map +0 -1
  43. package/dist/errors/brainyError.js.map +0 -1
  44. package/dist/examples/basicUsage.js.map +0 -1
  45. package/dist/hnsw/distributedSearch.js.map +0 -1
  46. package/dist/hnsw/hnswIndex.js.map +0 -1
  47. package/dist/hnsw/hnswIndexOptimized.js.map +0 -1
  48. package/dist/hnsw/optimizedHNSWIndex.js.map +0 -1
  49. package/dist/hnsw/partitionedHNSWIndex.js.map +0 -1
  50. package/dist/hnsw/scaledHNSWSystem.js.map +0 -1
  51. package/dist/index.js.map +0 -1
  52. package/dist/mcp/brainyMCPAdapter.js.map +0 -1
  53. package/dist/mcp/brainyMCPBroadcast.js.map +0 -1
  54. package/dist/mcp/brainyMCPClient.js.map +0 -1
  55. package/dist/mcp/brainyMCPService.js.map +0 -1
  56. package/dist/mcp/index.js.map +0 -1
  57. package/dist/mcp/mcpAugmentationToolset.js.map +0 -1
  58. package/dist/pipeline.js.map +0 -1
  59. package/dist/sequentialPipeline.js.map +0 -1
  60. package/dist/setup.js.map +0 -1
  61. package/dist/shared/default-augmentations.js.map +0 -1
  62. package/dist/storage/adapters/baseStorageAdapter.js.map +0 -1
  63. package/dist/storage/adapters/batchS3Operations.js.map +0 -1
  64. package/dist/storage/adapters/fileSystemStorage.js.map +0 -1
  65. package/dist/storage/adapters/memoryStorage.js.map +0 -1
  66. package/dist/storage/adapters/opfsStorage.js.map +0 -1
  67. package/dist/storage/adapters/optimizedS3Search.js.map +0 -1
  68. package/dist/storage/adapters/s3CompatibleStorage.js.map +0 -1
  69. package/dist/storage/backwardCompatibility.js.map +0 -1
  70. package/dist/storage/baseStorage.js.map +0 -1
  71. package/dist/storage/cacheManager.js.map +0 -1
  72. package/dist/storage/enhancedCacheManager.js.map +0 -1
  73. package/dist/storage/readOnlyOptimizations.js.map +0 -1
  74. package/dist/storage/storageFactory.js.map +0 -1
  75. package/dist/types/augmentations.js.map +0 -1
  76. package/dist/types/brainyDataInterface.js.map +0 -1
  77. package/dist/types/distributedTypes.js.map +0 -1
  78. package/dist/types/fileSystemTypes.js.map +0 -1
  79. package/dist/types/graphTypes.js.map +0 -1
  80. package/dist/types/mcpTypes.js.map +0 -1
  81. package/dist/types/paginationTypes.js.map +0 -1
  82. package/dist/types/pipelineTypes.js.map +0 -1
  83. package/dist/unified.js.map +0 -1
  84. package/dist/universal/crypto.js.map +0 -1
  85. package/dist/universal/events.js.map +0 -1
  86. package/dist/universal/fs.js.map +0 -1
  87. package/dist/universal/index.js.map +0 -1
  88. package/dist/universal/path.js.map +0 -1
  89. package/dist/universal/uuid.js.map +0 -1
  90. package/dist/utils/adaptiveBackpressure.js.map +0 -1
  91. package/dist/utils/adaptiveSocketManager.js.map +0 -1
  92. package/dist/utils/autoConfiguration.js.map +0 -1
  93. package/dist/utils/cacheAutoConfig.js.map +0 -1
  94. package/dist/utils/crypto.js.map +0 -1
  95. package/dist/utils/distance.js.map +0 -1
  96. package/dist/utils/embedding.js.map +0 -1
  97. package/dist/utils/environment.js.map +0 -1
  98. package/dist/utils/fieldNameTracking.js.map +0 -1
  99. package/dist/utils/index.js.map +0 -1
  100. package/dist/utils/jsonProcessing.js.map +0 -1
  101. package/dist/utils/logger.js.map +0 -1
  102. package/dist/utils/metadataFilter.js.map +0 -1
  103. package/dist/utils/metadataIndex.js.map +0 -1
  104. package/dist/utils/metadataIndexCache.js.map +0 -1
  105. package/dist/utils/operationUtils.js.map +0 -1
  106. package/dist/utils/performanceMonitor.js.map +0 -1
  107. package/dist/utils/requestCoalescer.js.map +0 -1
  108. package/dist/utils/searchCache.js.map +0 -1
  109. package/dist/utils/statistics.js.map +0 -1
  110. package/dist/utils/statisticsCollector.js.map +0 -1
  111. package/dist/utils/textEncoding.js.map +0 -1
  112. package/dist/utils/typeUtils.js.map +0 -1
  113. package/dist/utils/version.js.map +0 -1
  114. package/dist/utils/workerUtils.js.map +0 -1
  115. package/dist/utils/writeBuffer.js.map +0 -1
  116. package/dist/worker.js.map +0 -1
@@ -2146,26 +2146,8 @@ export class BrainyData {
  }
  return results;
  }
- /**
- * Get all nouns in the database
- * @returns Array of vector documents
- */
- async getAllNouns() {
- await this.ensureInitialized();
- try {
- // Use getNouns with no pagination to get all nouns
- const result = await this.getNouns({
- pagination: {
- limit: Number.MAX_SAFE_INTEGER // Request all nouns
- }
- });
- return result.items;
- }
- catch (error) {
- console.error('Failed to get all nouns:', error);
- throw new Error(`Failed to get all nouns: ${error}`);
- }
- }
+ // getAllNouns() method removed - use getNouns() with pagination instead
+ // This method was dangerous and could cause expensive scans and memory issues
  /**
  * Get nouns with pagination and filtering
  * @param options Pagination and filtering options
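Migration note: callers that relied on the removed getAllNouns() (and getAllVerbs(), removed further down) can collect the same data through the paginated API. A minimal sketch follows; the cursor option name is an assumption inferred from the nextCursor/hasMore fields visible later in this diff, so verify it against the published typings.

```js
// Migration sketch, not part of the package diff.
// Assumes getNouns() accepts a `cursor` pagination option that takes the
// `nextCursor` value returned with the previous page.
async function collectAllNouns(db, pageSize = 1000) {
  const items = [];
  let cursor;
  let hasMore = true;
  while (hasMore) {
    const page = await db.getNouns({
      pagination: { limit: pageSize, cursor }
    });
    items.push(...page.items);
    hasMore = Boolean(page.hasMore);
    cursor = page.nextCursor;
  }
  return items; // equivalent to what getAllNouns() used to return
}
```

The same loop works for verbs by swapping in getVerbs().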
@@ -2725,7 +2707,7 @@ export class BrainyData {
  const scores = await this.intelligentVerbScoring.computeVerbScores(sourceId, targetId, verbType, options.weight, options.metadata);
  finalWeight = scores.weight;
  finalConfidence = scores.confidence;
- scoringReasoning = scores.reasoning;
+ scoringReasoning = scores.reasoning || [];
  if (this.loggingConfig?.verbose && scoringReasoning.length > 0) {
  console.log(`Intelligent verb scoring for ${sourceId}-${verbType}-${targetId}:`, scoringReasoning);
  }
@@ -2748,8 +2730,8 @@ export class BrainyData {
  type: verbType, // Set the type property to match the verb type
  weight: finalWeight,
  confidence: finalConfidence, // Add confidence to metadata
- intelligentScoring: scoringReasoning.length > 0 ? {
- reasoning: scoringReasoning,
+ intelligentScoring: this.intelligentVerbScoring?.enabled ? {
+ reasoning: scoringReasoning.length > 0 ? scoringReasoning : [`Final weight ${finalWeight}`, `Base confidence ${finalConfidence || 0.5}`],
  computedAt: new Date().toISOString()
  } : undefined,
  createdAt: timestamp,
@@ -2851,7 +2833,12 @@ export class BrainyData {
  updatedAt: metadata.updatedAt,
  createdBy: metadata.createdBy,
  data: metadata.data,
- metadata: metadata.data // Alias for backward compatibility
+ metadata: {
+ ...metadata.data,
+ weight: metadata.weight,
+ confidence: metadata.confidence,
+ ...(metadata.intelligentScoring && { intelligentScoring: metadata.intelligentScoring })
+ } // Complete metadata including intelligent scoring when available
  };
  return graphVerb;
  }
@@ -2861,47 +2848,94 @@ export class BrainyData {
  }
  }
  /**
- * Get all verbs
- * @returns Array of all verbs
+ * Internal performance optimization: intelligently load verbs when beneficial
+ * @internal - Used by search, indexing, and caching optimizations
  */
- async getAllVerbs() {
- await this.ensureInitialized();
- try {
- // Get all lightweight verbs from storage
- const hnswVerbs = await this.storage.getAllVerbs();
- // Convert each HNSWVerb to GraphVerb by loading metadata
- const graphVerbs = [];
- for (const hnswVerb of hnswVerbs) {
- const metadata = await this.storage.getVerbMetadata(hnswVerb.id);
- if (metadata) {
- const graphVerb = {
- id: hnswVerb.id,
- vector: hnswVerb.vector,
- sourceId: metadata.sourceId,
- targetId: metadata.targetId,
- source: metadata.source,
- target: metadata.target,
- verb: metadata.verb,
- type: metadata.type,
- weight: metadata.weight,
- createdAt: metadata.createdAt,
- updatedAt: metadata.updatedAt,
- createdBy: metadata.createdBy,
- data: metadata.data,
- metadata: metadata.data // Alias for backward compatibility
- };
- graphVerbs.push(graphVerb);
- }
- else {
- console.warn(`Verb ${hnswVerb.id} found but no metadata - skipping`);
- }
+ async _optimizedLoadAllVerbs() {
+ // Only load all if it's safe and beneficial
+ if (await this._shouldPreloadAllData()) {
+ const result = await this.getVerbs({
+ pagination: { limit: Number.MAX_SAFE_INTEGER }
+ });
+ return result.items;
+ }
+ // Fall back to on-demand loading
+ return [];
+ }
+ /**
+ * Internal performance optimization: intelligently load nouns when beneficial
+ * @internal - Used by search, indexing, and caching optimizations
+ */
+ async _optimizedLoadAllNouns() {
+ // Only load all if it's safe and beneficial
+ if (await this._shouldPreloadAllData()) {
+ const result = await this.getNouns({
+ pagination: { limit: Number.MAX_SAFE_INTEGER }
+ });
+ return result.items;
+ }
+ // Fall back to on-demand loading
+ return [];
+ }
+ /**
+ * Intelligent decision making for when to preload all data
+ * @internal
+ */
+ async _shouldPreloadAllData() {
+ // Smart heuristics for performance optimization
+ // 1. Read-only mode is ideal for preloading
+ if (this.readOnly) {
+ return await this._isDatasetSizeReasonable();
+ }
+ // 2. Check available memory (Node.js)
+ if (typeof process !== 'undefined' && process.memoryUsage) {
+ const memUsage = process.memoryUsage();
+ const availableMemory = memUsage.heapTotal - memUsage.heapUsed;
+ const memoryMB = availableMemory / (1024 * 1024);
+ // Only preload if we have substantial free memory (>500MB)
+ if (memoryMB < 500) {
+ console.debug('Performance optimization: Skipping preload due to low memory');
+ return false;
  }
- return graphVerbs;
  }
- catch (error) {
- console.error('Failed to get all verbs:', error);
- throw new Error(`Failed to get all verbs: ${error}`);
+ // 3. Consider frozen/immutable mode
+ if (this.frozen) {
+ return await this._isDatasetSizeReasonable();
  }
+ // 4. For frequent search operations, preloading can be beneficial
+ // TODO: Track search frequency and decide based on access patterns
+ return false; // Conservative default for write-heavy workloads
+ }
+ /**
+ * Estimate if dataset size is reasonable for in-memory loading
+ * @internal
+ */
+ async _isDatasetSizeReasonable() {
+ // Implement basic size estimation
+ // Check if we have recent statistics
+ const stats = await this.getStatistics();
+ if (stats) {
+ const totalEntities = Object.values(stats.nounCount || {}).reduce((a, b) => a + b, 0) +
+ Object.values(stats.verbCount || {}).reduce((a, b) => a + b, 0);
+ // Conservative thresholds
+ if (totalEntities > 100000) {
+ console.debug('Performance optimization: Dataset too large for preloading');
+ return false;
+ }
+ if (totalEntities < 10000) {
+ console.debug('Performance optimization: Small dataset - safe to preload');
+ return true;
+ }
+ }
+ // Medium datasets - check memory pressure
+ if (typeof process !== 'undefined' && process.memoryUsage) {
+ const memUsage = process.memoryUsage();
+ const heapUsedPercent = (memUsage.heapUsed / memUsage.heapTotal) * 100;
+ // Only preload if heap usage is low
+ return heapUsedPercent < 50;
+ }
+ // Default: conservative approach
+ return false;
  }
  /**
  * Get verbs with pagination and filtering
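For readers skimming the new heuristic above, it reduces to the following decision flow (a paraphrase of _shouldPreloadAllData() and _isDatasetSizeReasonable() as added in this release, not additional behaviour):

```js
// Paraphrase of the preload decision above; illustration only.
function wouldPreload({ readOnly, frozen, freeHeapMB, totalEntities, heapUsedPercent }) {
  const sizeReasonable =
    totalEntities < 10000 ? true            // small datasets: safe to preload
    : totalEntities > 100000 ? false        // large datasets: never preload
    : heapUsedPercent < 50;                 // medium datasets: only under low heap pressure
  if (readOnly) return sizeReasonable;      // read-only stores are the ideal case
  if (freeHeapMB < 500) return false;       // writable store with little free heap: skip
  if (frozen) return sizeReasonable;        // frozen/immutable stores behave like read-only
  return false;                             // conservative default for write-heavy workloads
}
```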
@@ -3679,19 +3713,40 @@ export class BrainyData {
  }
  // First use the HNSW index to find similar vectors efficiently
  const searchResults = await this.index.search(queryVector, k * 2);
- // Get all verbs for filtering
- const allVerbs = await this.getAllVerbs();
- // Create a map of verb IDs for faster lookup
- const verbMap = new Map();
- for (const verb of allVerbs) {
- verbMap.set(verb.id, verb);
+ // Intelligent verb loading: preload all if beneficial, otherwise on-demand
+ let verbMap = null;
+ let usePreloadedVerbs = false;
+ // Try to intelligently preload verbs for performance
+ const preloadedVerbs = await this._optimizedLoadAllVerbs();
+ if (preloadedVerbs.length > 0) {
+ verbMap = new Map();
+ for (const verb of preloadedVerbs) {
+ verbMap.set(verb.id, verb);
+ }
+ usePreloadedVerbs = true;
+ console.debug(`Performance optimization: Preloaded ${preloadedVerbs.length} verbs for fast lookup`);
  }
+ // Fallback: on-demand verb loading function
+ const getVerbById = async (verbId) => {
+ if (usePreloadedVerbs && verbMap) {
+ return verbMap.get(verbId) || null;
+ }
+ try {
+ const verb = await this.getVerb(verbId);
+ return verb;
+ }
+ catch (error) {
+ console.warn(`Failed to load verb ${verbId}:`, error);
+ return null;
+ }
+ };
  // Filter search results to only include verbs
  const verbResults = [];
+ // Process search results and load verbs on-demand
  for (const result of searchResults) {
  // Search results are [id, distance] tuples
  const [id, distance] = result;
- const verb = verbMap.get(id);
+ const verb = await getVerbById(id);
  if (verb) {
  // If verb types are specified, check if this verb matches
  if (options.verbTypes && options.verbTypes.length > 0) {
@@ -3721,8 +3776,11 @@ export class BrainyData {
  }
  }
  else {
- // Use all verbs
- verbs = allVerbs;
+ // Get all verbs with pagination
+ const allVerbsResult = await this.getVerbs({
+ pagination: { limit: 10000 }
+ });
+ verbs = allVerbsResult.items;
  }
  // Calculate similarity for each verb not already in results
  const existingIds = new Set(verbResults.map((v) => v.id));
@@ -4248,10 +4306,18 @@ export class BrainyData {
  async backup() {
  await this.ensureInitialized();
  try {
- // Get all nouns
- const nouns = await this.getAllNouns();
- // Get all verbs
- const verbs = await this.getAllVerbs();
+ // Use intelligent loading for backup - this is a legitimate use case for full export
+ console.log('Creating backup - loading all data...');
+ // For backup, we legitimately need all data, so use large pagination
+ const nounsResult = await this.getNouns({
+ pagination: { limit: Number.MAX_SAFE_INTEGER }
+ });
+ const nouns = nounsResult.items;
+ const verbsResult = await this.getVerbs({
+ pagination: { limit: Number.MAX_SAFE_INTEGER }
+ });
+ const verbs = verbsResult.items;
+ console.log(`Backup: Loaded ${nouns.length} nouns and ${verbs.length} verbs`);
  // Get all noun types
  const nounTypes = Object.values(NounType);
  // Get all verb types
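backup() still materialises every noun and verb in memory via Number.MAX_SAFE_INTEGER limits, which is fine for the typical case but heavy for very large stores. A caller who needs a lower-memory export could page through the data and stream it out instead; a rough sketch, using the same assumed cursor field as the migration note earlier:

```js
// Hypothetical streaming export, not part of the package.
// `out` is any object with a write(string) method (e.g. a Node fs.WriteStream).
async function exportNounsAsNdjson(db, out, pageSize = 1000) {
  let cursor;
  let hasMore = true;
  while (hasMore) {
    const page = await db.getNouns({ pagination: { limit: pageSize, cursor } });
    for (const noun of page.items) {
      out.write(JSON.stringify(noun) + '\n'); // one record per line (NDJSON)
    }
    hasMore = Boolean(page.hasMore);
    cursor = page.nextCursor;
  }
}
```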
@@ -512,16 +512,4 @@ export interface StorageAdapter {
  * @returns Promise that resolves to an array of changes
  */
  getChangesSince?(timestamp: number, limit?: number): Promise<any[]>;
- /**
- * Get all nouns from storage
- * @returns Promise that resolves to an array of all nouns
- * @deprecated This method loads all data into memory and may cause performance issues. Use getNouns() with pagination instead.
- */
- getAllNouns(): Promise<HNSWNoun[]>;
- /**
- * Get all verbs from storage
- * @returns Promise that resolves to an array of all HNSWVerbs
- * @deprecated This method loads all data into memory and may cause performance issues. Use getVerbs() with pagination instead.
- */
- getAllVerbs(): Promise<HNSWVerb[]>;
  }
@@ -29,18 +29,6 @@ export declare abstract class BaseStorageAdapter implements StorageAdapter {
  quota: number | null;
  details?: Record<string, any>;
  }>;
- /**
- * Get all nouns from storage
- * @returns Promise that resolves to an array of all nouns
- * @deprecated This method loads all data into memory and may cause performance issues. Use getNouns() with pagination instead.
- */
- abstract getAllNouns(): Promise<any[]>;
- /**
- * Get all verbs from storage
- * @returns Promise that resolves to an array of all HNSWVerbs
- * @deprecated This method loads all data into memory and may cause performance issues. Use getVerbs() with pagination instead.
- */
- abstract getAllVerbs(): Promise<any[]>;
  /**
  * Get nouns with pagination and filtering
  * @param options Pagination and filtering options
@@ -787,6 +787,8 @@ export class FileSystemStorage extends BaseStorage {
  */
  async acquireLock(lockKey, ttl = 30000) {
  await this.ensureInitialized();
+ // Ensure lock directory exists
+ await this.ensureDirectoryExists(this.lockDir);
  const lockFile = path.join(this.lockDir, `${lockKey}.lock`);
  const lockValue = `${Date.now()}_${Math.random()}_${process.pid || 'unknown'}`;
  const expiresAt = Date.now() + ttl;
@@ -146,7 +146,7 @@ export class OPFSStorage extends BaseStorage {
  connections: this.mapToObject(noun.connections, (set) => Array.from(set))
  };
  // Create or get the file for this noun
- const fileHandle = await this.nounsDir.getFileHandle(noun.id, {
+ const fileHandle = await this.nounsDir.getFileHandle(`${noun.id}.json`, {
  create: true
  });
  // Write the noun data to the file
@@ -166,7 +166,7 @@ export class OPFSStorage extends BaseStorage {
  await this.ensureInitialized();
  try {
  // Get the file handle for this noun
- const fileHandle = await this.nounsDir.getFileHandle(id);
+ const fileHandle = await this.nounsDir.getFileHandle(`${id}.json`);
  // Read the noun data from the file
  const file = await fileHandle.getFile();
  const text = await file.text();
@@ -253,7 +253,7 @@ export class OPFSStorage extends BaseStorage {
  async deleteNode(id) {
  await this.ensureInitialized();
  try {
- await this.nounsDir.removeEntry(id);
+ await this.nounsDir.removeEntry(`${id}.json`);
  }
  catch (error) {
  // Ignore NotFoundError, which means the file doesn't exist
@@ -281,7 +281,7 @@ export class OPFSStorage extends BaseStorage {
  connections: this.mapToObject(edge.connections, (set) => Array.from(set))
  };
  // Create or get the file for this verb
- const fileHandle = await this.verbsDir.getFileHandle(edge.id, {
+ const fileHandle = await this.verbsDir.getFileHandle(`${edge.id}.json`, {
  create: true
  });
  // Write the verb data to the file
@@ -307,7 +307,7 @@ export class OPFSStorage extends BaseStorage {
  await this.ensureInitialized();
  try {
  // Get the file handle for this edge
- const fileHandle = await this.verbsDir.getFileHandle(id);
+ const fileHandle = await this.verbsDir.getFileHandle(`${id}.json`);
  // Read the edge data from the file
  const file = await fileHandle.getFile();
  const text = await file.text();
@@ -389,10 +389,12 @@ export class OPFSStorage extends BaseStorage {
  * Get verbs by source (internal implementation)
  */
  async getVerbsBySource_internal(sourceId) {
- // This method is deprecated and would require loading metadata for each edge
- // For now, return empty array since this is not efficiently implementable with new storage pattern
- console.warn('getVerbsBySource_internal is deprecated and not efficiently supported in new storage pattern');
- return [];
+ // Use the paginated approach to properly handle HNSWVerb to GraphVerb conversion
+ const result = await this.getVerbsWithPagination({
+ filter: { sourceId: [sourceId] },
+ limit: Number.MAX_SAFE_INTEGER // Get all matching results
+ });
+ return result.items;
  }
  /**
  * Get edges by source
@@ -407,10 +409,12 @@ export class OPFSStorage extends BaseStorage {
  * Get verbs by target (internal implementation)
  */
  async getVerbsByTarget_internal(targetId) {
- // This method is deprecated and would require loading metadata for each edge
- // For now, return empty array since this is not efficiently implementable with new storage pattern
- console.warn('getVerbsByTarget_internal is deprecated and not efficiently supported in new storage pattern');
- return [];
+ // Use the paginated approach to properly handle HNSWVerb to GraphVerb conversion
+ const result = await this.getVerbsWithPagination({
+ filter: { targetId: [targetId] },
+ limit: Number.MAX_SAFE_INTEGER // Get all matching results
+ });
+ return result.items;
  }
  /**
  * Get edges by target
@@ -425,10 +429,12 @@ export class OPFSStorage extends BaseStorage {
  * Get verbs by type (internal implementation)
  */
  async getVerbsByType_internal(type) {
- // This method is deprecated and would require loading metadata for each edge
- // For now, return empty array since this is not efficiently implementable with new storage pattern
- console.warn('getVerbsByType_internal is deprecated and not efficiently supported in new storage pattern');
- return [];
+ // Use the paginated approach to properly handle HNSWVerb to GraphVerb conversion
+ const result = await this.getVerbsWithPagination({
+ filter: { verbType: [type] },
+ limit: Number.MAX_SAFE_INTEGER // Get all matching results
+ });
+ return result.items;
  }
  /**
  * Get edges by type
@@ -451,7 +457,7 @@ export class OPFSStorage extends BaseStorage {
  async deleteEdge(id) {
  await this.ensureInitialized();
  try {
- await this.verbsDir.removeEntry(id);
+ await this.verbsDir.removeEntry(`${id}.json`);
  }
  catch (error) {
  // Ignore NotFoundError, which means the file doesn't exist
@@ -468,7 +474,7 @@ export class OPFSStorage extends BaseStorage {
  await this.ensureInitialized();
  try {
  // Create or get the file for this metadata
- const fileHandle = await this.metadataDir.getFileHandle(id, {
+ const fileHandle = await this.metadataDir.getFileHandle(`${id}.json`, {
  create: true
  });
  // Write the metadata to the file
@@ -488,7 +494,7 @@ export class OPFSStorage extends BaseStorage {
  await this.ensureInitialized();
  try {
  // Get the file handle for this metadata
- const fileHandle = await this.metadataDir.getFileHandle(id);
+ const fileHandle = await this.metadataDir.getFileHandle(`${id}.json`);
  // Read the metadata from the file
  const file = await fileHandle.getFile();
  const text = await file.text();
@@ -305,26 +305,14 @@ export declare class S3CompatibleStorage extends BaseStorage {
  * Get verbs by source (internal implementation)
  */
  protected getVerbsBySource_internal(sourceId: string): Promise<GraphVerb[]>;
- /**
- * Get edges by source
- */
- protected getEdgesBySource(sourceId: string): Promise<GraphVerb[]>;
  /**
  * Get verbs by target (internal implementation)
  */
  protected getVerbsByTarget_internal(targetId: string): Promise<GraphVerb[]>;
- /**
- * Get edges by target
- */
- protected getEdgesByTarget(targetId: string): Promise<GraphVerb[]>;
  /**
  * Get verbs by type (internal implementation)
  */
  protected getVerbsByType_internal(type: string): Promise<GraphVerb[]>;
- /**
- * Get edges by type
- */
- protected getEdgesByType(type: string): Promise<GraphVerb[]>;
  /**
  * Delete a verb from storage (internal implementation)
  */
@@ -381,15 +381,23 @@ export class S3CompatibleStorage extends BaseStorage {
  // Get metrics
  const backpressureStatus = this.backpressure.getStatus();
  const socketMetrics = this.socketManager.getMetrics();
- // EXTREMELY aggressive detection - activate on ANY load
- const shouldEnableHighVolume = this.forceHighVolumeMode || // Environment override
- backpressureStatus.queueLength >= threshold || // Configurable threshold (>= 0 by default!)
- socketMetrics.pendingRequests >= threshold || // Socket pressure
- this.pendingOperations >= threshold || // Any pending ops
- socketMetrics.socketUtilization >= 0.01 || // Even 1% socket usage
- (socketMetrics.requestsPerSecond >= 1) || // Any request rate
- (this.consecutiveErrors >= 0) || // Always true - any system activity
- true; // FORCE ENABLE for emergency debugging
+ // Reasonable high-volume detection - only activate under real load
+ const isTestEnvironment = process.env.NODE_ENV === 'test';
+ const explicitlyDisabled = process.env.BRAINY_FORCE_BUFFERING === 'false';
+ // Use reasonable thresholds instead of emergency aggressive ones
+ const reasonableThreshold = Math.max(threshold, 10); // At least 10 pending operations
+ const highSocketUtilization = 0.8; // 80% socket utilization
+ const highRequestRate = 50; // 50 requests per second
+ const significantErrors = 5; // 5 consecutive errors
+ const shouldEnableHighVolume = !isTestEnvironment && // Disable in test environment
+ !explicitlyDisabled && // Allow explicit disabling
+ (this.forceHighVolumeMode || // Environment override
+ backpressureStatus.queueLength >= reasonableThreshold || // High queue backlog
+ socketMetrics.pendingRequests >= reasonableThreshold || // Many pending requests
+ this.pendingOperations >= reasonableThreshold || // Many pending ops
+ socketMetrics.socketUtilization >= highSocketUtilization || // High socket pressure
+ (socketMetrics.requestsPerSecond >= highRequestRate) || // High request rate
+ (this.consecutiveErrors >= significantErrors)); // Significant error pattern
  if (shouldEnableHighVolume && !this.highVolumeMode) {
  this.highVolumeMode = true;
  this.logger.warn(`🚨 HIGH-VOLUME MODE ACTIVATED 🚨`);
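Two environment switches now gate this path: NODE_ENV=test disables detection entirely, and BRAINY_FORCE_BUFFERING=false opts out explicitly. How forceHighVolumeMode itself gets set is not shown in this diff, so the snippet below relies only on the two checks visible above:

```js
// Opting out of high-volume buffering, e.g. for a local benchmark run.
process.env.NODE_ENV = 'test';                 // skips high-volume detection entirely
process.env.BRAINY_FORCE_BUFFERING = 'false';  // explicit opt-out outside of test runs
```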
@@ -1278,8 +1286,42 @@ export class S3CompatibleStorage extends BaseStorage {
  graphVerbs.push(graphVerb);
  }
  }
+ // Apply filtering at GraphVerb level since HNSWVerb filtering is not supported
+ let filteredGraphVerbs = graphVerbs;
+ if (options.filter) {
+ filteredGraphVerbs = graphVerbs.filter((graphVerb) => {
+ // Filter by sourceId
+ if (options.filter.sourceId) {
+ const sourceIds = Array.isArray(options.filter.sourceId)
+ ? options.filter.sourceId
+ : [options.filter.sourceId];
+ if (!sourceIds.includes(graphVerb.sourceId)) {
+ return false;
+ }
+ }
+ // Filter by targetId
+ if (options.filter.targetId) {
+ const targetIds = Array.isArray(options.filter.targetId)
+ ? options.filter.targetId
+ : [options.filter.targetId];
+ if (!targetIds.includes(graphVerb.targetId)) {
+ return false;
+ }
+ }
+ // Filter by verbType (maps to type field)
+ if (options.filter.verbType) {
+ const verbTypes = Array.isArray(options.filter.verbType)
+ ? options.filter.verbType
+ : [options.filter.verbType];
+ if (graphVerb.type && !verbTypes.includes(graphVerb.type)) {
+ return false;
+ }
+ }
+ return true;
+ });
+ }
  return {
- items: graphVerbs,
+ items: filteredGraphVerbs,
  hasMore: result.hasMore,
  nextCursor: result.nextCursor
  };
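With this change, getVerbsWithPagination() honours a filter object whose keys (sourceId, targetId, verbType) each accept a single value or an array, per the Array.isArray checks above. A small usage sketch with placeholder values:

```js
// Usage sketch; 'noun-123' and 'relatesTo' are hypothetical values.
const page = await storage.getVerbsWithPagination({
  filter: {
    sourceId: ['noun-123'],   // match any of these source ids
    verbType: 'relatesTo'     // single values are also accepted
  },
  limit: 500
});
console.log(page.items.length, page.hasMore, page.nextCursor);
```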
@@ -1288,46 +1330,34 @@ export class S3CompatibleStorage extends BaseStorage {
  * Get verbs by source (internal implementation)
  */
  async getVerbsBySource_internal(sourceId) {
- return this.getEdgesBySource(sourceId);
- }
- /**
- * Get edges by source
- */
- async getEdgesBySource(sourceId) {
- // This method is deprecated and would require loading metadata for each edge
- // For now, return empty array since this is not efficiently implementable with new storage pattern
- this.logger.trace('getEdgesBySource is deprecated and not efficiently supported in new storage pattern');
- return [];
+ // Use the paginated approach to properly handle HNSWVerb to GraphVerb conversion
+ const result = await this.getVerbsWithPagination({
+ filter: { sourceId: [sourceId] },
+ limit: Number.MAX_SAFE_INTEGER // Get all matching results
+ });
+ return result.items;
  }
  /**
  * Get verbs by target (internal implementation)
  */
  async getVerbsByTarget_internal(targetId) {
- return this.getEdgesByTarget(targetId);
- }
- /**
- * Get edges by target
- */
- async getEdgesByTarget(targetId) {
- // This method is deprecated and would require loading metadata for each edge
- // For now, return empty array since this is not efficiently implementable with new storage pattern
- this.logger.trace('getEdgesByTarget is deprecated and not efficiently supported in new storage pattern');
- return [];
+ // Use the paginated approach to properly handle HNSWVerb to GraphVerb conversion
+ const result = await this.getVerbsWithPagination({
+ filter: { targetId: [targetId] },
+ limit: Number.MAX_SAFE_INTEGER // Get all matching results
+ });
+ return result.items;
  }
  /**
  * Get verbs by type (internal implementation)
  */
  async getVerbsByType_internal(type) {
- return this.getEdgesByType(type);
- }
- /**
- * Get edges by type
- */
- async getEdgesByType(type) {
- // This method is deprecated and would require loading metadata for each edge
- // For now, return empty array since this is not efficiently implementable with new storage pattern
- this.logger.trace('getEdgesByType is deprecated and not efficiently supported in new storage pattern');
- return [];
+ // Use the paginated approach to properly handle HNSWVerb to GraphVerb conversion
+ const result = await this.getVerbsWithPagination({
+ filter: { verbType: [type] },
+ limit: Number.MAX_SAFE_INTEGER // Get all matching results
+ });
+ return result.items;
  }
  /**
  * Delete a verb from storage (internal implementation)
@@ -72,11 +72,10 @@ export declare abstract class BaseStorage extends BaseStorageAdapter {
  */
  protected convertHNSWVerbToGraphVerb(hnswVerb: HNSWVerb): Promise<GraphVerb | null>;
  /**
- * Get all verbs from storage
- * @returns Promise that resolves to an array of all HNSWVerbs
- * @deprecated This method loads all data into memory and may cause performance issues. Use getVerbs() with pagination instead.
+ * Internal method for loading all verbs - used by performance optimizations
+ * @internal - Do not use directly, use getVerbs() with pagination instead
  */
- getAllVerbs(): Promise<HNSWVerb[]>;
+ protected _loadAllVerbsForOptimization(): Promise<HNSWVerb[]>;
  /**
  * Get verbs by source
  */
@@ -90,11 +89,10 @@ export declare abstract class BaseStorage extends BaseStorageAdapter {
  */
  getVerbsByType(type: string): Promise<GraphVerb[]>;
  /**
- * Get all nouns from storage
- * @returns Promise that resolves to an array of all nouns
- * @deprecated This method loads all data into memory and may cause performance issues. Use getNouns() with pagination instead.
+ * Internal method for loading all nouns - used by performance optimizations
+ * @internal - Do not use directly, use getNouns() with pagination instead
  */
- getAllNouns(): Promise<HNSWNoun[]>;
+ protected _loadAllNounsForOptimization(): Promise<HNSWNoun[]>;
  /**
  * Get nouns with pagination and filtering
  * @param options Pagination and filtering options