@soulcraft/brainy 1.5.0 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. package/CHANGELOG.md +188 -0
  2. package/LICENSE +2 -2
  3. package/README.md +200 -595
  4. package/bin/brainy-interactive.js +564 -0
  5. package/bin/brainy-ts.js +18 -0
  6. package/bin/brainy.js +672 -81
  7. package/dist/augmentationPipeline.d.ts +48 -220
  8. package/dist/augmentationPipeline.js +60 -508
  9. package/dist/augmentationRegistry.d.ts +22 -31
  10. package/dist/augmentationRegistry.js +28 -79
  11. package/dist/augmentations/apiServerAugmentation.d.ts +108 -0
  12. package/dist/augmentations/apiServerAugmentation.js +502 -0
  13. package/dist/augmentations/batchProcessingAugmentation.d.ts +95 -0
  14. package/dist/augmentations/batchProcessingAugmentation.js +567 -0
  15. package/dist/augmentations/brainyAugmentation.d.ts +153 -0
  16. package/dist/augmentations/brainyAugmentation.js +145 -0
  17. package/dist/augmentations/cacheAugmentation.d.ts +105 -0
  18. package/dist/augmentations/cacheAugmentation.js +238 -0
  19. package/dist/augmentations/conduitAugmentations.d.ts +54 -156
  20. package/dist/augmentations/conduitAugmentations.js +156 -1082
  21. package/dist/augmentations/connectionPoolAugmentation.d.ts +62 -0
  22. package/dist/augmentations/connectionPoolAugmentation.js +316 -0
  23. package/dist/augmentations/defaultAugmentations.d.ts +53 -0
  24. package/dist/augmentations/defaultAugmentations.js +88 -0
  25. package/dist/augmentations/entityRegistryAugmentation.d.ts +126 -0
  26. package/dist/augmentations/entityRegistryAugmentation.js +386 -0
  27. package/dist/augmentations/indexAugmentation.d.ts +117 -0
  28. package/dist/augmentations/indexAugmentation.js +284 -0
  29. package/dist/augmentations/intelligentVerbScoringAugmentation.d.ts +152 -0
  30. package/dist/augmentations/intelligentVerbScoringAugmentation.js +554 -0
  31. package/dist/augmentations/metricsAugmentation.d.ts +202 -0
  32. package/dist/augmentations/metricsAugmentation.js +291 -0
  33. package/dist/augmentations/monitoringAugmentation.d.ts +94 -0
  34. package/dist/augmentations/monitoringAugmentation.js +227 -0
  35. package/dist/augmentations/neuralImport.d.ts +50 -117
  36. package/dist/augmentations/neuralImport.js +255 -629
  37. package/dist/augmentations/requestDeduplicatorAugmentation.d.ts +52 -0
  38. package/dist/augmentations/requestDeduplicatorAugmentation.js +162 -0
  39. package/dist/augmentations/serverSearchAugmentations.d.ts +43 -22
  40. package/dist/augmentations/serverSearchAugmentations.js +125 -72
  41. package/dist/augmentations/storageAugmentation.d.ts +54 -0
  42. package/dist/augmentations/storageAugmentation.js +93 -0
  43. package/dist/augmentations/storageAugmentations.d.ts +96 -0
  44. package/dist/augmentations/storageAugmentations.js +182 -0
  45. package/dist/augmentations/synapseAugmentation.d.ts +156 -0
  46. package/dist/augmentations/synapseAugmentation.js +312 -0
  47. package/dist/augmentations/walAugmentation.d.ts +108 -0
  48. package/dist/augmentations/walAugmentation.js +515 -0
  49. package/dist/brainyData.d.ts +404 -130
  50. package/dist/brainyData.js +1331 -853
  51. package/dist/chat/BrainyChat.d.ts +16 -8
  52. package/dist/chat/BrainyChat.js +60 -32
  53. package/dist/chat/ChatCLI.d.ts +1 -1
  54. package/dist/chat/ChatCLI.js +6 -6
  55. package/dist/cli/catalog.d.ts +3 -3
  56. package/dist/cli/catalog.js +116 -70
  57. package/dist/cli/commands/core.d.ts +61 -0
  58. package/dist/cli/commands/core.js +348 -0
  59. package/dist/cli/commands/neural.d.ts +25 -0
  60. package/dist/cli/commands/neural.js +508 -0
  61. package/dist/cli/commands/utility.d.ts +37 -0
  62. package/dist/cli/commands/utility.js +276 -0
  63. package/dist/cli/index.d.ts +7 -0
  64. package/dist/cli/index.js +167 -0
  65. package/dist/cli/interactive.d.ts +164 -0
  66. package/dist/cli/interactive.js +542 -0
  67. package/dist/cortex/neuralImport.js +5 -5
  68. package/dist/critical/model-guardian.js +11 -4
  69. package/dist/embeddings/lightweight-embedder.d.ts +23 -0
  70. package/dist/embeddings/lightweight-embedder.js +136 -0
  71. package/dist/embeddings/universal-memory-manager.d.ts +38 -0
  72. package/dist/embeddings/universal-memory-manager.js +206 -0
  73. package/dist/embeddings/worker-embedding.d.ts +7 -0
  74. package/dist/embeddings/worker-embedding.js +77 -0
  75. package/dist/embeddings/worker-manager.d.ts +28 -0
  76. package/dist/embeddings/worker-manager.js +162 -0
  77. package/dist/examples/basicUsage.js +7 -7
  78. package/dist/graph/pathfinding.d.ts +78 -0
  79. package/dist/graph/pathfinding.js +393 -0
  80. package/dist/hnsw/hnswIndex.d.ts +13 -0
  81. package/dist/hnsw/hnswIndex.js +35 -0
  82. package/dist/hnsw/hnswIndexOptimized.d.ts +1 -0
  83. package/dist/hnsw/hnswIndexOptimized.js +3 -0
  84. package/dist/index.d.ts +9 -11
  85. package/dist/index.js +21 -11
  86. package/dist/indices/fieldIndex.d.ts +76 -0
  87. package/dist/indices/fieldIndex.js +357 -0
  88. package/dist/mcp/brainyMCPAdapter.js +3 -2
  89. package/dist/mcp/mcpAugmentationToolset.js +11 -17
  90. package/dist/neural/embeddedPatterns.d.ts +41 -0
  91. package/dist/neural/embeddedPatterns.js +4044 -0
  92. package/dist/neural/naturalLanguageProcessor.d.ts +94 -0
  93. package/dist/neural/naturalLanguageProcessor.js +317 -0
  94. package/dist/neural/naturalLanguageProcessorStatic.d.ts +64 -0
  95. package/dist/neural/naturalLanguageProcessorStatic.js +151 -0
  96. package/dist/neural/neuralAPI.d.ts +255 -0
  97. package/dist/neural/neuralAPI.js +612 -0
  98. package/dist/neural/patternLibrary.d.ts +101 -0
  99. package/dist/neural/patternLibrary.js +313 -0
  100. package/dist/neural/patterns.d.ts +27 -0
  101. package/dist/neural/patterns.js +68 -0
  102. package/dist/neural/staticPatternMatcher.d.ts +35 -0
  103. package/dist/neural/staticPatternMatcher.js +153 -0
  104. package/dist/scripts/precomputePatternEmbeddings.d.ts +19 -0
  105. package/dist/scripts/precomputePatternEmbeddings.js +100 -0
  106. package/dist/storage/adapters/fileSystemStorage.d.ts +5 -0
  107. package/dist/storage/adapters/fileSystemStorage.js +20 -0
  108. package/dist/storage/adapters/s3CompatibleStorage.d.ts +5 -0
  109. package/dist/storage/adapters/s3CompatibleStorage.js +16 -0
  110. package/dist/storage/enhancedClearOperations.d.ts +83 -0
  111. package/dist/storage/enhancedClearOperations.js +345 -0
  112. package/dist/storage/storageFactory.js +31 -27
  113. package/dist/triple/TripleIntelligence.d.ts +134 -0
  114. package/dist/triple/TripleIntelligence.js +548 -0
  115. package/dist/types/augmentations.d.ts +45 -344
  116. package/dist/types/augmentations.js +5 -2
  117. package/dist/types/brainyDataInterface.d.ts +20 -10
  118. package/dist/types/graphTypes.d.ts +46 -0
  119. package/dist/types/graphTypes.js +16 -2
  120. package/dist/utils/BoundedRegistry.d.ts +29 -0
  121. package/dist/utils/BoundedRegistry.js +54 -0
  122. package/dist/utils/embedding.js +20 -3
  123. package/dist/utils/hybridModelManager.js +10 -5
  124. package/dist/utils/metadataFilter.d.ts +33 -19
  125. package/dist/utils/metadataFilter.js +58 -23
  126. package/dist/utils/metadataIndex.d.ts +37 -6
  127. package/dist/utils/metadataIndex.js +427 -64
  128. package/dist/utils/requestDeduplicator.d.ts +10 -0
  129. package/dist/utils/requestDeduplicator.js +24 -0
  130. package/dist/utils/unifiedCache.d.ts +103 -0
  131. package/dist/utils/unifiedCache.js +311 -0
  132. package/package.json +40 -125
  133. package/scripts/ensure-models.js +108 -0
  134. package/scripts/prepare-models.js +387 -0
  135. package/OFFLINE_MODELS.md +0 -56
  136. package/dist/intelligence/neuralEngine.d.ts +0 -207
  137. package/dist/intelligence/neuralEngine.js +0 -706
  138. package/dist/utils/modelLoader.d.ts +0 -32
  139. package/dist/utils/modelLoader.js +0 -219
  140. package/dist/utils/modelManager.d.ts +0 -77
  141. package/dist/utils/modelManager.js +0 -219
@@ -0,0 +1,100 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * 🧠 Pre-compute Pattern Embeddings Script
4
+ *
5
+ * This script pre-computes embeddings for all patterns and saves them to disk.
6
+ * Run this once after adding new patterns to avoid runtime embedding costs.
7
+ *
8
+ * How it works:
9
+ * 1. Load all patterns from library.json
10
+ * 2. Use Brainy's embedding model to encode each pattern's examples
11
+ * 3. Average the example embeddings to get a robust pattern representation
12
+ * 4. Save embeddings to src/patterns/embeddings.json for instant loading
13
+ *
14
+ * Benefits:
15
+ * - Pattern matching becomes pure math (cosine similarity)
16
+ * - No embedding model calls during query processing
17
+ * - Patterns load instantly with pre-computed vectors
18
+ */
19
+ import { BrainyData } from '../brainyData.js';
20
+ import patternData from '../patterns/library.json' assert { type: 'json' };
21
+ import * as fs from 'fs/promises';
22
+ import * as path from 'path';
23
+ async function precomputeEmbeddings() {
24
+ console.log('🧠 Pre-computing pattern embeddings...');
25
+ // Initialize Brainy with minimal config
26
+ const brain = new BrainyData({
27
+ storage: { forceMemoryStorage: true },
28
+ logging: { verbose: false }
29
+ });
30
+ await brain.init();
31
+ console.log('✅ Brainy initialized');
32
+ const embeddings = {};
33
+ let processedCount = 0;
34
+ const totalPatterns = patternData.patterns.length;
35
+ for (const pattern of patternData.patterns) {
36
+ console.log(`\n📝 Processing pattern: ${pattern.id} (${++processedCount}/${totalPatterns})`);
37
+ console.log(` Category: ${pattern.category}`);
38
+ console.log(` Examples: ${pattern.examples.length}`);
39
+ // Embed all examples
40
+ const exampleEmbeddings = [];
41
+ for (const example of pattern.examples) {
42
+ try {
43
+ const embedding = await brain.embed(example);
44
+ exampleEmbeddings.push(embedding);
45
+ console.log(` ✓ Embedded: "${example.substring(0, 50)}..."`);
46
+ }
47
+ catch (error) {
48
+ console.error(` ✗ Failed to embed: "${example}"`, error);
49
+ }
50
+ }
51
+ if (exampleEmbeddings.length === 0) {
52
+ console.warn(` ⚠️ No embeddings generated for pattern ${pattern.id}`);
53
+ continue;
54
+ }
55
+ // Average the embeddings for a robust representation
56
+ const avgEmbedding = averageVectors(exampleEmbeddings);
57
+ embeddings[pattern.id] = {
58
+ patternId: pattern.id,
59
+ embedding: avgEmbedding,
60
+ examples: pattern.examples,
61
+ averageMethod: 'arithmetic_mean'
62
+ };
63
+ console.log(` ✅ Generated ${avgEmbedding.length}-dimensional embedding`);
64
+ }
65
+ // Save embeddings to file
66
+ const outputPath = path.join(process.cwd(), 'src', 'patterns', 'embeddings.json');
67
+ await fs.writeFile(outputPath, JSON.stringify(embeddings, null, 2));
68
+ console.log(`\n✅ Saved ${Object.keys(embeddings).length} pattern embeddings to ${outputPath}`);
69
+ // Calculate storage size
70
+ const stats = await fs.stat(outputPath);
71
+ console.log(`📊 File size: ${(stats.size / 1024).toFixed(2)} KB`);
72
+ // Print statistics
73
+ console.log('\n📈 Embedding Statistics:');
74
+ console.log(` Total patterns: ${totalPatterns}`);
75
+ console.log(` Successfully embedded: ${Object.keys(embeddings).length}`);
76
+ console.log(` Failed: ${totalPatterns - Object.keys(embeddings).length}`);
77
+ console.log(` Embedding dimensions: ${Object.values(embeddings)[0]?.embedding.length || 0}`);
78
+ await brain.close();
79
+ console.log('\n✅ Complete!');
80
+ }
81
/**
 * Compute the element-wise arithmetic mean of a list of numeric vectors.
 *
 * The dimensionality is taken from the first vector; every vector is assumed
 * to have at least that many elements. An empty input yields an empty array.
 *
 * @param {number[][]} vectors - Vectors to average.
 * @returns {number[]} The averaged vector (or [] when no vectors are given).
 */
function averageVectors(vectors) {
    const count = vectors.length;
    if (count === 0)
        return [];
    const dim = vectors[0].length;
    // Accumulate per-component sums across all vectors
    const sums = new Array(dim).fill(0);
    for (const vec of vectors) {
        for (let i = 0; i < dim; i++) {
            sums[i] += vec[i];
        }
    }
    // Scale each component by the vector count to obtain the mean
    return sums.map((total) => total / count);
}
98
// Run the script. Previously a failure was only logged via
// `.catch(console.error)` and the process still exited 0; record a
// non-zero exit code so CI and shell callers can detect the failure.
precomputeEmbeddings().catch((error) => {
    console.error(error);
    process.exitCode = 1;
});
//# sourceMappingURL=precomputePatternEmbeddings.js.map
@@ -136,6 +136,11 @@ export declare class FileSystemStorage extends BaseStorage {
136
136
  * Clear all data from storage
137
137
  */
138
138
  clear(): Promise<void>;
139
+ /**
140
+ * Enhanced clear operation with safety mechanisms and performance optimizations
141
+ * Provides progress tracking, backup options, and instance name confirmation
142
+ */
143
+ clearEnhanced(options?: import('../enhancedClearOperations.js').ClearOptions): Promise<import('../enhancedClearOperations.js').ClearResult>;
139
144
  /**
140
145
  * Get information about storage usage and capacity
141
146
  */
@@ -600,6 +600,26 @@ export class FileSystemStorage extends BaseStorage {
600
600
  this.statisticsCache = null;
601
601
  this.statisticsModified = false;
602
602
  }
603
    /**
     * Enhanced clear operation with safety mechanisms and performance optimizations
     * Provides progress tracking, backup options, and instance name confirmation
     *
     * Delegates the actual work to EnhancedFileSystemClear, passing it this
     * adapter's root directory plus the fs/path modules. On a successful clear
     * the cached statistics are invalidated so the next read rescans storage.
     *
     * @param options - ClearOptions: confirmInstanceName, batchSize,
     *                  maxConcurrency, dryRun, createBackup, onProgress (all optional).
     * @returns ClearResult with per-category deletion counts, duration, and errors.
     * @throws Error when the fs module (or fs.promises) is unavailable.
     */
    async clearEnhanced(options = {}) {
        await this.ensureInitialized();
        // Check if fs module is available
        if (!fs || !fs.promises) {
            throw new Error('FileSystemStorage.clearEnhanced: fs module not available');
        }
        // Lazy import keeps the helper out of the startup path
        const { EnhancedFileSystemClear } = await import('../enhancedClearOperations.js');
        const enhancedClear = new EnhancedFileSystemClear(this.rootDir, fs, path);
        const result = await enhancedClear.clear(options);
        if (result.success) {
            // Clear the statistics cache
            this.statisticsCache = null;
            this.statisticsModified = false;
        }
        return result;
    }
603
623
  /**
604
624
  * Get information about storage usage and capacity
605
625
  */
@@ -358,6 +358,11 @@ export declare class S3CompatibleStorage extends BaseStorage {
358
358
  * Clear all data from storage
359
359
  */
360
360
  clear(): Promise<void>;
361
+ /**
362
+ * Enhanced clear operation with safety mechanisms and performance optimizations
363
+ * Provides progress tracking, backup options, and instance name confirmation
364
+ */
365
+ clearEnhanced(options?: import('../enhancedClearOperations.js').ClearOptions): Promise<import('../enhancedClearOperations.js').ClearResult>;
361
366
  /**
362
367
  * Get information about storage usage and capacity
363
368
  * Optimized version that uses cached statistics instead of expensive full scans
@@ -1800,6 +1800,22 @@ export class S3CompatibleStorage extends BaseStorage {
1800
1800
  throw new Error(`Failed to clear storage: ${error}`);
1801
1801
  }
1802
1802
  }
1803
    /**
     * Enhanced clear operation with safety mechanisms and performance optimizations
     * Provides progress tracking, backup options, and instance name confirmation
     *
     * Delegates to EnhancedS3Clear using this adapter's S3 client and bucket.
     * On a successful clear the cached statistics are invalidated so the next
     * read rescans the bucket.
     *
     * @param options - ClearOptions: confirmInstanceName, batchSize,
     *                  maxConcurrency, dryRun, createBackup, onProgress (all optional).
     * @returns ClearResult with per-category deletion counts, duration, and errors.
     */
    async clearEnhanced(options = {}) {
        await this.ensureInitialized();
        // Lazy import keeps the helper out of the startup path
        const { EnhancedS3Clear } = await import('../enhancedClearOperations.js');
        const enhancedClear = new EnhancedS3Clear(this.s3Client, this.bucketName);
        const result = await enhancedClear.clear(options);
        if (result.success) {
            // Clear the statistics cache
            this.statisticsCache = null;
            this.statisticsModified = false;
        }
        return result;
    }
1803
1819
  /**
1804
1820
  * Get information about storage usage and capacity
1805
1821
  * Optimized version that uses cached statistics instead of expensive full scans
@@ -0,0 +1,83 @@
1
/**
 * Enhanced Clear/Delete Operations for Brainy Storage
 * Provides safe, efficient, and production-ready bulk deletion methods
 */
export interface ClearOptions {
    /**
     * Safety confirmation - must match database instance name
     * Prevents accidental deletion of wrong databases
     */
    confirmInstanceName?: string;
    /**
     * Performance optimization settings
     */
    /** Items per deletion batch (filesystem default 100; S3 capped at 1000) */
    batchSize?: number;
    /** Maximum parallel delete operations within a batch */
    maxConcurrency?: number;
    /**
     * Safety mechanisms
     */
    /** Count items without deleting anything */
    dryRun?: boolean;
    /** Copy the data aside before clearing (filesystem only) */
    createBackup?: boolean;
    /**
     * Progress callback for large operations
     */
    onProgress?: (progress: ClearProgress) => void;
}
export interface ClearProgress {
    /** Which phase of the clear is currently reporting */
    stage: 'backup' | 'nouns' | 'verbs' | 'metadata' | 'system' | 'cache' | 'complete';
    /** Total items known for this stage (may be an estimate for S3 pagination) */
    totalItems: number;
    /** Items handled so far in this stage */
    processedItems: number;
    /** Number of per-item failures encountered so far */
    errors: number;
    /** Optional ETA in milliseconds — NOTE(review): not populated by current implementations */
    estimatedTimeRemaining?: number;
}
export interface ClearResult {
    /** False when the operation aborted with an error (see `errors`) */
    success: boolean;
    /** Items removed per category (counts only, not identifiers) */
    itemsDeleted: {
        nouns: number;
        verbs: number;
        metadata: number;
        system: number;
    };
    /** Wall-clock duration of the operation in milliseconds */
    duration: number;
    /** Fatal errors captured during the operation */
    errors: Error[];
    /** Path of the backup directory when `createBackup` was requested */
    backupLocation?: string;
}
/**
 * Enhanced FileSystem bulk delete operations
 */
export declare class EnhancedFileSystemClear {
    private rootDir;
    private fs;
    private path;
    constructor(rootDir: string, fs: any, path: any);
    /**
     * Optimized bulk delete for filesystem storage
     * Uses parallel deletion with controlled concurrency
     */
    clear(options?: ClearOptions): Promise<ClearResult>;
    /**
     * High-performance directory clearing with controlled concurrency
     */
    private clearDirectoryOptimized;
    private createBackup;
    private performDryRun;
}
/**
 * Enhanced S3 bulk delete operations
 */
export declare class EnhancedS3Clear {
    private s3Client;
    private bucketName;
    constructor(s3Client: any, bucketName: string);
    /**
     * Optimized bulk delete for S3 storage
     * Uses batch delete operations for maximum efficiency
     */
    clear(options?: ClearOptions): Promise<ClearResult>;
    /**
     * High-performance prefix clearing using S3 batch delete
     */
    private clearPrefixOptimized;
    private getBucketInfo;
    private performDryRun;
}
@@ -0,0 +1,345 @@
1
+ /**
2
+ * Enhanced Clear/Delete Operations for Brainy Storage
3
+ * Provides safe, efficient, and production-ready bulk deletion methods
4
+ */
5
/**
 * Enhanced FileSystem bulk delete operations
 */
export class EnhancedFileSystemClear {
    /**
     * @param rootDir Root directory of the Brainy instance to clear.
     * @param fs      Node `fs` module (must expose `fs.promises`).
     * @param path    Node `path` module.
     */
    constructor(rootDir, fs, path) {
        this.rootDir = rootDir;
        this.fs = fs;
        this.path = path;
    }
    /**
     * Optimized bulk delete for filesystem storage
     * Uses parallel deletion with controlled concurrency
     *
     * Clears the nouns/, verbs/, metadata dirs and system dirs under rootDir.
     * Errors never throw out of this method: fatal failures are captured in
     * `result.errors` with `success: false`.
     *
     * @param options ClearOptions (confirmInstanceName, batchSize,
     *                maxConcurrency, dryRun, createBackup, onProgress).
     * @returns ClearResult with per-category deletion counts.
     */
    async clear(options = {}) {
        const startTime = Date.now();
        const result = {
            success: false,
            itemsDeleted: { nouns: 0, verbs: 0, metadata: 0, system: 0 },
            duration: 0,
            errors: []
        };
        try {
            // Safety check: the directory basename must match the expected instance name
            if (options.confirmInstanceName) {
                const actualName = this.path.basename(this.rootDir);
                if (actualName !== options.confirmInstanceName) {
                    throw new Error(`Instance name mismatch: expected '${options.confirmInstanceName}', got '${actualName}'`);
                }
            }
            // Create backup if requested
            if (options.createBackup) {
                result.backupLocation = await this.createBackup();
                options.onProgress?.({
                    stage: 'backup',
                    totalItems: 1,
                    processedItems: 1,
                    errors: 0
                });
            }
            // Dry run - just count items, delete nothing
            if (options.dryRun) {
                return await this.performDryRun(options);
            }
            const batchSize = options.batchSize || 100;
            const maxConcurrency = options.maxConcurrency || 10;
            // Delete nouns directory with optimization
            result.itemsDeleted.nouns = await this.clearDirectoryOptimized(this.path.join(this.rootDir, 'nouns'), batchSize, maxConcurrency, (progress) => options.onProgress?.({ ...progress, stage: 'nouns' }));
            // Delete verbs directory with optimization
            result.itemsDeleted.verbs = await this.clearDirectoryOptimized(this.path.join(this.rootDir, 'verbs'), batchSize, maxConcurrency, (progress) => options.onProgress?.({ ...progress, stage: 'verbs' }));
            // Delete metadata directories (all three variants count as 'metadata')
            const metadataDirs = ['metadata', 'noun-metadata', 'verb-metadata'];
            for (const dir of metadataDirs) {
                result.itemsDeleted.metadata += await this.clearDirectoryOptimized(this.path.join(this.rootDir, dir), batchSize, maxConcurrency, (progress) => options.onProgress?.({ ...progress, stage: 'metadata' }));
            }
            // Delete system directories
            const systemDirs = ['system', 'index'];
            for (const dir of systemDirs) {
                result.itemsDeleted.system += await this.clearDirectoryOptimized(this.path.join(this.rootDir, dir), batchSize, maxConcurrency, (progress) => options.onProgress?.({ ...progress, stage: 'system' }));
            }
            result.success = true;
            result.duration = Date.now() - startTime;
            const totalDeleted = Object.values(result.itemsDeleted).reduce((a, b) => a + b, 0);
            options.onProgress?.({
                stage: 'complete',
                totalItems: totalDeleted,
                processedItems: totalDeleted,
                errors: result.errors.length
            });
        }
        catch (error) {
            result.errors.push(error);
            result.duration = Date.now() - startTime;
        }
        return result;
    }
    /**
     * High-performance directory clearing with controlled concurrency.
     *
     * FIX: the previous "semaphore" resolved immediately and never actually
     * limited parallelism; files are now unlinked in chunks of at most
     * `maxConcurrency` concurrent operations.
     *
     * @returns number of files successfully deleted (0 if the dir is missing).
     */
    async clearDirectoryOptimized(dirPath, batchSize, maxConcurrency, onProgress) {
        try {
            // Check if directory exists
            const stats = await this.fs.promises.stat(dirPath);
            if (!stats.isDirectory())
                return 0;
            // Get all files in the directory
            const files = await this.fs.promises.readdir(dirPath);
            const totalFiles = files.length;
            if (totalFiles === 0)
                return 0;
            let processedFiles = 0;
            let errors = 0;
            const concurrency = Math.max(1, maxConcurrency);
            // Process files in batches; within each batch run at most
            // `concurrency` unlink operations in parallel
            for (let i = 0; i < files.length; i += batchSize) {
                const batch = files.slice(i, i + batchSize);
                for (let j = 0; j < batch.length; j += concurrency) {
                    const chunk = batch.slice(j, j + concurrency);
                    await Promise.all(chunk.map(async (file) => {
                        try {
                            await this.fs.promises.unlink(this.path.join(dirPath, file));
                            processedFiles++;
                        }
                        catch (error) {
                            // Per-file failures are logged but do not abort the clear
                            errors++;
                            console.warn(`Failed to delete file ${file}:`, error);
                        }
                        // Report progress every 50 files or once all files are done
                        if (processedFiles % 50 === 0 || processedFiles === totalFiles) {
                            onProgress?.({
                                totalItems: totalFiles,
                                processedItems: processedFiles,
                                errors
                            });
                        }
                    }));
                }
                // Small yield between batches to prevent blocking the event loop
                await new Promise(resolve => setImmediate(resolve));
            }
            return processedFiles;
        }
        catch (error) {
            if (error.code === 'ENOENT') {
                return 0; // Directory doesn't exist, that's fine
            }
            throw error;
        }
    }
    /**
     * Copy the whole data directory to `<rootDir>-backup-<timestamp>`.
     *
     * FIX: prefers `fs.promises.cp` (cross-platform, Node >= 16.7) and only
     * falls back to spawning `cp -r`, which is unavailable on Windows.
     *
     * @returns the backup directory path.
     */
    async createBackup() {
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        const backupDir = `${this.rootDir}-backup-${timestamp}`;
        if (typeof this.fs.promises.cp === 'function') {
            await this.fs.promises.cp(this.rootDir, backupDir, { recursive: true });
            return backupDir;
        }
        // Fallback for older Node: use cp -r for efficient directory copying
        const { spawn } = await import('child_process');
        return new Promise((resolve, reject) => {
            const cp = spawn('cp', ['-r', this.rootDir, backupDir]);
            cp.on('close', (code) => {
                if (code === 0) {
                    resolve(backupDir);
                }
                else {
                    reject(new Error(`Backup failed with code ${code}`));
                }
            });
            cp.on('error', reject);
        });
    }
    /**
     * Count items without deleting anything.
     * NOTE: counts only `.json` files, whereas a real clear unlinks every
     * file — counts may differ when non-JSON files are present.
     */
    async performDryRun(options) {
        const startTime = Date.now();
        const result = {
            success: true,
            itemsDeleted: { nouns: 0, verbs: 0, metadata: 0, system: 0 },
            duration: 0,
            errors: []
        };
        // Count the .json entries of one directory; missing dirs count as 0
        const countFiles = async (dirPath) => {
            try {
                const files = await this.fs.promises.readdir(dirPath);
                return files.filter((f) => f.endsWith('.json')).length;
            }
            catch (error) {
                if (error.code === 'ENOENT')
                    return 0;
                throw error;
            }
        };
        result.itemsDeleted.nouns = await countFiles(this.path.join(this.rootDir, 'nouns'));
        result.itemsDeleted.verbs = await countFiles(this.path.join(this.rootDir, 'verbs'));
        result.itemsDeleted.metadata =
            await countFiles(this.path.join(this.rootDir, 'metadata')) +
                await countFiles(this.path.join(this.rootDir, 'noun-metadata')) +
                await countFiles(this.path.join(this.rootDir, 'verb-metadata'));
        result.itemsDeleted.system =
            await countFiles(this.path.join(this.rootDir, 'system')) +
                await countFiles(this.path.join(this.rootDir, 'index'));
        result.duration = Date.now() - startTime;
        return result;
    }
}
188
/**
 * Enhanced S3 bulk delete operations
 */
export class EnhancedS3Clear {
    /**
     * @param s3Client   AWS SDK v3 S3 client (anything with a `send` method).
     * @param bucketName Bucket holding this Brainy instance's data.
     */
    constructor(s3Client, bucketName) {
        this.s3Client = s3Client;
        this.bucketName = bucketName;
    }
    /**
     * Optimized bulk delete for S3 storage
     * Uses batch delete operations for maximum efficiency
     *
     * Never throws: fatal errors are collected into `result.errors` and
     * `success` stays false.
     */
    async clear(options = {}) {
        const begunAt = Date.now();
        const outcome = {
            success: false,
            itemsDeleted: { nouns: 0, verbs: 0, metadata: 0, system: 0 },
            duration: 0,
            errors: []
        };
        try {
            // Safety check: the bucket name doubles as the instance name
            if (options.confirmInstanceName) {
                const bucketInfo = await this.getBucketInfo();
                if (bucketInfo.instanceName !== options.confirmInstanceName) {
                    throw new Error(`Instance name mismatch: expected '${options.confirmInstanceName}', got '${bucketInfo.instanceName}'`);
                }
            }
            // Dry run - count objects only, delete nothing
            if (options.dryRun) {
                return await this.performDryRun(options);
            }
            // AWS S3 batch delete supports at most 1000 objects per request
            const perRequest = Math.min(options.batchSize || 1000, 1000);
            // Each prefix maps onto one of the four result counters; that
            // counter name is also the progress stage reported to callers.
            const targets = [
                ['nouns/', 'nouns'],
                ['verbs/', 'verbs'],
                ['metadata/', 'metadata'],
                ['noun-metadata/', 'metadata'],
                ['verb-metadata/', 'metadata'],
                ['system/', 'system'],
                ['index/', 'system']
            ];
            for (const [prefix, counter] of targets) {
                const removed = await this.clearPrefixOptimized(prefix, perRequest, (progress) => options.onProgress?.({ ...progress, stage: counter }));
                outcome.itemsDeleted[counter] += removed;
            }
            outcome.success = true;
            outcome.duration = Date.now() - begunAt;
        }
        catch (error) {
            outcome.errors.push(error);
            outcome.duration = Date.now() - begunAt;
        }
        return outcome;
    }
    /**
     * High-performance prefix clearing using S3 batch delete
     *
     * Pages through ListObjectsV2 and removes each page with a single
     * DeleteObjects call; per-object failures are logged, not thrown.
     */
    async clearPrefixOptimized(prefix, batchSize, onProgress) {
        const { ListObjectsV2Command, DeleteObjectsCommand } = await import('@aws-sdk/client-s3');
        let removedSoFar = 0;
        let cursor;
        for (;;) {
            // Fetch the next page of keys under this prefix
            const page = await this.s3Client.send(new ListObjectsV2Command({
                Bucket: this.bucketName,
                Prefix: prefix,
                MaxKeys: batchSize,
                ContinuationToken: cursor
            }));
            const contents = page.Contents ?? [];
            if (contents.length === 0) {
                break;
            }
            // Build the batch delete payload from keys present on this page
            const pendingKeys = contents
                .filter((obj) => obj.Key)
                .map((obj) => ({ Key: obj.Key }));
            if (pendingKeys.length > 0) {
                const deletion = await this.s3Client.send(new DeleteObjectsCommand({
                    Bucket: this.bucketName,
                    Delete: {
                        Objects: pendingKeys,
                        Quiet: false // Get detailed response
                    }
                }));
                removedSoFar += deletion.Deleted?.length || 0;
                // Surface per-object failures without aborting the sweep
                if (deletion.Errors && deletion.Errors.length > 0) {
                    for (const failure of deletion.Errors) {
                        console.warn(`Failed to delete ${failure.Key}: ${failure.Message}`);
                    }
                }
                onProgress?.({
                    totalItems: removedSoFar + (page.IsTruncated ? 1000 : 0), // Estimate
                    processedItems: removedSoFar,
                    errors: deletion.Errors?.length || 0
                });
            }
            cursor = page.NextContinuationToken;
            // Small delay to respect AWS rate limits
            await new Promise(resolve => setTimeout(resolve, 10));
            if (!cursor) {
                break;
            }
        }
        return removedSoFar;
    }
    async getBucketInfo() {
        // Each Brainy instance has its own bucket with the same name as the instance
        // The bucket name IS the instance name
        return { instanceName: this.bucketName };
    }
    /**
     * Count objects per prefix without deleting anything.
     */
    async performDryRun(options) {
        const begunAt = Date.now();
        const { ListObjectsV2Command } = await import('@aws-sdk/client-s3');
        const outcome = {
            success: true,
            itemsDeleted: { nouns: 0, verbs: 0, metadata: 0, system: 0 },
            duration: 0,
            errors: []
        };
        // Tally every key under a prefix, following pagination to the end
        const tally = async (prefix) => {
            let seen = 0;
            let cursor;
            do {
                const page = await this.s3Client.send(new ListObjectsV2Command({
                    Bucket: this.bucketName,
                    Prefix: prefix,
                    MaxKeys: 1000,
                    ContinuationToken: cursor
                }));
                seen += page.KeyCount || 0;
                cursor = page.NextContinuationToken;
            } while (cursor);
            return seen;
        };
        outcome.itemsDeleted.nouns = await tally('nouns/');
        outcome.itemsDeleted.verbs = await tally('verbs/');
        outcome.itemsDeleted.metadata =
            (await tally('metadata/')) +
                (await tally('noun-metadata/')) +
                (await tally('verb-metadata/'));
        outcome.itemsDeleted.system =
            (await tally('system/')) +
                (await tally('index/'));
        outcome.duration = Date.now() - begunAt;
        return outcome;
    }
}
345
+ //# sourceMappingURL=enhancedClearOperations.js.map
@@ -181,38 +181,42 @@ export async function createStorage(options = {}) {
181
181
  });
182
182
  }
183
183
  // Auto-detect the best storage adapter based on the environment
184
- // First, try OPFS (browser only)
185
- const opfsStorage = new OPFSStorage();
186
- if (opfsStorage.isOPFSAvailable()) {
187
- console.log('Using OPFS storage (auto-detected)');
188
- await opfsStorage.init();
189
- // Request persistent storage if specified
190
- if (options.requestPersistentStorage) {
191
- const isPersistent = await opfsStorage.requestPersistentStorage();
192
- console.log(`Persistent storage ${isPersistent ? 'granted' : 'denied'}`);
184
+ // First, check if we're in Node.js (prioritize for test environments)
185
+ if (!isBrowser()) {
186
+ try {
187
+ // Check if we're in a Node.js environment
188
+ if (typeof process !== 'undefined' &&
189
+ process.versions &&
190
+ process.versions.node) {
191
+ console.log('Using file system storage (auto-detected)');
192
+ try {
193
+ const { FileSystemStorage } = await import('./adapters/fileSystemStorage.js');
194
+ return new FileSystemStorage(options.rootDirectory || './brainy-data');
195
+ }
196
+ catch (fsError) {
197
+ console.warn('Failed to load FileSystemStorage, falling back to memory storage:', fsError);
198
+ }
199
+ }
200
+ }
201
+ catch (error) {
202
+ // Not in a Node.js environment or file system is not available
203
+ console.warn('Not in a Node.js environment:', error);
193
204
  }
194
- return opfsStorage;
195
205
  }
196
- // Next, try file system storage (Node.js only)
197
- try {
198
- // Check if we're in a Node.js environment
199
- if (typeof process !== 'undefined' &&
200
- process.versions &&
201
- process.versions.node) {
202
- console.log('Using file system storage (auto-detected)');
203
- try {
204
- const { FileSystemStorage } = await import('./adapters/fileSystemStorage.js');
205
- return new FileSystemStorage(options.rootDirectory || './brainy-data');
206
- }
207
- catch (fsError) {
208
- console.warn('Failed to load FileSystemStorage, falling back to memory storage:', fsError);
206
+ // Next, try OPFS (browser only)
207
+ if (isBrowser()) {
208
+ const opfsStorage = new OPFSStorage();
209
+ if (opfsStorage.isOPFSAvailable()) {
210
+ console.log('Using OPFS storage (auto-detected)');
211
+ await opfsStorage.init();
212
+ // Request persistent storage if specified
213
+ if (options.requestPersistentStorage) {
214
+ const isPersistent = await opfsStorage.requestPersistentStorage();
215
+ console.log(`Persistent storage ${isPersistent ? 'granted' : 'denied'}`);
209
216
  }
217
+ return opfsStorage;
210
218
  }
211
219
  }
212
- catch (error) {
213
- // Not in a Node.js environment or file system is not available
214
- console.warn('Not in a Node.js environment:', error);
215
- }
216
220
  // Finally, fall back to memory storage
217
221
  console.log('Using memory storage (auto-detected)');
218
222
  return new MemoryStorage();