@soulcraft/brainy 3.9.1 → 3.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +64 -6
  2. package/dist/augmentations/KnowledgeAugmentation.d.ts +40 -0
  3. package/dist/augmentations/KnowledgeAugmentation.js +251 -0
  4. package/dist/augmentations/defaultAugmentations.d.ts +1 -0
  5. package/dist/augmentations/defaultAugmentations.js +5 -0
  6. package/dist/brainy.d.ts +11 -0
  7. package/dist/brainy.js +87 -1
  8. package/dist/embeddings/EmbeddingManager.js +14 -2
  9. package/dist/utils/mutex.d.ts +2 -0
  10. package/dist/utils/mutex.js +14 -3
  11. package/dist/vfs/ConceptSystem.d.ts +202 -0
  12. package/dist/vfs/ConceptSystem.js +598 -0
  13. package/dist/vfs/EntityManager.d.ts +75 -0
  14. package/dist/vfs/EntityManager.js +216 -0
  15. package/dist/vfs/EventRecorder.d.ts +83 -0
  16. package/dist/vfs/EventRecorder.js +292 -0
  17. package/dist/vfs/FSCompat.d.ts +85 -0
  18. package/dist/vfs/FSCompat.js +257 -0
  19. package/dist/vfs/GitBridge.d.ts +167 -0
  20. package/dist/vfs/GitBridge.js +537 -0
  21. package/dist/vfs/KnowledgeAugmentation.d.ts +104 -0
  22. package/dist/vfs/KnowledgeAugmentation.js +146 -0
  23. package/dist/vfs/KnowledgeLayer.d.ts +35 -0
  24. package/dist/vfs/KnowledgeLayer.js +443 -0
  25. package/dist/vfs/PathResolver.d.ts +96 -0
  26. package/dist/vfs/PathResolver.js +362 -0
  27. package/dist/vfs/PersistentEntitySystem.d.ts +163 -0
  28. package/dist/vfs/PersistentEntitySystem.js +525 -0
  29. package/dist/vfs/SemanticVersioning.d.ts +105 -0
  30. package/dist/vfs/SemanticVersioning.js +318 -0
  31. package/dist/vfs/VirtualFileSystem.d.ts +246 -0
  32. package/dist/vfs/VirtualFileSystem.js +1927 -0
  33. package/dist/vfs/importers/DirectoryImporter.d.ts +86 -0
  34. package/dist/vfs/importers/DirectoryImporter.js +298 -0
  35. package/dist/vfs/index.d.ts +19 -0
  36. package/dist/vfs/index.js +26 -0
  37. package/dist/vfs/streams/VFSReadStream.d.ts +19 -0
  38. package/dist/vfs/streams/VFSReadStream.js +54 -0
  39. package/dist/vfs/streams/VFSWriteStream.d.ts +21 -0
  40. package/dist/vfs/streams/VFSWriteStream.js +70 -0
  41. package/dist/vfs/types.d.ts +330 -0
  42. package/dist/vfs/types.js +46 -0
  43. package/package.json +1 -1
package/dist/vfs/importers/DirectoryImporter.d.ts
@@ -0,0 +1,86 @@
+ /**
+  * Directory Importer for VFS
+  *
+  * Efficiently imports real directories into VFS with:
+  * - Batch processing for performance
+  * - Progress tracking
+  * - Error recovery
+  * - Parallel processing
+  */
+ import { VirtualFileSystem } from '../VirtualFileSystem.js';
+ import { Brainy } from '../../brainy.js';
+ export interface ImportOptions {
+     targetPath?: string;
+     recursive?: boolean;
+     skipHidden?: boolean;
+     skipNodeModules?: boolean;
+     batchSize?: number;
+     generateEmbeddings?: boolean;
+     extractMetadata?: boolean;
+     showProgress?: boolean;
+     filter?: (path: string) => boolean;
+ }
+ export interface ImportResult {
+     imported: string[];
+     failed: Array<{
+         path: string;
+         error: Error;
+     }>;
+     skipped: string[];
+     totalSize: number;
+     duration: number;
+     filesProcessed: number;
+     directoriesCreated: number;
+ }
+ export interface ImportProgress {
+     type: 'progress' | 'complete' | 'error';
+     processed: number;
+     total?: number;
+     current?: string;
+     error?: Error;
+ }
+ export declare class DirectoryImporter {
+     private vfs;
+     private brain;
+     constructor(vfs: VirtualFileSystem, brain: Brainy);
+     /**
+      * Import a directory or file into VFS
+      */
+     import(sourcePath: string, options?: ImportOptions): Promise<ImportResult>;
+     /**
+      * Import with progress tracking (generator)
+      */
+     importStream(sourcePath: string, options?: ImportOptions): AsyncGenerator<ImportProgress>;
+     /**
+      * Import a directory recursively
+      */
+     private importDirectory;
+     /**
+      * Create directory structure in VFS
+      */
+     private createDirectoryStructure;
+     /**
+      * Collect all files to be imported
+      */
+     private collectFiles;
+     /**
+      * Process a batch of files
+      */
+     private processBatch;
+     /**
+      * Import a single file
+      */
+     private importSingleFile;
+     /**
+      * Import a single file (for non-directory imports)
+      */
+     private importFile;
+     /**
+      * Check if a path should be skipped
+      */
+     private shouldSkip;
+     /**
+      * Detect MIME type from file content and extension
+      */
+     private detectMimeType;
+ }
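
The declarations above define the new importer's public surface. As a usage sketch only (a Brainy instance `brain` and a VirtualFileSystem instance `vfs` are assumed to exist; their construction is not part of this diff, and the source path is a placeholder):

// Hypothetical usage of the DirectoryImporter API declared above.
import { DirectoryImporter } from './importers/DirectoryImporter.js';

const importer = new DirectoryImporter(vfs, brain);
const result = await importer.import('./docs', {
    targetPath: '/imported/docs',   // destination inside the VFS
    recursive: true,
    skipHidden: true,
    batchSize: 200,                 // files processed per batch
    generateEmbeddings: true
});
console.log(`${result.filesProcessed} files, ${result.failed.length} failures, ${result.duration} ms`);
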
package/dist/vfs/importers/DirectoryImporter.js
@@ -0,0 +1,298 @@
+ /**
+  * Directory Importer for VFS
+  *
+  * Efficiently imports real directories into VFS with:
+  * - Batch processing for performance
+  * - Progress tracking
+  * - Error recovery
+  * - Parallel processing
+  */
+ import { promises as fs } from 'fs';
+ import * as path from 'path';
+ export class DirectoryImporter {
+     constructor(vfs, brain) {
+         this.vfs = vfs;
+         this.brain = brain;
+     }
+     /**
+      * Import a directory or file into VFS
+      */
+     async import(sourcePath, options = {}) {
+         const startTime = Date.now();
+         const result = {
+             imported: [],
+             failed: [],
+             skipped: [],
+             totalSize: 0,
+             duration: 0,
+             filesProcessed: 0,
+             directoriesCreated: 0
+         };
+         try {
+             const stats = await fs.stat(sourcePath);
+             if (stats.isFile()) {
+                 await this.importFile(sourcePath, options.targetPath || '/', result);
+             }
+             else if (stats.isDirectory()) {
+                 await this.importDirectory(sourcePath, options, result);
+             }
+         }
+         catch (error) {
+             result.failed.push({
+                 path: sourcePath,
+                 error: error
+             });
+         }
+         result.duration = Date.now() - startTime;
+         return result;
+     }
+     /**
+      * Import with progress tracking (generator)
+      */
+     async *importStream(sourcePath, options = {}) {
+         const files = await this.collectFiles(sourcePath, options);
+         const total = files.length;
+         const batchSize = options.batchSize || 100;
+         let processed = 0;
+         // Process in batches
+         for (let i = 0; i < files.length; i += batchSize) {
+             const batch = files.slice(i, i + batchSize);
+             try {
+                 await this.processBatch(batch, options);
+                 processed += batch.length;
+                 yield {
+                     type: 'progress',
+                     processed,
+                     total,
+                     current: batch[batch.length - 1]
+                 };
+             }
+             catch (error) {
+                 yield {
+                     type: 'error',
+                     processed,
+                     total,
+                     error: error
+                 };
+             }
+         }
+         yield {
+             type: 'complete',
+             processed,
+             total
+         };
+     }
+     /**
+      * Import a directory recursively
+      */
+     async importDirectory(dirPath, options, result) {
+         const targetPath = options.targetPath || '/';
+         // Create VFS directory structure
+         await this.createDirectoryStructure(dirPath, targetPath, options, result);
+         // Collect all files
+         const files = await this.collectFiles(dirPath, options);
+         // Process files in batches
+         const batchSize = options.batchSize || 100;
+         for (let i = 0; i < files.length; i += batchSize) {
+             const batch = files.slice(i, i + batchSize);
+             await this.processBatch(batch, options, result);
+             if (options.showProgress && i % (batchSize * 10) === 0) {
+                 console.log(`Imported ${i} / ${files.length} files...`);
+             }
+         }
+     }
+     /**
+      * Create directory structure in VFS
+      */
+     async createDirectoryStructure(sourcePath, targetPath, options, result) {
+         // Walk directory tree and create all directories first
+         const dirsToCreate = [];
+         const collectDirs = async (dir, vfsPath) => {
+             dirsToCreate.push(vfsPath);
+             const entries = await fs.readdir(dir, { withFileTypes: true });
+             for (const entry of entries) {
+                 if (entry.isDirectory()) {
+                     if (this.shouldSkip(entry.name, path.join(dir, entry.name), options)) {
+                         continue;
+                     }
+                     const childPath = path.join(dir, entry.name);
+                     const childVfsPath = path.posix.join(vfsPath, entry.name);
+                     if (options.recursive !== false) {
+                         await collectDirs(childPath, childVfsPath);
+                     }
+                 }
+             }
+         };
+         await collectDirs(sourcePath, targetPath);
+         // Create all directories
+         for (const dirPath of dirsToCreate) {
+             try {
+                 await this.vfs.mkdir(dirPath, { recursive: true });
+                 result.directoriesCreated++;
+             }
+             catch (error) {
+                 if (error.code !== 'EEXIST') {
+                     result.failed.push({ path: dirPath, error });
+                 }
+             }
+         }
+     }
+     /**
+      * Collect all files to be imported
+      */
+     async collectFiles(dirPath, options) {
+         const files = [];
+         const walk = async (dir) => {
+             const entries = await fs.readdir(dir, { withFileTypes: true });
+             for (const entry of entries) {
+                 const fullPath = path.join(dir, entry.name);
+                 if (this.shouldSkip(entry.name, fullPath, options)) {
+                     continue;
+                 }
+                 if (entry.isFile()) {
+                     files.push(fullPath);
+                 }
+                 else if (entry.isDirectory() && options.recursive !== false) {
+                     await walk(fullPath);
+                 }
+             }
+         };
+         await walk(dirPath);
+         return files;
+     }
+     /**
+      * Process a batch of files
+      */
+     async processBatch(files, options, result) {
+         const imports = await Promise.allSettled(files.map(filePath => this.importSingleFile(filePath, options)));
+         if (result) {
+             for (let i = 0; i < imports.length; i++) {
+                 const importResult = imports[i];
+                 const filePath = files[i];
+                 if (importResult.status === 'fulfilled') {
+                     result.imported.push(importResult.value.vfsPath);
+                     result.totalSize += importResult.value.size;
+                     result.filesProcessed++;
+                 }
+                 else {
+                     result.failed.push({
+                         path: filePath,
+                         error: importResult.reason
+                     });
+                 }
+             }
+         }
+     }
+     /**
+      * Import a single file
+      */
+     async importSingleFile(filePath, options) {
+         const stats = await fs.stat(filePath);
+         const content = await fs.readFile(filePath);
+         // Calculate VFS path
+         const relativePath = path.relative(process.cwd(), filePath);
+         const vfsPath = path.posix.join(options.targetPath || '/', relativePath);
+         // Generate embedding if requested
+         let embedding;
+         if (options.generateEmbeddings !== false) {
+             try {
+                 // Use first 10KB for embedding
+                 const text = content.toString('utf8', 0, Math.min(10240, content.length));
+                 // Generate embedding using brain's embed method
+                 const embedResult = await this.brain.embed({ data: text });
+                 embedding = embedResult;
+             }
+             catch {
+                 // Continue without embedding if generation fails
+             }
+         }
+         // Write to VFS
+         await this.vfs.writeFile(vfsPath, content, {
+             generateEmbedding: options.generateEmbeddings,
+             extractMetadata: options.extractMetadata,
+             metadata: {
+                 originalPath: filePath,
+                 importedAt: Date.now(),
+                 originalSize: stats.size,
+                 originalModified: stats.mtime.getTime()
+             }
+         });
+         return { vfsPath, size: stats.size };
+     }
+     /**
+      * Import a single file (for non-directory imports)
+      */
+     async importFile(filePath, targetPath, result) {
+         try {
+             const imported = await this.importSingleFile(filePath, { targetPath });
+             result.imported.push(imported.vfsPath);
+             result.totalSize += imported.size;
+             result.filesProcessed++;
+         }
+         catch (error) {
+             result.failed.push({
+                 path: filePath,
+                 error: error
+             });
+         }
+     }
+     /**
+      * Check if a path should be skipped
+      */
+     shouldSkip(name, fullPath, options) {
+         // Skip hidden files if requested
+         if (options.skipHidden && name.startsWith('.')) {
+             return true;
+         }
+         // Skip node_modules by default
+         if (name === 'node_modules' && options.skipNodeModules !== false) {
+             return true;
+         }
+         // Apply custom filter
+         if (options.filter && !options.filter(fullPath)) {
+             return true;
+         }
+         return false;
+     }
+     /**
+      * Detect MIME type from file content and extension
+      */
+     detectMimeType(filePath, content) {
+         const ext = path.extname(filePath).toLowerCase();
+         // Common extensions
+         const mimeTypes = {
+             '.js': 'application/javascript',
+             '.ts': 'application/typescript',
+             '.jsx': 'application/javascript',
+             '.tsx': 'application/typescript',
+             '.json': 'application/json',
+             '.md': 'text/markdown',
+             '.html': 'text/html',
+             '.css': 'text/css',
+             '.py': 'text/x-python',
+             '.go': 'text/x-go',
+             '.rs': 'text/x-rust',
+             '.java': 'text/x-java',
+             '.cpp': 'text/x-c++',
+             '.c': 'text/x-c',
+             '.h': 'text/x-c',
+             '.txt': 'text/plain',
+             '.xml': 'application/xml',
+             '.yaml': 'text/yaml',
+             '.yml': 'text/yaml',
+             '.toml': 'text/toml',
+             '.sh': 'text/x-shellscript',
+             '.pdf': 'application/pdf',
+             '.jpg': 'image/jpeg',
+             '.jpeg': 'image/jpeg',
+             '.png': 'image/png',
+             '.gif': 'image/gif',
+             '.svg': 'image/svg+xml',
+             '.mp3': 'audio/mpeg',
+             '.mp4': 'video/mp4',
+             '.zip': 'application/zip'
+         };
+         return mimeTypes[ext] || 'application/octet-stream';
+     }
+ }
+ //# sourceMappingURL=DirectoryImporter.js.map
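
Because `importStream` is an async generator yielding the ImportProgress events shown above, consumers can track long imports with `for await`; a minimal sketch, assuming existing `vfs` and `brain` instances:

const importer = new DirectoryImporter(vfs, brain);
for await (const event of importer.importStream('./src', { batchSize: 50 })) {
    if (event.type === 'progress') {
        console.log(`${event.processed}/${event.total} (last: ${event.current})`);
    } else if (event.type === 'error') {
        console.error('batch failed:', event.error);
    } else {
        console.log('import complete');
    }
}

Note that `importStream` calls `processBatch` without a result accumulator, so per-file outcomes are reported only through the yielded events, not an ImportResult.
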
package/dist/vfs/index.d.ts
@@ -0,0 +1,19 @@
+ /**
+  * Brainy Virtual Filesystem
+  *
+  * A simplified fs-compatible filesystem that stores data in Brainy
+  * Works across all storage adapters and scales to millions of files
+  */
+ export { VirtualFileSystem } from './VirtualFileSystem.js';
+ export { PathResolver } from './PathResolver.js';
+ export * from './types.js';
+ export { FSCompat, createFS } from './FSCompat.js';
+ export { DirectoryImporter } from './importers/DirectoryImporter.js';
+ export { VFSReadStream } from './streams/VFSReadStream.js';
+ export { VFSWriteStream } from './streams/VFSWriteStream.js';
+ export { EventRecorder } from './EventRecorder.js';
+ export { SemanticVersioning } from './SemanticVersioning.js';
+ export { PersistentEntitySystem } from './PersistentEntitySystem.js';
+ export { ConceptSystem } from './ConceptSystem.js';
+ export { GitBridge } from './GitBridge.js';
+ export { VirtualFileSystem as VFS } from './VirtualFileSystem.js';
package/dist/vfs/index.js
@@ -0,0 +1,26 @@
+ /**
+  * Brainy Virtual Filesystem
+  *
+  * A simplified fs-compatible filesystem that stores data in Brainy
+  * Works across all storage adapters and scales to millions of files
+  */
+ // Core VFS
+ export { VirtualFileSystem } from './VirtualFileSystem.js';
+ export { PathResolver } from './PathResolver.js';
+ export * from './types.js';
+ // fs compatibility layer
+ export { FSCompat, createFS } from './FSCompat.js';
+ // Directory import
+ export { DirectoryImporter } from './importers/DirectoryImporter.js';
+ // Streaming
+ export { VFSReadStream } from './streams/VFSReadStream.js';
+ export { VFSWriteStream } from './streams/VFSWriteStream.js';
+ // Knowledge Layer Components (optional via augmentation)
+ export { EventRecorder } from './EventRecorder.js';
+ export { SemanticVersioning } from './SemanticVersioning.js';
+ export { PersistentEntitySystem } from './PersistentEntitySystem.js';
+ export { ConceptSystem } from './ConceptSystem.js';
+ export { GitBridge } from './GitBridge.js';
+ // Convenience alias
+ export { VirtualFileSystem as VFS } from './VirtualFileSystem.js';
+ //# sourceMappingURL=index.js.map
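
This barrel module groups everything the VFS exposes, including the `VFS` alias and `createFS` fs-compat factory. A sketch of what consumption might look like; the exact import specifier depends on the package's "exports" map, which this diff does not show:

// Illustrative path only; verify against the published package.json "exports".
import { VFS, createFS, DirectoryImporter } from '@soulcraft/brainy/dist/vfs/index.js';
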
package/dist/vfs/streams/VFSReadStream.d.ts
@@ -0,0 +1,19 @@
+ /**
+  * VFS Read Stream Implementation
+  *
+  * Real streaming support for large files
+  */
+ import { Readable } from 'stream';
+ import { VirtualFileSystem } from '../VirtualFileSystem.js';
+ import { ReadStreamOptions } from '../types.js';
+ export declare class VFSReadStream extends Readable {
+     private vfs;
+     private path;
+     private options;
+     private position;
+     private entity;
+     private data;
+     constructor(vfs: VirtualFileSystem, path: string, options?: ReadStreamOptions);
+     _read(size: number): Promise<void>;
+     _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
+ }
package/dist/vfs/streams/VFSReadStream.js
@@ -0,0 +1,54 @@
+ /**
+  * VFS Read Stream Implementation
+  *
+  * Real streaming support for large files
+  */
+ import { Readable } from 'stream';
+ export class VFSReadStream extends Readable {
+     constructor(vfs, path, options = {}) {
+         super({
+             highWaterMark: options.highWaterMark || 64 * 1024 // 64KB chunks
+         });
+         this.vfs = vfs;
+         this.path = path;
+         this.options = options;
+         this.entity = null;
+         this.data = null;
+         this.position = options.start || 0;
+     }
+     async _read(size) {
+         try {
+             // Lazy load entity
+             if (!this.entity) {
+                 this.entity = await this.vfs.getEntity(this.path);
+                 this.data = this.entity.data;
+                 if (!Buffer.isBuffer(this.data)) {
+                     // Convert string to buffer if needed
+                     this.data = Buffer.from(this.data);
+                 }
+             }
+             // Check if we've reached the end
+             const end = this.options.end || this.data.length;
+             if (this.position >= end) {
+                 this.push(null); // Signal EOF
+                 return;
+             }
+             // Calculate chunk size
+             const chunkEnd = Math.min(this.position + size, end);
+             const chunk = this.data.slice(this.position, chunkEnd);
+             // Update position and push chunk
+             this.position = chunkEnd;
+             this.push(chunk);
+         }
+         catch (error) {
+             this.destroy(error);
+         }
+     }
+     _destroy(error, callback) {
+         // Clean up resources
+         this.entity = null;
+         this.data = null;
+         callback(error);
+     }
+ }
+ //# sourceMappingURL=VFSReadStream.js.map
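
A sketch of the read side, built directly from the constructor shown above (a `vfs` instance is assumed; whether VirtualFileSystem also exposes its own stream factory is not visible in this diff):

import { VFSReadStream } from './streams/VFSReadStream.js';

// Stream the first 1 KB of a stored file to stdout in up to 64KB chunks.
const stream = new VFSReadStream(vfs, '/imported/docs/readme.md', { start: 0, end: 1024 });
stream.on('error', (err) => console.error('read failed:', err));
stream.pipe(process.stdout);

Note that `_read` lazily loads the whole entity via `vfs.getEntity()` and then slices it, so the chunking applies to delivery downstream, not to how the data is fetched from storage.
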
package/dist/vfs/streams/VFSWriteStream.d.ts
@@ -0,0 +1,21 @@
+ /**
+  * VFS Write Stream Implementation
+  *
+  * Real streaming write support for large files
+  */
+ import { Writable } from 'stream';
+ import { VirtualFileSystem } from '../VirtualFileSystem.js';
+ import { WriteStreamOptions } from '../types.js';
+ export declare class VFSWriteStream extends Writable {
+     private vfs;
+     private path;
+     private options;
+     private chunks;
+     private size;
+     private _closed;
+     constructor(vfs: VirtualFileSystem, path: string, options?: WriteStreamOptions);
+     _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): Promise<void>;
+     _final(callback: (error?: Error | null) => void): Promise<void>;
+     private _flush;
+     _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
+ }
package/dist/vfs/streams/VFSWriteStream.js
@@ -0,0 +1,70 @@
+ /**
+  * VFS Write Stream Implementation
+  *
+  * Real streaming write support for large files
+  */
+ import { Writable } from 'stream';
+ export class VFSWriteStream extends Writable {
+     constructor(vfs, path, options = {}) {
+         super({
+             highWaterMark: 64 * 1024 // 64KB chunks
+         });
+         this.vfs = vfs;
+         this.path = path;
+         this.options = options;
+         this.chunks = [];
+         this.size = 0;
+         this._closed = false;
+         // Handle autoClose option
+         if (options.autoClose !== false) {
+             this.once('finish', () => this._flush());
+         }
+     }
+     async _write(chunk, encoding, callback) {
+         try {
+             // Convert to buffer if needed
+             const buffer = Buffer.isBuffer(chunk)
+                 ? chunk
+                 : Buffer.from(chunk, encoding);
+             // Store chunk
+             this.chunks.push(buffer);
+             this.size += buffer.length;
+             // For very large files, we could flush periodically
+             // to avoid memory issues, but for now we accumulate
+             callback();
+         }
+         catch (error) {
+             callback(error);
+         }
+     }
+     async _final(callback) {
+         try {
+             await this._flush();
+             callback();
+         }
+         catch (error) {
+             callback(error);
+         }
+     }
+     async _flush() {
+         if (this._closed)
+             return;
+         this._closed = true;
+         // Combine all chunks
+         const data = Buffer.concat(this.chunks, this.size);
+         // Write to VFS
+         await this.vfs.writeFile(this.path, data, {
+             mode: this.options.mode,
+             encoding: this.options.encoding
+         });
+         // Clear chunks to free memory
+         this.chunks = [];
+     }
+     _destroy(error, callback) {
+         // Clean up resources
+         this.chunks = [];
+         this._closed = true;
+         callback(error);
+     }
+ }
+ //# sourceMappingURL=VFSWriteStream.js.map
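
The write side mirrors the reader; a sketch assuming an existing `vfs` instance and a local file to copy in (path is a placeholder):

import { createReadStream } from 'fs';
import { VFSWriteStream } from './streams/VFSWriteStream.js';

// Copy a local file into the VFS; chunks are buffered in memory and flushed
// to vfs.writeFile() once in _flush() when the stream finishes.
const out = new VFSWriteStream(vfs, '/imported/archive.zip');
createReadStream('./archive.zip').pipe(out);
out.on('finish', () => console.log('stored in VFS'));

As the inline comment in `_write` notes, data is accumulated rather than flushed incrementally, so a single write is bounded by available memory despite the streaming interface.
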