@soulcraft/brainy 6.2.2 → 6.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/augmentations/KnowledgeAugmentation.d.ts +40 -0
- package/dist/augmentations/KnowledgeAugmentation.js +251 -0
- package/dist/importManager.d.ts +78 -0
- package/dist/importManager.js +267 -0
- package/dist/query/typeInference.d.ts +158 -0
- package/dist/query/typeInference.js +760 -0
- package/dist/storage/adapters/historicalStorageAdapter.d.ts +0 -2
- package/dist/storage/adapters/historicalStorageAdapter.js +4 -4
- package/dist/storage/adapters/typeAwareStorageAdapter.d.ts +252 -0
- package/dist/storage/adapters/typeAwareStorageAdapter.js +814 -0
- package/dist/storage/baseStorage.d.ts +12 -0
- package/dist/storage/baseStorage.js +16 -0
- package/dist/types/brainyDataInterface.d.ts +52 -0
- package/dist/types/brainyDataInterface.js +10 -0
- package/dist/utils/metadataIndex.d.ts +6 -2
- package/dist/utils/metadataIndex.js +31 -14
- package/dist/vfs/ConceptSystem.d.ts +203 -0
- package/dist/vfs/ConceptSystem.js +545 -0
- package/dist/vfs/EntityManager.d.ts +75 -0
- package/dist/vfs/EntityManager.js +216 -0
- package/dist/vfs/EventRecorder.d.ts +84 -0
- package/dist/vfs/EventRecorder.js +269 -0
- package/dist/vfs/GitBridge.d.ts +167 -0
- package/dist/vfs/GitBridge.js +537 -0
- package/dist/vfs/KnowledgeLayer.d.ts +35 -0
- package/dist/vfs/KnowledgeLayer.js +443 -0
- package/dist/vfs/PersistentEntitySystem.d.ts +165 -0
- package/dist/vfs/PersistentEntitySystem.js +503 -0
- package/dist/vfs/SemanticVersioning.d.ts +105 -0
- package/dist/vfs/SemanticVersioning.js +309 -0
- package/package.json +1 -1
package/dist/augmentations/KnowledgeAugmentation.d.ts
@@ -0,0 +1,40 @@
+/**
+ * Knowledge Layer Augmentation for VFS
+ *
+ * Adds intelligent features to VFS without modifying core functionality:
+ * - Event recording for all operations
+ * - Semantic versioning based on content changes
+ * - Entity and concept extraction
+ * - Git bridge for import/export
+ *
+ * This is a TRUE augmentation - VFS works perfectly without it
+ */
+import { Brainy } from '../brainy.js';
+import { BaseAugmentation } from './brainyAugmentation.js';
+export declare class KnowledgeAugmentation extends BaseAugmentation {
+    name: string;
+    timing: 'after';
+    metadata: 'none';
+    operations: any;
+    priority: number;
+    constructor(config?: any);
+    execute<T = any>(operation: string, params: any, next: () => Promise<T>): Promise<T>;
+    private eventRecorder?;
+    private semanticVersioning?;
+    private entitySystem?;
+    private conceptSystem?;
+    private gitBridge?;
+    private originalMethods;
+    initialize(context: any): Promise<void>;
+    augment(brain: Brainy): Promise<void>;
+    /**
+     * Wrap a VFS method to add Knowledge Layer functionality
+     */
+    private wrapMethod;
+    /**
+     * Add Knowledge Layer methods to VFS
+     */
+    private addKnowledgeMethods;
+    private isSemanticChange;
+    cleanup(brain: Brainy): Promise<void>;
+}
package/dist/augmentations/KnowledgeAugmentation.js
@@ -0,0 +1,251 @@
+/**
+ * Knowledge Layer Augmentation for VFS
+ *
+ * Adds intelligent features to VFS without modifying core functionality:
+ * - Event recording for all operations
+ * - Semantic versioning based on content changes
+ * - Entity and concept extraction
+ * - Git bridge for import/export
+ *
+ * This is a TRUE augmentation - VFS works perfectly without it
+ */
+import { BaseAugmentation } from './brainyAugmentation.js';
+import { EventRecorder } from '../vfs/EventRecorder.js';
+import { SemanticVersioning } from '../vfs/SemanticVersioning.js';
+import { PersistentEntitySystem } from '../vfs/PersistentEntitySystem.js';
+import { ConceptSystem } from '../vfs/ConceptSystem.js';
+import { GitBridge } from '../vfs/GitBridge.js';
+export class KnowledgeAugmentation extends BaseAugmentation {
+    constructor(config = {}) {
+        super(config);
+        this.name = 'knowledge';
+        this.timing = 'after'; // Process after VFS operations
+        this.metadata = 'none'; // No metadata access needed
+        this.operations = []; // VFS-specific augmentation, no operation interception
+        this.priority = 100; // Run last
+        this.originalMethods = new Map();
+    }
+    async execute(operation, params, next) {
+        // Pass through - this augmentation works at VFS level, not operation level
+        return await next();
+    }
+    async initialize(context) {
+        await this.augment(context.brain);
+    }
+    async augment(brain) {
+        // Only augment if VFS exists
+        const vfs = brain.vfs?.();
+        if (!vfs) {
+            console.warn('KnowledgeAugmentation: VFS not found, skipping');
+            return;
+        }
+        // Initialize Knowledge Layer components
+        this.eventRecorder = new EventRecorder(brain);
+        this.semanticVersioning = new SemanticVersioning(brain);
+        this.entitySystem = new PersistentEntitySystem(brain);
+        this.conceptSystem = new ConceptSystem(brain);
+        this.gitBridge = new GitBridge(vfs, brain);
+        // Wrap VFS methods to add intelligence WITHOUT slowing them down
+        this.wrapMethod(vfs, 'writeFile', async (original, path, data, options) => {
+            // Call original first (stays fast)
+            const result = await original.call(vfs, path, data, options);
+            // Knowledge processing in background (non-blocking)
+            setImmediate(async () => {
+                try {
+                    // Record event
+                    if (this.eventRecorder) {
+                        await this.eventRecorder.recordEvent({
+                            type: 'write',
+                            path,
+                            content: data,
+                            size: data.length,
+                            author: options?.author || 'system'
+                        });
+                    }
+                    // Check for semantic versioning
+                    if (this.semanticVersioning) {
+                        const existingContent = await vfs.readFile(path).catch(() => null);
+                        const shouldVersion = existingContent && this.isSemanticChange(existingContent, data);
+                        if (shouldVersion) {
+                            await this.semanticVersioning.createVersion(path, data, {
+                                message: 'Automatic semantic version'
+                            });
+                        }
+                    }
+                    // Extract concepts
+                    if (this.conceptSystem && options?.extractConcepts !== false) {
+                        await this.conceptSystem.extractAndLinkConcepts(path, data);
+                    }
+                    // Extract entities
+                    if (this.entitySystem && options?.extractEntities !== false) {
+                        await this.entitySystem.extractEntities(data.toString('utf8'), data);
+                    }
+                }
+                catch (error) {
+                    // Knowledge Layer errors should not affect VFS operations
+                    console.debug('KnowledgeLayer background processing error:', error);
+                }
+            });
+            return result;
+        });
+        this.wrapMethod(vfs, 'unlink', async (original, path) => {
+            const result = await original.call(vfs, path);
+            // Record deletion event
+            setImmediate(async () => {
+                if (this.eventRecorder) {
+                    await this.eventRecorder.recordEvent({
+                        type: 'delete',
+                        path,
+                        author: 'system'
+                    });
+                }
+            });
+            return result;
+        });
+        this.wrapMethod(vfs, 'rename', async (original, oldPath, newPath) => {
+            const result = await original.call(vfs, oldPath, newPath);
+            // Record rename event
+            setImmediate(async () => {
+                if (this.eventRecorder) {
+                    await this.eventRecorder.recordEvent({
+                        type: 'rename',
+                        path: oldPath,
+                        metadata: { newPath },
+                        author: 'system'
+                    });
+                }
+            });
+            return result;
+        });
+        // Add Knowledge Layer methods to VFS
+        this.addKnowledgeMethods(vfs);
+        console.log('✨ Knowledge Layer augmentation enabled');
+    }
+    /**
+     * Wrap a VFS method to add Knowledge Layer functionality
+     */
+    wrapMethod(vfs, methodName, wrapper) {
+        const original = vfs[methodName];
+        if (!original)
+            return;
+        // Store original for cleanup
+        this.originalMethods.set(methodName, original);
+        // Replace with wrapped version
+        vfs[methodName] = async (...args) => {
+            return await wrapper(original, ...args);
+        };
+    }
+    /**
+     * Add Knowledge Layer methods to VFS
+     */
+    addKnowledgeMethods(vfs) {
+        // Event history
+        vfs.getHistory = async (path, options) => {
+            if (!this.eventRecorder)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.eventRecorder.getHistory(path, options);
+        };
+        vfs.reconstructAtTime = async (path, timestamp) => {
+            if (!this.eventRecorder)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.eventRecorder.reconstructFileAtTime(path, timestamp);
+        };
+        // Semantic versioning
+        vfs.getVersions = async (path) => {
+            if (!this.semanticVersioning)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.semanticVersioning.getVersions(path);
+        };
+        vfs.restoreVersion = async (path, versionId) => {
+            if (!this.semanticVersioning)
+                throw new Error('Knowledge Layer not initialized');
+            const version = await this.semanticVersioning.getVersion(path, versionId);
+            if (version) {
+                await vfs.writeFile(path, version);
+            }
+        };
+        // Entities
+        vfs.findEntity = async (query) => {
+            if (!this.entitySystem)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.entitySystem.findEntity(query);
+        };
+        vfs.getEntityAppearances = async (entityId) => {
+            if (!this.entitySystem)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.entitySystem.getEvolution(entityId);
+        };
+        // Concepts
+        vfs.getConcepts = async (path) => {
+            if (!this.conceptSystem)
+                throw new Error('Knowledge Layer not initialized');
+            const concepts = await this.conceptSystem.findConcepts({ manifestedIn: path });
+            return concepts;
+        };
+        vfs.getConceptGraph = async (options) => {
+            if (!this.conceptSystem)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.conceptSystem.getConceptGraph(options);
+        };
+        // Git bridge
+        vfs.exportToGit = async (vfsPath, gitPath) => {
+            if (!this.gitBridge)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.gitBridge.exportToGit(vfsPath, gitPath);
+        };
+        vfs.importFromGit = async (gitPath, vfsPath) => {
+            if (!this.gitBridge)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.gitBridge.importFromGit(gitPath, vfsPath);
+        };
+        // Temporal coupling
+        vfs.findTemporalCoupling = async (path, windowMs) => {
+            if (!this.eventRecorder)
+                throw new Error('Knowledge Layer not initialized');
+            return await this.eventRecorder.findTemporalCoupling(path, windowMs);
+        };
+    }
+    isSemanticChange(oldContent, newContent) {
+        // Simple heuristic - significant size change or different content
+        const oldStr = oldContent.toString('utf8');
+        const newStr = newContent.toString('utf8');
+        // Check for significant size change (>10%)
+        const sizeDiff = Math.abs(oldStr.length - newStr.length) / oldStr.length;
+        if (sizeDiff > 0.1)
+            return true;
+        // Check for structural changes (simplified)
+        const oldLines = oldStr.split('\n').filter(l => l.trim());
+        const newLines = newStr.split('\n').filter(l => l.trim());
+        // Different number of non-empty lines
+        return Math.abs(oldLines.length - newLines.length) > 5;
+    }
+    async cleanup(brain) {
+        const vfs = brain.vfs?.();
+        if (!vfs)
+            return;
+        // Restore original methods
+        for (const [methodName, original] of this.originalMethods) {
+            vfs[methodName] = original;
+        }
+        // Remove added methods
+        delete vfs.getHistory;
+        delete vfs.reconstructAtTime;
+        delete vfs.getVersions;
+        delete vfs.restoreVersion;
+        delete vfs.findEntity;
+        delete vfs.getEntityAppearances;
+        delete vfs.getConcepts;
+        delete vfs.getConceptGraph;
+        delete vfs.exportToGit;
+        delete vfs.importFromGit;
+        delete vfs.findTemporalCoupling;
+        // Clean up components
+        this.eventRecorder = undefined;
+        this.semanticVersioning = undefined;
+        this.entitySystem = undefined;
+        this.conceptSystem = undefined;
+        this.gitBridge = undefined;
+        console.log('Knowledge Layer augmentation removed');
+    }
+}
+//# sourceMappingURL=KnowledgeAugmentation.js.map
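
For orientation, here is a minimal usage sketch of the API this augmentation adds. It is not taken from the package: the import specifier and the way `brain` is obtained are assumptions, and only the `KnowledgeAugmentation` members and the VFS methods added above (`getHistory`, `getVersions`, `cleanup`, etc.) come from this diff.

```ts
// Hypothetical sketch - the import path and the `brain` instance are assumptions
import { KnowledgeAugmentation } from '@soulcraft/brainy/dist/augmentations/KnowledgeAugmentation.js'

declare const brain: any // an initialized Brainy instance whose vfs() returns the virtual file system

const knowledge = new KnowledgeAugmentation()

// initialize() calls augment(), which wraps writeFile/unlink/rename and adds the knowledge methods
await knowledge.initialize({ brain })

const vfs = brain.vfs?.()
if (vfs) {
  await vfs.writeFile('/notes/design.md', Buffer.from('# Design\n'))

  // Added by addKnowledgeMethods(); each throws if the Knowledge Layer is not initialized
  const history = await vfs.getHistory('/notes/design.md')
  const versions = await vfs.getVersions('/notes/design.md')
  console.log(history, versions)
}

// cleanup() restores the original VFS methods and deletes the added ones
await knowledge.cleanup(brain)
```
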
package/dist/importManager.d.ts
@@ -0,0 +1,78 @@
+/**
+ * Import Manager - Comprehensive data import with intelligent type detection
+ *
+ * Handles multiple data sources:
+ * - Direct data (objects, arrays)
+ * - Files (JSON, CSV, text)
+ * - URLs (fetch and parse)
+ * - Streams (for large files)
+ *
+ * Uses NeuralImportAugmentation for intelligent processing
+ */
+import { NounType } from './types/graphTypes.js';
+export interface ImportOptions {
+    source?: 'data' | 'file' | 'url' | 'auto';
+    format?: 'json' | 'csv' | 'text' | 'yaml' | 'auto';
+    batchSize?: number;
+    autoDetect?: boolean;
+    typeHint?: NounType;
+    extractRelationships?: boolean;
+    csvDelimiter?: string;
+    csvHeaders?: boolean;
+    parallel?: boolean;
+    maxConcurrency?: number;
+}
+export interface ImportResult {
+    success: boolean;
+    nouns: string[];
+    verbs: string[];
+    errors: string[];
+    stats: {
+        total: number;
+        imported: number;
+        failed: number;
+        relationships: number;
+    };
+}
+export declare class ImportManager {
+    private neuralImport;
+    private typeMatcher;
+    private brain;
+    constructor(brain: any);
+    /**
+     * Initialize the import manager
+     */
+    init(): Promise<void>;
+    /**
+     * Main import method - handles all sources
+     */
+    import(source: string | Buffer | any[] | any, options?: ImportOptions): Promise<ImportResult>;
+    /**
+     * Import from file
+     */
+    importFile(filePath: string, options?: ImportOptions): Promise<ImportResult>;
+    /**
+     * Import from URL
+     */
+    importUrl(url: string, options?: ImportOptions): Promise<ImportResult>;
+    /**
+     * Detect source type
+     */
+    private detectSourceType;
+    /**
+     * Detect format from file path
+     */
+    private detectFormatFromPath;
+    /**
+     * Read file
+     */
+    private readFile;
+    /**
+     * Fetch from URL
+     */
+    private fetchFromUrl;
+}
+/**
+ * Create an import manager instance
+ */
+export declare function createImportManager(brain: any): ImportManager;
package/dist/importManager.js
@@ -0,0 +1,267 @@
+/**
+ * Import Manager - Comprehensive data import with intelligent type detection
+ *
+ * Handles multiple data sources:
+ * - Direct data (objects, arrays)
+ * - Files (JSON, CSV, text)
+ * - URLs (fetch and parse)
+ * - Streams (for large files)
+ *
+ * Uses NeuralImportAugmentation for intelligent processing
+ */
+import { VerbType } from './types/graphTypes.js';
+import { NeuralImportAugmentation } from './augmentations/neuralImport.js';
+import * as fs from './universal/fs.js';
+import * as path from './universal/path.js';
+import { prodLog } from './utils/logger.js';
+export class ImportManager {
+    constructor(brain) {
+        this.typeMatcher = null;
+        this.brain = brain;
+        this.neuralImport = new NeuralImportAugmentation();
+    }
+    /**
+     * Initialize the import manager
+     */
+    async init() {
+        // Initialize neural import with proper context
+        const context = {
+            brain: this.brain,
+            storage: this.brain.storage,
+            config: {},
+            log: (message, level) => {
+                if (level === 'error') {
+                    prodLog.error(message);
+                }
+                else if (level === 'warn') {
+                    prodLog.warn(message);
+                }
+                else {
+                    prodLog.info(message);
+                }
+            }
+        };
+        await this.neuralImport.initialize(context);
+        // Get type matcher
+        const { getBrainyTypes } = await import('./augmentations/typeMatching/brainyTypes.js');
+        this.typeMatcher = await getBrainyTypes();
+    }
+    /**
+     * Main import method - handles all sources
+     */
+    async import(source, options = {}) {
+        const result = {
+            success: false,
+            nouns: [],
+            verbs: [],
+            errors: [],
+            stats: {
+                total: 0,
+                imported: 0,
+                failed: 0,
+                relationships: 0
+            }
+        };
+        try {
+            // Detect source type
+            const sourceType = await this.detectSourceType(source, options.source);
+            // Get data based on source type
+            let data;
+            let format = options.format || 'auto';
+            switch (sourceType) {
+                case 'url':
+                    data = await this.fetchFromUrl(source);
+                    break;
+                case 'file':
+                    const filePath = source;
+                    data = await this.readFile(filePath);
+                    if (format === 'auto') {
+                        format = this.detectFormatFromPath(filePath);
+                    }
+                    break;
+                case 'data':
+                default:
+                    data = source;
+                    break;
+            }
+            // Process data through neural import
+            let items;
+            let relationships = [];
+            if (Buffer.isBuffer(data) || typeof data === 'string') {
+                // Use neural import for parsing and analysis
+                const analysis = await this.neuralImport.getNeuralAnalysis(data, format);
+                // Extract items and relationships
+                items = analysis.detectedEntities.map(entity => ({
+                    data: entity.originalData,
+                    type: entity.nounType,
+                    confidence: entity.confidence,
+                    id: entity.suggestedId
+                }));
+                if (options.extractRelationships !== false) {
+                    relationships = analysis.detectedRelationships;
+                }
+                // Log insights
+                for (const insight of analysis.insights) {
+                    prodLog.info(`🧠 ${insight.description} (confidence: ${insight.confidence})`);
+                }
+            }
+            else if (Array.isArray(data)) {
+                items = data;
+            }
+            else {
+                items = [data];
+            }
+            result.stats.total = items.length;
+            // Import items in batches
+            const batchSize = options.batchSize || 50;
+            for (let i = 0; i < items.length; i += batchSize) {
+                const batch = items.slice(i, i + batchSize);
+                // Process batch in parallel if enabled
+                const promises = batch.map(async (item) => {
+                    try {
+                        // Detect type if needed
+                        let nounType = item.type || options.typeHint;
+                        if (!nounType && options.autoDetect !== false && this.typeMatcher) {
+                            const match = await this.typeMatcher.matchNounType(item.data || item);
+                            nounType = match.type;
+                        }
+                        // Prepare the data to import
+                        const dataToImport = item.data || item;
+                        // Create metadata combining original data with import metadata
+                        const metadata = {
+                            ...(typeof dataToImport === 'object' ? dataToImport : {}),
+                            ...(item.data?.metadata || {}),
+                            nounType,
+                            _importedAt: new Date().toISOString(),
+                            _confidence: item.confidence
+                        };
+                        // Add to brain using modern API signature
+                        const id = await this.brain.add({ data: dataToImport, type: nounType || 'content', metadata });
+                        result.nouns.push(id);
+                        result.stats.imported++;
+                        return id;
+                    }
+                    catch (error) {
+                        result.errors.push(`Failed to import item: ${error.message}`);
+                        result.stats.failed++;
+                        return null;
+                    }
+                });
+                if (options.parallel !== false) {
+                    await Promise.all(promises);
+                }
+                else {
+                    for (const promise of promises) {
+                        await promise;
+                    }
+                }
+            }
+            // Import relationships
+            for (const rel of relationships) {
+                try {
+                    // Match verb type if needed
+                    let verbType = rel.verbType;
+                    if (!Object.values(VerbType).includes(verbType) && this.typeMatcher) {
+                        const match = await this.typeMatcher.matchVerbType({ id: rel.sourceId }, { id: rel.targetId }, rel.verbType);
+                        verbType = match.type;
+                    }
+                    const verbId = await this.brain.relate({
+                        from: rel.sourceId,
+                        to: rel.targetId,
+                        type: verbType,
+                        metadata: rel.metadata,
+                        weight: rel.weight
+                    });
+                    result.verbs.push(verbId);
+                    result.stats.relationships++;
+                }
+                catch (error) {
+                    result.errors.push(`Failed to create relationship: ${error.message}`);
+                }
+            }
+            result.success = result.stats.imported > 0;
+            prodLog.info(`✨ Import complete: ${result.stats.imported}/${result.stats.total} items, ${result.stats.relationships} relationships`);
+        }
+        catch (error) {
+            result.errors.push(`Import failed: ${error.message}`);
+            prodLog.error('Import failed:', error);
+        }
+        return result;
+    }
+    /**
+     * Import from file
+     */
+    async importFile(filePath, options = {}) {
+        return this.import(filePath, { ...options, source: 'file' });
+    }
+    /**
+     * Import from URL
+     */
+    async importUrl(url, options = {}) {
+        return this.import(url, { ...options, source: 'url' });
+    }
+    /**
+     * Detect source type
+     */
+    async detectSourceType(source, hint) {
+        if (hint && hint !== 'auto') {
+            return hint;
+        }
+        if (typeof source === 'string') {
+            // Check if URL
+            if (source.startsWith('http://') || source.startsWith('https://')) {
+                return 'url';
+            }
+            // Check if file path exists
+            try {
+                if (await fs.exists(source)) {
+                    return 'file';
+                }
+            }
+            catch (error) {
+                // File system check failed, not a file path
+                console.debug('File path check failed:', error);
+            }
+        }
+        return 'data';
+    }
+    /**
+     * Detect format from file path
+     */
+    detectFormatFromPath(filePath) {
+        const ext = path.extname(filePath).toLowerCase();
+        switch (ext) {
+            case '.json': return 'json';
+            case '.csv': return 'csv';
+            case '.txt': return 'text';
+            case '.md': return 'text';
+            case '.yaml':
+            case '.yml': return 'yaml';
+            default: return 'auto';
+        }
+    }
+    /**
+     * Read file
+     */
+    async readFile(filePath) {
+        const content = await fs.readFile(filePath, 'utf8');
+        return Buffer.from(content, 'utf8');
+    }
+    /**
+     * Fetch from URL
+     */
+    async fetchFromUrl(url) {
+        const response = await fetch(url);
+        if (!response.ok) {
+            throw new Error(`Failed to fetch ${url}: ${response.statusText}`);
+        }
+        return response.text();
+    }
+}
+/**
+ * Create an import manager instance
+ */
+export function createImportManager(brain) {
+    return new ImportManager(brain);
+}
+//# sourceMappingURL=importManager.js.map
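
A similarly hedged sketch of how the import surface above might be used. The import specifier and the `brain` instance are assumptions; only `createImportManager`, `init`, `importFile`, and the `ImportOptions`/`ImportResult` shapes come from this diff.

```ts
// Hypothetical sketch - the import path and `brain` are assumptions
import { createImportManager, ImportOptions } from '@soulcraft/brainy/dist/importManager.js'

declare const brain: any // an initialized Brainy instance exposing add() and relate()

const manager = createImportManager(brain)
await manager.init() // wires up NeuralImportAugmentation and the type matcher

const options: ImportOptions = {
  format: 'csv',
  csvHeaders: true,
  batchSize: 100,
  extractRelationships: true
}

// Source type is auto-detected: http(s) URLs are fetched, existing paths are read as files,
// anything else is treated as in-memory data
const result = await manager.importFile('./contacts.csv', options)
console.log(`${result.stats.imported}/${result.stats.total} imported, ${result.stats.relationships} relationships`)
```
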