@soulcraft/brainy 2.15.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/README.md +249 -152
- package/dist/api/ConfigAPI.d.ts +67 -0
- package/dist/api/ConfigAPI.js +166 -0
- package/dist/api/DataAPI.d.ts +123 -0
- package/dist/api/DataAPI.js +391 -0
- package/dist/api/SecurityAPI.d.ts +50 -0
- package/dist/api/SecurityAPI.js +139 -0
- package/dist/api/UniversalImportAPI.d.ts +134 -0
- package/dist/api/UniversalImportAPI.js +615 -0
- package/dist/augmentationManager.js +12 -7
- package/dist/augmentationPipeline.d.ts +0 -61
- package/dist/augmentationPipeline.js +0 -87
- package/dist/augmentationRegistry.d.ts +1 -1
- package/dist/augmentationRegistry.js +1 -1
- package/dist/augmentations/apiServerAugmentation.d.ts +27 -1
- package/dist/augmentations/apiServerAugmentation.js +288 -7
- package/dist/augmentations/auditLogAugmentation.d.ts +109 -0
- package/dist/augmentations/auditLogAugmentation.js +358 -0
- package/dist/augmentations/batchProcessingAugmentation.d.ts +3 -2
- package/dist/augmentations/batchProcessingAugmentation.js +123 -22
- package/dist/augmentations/brainyAugmentation.d.ts +87 -8
- package/dist/augmentations/brainyAugmentation.js +159 -2
- package/dist/augmentations/cacheAugmentation.d.ts +6 -5
- package/dist/augmentations/cacheAugmentation.js +113 -17
- package/dist/augmentations/conduitAugmentations.d.ts +2 -2
- package/dist/augmentations/conduitAugmentations.js +2 -2
- package/dist/augmentations/configResolver.d.ts +122 -0
- package/dist/augmentations/configResolver.js +440 -0
- package/dist/augmentations/connectionPoolAugmentation.d.ts +3 -1
- package/dist/augmentations/connectionPoolAugmentation.js +37 -12
- package/dist/augmentations/defaultAugmentations.d.ts +9 -11
- package/dist/augmentations/defaultAugmentations.js +4 -11
- package/dist/augmentations/discovery/catalogDiscovery.d.ts +142 -0
- package/dist/augmentations/discovery/catalogDiscovery.js +249 -0
- package/dist/augmentations/discovery/localDiscovery.d.ts +84 -0
- package/dist/augmentations/discovery/localDiscovery.js +246 -0
- package/dist/augmentations/discovery/runtimeLoader.d.ts +97 -0
- package/dist/augmentations/discovery/runtimeLoader.js +337 -0
- package/dist/augmentations/discovery.d.ts +152 -0
- package/dist/augmentations/discovery.js +441 -0
- package/dist/augmentations/display/intelligentComputation.d.ts +1 -1
- package/dist/augmentations/display/intelligentComputation.js +4 -4
- package/dist/augmentations/entityRegistryAugmentation.d.ts +3 -1
- package/dist/augmentations/entityRegistryAugmentation.js +5 -1
- package/dist/augmentations/indexAugmentation.d.ts +3 -3
- package/dist/augmentations/indexAugmentation.js +2 -2
- package/dist/augmentations/intelligentVerbScoringAugmentation.d.ts +22 -6
- package/dist/augmentations/intelligentVerbScoringAugmentation.js +106 -23
- package/dist/augmentations/manifest.d.ts +176 -0
- package/dist/augmentations/manifest.js +8 -0
- package/dist/augmentations/marketplace/AugmentationMarketplace.d.ts +168 -0
- package/dist/augmentations/marketplace/AugmentationMarketplace.js +329 -0
- package/dist/augmentations/marketplace/cli.d.ts +47 -0
- package/dist/augmentations/marketplace/cli.js +265 -0
- package/dist/augmentations/metricsAugmentation.d.ts +3 -3
- package/dist/augmentations/metricsAugmentation.js +2 -2
- package/dist/augmentations/monitoringAugmentation.d.ts +3 -3
- package/dist/augmentations/monitoringAugmentation.js +2 -2
- package/dist/augmentations/neuralImport.d.ts +1 -1
- package/dist/augmentations/rateLimitAugmentation.d.ts +82 -0
- package/dist/augmentations/rateLimitAugmentation.js +321 -0
- package/dist/augmentations/requestDeduplicatorAugmentation.d.ts +2 -2
- package/dist/augmentations/requestDeduplicatorAugmentation.js +1 -1
- package/dist/augmentations/storageAugmentation.d.ts +1 -1
- package/dist/augmentations/storageAugmentation.js +2 -2
- package/dist/augmentations/storageAugmentations.d.ts +37 -8
- package/dist/augmentations/storageAugmentations.js +204 -15
- package/dist/augmentations/synapseAugmentation.d.ts +1 -1
- package/dist/augmentations/synapseAugmentation.js +35 -16
- package/dist/augmentations/typeMatching/intelligentTypeMatcher.d.ts +39 -59
- package/dist/augmentations/typeMatching/intelligentTypeMatcher.js +103 -389
- package/dist/augmentations/universalDisplayAugmentation.d.ts +2 -2
- package/dist/augmentations/universalDisplayAugmentation.js +2 -2
- package/dist/brainy-unified.d.ts +106 -0
- package/dist/brainy-unified.js +327 -0
- package/dist/brainy.d.ts +273 -0
- package/dist/brainy.js +1181 -0
- package/dist/brainyData.d.ts +29 -72
- package/dist/brainyData.js +350 -304
- package/dist/brainyDataV3.d.ts +186 -0
- package/dist/brainyDataV3.js +337 -0
- package/dist/browserFramework.d.ts +6 -6
- package/dist/browserFramework.js +11 -8
- package/dist/browserFramework.minimal.d.ts +5 -5
- package/dist/browserFramework.minimal.js +11 -8
- package/dist/config/index.d.ts +2 -2
- package/dist/config/index.js +3 -3
- package/dist/config/modelAutoConfig.d.ts +6 -7
- package/dist/config/modelAutoConfig.js +17 -76
- package/dist/cortex/backupRestore.d.ts +2 -2
- package/dist/cortex/backupRestore.js +85 -27
- package/dist/cortex/healthCheck.d.ts +2 -2
- package/dist/cortex/neuralImport.d.ts +2 -2
- package/dist/cortex/neuralImport.js +18 -13
- package/dist/cortex/performanceMonitor.d.ts +2 -2
- package/dist/critical/model-guardian.d.ts +4 -0
- package/dist/critical/model-guardian.js +31 -11
- package/dist/demo.d.ts +4 -4
- package/dist/demo.js +7 -7
- package/dist/distributed/cacheSync.d.ts +112 -0
- package/dist/distributed/cacheSync.js +265 -0
- package/dist/distributed/coordinator.d.ts +193 -0
- package/dist/distributed/coordinator.js +548 -0
- package/dist/distributed/httpTransport.d.ts +120 -0
- package/dist/distributed/httpTransport.js +446 -0
- package/dist/distributed/index.d.ts +8 -0
- package/dist/distributed/index.js +5 -0
- package/dist/distributed/networkTransport.d.ts +132 -0
- package/dist/distributed/networkTransport.js +633 -0
- package/dist/distributed/queryPlanner.d.ts +104 -0
- package/dist/distributed/queryPlanner.js +327 -0
- package/dist/distributed/readWriteSeparation.d.ts +134 -0
- package/dist/distributed/readWriteSeparation.js +350 -0
- package/dist/distributed/shardManager.d.ts +114 -0
- package/dist/distributed/shardManager.js +357 -0
- package/dist/distributed/shardMigration.d.ts +110 -0
- package/dist/distributed/shardMigration.js +289 -0
- package/dist/distributed/storageDiscovery.d.ts +160 -0
- package/dist/distributed/storageDiscovery.js +551 -0
- package/dist/embeddings/EmbeddingManager.d.ts +0 -4
- package/dist/embeddings/EmbeddingManager.js +21 -26
- package/dist/errors/brainyError.d.ts +5 -1
- package/dist/errors/brainyError.js +12 -0
- package/dist/examples/basicUsage.js +3 -3
- package/dist/graph/graphAdjacencyIndex.d.ts +96 -0
- package/dist/graph/graphAdjacencyIndex.js +288 -0
- package/dist/graph/pathfinding.js +4 -2
- package/dist/hnsw/scaledHNSWSystem.js +11 -2
- package/dist/importManager.js +6 -3
- package/dist/index.d.ts +12 -21
- package/dist/index.js +14 -22
- package/dist/mcp/brainyMCPAdapter.d.ts +4 -4
- package/dist/mcp/brainyMCPAdapter.js +5 -5
- package/dist/mcp/brainyMCPService.d.ts +3 -3
- package/dist/mcp/brainyMCPService.js +3 -11
- package/dist/mcp/mcpAugmentationToolset.js +20 -30
- package/dist/neural/embeddedPatterns.d.ts +1 -1
- package/dist/neural/embeddedPatterns.js +2 -2
- package/dist/neural/entityExtractor.d.ts +65 -0
- package/dist/neural/entityExtractor.js +316 -0
- package/dist/neural/improvedNeuralAPI.js +90 -79
- package/dist/neural/naturalLanguageProcessor.d.ts +155 -10
- package/dist/neural/naturalLanguageProcessor.js +941 -66
- package/dist/neural/naturalLanguageProcessorStatic.d.ts +2 -2
- package/dist/neural/naturalLanguageProcessorStatic.js +3 -3
- package/dist/neural/neuralAPI.js +8 -2
- package/dist/neural/patternLibrary.d.ts +57 -3
- package/dist/neural/patternLibrary.js +348 -13
- package/dist/neural/staticPatternMatcher.d.ts +2 -2
- package/dist/neural/staticPatternMatcher.js +2 -2
- package/dist/shared/default-augmentations.d.ts +3 -3
- package/dist/shared/default-augmentations.js +5 -5
- package/dist/storage/adapters/fileSystemStorage.d.ts +4 -0
- package/dist/storage/adapters/fileSystemStorage.js +54 -1
- package/dist/storage/adapters/memoryStorage.js +13 -8
- package/dist/storage/backwardCompatibility.d.ts +10 -78
- package/dist/storage/backwardCompatibility.js +17 -132
- package/dist/storage/baseStorage.d.ts +6 -0
- package/dist/storage/baseStorage.js +17 -0
- package/dist/storage/cacheManager.js +2 -2
- package/dist/storage/readOnlyOptimizations.js +8 -3
- package/dist/streaming/pipeline.d.ts +154 -0
- package/dist/streaming/pipeline.js +551 -0
- package/dist/triple/TripleIntelligence.d.ts +25 -110
- package/dist/triple/TripleIntelligence.js +4 -574
- package/dist/triple/TripleIntelligenceSystem.d.ts +159 -0
- package/dist/triple/TripleIntelligenceSystem.js +519 -0
- package/dist/types/apiTypes.d.ts +278 -0
- package/dist/types/apiTypes.js +33 -0
- package/dist/types/brainy.types.d.ts +308 -0
- package/dist/types/brainy.types.js +8 -0
- package/dist/types/brainyDataInterface.d.ts +3 -3
- package/dist/types/brainyDataInterface.js +2 -2
- package/dist/types/graphTypes.js +2 -2
- package/dist/utils/cacheAutoConfig.d.ts +3 -3
- package/dist/utils/embedding.js +8 -14
- package/dist/utils/enhancedLogger.d.ts +104 -0
- package/dist/utils/enhancedLogger.js +232 -0
- package/dist/utils/index.d.ts +1 -1
- package/dist/utils/index.js +1 -1
- package/dist/utils/intelligentTypeMapper.d.ts +60 -0
- package/dist/utils/intelligentTypeMapper.js +349 -0
- package/dist/utils/metadataIndex.d.ts +118 -1
- package/dist/utils/metadataIndex.js +539 -16
- package/dist/utils/paramValidation.d.ts +39 -0
- package/dist/utils/paramValidation.js +192 -0
- package/dist/utils/rateLimiter.d.ts +160 -0
- package/dist/utils/rateLimiter.js +271 -0
- package/dist/utils/statistics.d.ts +4 -4
- package/dist/utils/statistics.js +3 -3
- package/dist/utils/structuredLogger.d.ts +146 -0
- package/dist/utils/structuredLogger.js +394 -0
- package/dist/utils/textEncoding.js +2 -1
- package/dist/utils/typeValidation.d.ts +34 -0
- package/dist/utils/typeValidation.js +247 -0
- package/package.json +14 -6
- package/scripts/download-models.cjs +6 -15
- package/dist/augmentations/walAugmentation.d.ts +0 -111
- package/dist/augmentations/walAugmentation.js +0 -519
- package/dist/chat/BrainyChat.d.ts +0 -121
- package/dist/chat/BrainyChat.js +0 -396
- package/dist/chat/ChatCLI.d.ts +0 -61
- package/dist/chat/ChatCLI.js +0 -351
package/dist/api/DataAPI.js
@@ -0,0 +1,391 @@
+/**
+ * Data Management API for Brainy 3.0
+ * Provides backup, restore, import, export, and data management
+ */
+import { NounType } from '../types/graphTypes.js';
+export class DataAPI {
+    constructor(storage, getEntity, getRelation, brain) {
+        this.storage = storage;
+        this.getEntity = getEntity;
+        this.getRelation = getRelation;
+        this.brain = brain;
+    }
+    /**
+     * Create a backup of all data
+     */
+    async backup(options = {}) {
+        const { includeVectors = true, compress = false, format = 'json' } = options;
+        const startTime = Date.now();
+        // Get all entities
+        const nounsResult = await this.storage.getNouns({
+            pagination: { limit: 1000000 }
+        });
+        const entities = [];
+        for (const noun of nounsResult.items) {
+            const entity = {
+                id: noun.id,
+                vector: includeVectors ? noun.vector : undefined,
+                type: noun.metadata?.noun || NounType.Thing,
+                metadata: noun.metadata,
+                service: noun.metadata?.service
+            };
+            entities.push(entity);
+        }
+        // Get all relations
+        const verbsResult = await this.storage.getVerbs({
+            pagination: { limit: 1000000 }
+        });
+        const relations = [];
+        for (const verb of verbsResult.items) {
+            relations.push({
+                id: verb.id,
+                from: verb.sourceId,
+                to: verb.targetId,
+                type: (verb.verb || verb.type),
+                weight: verb.weight || 1.0,
+                metadata: verb.metadata
+            });
+        }
+        // Create backup data
+        const backupData = {
+            version: '3.0.0',
+            timestamp: Date.now(),
+            entities,
+            relations,
+            stats: {
+                entityCount: entities.length,
+                relationCount: relations.length,
+                vectorDimensions: entities[0]?.vector?.length
+            }
+        };
+        // Compress if requested
+        if (compress) {
+            // Import zlib for compression
+            const { gzipSync } = await import('zlib');
+            const jsonString = JSON.stringify(backupData);
+            const compressed = gzipSync(Buffer.from(jsonString));
+            return {
+                compressed: true,
+                data: compressed.toString('base64'),
+                originalSize: jsonString.length,
+                compressedSize: compressed.length
+            };
+        }
+        return backupData;
+    }
+    /**
+     * Restore data from a backup
+     */
+    async restore(params) {
+        const { backup, merge = false, overwrite = false, validate = true } = params;
+        // Validate backup format
+        if (validate) {
+            if (!backup.version || !backup.entities || !backup.relations) {
+                throw new Error('Invalid backup format');
+            }
+        }
+        // Clear existing data if not merging
+        if (!merge && overwrite) {
+            await this.clear({ entities: true, relations: true });
+        }
+        // Restore entities
+        for (const entity of backup.entities) {
+            try {
+                const noun = {
+                    id: entity.id,
+                    vector: entity.vector || new Array(384).fill(0), // Default vector if missing
+                    connections: new Map(),
+                    level: 0,
+                    metadata: {
+                        ...entity.metadata,
+                        noun: entity.type,
+                        service: entity.service
+                    }
+                };
+                // Check if entity exists when merging
+                if (merge) {
+                    const existing = await this.storage.getNoun(entity.id);
+                    if (existing && !overwrite) {
+                        continue; // Skip existing entities unless overwriting
+                    }
+                }
+                await this.storage.saveNoun(noun);
+            }
+            catch (error) {
+                console.error(`Failed to restore entity ${entity.id}:`, error);
+            }
+        }
+        // Restore relations
+        for (const relation of backup.relations) {
+            try {
+                // Get source and target entities to compute relation vector
+                const sourceNoun = await this.storage.getNoun(relation.from);
+                const targetNoun = await this.storage.getNoun(relation.to);
+                if (!sourceNoun || !targetNoun) {
+                    console.warn(`Skipping relation ${relation.id}: missing entities`);
+                    continue;
+                }
+                // Compute relation vector as average of source and target
+                const relationVector = sourceNoun.vector.map((v, i) => (v + targetNoun.vector[i]) / 2);
+                const verb = {
+                    id: relation.id,
+                    vector: relationVector,
+                    sourceId: relation.from,
+                    targetId: relation.to,
+                    source: sourceNoun.metadata?.noun || NounType.Thing,
+                    target: targetNoun.metadata?.noun || NounType.Thing,
+                    verb: relation.type,
+                    type: relation.type,
+                    weight: relation.weight,
+                    metadata: relation.metadata,
+                    createdAt: Date.now()
+                };
+                // Check if relation exists when merging
+                if (merge) {
+                    const existing = await this.storage.getVerb(relation.id);
+                    if (existing && !overwrite) {
+                        continue;
+                    }
+                }
+                await this.storage.saveVerb(verb);
+            }
+            catch (error) {
+                console.error(`Failed to restore relation ${relation.id}:`, error);
+            }
+        }
+    }
+    /**
+     * Clear data
+     */
+    async clear(params = {}) {
+        const { entities = true, relations = true, config = false } = params;
+        if (entities) {
+            // Clear all entities
+            const nounsResult = await this.storage.getNouns({
+                pagination: { limit: 1000000 }
+            });
+            for (const noun of nounsResult.items) {
+                await this.storage.deleteNoun(noun.id);
+            }
+            // Also clear the HNSW index if available
+            if (this.brain?.index?.clear) {
+                this.brain.index.clear();
+            }
+            // Clear metadata index if available
+            if (this.brain?.metadataIndex) {
+                await this.brain.metadataIndex.rebuild(); // Rebuild empty index
+            }
+        }
+        if (relations) {
+            // Clear all relations
+            const verbsResult = await this.storage.getVerbs({
+                pagination: { limit: 1000000 }
+            });
+            for (const verb of verbsResult.items) {
+                await this.storage.deleteVerb(verb.id);
+            }
+        }
+        if (config) {
+            // Clear configuration would be handled by ConfigAPI
+            // For now, skip this
+        }
+    }
+    /**
+     * Import data from various formats
+     */
+    async import(params) {
+        const { data, format, mapping = {}, batchSize = 100, validate = true } = params;
+        const result = {
+            successful: 0,
+            failed: 0,
+            errors: [],
+            duration: 0
+        };
+        const startTime = Date.now();
+        try {
+            // ALWAYS use neural import for proper type matching
+            const { UniversalImportAPI } = await import('./UniversalImportAPI.js');
+            const universalImport = new UniversalImportAPI(this.brain);
+            await universalImport.init();
+            // Convert to ImportSource format
+            const neuralResult = await universalImport.import({
+                type: 'object',
+                data,
+                format: format || 'json',
+                metadata: { mapping, batchSize, validate }
+            });
+            // Convert neural result to ImportResult format
+            result.successful = neuralResult.stats.entitiesCreated;
+            result.failed = 0; // Neural import always succeeds with best match
+            result.duration = neuralResult.stats.processingTimeMs;
+            // Log relationships created
+            if (neuralResult.stats.relationshipsCreated > 0) {
+                console.log(`Neural import also created ${neuralResult.stats.relationshipsCreated} relationships`);
+            }
+            return result;
+        }
+        catch (error) {
+            // Fallback to legacy import ONLY if neural import fails to load
+            console.warn('Neural import failed, using legacy import:', error);
+            let items = [];
+            // Parse data based on format
+            switch (format) {
+                case 'json':
+                    items = Array.isArray(data) ? data : [data];
+                    break;
+                case 'csv':
+                    // CSV parsing would go here
+                    // For now, assume data is already parsed
+                    items = data;
+                    break;
+                // Parquet format removed - not implemented
+                default:
+                    throw new Error(`Unsupported format: ${format}`);
+            }
+            // Process items in batches
+            for (let i = 0; i < items.length; i += batchSize) {
+                const batch = items.slice(i, i + batchSize);
+                for (const item of batch) {
+                    try {
+                        // Apply field mapping
+                        const mapped = this.applyMapping(item, mapping);
+                        // Validate if requested
+                        if (validate) {
+                            this.validateImportItem(mapped);
+                        }
+                        // Save as entity
+                        const noun = {
+                            id: mapped.id || this.generateId(),
+                            vector: mapped.vector || new Array(384).fill(0),
+                            connections: new Map(),
+                            level: 0,
+                            metadata: mapped
+                        };
+                        await this.storage.saveNoun(noun);
+                        result.successful++;
+                    }
+                    catch (error) {
+                        result.failed++;
+                        result.errors.push({
+                            item,
+                            error: error.message
+                        });
+                    }
+                }
+            }
+            result.duration = Date.now() - startTime;
+            return result;
+        }
+    }
+    /**
+     * Export data to various formats
+     */
+    async export(params = {}) {
+        const { format = 'json', filter = {}, includeVectors = false } = params;
+        // Get filtered entities
+        const nounsResult = await this.storage.getNouns({
+            pagination: { limit: 1000000 }
+        });
+        let entities = nounsResult.items;
+        // Apply filters
+        if (filter.type) {
+            const types = Array.isArray(filter.type) ? filter.type : [filter.type];
+            entities = entities.filter(e => types.includes(e.metadata?.noun));
+        }
+        if (filter.service) {
+            entities = entities.filter(e => e.metadata?.service === filter.service);
+        }
+        if (filter.where) {
+            entities = entities.filter(e => this.matchesFilter(e.metadata, filter.where));
+        }
+        // Format data based on export format
+        switch (format) {
+            case 'json':
+                return entities.map(e => ({
+                    id: e.id,
+                    vector: includeVectors ? e.vector : undefined,
+                    ...e.metadata
+                }));
+            case 'csv':
+                // Convert to CSV format
+                // For now, return simplified format
+                return this.convertToCSV(entities);
+            // Parquet format removed - not implemented
+            default:
+                throw new Error(`Unsupported export format: ${format}`);
+        }
+    }
+    /**
+     * Get storage statistics
+     */
+    async getStats() {
+        const nounsResult = await this.storage.getNouns({
+            pagination: { limit: 1 }
+        });
+        const verbsResult = await this.storage.getVerbs({
+            pagination: { limit: 1 }
+        });
+        const firstNoun = nounsResult.items[0];
+        return {
+            entities: nounsResult.totalCount || nounsResult.items.length,
+            relations: verbsResult.totalCount || verbsResult.items.length,
+            vectorDimensions: firstNoun?.vector?.length
+        };
+    }
+    // Helper methods
+    applyMapping(item, mapping) {
+        const mapped = {};
+        for (const [key, value] of Object.entries(item)) {
+            const mappedKey = mapping[key] || key;
+            mapped[mappedKey] = value;
+        }
+        return mapped;
+    }
+    validateImportItem(item) {
+        // Basic validation
+        if (!item || typeof item !== 'object') {
+            throw new Error('Invalid item: must be an object');
+        }
+        // Could add more validation here
+    }
+    matchesFilter(metadata, filter) {
+        for (const [key, value] of Object.entries(filter)) {
+            if (metadata[key] !== value) {
+                return false;
+            }
+        }
+        return true;
+    }
+    convertToCSV(entities) {
+        if (entities.length === 0)
+            return '';
+        // Get all unique keys from metadata
+        const keys = new Set();
+        for (const entity of entities) {
+            if (entity.metadata) {
+                Object.keys(entity.metadata).forEach(k => keys.add(k));
+            }
+        }
+        // Create CSV header
+        const headers = ['id', ...Array.from(keys)];
+        const rows = [headers.join(',')];
+        // Add data rows
+        for (const entity of entities) {
+            const row = [entity.id];
+            for (const key of keys) {
+                const value = entity.metadata?.[key] || '';
+                // Escape values that contain commas
+                const escaped = String(value).includes(',')
+                    ? `"${String(value).replace(/"/g, '""')}"`
+                    : String(value);
+                row.push(escaped);
+            }
+            rows.push(row.join(','));
+        }
+        return rows.join('\n');
+    }
+    generateId() {
+        return `import_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+    }
+}
+//# sourceMappingURL=DataAPI.js.map
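The hunk above is the whole of the new `DataAPI`: full-scan `backup()`, merge-aware `restore()`, `clear()`, a neural-first `import()` with a legacy fallback, filtered `export()`, and `getStats()`. A minimal TypeScript sketch of how a caller might exercise that surface, assuming you already hold the storage adapter and `brain` instance that Brainy 3.x wires in internally (the deep-import path, the `any`-typed handles, and the bare constructor call are illustrative, not a documented entry point):

```ts
// Sketch only: DataAPI is normally constructed inside Brainy; the import path,
// the `any`-typed handles, and the { status: 'active' } filter are assumptions.
import { DataAPI } from '@soulcraft/brainy/dist/api/DataAPI.js';

export async function snapshotRoundTrip(storage: any, brain: any): Promise<void> {
  // Constructor signature from the diff: (storage, getEntity, getRelation, brain);
  // the getEntity/getRelation callbacks are elided in this sketch.
  const dataApi = new DataAPI(storage, undefined, undefined, brain);

  // Compressed backup: { compressed: true, data: <base64 gzip>, originalSize, compressedSize }
  const compressed = await dataApi.backup({ includeVectors: true, compress: true });

  // Plain backup: { version, timestamp, entities, relations, stats }
  const snapshot = await dataApi.backup({ includeVectors: false });

  // Merge the snapshot back without overwriting entities that already exist
  await dataApi.restore({ backup: snapshot, merge: true, overwrite: false });

  // CSV export of entities whose metadata matches the `where` filter
  const csv = await dataApi.export({ format: 'csv', filter: { where: { status: 'active' } } });
  console.log(csv.split('\n')[0]); // header row: id,<metadata keys>

  const stats = await dataApi.getStats();
  console.log(`entities=${stats.entities} relations=${stats.relations} dims=${stats.vectorDimensions}`);
}
```

Note that `backup()` and `export()` page through everything with `pagination: { limit: 1000000 }`, so on large datasets both calls materialize the full graph in memory.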
package/dist/api/SecurityAPI.d.ts
@@ -0,0 +1,50 @@
+/**
+ * Security API for Brainy 3.0
+ * Provides encryption, decryption, hashing, and secure storage
+ */
+export declare class SecurityAPI {
+    private config?;
+    private encryptionKey?;
+    constructor(config?: {
+        encryptionKey?: string;
+    } | undefined);
+    /**
+     * Encrypt data using AES-256-CBC
+     */
+    encrypt(data: string): Promise<string>;
+    /**
+     * Decrypt data encrypted with encrypt()
+     */
+    decrypt(encryptedData: string): Promise<string>;
+    /**
+     * Create a one-way hash of data (for passwords, etc)
+     */
+    hash(data: string, algorithm?: 'sha256' | 'sha512'): Promise<string>;
+    /**
+     * Compare data with a hash (for password verification)
+     */
+    compare(data: string, hash: string, algorithm?: 'sha256' | 'sha512'): Promise<boolean>;
+    /**
+     * Generate a secure random token
+     */
+    generateToken(bytes?: number): Promise<string>;
+    /**
+     * Derive a key from a password using PBKDF2
+     * Note: Simplified version using hash instead of PBKDF2 which may not be available
+     */
+    deriveKey(password: string, salt?: string, iterations?: number): Promise<{
+        key: string;
+        salt: string;
+    }>;
+    /**
+     * Sign data with HMAC
+     */
+    sign(data: string, secret?: string): Promise<string>;
+    /**
+     * Verify HMAC signature
+     */
+    verify(data: string, signature: string, secret: string): Promise<boolean>;
+    private hexToBytes;
+    private bytesToHex;
+    private constantTimeCompare;
+}
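Per these declarations, everything on `SecurityAPI` is promise-based, including the hash helpers. A small sketch of the password-style flow the signatures imply (the deep-import path is an assumption):

```ts
// Sketch against the declared surface only; the import path is illustrative.
import { SecurityAPI } from '@soulcraft/brainy/dist/api/SecurityAPI.js';

export async function passwordFlow(): Promise<void> {
  const security = new SecurityAPI(); // no encryptionKey needed for hashing

  // One-way hash plus constant-time comparison, per the declarations above
  const stored = await security.hash('s3cret-passphrase', 'sha512');
  const ok = await security.compare('s3cret-passphrase', stored, 'sha512'); // true

  // 32 random bytes, hex-encoded (64 characters)
  const sessionToken = await security.generateToken();

  // Key derivation; the declaration itself flags this as hash-based, not true PBKDF2
  const { key, salt } = await security.deriveKey('s3cret-passphrase');
  console.log(ok, sessionToken.length, key.length, salt.length);
}
```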
package/dist/api/SecurityAPI.js
@@ -0,0 +1,139 @@
+/**
+ * Security API for Brainy 3.0
+ * Provides encryption, decryption, hashing, and secure storage
+ */
+export class SecurityAPI {
+    constructor(config) {
+        this.config = config;
+        if (config?.encryptionKey) {
+            // Use provided key (must be 32 bytes hex string)
+            this.encryptionKey = this.hexToBytes(config.encryptionKey);
+        }
+    }
+    /**
+     * Encrypt data using AES-256-CBC
+     */
+    async encrypt(data) {
+        const crypto = await import('../universal/crypto.js');
+        // Generate or use existing key
+        const key = this.encryptionKey || crypto.randomBytes(32);
+        const iv = crypto.randomBytes(16);
+        const cipher = crypto.createCipheriv('aes-256-cbc', key, iv);
+        let encrypted = cipher.update(data, 'utf8', 'hex');
+        encrypted += cipher.final('hex');
+        // Package encrypted data with metadata
+        // In production, store keys separately in a key management service
+        return JSON.stringify({
+            encrypted,
+            key: this.bytesToHex(key),
+            iv: this.bytesToHex(iv),
+            algorithm: 'aes-256-cbc',
+            timestamp: Date.now()
+        });
+    }
+    /**
+     * Decrypt data encrypted with encrypt()
+     */
+    async decrypt(encryptedData) {
+        const crypto = await import('../universal/crypto.js');
+        try {
+            const parsed = JSON.parse(encryptedData);
+            const { encrypted, key: keyHex, iv: ivHex, algorithm } = parsed;
+            if (algorithm && algorithm !== 'aes-256-cbc') {
+                throw new Error(`Unsupported encryption algorithm: ${algorithm}`);
+            }
+            const key = this.hexToBytes(keyHex);
+            const iv = this.hexToBytes(ivHex);
+            const decipher = crypto.createDecipheriv('aes-256-cbc', key, iv);
+            let decrypted = decipher.update(encrypted, 'hex', 'utf8');
+            decrypted += decipher.final('utf8');
+            return decrypted;
+        }
+        catch (error) {
+            throw new Error(`Decryption failed: ${error.message}`);
+        }
+    }
+    /**
+     * Create a one-way hash of data (for passwords, etc)
+     */
+    async hash(data, algorithm = 'sha256') {
+        const crypto = await import('../universal/crypto.js');
+        const hash = crypto.createHash(algorithm);
+        hash.update(data);
+        return hash.digest('hex');
+    }
+    /**
+     * Compare data with a hash (for password verification)
+     */
+    async compare(data, hash, algorithm = 'sha256') {
+        const dataHash = await this.hash(data, algorithm);
+        return this.constantTimeCompare(dataHash, hash);
+    }
+    /**
+     * Generate a secure random token
+     */
+    async generateToken(bytes = 32) {
+        const crypto = await import('../universal/crypto.js');
+        const buffer = crypto.randomBytes(bytes);
+        return this.bytesToHex(buffer);
+    }
+    /**
+     * Derive a key from a password using PBKDF2
+     * Note: Simplified version using hash instead of PBKDF2 which may not be available
+     */
+    async deriveKey(password, salt, iterations = 100000) {
+        const crypto = await import('../universal/crypto.js');
+        const actualSalt = salt || this.bytesToHex(crypto.randomBytes(32));
+        // Simplified key derivation using repeated hashing
+        // In production, use a proper PBKDF2 implementation
+        let derived = password + actualSalt;
+        for (let i = 0; i < Math.min(iterations, 1000); i++) {
+            const hash = crypto.createHash('sha256');
+            hash.update(derived);
+            derived = hash.digest('hex');
+        }
+        return {
+            key: derived,
+            salt: actualSalt
+        };
+    }
+    /**
+     * Sign data with HMAC
+     */
+    async sign(data, secret) {
+        const crypto = await import('../universal/crypto.js');
+        const actualSecret = secret || (await this.generateToken());
+        const hmac = crypto.createHmac('sha256', actualSecret);
+        hmac.update(data);
+        return hmac.digest('hex');
+    }
+    /**
+     * Verify HMAC signature
+     */
+    async verify(data, signature, secret) {
+        const expectedSignature = await this.sign(data, secret);
+        return this.constantTimeCompare(signature, expectedSignature);
+    }
+    // Helper methods
+    hexToBytes(hex) {
+        const matches = hex.match(/.{1,2}/g);
+        if (!matches)
+            throw new Error('Invalid hex string');
+        return new Uint8Array(matches.map(byte => parseInt(byte, 16)));
+    }
+    bytesToHex(bytes) {
+        return Array.from(bytes)
+            .map(b => b.toString(16).padStart(2, '0'))
+            .join('');
+    }
+    constantTimeCompare(a, b) {
+        if (a.length !== b.length)
+            return false;
+        let result = 0;
+        for (let i = 0; i < a.length; i++) {
+            result |= a.charCodeAt(i) ^ b.charCodeAt(i);
+        }
+        return result === 0;
+    }
+}
+//# sourceMappingURL=SecurityAPI.js.map
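Finally, a sketch of the symmetric-encryption and HMAC paths in the implementation above. As the code's own comment notes, when no `encryptionKey` is configured `encrypt()` generates a random key and embeds it, hex-encoded, in the returned JSON envelope next to the ciphertext, so the envelope alone is enough to decrypt it; supplying a key through the constructor and storing it elsewhere is what the "key management service" comment is pointing at. The 64-hex-character key value and the import path below are illustrative:

```ts
// Sketch only; the key value and import path are placeholders.
import { SecurityAPI } from '@soulcraft/brainy/dist/api/SecurityAPI.js';

export async function cryptoFlow(): Promise<void> {
  // 32-byte key supplied as a 64-character hex string, per the constructor comment
  const security = new SecurityAPI({ encryptionKey: 'ab'.repeat(32) });

  // encrypt() returns a JSON envelope: { encrypted, key, iv, algorithm, timestamp }
  const envelope = await security.encrypt('hello brainy 3.0');
  const plaintext = await security.decrypt(envelope); // 'hello brainy 3.0'

  // HMAC-SHA256: verify() re-signs with the same secret and compares in constant time
  const secret = await security.generateToken();
  const signature = await security.sign('payload-to-protect', secret);
  const valid = await security.verify('payload-to-protect', signature, secret); // true

  console.log(plaintext, valid);
}
```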