@soulcraft/brainy 1.5.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +188 -0
- package/LICENSE +2 -2
- package/README.md +201 -596
- package/bin/brainy-interactive.js +564 -0
- package/bin/brainy-ts.js +18 -0
- package/bin/brainy.js +672 -81
- package/dist/augmentationPipeline.d.ts +48 -220
- package/dist/augmentationPipeline.js +60 -508
- package/dist/augmentationRegistry.d.ts +22 -31
- package/dist/augmentationRegistry.js +28 -79
- package/dist/augmentations/apiServerAugmentation.d.ts +108 -0
- package/dist/augmentations/apiServerAugmentation.js +502 -0
- package/dist/augmentations/batchProcessingAugmentation.d.ts +95 -0
- package/dist/augmentations/batchProcessingAugmentation.js +567 -0
- package/dist/augmentations/brainyAugmentation.d.ts +153 -0
- package/dist/augmentations/brainyAugmentation.js +145 -0
- package/dist/augmentations/cacheAugmentation.d.ts +105 -0
- package/dist/augmentations/cacheAugmentation.js +238 -0
- package/dist/augmentations/conduitAugmentations.d.ts +54 -156
- package/dist/augmentations/conduitAugmentations.js +156 -1082
- package/dist/augmentations/connectionPoolAugmentation.d.ts +62 -0
- package/dist/augmentations/connectionPoolAugmentation.js +316 -0
- package/dist/augmentations/defaultAugmentations.d.ts +53 -0
- package/dist/augmentations/defaultAugmentations.js +88 -0
- package/dist/augmentations/entityRegistryAugmentation.d.ts +126 -0
- package/dist/augmentations/entityRegistryAugmentation.js +386 -0
- package/dist/augmentations/indexAugmentation.d.ts +117 -0
- package/dist/augmentations/indexAugmentation.js +284 -0
- package/dist/augmentations/intelligentVerbScoringAugmentation.d.ts +152 -0
- package/dist/augmentations/intelligentVerbScoringAugmentation.js +554 -0
- package/dist/augmentations/metricsAugmentation.d.ts +202 -0
- package/dist/augmentations/metricsAugmentation.js +291 -0
- package/dist/augmentations/monitoringAugmentation.d.ts +94 -0
- package/dist/augmentations/monitoringAugmentation.js +227 -0
- package/dist/augmentations/neuralImport.d.ts +50 -117
- package/dist/augmentations/neuralImport.js +255 -629
- package/dist/augmentations/requestDeduplicatorAugmentation.d.ts +52 -0
- package/dist/augmentations/requestDeduplicatorAugmentation.js +162 -0
- package/dist/augmentations/serverSearchAugmentations.d.ts +43 -22
- package/dist/augmentations/serverSearchAugmentations.js +125 -72
- package/dist/augmentations/storageAugmentation.d.ts +54 -0
- package/dist/augmentations/storageAugmentation.js +93 -0
- package/dist/augmentations/storageAugmentations.d.ts +96 -0
- package/dist/augmentations/storageAugmentations.js +182 -0
- package/dist/augmentations/synapseAugmentation.d.ts +156 -0
- package/dist/augmentations/synapseAugmentation.js +312 -0
- package/dist/augmentations/walAugmentation.d.ts +108 -0
- package/dist/augmentations/walAugmentation.js +515 -0
- package/dist/brainyData.d.ts +404 -130
- package/dist/brainyData.js +1331 -853
- package/dist/chat/BrainyChat.d.ts +16 -8
- package/dist/chat/BrainyChat.js +60 -32
- package/dist/chat/ChatCLI.d.ts +1 -1
- package/dist/chat/ChatCLI.js +6 -6
- package/dist/cli/catalog.d.ts +3 -3
- package/dist/cli/catalog.js +116 -70
- package/dist/cli/commands/core.d.ts +61 -0
- package/dist/cli/commands/core.js +348 -0
- package/dist/cli/commands/neural.d.ts +25 -0
- package/dist/cli/commands/neural.js +508 -0
- package/dist/cli/commands/utility.d.ts +37 -0
- package/dist/cli/commands/utility.js +276 -0
- package/dist/cli/index.d.ts +7 -0
- package/dist/cli/index.js +167 -0
- package/dist/cli/interactive.d.ts +164 -0
- package/dist/cli/interactive.js +542 -0
- package/dist/cortex/neuralImport.js +5 -5
- package/dist/critical/model-guardian.js +11 -4
- package/dist/embeddings/lightweight-embedder.d.ts +23 -0
- package/dist/embeddings/lightweight-embedder.js +136 -0
- package/dist/embeddings/universal-memory-manager.d.ts +38 -0
- package/dist/embeddings/universal-memory-manager.js +206 -0
- package/dist/embeddings/worker-embedding.d.ts +7 -0
- package/dist/embeddings/worker-embedding.js +77 -0
- package/dist/embeddings/worker-manager.d.ts +28 -0
- package/dist/embeddings/worker-manager.js +162 -0
- package/dist/examples/basicUsage.js +7 -7
- package/dist/graph/pathfinding.d.ts +78 -0
- package/dist/graph/pathfinding.js +393 -0
- package/dist/hnsw/hnswIndex.d.ts +13 -0
- package/dist/hnsw/hnswIndex.js +35 -0
- package/dist/hnsw/hnswIndexOptimized.d.ts +1 -0
- package/dist/hnsw/hnswIndexOptimized.js +3 -0
- package/dist/index.d.ts +9 -11
- package/dist/index.js +21 -11
- package/dist/indices/fieldIndex.d.ts +76 -0
- package/dist/indices/fieldIndex.js +357 -0
- package/dist/mcp/brainyMCPAdapter.js +3 -2
- package/dist/mcp/mcpAugmentationToolset.js +11 -17
- package/dist/neural/embeddedPatterns.d.ts +41 -0
- package/dist/neural/embeddedPatterns.js +4044 -0
- package/dist/neural/naturalLanguageProcessor.d.ts +94 -0
- package/dist/neural/naturalLanguageProcessor.js +317 -0
- package/dist/neural/naturalLanguageProcessorStatic.d.ts +64 -0
- package/dist/neural/naturalLanguageProcessorStatic.js +151 -0
- package/dist/neural/neuralAPI.d.ts +255 -0
- package/dist/neural/neuralAPI.js +612 -0
- package/dist/neural/patternLibrary.d.ts +101 -0
- package/dist/neural/patternLibrary.js +313 -0
- package/dist/neural/patterns.d.ts +27 -0
- package/dist/neural/patterns.js +68 -0
- package/dist/neural/staticPatternMatcher.d.ts +35 -0
- package/dist/neural/staticPatternMatcher.js +153 -0
- package/dist/scripts/precomputePatternEmbeddings.d.ts +19 -0
- package/dist/scripts/precomputePatternEmbeddings.js +100 -0
- package/dist/storage/adapters/fileSystemStorage.d.ts +5 -0
- package/dist/storage/adapters/fileSystemStorage.js +20 -0
- package/dist/storage/adapters/s3CompatibleStorage.d.ts +5 -0
- package/dist/storage/adapters/s3CompatibleStorage.js +16 -0
- package/dist/storage/enhancedClearOperations.d.ts +83 -0
- package/dist/storage/enhancedClearOperations.js +345 -0
- package/dist/storage/storageFactory.js +31 -27
- package/dist/triple/TripleIntelligence.d.ts +134 -0
- package/dist/triple/TripleIntelligence.js +548 -0
- package/dist/types/augmentations.d.ts +45 -344
- package/dist/types/augmentations.js +5 -2
- package/dist/types/brainyDataInterface.d.ts +20 -10
- package/dist/types/graphTypes.d.ts +46 -0
- package/dist/types/graphTypes.js +16 -2
- package/dist/utils/BoundedRegistry.d.ts +29 -0
- package/dist/utils/BoundedRegistry.js +54 -0
- package/dist/utils/embedding.js +20 -3
- package/dist/utils/hybridModelManager.js +10 -5
- package/dist/utils/metadataFilter.d.ts +33 -19
- package/dist/utils/metadataFilter.js +58 -23
- package/dist/utils/metadataIndex.d.ts +37 -6
- package/dist/utils/metadataIndex.js +427 -64
- package/dist/utils/requestDeduplicator.d.ts +10 -0
- package/dist/utils/requestDeduplicator.js +24 -0
- package/dist/utils/unifiedCache.d.ts +103 -0
- package/dist/utils/unifiedCache.js +311 -0
- package/package.json +43 -128
- package/scripts/ensure-models.js +108 -0
- package/scripts/prepare-models.js +387 -0
- package/OFFLINE_MODELS.md +0 -56
- package/dist/intelligence/neuralEngine.d.ts +0 -207
- package/dist/intelligence/neuralEngine.js +0 -706
- package/dist/utils/modelLoader.d.ts +0 -32
- package/dist/utils/modelLoader.js +0 -219
- package/dist/utils/modelManager.d.ts +0 -77
- package/dist/utils/modelManager.js +0 -219
package/dist/examples/basicUsage.js
CHANGED

@@ -36,13 +36,13 @@ async function runExample() {
     // Add vectors to the database
     const ids = {};
     for (const [word, vector] of Object.entries(wordEmbeddings)) {
-        ids[word] = await db.
+        ids[word] = await db.addNoun(vector, metadata[word]);
         console.log(`Added "${word}" with ID: ${ids[word]}`);
     }
     console.log('\nDatabase size:', db.size());
     // Search for similar vectors
     console.log('\nSearching for vectors similar to "cat"...');
-    const catResults = await db.search(wordEmbeddings['cat'], 3);
+    const catResults = await db.search(wordEmbeddings['cat'], { limit: 3 });
     console.log('Results:');
     for (const result of catResults) {
         const word = Object.entries(ids).find(([_, id]) => id === result.id)?.[0] || 'unknown';
@@ -50,7 +50,7 @@ async function runExample() {
     }
     // Search for similar vectors
     console.log('\nSearching for vectors similar to "fish"...');
-    const fishResults = await db.search(wordEmbeddings['fish'], 3);
+    const fishResults = await db.search(wordEmbeddings['fish'], { limit: 3 });
     console.log('Results:');
     for (const result of fishResults) {
         const word = Object.entries(ids).find(([_, id]) => id === result.id)?.[0] || 'unknown';
@@ -58,20 +58,20 @@ async function runExample() {
     }
     // Update metadata
     console.log('\nUpdating metadata for "bird"...');
-    await db.
+    await db.updateNounMetadata(ids['bird'], {
         ...metadata['bird'],
         notes: 'Can fly'
     });
     // Get the updated document
-    const birdDoc = await db.
+    const birdDoc = await db.getNoun(ids['bird']);
     console.log('Updated bird document:', birdDoc);
     // Delete a vector
     console.log('\nDeleting "shark"...');
-    await db.
+    await db.deleteNoun(ids['shark']);
     console.log('Database size after deletion:', db.size());
     // Search again to verify shark is gone
     console.log('\nSearching for vectors similar to "fish" after deletion...');
-    const fishResultsAfterDeletion = await db.search(wordEmbeddings['fish'], 3);
+    const fishResultsAfterDeletion = await db.search(wordEmbeddings['fish'], { limit: 3 });
     console.log('Results:');
     for (const result of fishResultsAfterDeletion) {
         const word = Object.entries(ids).find(([_, id]) => id === result.id)?.[0] || 'unknown';
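The example updates above track the 2.0 method renames (addNoun, updateNounMetadata, getNoun, deleteNoun) and the switch from a positional result count to a { limit } options object in search(). A minimal sketch of the same round trip against the 2.0 surface follows; the 1.x method names are truncated in the hunks above, and the BrainyData import from the package root plus the metadata shape are assumptions rather than facts taken from this diff, so treat the types loosely.

import type { BrainyData } from '@soulcraft/brainy'

// Exercises only the calls shown in the updated example; how the BrainyData
// instance is constructed and initialized is deliberately left out.
async function roundTrip(db: BrainyData, vector: number[]): Promise<void> {
  const id = await db.addNoun(vector, { label: 'cat' })        // renamed in 2.0
  const results = await db.search(vector, { limit: 3 })        // 1.x passed the count positionally
  console.log(results.length, db.size())
  await db.updateNounMetadata(id, { label: 'cat', notes: 'pet' })
  console.log(await db.getNoun(id))
  await db.deleteNoun(id)
}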
package/dist/graph/pathfinding.d.ts
ADDED

@@ -0,0 +1,78 @@
+/**
+ * Advanced Graph Pathfinding Algorithms
+ * Provides shortest path, multi-hop traversal, and path ranking
+ */
+export interface GraphNode {
+    id: string;
+    [key: string]: any;
+}
+export interface GraphEdge {
+    source: string;
+    target: string;
+    type: string;
+    weight: number;
+    metadata?: any;
+}
+export interface Path {
+    nodes: string[];
+    edges: GraphEdge[];
+    totalWeight: number;
+    length: number;
+}
+export interface PathfindingOptions {
+    maxDepth?: number;
+    maxPaths?: number;
+    bidirectional?: boolean;
+    weightField?: string;
+    relationshipTypes?: string[];
+    nodeFilter?: (node: GraphNode) => boolean;
+    edgeFilter?: (edge: GraphEdge) => boolean;
+}
+export declare class GraphPathfinding {
+    private adjacencyList;
+    private nodes;
+    /**
+     * Add a node to the graph
+     */
+    addNode(node: GraphNode): void;
+    /**
+     * Add an edge to the graph
+     */
+    addEdge(edge: GraphEdge): void;
+    /**
+     * Find shortest path using Dijkstra's algorithm
+     * O((V + E) log V) with binary heap
+     */
+    shortestPath(start: string, end: string, options?: PathfindingOptions): Path | null;
+    /**
+     * Find all paths between two nodes
+     * Uses DFS with cycle detection
+     */
+    allPaths(start: string, end: string, options?: PathfindingOptions): Path[];
+    /**
+     * Bidirectional search for faster pathfinding
+     * Searches from both start and end simultaneously
+     */
+    bidirectionalSearch(start: string, end: string, options?: PathfindingOptions): Path | null;
+    /**
+     * Multi-hop traversal (e.g., friends of friends)
+     * Returns all nodes within N hops
+     */
+    multiHopTraversal(start: string, hops: number, options?: PathfindingOptions): Map<string, {
+        distance: number;
+        paths: Path[];
+    }>;
+    /**
+     * Find connected components using DFS
+     */
+    connectedComponents(): Array<Set<string>>;
+    /**
+     * Calculate PageRank for all nodes
+     * Useful for ranking importance in the graph
+     */
+    pageRank(iterations?: number, damping?: number): Map<string, number>;
+    /**
+     * Clear the graph
+     */
+    clear(): void;
+}
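The declarations above describe the full surface of the new pathfinding module; the implementation follows in the next file. A small usage sketch, based only on those signatures: the import specifier is illustrative, since this hunk does not show GraphPathfinding being re-exported from the package root.

// Illustrative path; adjust to however the module is actually resolved.
import { GraphPathfinding } from '@soulcraft/brainy/dist/graph/pathfinding.js'

const graph = new GraphPathfinding()
graph.addNode({ id: 'alice' })
graph.addNode({ id: 'bob' })
graph.addNode({ id: 'carol' })
graph.addEdge({ source: 'alice', target: 'bob', type: 'knows', weight: 1 })
graph.addEdge({ source: 'bob', target: 'carol', type: 'knows', weight: 2 })

// Cheapest route by total edge weight (Dijkstra over the adjacency list).
const path = graph.shortestPath('alice', 'carol', { relationshipTypes: ['knows'] })
console.log(path?.nodes, path?.totalWeight)   // ['alice', 'bob', 'carol'], 3

// Every node reachable within two hops, with the paths that reach it.
const nearby = graph.multiHopTraversal('alice', 2)
console.log([...nearby.keys()])

// Relative importance of each node in the graph.
console.log(graph.pageRank(50, 0.85))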
package/dist/graph/pathfinding.js
ADDED

@@ -0,0 +1,393 @@
+/**
+ * Advanced Graph Pathfinding Algorithms
+ * Provides shortest path, multi-hop traversal, and path ranking
+ */
+export class GraphPathfinding {
+    constructor() {
+        this.adjacencyList = new Map();
+        this.nodes = new Map();
+    }
+    /**
+     * Add a node to the graph
+     */
+    addNode(node) {
+        this.nodes.set(node.id, node);
+        if (!this.adjacencyList.has(node.id)) {
+            this.adjacencyList.set(node.id, new Map());
+        }
+    }
+    /**
+     * Add an edge to the graph
+     */
+    addEdge(edge) {
+        // Ensure nodes exist
+        if (!this.adjacencyList.has(edge.source)) {
+            this.adjacencyList.set(edge.source, new Map());
+        }
+        if (!this.adjacencyList.has(edge.target)) {
+            this.adjacencyList.set(edge.target, new Map());
+        }
+        // Add edge to adjacency list
+        const sourceEdges = this.adjacencyList.get(edge.source);
+        if (!sourceEdges.has(edge.target)) {
+            sourceEdges.set(edge.target, []);
+        }
+        sourceEdges.get(edge.target).push(edge);
+    }
+    /**
+     * Find shortest path using Dijkstra's algorithm
+     * O((V + E) log V) with binary heap
+     */
+    shortestPath(start, end, options = {}) {
+        const { maxDepth = Infinity, relationshipTypes, edgeFilter } = options;
+        // Priority queue: [nodeId, distance, path]
+        const pq = [[start, 0, [start], []]];
+        const visited = new Set();
+        const distances = new Map([[start, 0]]);
+        while (pq.length > 0) {
+            // Sort by distance (simple array, could optimize with heap)
+            pq.sort((a, b) => a[1] - b[1]);
+            const [current, distance, path, edges] = pq.shift();
+            if (visited.has(current))
+                continue;
+            visited.add(current);
+            // Found target
+            if (current === end) {
+                return {
+                    nodes: path,
+                    edges,
+                    totalWeight: distance,
+                    length: path.length - 1
+                };
+            }
+            // Max depth reached
+            if (path.length > maxDepth)
+                continue;
+            // Explore neighbors
+            const neighbors = this.adjacencyList.get(current);
+            if (!neighbors)
+                continue;
+            for (const [neighbor, edgeList] of neighbors) {
+                if (visited.has(neighbor))
+                    continue;
+                // Find best edge to neighbor
+                let bestEdge = null;
+                let bestWeight = Infinity;
+                for (const edge of edgeList) {
+                    // Apply filters
+                    if (relationshipTypes && !relationshipTypes.includes(edge.type))
+                        continue;
+                    if (edgeFilter && !edgeFilter(edge))
+                        continue;
+                    if (edge.weight < bestWeight) {
+                        bestWeight = edge.weight;
+                        bestEdge = edge;
+                    }
+                }
+                if (!bestEdge)
+                    continue;
+                const newDistance = distance + bestWeight;
+                const currentBest = distances.get(neighbor) ?? Infinity;
+                if (newDistance < currentBest) {
+                    distances.set(neighbor, newDistance);
+                    pq.push([
+                        neighbor,
+                        newDistance,
+                        [...path, neighbor],
+                        [...edges, bestEdge]
+                    ]);
+                }
+            }
+        }
+        return null; // No path found
+    }
+    /**
+     * Find all paths between two nodes
+     * Uses DFS with cycle detection
+     */
+    allPaths(start, end, options = {}) {
+        const { maxDepth = 10, maxPaths = 100, relationshipTypes, edgeFilter } = options;
+        const paths = [];
+        const visited = new Set();
+        const dfs = (current, path, edges, weight) => {
+            if (paths.length >= maxPaths)
+                return;
+            if (path.length > maxDepth)
+                return;
+            if (current === end && path.length > 1) {
+                paths.push({
+                    nodes: [...path],
+                    edges: [...edges],
+                    totalWeight: weight,
+                    length: path.length - 1
+                });
+                return;
+            }
+            visited.add(current);
+            const neighbors = this.adjacencyList.get(current);
+            if (neighbors) {
+                for (const [neighbor, edgeList] of neighbors) {
+                    if (visited.has(neighbor))
+                        continue;
+                    for (const edge of edgeList) {
+                        // Apply filters
+                        if (relationshipTypes && !relationshipTypes.includes(edge.type))
+                            continue;
+                        if (edgeFilter && !edgeFilter(edge))
+                            continue;
+                        dfs(neighbor, [...path, neighbor], [...edges, edge], weight + edge.weight);
+                    }
+                }
+            }
+            visited.delete(current);
+        };
+        dfs(start, [start], [], 0);
+        // Sort paths by weight
+        paths.sort((a, b) => a.totalWeight - b.totalWeight);
+        return paths;
+    }
+    /**
+     * Bidirectional search for faster pathfinding
+     * Searches from both start and end simultaneously
+     */
+    bidirectionalSearch(start, end, options = {}) {
+        const { maxDepth = 10 } = options;
+        // Two search frontiers
+        const forwardVisited = new Map();
+        const backwardVisited = new Map();
+        forwardVisited.set(start, { path: [start], edges: [], weight: 0 });
+        backwardVisited.set(end, { path: [end], edges: [], weight: 0 });
+        const forwardQueue = [start];
+        const backwardQueue = [end];
+        let depth = 0;
+        while ((forwardQueue.length > 0 || backwardQueue.length > 0) &&
+            depth < maxDepth) {
+            // Expand forward frontier
+            const forwardNext = [];
+            for (const current of forwardQueue) {
+                const currentData = forwardVisited.get(current);
+                const neighbors = this.adjacencyList.get(current);
+                if (neighbors) {
+                    for (const [neighbor, edges] of neighbors) {
+                        if (forwardVisited.has(neighbor))
+                            continue;
+                        const bestEdge = edges[0]; // TODO: Select best edge
+                        forwardVisited.set(neighbor, {
+                            path: [...currentData.path, neighbor],
+                            edges: [...currentData.edges, bestEdge],
+                            weight: currentData.weight + bestEdge.weight
+                        });
+                        // Check if we met the backward search
+                        if (backwardVisited.has(neighbor)) {
+                            const forward = forwardVisited.get(neighbor);
+                            const backward = backwardVisited.get(neighbor);
+                            // Combine paths
+                            const fullPath = [
+                                ...forward.path,
+                                ...backward.path.slice(1).reverse()
+                            ];
+                            // Reverse backward edges and combine
+                            const backwardEdgesReversed = backward.edges
+                                .map(e => ({
+                                ...e,
+                                source: e.target,
+                                target: e.source
+                            }))
+                                .reverse();
+                            return {
+                                nodes: fullPath,
+                                edges: [...forward.edges, ...backwardEdgesReversed],
+                                totalWeight: forward.weight + backward.weight,
+                                length: fullPath.length - 1
+                            };
+                        }
+                        forwardNext.push(neighbor);
+                    }
+                }
+            }
+            // Expand backward frontier
+            const backwardNext = [];
+            for (const current of backwardQueue) {
+                const currentData = backwardVisited.get(current);
+                // For backward search, we need to look at incoming edges
+                for (const [nodeId, neighbors] of this.adjacencyList) {
+                    const edges = neighbors.get(current);
+                    if (!edges)
+                        continue;
+                    if (backwardVisited.has(nodeId))
+                        continue;
+                    const bestEdge = edges[0]; // TODO: Select best edge
+                    backwardVisited.set(nodeId, {
+                        path: [...currentData.path, nodeId],
+                        edges: [...currentData.edges, bestEdge],
+                        weight: currentData.weight + bestEdge.weight
+                    });
+                    // Check if we met the forward search
+                    if (forwardVisited.has(nodeId)) {
+                        const forward = forwardVisited.get(nodeId);
+                        const backward = backwardVisited.get(nodeId);
+                        // Combine paths
+                        const fullPath = [
+                            ...forward.path,
+                            ...backward.path.slice(1).reverse()
+                        ];
+                        // Reverse backward edges and combine
+                        const backwardEdgesReversed = backward.edges
+                            .map(e => ({
+                            ...e,
+                            source: e.target,
+                            target: e.source
+                        }))
+                            .reverse();
+                        return {
+                            nodes: fullPath,
+                            edges: [...forward.edges, ...backwardEdgesReversed],
+                            totalWeight: forward.weight + backward.weight,
+                            length: fullPath.length - 1
+                        };
+                    }
+                    backwardNext.push(nodeId);
+                }
+            }
+            forwardQueue.splice(0, forwardQueue.length, ...forwardNext);
+            backwardQueue.splice(0, backwardQueue.length, ...backwardNext);
+            depth++;
+        }
+        return null;
+    }
+    /**
+     * Multi-hop traversal (e.g., friends of friends)
+     * Returns all nodes within N hops
+     */
+    multiHopTraversal(start, hops, options = {}) {
+        const { relationshipTypes, nodeFilter, edgeFilter } = options;
+        const results = new Map();
+        const visited = new Set();
+        const queue = [
+            { node: start, distance: 0, path: [start], edges: [] }
+        ];
+        while (queue.length > 0) {
+            const { node, distance, path, edges } = queue.shift();
+            if (distance > hops)
+                continue;
+            // Record this node
+            if (!results.has(node)) {
+                results.set(node, { distance, paths: [] });
+            }
+            results.get(node).paths.push({
+                nodes: path,
+                edges,
+                totalWeight: edges.reduce((sum, e) => sum + e.weight, 0),
+                length: path.length - 1
+            });
+            if (distance === hops)
+                continue;
+            // Explore neighbors
+            const neighbors = this.adjacencyList.get(node);
+            if (neighbors) {
+                for (const [neighbor, edgeList] of neighbors) {
+                    // Apply node filter
+                    if (nodeFilter) {
+                        const neighborNode = this.nodes.get(neighbor);
+                        if (neighborNode && !nodeFilter(neighborNode))
+                            continue;
+                    }
+                    for (const edge of edgeList) {
+                        // Apply filters
+                        if (relationshipTypes && !relationshipTypes.includes(edge.type))
+                            continue;
+                        if (edgeFilter && !edgeFilter(edge))
+                            continue;
+                        queue.push({
+                            node: neighbor,
+                            distance: distance + 1,
+                            path: [...path, neighbor],
+                            edges: [...edges, edge]
+                        });
+                    }
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Find connected components using DFS
+     */
+    connectedComponents() {
+        const visited = new Set();
+        const components = [];
+        const dfs = (node, component) => {
+            visited.add(node);
+            component.add(node);
+            const neighbors = this.adjacencyList.get(node);
+            if (neighbors) {
+                for (const neighbor of neighbors.keys()) {
+                    if (!visited.has(neighbor)) {
+                        dfs(neighbor, component);
+                    }
+                }
+            }
+        };
+        for (const node of this.adjacencyList.keys()) {
+            if (!visited.has(node)) {
+                const component = new Set();
+                dfs(node, component);
+                components.push(component);
+            }
+        }
+        return components;
+    }
+    /**
+     * Calculate PageRank for all nodes
+     * Useful for ranking importance in the graph
+     */
+    pageRank(iterations = 100, damping = 0.85) {
+        const nodes = Array.from(this.adjacencyList.keys());
+        const n = nodes.length;
+        if (n === 0)
+            return new Map();
+        // Initialize ranks
+        const ranks = new Map();
+        for (const node of nodes) {
+            ranks.set(node, 1 / n);
+        }
+        // Calculate outgoing edge counts
+        const outDegree = new Map();
+        for (const [node, neighbors] of this.adjacencyList) {
+            let count = 0;
+            for (const edges of neighbors.values()) {
+                count += edges.length;
+            }
+            outDegree.set(node, count);
+        }
+        // Iterate PageRank algorithm
+        for (let i = 0; i < iterations; i++) {
+            const newRanks = new Map();
+            for (const node of nodes) {
+                let rank = (1 - damping) / n;
+                // Sum contributions from incoming edges
+                for (const [source, neighbors] of this.adjacencyList) {
+                    if (neighbors.has(node)) {
+                        const sourceRank = ranks.get(source) ?? 0;
+                        const sourceOutDegree = outDegree.get(source) ?? 1;
+                        rank += damping * (sourceRank / sourceOutDegree);
+                    }
+                }
+                newRanks.set(node, rank);
+            }
+            // Update ranks
+            for (const [node, rank] of newRanks) {
+                ranks.set(node, rank);
+            }
+        }
+        return ranks;
+    }
+    /**
+     * Clear the graph
+     */
+    clear() {
+        this.adjacencyList.clear();
+        this.nodes.clear();
+    }
+}
+//# sourceMappingURL=pathfinding.js.map
package/dist/hnsw/hnswIndex.d.ts
CHANGED

@@ -91,6 +91,19 @@ export declare class HNSWIndex {
      * Get the configuration
      */
     getConfig(): HNSWConfig;
+    /**
+     * Get all nodes at a specific level for clustering
+     * This enables O(n) clustering using HNSW's natural hierarchy
+     */
+    getNodesAtLevel(level: number): HNSWNoun[];
+    /**
+     * Get level statistics for understanding the hierarchy
+     */
+    getLevelStats(): Array<{
+        level: number;
+        nodeCount: number;
+        avgConnections: number;
+    }>;
     /**
      * Get index health metrics
      */
package/dist/hnsw/hnswIndex.js
CHANGED

@@ -427,6 +427,41 @@ export class HNSWIndex {
     getConfig() {
         return { ...this.config };
     }
+    /**
+     * Get all nodes at a specific level for clustering
+     * This enables O(n) clustering using HNSW's natural hierarchy
+     */
+    getNodesAtLevel(level) {
+        const nodesAtLevel = [];
+        for (const noun of this.nouns.values()) {
+            // A noun exists at level L if it has connections at that level or higher
+            if (noun.level >= level) {
+                nodesAtLevel.push(noun);
+            }
+        }
+        return nodesAtLevel;
+    }
+    /**
+     * Get level statistics for understanding the hierarchy
+     */
+    getLevelStats() {
+        const levelStats = new Map();
+        for (const noun of this.nouns.values()) {
+            for (let level = 0; level <= noun.level; level++) {
+                if (!levelStats.has(level)) {
+                    levelStats.set(level, { count: 0, totalConnections: 0 });
+                }
+                const stats = levelStats.get(level);
+                stats.count++;
+                stats.totalConnections += noun.connections.get(level)?.size || 0;
+            }
+        }
+        return Array.from(levelStats.entries()).map(([level, stats]) => ({
+            level,
+            nodeCount: stats.count,
+            avgConnections: stats.count > 0 ? stats.totalConnections / stats.count : 0
+        })).sort((a, b) => a.level - b.level);
+    }
     /**
      * Get index health metrics
      */
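getNodesAtLevel and getLevelStats expose the HNSW layer structure that was previously internal. A short sketch of reading them, assuming an already-populated index obtained elsewhere (HNSWIndex is re-exported from the package root per the index.d.ts hunk further down):

import type { HNSWIndex } from '@soulcraft/brainy'

// Prints one line per HNSW layer, then the nodes usable as coarse cluster seeds.
function reportHierarchy(index: HNSWIndex): void {
  for (const { level, nodeCount, avgConnections } of index.getLevelStats()) {
    console.log(`level ${level}: ${nodeCount} nodes, avg ${avgConnections.toFixed(1)} connections`)
  }
  const upper = index.getNodesAtLevel(1)   // nodes that exist at level 1 or higher
  console.log(`${upper.length} nodes at level >= 1`)
}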
package/dist/hnsw/hnswIndexOptimized.d.ts
CHANGED

@@ -95,6 +95,7 @@ export declare class HNSWIndexOptimized extends HNSWIndex {
     private memoryUsage;
     private vectorCount;
     private memoryUpdateLock;
+    private unifiedCache;
     constructor(config: Partial<HNSWOptimizedConfig> | undefined, distanceFunction: DistanceFunction, storage?: StorageAdapter | null);
     /**
      * Thread-safe method to update memory usage
package/dist/hnsw/hnswIndexOptimized.js
CHANGED

@@ -4,6 +4,7 @@
  * Uses product quantization for dimensionality reduction and disk-based storage when needed
  */
 import { HNSWIndex } from './hnswIndex.js';
+import { getGlobalCache } from '../utils/unifiedCache.js';
 // Default configuration for the optimized HNSW index
 const DEFAULT_OPTIMIZED_CONFIG = {
     M: 16,
@@ -232,6 +233,8 @@ export class HNSWIndexOptimized extends HNSWIndex {
         }
         // Set disk-based index flag
         this.useDiskBasedIndex = this.optimizedConfig.useDiskBasedIndex || false;
+        // Get global unified cache for coordinated memory management
+        this.unifiedCache = getGlobalCache();
     }
     /**
      * Thread-safe method to update memory usage
package/dist/index.d.ts
CHANGED

@@ -30,30 +30,28 @@ import { OPFSStorage, MemoryStorage, R2Storage, S3CompatibleStorage, createStora
 export { OPFSStorage, MemoryStorage, R2Storage, S3CompatibleStorage, createStorage };
 export { FileSystemStorage } from './storage/adapters/fileSystemStorage.js';
 import { Pipeline, pipeline, augmentationPipeline, ExecutionMode, PipelineOptions, PipelineResult, createPipeline, createStreamingPipeline, StreamlinedExecutionMode, StreamlinedPipelineOptions, StreamlinedPipelineResult } from './pipeline.js';
-
-export {
-export type { PipelineOptions, PipelineResult, StreamlinedPipelineOptions, StreamlinedPipelineResult, AugmentationOptions };
+export { Pipeline, pipeline, augmentationPipeline, ExecutionMode, createPipeline, createStreamingPipeline, StreamlinedExecutionMode, };
+export type { PipelineOptions, PipelineResult, StreamlinedPipelineOptions, StreamlinedPipelineResult };
 import { availableAugmentations, registerAugmentation, initializeAugmentationPipeline, setAugmentationEnabled, getAugmentationsByType } from './augmentationRegistry.js';
 export { availableAugmentations, registerAugmentation, initializeAugmentationPipeline, setAugmentationEnabled, getAugmentationsByType };
 import { loadAugmentationsFromModules, createAugmentationRegistryPlugin, createAugmentationRegistryRollupPlugin } from './augmentationRegistryLoader.js';
 import type { AugmentationRegistryLoaderOptions, AugmentationLoadResult } from './augmentationRegistryLoader.js';
 export { loadAugmentationsFromModules, createAugmentationRegistryPlugin, createAugmentationRegistryRollupPlugin };
 export type { AugmentationRegistryLoaderOptions, AugmentationLoadResult };
-import {
-import {
+import { StorageAugmentation, DynamicStorageAugmentation, createStorageAugmentationFromConfig } from './augmentations/storageAugmentation.js';
+import { MemoryStorageAugmentation, FileSystemStorageAugmentation, OPFSStorageAugmentation, S3StorageAugmentation, R2StorageAugmentation, GCSStorageAugmentation, createAutoStorageAugmentation } from './augmentations/storageAugmentations.js';
+import { WebSocketConduitAugmentation } from './augmentations/conduitAugmentations.js';
 import { ServerSearchConduitAugmentation, ServerSearchActivationAugmentation, createServerSearchAugmentations } from './augmentations/serverSearchAugmentations.js';
-export {
+export { StorageAugmentation, DynamicStorageAugmentation, MemoryStorageAugmentation, FileSystemStorageAugmentation, OPFSStorageAugmentation, S3StorageAugmentation, R2StorageAugmentation, GCSStorageAugmentation, createAutoStorageAugmentation, createStorageAugmentationFromConfig };
+export { WebSocketConduitAugmentation, ServerSearchConduitAugmentation, ServerSearchActivationAugmentation, createServerSearchAugmentations };
 import type { Vector, VectorDocument, SearchResult, DistanceFunction, EmbeddingFunction, EmbeddingModel, HNSWNoun, HNSWVerb, HNSWConfig, StorageAdapter } from './coreTypes.js';
 import { HNSWIndex } from './hnsw/hnswIndex.js';
 import { HNSWIndexOptimized, HNSWOptimizedConfig } from './hnsw/hnswIndexOptimized.js';
 export { HNSWIndex, HNSWIndexOptimized };
 export type { Vector, VectorDocument, SearchResult, DistanceFunction, EmbeddingFunction, EmbeddingModel, HNSWNoun, HNSWVerb, HNSWConfig, HNSWOptimizedConfig, StorageAdapter };
-import type {
-import { AugmentationType, BrainyAugmentations } from './types/augmentations.js';
+import type { AugmentationResponse, BrainyAugmentation, BaseAugmentation, AugmentationContext } from './types/augmentations.js';
 export { AugmentationManager, type AugmentationInfo } from './augmentationManager.js';
-export type {
-export { AugmentationType, BrainyAugmentations, ISenseAugmentation, IConduitAugmentation, ICognitionAugmentation, IMemoryAugmentation, IPerceptionAugmentation, IDialogAugmentation, IActivationAugmentation };
-export type { IWebSocketCognitionAugmentation, IWebSocketSenseAugmentation, IWebSocketPerceptionAugmentation, IWebSocketActivationAugmentation, IWebSocketDialogAugmentation, IWebSocketConduitAugmentation, IWebSocketMemoryAugmentation } from './types/augmentations.js';
+export type { AugmentationResponse, BrainyAugmentation, BaseAugmentation, AugmentationContext };
 import type { GraphNoun, GraphVerb, EmbeddedGraphVerb, Person, Location, Thing, Event, Concept, Content, Collection, Organization, Document, Media, File, Message, Dataset, Product, Service, User, Task, Project, Process, State, Role, Topic, Language, Currency, Measurement } from './types/graphTypes.js';
 import { NounType, VerbType } from './types/graphTypes.js';
 export type { GraphNoun, GraphVerb, EmbeddedGraphVerb, Person, Location, Thing, Event, Concept, Content, Collection, Organization, Document, Media, File, Message, Dataset, Product, Service, User, Task, Project, Process, State, Role, Topic, Language, Currency, Measurement };