@soulcraft/brainy 3.20.1 → 3.20.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/bin/brainy-interactive.js +2 -2
- package/dist/brainy.d.ts +1 -1
- package/dist/mcp/brainyMCPAdapter.d.ts +1 -1
- package/dist/mcp/brainyMCPService.d.ts +1 -1
- package/dist/neural/embeddedPatterns.d.ts +1 -1
- package/dist/neural/embeddedPatterns.js +1 -1
- package/dist/shared/default-augmentations.d.ts +1 -1
- package/dist/types/{brainyDataInterface.js → brainyInterface.js} +1 -1
- package/dist/vfs/VirtualFileSystem.d.ts +1 -0
- package/dist/vfs/VirtualFileSystem.js +108 -69
- package/package.json +1 -1
- package/dist/augmentationFactory.d.ts +0 -86
- package/dist/augmentationFactory.js +0 -342
- package/dist/augmentationRegistry.d.ts +0 -38
- package/dist/augmentationRegistry.js +0 -54
- package/dist/augmentationRegistryLoader.d.ts +0 -146
- package/dist/augmentationRegistryLoader.js +0 -213
- package/dist/augmentations/KnowledgeAugmentation.d.ts +0 -40
- package/dist/augmentations/KnowledgeAugmentation.js +0 -251
- package/dist/augmentations/intelligentVerbScoring.d.ts +0 -158
- package/dist/augmentations/intelligentVerbScoring.js +0 -377
- package/dist/augmentations/marketplace/AugmentationMarketplace.d.ts +0 -168
- package/dist/augmentations/marketplace/AugmentationMarketplace.js +0 -329
- package/dist/augmentations/marketplace/cli.d.ts +0 -47
- package/dist/augmentations/marketplace/cli.js +0 -265
- package/dist/augmentations/memoryAugmentations.d.ts +0 -72
- package/dist/augmentations/memoryAugmentations.js +0 -280
- package/dist/augmentations/serverSearchAugmentations.d.ts +0 -190
- package/dist/augmentations/serverSearchAugmentations.js +0 -586
- package/dist/brainy-unified.d.ts +0 -106
- package/dist/brainy-unified.js +0 -327
- package/dist/brainyData.d.ts +0 -1832
- package/dist/brainyData.js +0 -6443
- package/dist/brainyDataV3.d.ts +0 -186
- package/dist/brainyDataV3.js +0 -337
- package/dist/config/distributedPresets-new.d.ts +0 -118
- package/dist/config/distributedPresets-new.js +0 -318
- package/dist/config/modelPrecisionManager.d.ts +0 -42
- package/dist/config/modelPrecisionManager.js +0 -98
- package/dist/connectors/interfaces/IConnector.d.ts +0 -143
- package/dist/connectors/interfaces/IConnector.js +0 -8
- package/dist/demo.d.ts +0 -106
- package/dist/demo.js +0 -201
- package/dist/embeddings/SingletonModelManager.d.ts +0 -95
- package/dist/embeddings/SingletonModelManager.js +0 -220
- package/dist/embeddings/lightweight-embedder.d.ts +0 -22
- package/dist/embeddings/lightweight-embedder.js +0 -128
- package/dist/embeddings/model-manager.d.ts +0 -39
- package/dist/embeddings/model-manager.js +0 -245
- package/dist/embeddings/universal-memory-manager.d.ts +0 -38
- package/dist/embeddings/universal-memory-manager.js +0 -166
- package/dist/embeddings/worker-embedding.d.ts +0 -7
- package/dist/embeddings/worker-embedding.js +0 -73
- package/dist/embeddings/worker-manager.d.ts +0 -28
- package/dist/embeddings/worker-manager.js +0 -162
- package/dist/examples/basicUsage.d.ts +0 -4
- package/dist/examples/basicUsage.js +0 -121
- package/dist/indices/fieldIndex.d.ts +0 -76
- package/dist/indices/fieldIndex.js +0 -357
- package/dist/mcp/brainyMCPBroadcast.d.ts +0 -82
- package/dist/mcp/brainyMCPBroadcast.js +0 -303
- package/dist/mcp/brainyMCPClient.d.ts +0 -92
- package/dist/mcp/brainyMCPClient.js +0 -258
- package/dist/scripts/precomputePatternEmbeddings.d.ts +0 -19
- package/dist/scripts/precomputePatternEmbeddings.js +0 -100
- package/dist/utils/cacheAutoConfig.d.ts +0 -63
- package/dist/utils/cacheAutoConfig.js +0 -261
- package/dist/utils/hybridModelManager.d.ts +0 -64
- package/dist/utils/hybridModelManager.js +0 -95
- package/dist/utils/statistics.d.ts +0 -28
- package/dist/utils/statistics.js +0 -25
- package/dist/vfs/ConceptSystem.d.ts +0 -203
- package/dist/vfs/ConceptSystem.js +0 -545
- package/dist/vfs/EntityManager.d.ts +0 -75
- package/dist/vfs/EntityManager.js +0 -216
- package/dist/vfs/EventRecorder.d.ts +0 -84
- package/dist/vfs/EventRecorder.js +0 -269
- package/dist/vfs/GitBridge.d.ts +0 -167
- package/dist/vfs/GitBridge.js +0 -537
- package/dist/vfs/KnowledgeAugmentation.d.ts +0 -104
- package/dist/vfs/KnowledgeAugmentation.js +0 -146
- package/dist/vfs/KnowledgeLayer.d.ts +0 -35
- package/dist/vfs/KnowledgeLayer.js +0 -443
- package/dist/vfs/PersistentEntitySystem.d.ts +0 -165
- package/dist/vfs/PersistentEntitySystem.js +0 -503
- package/dist/vfs/SemanticVersioning.d.ts +0 -105
- package/dist/vfs/SemanticVersioning.js +0 -309
- package/dist/vfs/VFSHealthCheck.d.ts +0 -78
- package/dist/vfs/VFSHealthCheck.js +0 -299
- package/dist/types/{brainyDataInterface.d.ts → brainyInterface.d.ts} +0 -0
package/dist/embeddings/worker-embedding.js
@@ -1,73 +0,0 @@
-/**
- * Worker process for embeddings - Workaround for transformers.js memory leak
- *
- * This worker can be killed and restarted to release memory completely.
- * Based on 2024 research: dispose() doesn't fully free memory in transformers.js
- */
-import { TransformerEmbedding } from '../utils/embedding.js';
-import { parentPort } from 'worker_threads';
-import { getModelPrecision } from '../config/modelPrecisionManager.js';
-let model = null;
-let requestCount = 0;
-const MAX_REQUESTS = 100; // Restart worker after 100 requests to prevent memory leak
-async function initModel() {
-    if (!model) {
-        model = new TransformerEmbedding({
-            verbose: false,
-            precision: getModelPrecision(), // Use centrally managed precision
-            localFilesOnly: process.env.BRAINY_ALLOW_REMOTE_MODELS !== 'true'
-        });
-        await model.init();
-        console.log('🔧 Worker: Model initialized');
-    }
-}
-if (parentPort) {
-    parentPort.on('message', async (message) => {
-        try {
-            const { id, type, data } = message;
-            switch (type) {
-                case 'embed':
-                    await initModel();
-                    const embeddings = await model.embed(data);
-                    parentPort.postMessage({ id, success: true, result: embeddings });
-                    requestCount++;
-                    // Proactively restart worker to prevent memory leak
-                    if (requestCount >= MAX_REQUESTS) {
-                        console.log(`🔄 Worker: Restarting after ${requestCount} requests (memory leak prevention)`);
-                        process.exit(0); // Parent will restart us
-                    }
-                    break;
-                case 'dispose':
-                    // SingletonModelManager persists - just acknowledge
-                    console.log('ℹ️ Worker: Singleton model persists');
-                    parentPort.postMessage({ id, success: true });
-                    break;
-                case 'restart':
-                    // Force restart to clear memory
-                    console.log('🔄 Worker: Force restart requested');
-                    process.exit(0);
-                    break;
-                default:
-                    parentPort.postMessage({
-                        id,
-                        success: false,
-                        error: `Unknown message type: ${type}`
-                    });
-            }
-        }
-        catch (error) {
-            parentPort.postMessage({
-                id: message.id,
-                success: false,
-                error: error instanceof Error ? error.message : String(error)
-            });
-        }
-    });
-    console.log('🚀 Embedding worker started');
-    parentPort.postMessage({ type: 'ready' });
-}
-else {
-    console.error('❌ Worker: parentPort is null, cannot communicate with main thread');
-    process.exit(1);
-}
-//# sourceMappingURL=worker-embedding.js.map

package/dist/embeddings/worker-manager.d.ts
@@ -1,28 +0,0 @@
-/**
- * Worker Manager for Memory-Safe Embeddings
- *
- * Manages worker lifecycle to prevent transformers.js memory leaks
- * Workers are automatically restarted when memory usage grows too high
- */
-import { Vector, EmbeddingFunction } from '../coreTypes.js';
-export declare class WorkerEmbeddingManager {
-    private worker;
-    private requestId;
-    private pendingRequests;
-    private isRestarting;
-    private totalRequests;
-    getEmbeddingFunction(): Promise<EmbeddingFunction>;
-    embed(data: string | string[]): Promise<Vector>;
-    private ensureWorker;
-    private createWorker;
-    dispose(): Promise<void>;
-    forceRestart(): Promise<void>;
-    getStats(): {
-        totalRequests: number;
-        pendingRequests: number;
-        workerActive: boolean;
-        isRestarting: boolean;
-    };
-}
-export declare const workerEmbeddingManager: WorkerEmbeddingManager;
-export declare function getWorkerEmbeddingFunction(): Promise<EmbeddingFunction>;

package/dist/embeddings/worker-manager.js
@@ -1,162 +0,0 @@
-/**
- * Worker Manager for Memory-Safe Embeddings
- *
- * Manages worker lifecycle to prevent transformers.js memory leaks
- * Workers are automatically restarted when memory usage grows too high
- */
-import { Worker } from 'worker_threads';
-import { join, dirname } from 'path';
-import { fileURLToPath } from 'url';
-// Get current directory for worker path
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = dirname(__filename);
-export class WorkerEmbeddingManager {
-    constructor() {
-        this.worker = null;
-        this.requestId = 0;
-        this.pendingRequests = new Map();
-        this.isRestarting = false;
-        this.totalRequests = 0;
-    }
-    async getEmbeddingFunction() {
-        return async (data) => {
-            return this.embed(data);
-        };
-    }
-    async embed(data) {
-        await this.ensureWorker();
-        const id = ++this.requestId;
-        this.totalRequests++;
-        return new Promise((resolve, reject) => {
-            const timeout = setTimeout(() => {
-                this.pendingRequests.delete(id);
-                reject(new Error('Embedding request timed out (120s)'));
-            }, 120000);
-            this.pendingRequests.set(id, { resolve, reject, timeout });
-            this.worker.postMessage({
-                id,
-                type: 'embed',
-                data
-            });
-        });
-    }
-    async ensureWorker() {
-        if (this.worker && !this.isRestarting) {
-            return;
-        }
-        if (this.isRestarting) {
-            // Wait for restart to complete
-            return new Promise((resolve) => {
-                const checkRestart = () => {
-                    if (!this.isRestarting) {
-                        resolve();
-                    }
-                    else {
-                        setTimeout(checkRestart, 100);
-                    }
-                };
-                checkRestart();
-            });
-        }
-        await this.createWorker();
-    }
-    async createWorker() {
-        this.isRestarting = true;
-        // Kill existing worker if any
-        if (this.worker) {
-            this.worker.terminate();
-            this.worker = null;
-        }
-        // Clear pending requests
-        for (const [id, request] of this.pendingRequests) {
-            if (request.timeout) {
-                clearTimeout(request.timeout);
-            }
-            request.reject(new Error('Worker restarted'));
-        }
-        this.pendingRequests.clear();
-        console.log('🔄 Starting embedding worker...');
-        // Create new worker
-        const workerPath = join(__dirname, 'worker-embedding.js');
-        this.worker = new Worker(workerPath);
-        // Handle worker messages
-        this.worker.on('message', (message) => {
-            if (message.type === 'ready') {
-                console.log('✅ Embedding worker ready');
-                this.isRestarting = false;
-                return;
-            }
-            const { id, success, result, error } = message;
-            const request = this.pendingRequests.get(id);
-            if (request) {
-                if (request.timeout) {
-                    clearTimeout(request.timeout);
-                }
-                this.pendingRequests.delete(id);
-                if (success) {
-                    request.resolve(result);
-                }
-                else {
-                    request.reject(new Error(error));
-                }
-            }
-        });
-        // Handle worker exit
-        this.worker.on('exit', (code) => {
-            console.log(`🔄 Embedding worker exited with code ${code}`);
-            if (code !== 0 && !this.isRestarting) {
-                console.log('🔄 Worker crashed, will restart on next request');
-            }
-            this.worker = null;
-        });
-        // Wait for worker to be ready
-        return new Promise((resolve, reject) => {
-            const timeout = setTimeout(() => {
-                reject(new Error('Worker startup timeout'));
-            }, 30000);
-            const checkReady = () => {
-                if (!this.isRestarting) {
-                    clearTimeout(timeout);
-                    resolve();
-                }
-                else {
-                    setTimeout(checkReady, 100);
-                }
-            };
-            checkReady();
-        });
-    }
-    async dispose() {
-        if (this.worker) {
-            this.worker.terminate();
-            this.worker = null;
-        }
-        // Clear pending requests
-        for (const [id, request] of this.pendingRequests) {
-            if (request.timeout) {
-                clearTimeout(request.timeout);
-            }
-            request.reject(new Error('Manager disposed'));
-        }
-        this.pendingRequests.clear();
-    }
-    async forceRestart() {
-        console.log('🔄 Force restarting embedding worker (memory cleanup)');
-        await this.createWorker();
-    }
-    getStats() {
-        return {
-            totalRequests: this.totalRequests,
-            pendingRequests: this.pendingRequests.size,
-            workerActive: this.worker !== null,
-            isRestarting: this.isRestarting
-        };
-    }
-}
-// Export singleton instance
-export const workerEmbeddingManager = new WorkerEmbeddingManager();
-// Export convenience function
-export async function getWorkerEmbeddingFunction() {
-    return workerEmbeddingManager.getEmbeddingFunction();
-}
-//# sourceMappingURL=worker-manager.js.map

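For context, a minimal sketch of how the removed worker-based embedding API could have been driven in the 3.20.1 build, using only the signatures visible in the deleted worker-manager.d.ts above; the import path and the assumption that Vector is a plain numeric array are illustrative, not confirmed by this diff.

// Hypothetical consumer of the removed worker-based embedder (3.20.1 dist/ layout assumed).
import { getWorkerEmbeddingFunction, workerEmbeddingManager } from './embeddings/worker-manager.js';

async function embedSample(): Promise<void> {
    // Resolve an EmbeddingFunction that proxies requests to the worker thread.
    const embed = await getWorkerEmbeddingFunction();

    // Each call posts an 'embed' message; the worker exits after 100 requests and the manager respawns it.
    const vector = await embed('memory-safe embeddings');
    console.log('embedding length:', vector.length); // assumes Vector is a numeric array

    // Inspect lifecycle counters, then terminate the worker thread explicitly.
    console.log(workerEmbeddingManager.getStats());
    await workerEmbeddingManager.dispose();
}

embedSample().catch(console.error);
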
package/dist/examples/basicUsage.js
@@ -1,121 +0,0 @@
-/**
- * Basic usage example for the Soulcraft Brainy database
- */
-import { Brainy } from '../brainy.js';
-// Example data - word embeddings
-const wordEmbeddings = {
-    cat: [0.2, 0.3, 0.4, 0.1],
-    dog: [0.3, 0.2, 0.4, 0.2],
-    fish: [0.1, 0.1, 0.8, 0.2],
-    bird: [0.1, 0.4, 0.2, 0.5],
-    tiger: [0.3, 0.4, 0.3, 0.1],
-    lion: [0.4, 0.3, 0.2, 0.1],
-    shark: [0.2, 0.1, 0.7, 0.3],
-    eagle: [0.2, 0.5, 0.1, 0.4]
-};
-// Example metadata
-const metadata = {
-    cat: { type: 'mammal', domesticated: true },
-    dog: { type: 'mammal', domesticated: true },
-    fish: { type: 'fish', domesticated: false },
-    bird: { type: 'bird', domesticated: false },
-    tiger: { type: 'mammal', domesticated: false },
-    lion: { type: 'mammal', domesticated: false },
-    shark: { type: 'fish', domesticated: false },
-    eagle: { type: 'bird', domesticated: false }
-};
-/**
- * Run the example
- */
-async function runExample() {
-    console.log('Initializing vector database...');
-    // Create a new vector database
-    const db = new Brainy();
-    await db.init();
-    console.log('Adding vectors to the database...');
-    // Add vectors to the database
-    const ids = {};
-    for (const [word, vector] of Object.entries(wordEmbeddings)) {
-        // Determine noun type based on the metadata
-        const meta = metadata[word];
-        const nounType = meta.type === 'mammal' || meta.type === 'bird' || meta.type === 'fish' ? 'Thing' : 'Content';
-        ids[word] = await db.addNoun(vector, nounType, meta);
-        console.log(`Added "${word}" with ID: ${ids[word]}`);
-    }
-    console.log('\nDatabase size:', db.size());
-    // Search for similar vectors
-    console.log('\nSearching for vectors similar to "cat"...');
-    const catResults = await db.search(wordEmbeddings['cat'], { limit: 3 });
-    console.log('Results:');
-    for (const result of catResults) {
-        const word = Object.entries(ids).find(([_, id]) => id === result.id)?.[0] || 'unknown';
-        console.log(`- ${word} (score: ${result.score.toFixed(4)}, metadata:`, result.metadata, ')');
-    }
-    // Search for similar vectors
-    console.log('\nSearching for vectors similar to "fish"...');
-    const fishResults = await db.search(wordEmbeddings['fish'], { limit: 3 });
-    console.log('Results:');
-    for (const result of fishResults) {
-        const word = Object.entries(ids).find(([_, id]) => id === result.id)?.[0] || 'unknown';
-        console.log(`- ${word} (score: ${result.score.toFixed(4)}, metadata:`, result.metadata, ')');
-    }
-    // Update metadata
-    console.log('\nUpdating metadata for "bird"...');
-    await db.updateNounMetadata(ids['bird'], {
-        ...metadata['bird'],
-        notes: 'Can fly'
-    });
-    // Get the updated document
-    const birdDoc = await db.getNoun(ids['bird']);
-    console.log('Updated bird document:', birdDoc);
-    // Delete a vector
-    console.log('\nDeleting "shark"...');
-    await db.deleteNoun(ids['shark']);
-    console.log('Database size after deletion:', db.size());
-    // Search again to verify shark is gone
-    console.log('\nSearching for vectors similar to "fish" after deletion...');
-    const fishResultsAfterDeletion = await db.search(wordEmbeddings['fish'], { limit: 3 });
-    console.log('Results:');
-    for (const result of fishResultsAfterDeletion) {
-        const word = Object.entries(ids).find(([_, id]) => id === result.id)?.[0] || 'unknown';
-        console.log(`- ${word} (score: ${result.score.toFixed(4)}, metadata:`, result.metadata, ')');
-    }
-    console.log('\nExample completed successfully!');
-}
-// Check if we're in a browser or Node.js environment
-if (typeof window !== 'undefined') {
-    // Browser environment
-    document.addEventListener('DOMContentLoaded', () => {
-        const button = document.createElement('button');
-        button.textContent = 'Run Brainy Example';
-        button.addEventListener('click', async () => {
-            const output = document.createElement('pre');
-            document.body.appendChild(output);
-            // Redirect console.log to the output element
-            const originalLog = console.log;
-            console.log = (...args) => {
-                originalLog(...args);
-                output.textContent +=
-                    args
-                        .map((arg) => typeof arg === 'object' ? JSON.stringify(arg, null, 2) : arg)
-                        .join(' ') + '\n';
-            };
-            try {
-                await runExample();
-            }
-            catch (error) {
-                console.error('Error running example:', error);
-            }
-            // Restore console.log
-            console.log = originalLog;
-        });
-        document.body.appendChild(button);
-    });
-}
-else {
-    // Node.js environment
-    runExample().catch((error) => {
-        console.error('Error running example:', error);
-    });
-}
-//# sourceMappingURL=basicUsage.js.map

package/dist/indices/fieldIndex.d.ts
@@ -1,76 +0,0 @@
-/**
- * Field Index for efficient field-based queries
- * Provides O(log n) lookups for field values and range queries
- */
-interface RangeQueryOptions {
-    field: string;
-    min?: any;
-    max?: any;
-    includeMin?: boolean;
-    includeMax?: boolean;
-}
-export declare class FieldIndex {
-    private indices;
-    private sortedIndices;
-    private indexedFields;
-    /**
-     * Add a document to the field index
-     */
-    add(id: string, metadata: Record<string, any>): void;
-    /**
-     * Remove a document from the field index
-     */
-    remove(id: string, metadata: Record<string, any>): void;
-    /**
-     * Query for exact field value match
-     * O(1) hash lookup
-     */
-    queryExact(field: string, value: any): string[];
-    /**
-     * Query for multiple values (IN operator)
-     * O(k) where k is number of values
-     */
-    queryIn(field: string, values: any[]): string[];
-    /**
-     * Query for range of values
-     * O(log n + m) where m is number of results
-     */
-    queryRange(options: RangeQueryOptions): string[];
-    /**
-     * Query with complex where clause
-     */
-    query(where: Record<string, any>): string[];
-    /**
-     * Mark sorted index as needing rebuild
-     */
-    private markSortedIndexDirty;
-    /**
-     * Ensure sorted index is up to date for a field
-     */
-    private ensureSortedIndex;
-    /**
-     * Binary search for start position (inclusive)
-     */
-    private binarySearch;
-    /**
-     * Binary search for end position (inclusive)
-     */
-    private binarySearchEnd;
-    /**
-     * Debug method to inspect index contents
-     */
-    debugIndex(field?: string): any;
-    /**
-     * Get statistics about the index
-     */
-    getStats(): {
-        indexedFields: number;
-        totalValues: number;
-        totalMappings: number;
-    };
-    /**
-     * Clear all indices
-     */
-    clear(): void;
-}
-export {};