@ruvector/wasm 0.1.16 → 0.1.29
This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/package.json +85 -30
- package/README.md +0 -969
- package/pkg/README.md +0 -969
- package/src/indexeddb.js +0 -355
- package/src/worker-pool.js +0 -254
- package/src/worker.js +0 -184
package/src/indexeddb.js
DELETED
@@ -1,355 +0,0 @@

/**
 * IndexedDB Persistence Layer for Ruvector
 *
 * Provides:
 * - Save/load database state to IndexedDB
 * - Batch operations for performance
 * - Progressive loading with pagination
 * - LRU cache for hot vectors
 */

const DB_NAME = 'ruvector_storage';
const DB_VERSION = 1;
const VECTOR_STORE = 'vectors';
const META_STORE = 'metadata';

/**
 * LRU Cache for hot vectors
 */
class LRUCache {
  constructor(capacity = 1000) {
    this.capacity = capacity;
    this.cache = new Map();
  }

  get(key) {
    if (!this.cache.has(key)) return null;

    // Move to end (most recently used)
    const value = this.cache.get(key);
    this.cache.delete(key);
    this.cache.set(key, value);

    return value;
  }

  set(key, value) {
    // Remove if exists
    if (this.cache.has(key)) {
      this.cache.delete(key);
    }

    // Add to end
    this.cache.set(key, value);

    // Evict oldest if over capacity
    if (this.cache.size > this.capacity) {
      const firstKey = this.cache.keys().next().value;
      this.cache.delete(firstKey);
    }
  }

  has(key) {
    return this.cache.has(key);
  }

  clear() {
    this.cache.clear();
  }

  get size() {
    return this.cache.size;
  }
}

/**
 * IndexedDB Persistence Manager
 */
export class IndexedDBPersistence {
  constructor(dbName = null) {
    this.dbName = dbName || DB_NAME;
    this.db = null;
    this.cache = new LRUCache(1000);
  }

  /**
   * Open IndexedDB connection
   */
  async open() {
    return new Promise((resolve, reject) => {
      const request = indexedDB.open(this.dbName, DB_VERSION);

      request.onerror = () => reject(request.error);
      request.onsuccess = () => {
        this.db = request.result;
        resolve(this.db);
      };

      request.onupgradeneeded = (event) => {
        const db = event.target.result;

        // Create object stores if they don't exist
        if (!db.objectStoreNames.contains(VECTOR_STORE)) {
          const vectorStore = db.createObjectStore(VECTOR_STORE, { keyPath: 'id' });
          vectorStore.createIndex('timestamp', 'timestamp', { unique: false });
        }

        if (!db.objectStoreNames.contains(META_STORE)) {
          db.createObjectStore(META_STORE, { keyPath: 'key' });
        }
      };
    });
  }

  /**
   * Save a single vector
   */
  async saveVector(id, vector, metadata = null) {
    if (!this.db) await this.open();

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([VECTOR_STORE], 'readwrite');
      const store = transaction.objectStore(VECTOR_STORE);

      const data = {
        id,
        vector: Array.from(vector), // Convert Float32Array to regular array
        metadata,
        timestamp: Date.now()
      };

      const request = store.put(data);

      request.onsuccess = () => {
        this.cache.set(id, data);
        resolve(id);
      };
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Save vectors in batch (more efficient)
   */
  async saveBatch(entries, batchSize = 100) {
    if (!this.db) await this.open();

    const chunks = [];
    for (let i = 0; i < entries.length; i += batchSize) {
      chunks.push(entries.slice(i, i + batchSize));
    }

    for (const chunk of chunks) {
      await new Promise((resolve, reject) => {
        const transaction = this.db.transaction([VECTOR_STORE], 'readwrite');
        const store = transaction.objectStore(VECTOR_STORE);

        for (const entry of chunk) {
          const data = {
            id: entry.id,
            vector: Array.from(entry.vector),
            metadata: entry.metadata,
            timestamp: Date.now()
          };

          store.put(data);
          this.cache.set(entry.id, data);
        }

        transaction.oncomplete = () => resolve();
        transaction.onerror = () => reject(transaction.error);
      });
    }

    return entries.length;
  }

  /**
   * Load a single vector by ID
   */
  async loadVector(id) {
    // Check cache first
    if (this.cache.has(id)) {
      return this.cache.get(id);
    }

    if (!this.db) await this.open();

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([VECTOR_STORE], 'readonly');
      const store = transaction.objectStore(VECTOR_STORE);
      const request = store.get(id);

      request.onsuccess = () => {
        const data = request.result;
        if (data) {
          // Convert array back to Float32Array
          data.vector = new Float32Array(data.vector);
          this.cache.set(id, data);
        }
        resolve(data);
      };
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Load all vectors (with progressive loading)
   */
  async loadAll(onProgress = null, batchSize = 100) {
    if (!this.db) await this.open();

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([VECTOR_STORE], 'readonly');
      const store = transaction.objectStore(VECTOR_STORE);
      const request = store.openCursor();

      const vectors = [];
      let count = 0;

      request.onsuccess = (event) => {
        const cursor = event.target.result;

        if (cursor) {
          const data = cursor.value;
          data.vector = new Float32Array(data.vector);
          vectors.push(data);
          count++;

          // Cache hot vectors (first 1000)
          if (count <= 1000) {
            this.cache.set(data.id, data);
          }

          // Report progress every batch
          if (onProgress && count % batchSize === 0) {
            onProgress({
              loaded: count,
              vectors: [...vectors]
            });
            vectors.length = 0; // Clear batch
          }

          cursor.continue();
        } else {
          // Done
          if (onProgress && vectors.length > 0) {
            onProgress({
              loaded: count,
              vectors: vectors,
              complete: true
            });
          }
          resolve({ count, complete: true });
        }
      };

      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Delete a vector by ID
   */
  async deleteVector(id) {
    if (!this.db) await this.open();

    this.cache.delete(id);

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([VECTOR_STORE], 'readwrite');
      const store = transaction.objectStore(VECTOR_STORE);
      const request = store.delete(id);

      request.onsuccess = () => resolve(true);
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Clear all vectors
   */
  async clear() {
    if (!this.db) await this.open();

    this.cache.clear();

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([VECTOR_STORE], 'readwrite');
      const store = transaction.objectStore(VECTOR_STORE);
      const request = store.clear();

      request.onsuccess = () => resolve();
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Get database statistics
   */
  async getStats() {
    if (!this.db) await this.open();

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([VECTOR_STORE], 'readonly');
      const store = transaction.objectStore(VECTOR_STORE);
      const request = store.count();

      request.onsuccess = () => {
        resolve({
          totalVectors: request.result,
          cacheSize: this.cache.size,
          cacheHitRate: this.cache.size / request.result
        });
      };
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Save metadata
   */
  async saveMeta(key, value) {
    if (!this.db) await this.open();

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([META_STORE], 'readwrite');
      const store = transaction.objectStore(META_STORE);
      const request = store.put({ key, value });

      request.onsuccess = () => resolve();
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Load metadata
   */
  async loadMeta(key) {
    if (!this.db) await this.open();

    return new Promise((resolve, reject) => {
      const transaction = this.db.transaction([META_STORE], 'readonly');
      const store = transaction.objectStore(META_STORE);
      const request = store.get(key);

      request.onsuccess = () => {
        const data = request.result;
        resolve(data ? data.value : null);
      };
      request.onerror = () => reject(request.error);
    });
  }

  /**
   * Close the database connection
   */
  close() {
    if (this.db) {
      this.db.close();
      this.db = null;
    }
  }
}

export default IndexedDBPersistence;
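For context, a minimal sketch of how the removed persistence layer could have been driven from application code. The class and method names (saveBatch, loadAll, getStats, close) come from the deleted file above; the import path, IDs, vectors, and metadata values are illustrative assumptions, not part of the package.

// Minimal usage sketch (assumes the deleted indexeddb.js sits alongside this module, browser context).
import IndexedDBPersistence from './indexeddb.js';

async function persistAndReload() {
  const store = new IndexedDBPersistence(); // defaults to the 'ruvector_storage' database

  // Batch-save a few vectors; each entry mirrors the { id, vector, metadata } shape saveBatch() iterates over.
  await store.saveBatch([
    { id: 'a', vector: new Float32Array([0.1, 0.2, 0.3]), metadata: { label: 'demo' } },
    { id: 'b', vector: new Float32Array([0.4, 0.5, 0.6]), metadata: null }
  ]);

  // Progressive reload: the callback receives { loaded, vectors } once per batch of 100.
  await store.loadAll(({ loaded }) => console.log(`loaded ${loaded} vectors`), 100);

  console.log(await store.getStats()); // { totalVectors, cacheSize, cacheHitRate }
  store.close();
}

persistAndReload().catch(console.error);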
package/src/worker-pool.js
DELETED
@@ -1,254 +0,0 @@

/**
 * Web Worker Pool Manager
 *
 * Manages a pool of workers for parallel vector operations.
 * Supports:
 * - Round-robin task distribution
 * - Load balancing
 * - Automatic worker initialization
 * - Promise-based API
 */

export class WorkerPool {
  constructor(workerUrl, wasmUrl, options = {}) {
    this.workerUrl = workerUrl;
    this.wasmUrl = wasmUrl;
    this.poolSize = options.poolSize || navigator.hardwareConcurrency || 4;
    this.workers = [];
    this.nextWorker = 0;
    this.pendingRequests = new Map();
    this.requestId = 0;
    this.initialized = false;
    this.options = options;
  }

  /**
   * Initialize the worker pool
   */
  async init() {
    if (this.initialized) return;

    console.log(`Initializing worker pool with ${this.poolSize} workers...`);

    const initPromises = [];

    for (let i = 0; i < this.poolSize; i++) {
      const worker = new Worker(this.workerUrl, { type: 'module' });

      worker.onmessage = (e) => this.handleMessage(i, e);
      worker.onerror = (error) => this.handleError(i, error);

      this.workers.push({
        worker,
        busy: false,
        id: i
      });

      // Initialize worker with WASM
      const initPromise = this.sendToWorker(i, 'init', {
        wasmUrl: this.wasmUrl,
        dimensions: this.options.dimensions,
        metric: this.options.metric,
        useHnsw: this.options.useHnsw
      });

      initPromises.push(initPromise);
    }

    await Promise.all(initPromises);
    this.initialized = true;

    console.log(`Worker pool initialized successfully`);
  }

  /**
   * Handle message from worker
   */
  handleMessage(workerId, event) {
    const { type, requestId, data, error } = event.data;

    if (type === 'error') {
      const request = this.pendingRequests.get(requestId);
      if (request) {
        request.reject(new Error(error.message));
        this.pendingRequests.delete(requestId);
      }
      return;
    }

    const request = this.pendingRequests.get(requestId);
    if (request) {
      this.workers[workerId].busy = false;
      request.resolve(data);
      this.pendingRequests.delete(requestId);
    }
  }

  /**
   * Handle worker error
   */
  handleError(workerId, error) {
    console.error(`Worker ${workerId} error:`, error);

    // Reject all pending requests for this worker
    for (const [requestId, request] of this.pendingRequests) {
      if (request.workerId === workerId) {
        request.reject(error);
        this.pendingRequests.delete(requestId);
      }
    }
  }

  /**
   * Get next available worker (round-robin)
   */
  getNextWorker() {
    // Try to find an idle worker
    for (let i = 0; i < this.workers.length; i++) {
      const idx = (this.nextWorker + i) % this.workers.length;
      if (!this.workers[idx].busy) {
        this.nextWorker = (idx + 1) % this.workers.length;
        return idx;
      }
    }

    // All busy, use round-robin
    const idx = this.nextWorker;
    this.nextWorker = (this.nextWorker + 1) % this.workers.length;
    return idx;
  }

  /**
   * Send message to specific worker
   */
  sendToWorker(workerId, type, data) {
    return new Promise((resolve, reject) => {
      const requestId = this.requestId++;

      this.pendingRequests.set(requestId, {
        resolve,
        reject,
        workerId,
        timestamp: Date.now()
      });

      this.workers[workerId].busy = true;
      this.workers[workerId].worker.postMessage({
        type,
        data: { ...data, requestId }
      });

      // Timeout after 30 seconds
      setTimeout(() => {
        if (this.pendingRequests.has(requestId)) {
          this.pendingRequests.delete(requestId);
          reject(new Error('Request timeout'));
        }
      }, 30000);
    });
  }

  /**
   * Execute operation on next available worker
   */
  async execute(type, data) {
    if (!this.initialized) {
      await this.init();
    }

    const workerId = this.getNextWorker();
    return this.sendToWorker(workerId, type, data);
  }

  /**
   * Insert vector
   */
  async insert(vector, id = null, metadata = null) {
    return this.execute('insert', { vector, id, metadata });
  }

  /**
   * Insert batch of vectors
   */
  async insertBatch(entries) {
    // Distribute batch across workers
    const chunkSize = Math.ceil(entries.length / this.poolSize);
    const chunks = [];

    for (let i = 0; i < entries.length; i += chunkSize) {
      chunks.push(entries.slice(i, i + chunkSize));
    }

    const promises = chunks.map((chunk, i) =>
      this.sendToWorker(i % this.poolSize, 'insertBatch', { entries: chunk })
    );

    const results = await Promise.all(promises);
    return results.flat();
  }

  /**
   * Search for similar vectors
   */
  async search(query, k = 10, filter = null) {
    return this.execute('search', { query, k, filter });
  }

  /**
   * Parallel search across multiple queries
   */
  async searchBatch(queries, k = 10, filter = null) {
    const promises = queries.map((query, i) =>
      this.sendToWorker(i % this.poolSize, 'search', { query, k, filter })
    );

    return Promise.all(promises);
  }

  /**
   * Delete vector
   */
  async delete(id) {
    return this.execute('delete', { id });
  }

  /**
   * Get vector by ID
   */
  async get(id) {
    return this.execute('get', { id });
  }

  /**
   * Get database length (from first worker)
   */
  async len() {
    return this.sendToWorker(0, 'len', {});
  }

  /**
   * Terminate all workers
   */
  terminate() {
    for (const { worker } of this.workers) {
      worker.terminate();
    }
    this.workers = [];
    this.initialized = false;
    console.log('Worker pool terminated');
  }

  /**
   * Get pool statistics
   */
  getStats() {
    return {
      poolSize: this.poolSize,
      busyWorkers: this.workers.filter(w => w.busy).length,
      idleWorkers: this.workers.filter(w => !w.busy).length,
      pendingRequests: this.pendingRequests.size
    };
  }
}

export default WorkerPool;
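Similarly, a minimal sketch of how the removed WorkerPool could have been used. The constructor signature, option names (poolSize, dimensions, metric, useHnsw), and methods come from the deleted file above; the worker URL, WASM asset path, dimensions, and sample vectors are illustrative assumptions (the companion worker.js was also removed in this release, per the summary at the top).

// Minimal usage sketch (URLs and option values are illustrative assumptions).
import WorkerPool from './worker-pool.js';

async function run() {
  const pool = new WorkerPool(
    new URL('./worker.js', import.meta.url),                  // deleted worker entry point
    new URL('../pkg/ruvector_wasm_bg.wasm', import.meta.url), // assumed WASM asset path
    { poolSize: 4, dimensions: 384, metric: 'cosine', useHnsw: true }
  );

  await pool.init();

  // Sharded insert: each worker indexes only its own slice of the batch.
  await pool.insertBatch([
    { id: 'a', vector: new Float32Array(384), metadata: null },
    { id: 'b', vector: new Float32Array(384), metadata: null }
  ]);

  const hits = await pool.search(new Float32Array(384), 5);
  console.log(hits, pool.getStats());

  pool.terminate();
}

run().catch(console.error);

Note, as the deleted code shows, that insertBatch distributes entries across workers while search routes a query to a single worker, so each search only consults one worker's slice of the data.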