agentic-flow 2.0.1-alpha.14 → 2.0.1-alpha.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +35 -0
- package/dist/.tsbuildinfo +1 -1
- package/dist/intelligence/EmbeddingService.d.ts +89 -16
- package/dist/intelligence/EmbeddingService.d.ts.map +1 -1
- package/dist/intelligence/EmbeddingService.js +311 -82
- package/dist/intelligence/EmbeddingService.js.map +1 -1
- package/dist/intelligence/embedding-benchmark.js +6 -2
- package/dist/intelligence/embedding-benchmark.js.map +1 -1
- package/package.json +3 -3
- package/wasm/reasoningbank/reasoningbank_wasm_bg.js +2 -2
- package/wasm/reasoningbank/reasoningbank_wasm_bg.wasm +0 -0
package/dist/intelligence/EmbeddingService.js

@@ -1,14 +1,37 @@
 /**
  * EmbeddingService - Unified embedding interface for agentic-flow
  *
- * Supports two backends:
- * - Simple: Hash-based, ~0.01ms, not semantic
- * - ONNX: Real neural embeddings via ruvector-onnx-embeddings-wasm, ~10-50ms, semantic
+ * Uses ruvector@0.1.61+ for ONNX embeddings with:
+ * - SIMD128 acceleration (6x faster)
+ * - Parallel worker threads (7 workers)
+ * - all-MiniLM-L6-v2 model (384 dimensions)
  *
  * Configure via:
- * - AGENTIC_FLOW_EMBEDDINGS=simple|onnx (default: simple)
+ * - AGENTIC_FLOW_EMBEDDINGS=simple|onnx|auto (default: auto)
  * - AGENTIC_FLOW_EMBEDDING_MODEL=all-MiniLM-L6-v2 (default)
  */
+// ONNX availability cache
+let onnxAvailable = null;
+let ruvectorModule = null;
+/**
+ * Detect ONNX/SIMD support by loading ruvector
+ */
+async function detectOnnx() {
+    if (onnxAvailable !== null) {
+        return onnxAvailable;
+    }
+    try {
+        const mod = await import('ruvector');
+        ruvectorModule = mod;
+        onnxAvailable = mod.isOnnxAvailable?.() ?? false;
+        return onnxAvailable;
+    }
+    catch (error) {
+        // Ruvector loading failed - fall back to simple embeddings
+        onnxAvailable = false;
+        return false;
+    }
+}
 // Simple LRU cache for embeddings
 class EmbeddingCache {
     cache = new Map();
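The header comment above is the only user-facing documentation of the new default: AGENTIC_FLOW_EMBEDDINGS now accepts 'auto' and falls back to the hash-based backend when ruvector cannot be loaded. A minimal usage sketch (not part of the package; it assumes the compiled module is imported directly from dist/intelligence/EmbeddingService.js):

    // Force the hash backend; leave unset (or set to 'auto') to probe ruvector on first use
    process.env.AGENTIC_FLOW_EMBEDDINGS = 'simple';

    const { getEmbeddingService } = await import('./dist/intelligence/EmbeddingService.js');
    const service = getEmbeddingService();
    console.log(service.getBackend()); // 'simple' - the configured (not yet resolved) backend

The environment variables are read once, in the private constructor, so they must be set before the first getEmbeddingService() call.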
@@ -45,13 +68,12 @@ class EmbeddingCache {
 export class EmbeddingService {
     static instance = null;
     backend;
+    effectiveBackend = null;
     dimension;
     modelName;
     // ONNX state
-    embedder = null;
-    wasmModule = null;
-    loadingPromise = null;
     modelLoaded = false;
+    loadingPromise = null;
     // Stats
     totalEmbeddings = 0;
     totalLatencyMs = 0;
@@ -59,10 +81,13 @@ export class EmbeddingService {
     // Cache
     cache;
     cacheEnabled;
+    // Corpus for search operations
+    corpus = { texts: [], embeddings: [] };
     constructor() {
-        this.backend = process.env.AGENTIC_FLOW_EMBEDDINGS || 'simple';
+        // Default to 'auto' which will detect ONNX and use it if available
+        this.backend = process.env.AGENTIC_FLOW_EMBEDDINGS || 'auto';
         this.modelName = process.env.AGENTIC_FLOW_EMBEDDING_MODEL || 'all-MiniLM-L6-v2';
-        this.dimension = this.backend === 'onnx' ? 384 : 256; // ONNX uses 384, simple uses 256
+        this.dimension = 256; // Will be updated when ONNX loads (384)
         this.cacheEnabled = process.env.AGENTIC_FLOW_EMBEDDING_CACHE !== 'false';
         this.cache = new EmbeddingCache(1000);
     }
@@ -73,11 +98,40 @@ export class EmbeddingService {
         return EmbeddingService.instance;
     }
     /**
-     * Get current backend
+     * Resolve the effective backend based on ONNX detection
+     */
+    async resolveBackend() {
+        if (this.effectiveBackend) {
+            return this.effectiveBackend;
+        }
+        if (this.backend === 'auto') {
+            const hasOnnx = await detectOnnx();
+            this.effectiveBackend = hasOnnx ? 'onnx' : 'simple';
+            if (hasOnnx) {
+                this.dimension = 384; // all-MiniLM-L6-v2 dimension
+            }
+        }
+        else {
+            this.effectiveBackend = this.backend;
+            if (this.backend === 'onnx') {
+                await detectOnnx(); // Ensure module is loaded
+                this.dimension = 384;
+            }
+        }
+        return this.effectiveBackend;
+    }
+    /**
+     * Get configured backend (may be 'auto')
      */
     getBackend() {
         return this.backend;
     }
+    /**
+     * Get effective backend after detection
+     */
+    getEffectiveBackend() {
+        return this.effectiveBackend || this.backend;
+    }
     /**
      * Get embedding dimension
      */
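Note that resolveBackend() is only invoked lazily from embed(), embedBatch() and similarity(), so getEffectiveBackend() keeps returning the configured value until the first embedding call. An illustrative sequence, continuing the earlier sketch and assuming ruvector's ONNX runtime loads successfully:

    const service = getEmbeddingService();
    service.getBackend();          // 'auto' (default)
    service.getEffectiveBackend(); // 'auto' - nothing resolved yet

    await service.embed('hello');  // triggers resolveBackend() -> detectOnnx()
    service.getEffectiveBackend(); // 'onnx' if ruvector loaded, otherwise 'simple'
    service.getDimension();        // 384 under ONNX, 256 under the simple backend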
@@ -90,54 +144,9 @@ export class EmbeddingService {
     isModelLoaded() {
         return this.modelLoaded;
     }
-    /**
-     * Initialize ONNX embedder (lazy load on first use)
-     */
-    async initOnnx() {
-        if (this.modelLoaded)
-            return;
-        if (this.loadingPromise)
-            return this.loadingPromise;
-        this.loadingPromise = this._loadOnnxModel();
-        await this.loadingPromise;
-    }
-    async _loadOnnxModel() {
-        try {
-            console.log(`[EmbeddingService] Loading ONNX model: ${this.modelName}`);
-            const startTime = performance.now();
-            // Import WASM module (auto-initializes with --experimental-wasm-modules)
-            const wasmModule = await import('ruvector-onnx-embeddings-wasm');
-            this.wasmModule = wasmModule;
-            // Import loader for model fetching
-            const loaderModule = await import('ruvector-onnx-embeddings-wasm/loader.js');
-            const { ModelLoader, MODELS } = loaderModule;
-            const modelConfig = MODELS[this.modelName];
-            if (!modelConfig) {
-                throw new Error(`Unknown model: ${this.modelName}`);
-            }
-            // Load model files
-            const loader = new ModelLoader();
-            const { modelBytes, tokenizerJson } = await loader.loadModel(this.modelName);
-            // Create embedder with configuration
-            const embedderConfig = new wasmModule.WasmEmbedderConfig()
-                .setMaxLength(modelConfig.maxLength)
-                .setNormalize(true)
-                .setPooling(0); // Mean pooling
-            this.embedder = wasmModule.WasmEmbedder.withConfig(modelBytes, tokenizerJson, embedderConfig);
-            this.dimension = this.embedder.dimension();
-            this.modelLoaded = true;
-            const loadTime = performance.now() - startTime;
-            console.log(`[EmbeddingService] Model loaded in ${loadTime.toFixed(0)}ms, dim=${this.dimension}`);
-        }
-        catch (error) {
-            console.error('[EmbeddingService] Failed to load ONNX model:', error);
-            console.log('[EmbeddingService] Falling back to simple embeddings');
-            this.backend = 'simple';
-            this.dimension = 256;
-        }
-    }
     /**
      * Generate embedding for text
+     * Auto-detects ONNX and uses it if available (default behavior)
      */
     async embed(text) {
         const startTime = performance.now();
@@ -149,11 +158,14 @@ export class EmbeddingService {
                 return cached;
             }
         }
+        // Resolve backend (handles 'auto' mode)
+        const effectiveBackend = await this.resolveBackend();
         let embedding;
-        if (this.backend === 'onnx') {
-            await this.initOnnx();
-            if (this.embedder) {
-                embedding = this.embedder.embedOne(text);
+        if (effectiveBackend === 'onnx' && ruvectorModule) {
+            const result = await ruvectorModule.embed(text);
+            if (result?.embedding) {
+                embedding = result.embedding;
+                this.modelLoaded = true;
             }
             else {
                 embedding = this.simpleEmbed(text);
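Either way the caller receives a Float32Array; the ONNX path is taken only when both the resolved backend and the cached ruvector module are available, otherwise embed() drops to simpleEmbed(). For example (sketch):

    const vec = await getEmbeddingService().embed('agentic workflows');
    console.log(vec instanceof Float32Array, vec.length); // true, 384 (ONNX) or 256 (simple)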
@@ -172,38 +184,223 @@ export class EmbeddingService {
         return embedding;
     }
     /**
-     * Generate embeddings for multiple texts
+     * Generate embeddings for multiple texts (batch processing with parallel workers)
+     * Batch processing provides significant speedup with parallel ONNX workers
      */
     async embedBatch(texts) {
-        if (this.backend === 'onnx' && this.embedder) {
-            await this.initOnnx();
-            if (this.embedder) {
-                const batchEmbedding = this.embedder.embedBatch(texts);
-                const dim = this.dimension;
-                const embeddings = [];
-                for (let i = 0; i < texts.length; i++) {
-                    embeddings.push(batchEmbedding.slice(i * dim, (i + 1) * dim));
+        const startTime = performance.now();
+        // Check cache for all texts first
+        if (this.cacheEnabled) {
+            const cachedResults = texts.map(t => this.cache.get(t) || null);
+            const allCached = cachedResults.every(r => r !== null);
+            if (allCached) {
+                this.cacheHits += texts.length;
+                return cachedResults;
+            }
+        }
+        // Resolve backend
+        const effectiveBackend = await this.resolveBackend();
+        if (effectiveBackend === 'onnx' && ruvectorModule) {
+            const result = await ruvectorModule.embedBatch(texts);
+            if (result?.embeddings && result.embeddings.length === texts.length) {
+                const embeddings = result.embeddings;
+                // Cache individual embeddings
+                if (this.cacheEnabled) {
+                    for (let i = 0; i < texts.length; i++) {
+                        this.cache.set(texts[i], embeddings[i]);
+                    }
                 }
+                // Update stats
+                this.totalEmbeddings += texts.length;
+                this.totalLatencyMs += performance.now() - startTime;
+                this.modelLoaded = true;
                 return embeddings;
             }
         }
-        // Fall back to sequential
+        // Fall back to sequential for simple backend
         return Promise.all(texts.map(t => this.embed(t)));
     }
     /**
      * Compute similarity between two texts
      */
     async similarity(text1, text2) {
-        if (this.backend === 'onnx') {
-            await this.initOnnx();
-            if (this.embedder) {
-                return this.embedder.similarity(text1, text2);
-            }
+        const effectiveBackend = await this.resolveBackend();
+        if (effectiveBackend === 'onnx' && ruvectorModule) {
+            const result = await ruvectorModule.similarity(text1, text2);
+            return result.similarity;
         }
         // Fall back to embedding + cosine
         const [e1, e2] = await Promise.all([this.embed(text1), this.embed(text2)]);
         return this.cosineSimilarity(e1, e2);
     }
+    /**
+     * Compute NxN similarity matrix for a list of texts
+     * Uses parallel workers for ONNX backend
+     */
+    async similarityMatrix(texts) {
+        const embeddings = await this.embedBatch(texts);
+        const n = texts.length;
+        const matrix = Array(n).fill(null).map(() => Array(n).fill(0));
+        for (let i = 0; i < n; i++) {
+            matrix[i][i] = 1.0; // Self-similarity
+            for (let j = i + 1; j < n; j++) {
+                const sim = this.cosineSimilarity(embeddings[i], embeddings[j]);
+                matrix[i][j] = sim;
+                matrix[j][i] = sim; // Symmetric
+            }
+        }
+        return matrix;
+    }
+    /**
+     * Build a corpus for semantic search
+     */
+    async buildCorpus(texts) {
+        this.corpus.texts = texts;
+        this.corpus.embeddings = await this.embedBatch(texts);
+    }
+    /**
+     * Semantic search against the corpus
+     * Returns top-k most similar texts
+     */
+    async semanticSearch(query, topK = 5) {
+        if (this.corpus.texts.length === 0) {
+            throw new Error('Corpus not built. Call buildCorpus() first.');
+        }
+        const queryEmbedding = await this.embed(query);
+        const results = [];
+        for (let i = 0; i < this.corpus.texts.length; i++) {
+            const sim = this.cosineSimilarity(queryEmbedding, this.corpus.embeddings[i]);
+            results.push({
+                text: this.corpus.texts[i],
+                index: i,
+                similarity: sim,
+            });
+        }
+        // Sort by similarity (descending) and return top-k
+        results.sort((a, b) => b.similarity - a.similarity);
+        return results.slice(0, topK);
+    }
+    /**
+     * Find near-duplicate texts in a list
+     * Groups texts with similarity above threshold
+     */
+    async findDuplicates(texts, threshold = 0.9) {
+        const embeddings = await this.embedBatch(texts);
+        const n = texts.length;
+        const visited = new Set();
+        const groups = [];
+        for (let i = 0; i < n; i++) {
+            if (visited.has(i))
+                continue;
+            const group = {
+                indices: [i],
+                texts: [texts[i]],
+                similarity: 1.0,
+            };
+            for (let j = i + 1; j < n; j++) {
+                if (visited.has(j))
+                    continue;
+                const sim = this.cosineSimilarity(embeddings[i], embeddings[j]);
+                if (sim >= threshold) {
+                    group.indices.push(j);
+                    group.texts.push(texts[j]);
+                    group.similarity = Math.min(group.similarity, sim);
+                    visited.add(j);
+                }
+            }
+            if (group.indices.length > 1) {
+                visited.add(i);
+                groups.push(group);
+            }
+        }
+        return groups;
+    }
+    /**
+     * K-means clustering of texts
+     * Returns cluster assignments and centroids
+     */
+    async clusterTexts(texts, k = 3, maxIterations = 100) {
+        const embeddings = await this.embedBatch(texts);
+        const n = texts.length;
+        const dim = this.dimension;
+        // Initialize centroids randomly (copy to new ArrayBuffer for consistent typing)
+        const centroidIndices = new Set();
+        while (centroidIndices.size < k && centroidIndices.size < n) {
+            centroidIndices.add(Math.floor(Math.random() * n));
+        }
+        let centroids = Array.from(centroidIndices).map(i => {
+            const copy = new Float32Array(dim);
+            copy.set(embeddings[i]);
+            return copy;
+        });
+        let clusters = new Array(n).fill(0);
+        for (let iter = 0; iter < maxIterations; iter++) {
+            // Assign points to nearest centroid
+            const newClusters = embeddings.map(emb => {
+                let bestCluster = 0;
+                let bestSim = -Infinity;
+                for (let c = 0; c < k; c++) {
+                    const sim = this.cosineSimilarity(emb, centroids[c]);
+                    if (sim > bestSim) {
+                        bestSim = sim;
+                        bestCluster = c;
+                    }
+                }
+                return bestCluster;
+            });
+            // Check convergence
+            const changed = newClusters.some((c, i) => c !== clusters[i]);
+            clusters = newClusters;
+            if (!changed)
+                break;
+            // Update centroids
+            const newCentroids = [];
+            for (let c = 0; c < k; c++) {
+                newCentroids.push(new Float32Array(dim));
+            }
+            const counts = new Array(k).fill(0);
+            for (let i = 0; i < n; i++) {
+                const c = clusters[i];
+                counts[c]++;
+                for (let d = 0; d < dim; d++) {
+                    newCentroids[c][d] += embeddings[i][d];
+                }
+            }
+            // Normalize centroids
+            for (let c = 0; c < k; c++) {
+                if (counts[c] > 0) {
+                    let norm = 0;
+                    for (let d = 0; d < dim; d++) {
+                        newCentroids[c][d] /= counts[c];
+                        norm += newCentroids[c][d] * newCentroids[c][d];
+                    }
+                    norm = Math.sqrt(norm) || 1;
+                    for (let d = 0; d < dim; d++) {
+                        newCentroids[c][d] /= norm;
+                    }
+                }
+            }
+            centroids = newCentroids;
+        }
+        return { clusters, centroids };
+    }
+    /**
+     * Stream embeddings for large batches (memory efficient)
+     * Yields embeddings one at a time
+     */
+    async *streamEmbed(texts, batchSize = 32) {
+        for (let i = 0; i < texts.length; i += batchSize) {
+            const batch = texts.slice(i, i + batchSize);
+            const embeddings = await this.embedBatch(batch);
+            for (let j = 0; j < batch.length; j++) {
+                yield {
+                    index: i + j,
+                    text: batch[j],
+                    embedding: embeddings[j],
+                };
+            }
+        }
+    }
     /**
      * Simple hash-based embedding (fast, not semantic)
      */
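The corpus, duplicate-detection, clustering and streaming helpers added above all build on embedBatch(), so they benefit from the same cache and parallel workers. A composition sketch (illustrative inputs; same import assumption as earlier):

    const service = getEmbeddingService();

    // Semantic search over a small corpus
    await service.buildCorpus(['reset a password', 'deploy to production', 'rotate API keys']);
    const hits = await service.semanticSearch('how do I change my password?', 2);
    // hits: [{ text, index, similarity }, ...] sorted by descending similarity

    // Near-duplicate grouping and k-means clustering over the same kind of input
    const dupes = await service.findDuplicates(['hi there', 'hi there!', 'goodbye'], 0.9);
    const { clusters } = await service.clusterTexts(['cat', 'dog', 'car', 'truck'], 2);

    // Streaming keeps memory flat for large inputs
    const docs = Array.from({ length: 1000 }, (_, i) => `document ${i}`);
    for await (const { index, embedding } of service.streamEmbed(docs, 64)) {
        // persist embedding for docs[index] ...
    }

Note that clusterTexts() seeds its centroids with Math.random(), so cluster assignments are not deterministic across runs.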
@@ -231,8 +428,8 @@ export class EmbeddingService {
      * Compute cosine similarity between two embeddings
      */
     cosineSimilarity(a, b) {
-        if (this.wasmModule) {
-            return this.wasmModule.cosineSimilarity(a, b);
+        if (ruvectorModule?.cosineSimilarity) {
+            return ruvectorModule.cosineSimilarity(a, b);
         }
         // JS fallback
         let dot = 0;
@@ -249,16 +446,20 @@ export class EmbeddingService {
      * Get statistics
      */
     getStats() {
+        const effective = this.effectiveBackend || this.backend;
+        const ruvectorStats = ruvectorModule?.getStats?.() || {};
         return {
             backend: this.backend,
+            effectiveBackend: effective,
             dimension: this.dimension,
             totalEmbeddings: this.totalEmbeddings,
             totalLatencyMs: this.totalLatencyMs,
             avgLatencyMs: this.totalEmbeddings > 0 ? this.totalLatencyMs / this.totalEmbeddings : 0,
             cacheHits: this.cacheHits,
             modelLoaded: this.modelLoaded,
-            modelName: this.backend === 'onnx' ? this.modelName : undefined,
-            simdAvailable: this.wasmModule?.simd_available?.() ?? undefined,
+            modelName: effective === 'onnx' ? this.modelName : undefined,
+            simdAvailable: ruvectorStats.simdAvailable ?? onnxAvailable,
+            parallelWorkers: ruvectorStats.workerCount ?? undefined,
         };
     }
     /**
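getStats() now distinguishes the configured backend from the resolved one and forwards whatever ruvector's own getStats() reports (the simdAvailable/workerCount field names are taken from this code, not from ruvector's documentation). Shape sketch with illustrative values:

    const stats = getEmbeddingService().getStats();
    // e.g. { backend: 'auto', effectiveBackend: 'onnx', dimension: 384,
    //        totalEmbeddings: 12, totalLatencyMs: 97, avgLatencyMs: 8.1,
    //        cacheHits: 4, modelLoaded: true, modelName: 'all-MiniLM-L6-v2',
    //        simdAvailable: true, parallelWorkers: 7 }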
@@ -267,14 +468,30 @@ export class EmbeddingService {
     clearCache() {
         this.cache.clear();
     }
+    /**
+     * Clear corpus
+     */
+    clearCorpus() {
+        this.corpus = { texts: [], embeddings: [] };
+    }
+    /**
+     * Shutdown (cleanup workers)
+     */
+    async shutdown() {
+        if (ruvectorModule?.shutdown) {
+            await ruvectorModule.shutdown();
+        }
+    }
     /**
      * Reset instance (for testing)
      */
-    static reset() {
-        if (EmbeddingService.instance?.embedder) {
-            EmbeddingService.instance.embedder.free();
+    static async reset() {
+        if (EmbeddingService.instance) {
+            await EmbeddingService.instance.shutdown();
         }
         EmbeddingService.instance = null;
+        onnxAvailable = null;
+        ruvectorModule = null;
     }
 }
 // Export singleton getter
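Because reset() now awaits shutdown() (which tears down ruvector's workers) and also clears the module-level onnxAvailable/ruvectorModule caches, test teardown has to await it; a synchronous EmbeddingService.reset() call written against the previous release would leave the shutdown promise dangling. In a Jest/Vitest-style suite, for instance:

    afterEach(async () => {
        await EmbeddingService.reset();
    });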
@@ -294,4 +511,16 @@ export async function textSimilarity(text1, text2) {
 export function simpleEmbed(text, dim = 256) {
     return getEmbeddingService().simpleEmbed(text, dim);
 }
+export async function similarityMatrix(texts) {
+    return getEmbeddingService().similarityMatrix(texts);
+}
+export async function semanticSearch(query, topK = 5) {
+    return getEmbeddingService().semanticSearch(query, topK);
+}
+export async function findDuplicates(texts, threshold = 0.9) {
+    return getEmbeddingService().findDuplicates(texts, threshold);
+}
+export async function clusterTexts(texts, k = 3) {
+    return getEmbeddingService().clusterTexts(texts, k);
+}
 //# sourceMappingURL=EmbeddingService.js.map
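The new module-level wrappers all delegate to the shared singleton, so one-off calls need no explicit getEmbeddingService() - except semanticSearch(), whose corpus still has to be built on the singleton first. A sketch under the same import-path assumption as the earlier examples:

    import { getEmbeddingService, semanticSearch, findDuplicates, clusterTexts } from './dist/intelligence/EmbeddingService.js';

    await getEmbeddingService().buildCorpus(['alpha release notes', 'beta roadmap']);
    const top = await semanticSearch('what changed in the alpha?', 1);
    const groups = await findDuplicates(['same text', 'same text', 'different'], 0.95);
    const { clusters } = await clusterTexts(['a', 'b', 'c'], 2);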
package/dist/intelligence/EmbeddingService.js.map

@@ -1 +1 @@
-[single-line source map for the previous build: machine-generated JSON embedding the old EmbeddingService.ts source]
+[single-line source map for the new build: machine-generated JSON embedding the updated EmbeddingService.ts source; truncated in the registry diff view]
return this.cosineSimilarity(e1, e2);\n }\n\n /**\n * Compute NxN similarity matrix for a list of texts\n * Uses parallel workers for ONNX backend\n */\n async similarityMatrix(texts: string[]): Promise<number[][]> {\n const embeddings = await this.embedBatch(texts);\n const n = texts.length;\n const matrix: number[][] = Array(n).fill(null).map(() => Array(n).fill(0));\n\n for (let i = 0; i < n; i++) {\n matrix[i][i] = 1.0; // Self-similarity\n for (let j = i + 1; j < n; j++) {\n const sim = this.cosineSimilarity(embeddings[i], embeddings[j]);\n matrix[i][j] = sim;\n matrix[j][i] = sim; // Symmetric\n }\n }\n\n return matrix;\n }\n\n /**\n * Build a corpus for semantic search\n */\n async buildCorpus(texts: string[]): Promise<void> {\n this.corpus.texts = texts;\n this.corpus.embeddings = await this.embedBatch(texts);\n }\n\n /**\n * Semantic search against the corpus\n * Returns top-k most similar texts\n */\n async semanticSearch(query: string, topK: number = 5): Promise<SearchResult[]> {\n if (this.corpus.texts.length === 0) {\n throw new Error('Corpus not built. Call buildCorpus() first.');\n }\n\n const queryEmbedding = await this.embed(query);\n const results: SearchResult[] = [];\n\n for (let i = 0; i < this.corpus.texts.length; i++) {\n const sim = this.cosineSimilarity(queryEmbedding, this.corpus.embeddings[i]);\n results.push({\n text: this.corpus.texts[i],\n index: i,\n similarity: sim,\n });\n }\n\n // Sort by similarity (descending) and return top-k\n results.sort((a, b) => b.similarity - a.similarity);\n return results.slice(0, topK);\n }\n\n /**\n * Find near-duplicate texts in a list\n * Groups texts with similarity above threshold\n */\n async findDuplicates(texts: string[], threshold: number = 0.9): Promise<DuplicateGroup[]> {\n const embeddings = await this.embedBatch(texts);\n const n = texts.length;\n const visited = new Set<number>();\n const groups: DuplicateGroup[] = [];\n\n for (let i = 0; i < n; i++) {\n if (visited.has(i)) continue;\n\n const group: DuplicateGroup = {\n indices: [i],\n texts: [texts[i]],\n similarity: 1.0,\n };\n\n for (let j = i + 1; j < n; j++) {\n if (visited.has(j)) continue;\n\n const sim = this.cosineSimilarity(embeddings[i], embeddings[j]);\n if (sim >= threshold) {\n group.indices.push(j);\n group.texts.push(texts[j]);\n group.similarity = Math.min(group.similarity, sim);\n visited.add(j);\n }\n }\n\n if (group.indices.length > 1) {\n visited.add(i);\n groups.push(group);\n }\n }\n\n return groups;\n }\n\n /**\n * K-means clustering of texts\n * Returns cluster assignments and centroids\n */\n async clusterTexts(\n texts: string[],\n k: number = 3,\n maxIterations: number = 100\n ): Promise<{ clusters: number[]; centroids: Float32Array[] }> {\n const embeddings = await this.embedBatch(texts);\n const n = texts.length;\n const dim = this.dimension;\n\n // Initialize centroids randomly (copy to new ArrayBuffer for consistent typing)\n const centroidIndices = new Set<number>();\n while (centroidIndices.size < k && centroidIndices.size < n) {\n centroidIndices.add(Math.floor(Math.random() * n));\n }\n let centroids: Float32Array[] = Array.from(centroidIndices).map(i => {\n const copy = new Float32Array(dim);\n copy.set(embeddings[i]);\n return copy;\n });\n\n let clusters = new Array(n).fill(0);\n\n for (let iter = 0; iter < maxIterations; iter++) {\n // Assign points to nearest centroid\n const newClusters = embeddings.map(emb => {\n let bestCluster = 0;\n let bestSim = -Infinity;\n for (let c = 0; c < k; c++) {\n const sim = 
this.cosineSimilarity(emb, centroids[c]);\n if (sim > bestSim) {\n bestSim = sim;\n bestCluster = c;\n }\n }\n return bestCluster;\n });\n\n // Check convergence\n const changed = newClusters.some((c, i) => c !== clusters[i]);\n clusters = newClusters;\n if (!changed) break;\n\n // Update centroids\n const newCentroids: Float32Array[] = [];\n for (let c = 0; c < k; c++) {\n newCentroids.push(new Float32Array(dim));\n }\n const counts = new Array(k).fill(0);\n\n for (let i = 0; i < n; i++) {\n const c = clusters[i];\n counts[c]++;\n for (let d = 0; d < dim; d++) {\n newCentroids[c][d] += embeddings[i][d];\n }\n }\n\n // Normalize centroids\n for (let c = 0; c < k; c++) {\n if (counts[c] > 0) {\n let norm = 0;\n for (let d = 0; d < dim; d++) {\n newCentroids[c][d] /= counts[c];\n norm += newCentroids[c][d] * newCentroids[c][d];\n }\n norm = Math.sqrt(norm) || 1;\n for (let d = 0; d < dim; d++) {\n newCentroids[c][d] /= norm;\n }\n }\n }\n centroids = newCentroids;\n }\n\n return { clusters, centroids };\n }\n\n /**\n * Stream embeddings for large batches (memory efficient)\n * Yields embeddings one at a time\n */\n async *streamEmbed(texts: string[], batchSize: number = 32): AsyncGenerator<{ index: number; text: string; embedding: Float32Array }> {\n for (let i = 0; i < texts.length; i += batchSize) {\n const batch = texts.slice(i, i + batchSize);\n const embeddings = await this.embedBatch(batch);\n\n for (let j = 0; j < batch.length; j++) {\n yield {\n index: i + j,\n text: batch[j],\n embedding: embeddings[j],\n };\n }\n }\n }\n\n /**\n * Simple hash-based embedding (fast, not semantic)\n */\n simpleEmbed(text: string, dim: number = 256): Float32Array {\n const embedding = new Float32Array(dim);\n\n // Multi-pass hash for better distribution\n for (let i = 0; i < text.length; i++) {\n const code = text.charCodeAt(i);\n embedding[i % dim] += code / 255;\n embedding[(i * 7) % dim] += (code * 0.3) / 255;\n embedding[(i * 13) % dim] += (code * 0.2) / 255;\n }\n\n // Normalize\n let norm = 0;\n for (let i = 0; i < dim; i++) {\n norm += embedding[i] * embedding[i];\n }\n norm = Math.sqrt(norm) || 1;\n for (let i = 0; i < dim; i++) {\n embedding[i] /= norm;\n }\n\n return embedding;\n }\n\n /**\n * Compute cosine similarity between two embeddings\n */\n cosineSimilarity(a: Float32Array, b: Float32Array): number {\n if (ruvectorModule?.cosineSimilarity) {\n return ruvectorModule.cosineSimilarity(a, b);\n }\n\n // JS fallback\n let dot = 0;\n let normA = 0;\n let normB = 0;\n for (let i = 0; i < a.length; i++) {\n dot += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n return dot / (Math.sqrt(normA) * Math.sqrt(normB) || 1);\n }\n\n /**\n * Get statistics\n */\n getStats(): EmbeddingStats {\n const effective = this.effectiveBackend || this.backend;\n const ruvectorStats = ruvectorModule?.getStats?.() || {};\n\n return {\n backend: this.backend,\n effectiveBackend: effective,\n dimension: this.dimension,\n totalEmbeddings: this.totalEmbeddings,\n totalLatencyMs: this.totalLatencyMs,\n avgLatencyMs: this.totalEmbeddings > 0 ? this.totalLatencyMs / this.totalEmbeddings : 0,\n cacheHits: this.cacheHits,\n modelLoaded: this.modelLoaded,\n modelName: effective === 'onnx' ? this.modelName : undefined,\n simdAvailable: ruvectorStats.simdAvailable ?? onnxAvailable,\n parallelWorkers: ruvectorStats.workerCount ?? 
undefined,\n };\n }\n\n /**\n * Clear cache\n */\n clearCache(): void {\n this.cache.clear();\n }\n\n /**\n * Clear corpus\n */\n clearCorpus(): void {\n this.corpus = { texts: [], embeddings: [] };\n }\n\n /**\n * Shutdown (cleanup workers)\n */\n async shutdown(): Promise<void> {\n if (ruvectorModule?.shutdown) {\n await ruvectorModule.shutdown();\n }\n }\n\n /**\n * Reset instance (for testing)\n */\n static async reset(): Promise<void> {\n if (EmbeddingService.instance) {\n await EmbeddingService.instance.shutdown();\n }\n EmbeddingService.instance = null;\n onnxAvailable = null;\n ruvectorModule = null;\n }\n}\n\n// Export singleton getter\nexport function getEmbeddingService(): EmbeddingService {\n return EmbeddingService.getInstance();\n}\n\n// Export convenience functions\nexport async function embed(text: string): Promise<Float32Array> {\n return getEmbeddingService().embed(text);\n}\n\nexport async function embedBatch(texts: string[]): Promise<Float32Array[]> {\n return getEmbeddingService().embedBatch(texts);\n}\n\nexport async function textSimilarity(text1: string, text2: string): Promise<number> {\n return getEmbeddingService().similarity(text1, text2);\n}\n\nexport function simpleEmbed(text: string, dim: number = 256): Float32Array {\n return getEmbeddingService().simpleEmbed(text, dim);\n}\n\nexport async function similarityMatrix(texts: string[]): Promise<number[][]> {\n return getEmbeddingService().similarityMatrix(texts);\n}\n\nexport async function semanticSearch(query: string, topK: number = 5): Promise<SearchResult[]> {\n return getEmbeddingService().semanticSearch(query, topK);\n}\n\nexport async function findDuplicates(texts: string[], threshold: number = 0.9): Promise<DuplicateGroup[]> {\n return getEmbeddingService().findDuplicates(texts, threshold);\n}\n\nexport async function clusterTexts(texts: string[], k: number = 3): Promise<{ clusters: number[]; centroids: Float32Array[] }> {\n return getEmbeddingService().clusterTexts(texts, k);\n}\n"]}
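Taken together, the source embedded in the map above documents the new auto-detecting embedding API. The following is a minimal usage sketch, not part of the package: it relies only on exports visible in that source (getEmbeddingService, embedBatch, textSimilarity, semanticSearch), while the import path is an assumption based on the file listing, since the published entry point is not shown in this diff.

// Sketch: exercising the updated EmbeddingService with the 'auto' backend.
// Import path is hypothetical, inferred from the package file layout.
import {
  getEmbeddingService,
  embedBatch,
  textSimilarity,
  semanticSearch,
} from 'agentic-flow/dist/intelligence/EmbeddingService.js';

async function demo(): Promise<void> {
  const service = getEmbeddingService();

  // Batch embedding: uses ONNX parallel workers when ruvector detects support,
  // otherwise falls back to the 256-dim hash embedding.
  const docs = [
    'ONNX runtime with SIMD acceleration',
    'Simple hash-based embeddings',
    'Parallel worker threads for batching',
  ];
  const vectors = await embedBatch(docs);
  console.log(`dimension: ${service.getDimension()}, vectors: ${vectors.length}`);

  // Pairwise similarity and corpus search against the same singleton.
  console.log(await textSimilarity(docs[0], docs[2]));
  await service.buildCorpus(docs);
  const hits = await semanticSearch('SIMD ONNX speed', 2);
  for (const hit of hits) {
    console.log(`${hit.similarity.toFixed(3)}  ${hit.text}`);
  }

  // Effective backend is resolved lazily ('onnx' or 'simple' under 'auto').
  console.log(service.getStats().effectiveBackend);

  // Clean up ONNX workers if any were started.
  await service.shutdown();
}

demo().catch(console.error);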
@@ -111,12 +111,16 @@ async function benchmarkService(service, name) {
     const avgWarmLatency = warmTotalLatency / TEST_TEXTS.length;
     console.log(` Total: ${warmTotalLatency.toFixed(2)}ms, Avg: ${avgWarmLatency.toFixed(2)}ms`);
     // Batch embedding
-    console.log(`[${name}] Batch embedding...`);
+    console.log(`[${name}] Batch embedding (10 texts)...`);
     service.clearCache();
     const batchStart = performance.now();
     await service.embedBatch(TEST_TEXTS);
     const batchLatency = performance.now() - batchStart;
-
+    const batchPerText = batchLatency / TEST_TEXTS.length;
+    console.log(` Batch latency: ${batchLatency.toFixed(2)}ms (${batchPerText.toFixed(2)}ms per text)`);
+    // Compare batch vs sequential
+    const speedup = avgWarmLatency > 0 ? avgWarmLatency / batchPerText : 0;
+    console.log(` Batch speedup: ${speedup.toFixed(1)}x vs sequential`);
     // Semantic similarity tests
     console.log(`[${name}] Semantic similarity tests...`);
     const similarities = [];
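For context, the hunk above adds a batch-vs-sequential comparison to the benchmark: per-text batch latency is batchLatency / TEST_TEXTS.length, and the reported speedup is avgWarmLatency / batchPerText. Below is a standalone sketch of that measurement, assuming the ten-text fixture implied by the new log line and the hypothetical import path used earlier; it is illustrative, not the benchmark's own code.

// Sketch of the batch-vs-sequential comparison added by this hunk.
import { getEmbeddingService } from 'agentic-flow/dist/intelligence/EmbeddingService.js';

const TEST_TEXTS = Array.from({ length: 10 }, (_, i) => `benchmark text ${i}`);

async function compareBatchVsSequential(): Promise<void> {
  const service = getEmbeddingService();

  // Sequential pass: one embed() call per text.
  const seqStart = performance.now();
  for (const text of TEST_TEXTS) {
    await service.embed(text);
  }
  const avgWarmLatency = (performance.now() - seqStart) / TEST_TEXTS.length;

  // Batch pass: clear the cache first so timings are not skewed by cache hits.
  service.clearCache();
  const batchStart = performance.now();
  await service.embedBatch(TEST_TEXTS);
  const batchLatency = performance.now() - batchStart;

  // Same arithmetic as the new benchmark lines.
  const batchPerText = batchLatency / TEST_TEXTS.length;
  const speedup = avgWarmLatency > 0 ? avgWarmLatency / batchPerText : 0;
  console.log(`Batch: ${batchPerText.toFixed(2)}ms per text, speedup ${speedup.toFixed(1)}x vs sequential`);
}

compareBatchVsSequential().catch(console.error);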