ruvector 0.1.61 → 0.1.63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +3 -0
- package/bin/mcp-server.js +3 -0
- package/dist/core/attention-fallbacks.d.ts +100 -0
- package/dist/core/attention-fallbacks.d.ts.map +1 -1
- package/dist/core/attention-fallbacks.js +192 -1
- package/dist/core/index.d.ts +4 -0
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +7 -1
- package/dist/core/intelligence-engine.d.ts +21 -0
- package/dist/core/intelligence-engine.d.ts.map +1 -1
- package/dist/core/intelligence-engine.js +49 -0
- package/dist/core/onnx-embedder.d.ts +6 -1
- package/dist/core/onnx-embedder.d.ts.map +1 -1
- package/dist/core/onnx-embedder.js +22 -2
- package/dist/core/parallel-intelligence.d.ts +109 -0
- package/dist/core/parallel-intelligence.d.ts.map +1 -0
- package/dist/core/parallel-intelligence.js +340 -0
- package/dist/core/parallel-workers.d.ts +183 -0
- package/dist/core/parallel-workers.d.ts.map +1 -0
- package/dist/core/parallel-workers.js +671 -0
- package/package.json +1 -1
- package/ruvector.db +0 -0
|
@@ -0,0 +1,671 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Parallel Workers - Extended worker capabilities for RuVector hooks
|
|
4
|
+
*
|
|
5
|
+
* Provides parallel processing for advanced operations:
|
|
6
|
+
*
|
|
7
|
+
* 1. SPECULATIVE PRE-COMPUTATION
|
|
8
|
+
* - Pre-embed likely next files based on co-edit patterns
|
|
9
|
+
* - Warm model cache before operations
|
|
10
|
+
* - Predictive route caching
|
|
11
|
+
*
|
|
12
|
+
* 2. REAL-TIME CODE ANALYSIS
|
|
13
|
+
* - Multi-file AST parsing with tree-sitter
|
|
14
|
+
* - Cross-file type inference
|
|
15
|
+
* - Live complexity metrics
|
|
16
|
+
* - Dependency graph updates
|
|
17
|
+
*
|
|
18
|
+
* 3. ADVANCED LEARNING
|
|
19
|
+
* - Distributed trajectory replay
|
|
20
|
+
* - Parallel SONA micro-LoRA updates
|
|
21
|
+
* - Background EWC consolidation
|
|
22
|
+
* - Online pattern clustering
|
|
23
|
+
*
|
|
24
|
+
* 4. INTELLIGENT RETRIEVAL
|
|
25
|
+
* - Parallel RAG chunking and retrieval
|
|
26
|
+
* - Sharded similarity search
|
|
27
|
+
* - Context relevance ranking
|
|
28
|
+
* - Semantic deduplication
|
|
29
|
+
*
|
|
30
|
+
* 5. SECURITY & QUALITY
|
|
31
|
+
* - Parallel SAST scanning
|
|
32
|
+
* - Multi-rule linting
|
|
33
|
+
* - Vulnerability detection
|
|
34
|
+
* - Code smell analysis
|
|
35
|
+
*
|
|
36
|
+
* 6. GIT INTELLIGENCE
|
|
37
|
+
* - Parallel blame analysis
|
|
38
|
+
* - Branch comparison
|
|
39
|
+
* - Merge conflict prediction
|
|
40
|
+
* - Code churn metrics
|
|
41
|
+
*/
|
|
42
|
+
// TypeScript-emitted helper (tslib __createBinding): re-exposes property `k`
// of source module `m` on target object `o` under the name `k2` (defaults to `k`).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Wrap in a live accessor unless the source already provides a suitable
    // descriptor; plain CommonJS exports get a lazy getter into `m[k]` so the
    // binding stays live if `m[k]` is reassigned later.
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    // Fallback for pre-ES5 environments without Object.create: a plain
    // (non-live) value copy.
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
|
|
53
|
+
// TypeScript-emitted helper (tslib __setModuleDefault): attaches the original
// module object `v` as the non-writable `default` export of namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    // Fallback without defineProperty semantics: plain assignment.
    o["default"] = v;
});
|
|
58
|
+
// TypeScript-emitted helper (tslib __importStar): adapts a CommonJS module to
// an ES-module-style namespace object for `import * as ns` interop. Real ES
// modules (marked __esModule) pass through unchanged; otherwise every own key
// except "default" is re-bound onto a fresh namespace object and the original
// module becomes its `default` export.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        // Pick the best available own-key enumerator on first call, then
        // memoize it by overwriting `ownKeys` itself.
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
|
75
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
76
|
+
exports.ExtendedWorkerPool = void 0;
|
|
77
|
+
exports.getExtendedWorkerPool = getExtendedWorkerPool;
|
|
78
|
+
exports.initExtendedWorkerPool = initExtendedWorkerPool;
|
|
79
|
+
const worker_threads_1 = require("worker_threads");
|
|
80
|
+
const os = __importStar(require("os"));
|
|
81
|
+
// ============================================================================
|
|
82
|
+
// Extended Worker Pool
|
|
83
|
+
// ============================================================================
|
|
84
|
+
/**
 * Pool of worker threads that runs hook-related analysis tasks in parallel
 * (speculative embedding, AST/complexity analysis, security scanning, RAG
 * retrieval, git intelligence, deduplication).
 *
 * Dispatch model: `taskQueue` holds tasks waiting for a free worker, while
 * `inFlight` (taskId -> pending promise handles) tracks tasks already posted
 * to a worker. Keeping these separate fixes the original bug where the head
 * of the queue was posted to every idle worker and in-flight tasks could be
 * re-dispatched.
 */
class ExtendedWorkerPool {
    /**
     * @param {object} [config]
     * @param {number}  [config.numWorkers]   Worker count; defaults to CPU count - 1 (min 1).
     * @param {boolean} [config.enabled]      Defaults to on when MCP_SERVER=1 or RUVECTOR_PARALLEL=1.
     * @param {number}  [config.taskTimeout]  Per-task timeout in ms (default 30000).
     * @param {number}  [config.maxQueueSize] Max tasks waiting for a worker (default 1000).
     */
    constructor(config = {}) {
        this.workers = [];
        // Tasks waiting for a free worker: { task, resolve, reject, timeout }.
        this.taskQueue = [];
        // worker -> taskId of the task it is currently running.
        this.busyWorkers = new Map();
        // taskId -> { task, resolve, reject, timeout } for dispatched tasks.
        this.inFlight = new Map();
        this.initialized = false;
        this.speculativeCache = new Map();
        this.astCache = new Map();
        const isMCP = process.env.MCP_SERVER === '1';
        this.config = {
            numWorkers: config.numWorkers ?? Math.max(1, os.cpus().length - 1),
            enabled: config.enabled ?? (isMCP || process.env.RUVECTOR_PARALLEL === '1'),
            taskTimeout: config.taskTimeout ?? 30000,
            maxQueueSize: config.maxQueueSize ?? 1000,
        };
    }
    /**
     * Spawn the worker threads from inline (eval'd) worker code.
     * No-op when already initialized or the pool is disabled.
     */
    async init() {
        if (this.initialized || !this.config.enabled)
            return;
        // Single source of truth for the worker script (the original also
        // built an unused Blob and a duplicate inline copy of this code).
        const workerCode = this.getWorkerCode();
        for (let i = 0; i < this.config.numWorkers; i++) {
            const worker = new worker_threads_1.Worker(workerCode, { eval: true, workerData: { workerId: i } });
            worker.on('message', (result) => {
                this.handleWorkerResult(worker, result);
            });
            worker.on('error', (err) => {
                console.error(`Worker ${i} error:`, err);
                // Reject the task this worker was running so its caller does
                // not hang until the timeout fires.
                this.failWorkerTask(worker, err);
                this.processQueue();
            });
            this.workers.push(worker);
        }
        this.initialized = true;
    }
    /**
     * Reject the in-flight task (if any) owned by a failed worker and free
     * the worker slot.
     */
    failWorkerTask(worker, err) {
        const taskId = this.busyWorkers.get(worker);
        this.busyWorkers.delete(worker);
        if (taskId === undefined)
            return;
        const pending = this.inFlight.get(taskId);
        if (pending) {
            this.inFlight.delete(taskId);
            clearTimeout(pending.timeout);
            pending.reject(err);
        }
    }
    /** Full source of the worker script evaluated in each thread. */
    getWorkerCode() {
        return `
const { parentPort, workerData } = require('worker_threads');
${this.getWorkerHandlers()}
`;
    }
    /**
     * Message loop plus task implementations that run inside each worker
     * thread. Returned as a string because workers are created with
     * `{ eval: true }`.
     */
    getWorkerHandlers() {
        return `
parentPort.on('message', async (task) => {
  try {
    let result;
    switch (task.type) {
      case 'speculative-embed':
        result = await speculativeEmbed(task.files, task.coEditGraph);
        break;
      case 'ast-analyze':
        result = await astAnalyze(task.files);
        break;
      case 'security-scan':
        result = await securityScan(task.files, task.rules);
        break;
      case 'rag-retrieve':
        result = await ragRetrieve(task.query, task.chunks, task.topK);
        break;
      case 'context-rank':
        result = await contextRank(task.context, task.query);
        break;
      case 'git-blame':
        result = await gitBlame(task.files);
        break;
      case 'git-churn':
        result = await gitChurn(task.files, task.since);
        break;
      case 'complexity-analyze':
        result = await complexityAnalyze(task.files);
        break;
      case 'dependency-graph':
        result = await dependencyGraph(task.entryPoints);
        break;
      case 'deduplicate':
        result = await deduplicate(task.items, task.threshold);
        break;
      default:
        throw new Error('Unknown task type: ' + task.type);
    }
    parentPort.postMessage({ success: true, data: result, taskId: task.taskId });
  } catch (error) {
    parentPort.postMessage({ success: false, error: error.message, taskId: task.taskId });
  }
});

// Worker implementations
async function speculativeEmbed(files, coEditGraph) {
  // Pre-compute embeddings for likely next files
  return files.map(f => ({ file: f, embedding: [], confidence: 0.5 }));
}

async function astAnalyze(files) {
  const fs = require('fs');
  return files.map(file => {
    try {
      const content = fs.existsSync(file) ? fs.readFileSync(file, 'utf8') : '';
      const lines = content.split('\\n');
      return {
        file,
        language: file.split('.').pop() || 'unknown',
        complexity: Math.min(lines.length / 10, 100),
        functions: extractFunctions(content),
        imports: extractImports(content),
        exports: extractExports(content),
        dependencies: [],
      };
    } catch {
      return { file, language: 'unknown', complexity: 0, functions: [], imports: [], exports: [], dependencies: [] };
    }
  });
}

function extractFunctions(content) {
  const patterns = [
    /function\\s+(\\w+)/g,
    /const\\s+(\\w+)\\s*=\\s*(?:async\\s*)?\\([^)]*\\)\\s*=>/g,
    /(?:async\\s+)?(?:public|private|protected)?\\s*(\\w+)\\s*\\([^)]*\\)\\s*{/g,
  ];
  const funcs = new Set();
  for (const pattern of patterns) {
    let match;
    while ((match = pattern.exec(content)) !== null) {
      if (match[1] && !['if', 'for', 'while', 'switch', 'catch'].includes(match[1])) {
        funcs.add(match[1]);
      }
    }
  }
  return Array.from(funcs);
}

function extractImports(content) {
  const imports = [];
  const patterns = [
    /import\\s+.*?from\\s+['"]([^'"]+)['"]/g,
    /require\\s*\\(['"]([^'"]+)['"]\\)/g,
  ];
  for (const pattern of patterns) {
    let match;
    while ((match = pattern.exec(content)) !== null) {
      imports.push(match[1]);
    }
  }
  return imports;
}

function extractExports(content) {
  const exports = [];
  const patterns = [
    /export\\s+(?:default\\s+)?(?:class|function|const|let|var)\\s+(\\w+)/g,
    /module\\.exports\\s*=\\s*(\\w+)/g,
  ];
  for (const pattern of patterns) {
    let match;
    while ((match = pattern.exec(content)) !== null) {
      exports.push(match[1]);
    }
  }
  return exports;
}

async function securityScan(files, rules) {
  const fs = require('fs');
  const findings = [];
  const securityPatterns = [
    { pattern: /eval\\s*\\(/g, rule: 'no-eval', severity: 'high', message: 'Avoid eval()' },
    { pattern: /innerHTML\\s*=/g, rule: 'no-inner-html', severity: 'medium', message: 'Avoid innerHTML, use textContent' },
    { pattern: /password\\s*=\\s*['"][^'"]+['"]/gi, rule: 'no-hardcoded-secrets', severity: 'critical', message: 'Hardcoded password detected' },
    { pattern: /api[_-]?key\\s*=\\s*['"][^'"]+['"]/gi, rule: 'no-hardcoded-secrets', severity: 'critical', message: 'Hardcoded API key detected' },
    { pattern: /exec\\s*\\(/g, rule: 'no-exec', severity: 'high', message: 'Avoid exec(), use execFile or spawn' },
    { pattern: /\\$\\{.*\\}/g, rule: 'template-injection', severity: 'medium', message: 'Potential template injection' },
  ];

  for (const file of files) {
    try {
      if (!fs.existsSync(file)) continue;
      const content = fs.readFileSync(file, 'utf8');
      const lines = content.split('\\n');

      for (const { pattern, rule, severity, message } of securityPatterns) {
        let match;
        const regex = new RegExp(pattern.source, pattern.flags);
        while ((match = regex.exec(content)) !== null) {
          const lineNum = content.substring(0, match.index).split('\\n').length;
          findings.push({ file, line: lineNum, severity, rule, message });
        }
      }
    } catch {}
  }
  return findings;
}

async function ragRetrieve(query, chunks, topK) {
  // Simple keyword-based retrieval (would use embeddings in production)
  const queryTerms = query.toLowerCase().split(/\\s+/);
  return chunks
    .map(chunk => {
      const content = chunk.content.toLowerCase();
      const matches = queryTerms.filter(term => content.includes(term)).length;
      return { ...chunk, relevance: matches / queryTerms.length };
    })
    .sort((a, b) => b.relevance - a.relevance)
    .slice(0, topK);
}

async function contextRank(context, query) {
  const queryTerms = query.toLowerCase().split(/\\s+/);
  return context
    .map((ctx, i) => {
      const content = ctx.toLowerCase();
      const matches = queryTerms.filter(term => content.includes(term)).length;
      return { index: i, content: ctx, relevance: matches / queryTerms.length };
    })
    .sort((a, b) => b.relevance - a.relevance);
}

async function gitBlame(files) {
  const { execSync } = require('child_process');
  const results = [];
  for (const file of files) {
    try {
      const output = execSync(\`git blame --line-porcelain "\${file}" 2>/dev/null\`, { encoding: 'utf8', maxBuffer: 10 * 1024 * 1024 });
      const lines = [];
      let currentLine = {};
      for (const line of output.split('\\n')) {
        if (line.startsWith('author ')) currentLine.author = line.slice(7);
        else if (line.startsWith('author-time ')) currentLine.date = new Date(parseInt(line.slice(12)) * 1000).toISOString();
        else if (line.match(/^[a-f0-9]{40}/)) currentLine.commit = line.slice(0, 40);
        else if (line.startsWith('\\t')) {
          lines.push({ ...currentLine, line: lines.length + 1 });
          currentLine = {};
        }
      }
      results.push({ file, lines });
    } catch {
      results.push({ file, lines: [] });
    }
  }
  return results;
}

async function gitChurn(files, since) {
  const { execSync } = require('child_process');
  const results = [];
  const sinceArg = since ? \`--since="\${since}"\` : '--since="30 days ago"';

  for (const file of files) {
    try {
      const log = execSync(\`git log \${sinceArg} --format="%H|%an|%aI" --numstat -- "\${file}" 2>/dev/null\`, { encoding: 'utf8' });
      let additions = 0, deletions = 0, commits = 0;
      const authors = new Set();
      let lastModified = '';

      for (const line of log.split('\\n')) {
        if (line.includes('|')) {
          const [commit, author, date] = line.split('|');
          authors.add(author);
          commits++;
          if (!lastModified) lastModified = date;
        } else if (line.match(/^\\d+\\s+\\d+/)) {
          const [add, del] = line.split('\\t');
          additions += parseInt(add) || 0;
          deletions += parseInt(del) || 0;
        }
      }

      results.push({ file, additions, deletions, commits, authors: Array.from(authors), lastModified });
    } catch {
      results.push({ file, additions: 0, deletions: 0, commits: 0, authors: [], lastModified: '' });
    }
  }
  return results;
}

async function complexityAnalyze(files) {
  const fs = require('fs');
  return files.map(file => {
    try {
      const content = fs.existsSync(file) ? fs.readFileSync(file, 'utf8') : '';
      const lines = content.split('\\n');
      const nonEmpty = lines.filter(l => l.trim()).length;
      const branches = (content.match(/\\b(if|else|switch|case|for|while|catch|\\?|&&|\\|\\|)\\b/g) || []).length;
      const functions = (content.match(/function|=>|\\bdef\\b|\\bfn\\b/g) || []).length;

      return {
        file,
        lines: lines.length,
        nonEmptyLines: nonEmpty,
        cyclomaticComplexity: branches + 1,
        functions,
        avgFunctionSize: functions > 0 ? Math.round(nonEmpty / functions) : nonEmpty,
      };
    } catch {
      return { file, lines: 0, nonEmptyLines: 0, cyclomaticComplexity: 1, functions: 0, avgFunctionSize: 0 };
    }
  });
}

async function dependencyGraph(entryPoints) {
  const fs = require('fs');
  const path = require('path');
  const graph = new Map();

  function analyze(file, visited = new Set()) {
    if (visited.has(file)) return;
    visited.add(file);

    try {
      if (!fs.existsSync(file)) return;
      const content = fs.readFileSync(file, 'utf8');
      const deps = [];

      // Extract imports
      const importRegex = /(?:import|require)\\s*\\(?['"]([^'"]+)['"]/g;
      let match;
      while ((match = importRegex.exec(content)) !== null) {
        const dep = match[1];
        if (dep.startsWith('.')) {
          const resolved = path.resolve(path.dirname(file), dep);
          deps.push(resolved);
          analyze(resolved, visited);
        } else {
          deps.push(dep);
        }
      }

      graph.set(file, deps);
    } catch {}
  }

  for (const entry of entryPoints) {
    analyze(entry);
  }

  return Object.fromEntries(graph);
}

async function deduplicate(items, threshold) {
  // Simple Jaccard similarity deduplication
  const unique = [];

  for (const item of items) {
    const tokens = new Set(item.toLowerCase().split(/\\s+/));
    let isDup = false;

    for (const existing of unique) {
      const existingTokens = new Set(existing.toLowerCase().split(/\\s+/));
      const intersection = [...tokens].filter(t => existingTokens.has(t)).length;
      const union = new Set([...tokens, ...existingTokens]).size;
      const similarity = intersection / union;

      if (similarity >= threshold) {
        isDup = true;
        break;
      }
    }

    if (!isDup) unique.push(item);
  }

  return unique;
}
`;
    }
    /**
     * Resolve/reject the pending promise for a completed task and free the
     * worker slot. Results for unknown taskIds (e.g. already timed out) are
     * dropped silently.
     */
    handleWorkerResult(worker, result) {
        this.busyWorkers.delete(worker);
        const pending = this.inFlight.get(result.taskId);
        if (pending) {
            this.inFlight.delete(result.taskId);
            clearTimeout(pending.timeout);
            if (result.success) {
                pending.resolve(result.data);
            }
            else {
                pending.reject(new Error(result.error));
            }
        }
        this.processQueue();
    }
    /**
     * Dispatch waiting tasks to idle workers. Each task is removed from the
     * queue before posting (the original posted taskQueue[0] repeatedly,
     * sending the same task to every idle worker).
     */
    processQueue() {
        while (this.taskQueue.length > 0) {
            const availableWorker = this.workers.find(w => !this.busyWorkers.has(w));
            if (!availableWorker)
                break;
            const entry = this.taskQueue.shift();
            this.busyWorkers.set(availableWorker, entry.task.taskId);
            this.inFlight.set(entry.task.taskId, entry);
            availableWorker.postMessage(entry.task);
        }
    }
    /**
     * Queue a task for execution on a worker.
     * @throws {Error} when the pool is not initialized/enabled.
     * Rejects with 'Task queue full' or 'Task timeout' as applicable.
     */
    async execute(task) {
        if (!this.initialized || !this.config.enabled) {
            throw new Error('Worker pool not initialized');
        }
        const taskId = `${Date.now()}-${Math.random().toString(36).slice(2)}`;
        const taskWithId = { ...task, taskId };
        return new Promise((resolve, reject) => {
            if (this.taskQueue.length >= this.config.maxQueueSize) {
                reject(new Error('Task queue full'));
                return;
            }
            const timeout = setTimeout(() => {
                // The task may still be waiting, or already dispatched.
                const idx = this.taskQueue.findIndex(t => t.task.taskId === taskId);
                if (idx >= 0) {
                    this.taskQueue.splice(idx, 1);
                    reject(new Error('Task timeout'));
                }
                else if (this.inFlight.delete(taskId)) {
                    reject(new Error('Task timeout'));
                }
            }, this.config.taskTimeout);
            this.taskQueue.push({ task: taskWithId, resolve, reject, timeout });
            this.processQueue();
        });
    }
    // =========================================================================
    // Public API - Speculative Pre-computation
    // =========================================================================
    /**
     * Pre-embed files likely to be edited next based on co-edit patterns.
     * Results are memoized in `speculativeCache`.
     * Hook: session-start, post-edit
     */
    async speculativeEmbed(currentFile, coEditGraph) {
        const likelyFiles = coEditGraph.get(currentFile) || [];
        if (likelyFiles.length === 0)
            return [];
        // Check cache first
        const uncached = likelyFiles.filter(f => !this.speculativeCache.has(f));
        if (uncached.length === 0) {
            return likelyFiles.map(f => this.speculativeCache.get(f));
        }
        const results = await this.execute({
            type: 'speculative-embed',
            files: uncached,
            coEditGraph,
        });
        // Update cache
        for (const result of results) {
            this.speculativeCache.set(result.file, result);
        }
        return likelyFiles.map(f => this.speculativeCache.get(f)).filter(Boolean);
    }
    // =========================================================================
    // Public API - Code Analysis
    // =========================================================================
    /**
     * Analyze AST of multiple files in parallel; results memoized in `astCache`.
     * Hook: pre-edit, route
     */
    async analyzeAST(files) {
        // Check cache
        const uncached = files.filter(f => !this.astCache.has(f));
        if (uncached.length === 0) {
            return files.map(f => this.astCache.get(f));
        }
        const results = await this.execute({
            type: 'ast-analyze',
            files: uncached,
        });
        // Update cache
        for (const result of results) {
            this.astCache.set(result.file, result);
        }
        return files.map(f => this.astCache.get(f)).filter(Boolean);
    }
    /**
     * Analyze code complexity for multiple files
     * Hook: post-edit, session-end
     */
    async analyzeComplexity(files) {
        return this.execute({ type: 'complexity-analyze', files });
    }
    /**
     * Build dependency graph from entry points
     * Hook: session-start
     */
    async buildDependencyGraph(entryPoints) {
        return this.execute({ type: 'dependency-graph', entryPoints });
    }
    // =========================================================================
    // Public API - Security
    // =========================================================================
    /**
     * Scan files for security vulnerabilities
     * Hook: pre-command (before commit), post-edit
     */
    async securityScan(files, rules) {
        return this.execute({ type: 'security-scan', files, rules });
    }
    // =========================================================================
    // Public API - RAG & Context
    // =========================================================================
    /**
     * Retrieve relevant context chunks in parallel
     * Hook: suggest-context, recall
     */
    async ragRetrieve(query, chunks, topK = 5) {
        return this.execute({ type: 'rag-retrieve', query, chunks, topK });
    }
    /**
     * Rank context items by relevance to query
     * Hook: suggest-context
     */
    async rankContext(context, query) {
        return this.execute({ type: 'context-rank', context, query });
    }
    /**
     * Deduplicate similar items (Jaccard similarity >= threshold)
     * Hook: remember, suggest-context
     */
    async deduplicate(items, threshold = 0.8) {
        return this.execute({ type: 'deduplicate', items, threshold });
    }
    // =========================================================================
    // Public API - Git Intelligence
    // =========================================================================
    /**
     * Get blame information for files in parallel
     * Hook: pre-edit (for context), coedit
     */
    async gitBlame(files) {
        return this.execute({ type: 'git-blame', files });
    }
    /**
     * Analyze code churn for files
     * Hook: session-start, route
     */
    async gitChurn(files, since) {
        return this.execute({ type: 'git-churn', files, since });
    }
    // =========================================================================
    // Stats & Lifecycle
    // =========================================================================
    /** Snapshot of pool state for diagnostics. */
    getStats() {
        return {
            enabled: this.config.enabled,
            workers: this.workers.length,
            busy: this.busyWorkers.size,
            queued: this.taskQueue.length,
            speculativeCacheSize: this.speculativeCache.size,
            astCacheSize: this.astCache.size,
        };
    }
    /** Drop memoized speculative-embed and AST results. */
    clearCaches() {
        this.speculativeCache.clear();
        this.astCache.clear();
    }
    /**
     * Reject all pending and in-flight tasks, terminate workers, and reset
     * the pool so it can be re-initialized.
     */
    async shutdown() {
        // Clear waiting tasks
        for (const task of this.taskQueue) {
            clearTimeout(task.timeout);
            task.reject(new Error('Worker pool shutting down'));
        }
        this.taskQueue = [];
        // Reject dispatched tasks too (the original left them hanging).
        for (const pending of this.inFlight.values()) {
            clearTimeout(pending.timeout);
            pending.reject(new Error('Worker pool shutting down'));
        }
        this.inFlight.clear();
        // Terminate workers
        await Promise.all(this.workers.map(w => w.terminate()));
        this.workers = [];
        this.busyWorkers.clear();
        this.initialized = false;
    }
}
|
|
655
|
+
exports.ExtendedWorkerPool = ExtendedWorkerPool;
|
|
656
|
+
// ============================================================================
|
|
657
|
+
// Singleton
|
|
658
|
+
// ============================================================================
|
|
659
|
+
// Process-wide singleton handle for the shared pool.
let instance = null;
/**
 * Return the shared ExtendedWorkerPool, creating it on first call.
 * Note: `config` is only honored by the call that actually creates the pool.
 */
function getExtendedWorkerPool(config) {
    instance ??= new ExtendedWorkerPool(config);
    return instance;
}
|
|
666
|
+
/**
 * Convenience wrapper: fetch the shared pool singleton and ensure its
 * workers are started before returning it.
 */
async function initExtendedWorkerPool(config) {
    const sharedPool = getExtendedWorkerPool(config);
    await sharedPool.init();
    return sharedPool;
}
|
|
671
|
+
exports.default = ExtendedWorkerPool;
|
package/package.json
CHANGED
package/ruvector.db
CHANGED
|
Binary file
|