ruvector 0.1.61 → 0.1.63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +3 -0
- package/bin/mcp-server.js +3 -0
- package/dist/core/attention-fallbacks.d.ts +100 -0
- package/dist/core/attention-fallbacks.d.ts.map +1 -1
- package/dist/core/attention-fallbacks.js +192 -1
- package/dist/core/index.d.ts +4 -0
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +7 -1
- package/dist/core/intelligence-engine.d.ts +21 -0
- package/dist/core/intelligence-engine.d.ts.map +1 -1
- package/dist/core/intelligence-engine.js +49 -0
- package/dist/core/onnx-embedder.d.ts +6 -1
- package/dist/core/onnx-embedder.d.ts.map +1 -1
- package/dist/core/onnx-embedder.js +22 -2
- package/dist/core/parallel-intelligence.d.ts +109 -0
- package/dist/core/parallel-intelligence.d.ts.map +1 -0
- package/dist/core/parallel-intelligence.js +340 -0
- package/dist/core/parallel-workers.d.ts +183 -0
- package/dist/core/parallel-workers.d.ts.map +1 -0
- package/dist/core/parallel-workers.js +671 -0
- package/package.json +1 -1
- package/ruvector.db +0 -0
|
@@ -201,8 +201,28 @@ async function initOnnxEmbedder(config = {}) {
|
|
|
201
201
|
detectSimd();
|
|
202
202
|
console.error(`ONNX embedder ready: ${embedder.dimension()}d, SIMD: ${simdAvailable}`);
|
|
203
203
|
isInitialized = true;
|
|
204
|
-
//
|
|
205
|
-
|
|
204
|
+
// Determine if we should use parallel workers
|
|
205
|
+
// - true: always enable
|
|
206
|
+
// - false: never enable
|
|
207
|
+
// - 'auto'/undefined: enable for long-running processes (MCP, servers), skip for CLI
|
|
208
|
+
let shouldTryParallel = false;
|
|
209
|
+
if (config.enableParallel === true) {
|
|
210
|
+
shouldTryParallel = true;
|
|
211
|
+
}
|
|
212
|
+
else if (config.enableParallel === false) {
|
|
213
|
+
shouldTryParallel = false;
|
|
214
|
+
}
|
|
215
|
+
else {
|
|
216
|
+
// Auto-detect: check if running as CLI hook or long-running process
|
|
217
|
+
const isCLI = process.argv[1]?.includes('cli.js') ||
|
|
218
|
+
process.argv[1]?.includes('bin/ruvector') ||
|
|
219
|
+
process.env.RUVECTOR_CLI === '1';
|
|
220
|
+
const isMCP = process.env.MCP_SERVER === '1' ||
|
|
221
|
+
process.argv.some(a => a.includes('mcp'));
|
|
222
|
+
const forceParallel = process.env.RUVECTOR_PARALLEL === '1';
|
|
223
|
+
// Enable parallel for MCP/servers or if explicitly requested, skip for CLI
|
|
224
|
+
shouldTryParallel = forceParallel || (isMCP && !isCLI);
|
|
225
|
+
}
|
|
206
226
|
if (shouldTryParallel) {
|
|
207
227
|
await tryInitParallel(config);
|
|
208
228
|
}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Parallel Intelligence - Worker-based acceleration for IntelligenceEngine
|
|
3
|
+
*
|
|
4
|
+
* Provides parallel processing for:
|
|
5
|
+
* - Q-learning batch updates (3-4x faster)
|
|
6
|
+
* - Multi-file pattern matching
|
|
7
|
+
* - Background memory indexing
|
|
8
|
+
* - Parallel similarity search
|
|
9
|
+
* - Multi-file code analysis
|
|
10
|
+
* - Parallel git commit analysis
|
|
11
|
+
*
|
|
12
|
+
* Uses worker_threads for CPU-bound operations, keeping hooks non-blocking.
|
|
13
|
+
*/
|
|
14
|
+
export interface ParallelConfig {
|
|
15
|
+
/** Number of worker threads (default: CPU cores - 1) */
|
|
16
|
+
numWorkers?: number;
|
|
17
|
+
/** Enable parallel processing (default: true for MCP, false for CLI) */
|
|
18
|
+
enabled?: boolean;
|
|
19
|
+
/** Minimum batch size to use parallel (default: 4) */
|
|
20
|
+
batchThreshold?: number;
|
|
21
|
+
}
|
|
22
|
+
export interface BatchEpisode {
|
|
23
|
+
state: string;
|
|
24
|
+
action: string;
|
|
25
|
+
reward: number;
|
|
26
|
+
nextState: string;
|
|
27
|
+
done: boolean;
|
|
28
|
+
metadata?: Record<string, any>;
|
|
29
|
+
}
|
|
30
|
+
export interface PatternMatchResult {
|
|
31
|
+
file: string;
|
|
32
|
+
patterns: Array<{
|
|
33
|
+
pattern: string;
|
|
34
|
+
confidence: number;
|
|
35
|
+
}>;
|
|
36
|
+
}
|
|
37
|
+
export interface CoEditAnalysis {
|
|
38
|
+
file1: string;
|
|
39
|
+
file2: string;
|
|
40
|
+
commits: string[];
|
|
41
|
+
strength: number;
|
|
42
|
+
}
|
|
43
|
+
export declare class ParallelIntelligence {
|
|
44
|
+
private workers;
|
|
45
|
+
private taskQueue;
|
|
46
|
+
private busyWorkers;
|
|
47
|
+
private config;
|
|
48
|
+
private initialized;
|
|
49
|
+
constructor(config?: ParallelConfig);
|
|
50
|
+
/**
|
|
51
|
+
* Initialize worker pool
|
|
52
|
+
*/
|
|
53
|
+
init(): Promise<void>;
|
|
54
|
+
private processQueue;
|
|
55
|
+
/**
|
|
56
|
+
* Execute task in worker pool
|
|
57
|
+
*/
|
|
58
|
+
private executeInWorker;
|
|
59
|
+
/**
|
|
60
|
+
* Batch Q-learning episode recording (3-4x faster)
|
|
61
|
+
*/
|
|
62
|
+
recordEpisodesBatch(episodes: BatchEpisode[]): Promise<void>;
|
|
63
|
+
/**
|
|
64
|
+
* Multi-file pattern matching (parallel pretrain)
|
|
65
|
+
*/
|
|
66
|
+
matchPatternsParallel(files: string[]): Promise<PatternMatchResult[]>;
|
|
67
|
+
/**
|
|
68
|
+
* Background memory indexing (non-blocking)
|
|
69
|
+
*/
|
|
70
|
+
indexMemoriesBackground(memories: Array<{
|
|
71
|
+
content: string;
|
|
72
|
+
type: string;
|
|
73
|
+
}>): Promise<void>;
|
|
74
|
+
/**
|
|
75
|
+
* Parallel similarity search with sharding
|
|
76
|
+
*/
|
|
77
|
+
searchParallel(query: string, topK?: number): Promise<Array<{
|
|
78
|
+
content: string;
|
|
79
|
+
score: number;
|
|
80
|
+
}>>;
|
|
81
|
+
/**
|
|
82
|
+
* Multi-file AST analysis for routing
|
|
83
|
+
*/
|
|
84
|
+
analyzeFilesParallel(files: string[]): Promise<Map<string, {
|
|
85
|
+
agent: string;
|
|
86
|
+
confidence: number;
|
|
87
|
+
}>>;
|
|
88
|
+
/**
|
|
89
|
+
* Parallel git commit analysis for co-edit detection
|
|
90
|
+
*/
|
|
91
|
+
analyzeCommitsParallel(commits: string[]): Promise<CoEditAnalysis[]>;
|
|
92
|
+
/**
|
|
93
|
+
* Get worker pool stats
|
|
94
|
+
*/
|
|
95
|
+
getStats(): {
|
|
96
|
+
enabled: boolean;
|
|
97
|
+
workers: number;
|
|
98
|
+
busy: number;
|
|
99
|
+
queued: number;
|
|
100
|
+
};
|
|
101
|
+
/**
|
|
102
|
+
* Shutdown worker pool
|
|
103
|
+
*/
|
|
104
|
+
shutdown(): Promise<void>;
|
|
105
|
+
}
|
|
106
|
+
export declare function getParallelIntelligence(config?: ParallelConfig): ParallelIntelligence;
|
|
107
|
+
export declare function initParallelIntelligence(config?: ParallelConfig): Promise<ParallelIntelligence>;
|
|
108
|
+
export default ParallelIntelligence;
|
|
109
|
+
//# sourceMappingURL=parallel-intelligence.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"parallel-intelligence.d.ts","sourceRoot":"","sources":["../../src/core/parallel-intelligence.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAUH,MAAM,WAAW,cAAc;IAC7B,wDAAwD;IACxD,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,wEAAwE;IACxE,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,sDAAsD;IACtD,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED,MAAM,WAAW,YAAY;IAC3B,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,IAAI,EAAE,OAAO,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAChC;AAED,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,KAAK,CAAC;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CAC1D;AAED,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;CAClB;AAMD,qBAAa,oBAAoB;IAC/B,OAAO,CAAC,OAAO,CAAgB;IAC/B,OAAO,CAAC,SAAS,CAAiE;IAClF,OAAO,CAAC,WAAW,CAA0B;IAC7C,OAAO,CAAC,MAAM,CAA2B;IACzC,OAAO,CAAC,WAAW,CAAS;gBAEhB,MAAM,GAAE,cAAmB;IAWvC;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAyB3B,OAAO,CAAC,YAAY;IAWpB;;OAEG;YACW,eAAe;IAiC7B;;OAEG;IACG,mBAAmB,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAkBlE;;OAEG;IACG,qBAAqB,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,kBAAkB,EAAE,CAAC;IAkB3E;;OAEG;IACG,uBAAuB,CAAC,QAAQ,EAAE,KAAK,CAAC;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAOhG;;OAEG;IACG,cAAc,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAE,MAAU,GAAG,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAsBzG;;OAEG;IACG,oBAAoB,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAqBxG;;OAEG;IACG,sBAAsB,CAAC,OAAO,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC;IAkB1E;;OAEG;IACH,QAAQ,IAAI;QAAE,OAAO,EAAE,OAAO,CAAC;QAAC,OAAO,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAA;KAAE;IAS/E;;OAEG;IACG,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAOhC;AAkGD,wBAAgB,uBAAuB,CAAC,MAAM,CAAC,EAAE,cAAc,GAAG,oBAAoB,CAKrF;AAED,wBAAsB,wBAAwB,CAAC,MAAM,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAIrG;AAED,eAAe,oBAAoB,CAAC"}
|
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Parallel Intelligence - Worker-based acceleration for IntelligenceEngine
|
|
4
|
+
*
|
|
5
|
+
* Provides parallel processing for:
|
|
6
|
+
* - Q-learning batch updates (3-4x faster)
|
|
7
|
+
* - Multi-file pattern matching
|
|
8
|
+
* - Background memory indexing
|
|
9
|
+
* - Parallel similarity search
|
|
10
|
+
* - Multi-file code analysis
|
|
11
|
+
* - Parallel git commit analysis
|
|
12
|
+
*
|
|
13
|
+
* Uses worker_threads for CPU-bound operations, keeping hooks non-blocking.
|
|
14
|
+
*/
|
|
15
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
16
|
+
if (k2 === undefined) k2 = k;
|
|
17
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
18
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
19
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
20
|
+
}
|
|
21
|
+
Object.defineProperty(o, k2, desc);
|
|
22
|
+
}) : (function(o, m, k, k2) {
|
|
23
|
+
if (k2 === undefined) k2 = k;
|
|
24
|
+
o[k2] = m[k];
|
|
25
|
+
}));
|
|
26
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
27
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
28
|
+
}) : function(o, v) {
|
|
29
|
+
o["default"] = v;
|
|
30
|
+
});
|
|
31
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
32
|
+
var ownKeys = function(o) {
|
|
33
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
34
|
+
var ar = [];
|
|
35
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
36
|
+
return ar;
|
|
37
|
+
};
|
|
38
|
+
return ownKeys(o);
|
|
39
|
+
};
|
|
40
|
+
return function (mod) {
|
|
41
|
+
if (mod && mod.__esModule) return mod;
|
|
42
|
+
var result = {};
|
|
43
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
44
|
+
__setModuleDefault(result, mod);
|
|
45
|
+
return result;
|
|
46
|
+
};
|
|
47
|
+
})();
|
|
48
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
49
|
+
exports.ParallelIntelligence = void 0;
|
|
50
|
+
exports.getParallelIntelligence = getParallelIntelligence;
|
|
51
|
+
exports.initParallelIntelligence = initParallelIntelligence;
|
|
52
|
+
const worker_threads_1 = require("worker_threads");
|
|
53
|
+
const os = __importStar(require("os"));
|
|
54
|
+
// ============================================================================
|
|
55
|
+
// Worker Pool Manager
|
|
56
|
+
// ============================================================================
|
|
57
|
+
class ParallelIntelligence {
|
|
58
|
+
constructor(config = {}) {
|
|
59
|
+
this.workers = [];
|
|
60
|
+
this.taskQueue = [];
|
|
61
|
+
this.busyWorkers = new Set();
|
|
62
|
+
this.initialized = false;
|
|
63
|
+
const isCLI = process.env.RUVECTOR_CLI === '1';
|
|
64
|
+
const isMCP = process.env.MCP_SERVER === '1';
|
|
65
|
+
this.config = {
|
|
66
|
+
numWorkers: config.numWorkers ?? Math.max(1, os.cpus().length - 1),
|
|
67
|
+
enabled: config.enabled ?? (isMCP || (!isCLI && process.env.RUVECTOR_PARALLEL === '1')),
|
|
68
|
+
batchThreshold: config.batchThreshold ?? 4,
|
|
69
|
+
};
|
|
70
|
+
}
|
|
71
|
+
/**
|
|
72
|
+
* Initialize worker pool
|
|
73
|
+
*/
|
|
74
|
+
async init() {
|
|
75
|
+
if (this.initialized || !this.config.enabled)
|
|
76
|
+
return;
|
|
77
|
+
for (let i = 0; i < this.config.numWorkers; i++) {
|
|
78
|
+
const worker = new worker_threads_1.Worker(__filename, {
|
|
79
|
+
workerData: { workerId: i },
|
|
80
|
+
});
|
|
81
|
+
worker.on('message', (result) => {
|
|
82
|
+
this.busyWorkers.delete(worker);
|
|
83
|
+
this.processQueue();
|
|
84
|
+
});
|
|
85
|
+
worker.on('error', (err) => {
|
|
86
|
+
console.error(`Worker ${i} error:`, err);
|
|
87
|
+
this.busyWorkers.delete(worker);
|
|
88
|
+
});
|
|
89
|
+
this.workers.push(worker);
|
|
90
|
+
}
|
|
91
|
+
this.initialized = true;
|
|
92
|
+
console.error(`ParallelIntelligence: ${this.config.numWorkers} workers ready`);
|
|
93
|
+
}
|
|
94
|
+
processQueue() {
|
|
95
|
+
while (this.taskQueue.length > 0 && this.busyWorkers.size < this.workers.length) {
|
|
96
|
+
const availableWorker = this.workers.find(w => !this.busyWorkers.has(w));
|
|
97
|
+
if (!availableWorker)
|
|
98
|
+
break;
|
|
99
|
+
const task = this.taskQueue.shift();
|
|
100
|
+
this.busyWorkers.add(availableWorker);
|
|
101
|
+
availableWorker.postMessage(task.task);
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
/**
|
|
105
|
+
* Execute task in worker pool
|
|
106
|
+
*/
|
|
107
|
+
async executeInWorker(task) {
|
|
108
|
+
if (!this.initialized || !this.config.enabled) {
|
|
109
|
+
throw new Error('ParallelIntelligence not initialized');
|
|
110
|
+
}
|
|
111
|
+
return new Promise((resolve, reject) => {
|
|
112
|
+
const availableWorker = this.workers.find(w => !this.busyWorkers.has(w));
|
|
113
|
+
if (availableWorker) {
|
|
114
|
+
this.busyWorkers.add(availableWorker);
|
|
115
|
+
const handler = (result) => {
|
|
116
|
+
this.busyWorkers.delete(availableWorker);
|
|
117
|
+
availableWorker.off('message', handler);
|
|
118
|
+
if (result.error) {
|
|
119
|
+
reject(new Error(result.error));
|
|
120
|
+
}
|
|
121
|
+
else {
|
|
122
|
+
resolve(result.data);
|
|
123
|
+
}
|
|
124
|
+
};
|
|
125
|
+
availableWorker.on('message', handler);
|
|
126
|
+
availableWorker.postMessage(task);
|
|
127
|
+
}
|
|
128
|
+
else {
|
|
129
|
+
this.taskQueue.push({ task, resolve, reject });
|
|
130
|
+
}
|
|
131
|
+
});
|
|
132
|
+
}
|
|
133
|
+
// =========================================================================
|
|
134
|
+
// Parallel Operations
|
|
135
|
+
// =========================================================================
|
|
136
|
+
/**
|
|
137
|
+
* Batch Q-learning episode recording (3-4x faster)
|
|
138
|
+
*/
|
|
139
|
+
async recordEpisodesBatch(episodes) {
|
|
140
|
+
if (episodes.length < this.config.batchThreshold || !this.config.enabled) {
|
|
141
|
+
// Fall back to sequential
|
|
142
|
+
return;
|
|
143
|
+
}
|
|
144
|
+
// Split into chunks for workers
|
|
145
|
+
const chunkSize = Math.ceil(episodes.length / this.config.numWorkers);
|
|
146
|
+
const chunks = [];
|
|
147
|
+
for (let i = 0; i < episodes.length; i += chunkSize) {
|
|
148
|
+
chunks.push(episodes.slice(i, i + chunkSize));
|
|
149
|
+
}
|
|
150
|
+
await Promise.all(chunks.map(chunk => this.executeInWorker({ type: 'recordEpisodes', episodes: chunk })));
|
|
151
|
+
}
|
|
152
|
+
/**
|
|
153
|
+
* Multi-file pattern matching (parallel pretrain)
|
|
154
|
+
*/
|
|
155
|
+
async matchPatternsParallel(files) {
|
|
156
|
+
if (files.length < this.config.batchThreshold || !this.config.enabled) {
|
|
157
|
+
return [];
|
|
158
|
+
}
|
|
159
|
+
const chunkSize = Math.ceil(files.length / this.config.numWorkers);
|
|
160
|
+
const chunks = [];
|
|
161
|
+
for (let i = 0; i < files.length; i += chunkSize) {
|
|
162
|
+
chunks.push(files.slice(i, i + chunkSize));
|
|
163
|
+
}
|
|
164
|
+
const results = await Promise.all(chunks.map(chunk => this.executeInWorker({ type: 'matchPatterns', files: chunk })));
|
|
165
|
+
return results.flat();
|
|
166
|
+
}
|
|
167
|
+
/**
|
|
168
|
+
* Background memory indexing (non-blocking)
|
|
169
|
+
*/
|
|
170
|
+
async indexMemoriesBackground(memories) {
|
|
171
|
+
if (memories.length === 0 || !this.config.enabled)
|
|
172
|
+
return;
|
|
173
|
+
// Fire and forget - non-blocking
|
|
174
|
+
this.executeInWorker({ type: 'indexMemories', memories }).catch(() => { });
|
|
175
|
+
}
|
|
176
|
+
/**
|
|
177
|
+
* Parallel similarity search with sharding
|
|
178
|
+
*/
|
|
179
|
+
async searchParallel(query, topK = 5) {
|
|
180
|
+
if (!this.config.enabled)
|
|
181
|
+
return [];
|
|
182
|
+
// Each worker searches its shard
|
|
183
|
+
const shardResults = await Promise.all(this.workers.map((_, i) => this.executeInWorker({
|
|
184
|
+
type: 'search',
|
|
185
|
+
query,
|
|
186
|
+
topK,
|
|
187
|
+
shardId: i,
|
|
188
|
+
})));
|
|
189
|
+
// Merge and sort results
|
|
190
|
+
return shardResults
|
|
191
|
+
.flat()
|
|
192
|
+
.sort((a, b) => b.score - a.score)
|
|
193
|
+
.slice(0, topK);
|
|
194
|
+
}
|
|
195
|
+
/**
|
|
196
|
+
* Multi-file AST analysis for routing
|
|
197
|
+
*/
|
|
198
|
+
async analyzeFilesParallel(files) {
|
|
199
|
+
if (files.length < this.config.batchThreshold || !this.config.enabled) {
|
|
200
|
+
return new Map();
|
|
201
|
+
}
|
|
202
|
+
const chunkSize = Math.ceil(files.length / this.config.numWorkers);
|
|
203
|
+
const chunks = [];
|
|
204
|
+
for (let i = 0; i < files.length; i += chunkSize) {
|
|
205
|
+
chunks.push(files.slice(i, i + chunkSize));
|
|
206
|
+
}
|
|
207
|
+
const results = await Promise.all(chunks.map(chunk => this.executeInWorker({
|
|
208
|
+
type: 'analyzeFiles',
|
|
209
|
+
files: chunk,
|
|
210
|
+
})));
|
|
211
|
+
return new Map(results.flat());
|
|
212
|
+
}
|
|
213
|
+
/**
|
|
214
|
+
* Parallel git commit analysis for co-edit detection
|
|
215
|
+
*/
|
|
216
|
+
async analyzeCommitsParallel(commits) {
|
|
217
|
+
if (commits.length < this.config.batchThreshold || !this.config.enabled) {
|
|
218
|
+
return [];
|
|
219
|
+
}
|
|
220
|
+
const chunkSize = Math.ceil(commits.length / this.config.numWorkers);
|
|
221
|
+
const chunks = [];
|
|
222
|
+
for (let i = 0; i < commits.length; i += chunkSize) {
|
|
223
|
+
chunks.push(commits.slice(i, i + chunkSize));
|
|
224
|
+
}
|
|
225
|
+
const results = await Promise.all(chunks.map(chunk => this.executeInWorker({ type: 'analyzeCommits', commits: chunk })));
|
|
226
|
+
return results.flat();
|
|
227
|
+
}
|
|
228
|
+
/**
|
|
229
|
+
* Get worker pool stats
|
|
230
|
+
*/
|
|
231
|
+
getStats() {
|
|
232
|
+
return {
|
|
233
|
+
enabled: this.config.enabled,
|
|
234
|
+
workers: this.workers.length,
|
|
235
|
+
busy: this.busyWorkers.size,
|
|
236
|
+
queued: this.taskQueue.length,
|
|
237
|
+
};
|
|
238
|
+
}
|
|
239
|
+
/**
|
|
240
|
+
* Shutdown worker pool
|
|
241
|
+
*/
|
|
242
|
+
async shutdown() {
|
|
243
|
+
await Promise.all(this.workers.map(w => w.terminate()));
|
|
244
|
+
this.workers = [];
|
|
245
|
+
this.busyWorkers.clear();
|
|
246
|
+
this.taskQueue = [];
|
|
247
|
+
this.initialized = false;
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
exports.ParallelIntelligence = ParallelIntelligence;
|
|
251
|
+
// ============================================================================
|
|
252
|
+
// Worker Thread Code
|
|
253
|
+
// ============================================================================
|
|
254
|
+
if (!worker_threads_1.isMainThread && worker_threads_1.parentPort) {
|
|
255
|
+
// This code runs in worker threads
|
|
256
|
+
const { workerId } = worker_threads_1.workerData;
|
|
257
|
+
worker_threads_1.parentPort.on('message', async (task) => {
|
|
258
|
+
try {
|
|
259
|
+
let result;
|
|
260
|
+
switch (task.type) {
|
|
261
|
+
case 'recordEpisodes':
|
|
262
|
+
// Process episode batch
|
|
263
|
+
result = await processEpisodes(task.episodes);
|
|
264
|
+
break;
|
|
265
|
+
case 'matchPatterns':
|
|
266
|
+
// Match patterns in files
|
|
267
|
+
result = await matchPatterns(task.files);
|
|
268
|
+
break;
|
|
269
|
+
case 'indexMemories':
|
|
270
|
+
// Index memories
|
|
271
|
+
result = await indexMemories(task.memories);
|
|
272
|
+
break;
|
|
273
|
+
case 'search':
|
|
274
|
+
// Search shard
|
|
275
|
+
result = await searchShard(task.query, task.topK, task.shardId);
|
|
276
|
+
break;
|
|
277
|
+
case 'analyzeFiles':
|
|
278
|
+
// Analyze file ASTs
|
|
279
|
+
result = await analyzeFiles(task.files);
|
|
280
|
+
break;
|
|
281
|
+
case 'analyzeCommits':
|
|
282
|
+
// Analyze git commits
|
|
283
|
+
result = await analyzeCommits(task.commits);
|
|
284
|
+
break;
|
|
285
|
+
default:
|
|
286
|
+
throw new Error(`Unknown task type: ${task.type}`);
|
|
287
|
+
}
|
|
288
|
+
worker_threads_1.parentPort.postMessage({ data: result });
|
|
289
|
+
}
|
|
290
|
+
catch (error) {
|
|
291
|
+
worker_threads_1.parentPort.postMessage({ error: error.message });
|
|
292
|
+
}
|
|
293
|
+
});
|
|
294
|
+
// Worker task implementations
|
|
295
|
+
async function processEpisodes(episodes) {
|
|
296
|
+
// Embed and process episodes
|
|
297
|
+
// In a real implementation, this would use the embedder and update Q-values
|
|
298
|
+
return episodes.length;
|
|
299
|
+
}
|
|
300
|
+
async function matchPatterns(files) {
|
|
301
|
+
// Match patterns in files
|
|
302
|
+
// Would read files and extract patterns
|
|
303
|
+
return files.map(file => ({
|
|
304
|
+
file,
|
|
305
|
+
patterns: [],
|
|
306
|
+
}));
|
|
307
|
+
}
|
|
308
|
+
async function indexMemories(memories) {
|
|
309
|
+
// Index memories in background
|
|
310
|
+
return memories.length;
|
|
311
|
+
}
|
|
312
|
+
async function searchShard(query, topK, shardId) {
|
|
313
|
+
// Search this worker's shard
|
|
314
|
+
return [];
|
|
315
|
+
}
|
|
316
|
+
async function analyzeFiles(files) {
|
|
317
|
+
// Analyze file ASTs
|
|
318
|
+
return files.map(f => [f, { agent: 'coder', confidence: 0.5 }]);
|
|
319
|
+
}
|
|
320
|
+
async function analyzeCommits(commits) {
|
|
321
|
+
// Analyze git commits for co-edit patterns
|
|
322
|
+
return [];
|
|
323
|
+
}
|
|
324
|
+
}
|
|
325
|
+
// ============================================================================
|
|
326
|
+
// Singleton for easy access
|
|
327
|
+
// ============================================================================
|
|
328
|
+
let instance = null;
|
|
329
|
+
function getParallelIntelligence(config) {
|
|
330
|
+
if (!instance) {
|
|
331
|
+
instance = new ParallelIntelligence(config);
|
|
332
|
+
}
|
|
333
|
+
return instance;
|
|
334
|
+
}
|
|
335
|
+
async function initParallelIntelligence(config) {
|
|
336
|
+
const pi = getParallelIntelligence(config);
|
|
337
|
+
await pi.init();
|
|
338
|
+
return pi;
|
|
339
|
+
}
|
|
340
|
+
exports.default = ParallelIntelligence;
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Parallel Workers - Extended worker capabilities for RuVector hooks
|
|
3
|
+
*
|
|
4
|
+
* Provides parallel processing for advanced operations:
|
|
5
|
+
*
|
|
6
|
+
* 1. SPECULATIVE PRE-COMPUTATION
|
|
7
|
+
* - Pre-embed likely next files based on co-edit patterns
|
|
8
|
+
* - Warm model cache before operations
|
|
9
|
+
* - Predictive route caching
|
|
10
|
+
*
|
|
11
|
+
* 2. REAL-TIME CODE ANALYSIS
|
|
12
|
+
* - Multi-file AST parsing with tree-sitter
|
|
13
|
+
* - Cross-file type inference
|
|
14
|
+
* - Live complexity metrics
|
|
15
|
+
* - Dependency graph updates
|
|
16
|
+
*
|
|
17
|
+
* 3. ADVANCED LEARNING
|
|
18
|
+
* - Distributed trajectory replay
|
|
19
|
+
* - Parallel SONA micro-LoRA updates
|
|
20
|
+
* - Background EWC consolidation
|
|
21
|
+
* - Online pattern clustering
|
|
22
|
+
*
|
|
23
|
+
* 4. INTELLIGENT RETRIEVAL
|
|
24
|
+
* - Parallel RAG chunking and retrieval
|
|
25
|
+
* - Sharded similarity search
|
|
26
|
+
* - Context relevance ranking
|
|
27
|
+
* - Semantic deduplication
|
|
28
|
+
*
|
|
29
|
+
* 5. SECURITY & QUALITY
|
|
30
|
+
* - Parallel SAST scanning
|
|
31
|
+
* - Multi-rule linting
|
|
32
|
+
* - Vulnerability detection
|
|
33
|
+
* - Code smell analysis
|
|
34
|
+
*
|
|
35
|
+
* 6. GIT INTELLIGENCE
|
|
36
|
+
* - Parallel blame analysis
|
|
37
|
+
* - Branch comparison
|
|
38
|
+
* - Merge conflict prediction
|
|
39
|
+
* - Code churn metrics
|
|
40
|
+
*/
|
|
41
|
+
export interface WorkerPoolConfig {
|
|
42
|
+
numWorkers?: number;
|
|
43
|
+
enabled?: boolean;
|
|
44
|
+
taskTimeout?: number;
|
|
45
|
+
maxQueueSize?: number;
|
|
46
|
+
}
|
|
47
|
+
export interface SpeculativeEmbedding {
|
|
48
|
+
file: string;
|
|
49
|
+
embedding: number[];
|
|
50
|
+
confidence: number;
|
|
51
|
+
timestamp: number;
|
|
52
|
+
}
|
|
53
|
+
export interface ASTAnalysis {
|
|
54
|
+
file: string;
|
|
55
|
+
language: string;
|
|
56
|
+
complexity: number;
|
|
57
|
+
functions: string[];
|
|
58
|
+
imports: string[];
|
|
59
|
+
exports: string[];
|
|
60
|
+
dependencies: string[];
|
|
61
|
+
}
|
|
62
|
+
export interface SecurityFinding {
|
|
63
|
+
file: string;
|
|
64
|
+
line: number;
|
|
65
|
+
severity: 'low' | 'medium' | 'high' | 'critical';
|
|
66
|
+
rule: string;
|
|
67
|
+
message: string;
|
|
68
|
+
suggestion?: string;
|
|
69
|
+
}
|
|
70
|
+
export interface ContextChunk {
|
|
71
|
+
content: string;
|
|
72
|
+
source: string;
|
|
73
|
+
relevance: number;
|
|
74
|
+
embedding?: number[];
|
|
75
|
+
}
|
|
76
|
+
export interface GitBlame {
|
|
77
|
+
file: string;
|
|
78
|
+
lines: Array<{
|
|
79
|
+
line: number;
|
|
80
|
+
author: string;
|
|
81
|
+
date: string;
|
|
82
|
+
commit: string;
|
|
83
|
+
}>;
|
|
84
|
+
}
|
|
85
|
+
export interface CodeChurn {
|
|
86
|
+
file: string;
|
|
87
|
+
additions: number;
|
|
88
|
+
deletions: number;
|
|
89
|
+
commits: number;
|
|
90
|
+
authors: string[];
|
|
91
|
+
lastModified: string;
|
|
92
|
+
}
|
|
93
|
+
export declare class ExtendedWorkerPool {
|
|
94
|
+
private workers;
|
|
95
|
+
private taskQueue;
|
|
96
|
+
private busyWorkers;
|
|
97
|
+
private config;
|
|
98
|
+
private initialized;
|
|
99
|
+
private speculativeCache;
|
|
100
|
+
private astCache;
|
|
101
|
+
constructor(config?: WorkerPoolConfig);
|
|
102
|
+
init(): Promise<void>;
|
|
103
|
+
private getWorkerCode;
|
|
104
|
+
private getWorkerHandlers;
|
|
105
|
+
private handleWorkerResult;
|
|
106
|
+
private processQueue;
|
|
107
|
+
private execute;
|
|
108
|
+
/**
|
|
109
|
+
* Pre-embed files likely to be edited next based on co-edit patterns
|
|
110
|
+
* Hook: session-start, post-edit
|
|
111
|
+
*/
|
|
112
|
+
speculativeEmbed(currentFile: string, coEditGraph: Map<string, string[]>): Promise<SpeculativeEmbedding[]>;
|
|
113
|
+
/**
|
|
114
|
+
* Analyze AST of multiple files in parallel
|
|
115
|
+
* Hook: pre-edit, route
|
|
116
|
+
*/
|
|
117
|
+
analyzeAST(files: string[]): Promise<ASTAnalysis[]>;
|
|
118
|
+
/**
|
|
119
|
+
* Analyze code complexity for multiple files
|
|
120
|
+
* Hook: post-edit, session-end
|
|
121
|
+
*/
|
|
122
|
+
analyzeComplexity(files: string[]): Promise<Array<{
|
|
123
|
+
file: string;
|
|
124
|
+
lines: number;
|
|
125
|
+
nonEmptyLines: number;
|
|
126
|
+
cyclomaticComplexity: number;
|
|
127
|
+
functions: number;
|
|
128
|
+
avgFunctionSize: number;
|
|
129
|
+
}>>;
|
|
130
|
+
/**
|
|
131
|
+
* Build dependency graph from entry points
|
|
132
|
+
* Hook: session-start
|
|
133
|
+
*/
|
|
134
|
+
buildDependencyGraph(entryPoints: string[]): Promise<Record<string, string[]>>;
|
|
135
|
+
/**
|
|
136
|
+
* Scan files for security vulnerabilities
|
|
137
|
+
* Hook: pre-command (before commit), post-edit
|
|
138
|
+
*/
|
|
139
|
+
securityScan(files: string[], rules?: string[]): Promise<SecurityFinding[]>;
|
|
140
|
+
/**
|
|
141
|
+
* Retrieve relevant context chunks in parallel
|
|
142
|
+
* Hook: suggest-context, recall
|
|
143
|
+
*/
|
|
144
|
+
ragRetrieve(query: string, chunks: ContextChunk[], topK?: number): Promise<ContextChunk[]>;
|
|
145
|
+
/**
|
|
146
|
+
* Rank context items by relevance to query
|
|
147
|
+
* Hook: suggest-context
|
|
148
|
+
*/
|
|
149
|
+
rankContext(context: string[], query: string): Promise<Array<{
|
|
150
|
+
index: number;
|
|
151
|
+
content: string;
|
|
152
|
+
relevance: number;
|
|
153
|
+
}>>;
|
|
154
|
+
/**
|
|
155
|
+
* Deduplicate similar items
|
|
156
|
+
* Hook: remember, suggest-context
|
|
157
|
+
*/
|
|
158
|
+
deduplicate(items: string[], threshold?: number): Promise<string[]>;
|
|
159
|
+
/**
|
|
160
|
+
* Get blame information for files in parallel
|
|
161
|
+
* Hook: pre-edit (for context), coedit
|
|
162
|
+
*/
|
|
163
|
+
gitBlame(files: string[]): Promise<GitBlame[]>;
|
|
164
|
+
/**
|
|
165
|
+
* Analyze code churn for files
|
|
166
|
+
* Hook: session-start, route
|
|
167
|
+
*/
|
|
168
|
+
gitChurn(files: string[], since?: string): Promise<CodeChurn[]>;
|
|
169
|
+
getStats(): {
|
|
170
|
+
enabled: boolean;
|
|
171
|
+
workers: number;
|
|
172
|
+
busy: number;
|
|
173
|
+
queued: number;
|
|
174
|
+
speculativeCacheSize: number;
|
|
175
|
+
astCacheSize: number;
|
|
176
|
+
};
|
|
177
|
+
clearCaches(): void;
|
|
178
|
+
shutdown(): Promise<void>;
|
|
179
|
+
}
|
|
180
|
+
export declare function getExtendedWorkerPool(config?: WorkerPoolConfig): ExtendedWorkerPool;
|
|
181
|
+
export declare function initExtendedWorkerPool(config?: WorkerPoolConfig): Promise<ExtendedWorkerPool>;
|
|
182
|
+
export default ExtendedWorkerPool;
|
|
183
|
+
//# sourceMappingURL=parallel-workers.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"parallel-workers.d.ts","sourceRoot":"","sources":["../../src/core/parallel-workers.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAuCG;AAWH,MAAM,WAAW,gBAAgB;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,YAAY,EAAE,MAAM,EAAE,CAAC;CACxB;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,UAAU,CAAC;IACjD,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,YAAY;IAC3B,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,KAAK,CAAC;QACX,IAAI,EAAE,MAAM,CAAC;QACb,MAAM,EAAE,MAAM,CAAC;QACf,IAAI,EAAE,MAAM,CAAC;QACb,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC,CAAC;CACJ;AAED,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,YAAY,EAAE,MAAM,CAAC;CACtB;AA+BD,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,OAAO,CAAgB;IAC/B,OAAO,CAAC,SAAS,CAKT;IACR,OAAO,CAAC,WAAW,CAAkC;IACrD,OAAO,CAAC,MAAM,CAA6B;IAC3C,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,gBAAgB,CAAgD;IACxE,OAAO,CAAC,QAAQ,CAAuC;gBAE3C,MAAM,GAAE,gBAAqB;IAYnC,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAgC3B,OAAO,CAAC,aAAa;IAOrB,OAAO,CAAC,iBAAiB;IAqUzB,OAAO,CAAC,kBAAkB;IAmB1B,OAAO,CAAC,YAAY;YAWN,OAAO;IAsCrB;;;OAGG;IACG,gBAAgB,CACpB,WAAW,EAAE,MAAM,EACnB,WAAW,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,CAAC,GACjC,OAAO,CAAC,oBAAoB,EAAE,CAAC;IA4BlC;;;OAGG;IACG,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;IAoBzD;;;OAGG;IACG,iBAAiB,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,KAAK,CAAC;QACtD,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,EAAE,MAAM,CAAC;QACd,aAAa,EAAE,MAAM,CAAC;QACtB,oBAAoB,EAAE,MAAM,CAAC;QAC7B,SAAS,EAAE,MAAM,CAAC;QAClB,eAAe,EAAE,MAAM,CAAC;KACzB,CAAC,CAAC;IAIH;;;OAGG;IACG,oBAAoB,CAAC,WAAW,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;IAQpF;;;OAGG;IACG,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,KAAK,CAAC,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,eAAe,EAAE,CAAC;IAQjF;;;OAGG;IACG,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,EAAE,IAAI,GAAE,MAAU,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC;IAInG;;;OAGG;IACG,WAAW,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAI1H;;;OAGG;IACG,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,SAAS,GAAE,MAAY,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAQ9E;;;OAGG;IACG,QAAQ,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;IAIpD;;;OAGG;IACG,QAAQ,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,EAAE,CAAC;IAQrE,QAAQ,IAAI;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,OAAO,EAAE,MAAM,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,MAAM,EAAE,MAAM,CAAC;QACf,oBAAoB,EAAE,MAAM,CAAC;QAC7B,YAAY,EAAE,MAAM,CAAC;KACtB;IAWD,WAAW,IAAI,IAAI;IAKb,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAchC;AAQD,wBAAgB,qBAAqB,CAAC,MAAM,CAAC,EAAE,gBAAgB,GAAG,kBAAkB,CAKnF;AAED,wBAAsB,sBAAsB,CAAC,MAAM,CAAC,EAAE,gBAAgB,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAInG;AAED,eAAe,kBAAkB,CAAC"}
|