ucn 3.8.13 → 3.8.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/skills/ucn/SKILL.md +3 -1
- package/.github/workflows/ci.yml +13 -1
- package/README.md +1 -0
- package/cli/index.js +165 -246
- package/core/analysis.js +1400 -0
- package/core/build-worker.js +194 -0
- package/core/cache.js +105 -7
- package/core/callers.js +194 -64
- package/core/deadcode.js +22 -66
- package/core/discovery.js +9 -54
- package/core/execute.js +139 -54
- package/core/graph.js +615 -0
- package/core/output/analysis-ext.js +271 -0
- package/core/output/analysis.js +491 -0
- package/core/output/extraction.js +188 -0
- package/core/output/find.js +355 -0
- package/core/output/graph.js +399 -0
- package/core/output/refactoring.js +293 -0
- package/core/output/reporting.js +331 -0
- package/core/output/search.js +307 -0
- package/core/output/shared.js +271 -0
- package/core/output/tracing.js +416 -0
- package/core/output.js +15 -3293
- package/core/parallel-build.js +165 -0
- package/core/project.js +299 -3633
- package/core/registry.js +59 -0
- package/core/reporting.js +258 -0
- package/core/search.js +890 -0
- package/core/stacktrace.js +1 -1
- package/core/tracing.js +631 -0
- package/core/verify.js +10 -13
- package/eslint.config.js +43 -0
- package/jsconfig.json +10 -0
- package/languages/go.js +21 -2
- package/languages/html.js +8 -0
- package/languages/index.js +102 -40
- package/languages/java.js +13 -0
- package/languages/javascript.js +17 -1
- package/languages/python.js +14 -0
- package/languages/rust.js +13 -0
- package/languages/utils.js +1 -1
- package/mcp/server.js +45 -28
- package/package.json +8 -3
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* core/parallel-build.js - Worker pool orchestration for parallel indexing
|
|
5
|
+
*
|
|
6
|
+
* Splits files into N chunks, spawns worker threads to parse them in parallel,
|
|
7
|
+
* then merges results into the ProjectIndex. Uses Atomics.wait + MessageChannel
|
|
8
|
+
* to keep the build() API synchronous.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
const os = require('os');
|
|
12
|
+
const path = require('path');
|
|
13
|
+
const { Worker, MessageChannel, receiveMessageOnPort } = require('worker_threads');
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Build index in parallel using worker threads.
|
|
17
|
+
*
|
|
18
|
+
* @param {object} index - ProjectIndex instance
|
|
19
|
+
* @param {string[]} files - Files to index
|
|
20
|
+
* @param {object} options
|
|
21
|
+
* @param {number} [options.workerCount] - Number of workers (auto-detect if omitted)
|
|
22
|
+
* @param {boolean} [options.quiet] - Suppress output
|
|
23
|
+
* @returns {number|false} Number of changed files, or false if too few workers
|
|
24
|
+
*/
|
|
25
|
+
/**
 * Build index in parallel using worker threads.
 *
 * Splits `files` into round-robin chunks, spawns one worker per chunk, and
 * blocks the calling thread with Atomics.wait until every worker signals
 * completion — keeping the build() API synchronous. Results are then drained
 * from per-worker MessageChannels and merged into `index`.
 *
 * @param {object} index - ProjectIndex instance (mutated in place)
 * @param {string[]} files - Files to index
 * @param {object} options
 * @param {number} [options.workerCount] - Number of workers (auto-detect if omitted)
 * @param {boolean} [options.quiet] - Suppress progress/warning output
 * @returns {number|false} Number of changed files, or false when the file set
 *   is too small to justify parallelism (fewer than 2 workers)
 * @throws {Error} If the workers do not all finish within 5 minutes
 */
function parallelBuild(index, files, options = {}) {
  const availableCpus = (typeof os.availableParallelism === 'function')
    ? os.availableParallelism()
    : os.cpus().length;
  const autoWorkers = Math.max(availableCpus - 1, 1);
  const maxWorkers = (options.workerCount > 0) ? options.workerCount : autoWorkers;
  const workerCount = Math.min(
    maxWorkers,
    8,
    Math.ceil(files.length / 100) // aim for roughly 100 files per worker
  );

  if (workerCount < 2) return false;

  if (!options.quiet) {
    console.error(`Parallel build: ${workerCount} workers for ${files.length} files`);
  }

  // Prepare existing hash data for skip-if-unchanged checks in workers.
  // Object.create(null) avoids prototype-key collisions with file paths.
  const existingHashes = Object.create(null);
  for (const [fp, entry] of index.files) {
    existingHashes[fp] = { mtime: entry.mtime, size: entry.size, hash: entry.hash };
  }

  // Partition files round-robin for balanced work distribution
  const chunks = Array.from({ length: workerCount }, () => []);
  for (let i = 0; i < files.length; i++) {
    chunks[i % workerCount].push(files[i]);
  }

  // Synchronization: one Int32 per worker in a SharedArrayBuffer. Each worker
  // flips its slot to non-zero (and notifies) when its chunk is done.
  const sab = new SharedArrayBuffer(4 * workerCount);
  const signal = new Int32Array(sab);

  const ports = [];
  const workers = [];

  // Best-effort teardown shared by the failure paths (spawn error, timeout).
  const cleanup = () => {
    for (const w of workers) void w.terminate(); // fire-and-forget; this API is synchronous
    for (const p of ports) p.close();
  };

  try {
    for (let i = 0; i < workerCount; i++) {
      const { port1, port2 } = new MessageChannel();
      ports.push(port1);

      const worker = new Worker(path.join(__dirname, 'build-worker.js'), {
        workerData: {
          files: chunks[i],
          rootDir: index.root,
          existingHashes,
          signal: sab,
          workerIndex: i,
          port: port2,
        },
        transferList: [port2],
      });
      workers.push(worker);
    }

    // Block main thread until all workers finish (with timeout). While blocked
    // in Atomics.wait the event loop is stalled, so worker 'error' events
    // cannot be observed here — the deadline is the safety net for a worker
    // that dies without signalling.
    const TIMEOUT_MS = 300_000; // 5 minutes
    const deadline = Date.now() + TIMEOUT_MS;

    for (let i = 0; i < workerCount; i++) {
      while (Atomics.load(signal, i) === 0) {
        const remaining = deadline - Date.now();
        if (remaining <= 0) {
          throw new Error('Parallel build timed out after 5 minutes');
        }
        // Wake at least every 5s so the deadline is re-checked even if the
        // worker's notify is missed.
        Atomics.wait(signal, i, 0, Math.min(remaining, 5000));
      }
    }
  } catch (err) {
    // Fix: a spawn failure previously leaked already-started workers, and the
    // timeout path left all MessagePorts open.
    cleanup();
    throw err;
  }

  // Collect and merge results from each worker
  let changed = 0;

  for (let i = 0; i < workerCount; i++) {
    const msg = receiveMessageOnPort(ports[i]);
    ports[i].close();
    if (!msg) continue;

    for (const result of msg.message) {
      if (mergeWorkerResult(index, result, options)) {
        changed++;
      }
    }
  }

  // Terminate workers
  for (const w of workers) {
    void w.terminate(); // fire-and-forget; no result is awaited
  }

  return changed;
}

/**
 * Merge a single per-file worker result into the index.
 *
 * Handles the three result shapes produced by build-worker.js: parse errors,
 * skipped (unchanged) files, and changed/new files carrying a fileEntry.
 *
 * @param {object} index - ProjectIndex instance (mutated in place)
 * @param {object} result - One entry from a worker's result array
 * @param {object} options - Same options object passed to parallelBuild
 * @returns {boolean} True if the file counted as changed
 */
function mergeWorkerResult(index, result, options) {
  if (result.error) {
    index.failedFiles.add(result.filePath);
    if (!options.quiet) {
      console.error(`  Warning: Could not index ${result.filePath}: ${result.error}`);
    }
    return false;
  }

  if (result.skipped) {
    // Update mtime/size if content matched but stat changed
    if (result.mtimeUpdate !== undefined) {
      const existing = index.files.get(result.filePath);
      if (existing) {
        existing.mtime = result.mtimeUpdate;
        existing.size = result.sizeUpdate;
      }
    }
    index.failedFiles.delete(result.filePath);
    return false;
  }

  // Changed or new file — drop stale symbols before re-registering
  if (result.hadExisting) {
    index.removeFileSymbols(result.filePath);
  }

  const fe = result.fileEntry;

  // Register symbols in global map
  for (const symbol of fe.symbols) {
    if (!index.symbols.has(symbol.name)) {
      index.symbols.set(symbol.name, []);
    }
    index.symbols.get(symbol.name).push(symbol);
  }

  index.files.set(result.filePath, fe);

  // Populate callsCache (avoids re-parsing in buildCalleeIndex)
  if (result.calls) {
    index.callsCache.set(result.filePath, {
      mtime: result.callsMtime,
      hash: result.callsHash,
      calls: result.calls,
    });
    index.callsCacheDirty = true;
  }

  index.failedFiles.delete(result.filePath);
  return true;
}
|
|
164
|
+
|
|
165
|
+
// Public API: synchronous parallel index build (returns false when not worthwhile).
module.exports = { parallelBuild };
|