@optave/codegraph 1.3.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +109 -40
- package/package.json +5 -5
- package/src/builder.js +52 -8
- package/src/cli.js +127 -1
- package/src/config.js +45 -3
- package/src/constants.js +0 -2
- package/src/cycles.js +2 -2
- package/src/db.js +13 -0
- package/src/export.js +44 -9
- package/src/index.js +21 -0
- package/src/mcp.js +308 -8
- package/src/parser.js +13 -14
- package/src/queries.js +30 -0
- package/src/registry.js +145 -0
- package/src/resolve.js +1 -1
- package/src/structure.js +491 -0
- package/src/watcher.js +2 -2
package/src/registry.js
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import os from 'node:os';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import { debug, warn } from './logger.js';
|
|
5
|
+
|
|
6
|
+
// Single machine-wide registry file under the user's home directory
// (~/.codegraph/registry.json). All repo registrations share this file.
export const REGISTRY_PATH = path.join(os.homedir(), '.codegraph', 'registry.json');
|
|
7
|
+
|
|
8
|
+
/**
 * Load the registry from disk.
 *
 * @param {string} [registryPath] - Override for the registry file location.
 * @returns {{repos: object}} Parsed registry; `{ repos: {} }` when the file
 *   is missing, unreadable, corrupt JSON, or lacks a `repos` object.
 */
export function loadRegistry(registryPath = REGISTRY_PATH) {
  let parsed;
  try {
    parsed = JSON.parse(fs.readFileSync(registryPath, 'utf-8'));
  } catch {
    // Missing or corrupt file is a normal first-run state, not an error.
    return { repos: {} };
  }
  const shapeOk = parsed && typeof parsed.repos === 'object';
  return shapeOk ? parsed : { repos: {} };
}
|
|
22
|
+
|
|
23
|
+
/**
 * Persist the registry to disk (atomic write via temp + rename).
 * Creates the parent directory if needed.
 *
 * @param {{repos: object}} registry - Registry object to serialize.
 * @param {string} [registryPath] - Override for the registry file location.
 * @throws On filesystem errors (permissions, disk full, ...).
 */
export function saveRegistry(registry, registryPath = REGISTRY_PATH) {
  // mkdirSync with recursive:true is a no-op when the directory already
  // exists, so the previous existsSync pre-check was redundant (and a
  // check-then-act race besides).
  fs.mkdirSync(path.dirname(registryPath), { recursive: true });

  // Write to a pid-suffixed temp file, then rename over the target:
  // rename is atomic on POSIX, so readers never see a half-written file.
  const tmp = `${registryPath}.tmp.${process.pid}`;
  fs.writeFileSync(tmp, JSON.stringify(registry, null, 2), 'utf-8');
  fs.renameSync(tmp, registryPath);
}
|
|
35
|
+
|
|
36
|
+
/**
 * Register a project directory. Idempotent.
 * Name defaults to `path.basename(rootDir)`.
 *
 * When no explicit name is provided and the basename already exists
 * pointing to a different path, auto-suffixes (`api` → `api-2`, `api-3`, …).
 * Re-registering the same path updates in place. Explicit names always overwrite.
 *
 * @param {string} rootDir - Project root (relative paths are resolved).
 * @param {string} [name] - Explicit registry name; disables auto-suffixing.
 * @param {string} [registryPath] - Override for the registry file location.
 * @returns {{name: string, entry: object}} Final name and stored entry.
 */
export function registerRepo(rootDir, name, registryPath = REGISTRY_PATH) {
  const absRoot = path.resolve(rootDir);
  const registry = loadRegistry(registryPath);
  const baseName = name || path.basename(absRoot);

  let repoName = baseName;
  // Explicit names always overwrite; only derived names get suffixed.
  if (!name) {
    const current = registry.repos[baseName];
    if (current && path.resolve(current.path) !== absRoot) {
      // Basename taken by a different project: walk api-2, api-3, … until
      // we find either a free slot or a slot already owned by this path.
      let suffix = 2;
      for (;;) {
        const candidate = `${baseName}-${suffix}`;
        const entry = registry.repos[candidate];
        if (!entry || path.resolve(entry.path) === absRoot) {
          repoName = candidate;
          break;
        }
        suffix += 1;
      }
    }
  }

  registry.repos[repoName] = {
    path: absRoot,
    dbPath: path.join(absRoot, '.codegraph', 'graph.db'),
    addedAt: new Date().toISOString(),
  };

  saveRegistry(registry, registryPath);
  debug(`Registered repo "${repoName}" at ${absRoot}`);
  return { name: repoName, entry: registry.repos[repoName] };
}
|
|
82
|
+
|
|
83
|
+
/**
 * Remove a repo from the registry.
 *
 * @param {string} name - Registry name of the repo to remove.
 * @param {string} [registryPath] - Override for the registry file location.
 * @returns {boolean} False if no entry with that name existed; true once removed.
 */
export function unregisterRepo(name, registryPath = REGISTRY_PATH) {
  const registry = loadRegistry(registryPath);
  const entry = registry.repos[name];
  if (!entry) return false;

  delete registry.repos[name];
  saveRegistry(registry, registryPath);
  return true;
}
|
|
93
|
+
|
|
94
|
+
/**
 * List all registered repos, sorted by name.
 *
 * @param {string} [registryPath] - Override for the registry file location.
 * @returns {Array<{name: string, path: string, dbPath: string, addedAt: string}>}
 */
export function listRepos(registryPath = REGISTRY_PATH) {
  const { repos } = loadRegistry(registryPath);
  const result = [];
  for (const [name, entry] of Object.entries(repos)) {
    result.push({
      name,
      path: entry.path,
      dbPath: entry.dbPath,
      addedAt: entry.addedAt,
    });
  }
  result.sort((a, b) => a.name.localeCompare(b.name));
  return result;
}
|
|
108
|
+
|
|
109
|
+
/**
 * Resolve a repo name to its database path.
 *
 * @param {string} name - Registry name to look up.
 * @param {string} [registryPath] - Override for the registry file location.
 * @returns {string|undefined} The DB path, or undefined when the repo is
 *   unknown or its DB file is missing on disk (a warning is logged).
 */
export function resolveRepoDbPath(name, registryPath = REGISTRY_PATH) {
  const entry = loadRegistry(registryPath).repos[name];
  if (!entry) return undefined;

  if (fs.existsSync(entry.dbPath)) return entry.dbPath;

  // Registered but never built (or DB deleted) — surface it, don't throw.
  warn(`Registry: database missing for "${name}" at ${entry.dbPath}`);
  return undefined;
}
|
|
123
|
+
|
|
124
|
+
/**
 * Remove registry entries whose repo directory no longer exists on disk.
 * Only checks the repo directory (not the DB file — a missing DB is normal
 * pre-build state).
 *
 * @param {string} [registryPath] - Override for the registry file location.
 * @returns {Array<{name: string, path: string}>} One record per pruned entry.
 */
export function pruneRegistry(registryPath = REGISTRY_PATH) {
  const registry = loadRegistry(registryPath);

  // Snapshot entries first, then mutate — avoids deleting while iterating.
  const stale = Object.entries(registry.repos).filter(
    ([, entry]) => !fs.existsSync(entry.path),
  );
  const pruned = stale.map(([name, entry]) => {
    delete registry.repos[name];
    return { name, path: entry.path };
  });

  // Only touch the file when something actually changed.
  if (pruned.length > 0) saveRegistry(registry, registryPath);

  return pruned;
}
|
package/src/resolve.js
CHANGED
|
@@ -12,7 +12,7 @@ import { loadNative } from './native.js';
|
|
|
12
12
|
export function convertAliasesForNative(aliases) {
|
|
13
13
|
if (!aliases) return null;
|
|
14
14
|
return {
|
|
15
|
-
baseUrl: aliases.baseUrl ||
|
|
15
|
+
baseUrl: aliases.baseUrl || '',
|
|
16
16
|
paths: Object.entries(aliases.paths || {}).map(([pattern, targets]) => ({
|
|
17
17
|
pattern,
|
|
18
18
|
targets,
|
package/src/structure.js
ADDED
|
@@ -0,0 +1,491 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import { normalizePath } from './constants.js';
|
|
3
|
+
import { openReadonlyOrFail } from './db.js';
|
|
4
|
+
import { debug } from './logger.js';
|
|
5
|
+
|
|
6
|
+
// ─── Build-time: insert directory nodes, contains edges, and metrics ────
|
|
7
|
+
|
|
8
|
+
/**
 * Build directory structure nodes, containment edges, and compute metrics.
 * Called from builder.js after edge building. Runs in five ordered steps,
 * each of the write-heavy ones wrapped in its own better-sqlite3 transaction:
 *   1. collect every directory (including intermediate parents),
 *   2. insert 'directory' nodes,
 *   3. insert 'contains' edges (dir → file, dir → subdirectory),
 *   4. per-file metrics into node_metrics,
 *   5. per-directory rollup metrics (fan-in/out across the dir boundary,
 *      cohesion = intra-dir import edges / all edges touching the dir).
 *
 * @param {import('better-sqlite3').Database} db - Open read-write database
 * @param {Map<string, object>} fileSymbols - Map of relPath → { definitions, imports, exports, calls }
 * @param {string} _rootDir - Absolute root directory (currently unused)
 * @param {Map<string, number>} lineCountMap - Map of relPath → line count
 * @param {Set<string>} directories - Set of relative directory paths
 */
export function buildStructure(db, fileSymbols, _rootDir, lineCountMap, directories) {
  // Prepared once, reused across all transactions below.
  const insertNode = db.prepare(
    'INSERT OR IGNORE INTO nodes (name, kind, file, line, end_line) VALUES (?, ?, ?, ?, ?)',
  );
  const getNodeId = db.prepare(
    'SELECT id FROM nodes WHERE name = ? AND kind = ? AND file = ? AND line = ?',
  );
  const insertEdge = db.prepare(
    'INSERT INTO edges (source_id, target_id, kind, confidence, dynamic) VALUES (?, ?, ?, ?, ?)',
  );
  const upsertMetric = db.prepare(`
    INSERT OR REPLACE INTO node_metrics
    (node_id, line_count, symbol_count, import_count, export_count, fan_in, fan_out, cohesion, file_count)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);

  // Clean previous directory nodes/edges (idempotent rebuild): metrics and
  // containment are fully derived, so they are dropped and recomputed.
  db.exec(`
    DELETE FROM edges WHERE kind = 'contains';
    DELETE FROM node_metrics;
    DELETE FROM nodes WHERE kind = 'directory';
  `);

  // Step 1: Ensure all directories are represented (including intermediate
  // parents) — walk each path upward until the repo root ('.') is reached.
  const allDirs = new Set();
  for (const dir of directories) {
    let d = dir;
    while (d && d !== '.') {
      allDirs.add(d);
      d = normalizePath(path.dirname(d));
    }
  }
  // Also add dirs derived from file paths (covers files whose directory was
  // not in the `directories` set).
  for (const relPath of fileSymbols.keys()) {
    let d = normalizePath(path.dirname(relPath));
    while (d && d !== '.') {
      allDirs.add(d);
      d = normalizePath(path.dirname(d));
    }
  }

  // Step 2: Insert directory nodes. A directory node uses its own relative
  // path for name AND file, with line 0, so getNodeId can find it later.
  const insertDirs = db.transaction(() => {
    for (const dir of allDirs) {
      insertNode.run(dir, 'directory', dir, 0, null);
    }
  });
  insertDirs();

  // Step 3: Insert 'contains' edges (dir → file, dir → subdirectory).
  const insertContains = db.transaction(() => {
    // dir → file (only the immediate parent directory).
    for (const relPath of fileSymbols.keys()) {
      const dir = normalizePath(path.dirname(relPath));
      if (!dir || dir === '.') continue;
      const dirRow = getNodeId.get(dir, 'directory', dir, 0);
      const fileRow = getNodeId.get(relPath, 'file', relPath, 0);
      if (dirRow && fileRow) {
        insertEdge.run(dirRow.id, fileRow.id, 'contains', 1.0, 0);
      }
    }
    // dir → subdirectory (immediate parent only; `parent === dir` guards
    // against pathological inputs where dirname is a fixed point).
    for (const dir of allDirs) {
      const parent = normalizePath(path.dirname(dir));
      if (!parent || parent === '.' || parent === dir) continue;
      const parentRow = getNodeId.get(parent, 'directory', parent, 0);
      const childRow = getNodeId.get(dir, 'directory', dir, 0);
      if (parentRow && childRow) {
        insertEdge.run(parentRow.id, childRow.id, 'contains', 1.0, 0);
      }
    }
  });
  insertContains();

  // Step 4: Compute per-file metrics.
  // Pre-compute fan-in/fan-out per file from import edges; self-imports
  // (same file on both ends) are excluded by the SQL filter.
  const fanInMap = new Map();
  const fanOutMap = new Map();
  const importEdges = db
    .prepare(`
      SELECT n1.file AS source_file, n2.file AS target_file
      FROM edges e
      JOIN nodes n1 ON e.source_id = n1.id
      JOIN nodes n2 ON e.target_id = n2.id
      WHERE e.kind IN ('imports', 'imports-type')
        AND n1.file != n2.file
    `)
    .all();

  for (const { source_file, target_file } of importEdges) {
    fanOutMap.set(source_file, (fanOutMap.get(source_file) || 0) + 1);
    fanInMap.set(target_file, (fanInMap.get(target_file) || 0) + 1);
  }

  const computeFileMetrics = db.transaction(() => {
    for (const [relPath, symbols] of fileSymbols) {
      const fileRow = getNodeId.get(relPath, 'file', relPath, 0);
      if (!fileRow) continue;

      const lineCount = lineCountMap.get(relPath) || 0;
      // Deduplicate definitions by name+kind+line (parsers can report the
      // same symbol more than once).
      const seen = new Set();
      let symbolCount = 0;
      for (const d of symbols.definitions) {
        const key = `${d.name}|${d.kind}|${d.line}`;
        if (!seen.has(key)) {
          seen.add(key);
          symbolCount++;
        }
      }
      const importCount = symbols.imports.length;
      const exportCount = symbols.exports.length;
      const fanIn = fanInMap.get(relPath) || 0;
      const fanOut = fanOutMap.get(relPath) || 0;

      // cohesion and file_count are directory-level concepts → NULL for files.
      upsertMetric.run(
        fileRow.id,
        lineCount,
        symbolCount,
        importCount,
        exportCount,
        fanIn,
        fanOut,
        null,
        null,
      );
    }
  });
  computeFileMetrics();

  // Step 5: Compute per-directory metrics.
  // Build a map of dir → descendant files: each file is attributed to every
  // ancestor directory, not just its immediate parent.
  const dirFiles = new Map();
  for (const dir of allDirs) {
    dirFiles.set(dir, []);
  }
  for (const relPath of fileSymbols.keys()) {
    let d = normalizePath(path.dirname(relPath));
    while (d && d !== '.') {
      if (dirFiles.has(d)) {
        dirFiles.get(d).push(relPath);
      }
      d = normalizePath(path.dirname(d));
    }
  }

  const computeDirMetrics = db.transaction(() => {
    for (const [dir, files] of dirFiles) {
      const dirRow = getNodeId.get(dir, 'directory', dir, 0);
      if (!dirRow) continue;

      const fileCount = files.length;
      let symbolCount = 0;
      let totalFanIn = 0;
      let totalFanOut = 0;
      const filesInDir = new Set(files);

      // Symbol totals: same name+kind+line dedupe as the file pass, but
      // scoped per file (duplicates across files still count separately).
      for (const f of files) {
        const sym = fileSymbols.get(f);
        if (sym) {
          const seen = new Set();
          for (const d of sym.definitions) {
            const key = `${d.name}|${d.kind}|${d.line}`;
            if (!seen.has(key)) {
              seen.add(key);
              symbolCount++;
            }
          }
        }
      }

      // Compute cross-boundary fan-in/fan-out and cohesion.
      // NOTE(review): this scans every import edge once per directory —
      // O(dirs × edges). Fine for typical repos; a single grouped pass
      // would be the optimization if it ever shows up in profiles.
      let intraEdges = 0;
      let crossEdges = 0;
      for (const { source_file, target_file } of importEdges) {
        const srcInside = filesInDir.has(source_file);
        const tgtInside = filesInDir.has(target_file);
        if (srcInside && tgtInside) {
          intraEdges++;
        } else if (srcInside || tgtInside) {
          crossEdges++;
          if (!srcInside && tgtInside) totalFanIn++;
          if (srcInside && !tgtInside) totalFanOut++;
        }
      }

      // cohesion ∈ [0, 1]: share of this dir's edges that stay inside it;
      // NULL when the directory touches no import edges at all.
      const totalEdges = intraEdges + crossEdges;
      const cohesion = totalEdges > 0 ? intraEdges / totalEdges : null;

      // line_count / import_count / export_count are file-level → NULL here.
      upsertMetric.run(
        dirRow.id,
        null,
        symbolCount,
        null,
        null,
        totalFanIn,
        totalFanOut,
        cohesion,
        fileCount,
      );
    }
  });
  computeDirMetrics();

  const dirCount = allDirs.size;
  debug(`Structure: ${dirCount} directories, ${fileSymbols.size} files with metrics`);
}
|
|
225
|
+
|
|
226
|
+
// ─── Query functions (read-only) ──────────────────────────────────────
|
|
227
|
+
|
|
228
|
+
/**
 * Return hierarchical directory tree with metrics.
 *
 * @param {string} [customDbPath] - Explicit DB path (default resolution otherwise).
 * @param {object} [opts]
 * @param {string} [opts.directory] - Restrict output to this directory subtree.
 * @param {number} [opts.depth] - Max depth (relative to opts.directory if given).
 * @param {string} [opts.sort] - 'cohesion' | 'fan-in' | 'fan-out' | 'density';
 *   anything else (including the default 'files') sorts by name, which keeps
 *   parents before children for tree rendering.
 * @returns {{directories: Array<object>, count: number}}
 */
export function structureData(customDbPath, opts = {}) {
  const db = openReadonlyOrFail(customDbPath);
  const filterDir = opts.directory || null;
  const maxDepth = opts.depth || null;
  const sortBy = opts.sort || 'files';

  // Get all directory nodes with their metrics (LEFT JOIN: a directory may
  // have no metrics row, yielding NULL columns).
  let dirs = db
    .prepare(`
      SELECT n.id, n.name, n.file, nm.symbol_count, nm.fan_in, nm.fan_out, nm.cohesion, nm.file_count
      FROM nodes n
      LEFT JOIN node_metrics nm ON n.id = nm.node_id
      WHERE n.kind = 'directory'
    `)
    .all();

  if (filterDir) {
    // Keep the directory itself plus everything under it.
    const norm = normalizePath(filterDir);
    dirs = dirs.filter((d) => d.name === norm || d.name.startsWith(`${norm}/`));
  }

  if (maxDepth) {
    // Depth is counted in path segments, relative to the filter root if any.
    const baseDepth = filterDir ? normalizePath(filterDir).split('/').length : 0;
    dirs = dirs.filter((d) => {
      const depth = d.name.split('/').length - baseDepth;
      return depth <= maxDepth;
    });
  }

  // Sort
  dirs.sort(getSortFn(sortBy));

  // FIX: these two statements were previously prepared inside the map loop,
  // recompiling the SQL once per directory. Prepare once, run many times.
  const filesStmt = db.prepare(`
    SELECT n.name, nm.line_count, nm.symbol_count, nm.import_count, nm.export_count, nm.fan_in, nm.fan_out
    FROM edges e
    JOIN nodes n ON e.target_id = n.id
    LEFT JOIN node_metrics nm ON n.id = nm.node_id
    WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'file'
  `);
  const subdirsStmt = db.prepare(`
    SELECT n.name
    FROM edges e
    JOIN nodes n ON e.target_id = n.id
    WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'directory'
  `);

  // Assemble one record per directory with its contained files and subdirs.
  const result = dirs.map((d) => ({
    directory: d.name,
    fileCount: d.file_count || 0,
    symbolCount: d.symbol_count || 0,
    fanIn: d.fan_in || 0,
    fanOut: d.fan_out || 0,
    cohesion: d.cohesion,
    // density = symbols per file; 0 when the dir holds no direct files.
    density: d.file_count > 0 ? (d.symbol_count || 0) / d.file_count : 0,
    files: filesStmt.all(d.id).map((f) => ({
      file: f.name,
      lineCount: f.line_count || 0,
      symbolCount: f.symbol_count || 0,
      importCount: f.import_count || 0,
      exportCount: f.export_count || 0,
      fanIn: f.fan_in || 0,
      fanOut: f.fan_out || 0,
    })),
    subdirectories: subdirsStmt.all(d.id).map((s) => s.name),
  }));

  db.close();
  return { directories: result, count: result.length };
}
|
|
309
|
+
|
|
310
|
+
/**
 * Return top N files or directories ranked by a chosen metric.
 *
 * @param {string} [customDbPath] - Explicit DB path (default resolution otherwise).
 * @param {object} [opts]
 * @param {string} [opts.metric] - 'fan-in' (default) | 'fan-out' | 'density' | 'coupling';
 *   unknown metrics fall back to 'fan-in'.
 * @param {string} [opts.level] - 'file' (default) | 'directory'.
 * @param {number} [opts.limit] - Max rows (default 10).
 * @returns {{metric: string, level: string, limit: number, hotspots: Array<object>}}
 */
export function hotspotsData(customDbPath, opts = {}) {
  const db = openReadonlyOrFail(customDbPath);
  const metric = opts.metric || 'fan-in';
  const level = opts.level || 'file';
  const limit = opts.limit || 10;

  const kind = level === 'directory' ? 'directory' : 'file';

  // Only the ORDER BY clause differs per metric; keep a small fixed table
  // of clauses and prepare just the one statement we need. (Previously all
  // four full statements were prepared on every call.) The interpolated
  // fragment comes exclusively from this table — never from user input —
  // so this is not an injection surface.
  const ORDER_BY = {
    'fan-in': 'nm.fan_in DESC NULLS LAST',
    'fan-out': 'nm.fan_out DESC NULLS LAST',
    density: 'nm.symbol_count DESC NULLS LAST',
    coupling: '(COALESCE(nm.fan_in, 0) + COALESCE(nm.fan_out, 0)) DESC NULLS LAST',
  };
  const orderBy = ORDER_BY[metric] || ORDER_BY['fan-in'];

  const rows = db
    .prepare(`
      SELECT n.name, n.kind, nm.line_count, nm.symbol_count, nm.import_count, nm.export_count,
             nm.fan_in, nm.fan_out, nm.cohesion, nm.file_count
      FROM nodes n JOIN node_metrics nm ON n.id = nm.node_id
      WHERE n.kind = ? ORDER BY ${orderBy} LIMIT ?`)
    .all(kind, limit);

  const hotspots = rows.map((r) => ({
    name: r.name,
    kind: r.kind,
    lineCount: r.line_count,
    symbolCount: r.symbol_count,
    importCount: r.import_count,
    exportCount: r.export_count,
    fanIn: r.fan_in,
    fanOut: r.fan_out,
    cohesion: r.cohesion,
    fileCount: r.file_count,
    // Directories: symbols per file; files: symbols per line; else 0.
    density:
      r.file_count > 0
        ? (r.symbol_count || 0) / r.file_count
        : r.line_count > 0
          ? (r.symbol_count || 0) / r.line_count
          : 0,
    coupling: (r.fan_in || 0) + (r.fan_out || 0),
  }));

  db.close();
  return { metric, level, limit, hotspots };
}
|
|
370
|
+
|
|
371
|
+
/**
 * Return directories with cohesion above threshold, with their files.
 *
 * @param {string} [customDbPath] - Explicit DB path (default resolution otherwise).
 * @param {object} [opts]
 * @param {number} [opts.threshold] - Minimum cohesion, default 0.3.
 * @returns {{threshold: number, modules: Array<object>, count: number}}
 */
export function moduleBoundariesData(customDbPath, opts = {}) {
  const db = openReadonlyOrFail(customDbPath);
  const threshold = opts.threshold || 0.3;

  const dirs = db
    .prepare(`
      SELECT n.id, n.name, nm.symbol_count, nm.fan_in, nm.fan_out, nm.cohesion, nm.file_count
      FROM nodes n
      JOIN node_metrics nm ON n.id = nm.node_id
      WHERE n.kind = 'directory' AND nm.cohesion IS NOT NULL AND nm.cohesion >= ?
      ORDER BY nm.cohesion DESC
    `)
    .all(threshold);

  // FIX: this statement was previously prepared inside the map loop,
  // recompiling the SQL once per qualifying directory. Prepare once.
  const filesStmt = db.prepare(`
    SELECT n.name FROM edges e
    JOIN nodes n ON e.target_id = n.id
    WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'file'
  `);

  const modules = dirs.map((d) => ({
    directory: d.name,
    cohesion: d.cohesion,
    fileCount: d.file_count || 0,
    symbolCount: d.symbol_count || 0,
    fanIn: d.fan_in || 0,
    fanOut: d.fan_out || 0,
    files: filesStmt.all(d.id).map((f) => f.name),
  }));

  db.close();
  return { threshold, modules, count: modules.length };
}
|
|
413
|
+
|
|
414
|
+
// ─── Formatters ───────────────────────────────────────────────────────
|
|
415
|
+
|
|
416
|
+
/**
 * Render structureData() output as an indented text tree.
 * Indentation depth comes from the directory's path segment count, so
 * name-sorted input renders parents directly above their children.
 */
export function formatStructure(data) {
  if (data.count === 0) return 'No directory structure found. Run "codegraph build" first.';

  const out = [`\nProject structure (${data.count} directories):\n`];
  data.directories.forEach((d) => {
    const depth = d.directory.split('/').length - 1;
    const pad = '  '.repeat(depth);
    // cohesion is null when the directory touches no import edges.
    const cohStr = d.cohesion !== null ? ` cohesion=${d.cohesion.toFixed(2)}` : '';
    out.push(
      `${pad}${d.directory}/ (${d.fileCount} files, ${d.symbolCount} symbols, <-${d.fanIn} ->${d.fanOut}${cohStr})`,
    );
    d.files.forEach((f) => {
      out.push(
        `${pad}  ${path.basename(f.file)} ${f.lineCount}L ${f.symbolCount}sym <-${f.fanIn} ->${f.fanOut}`,
      );
    });
  });
  return out.join('\n');
}
|
|
435
|
+
|
|
436
|
+
/**
 * Render hotspotsData() output as a ranked text list.
 * Directory rows show file count + cohesion; file rows show lines + symbols.
 */
export function formatHotspots(data) {
  if (data.hotspots.length === 0) return 'No hotspots found. Run "codegraph build" first.';

  const out = [`\nHotspots by ${data.metric} (${data.level}-level, top ${data.limit}):\n`];
  data.hotspots.forEach((h, i) => {
    const isDir = h.kind === 'directory';
    const extra = isDir
      ? `${h.fileCount} files, cohesion=${h.cohesion !== null ? h.cohesion.toFixed(2) : 'n/a'}`
      : `${h.lineCount || 0}L, ${h.symbolCount || 0} symbols`;
    out.push(
      ` ${String(i + 1).padStart(2)}. ${h.name} <-${h.fanIn || 0} ->${h.fanOut || 0} (${extra})`,
    );
  });
  return out.join('\n');
}
|
|
452
|
+
|
|
453
|
+
/**
 * Render moduleBoundariesData() output as a text report: one stanza per
 * module with cohesion, edge counts, and up to five member files.
 */
export function formatModuleBoundaries(data) {
  if (data.count === 0) return `No modules found with cohesion >= ${data.threshold}.`;

  const out = [`\nModule boundaries (cohesion >= ${data.threshold}, ${data.count} modules):\n`];
  for (const m of data.modules) {
    out.push(
      `  ${m.directory}/ cohesion=${m.cohesion.toFixed(2)} (${m.fileCount} files, ${m.symbolCount} symbols)`,
    );
    out.push(`    Incoming: ${m.fanIn} edges   Outgoing: ${m.fanOut} edges`);
    if (m.files.length > 0) {
      const shown = m.files.slice(0, 5).join(', ');
      const more = m.files.length > 5 ? ` ... +${m.files.length - 5}` : '';
      out.push(`    Files: ${shown}${more}`);
    }
    out.push('');
  }
  return out.join('\n');
}
|
|
471
|
+
|
|
472
|
+
// ─── Helpers ──────────────────────────────────────────────────────────
|
|
473
|
+
|
|
474
|
+
/**
 * Pick a comparator for directory rows by sort key.
 * Known keys: 'cohesion', 'fan-in', 'fan-out', 'density'; anything else
 * (including the default 'files') falls back to name order, which keeps
 * the rendered tree parent-before-child.
 */
function getSortFn(sortBy) {
  // Null cohesion ranks below 0 so metric-less dirs sink to the bottom.
  const byCohesion = (a, b) => (b.cohesion ?? -1) - (a.cohesion ?? -1);
  const byFanIn = (a, b) => (b.fan_in || 0) - (a.fan_in || 0);
  const byFanOut = (a, b) => (b.fan_out || 0) - (a.fan_out || 0);
  const density = (row) => (row.file_count > 0 ? (row.symbol_count || 0) / row.file_count : 0);
  const byDensity = (a, b) => density(b) - density(a);
  const byName = (a, b) => a.name.localeCompare(b.name);

  const table = {
    cohesion: byCohesion,
    'fan-in': byFanIn,
    'fan-out': byFanOut,
    density: byDensity,
  };
  return table[sortBy] || byName;
}
|
package/src/watcher.js
CHANGED
|
@@ -173,10 +173,10 @@ export async function watchProject(rootDir, opts = {}) {
|
|
|
173
173
|
countNodes: db.prepare('SELECT COUNT(*) as c FROM nodes WHERE file = ?'),
|
|
174
174
|
countEdgesForFile: null,
|
|
175
175
|
findNodeInFile: db.prepare(
|
|
176
|
-
"SELECT id, file FROM nodes WHERE name = ? AND kind IN ('function', 'method', 'class', 'interface') AND file = ?",
|
|
176
|
+
"SELECT id, file FROM nodes WHERE name = ? AND kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module') AND file = ?",
|
|
177
177
|
),
|
|
178
178
|
findNodeByName: db.prepare(
|
|
179
|
-
"SELECT id, file FROM nodes WHERE name = ? AND kind IN ('function', 'method', 'class', 'interface')",
|
|
179
|
+
"SELECT id, file FROM nodes WHERE name = ? AND kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module')",
|
|
180
180
|
),
|
|
181
181
|
};
|
|
182
182
|
|