@optave/codegraph 3.0.4 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -53
- package/package.json +9 -9
- package/src/builder.js +274 -154
- package/src/cfg.js +11 -9
- package/src/cli.js +35 -0
- package/src/dataflow.js +11 -9
- package/src/db.js +7 -0
- package/src/flow.js +3 -70
- package/src/index.js +2 -1
- package/src/mcp.js +60 -0
- package/src/parser.js +58 -131
- package/src/queries.js +60 -21
- package/src/resolve.js +11 -2
- package/src/sequence.js +369 -0
package/src/queries.js
CHANGED
|
@@ -163,7 +163,7 @@ function resolveMethodViaHierarchy(db, methodName) {
|
|
|
163
163
|
* Find nodes matching a name query, ranked by relevance.
|
|
164
164
|
* Scoring: exact=100, prefix=60, word-boundary=40, substring=10, plus fan-in tiebreaker.
|
|
165
165
|
*/
|
|
166
|
-
function findMatchingNodes(db, name, opts = {}) {
|
|
166
|
+
export function findMatchingNodes(db, name, opts = {}) {
|
|
167
167
|
const kinds = opts.kind ? [opts.kind] : FUNCTION_KINDS;
|
|
168
168
|
const placeholders = kinds.map(() => '?').join(', ');
|
|
169
169
|
const params = [`%${name}%`, ...kinds];
|
|
@@ -3134,31 +3134,49 @@ export function roles(customDbPath, opts = {}) {
|
|
|
3134
3134
|
|
|
3135
3135
|
// ─── exportsData ─────────────────────────────────────────────────────
|
|
3136
3136
|
|
|
3137
|
-
function exportsFileImpl(db, target, noTests, getFileLines) {
|
|
3137
|
+
function exportsFileImpl(db, target, noTests, getFileLines, unused) {
|
|
3138
3138
|
const fileNodes = db
|
|
3139
3139
|
.prepare(`SELECT * FROM nodes WHERE file LIKE ? AND kind = 'file'`)
|
|
3140
3140
|
.all(`%${target}%`);
|
|
3141
3141
|
if (fileNodes.length === 0) return [];
|
|
3142
3142
|
|
|
3143
|
+
// Detect whether exported column exists
|
|
3144
|
+
let hasExportedCol = false;
|
|
3145
|
+
try {
|
|
3146
|
+
db.prepare('SELECT exported FROM nodes LIMIT 0').raw();
|
|
3147
|
+
hasExportedCol = true;
|
|
3148
|
+
} catch {
|
|
3149
|
+
/* old DB without exported column */
|
|
3150
|
+
}
|
|
3151
|
+
|
|
3143
3152
|
return fileNodes.map((fn) => {
|
|
3144
3153
|
const symbols = db
|
|
3145
3154
|
.prepare(`SELECT * FROM nodes WHERE file = ? AND kind != 'file' ORDER BY line`)
|
|
3146
3155
|
.all(fn.file);
|
|
3147
3156
|
|
|
3148
|
-
|
|
3149
|
-
|
|
3150
|
-
|
|
3157
|
+
let exported;
|
|
3158
|
+
if (hasExportedCol) {
|
|
3159
|
+
// Use the exported column populated during build
|
|
3160
|
+
exported = db
|
|
3151
3161
|
.prepare(
|
|
3152
|
-
|
|
3153
|
-
JOIN nodes caller ON e.source_id = caller.id
|
|
3154
|
-
JOIN nodes target ON e.target_id = target.id
|
|
3155
|
-
WHERE target.file = ? AND caller.file != ? AND e.kind = 'calls'`,
|
|
3162
|
+
"SELECT * FROM nodes WHERE file = ? AND kind != 'file' AND exported = 1 ORDER BY line",
|
|
3156
3163
|
)
|
|
3157
|
-
.all(fn.file
|
|
3158
|
-
|
|
3159
|
-
|
|
3160
|
-
|
|
3161
|
-
|
|
3164
|
+
.all(fn.file);
|
|
3165
|
+
} else {
|
|
3166
|
+
// Fallback: symbols that have incoming calls from other files
|
|
3167
|
+
const exportedIds = new Set(
|
|
3168
|
+
db
|
|
3169
|
+
.prepare(
|
|
3170
|
+
`SELECT DISTINCT e.target_id FROM edges e
|
|
3171
|
+
JOIN nodes caller ON e.source_id = caller.id
|
|
3172
|
+
JOIN nodes target ON e.target_id = target.id
|
|
3173
|
+
WHERE target.file = ? AND caller.file != ? AND e.kind = 'calls'`,
|
|
3174
|
+
)
|
|
3175
|
+
.all(fn.file, fn.file)
|
|
3176
|
+
.map((r) => r.target_id),
|
|
3177
|
+
);
|
|
3178
|
+
exported = symbols.filter((s) => exportedIds.has(s.id));
|
|
3179
|
+
}
|
|
3162
3180
|
const internalCount = symbols.length - exported.length;
|
|
3163
3181
|
|
|
3164
3182
|
const results = exported.map((s) => {
|
|
@@ -3185,6 +3203,8 @@ function exportsFileImpl(db, target, noTests, getFileLines) {
|
|
|
3185
3203
|
};
|
|
3186
3204
|
});
|
|
3187
3205
|
|
|
3206
|
+
const totalUnused = results.filter((r) => r.consumerCount === 0).length;
|
|
3207
|
+
|
|
3188
3208
|
// Files that re-export this file (barrel → this file)
|
|
3189
3209
|
const reexports = db
|
|
3190
3210
|
.prepare(
|
|
@@ -3194,12 +3214,18 @@ function exportsFileImpl(db, target, noTests, getFileLines) {
|
|
|
3194
3214
|
.all(fn.id)
|
|
3195
3215
|
.map((r) => ({ file: r.file }));
|
|
3196
3216
|
|
|
3217
|
+
let filteredResults = results;
|
|
3218
|
+
if (unused) {
|
|
3219
|
+
filteredResults = results.filter((r) => r.consumerCount === 0);
|
|
3220
|
+
}
|
|
3221
|
+
|
|
3197
3222
|
return {
|
|
3198
3223
|
file: fn.file,
|
|
3199
|
-
results,
|
|
3224
|
+
results: filteredResults,
|
|
3200
3225
|
reexports,
|
|
3201
3226
|
totalExported: exported.length,
|
|
3202
3227
|
totalInternal: internalCount,
|
|
3228
|
+
totalUnused,
|
|
3203
3229
|
};
|
|
3204
3230
|
});
|
|
3205
3231
|
}
|
|
@@ -3229,12 +3255,13 @@ export function exportsData(file, customDbPath, opts = {}) {
|
|
|
3229
3255
|
}
|
|
3230
3256
|
}
|
|
3231
3257
|
|
|
3232
|
-
const
|
|
3258
|
+
const unused = opts.unused || false;
|
|
3259
|
+
const fileResults = exportsFileImpl(db, file, noTests, getFileLines, unused);
|
|
3233
3260
|
db.close();
|
|
3234
3261
|
|
|
3235
3262
|
if (fileResults.length === 0) {
|
|
3236
3263
|
return paginateResult(
|
|
3237
|
-
{ file, results: [], reexports: [], totalExported: 0, totalInternal: 0 },
|
|
3264
|
+
{ file, results: [], reexports: [], totalExported: 0, totalInternal: 0, totalUnused: 0 },
|
|
3238
3265
|
'results',
|
|
3239
3266
|
{ limit: opts.limit, offset: opts.offset },
|
|
3240
3267
|
);
|
|
@@ -3248,6 +3275,7 @@ export function exportsData(file, customDbPath, opts = {}) {
|
|
|
3248
3275
|
reexports: first.reexports,
|
|
3249
3276
|
totalExported: first.totalExported,
|
|
3250
3277
|
totalInternal: first.totalInternal,
|
|
3278
|
+
totalUnused: first.totalUnused,
|
|
3251
3279
|
};
|
|
3252
3280
|
return paginateResult(base, 'results', { limit: opts.limit, offset: opts.offset });
|
|
3253
3281
|
}
|
|
@@ -3264,13 +3292,24 @@ export function fileExports(file, customDbPath, opts = {}) {
|
|
|
3264
3292
|
}
|
|
3265
3293
|
|
|
3266
3294
|
if (data.results.length === 0) {
|
|
3267
|
-
|
|
3295
|
+
if (opts.unused) {
|
|
3296
|
+
console.log(`No unused exports found for "${file}".`);
|
|
3297
|
+
} else {
|
|
3298
|
+
console.log(`No exported symbols found for "${file}". Run "codegraph build" first.`);
|
|
3299
|
+
}
|
|
3268
3300
|
return;
|
|
3269
3301
|
}
|
|
3270
3302
|
|
|
3271
|
-
|
|
3272
|
-
|
|
3273
|
-
|
|
3303
|
+
if (opts.unused) {
|
|
3304
|
+
console.log(
|
|
3305
|
+
`\n# ${data.file} — ${data.totalUnused} unused export${data.totalUnused !== 1 ? 's' : ''} (of ${data.totalExported} exported)\n`,
|
|
3306
|
+
);
|
|
3307
|
+
} else {
|
|
3308
|
+
const unusedNote = data.totalUnused > 0 ? ` (${data.totalUnused} unused)` : '';
|
|
3309
|
+
console.log(
|
|
3310
|
+
`\n# ${data.file} — ${data.totalExported} exported${unusedNote}, ${data.totalInternal} internal\n`,
|
|
3311
|
+
);
|
|
3312
|
+
}
|
|
3274
3313
|
|
|
3275
3314
|
for (const sym of data.results) {
|
|
3276
3315
|
const icon = kindIcon(sym.kind);
|
package/src/resolve.js
CHANGED
|
@@ -146,8 +146,12 @@ export function computeConfidence(callerFile, targetFile, importedFrom) {
|
|
|
146
146
|
/**
|
|
147
147
|
* Batch resolve multiple imports in a single native call.
|
|
148
148
|
* Returns Map<"fromFile|importSource", resolvedPath> or null when native unavailable.
|
|
149
|
+
* @param {Array} inputs - Array of { fromFile, importSource }
|
|
150
|
+
* @param {string} rootDir - Project root
|
|
151
|
+
* @param {object} aliases - Path aliases
|
|
152
|
+
* @param {string[]} [knownFiles] - Optional file paths for FS cache (avoids syscalls)
|
|
149
153
|
*/
|
|
150
|
-
export function resolveImportsBatch(inputs, rootDir, aliases) {
|
|
154
|
+
export function resolveImportsBatch(inputs, rootDir, aliases, knownFiles) {
|
|
151
155
|
const native = loadNative();
|
|
152
156
|
if (!native) return null;
|
|
153
157
|
|
|
@@ -156,7 +160,12 @@ export function resolveImportsBatch(inputs, rootDir, aliases) {
|
|
|
156
160
|
fromFile,
|
|
157
161
|
importSource,
|
|
158
162
|
}));
|
|
159
|
-
const results = native.resolveImports(
|
|
163
|
+
const results = native.resolveImports(
|
|
164
|
+
nativeInputs,
|
|
165
|
+
rootDir,
|
|
166
|
+
convertAliasesForNative(aliases),
|
|
167
|
+
knownFiles || null,
|
|
168
|
+
);
|
|
160
169
|
const map = new Map();
|
|
161
170
|
for (const r of results) {
|
|
162
171
|
map.set(`${r.fromFile}|${r.importSource}`, normalizePath(path.normalize(r.resolvedPath)));
|
package/src/sequence.js
ADDED
|
@@ -0,0 +1,369 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sequence diagram generation – Mermaid sequenceDiagram from call graph edges.
|
|
3
|
+
*
|
|
4
|
+
* Participants are files (not individual functions). Calls within the same file
|
|
5
|
+
* become self-messages. This keeps diagrams readable and matches typical
|
|
6
|
+
* sequence-diagram conventions.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { openReadonlyOrFail } from './db.js';
|
|
10
|
+
import { paginateResult, printNdjson } from './paginate.js';
|
|
11
|
+
import { findMatchingNodes, isTestFile, kindIcon } from './queries.js';
|
|
12
|
+
import { FRAMEWORK_ENTRY_PREFIXES } from './structure.js';
|
|
13
|
+
|
|
14
|
+
// ─── Alias generation ────────────────────────────────────────────────
|
|
15
|
+
|
|
16
|
+
/**
 * Build short participant aliases from file paths with collision handling.
 * e.g. "src/builder.js" → "builder", but if two files share basename,
 * progressively add parent dirs: "src/builder" vs "lib/builder".
 *
 * All aliases are sanitized to `[a-zA-Z0-9_-]` so they are safe Mermaid
 * participant ids. (Previously only the collision path sanitized, so a
 * unique basename like "app.config" leaked a "." into the diagram.)
 *
 * @param {string[]} files - Forward-slash-separated file paths
 * @returns {Map<string, string>} file path → unique alias
 */
function buildAliases(files) {
  const aliases = new Map();
  const basenames = new Map();

  // Mermaid participant ids tolerate letters, digits, "_" and "-" only.
  const sanitize = (s) => s.replace(/[^a-zA-Z0-9_-]/g, '_');

  // Group by extension-stripped basename to detect collisions
  for (const file of files) {
    const base = file
      .split('/')
      .pop()
      .replace(/\.[^.]+$/, '');
    if (!basenames.has(base)) basenames.set(base, []);
    basenames.get(base).push(file);
  }

  for (const [base, paths] of basenames) {
    if (paths.length === 1) {
      aliases.set(paths[0], sanitize(base));
    } else {
      // Collision — progressively add parent dirs until aliases are unique
      for (let depth = 2; depth <= 10; depth++) {
        const trial = new Map();
        let allUnique = true;
        const seen = new Set();

        for (const p of paths) {
          const parts = p.replace(/\.[^.]+$/, '').split('/');
          const alias = sanitize(parts.slice(-depth).join('_'));
          trial.set(p, alias);
          if (seen.has(alias)) allUnique = false;
          seen.add(alias);
        }

        // Accept on uniqueness, or give up (best effort) at max depth
        if (allUnique || depth === 10) {
          for (const [p, alias] of trial) {
            aliases.set(p, alias);
          }
          break;
        }
      }
    }
  }

  return aliases;
}
|
|
68
|
+
|
|
69
|
+
// ─── Core data function ──────────────────────────────────────────────
|
|
70
|
+
|
|
71
|
+
/**
 * Build sequence diagram data by BFS-forward from an entry point.
 *
 * @param {string} name - Symbol name to trace from
 * @param {string} [dbPath]
 * @param {object} [opts]
 * @param {number} [opts.depth=10]
 * @param {boolean} [opts.noTests]
 * @param {string} [opts.file]
 * @param {string} [opts.kind]
 * @param {boolean} [opts.dataflow]
 * @param {number} [opts.limit]
 * @param {number} [opts.offset]
 * @returns {{ entry, participants, messages, depth, totalMessages, truncated }}
 */
export function sequenceData(name, dbPath, opts = {}) {
  const db = openReadonlyOrFail(dbPath);
  const maxDepth = opts.depth || 10;
  const noTests = opts.noTests || false;
  const withDataflow = opts.dataflow || false;

  // Phase 1: Direct LIKE match
  let matchNode = findMatchingNodes(db, name, opts)[0] ?? null;

  // Phase 2: Prefix-stripped matching
  // Retry the lookup with each framework entry prefix prepended
  // (e.g. lifecycle-handler naming conventions) before giving up.
  if (!matchNode) {
    for (const prefix of FRAMEWORK_ENTRY_PREFIXES) {
      matchNode = findMatchingNodes(db, `${prefix}${name}`, opts)[0] ?? null;
      if (matchNode) break;
    }
  }

  // No match: return an empty-but-well-shaped result so formatters
  // (mermaid/JSON/ndjson) can handle it uniformly.
  if (!matchNode) {
    db.close();
    return {
      entry: null,
      participants: [],
      messages: [],
      depth: maxDepth,
      totalMessages: 0,
      truncated: false,
    };
  }

  const entry = {
    name: matchNode.name,
    file: matchNode.file,
    kind: matchNode.kind,
    line: matchNode.line,
  };

  // BFS forward — track edges, not just nodes
  const visited = new Set([matchNode.id]);
  let frontier = [matchNode.id];
  const messages = [];
  const fileSet = new Set([matchNode.file]);
  const idToNode = new Map();
  idToNode.set(matchNode.id, matchNode);
  let truncated = false;

  // Prepared once and reused for every frontier node; follows
  // 'calls' edges from source to target in the nodes/edges schema.
  const getCallees = db.prepare(
    `SELECT DISTINCT n.id, n.name, n.kind, n.file, n.line
     FROM edges e JOIN nodes n ON e.target_id = n.id
     WHERE e.source_id = ? AND e.kind = 'calls'`,
  );

  for (let d = 1; d <= maxDepth; d++) {
    const nextFrontier = [];

    for (const fid of frontier) {
      const callees = getCallees.all(fid);

      const caller = idToNode.get(fid);

      for (const c of callees) {
        if (noTests && isTestFile(c.file)) continue;

        // Always record the message (even for visited nodes — different caller path)
        fileSet.add(c.file);
        messages.push({
          from: caller.file,
          to: c.file,
          label: c.name,
          type: 'call',
          depth: d,
        });

        // Visited nodes are not re-expanded, so cycles terminate.
        if (visited.has(c.id)) continue;

        visited.add(c.id);
        nextFrontier.push(c.id);
        idToNode.set(c.id, c);
      }
    }

    frontier = nextFrontier;
    if (frontier.length === 0) break;

    if (d === maxDepth && frontier.length > 0) {
      // Only mark truncated if at least one frontier node has further callees
      // NOTE(review): this probe does not apply the noTests filter, so a
      // frontier whose only remaining callees are test files still flags
      // truncated — confirm whether that is intended.
      const hasMoreCalls = frontier.some((fid) => getCallees.all(fid).length > 0);
      if (hasMoreCalls) truncated = true;
    }
  }

  // Dataflow annotations: add return arrows
  if (withDataflow && messages.length > 0) {
    // The dataflow table only exists in newer DBs; probe sqlite_master
    // rather than failing on older graphs.
    const hasTable = db
      .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='dataflow'")
      .get();

    if (hasTable) {
      // Build name|file lookup for O(1) target node access
      const nodeByNameFile = new Map();
      for (const n of idToNode.values()) {
        nodeByNameFile.set(`${n.name}|${n.file}`, n);
      }

      const getReturns = db.prepare(
        `SELECT d.expression FROM dataflow d
         WHERE d.source_id = ? AND d.kind = 'returns'`,
      );
      const getFlowsTo = db.prepare(
        `SELECT d.expression FROM dataflow d
         WHERE d.target_id = ? AND d.kind = 'flows_to'
         ORDER BY d.param_index`,
      );

      // For each called function, check if it has return edges
      // (iterate a snapshot because we append 'return' messages as we go)
      const seenReturns = new Set();
      for (const msg of [...messages]) {
        if (msg.type !== 'call') continue;
        const targetNode = nodeByNameFile.get(`${msg.label}|${msg.to}`);
        if (!targetNode) continue;

        // Dedupe: one return arrow per (callee file → caller file, label) pair
        const returnKey = `${msg.to}->${msg.from}:${msg.label}`;
        if (seenReturns.has(returnKey)) continue;

        const returns = getReturns.all(targetNode.id);

        if (returns.length > 0) {
          seenReturns.add(returnKey);
          // Only the first return expression is shown; fall back to 'result'
          const expr = returns[0].expression || 'result';
          messages.push({
            from: msg.to,
            to: msg.from,
            label: expr,
            type: 'return',
            depth: msg.depth,
          });
        }
      }

      // Annotate call messages with parameter names
      for (const msg of messages) {
        if (msg.type !== 'call') continue;
        const targetNode = nodeByNameFile.get(`${msg.label}|${msg.to}`);
        if (!targetNode) continue;

        const params = getFlowsTo.all(targetNode.id);

        if (params.length > 0) {
          // Cap at 3 param names to keep message labels readable
          const paramNames = params
            .map((p) => p.expression)
            .filter(Boolean)
            .slice(0, 3);
          if (paramNames.length > 0) {
            msg.label = `${msg.label}(${paramNames.join(', ')})`;
          }
        }
      }
    }
  }

  // Sort messages by depth, then call before return
  // (Array.prototype.sort is stable, so same-depth same-type messages
  // keep their BFS insertion order.)
  messages.sort((a, b) => {
    if (a.depth !== b.depth) return a.depth - b.depth;
    if (a.type === 'call' && b.type === 'return') return -1;
    if (a.type === 'return' && b.type === 'call') return 1;
    return 0;
  });

  // Build participant list from files
  const aliases = buildAliases([...fileSet]);
  const participants = [...fileSet].map((file) => ({
    id: aliases.get(file),
    label: file.split('/').pop(),
    file,
  }));

  // Sort participants: entry file first, then alphabetically
  participants.sort((a, b) => {
    if (a.file === entry.file) return -1;
    if (b.file === entry.file) return 1;
    return a.file.localeCompare(b.file);
  });

  // Replace file paths with alias IDs in messages
  for (const msg of messages) {
    msg.from = aliases.get(msg.from);
    msg.to = aliases.get(msg.to);
  }

  db.close();

  const base = {
    entry,
    participants,
    messages,
    depth: maxDepth,
    totalMessages: messages.length,
    truncated,
  };
  const result = paginateResult(base, 'messages', { limit: opts.limit, offset: opts.offset });
  // When paginating, drop participants that no longer appear in the
  // visible message window so the diagram stays minimal.
  if (opts.limit !== undefined || opts.offset !== undefined) {
    const activeFiles = new Set(result.messages.flatMap((m) => [m.from, m.to]));
    result.participants = result.participants.filter((p) => activeFiles.has(p.id));
  }
  return result;
}
|
|
291
|
+
|
|
292
|
+
// ─── Mermaid formatter ───────────────────────────────────────────────
|
|
293
|
+
|
|
294
|
+
/**
 * Escape special Mermaid characters in labels.
 *
 * Mermaid accepts HTML character names written as `#name;` inside labels
 * (e.g. `#lt;`, `#quot;`), which is the convention already used below for
 * ":" and '"'. The previous replacements for "<" and ">" were literal
 * no-ops (pattern and replacement were identical), so raw angle brackets
 * leaked into diagram source and could break rendering.
 *
 * @param {string} str - Raw label text
 * @returns {string} Label safe to embed in Mermaid sequence-diagram source
 */
function escapeMermaid(str) {
  return str
    .replace(/</g, '#lt;')
    .replace(/>/g, '#gt;')
    .replace(/:/g, '#colon;')
    .replace(/"/g, '#quot;');
}
|
|
304
|
+
|
|
305
|
+
/**
 * Convert sequenceData result to Mermaid sequenceDiagram syntax.
 *
 * Emits one `participant` line per file, one arrow per message
 * (`-->>` for returns, `->>` for calls), and a trailing note when the
 * traversal was depth-truncated.
 *
 * @param {{ participants, messages, truncated, depth }} seqResult
 * @returns {string} Mermaid sequence-diagram source
 */
export function sequenceToMermaid(seqResult) {
  const { participants, messages, truncated, depth } = seqResult;

  const participantLines = participants.map(
    (p) => `  participant ${p.id} as ${escapeMermaid(p.label)}`,
  );

  const messageLines = messages.map((msg) => {
    const arrow = msg.type === 'return' ? '-->>' : '->>';
    return `  ${msg.from}${arrow}${msg.to}: ${escapeMermaid(msg.label)}`;
  });

  const out = ['sequenceDiagram', ...participantLines, ...messageLines];

  // Anchor the truncation note on the first (entry) participant
  if (truncated && participants.length > 0) {
    out.push(`  note right of ${participants[0].id}: Truncated at depth ${depth}`);
  }

  return out.join('\n');
}
|
|
330
|
+
|
|
331
|
+
// ─── CLI formatter ───────────────────────────────────────────────────
|
|
332
|
+
|
|
333
|
+
/**
 * CLI entry point — format sequence data as mermaid, JSON, or ndjson.
 *
 * @param {string} name - Symbol name to trace from
 * @param {string} [dbPath]
 * @param {object} [opts] - Passed through to sequenceData; also honors
 *   opts.json / opts.ndjson output switches.
 */
export function sequence(name, dbPath, opts = {}) {
  const data = sequenceData(name, dbPath, opts);

  // Machine-readable outputs are emitted even when nothing matched.
  if (opts.ndjson) {
    printNdjson(data, 'messages');
    return;
  }
  if (opts.json) {
    console.log(JSON.stringify(data, null, 2));
    return;
  }

  // Default: mermaid format
  if (!data.entry) {
    console.log(`No matching function found for "${name}".`);
    return;
  }

  const { entry } = data;
  console.log(`\nSequence from: [${kindIcon(entry.kind)}] ${entry.name} ${entry.file}:${entry.line}`);
  console.log(`Participants: ${data.participants.length} Messages: ${data.totalMessages}`);
  if (data.truncated) {
    console.log(` (truncated at depth ${data.depth})`);
  }
  console.log();

  if (data.messages.length === 0) {
    console.log(' (leaf node — no callees)');
    return;
  }

  console.log(sequenceToMermaid(data));
}
|