@optave/codegraph 3.1.0 → 3.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -5
- package/grammars/tree-sitter-go.wasm +0 -0
- package/package.json +8 -9
- package/src/ast-analysis/rules/csharp.js +201 -0
- package/src/ast-analysis/rules/go.js +182 -0
- package/src/ast-analysis/rules/index.js +82 -0
- package/src/ast-analysis/rules/java.js +175 -0
- package/src/ast-analysis/rules/javascript.js +246 -0
- package/src/ast-analysis/rules/php.js +219 -0
- package/src/ast-analysis/rules/python.js +196 -0
- package/src/ast-analysis/rules/ruby.js +204 -0
- package/src/ast-analysis/rules/rust.js +173 -0
- package/src/ast-analysis/shared.js +223 -0
- package/src/ast.js +15 -28
- package/src/audit.js +4 -5
- package/src/boundaries.js +1 -1
- package/src/branch-compare.js +84 -79
- package/src/builder.js +0 -5
- package/src/cfg.js +106 -338
- package/src/check.js +3 -3
- package/src/cli.js +99 -179
- package/src/cochange.js +1 -1
- package/src/communities.js +13 -16
- package/src/complexity.js +196 -1239
- package/src/cycles.js +1 -1
- package/src/dataflow.js +269 -694
- package/src/db/connection.js +88 -0
- package/src/db/migrations.js +312 -0
- package/src/db/query-builder.js +280 -0
- package/src/db/repository.js +134 -0
- package/src/db.js +19 -399
- package/src/embedder.js +145 -141
- package/src/export.js +1 -1
- package/src/flow.js +161 -162
- package/src/index.js +34 -1
- package/src/kinds.js +49 -0
- package/src/manifesto.js +3 -8
- package/src/mcp.js +37 -20
- package/src/owners.js +132 -132
- package/src/queries-cli.js +866 -0
- package/src/queries.js +1323 -2267
- package/src/result-formatter.js +21 -0
- package/src/sequence.js +177 -182
- package/src/structure.js +200 -199
- package/src/test-filter.js +7 -0
- package/src/triage.js +120 -162
- package/src/viewer.js +1 -1
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { printNdjson } from './paginate.js';
|
|
2
|
+
|
|
3
|
+
/**
 * Shared JSON / NDJSON output dispatch for CLI wrappers.
 *
 * NDJSON takes precedence over JSON when both flags are set.
 *
 * @param {object} data - Result object from a *Data() function
 * @param {string} field - Array field name for NDJSON streaming (e.g. 'results')
 * @param {object} opts - CLI options ({ json?, ndjson? })
 * @returns {boolean} true if output was handled (caller should return early)
 */
export function outputResult(data, field, opts) {
  if (opts.ndjson) {
    printNdjson(data, field);
  } else if (opts.json) {
    console.log(JSON.stringify(data, null, 2));
  } else {
    // Neither machine-readable flag set: caller renders its default format.
    return false;
  }
  return true;
}
|
package/src/sequence.js
CHANGED
|
@@ -7,9 +7,11 @@
|
|
|
7
7
|
*/
|
|
8
8
|
|
|
9
9
|
import { openReadonlyOrFail } from './db.js';
|
|
10
|
-
import { paginateResult
|
|
11
|
-
import { findMatchingNodes,
|
|
10
|
+
import { paginateResult } from './paginate.js';
|
|
11
|
+
import { findMatchingNodes, kindIcon } from './queries.js';
|
|
12
|
+
import { outputResult } from './result-formatter.js';
|
|
12
13
|
import { FRAMEWORK_ENTRY_PREFIXES } from './structure.js';
|
|
14
|
+
import { isTestFile } from './test-filter.js';
|
|
13
15
|
|
|
14
16
|
// ─── Alias generation ────────────────────────────────────────────────
|
|
15
17
|
|
|
@@ -85,208 +87,209 @@ function buildAliases(files) {
|
|
|
85
87
|
*/
|
|
86
88
|
export function sequenceData(name, dbPath, opts = {}) {
  const db = openReadonlyOrFail(dbPath);
  try {
    // NOTE(review): `||` defaults mean depth: 0 falls back to 10 — presumably
    // intentional (0 would be a useless trace), but confirm against CLI parsing.
    const maxDepth = opts.depth || 10;
    const noTests = opts.noTests || false;
    const withDataflow = opts.dataflow || false;

    // Phase 1: Direct LIKE match
    let matchNode = findMatchingNodes(db, name, opts)[0] ?? null;

    // Phase 2: Prefix-stripped matching
    // Retry with each known framework entry-point prefix prepended, taking
    // the first node that matches.
    if (!matchNode) {
      for (const prefix of FRAMEWORK_ENTRY_PREFIXES) {
        matchNode = findMatchingNodes(db, `${prefix}${name}`, opts)[0] ?? null;
        if (matchNode) break;
      }
    }

    // No entry point found: return an empty-but-well-formed result so callers
    // (mermaid formatter, JSON output) need no special-casing.
    if (!matchNode) {
      return {
        entry: null,
        participants: [],
        messages: [],
        depth: maxDepth,
        totalMessages: 0,
        truncated: false,
      };
    }

    const entry = {
      name: matchNode.name,
      file: matchNode.file,
      kind: matchNode.kind,
      line: matchNode.line,
    };

    // BFS forward — track edges, not just nodes
    const visited = new Set([matchNode.id]);
    let frontier = [matchNode.id];
    const messages = [];
    const fileSet = new Set([matchNode.file]);
    // id -> node row; lets us recover the caller's file for each edge.
    const idToNode = new Map();
    idToNode.set(matchNode.id, matchNode);
    let truncated = false;

    // Prepared once, reused for every frontier node (and the truncation probe).
    const getCallees = db.prepare(
      `SELECT DISTINCT n.id, n.name, n.kind, n.file, n.line
       FROM edges e JOIN nodes n ON e.target_id = n.id
       WHERE e.source_id = ? AND e.kind = 'calls'`,
    );

    for (let d = 1; d <= maxDepth; d++) {
      const nextFrontier = [];

      for (const fid of frontier) {
        const callees = getCallees.all(fid);

        // Frontier ids are always inserted into idToNode before being enqueued,
        // so this lookup cannot miss.
        const caller = idToNode.get(fid);

        for (const c of callees) {
          if (noTests && isTestFile(c.file)) continue;

          // Always record the message (even for visited nodes — different caller path)
          fileSet.add(c.file);
          messages.push({
            from: caller.file,
            to: c.file,
            label: c.name,
            type: 'call',
            depth: d,
          });

          // But only expand each node once, to keep the BFS finite on cycles.
          if (visited.has(c.id)) continue;

          visited.add(c.id);
          nextFrontier.push(c.id);
          idToNode.set(c.id, c);
        }
      }

      frontier = nextFrontier;
      if (frontier.length === 0) break;

      if (d === maxDepth && frontier.length > 0) {
        // Only mark truncated if at least one frontier node has further callees
        // (costs one extra query per remaining frontier node — acceptable here).
        const hasMoreCalls = frontier.some((fid) => getCallees.all(fid).length > 0);
        if (hasMoreCalls) truncated = true;
      }
    }

    // Dataflow annotations: add return arrows
    if (withDataflow && messages.length > 0) {
      // The dataflow table is optional (older DBs); probe before preparing queries.
      const hasTable = db
        .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='dataflow'")
        .get();

      if (hasTable) {
        // Build name|file lookup for O(1) target node access
        const nodeByNameFile = new Map();
        for (const n of idToNode.values()) {
          nodeByNameFile.set(`${n.name}|${n.file}`, n);
        }

        const getReturns = db.prepare(
          `SELECT d.expression FROM dataflow d
           WHERE d.source_id = ? AND d.kind = 'returns'`,
        );
        const getFlowsTo = db.prepare(
          `SELECT d.expression FROM dataflow d
           WHERE d.target_id = ? AND d.kind = 'flows_to'
           ORDER BY d.param_index`,
        );

        // For each called function, check if it has return edges
        // Iterate a snapshot ([...messages]) because we push into `messages`
        // inside the loop. This pass runs BEFORE the parameter-annotation pass
        // below, so `msg.label` here is still the bare callee name — both the
        // nodeByNameFile lookup and the returnKey dedup depend on that ordering.
        const seenReturns = new Set();
        for (const msg of [...messages]) {
          if (msg.type !== 'call') continue;
          const targetNode = nodeByNameFile.get(`${msg.label}|${msg.to}`);
          if (!targetNode) continue;

          // One return arrow per (callee file -> caller file, function) triple.
          const returnKey = `${msg.to}->${msg.from}:${msg.label}`;
          if (seenReturns.has(returnKey)) continue;

          const returns = getReturns.all(targetNode.id);

          if (returns.length > 0) {
            seenReturns.add(returnKey);
            const expr = returns[0].expression || 'result';
            messages.push({
              from: msg.to,
              to: msg.from,
              label: expr,
              type: 'return',
              depth: msg.depth,
            });
          }
        }

        // Annotate call messages with parameter names
        // (mutates msg.label in place; return messages are skipped).
        for (const msg of messages) {
          if (msg.type !== 'call') continue;
          const targetNode = nodeByNameFile.get(`${msg.label}|${msg.to}`);
          if (!targetNode) continue;

          const params = getFlowsTo.all(targetNode.id);

          if (params.length > 0) {
            // Cap at 3 param names to keep mermaid labels readable.
            const paramNames = params
              .map((p) => p.expression)
              .filter(Boolean)
              .slice(0, 3);
            if (paramNames.length > 0) {
              msg.label = `${msg.label}(${paramNames.join(', ')})`;
            }
          }
        }
      }
    }

    // Sort messages by depth, then call before return
    // Relies on Array.prototype.sort being stable (guaranteed since ES2019):
    // same-depth, same-type messages keep their BFS insertion order.
    messages.sort((a, b) => {
      if (a.depth !== b.depth) return a.depth - b.depth;
      if (a.type === 'call' && b.type === 'return') return -1;
      if (a.type === 'return' && b.type === 'call') return 1;
      return 0;
    });

    // Build participant list from files
    const aliases = buildAliases([...fileSet]);
    const participants = [...fileSet].map((file) => ({
      id: aliases.get(file),
      label: file.split('/').pop(),
      file,
    }));

    // Sort participants: entry file first, then alphabetically
    participants.sort((a, b) => {
      if (a.file === entry.file) return -1;
      if (b.file === entry.file) return 1;
      return a.file.localeCompare(b.file);
    });

    // Replace file paths with alias IDs in messages
    for (const msg of messages) {
      msg.from = aliases.get(msg.from);
      msg.to = aliases.get(msg.to);
    }

    const base = {
      entry,
      participants,
      messages,
      depth: maxDepth,
      totalMessages: messages.length,
      truncated,
    };
    const result = paginateResult(base, 'messages', { limit: opts.limit, offset: opts.offset });
    // When paginating, drop participants that no longer appear in the visible
    // message window (msg.from/to are alias ids at this point, matching p.id).
    if (opts.limit !== undefined || opts.offset !== undefined) {
      const activeFiles = new Set(result.messages.flatMap((m) => [m.from, m.to]));
      result.participants = result.participants.filter((p) => activeFiles.has(p.id));
    }
    return result;
  } finally {
    // Always release the read-only handle, even on early return or throw.
    db.close();
  }
}
|
|
291
294
|
|
|
292
295
|
// ─── Mermaid formatter ───────────────────────────────────────────────
|
|
@@ -336,15 +339,7 @@ export function sequenceToMermaid(seqResult) {
|
|
|
336
339
|
export function sequence(name, dbPath, opts = {}) {
|
|
337
340
|
const data = sequenceData(name, dbPath, opts);
|
|
338
341
|
|
|
339
|
-
if (opts
|
|
340
|
-
printNdjson(data, 'messages');
|
|
341
|
-
return;
|
|
342
|
-
}
|
|
343
|
-
|
|
344
|
-
if (opts.json) {
|
|
345
|
-
console.log(JSON.stringify(data, null, 2));
|
|
346
|
-
return;
|
|
347
|
-
}
|
|
342
|
+
if (outputResult(data, 'messages', opts)) return;
|
|
348
343
|
|
|
349
344
|
// Default: mermaid format
|
|
350
345
|
if (!data.entry) {
|