@optave/codegraph 3.1.0 → 3.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +5 -5
  2. package/grammars/tree-sitter-go.wasm +0 -0
  3. package/package.json +8 -9
  4. package/src/ast-analysis/rules/csharp.js +201 -0
  5. package/src/ast-analysis/rules/go.js +182 -0
  6. package/src/ast-analysis/rules/index.js +82 -0
  7. package/src/ast-analysis/rules/java.js +175 -0
  8. package/src/ast-analysis/rules/javascript.js +246 -0
  9. package/src/ast-analysis/rules/php.js +219 -0
  10. package/src/ast-analysis/rules/python.js +196 -0
  11. package/src/ast-analysis/rules/ruby.js +204 -0
  12. package/src/ast-analysis/rules/rust.js +173 -0
  13. package/src/ast-analysis/shared.js +223 -0
  14. package/src/ast.js +15 -28
  15. package/src/audit.js +4 -5
  16. package/src/boundaries.js +1 -1
  17. package/src/branch-compare.js +84 -79
  18. package/src/builder.js +0 -5
  19. package/src/cfg.js +106 -338
  20. package/src/check.js +3 -3
  21. package/src/cli.js +99 -179
  22. package/src/cochange.js +1 -1
  23. package/src/communities.js +13 -16
  24. package/src/complexity.js +196 -1239
  25. package/src/cycles.js +1 -1
  26. package/src/dataflow.js +269 -694
  27. package/src/db/connection.js +88 -0
  28. package/src/db/migrations.js +312 -0
  29. package/src/db/query-builder.js +280 -0
  30. package/src/db/repository.js +134 -0
  31. package/src/db.js +19 -399
  32. package/src/embedder.js +145 -141
  33. package/src/export.js +1 -1
  34. package/src/flow.js +161 -162
  35. package/src/index.js +34 -1
  36. package/src/kinds.js +49 -0
  37. package/src/manifesto.js +3 -8
  38. package/src/mcp.js +37 -20
  39. package/src/owners.js +132 -132
  40. package/src/queries-cli.js +866 -0
  41. package/src/queries.js +1323 -2267
  42. package/src/result-formatter.js +21 -0
  43. package/src/sequence.js +177 -182
  44. package/src/structure.js +200 -199
  45. package/src/test-filter.js +7 -0
  46. package/src/triage.js +120 -162
  47. package/src/viewer.js +1 -1
@@ -0,0 +1,21 @@
1
+ import { printNdjson } from './paginate.js';
2
+
3
/**
 * Shared JSON / NDJSON output dispatch for CLI wrappers.
 *
 * Checks the machine-readable output flags in priority order: NDJSON wins
 * over JSON; when neither is set, nothing is printed and the caller is
 * expected to render its own human-readable default view.
 *
 * @param {object} data - Result object from a *Data() function
 * @param {string} field - Array field name for NDJSON streaming (e.g. 'results')
 * @param {object} opts - CLI options ({ json?, ndjson? })
 * @returns {boolean} true if output was handled (caller should return early)
 */
export function outputResult(data, field, opts) {
  if (opts.ndjson) {
    // Stream the named array field one JSON object per line.
    printNdjson(data, field);
  } else if (opts.json) {
    // Pretty-printed single JSON document.
    console.log(JSON.stringify(data, null, 2));
  } else {
    return false; // no machine format requested — caller renders its default
  }
  return true;
}
package/src/sequence.js CHANGED
@@ -7,9 +7,11 @@
7
7
  */
8
8
 
9
9
  import { openReadonlyOrFail } from './db.js';
10
- import { paginateResult, printNdjson } from './paginate.js';
11
- import { findMatchingNodes, isTestFile, kindIcon } from './queries.js';
10
+ import { paginateResult } from './paginate.js';
11
+ import { findMatchingNodes, kindIcon } from './queries.js';
12
+ import { outputResult } from './result-formatter.js';
12
13
  import { FRAMEWORK_ENTRY_PREFIXES } from './structure.js';
14
+ import { isTestFile } from './test-filter.js';
13
15
 
14
16
  // ─── Alias generation ────────────────────────────────────────────────
15
17
 
@@ -85,208 +87,209 @@ function buildAliases(files) {
85
87
  */
86
88
export function sequenceData(name, dbPath, opts = {}) {
  const db = openReadonlyOrFail(dbPath);
  try {
    const maxDepth = opts.depth || 10;
    const noTests = opts.noTests || false;
    const withDataflow = opts.dataflow || false;

    // Phase 1: Direct LIKE match
    let matchNode = findMatchingNodes(db, name, opts)[0] ?? null;

    // Phase 2: Prefix-stripped matching — retry with known framework entry prefixes
    if (!matchNode) {
      for (const prefix of FRAMEWORK_ENTRY_PREFIXES) {
        matchNode = findMatchingNodes(db, `${prefix}${name}`, opts)[0] ?? null;
        if (matchNode) break;
      }
    }

    // No entry point found: return an empty-but-well-formed result shape.
    if (!matchNode) {
      return {
        entry: null,
        participants: [],
        messages: [],
        depth: maxDepth,
        totalMessages: 0,
        truncated: false,
      };
    }

    const entry = {
      name: matchNode.name,
      file: matchNode.file,
      kind: matchNode.kind,
      line: matchNode.line,
    };

    // BFS forward over 'calls' edges — track edges (messages), not just nodes.
    const visited = new Set([matchNode.id]);
    let frontier = [matchNode.id];
    const messages = [];
    const fileSet = new Set([matchNode.file]);
    const nodesById = new Map([[matchNode.id, matchNode]]);
    let truncated = false;

    const getCallees = db.prepare(
      `SELECT DISTINCT n.id, n.name, n.kind, n.file, n.line
      FROM edges e JOIN nodes n ON e.target_id = n.id
      WHERE e.source_id = ? AND e.kind = 'calls'`,
    );

    for (let level = 1; level <= maxDepth; level++) {
      const nextFrontier = [];

      for (const sourceId of frontier) {
        const caller = nodesById.get(sourceId);

        for (const callee of getCallees.all(sourceId)) {
          if (noTests && isTestFile(callee.file)) continue;

          // Always record the message (even for visited nodes — different caller path)
          fileSet.add(callee.file);
          messages.push({
            from: caller.file,
            to: callee.file,
            label: callee.name,
            type: 'call',
            depth: level,
          });

          if (visited.has(callee.id)) continue;
          visited.add(callee.id);
          nextFrontier.push(callee.id);
          nodesById.set(callee.id, callee);
        }
      }

      frontier = nextFrontier;
      if (frontier.length === 0) break;

      if (level === maxDepth && frontier.length > 0) {
        // Only mark truncated if at least one frontier node has further callees
        const hasMoreCalls = frontier.some((id) => getCallees.all(id).length > 0);
        if (hasMoreCalls) truncated = true;
      }
    }

    // Dataflow annotations: add return arrows and parameter labels (optional table).
    if (withDataflow && messages.length > 0) {
      const hasTable = db
        .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='dataflow'")
        .get();

      if (hasTable) {
        // Build name|file lookup for O(1) target node access
        const nodesByNameFile = new Map();
        for (const node of nodesById.values()) {
          nodesByNameFile.set(`${node.name}|${node.file}`, node);
        }

        const getReturns = db.prepare(
          `SELECT d.expression FROM dataflow d
      WHERE d.source_id = ? AND d.kind = 'returns'`,
        );
        const getFlowsTo = db.prepare(
          `SELECT d.expression FROM dataflow d
      WHERE d.target_id = ? AND d.kind = 'flows_to'
      ORDER BY d.param_index`,
        );

        // For each called function, check if it has return edges; dedupe per
        // (to, from, label) so one return arrow is emitted per call pair.
        const seenReturns = new Set();
        for (const message of [...messages]) {
          if (message.type !== 'call') continue;
          const targetNode = nodesByNameFile.get(`${message.label}|${message.to}`);
          if (!targetNode) continue;

          const returnKey = `${message.to}->${message.from}:${message.label}`;
          if (seenReturns.has(returnKey)) continue;

          const returns = getReturns.all(targetNode.id);
          if (returns.length > 0) {
            seenReturns.add(returnKey);
            messages.push({
              from: message.to,
              to: message.from,
              label: returns[0].expression || 'result',
              type: 'return',
              depth: message.depth,
            });
          }
        }

        // Annotate call messages with up to three parameter names.
        for (const message of messages) {
          if (message.type !== 'call') continue;
          const targetNode = nodesByNameFile.get(`${message.label}|${message.to}`);
          if (!targetNode) continue;

          const params = getFlowsTo.all(targetNode.id);
          if (params.length > 0) {
            const paramNames = params
              .map((p) => p.expression)
              .filter(Boolean)
              .slice(0, 3);
            if (paramNames.length > 0) {
              message.label = `${message.label}(${paramNames.join(', ')})`;
            }
          }
        }
      }
    }

    // Sort messages by depth, then call before return
    messages.sort((a, b) => {
      if (a.depth !== b.depth) return a.depth - b.depth;
      if (a.type === 'call' && b.type === 'return') return -1;
      if (a.type === 'return' && b.type === 'call') return 1;
      return 0;
    });

    // Build participant list from files
    const aliases = buildAliases([...fileSet]);
    const participants = [...fileSet].map((file) => ({
      id: aliases.get(file),
      label: file.split('/').pop(),
      file,
    }));

    // Sort participants: entry file first, then alphabetically
    participants.sort((a, b) => {
      if (a.file === entry.file) return -1;
      if (b.file === entry.file) return 1;
      return a.file.localeCompare(b.file);
    });

    // Replace file paths with alias IDs in messages
    for (const message of messages) {
      message.from = aliases.get(message.from);
      message.to = aliases.get(message.to);
    }

    const base = {
      entry,
      participants,
      messages,
      depth: maxDepth,
      totalMessages: messages.length,
      truncated,
    };
    const result = paginateResult(base, 'messages', { limit: opts.limit, offset: opts.offset });
    if (opts.limit !== undefined || opts.offset !== undefined) {
      // When paginated, drop participants no longer referenced by any message.
      const activeFiles = new Set(result.messages.flatMap((m) => [m.from, m.to]));
      result.participants = result.participants.filter((p) => activeFiles.has(p.id));
    }
    return result;
  } finally {
    // Guarantee the read-only handle is released on every path, including throws.
    db.close();
  }
}
291
294
 
292
295
  // ─── Mermaid formatter ───────────────────────────────────────────────
@@ -336,15 +339,7 @@ export function sequenceToMermaid(seqResult) {
336
339
  export function sequence(name, dbPath, opts = {}) {
337
340
  const data = sequenceData(name, dbPath, opts);
338
341
 
339
- if (opts.ndjson) {
340
- printNdjson(data, 'messages');
341
- return;
342
- }
343
-
344
- if (opts.json) {
345
- console.log(JSON.stringify(data, null, 2));
346
- return;
347
- }
342
+ if (outputResult(data, 'messages', opts)) return;
348
343
 
349
344
  // Default: mermaid format
350
345
  if (!data.entry) {