@toolbaux/guardian 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +366 -0
- package/dist/adapters/csharp-adapter.js +149 -0
- package/dist/adapters/go-adapter.js +96 -0
- package/dist/adapters/index.js +16 -0
- package/dist/adapters/java-adapter.js +122 -0
- package/dist/adapters/python-adapter.js +183 -0
- package/dist/adapters/runner.js +69 -0
- package/dist/adapters/types.js +1 -0
- package/dist/adapters/typescript-adapter.js +179 -0
- package/dist/benchmarking/framework.js +91 -0
- package/dist/cli.js +343 -0
- package/dist/commands/analyze-depth.js +43 -0
- package/dist/commands/api-spec-extractor.js +52 -0
- package/dist/commands/breaking-change-analyzer.js +334 -0
- package/dist/commands/config-compliance.js +219 -0
- package/dist/commands/constraints.js +221 -0
- package/dist/commands/context.js +101 -0
- package/dist/commands/data-flow-tracer.js +291 -0
- package/dist/commands/dependency-impact-analyzer.js +27 -0
- package/dist/commands/diff.js +146 -0
- package/dist/commands/discrepancy.js +71 -0
- package/dist/commands/doc-generate.js +163 -0
- package/dist/commands/doc-html.js +120 -0
- package/dist/commands/drift.js +88 -0
- package/dist/commands/extract.js +16 -0
- package/dist/commands/feature-context.js +116 -0
- package/dist/commands/generate.js +339 -0
- package/dist/commands/guard.js +182 -0
- package/dist/commands/init.js +209 -0
- package/dist/commands/intel.js +20 -0
- package/dist/commands/license-dependency-auditor.js +33 -0
- package/dist/commands/performance-hotspot-profiler.js +42 -0
- package/dist/commands/search.js +314 -0
- package/dist/commands/security-boundary-auditor.js +359 -0
- package/dist/commands/simulate.js +294 -0
- package/dist/commands/summary.js +27 -0
- package/dist/commands/test-coverage-mapper.js +264 -0
- package/dist/commands/verify-drift.js +62 -0
- package/dist/config.js +441 -0
- package/dist/extract/ai-context-hints.js +107 -0
- package/dist/extract/analyzers/backend.js +1704 -0
- package/dist/extract/analyzers/depth.js +264 -0
- package/dist/extract/analyzers/frontend.js +2221 -0
- package/dist/extract/api-usage-tracker.js +19 -0
- package/dist/extract/cache.js +53 -0
- package/dist/extract/codebase-intel.js +190 -0
- package/dist/extract/compress.js +452 -0
- package/dist/extract/context-block.js +356 -0
- package/dist/extract/contracts.js +183 -0
- package/dist/extract/discrepancies.js +233 -0
- package/dist/extract/docs-loader.js +110 -0
- package/dist/extract/docs.js +2379 -0
- package/dist/extract/drift.js +1578 -0
- package/dist/extract/duplicates.js +435 -0
- package/dist/extract/feature-arcs.js +138 -0
- package/dist/extract/graph.js +76 -0
- package/dist/extract/html-doc.js +1409 -0
- package/dist/extract/ignore.js +45 -0
- package/dist/extract/index.js +455 -0
- package/dist/extract/llm-client.js +159 -0
- package/dist/extract/pattern-registry.js +141 -0
- package/dist/extract/product-doc.js +497 -0
- package/dist/extract/python.js +1202 -0
- package/dist/extract/runtime.js +193 -0
- package/dist/extract/schema-evolution-validator.js +35 -0
- package/dist/extract/test-gap-analyzer.js +20 -0
- package/dist/extract/tests.js +74 -0
- package/dist/extract/types.js +1 -0
- package/dist/extract/validate-backend.js +30 -0
- package/dist/extract/writer.js +11 -0
- package/dist/output-layout.js +37 -0
- package/dist/project-discovery.js +309 -0
- package/dist/schema/architecture.js +350 -0
- package/dist/schema/feature-spec.js +89 -0
- package/dist/schema/index.js +8 -0
- package/dist/schema/ux.js +46 -0
- package/package.json +75 -0
|
@@ -0,0 +1,264 @@
|
|
|
1
|
+
/**
 * Build a directed dependency graph from raw edge lists.
 *
 * @param {Array<{from: string, to: string}>} fileGraph - file-level import edges (preferred).
 * @param {Array<{from: string, to: string}>} moduleGraph - module-level edges (fallback).
 * @param {Array<{id: string, files: string[]}>} modules - module descriptors; used to
 *        register nodes that have no edges at all.
 * @returns {{nodes: Set<string>, adj: Map<string, Set<string>>, edges: number}}
 */
function buildGraph(fileGraph, moduleGraph, modules) {
  const nodes = new Set();
  const adj = new Map();
  let edges = 0;

  const ensure = (id) => {
    if (nodes.has(id)) {
      return;
    }
    nodes.add(id);
    adj.set(id, new Set());
  };

  const addEdge = (from, to) => {
    ensure(from);
    ensure(to);
    const targets = adj.get(from);
    if (!targets.has(to)) {
      targets.add(to);
      edges += 1;
    }
  };

  // File-level edges are more precise; use them when available and only
  // fall back to the coarser module-level graph otherwise.
  if (fileGraph.length > 0) {
    for (const { from, to } of fileGraph) {
      addEdge(from, to);
    }
    // Register every module file as a node, even ones with no edges.
    for (const mod of modules) {
      for (const file of mod.files) {
        ensure(file);
      }
    }
  } else {
    for (const { from, to } of moduleGraph) {
      addEdge(from, to);
    }
    for (const mod of modules) {
      ensure(mod.id);
    }
  }

  return { nodes, adj, edges };
}
|
|
43
|
+
// ─── Subgraph Extraction ──────────────────────────────────────────────────────
|
|
44
|
+
/** Lower-case a string and split it into non-empty [a-z0-9_] tokens. */
function tokenize(s) {
  const parts = s.toLowerCase().split(/[^a-z0-9_]+/);
  return parts.filter((token) => token.length > 0);
}
|
|
47
|
+
/** True when the id (compared case-insensitively) contains any query token. */
function matchesQuery(id, tokens) {
  const haystack = id.toLowerCase();
  for (const token of tokens) {
    if (haystack.includes(token)) {
      return true;
    }
  }
  return false;
}
|
|
51
|
+
/**
 * Extract the subgraph relevant to a query: seed on every node whose id
 * matches any query token, then expand `hops` BFS levels in BOTH directions
 * (what the node imports and who imports the node).
 *
 * Fix: the original rescanned the entire adjacency map for every frontier
 * node on every hop to find reverse edges (O(hops * frontier * E)). We now
 * build a reverse adjacency map once up front. Insertion orders match the
 * original scan order, so the visited set and its iteration order are
 * unchanged.
 *
 * @param {{nodes: Set<string>, adj: Map<string, Set<string>>, edges: number}} graph
 * @param {string[]} queryTokens - lowercase tokens from the user query.
 * @param {number} [hops=2] - BFS expansion radius around the seed nodes.
 * @returns {{subgraph: {nodes: Set<string>, adj: Map<string, Set<string>>, edges: number},
 *           seedNodes: Set<string>, totalMatchedNodes: number}}
 */
function extractSubgraph(graph, queryTokens, hops = 2) {
  const seedNodes = new Set();
  for (const node of graph.nodes) {
    if (matchesQuery(node, queryTokens)) {
      seedNodes.add(node);
    }
  }
  // Build the reverse adjacency once so each hop touches only the frontier's
  // actual in-edges instead of the whole graph.
  const reverseAdj = new Map();
  for (const [from, tos] of graph.adj) {
    for (const to of tos) {
      let sources = reverseAdj.get(to);
      if (!sources) {
        sources = new Set();
        reverseAdj.set(to, sources);
      }
      sources.add(from);
    }
  }
  // BFS expansion
  const visited = new Set(seedNodes);
  let frontier = [...seedNodes];
  for (let hop = 0; hop < hops; hop++) {
    const next = [];
    for (const node of frontier) {
      for (const neighbour of graph.adj.get(node) ?? []) {
        if (!visited.has(neighbour)) {
          visited.add(neighbour);
          next.push(neighbour);
        }
      }
      // Also expand reverse edges (who imports this node?)
      for (const from of reverseAdj.get(node) ?? []) {
        if (!visited.has(from)) {
          visited.add(from);
          next.push(from);
        }
      }
    }
    frontier = next;
  }
  // Build subgraph: restrict adjacency to the visited node set.
  const subAdj = new Map();
  let subEdges = 0;
  for (const node of visited) {
    subAdj.set(node, new Set());
  }
  for (const [from, tos] of graph.adj) {
    if (!visited.has(from))
      continue;
    for (const to of tos) {
      if (visited.has(to)) {
        subAdj.get(from).add(to);
        subEdges++;
      }
    }
  }
  const subgraph = {
    nodes: visited,
    adj: subAdj,
    edges: subEdges
  };
  return { subgraph, seedNodes, totalMatchedNodes: seedNodes.size };
}
|
|
103
|
+
// ─── Metrics ──────────────────────────────────────────────────────────────────
|
|
104
|
+
/**
 * Length (in edges) of the longest path found by memoized DFS. Cycles are
 * cut by returning 0 for a node already on the current DFS stack, and each
 * node's depth is capped at 20.
 */
function longestPath(adj, nodes) {
  const memo = new Map();
  const onStack = new Set();

  const dfs = (node) => {
    if (memo.has(node)) {
      return memo.get(node);
    }
    if (onStack.has(node)) {
      return 0; // back-edge: cut the cycle
    }
    onStack.add(node);
    let depth = 0;
    for (const next of adj.get(node) ?? []) {
      const candidate = 1 + dfs(next);
      if (candidate > depth) {
        depth = candidate;
      }
      if (depth > 20) {
        depth = 20; // hard cap to bound pathological graphs
      }
    }
    onStack.delete(node);
    memo.set(node, depth);
    return depth;
  };

  let longest = 0;
  for (const node of nodes) {
    const depth = dfs(node);
    if (depth > longest) {
      longest = depth;
    }
  }
  return longest;
}
|
|
128
|
+
/** Average and maximum out-degree across the given nodes. */
function computeFanout(adj, nodes) {
  if (nodes.size === 0) {
    return { avg: 0, max: 0 };
  }
  let sum = 0;
  let highest = 0;
  for (const node of nodes) {
    // Nodes absent from the adjacency map count as degree 0.
    const outDegree = adj.get(node)?.size ?? 0;
    sum += outDegree;
    highest = Math.max(highest, outDegree);
  }
  return { avg: sum / nodes.size, max: highest };
}
|
|
141
|
+
// ─── Classify & Score ────────────────────────────────────────────────────────
|
|
142
|
+
/** Bucket a numeric dependency depth: LOW (<=2), MEDIUM (<=5), else HIGH. */
function classifyDepth(depth) {
  return depth <= 2 ? "LOW" : depth <= 5 ? "MEDIUM" : "HIGH";
}
|
|
149
|
+
/** Classify how strongly changes propagate, based on average/max fan-out. */
function classifyPropagation(fanoutAvg, fanoutMax) {
  const strong = fanoutAvg > 2 || fanoutMax > 5;
  if (strong) {
    return "STRONG";
  }
  const moderate = fanoutAvg > 1.2 || fanoutMax > 2;
  return moderate ? "MODERATE" : "LOCAL";
}
|
|
156
|
+
/** How safely the matched feature's structure could be compressed. */
function classifyCompressibility(depth, propagation, hasCycles) {
  if (hasCycles) {
    return "NON_COMPRESSIBLE";
  }
  if (depth === "HIGH" && propagation === "STRONG") {
    return "NON_COMPRESSIBLE";
  }
  if (depth === "LOW" && propagation === "LOCAL") {
    return "COMPRESSIBLE";
  }
  return "PARTIAL";
}
|
|
163
|
+
/** Bucket a 0..1 confidence value: STRONG (>=0.8), MODERATE (>=0.6), else WEAK. */
function classifyConfidence(value) {
  return value >= 0.8 ? "STRONG" : value >= 0.6 ? "MODERATE" : "WEAK";
}
|
|
170
|
+
/**
 * Ambiguity of the query match: the smaller the fraction of subgraph nodes
 * that were direct query matches, the more ambiguous the query.
 */
function classifyAmbiguity(seedNodes, totalNodes) {
  if (totalNodes === 0) {
    return "HIGH"; // nothing matched at all
  }
  const ratio = seedNodes / totalNodes;
  if (ratio < 0.1) {
    return "HIGH";
  }
  return ratio < 0.3 ? "MEDIUM" : "LOW";
}
|
|
180
|
+
/** Recommended implementation pattern for a compressibility class. */
function patternFor(compressible) {
  const byClass = new Map([
    ["NON_COMPRESSIBLE", "multi-step workflow / stateful / pipeline"],
    ["PARTIAL", "layered / service-oriented"]
  ]);
  return byClass.get(compressible) ?? "direct / single-pass";
}
|
|
187
|
+
/** Implementation approaches to avoid for a compressibility class. */
function avoidFor(compressible) {
  switch (compressible) {
    case "NON_COMPRESSIBLE":
      return ["single function implementation", "local-only logic", "greedy shortcut logic"];
    case "PARTIAL":
      return ["monolithic handler logic"];
    default:
      return [];
  }
}
|
|
196
|
+
/**
 * Analyze the structural depth of the codebase region matched by `input.query`.
 *
 * Builds a dependency graph (file-level preferred, module-level fallback),
 * extracts the 2-hop subgraph around nodes matching the query, then derives
 * depth / fan-out / density / cycle metrics and a weighted confidence value.
 *
 * @param {{query: string,
 *          modules: Array<{id: string, files: string[]}>,
 *          moduleGraph: Array<{from: string, to: string}>,
 *          fileGraph: Array<{from: string, to: string}>,
 *          circularDependencies: string[][]}} input
 * @returns {object} report with `structure`, `metrics`, `scores`,
 *          `confidence`, `ambiguity`, `classification`, `recommendation`,
 *          `guardrails` and `override` sections.
 */
export function analyzeDepth(input) {
  const { query, modules, moduleGraph, fileGraph, circularDependencies } = input;
  const round2 = (v) => Math.round(v * 100) / 100;
  const round3 = (v) => Math.round(v * 1000) / 1000;

  const graph = buildGraph(fileGraph, moduleGraph, modules);
  const { subgraph, seedNodes, totalMatchedNodes } = extractSubgraph(graph, tokenize(query), 2);

  const nodeCount = subgraph.nodes.size;
  const edgeCount = subgraph.edges;
  const depth = longestPath(subgraph.adj, subgraph.nodes);
  const { avg: fanoutAvg, max: fanoutMax } = computeFanout(subgraph.adj, subgraph.nodes);
  const density = nodeCount > 0 ? edgeCount / nodeCount : 0;
  // A known circular dependency counts if ANY of its members landed in the subgraph.
  const hasCycles = circularDependencies.some((cycle) => cycle.some((node) => subgraph.nodes.has(node)));

  // Individual 0..1 signals feeding the weighted confidence value.
  const depthScore = Math.min(depth / 10, 1);
  const fanoutScore = Math.min(fanoutAvg / 3, 1);
  const densityScore = Math.min(density / 3, 1);
  const cycleScore = hasCycles ? 1 : 0;
  const queryScore = graph.nodes.size > 0 ? totalMatchedNodes / graph.nodes.size : 0;
  const confidenceValue =
    0.35 * depthScore +
    0.20 * fanoutScore +
    0.15 * densityScore +
    0.15 * cycleScore +
    0.15 * queryScore;

  const depthLevel = classifyDepth(depth);
  const propagation = classifyPropagation(fanoutAvg, fanoutMax);
  const compressible = classifyCompressibility(depthLevel, propagation, hasCycles);

  return {
    feature: query,
    structure: { nodes: nodeCount, edges: edgeCount },
    metrics: {
      depth,
      fanout_avg: round2(fanoutAvg),
      fanout_max: fanoutMax,
      density: round2(density),
      has_cycles: hasCycles
    },
    scores: {
      depth_score: round3(depthScore),
      fanout_score: round3(fanoutScore),
      density_score: round3(densityScore),
      cycle_score: cycleScore,
      query_score: round3(queryScore)
    },
    confidence: {
      value: round3(confidenceValue),
      level: classifyConfidence(confidenceValue)
    },
    ambiguity: {
      level: classifyAmbiguity(seedNodes.size, nodeCount)
    },
    classification: {
      depth_level: depthLevel,
      propagation,
      compressible
    },
    recommendation: {
      primary: { pattern: patternFor(compressible), confidence: round3(confidenceValue) },
      fallback: {
        pattern: "direct / single-pass",
        condition: "if implementation remains isolated with no cross-module propagation"
      },
      avoid: avoidFor(compressible)
    },
    guardrails: {
      enforce_if_confidence_above: 0.8
    },
    override: { allowed: true, requires_reason: true }
  };
}
|