@toolbaux/guardian 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +366 -0
- package/dist/adapters/csharp-adapter.js +149 -0
- package/dist/adapters/go-adapter.js +96 -0
- package/dist/adapters/index.js +16 -0
- package/dist/adapters/java-adapter.js +122 -0
- package/dist/adapters/python-adapter.js +183 -0
- package/dist/adapters/runner.js +69 -0
- package/dist/adapters/types.js +1 -0
- package/dist/adapters/typescript-adapter.js +179 -0
- package/dist/benchmarking/framework.js +91 -0
- package/dist/cli.js +343 -0
- package/dist/commands/analyze-depth.js +43 -0
- package/dist/commands/api-spec-extractor.js +52 -0
- package/dist/commands/breaking-change-analyzer.js +334 -0
- package/dist/commands/config-compliance.js +219 -0
- package/dist/commands/constraints.js +221 -0
- package/dist/commands/context.js +101 -0
- package/dist/commands/data-flow-tracer.js +291 -0
- package/dist/commands/dependency-impact-analyzer.js +27 -0
- package/dist/commands/diff.js +146 -0
- package/dist/commands/discrepancy.js +71 -0
- package/dist/commands/doc-generate.js +163 -0
- package/dist/commands/doc-html.js +120 -0
- package/dist/commands/drift.js +88 -0
- package/dist/commands/extract.js +16 -0
- package/dist/commands/feature-context.js +116 -0
- package/dist/commands/generate.js +339 -0
- package/dist/commands/guard.js +182 -0
- package/dist/commands/init.js +209 -0
- package/dist/commands/intel.js +20 -0
- package/dist/commands/license-dependency-auditor.js +33 -0
- package/dist/commands/performance-hotspot-profiler.js +42 -0
- package/dist/commands/search.js +314 -0
- package/dist/commands/security-boundary-auditor.js +359 -0
- package/dist/commands/simulate.js +294 -0
- package/dist/commands/summary.js +27 -0
- package/dist/commands/test-coverage-mapper.js +264 -0
- package/dist/commands/verify-drift.js +62 -0
- package/dist/config.js +441 -0
- package/dist/extract/ai-context-hints.js +107 -0
- package/dist/extract/analyzers/backend.js +1704 -0
- package/dist/extract/analyzers/depth.js +264 -0
- package/dist/extract/analyzers/frontend.js +2221 -0
- package/dist/extract/api-usage-tracker.js +19 -0
- package/dist/extract/cache.js +53 -0
- package/dist/extract/codebase-intel.js +190 -0
- package/dist/extract/compress.js +452 -0
- package/dist/extract/context-block.js +356 -0
- package/dist/extract/contracts.js +183 -0
- package/dist/extract/discrepancies.js +233 -0
- package/dist/extract/docs-loader.js +110 -0
- package/dist/extract/docs.js +2379 -0
- package/dist/extract/drift.js +1578 -0
- package/dist/extract/duplicates.js +435 -0
- package/dist/extract/feature-arcs.js +138 -0
- package/dist/extract/graph.js +76 -0
- package/dist/extract/html-doc.js +1409 -0
- package/dist/extract/ignore.js +45 -0
- package/dist/extract/index.js +455 -0
- package/dist/extract/llm-client.js +159 -0
- package/dist/extract/pattern-registry.js +141 -0
- package/dist/extract/product-doc.js +497 -0
- package/dist/extract/python.js +1202 -0
- package/dist/extract/runtime.js +193 -0
- package/dist/extract/schema-evolution-validator.js +35 -0
- package/dist/extract/test-gap-analyzer.js +20 -0
- package/dist/extract/tests.js +74 -0
- package/dist/extract/types.js +1 -0
- package/dist/extract/validate-backend.js +30 -0
- package/dist/extract/writer.js +11 -0
- package/dist/output-layout.js +37 -0
- package/dist/project-discovery.js +309 -0
- package/dist/schema/architecture.js +350 -0
- package/dist/schema/feature-spec.js +89 -0
- package/dist/schema/index.js +8 -0
- package/dist/schema/ux.js +46 -0
- package/package.json +75 -0
|
@@ -0,0 +1,334 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* FEATURE 2: BREAKING CHANGE ANALYZER
|
|
3
|
+
*
|
|
4
|
+
* Detects when code changes will break dependent modules
|
|
5
|
+
* Analyzes endpoint signature changes, schema modifications, exports changes
|
|
6
|
+
*
|
|
7
|
+
* Benchmarking: Medium-High complexity
|
|
8
|
+
* Problem Domain: API Safety, Backwards Compatibility
|
|
9
|
+
*/
|
|
10
|
+
import fs from "node:fs/promises";
|
|
11
|
+
import path from "node:path";
|
|
12
|
+
/**
 * Main function: Analyze breaking changes between two versions.
 *
 * Compares a baseline source tree against the current tree and collects
 * changes that may break dependants: removed files, removed exports,
 * removed API endpoints, interface field removals, and newly added
 * required parameters.
 *
 * @param {{baselineDir: string, currentDir: string, output?: string}} options
 * @returns {Promise<object>} report with counts, changes, riskScore and recommendations
 */
export async function analyzeBreakingChanges(options) {
  const { baselineDir, currentDir, output } = options;

  const baselineFiles = await scanSourceFiles(baselineDir);
  const currentFiles = await scanSourceFiles(currentDir);

  const changes = [];

  // A file present in the baseline but absent now is a wholesale removal.
  for (const filePath of baselineFiles.keys()) {
    if (currentFiles.has(filePath)) continue;
    changes.push({
      type: "export_removed",
      severity: "high",
      location: filePath,
      description: `File removed: ${filePath}`,
      affectedModules: [filePath],
    });
  }

  // Files present in both versions get a finer-grained diff.
  for (const [filePath, currentContent] of currentFiles.entries()) {
    const baselineContent = baselineFiles.get(filePath);
    if (baselineContent === undefined) continue;
    changes.push(...detectBreakingChangesInFile(filePath, baselineContent, currentContent));
  }

  changes.push(...detectNewRequiredParams(baselineFiles, currentFiles));

  const countBySeverity = (sev) => changes.filter((c) => c.severity === sev).length;

  const report = {
    timestamp: new Date().toISOString(),
    totalChanges: changes.length,
    criticalChanges: countBySeverity("critical"),
    highChanges: countBySeverity("high"),
    changes,
    riskScore: calculateRiskScore(changes),
    recommendations: generateRecommendations(changes),
  };

  if (output) {
    await writeReport(report, output);
  }
  return report;
}
|
|
59
|
+
/**
 * Helper: Recursively collect source files under `dir`.
 *
 * Returns Map<relativePath, fileContent>. VCS/build directories are
 * skipped, and unreadable directories (permissions, dangling symlinks)
 * are silently ignored rather than aborting the scan.
 *
 * Fix: the extension list now includes ".jsx", matching the scanner used
 * by the config-compliance command, so React JS sources are not missed.
 *
 * @param {string} dir - root directory to walk
 * @returns {Promise<Map<string, string>>}
 */
async function scanSourceFiles(dir) {
  const SKIP_DIRS = [".git", "node_modules", "dist", "build"];
  const SOURCE_EXTS = [".ts", ".js", ".tsx", ".jsx"];
  const files = new Map();

  async function walkDir(currentPath) {
    try {
      const entries = await fs.readdir(currentPath, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(currentPath, entry.name);
        if (entry.isDirectory()) {
          if (!SKIP_DIRS.includes(entry.name)) {
            await walkDir(fullPath);
          }
        } else if (entry.isFile() && SOURCE_EXTS.some((ext) => entry.name.endsWith(ext))) {
          const content = await fs.readFile(fullPath, "utf8");
          files.set(path.relative(dir, fullPath), content);
        }
      }
    } catch {
      // Skip inaccessible directories
    }
  }

  await walkDir(dir);
  return files;
}
|
|
91
|
+
/**
 * Helper: Diff one file's baseline and current text for breaking changes:
 * removed exports (high severity), removed API endpoints (critical), and
 * interface field removals (delegated to detectTypeChanges).
 */
function detectBreakingChangesInFile(filePath, baseline, current) {
  const changes = [];

  // Exports that existed in the baseline but are gone now.
  const keptExports = new Set(extractExports(current));
  for (const exp of extractExports(baseline)) {
    if (keptExports.has(exp)) continue;
    changes.push({
      type: "export_removed",
      severity: "high",
      location: `${filePath}:export:${exp}`,
      description: `Exported symbol removed: ${exp}`,
      affectedModules: [filePath],
      suggestedFix: `Mark as deprecated instead of removing: @deprecated`,
    });
  }

  // Endpoints that existed in the baseline but are gone now.
  const keptEndpoints = new Set(extractEndpoints(current));
  for (const endpoint of extractEndpoints(baseline)) {
    if (keptEndpoints.has(endpoint)) continue;
    changes.push({
      type: "endpoint_removed",
      severity: "critical",
      location: `${filePath}:endpoint:${endpoint}`,
      description: `API endpoint removed: ${endpoint}`,
      affectedModules: [filePath],
      suggestedFix: `Maintain backwards compatibility with deprecation warnings`,
    });
  }

  changes.push(...detectTypeChanges(filePath, baseline, current));
  return changes;
}
|
|
131
|
+
/**
 * Helper: Extract exported symbol names from a source string.
 *
 * Handles `export [default] [async] class/function/interface/type/const X`
 * declarations and `export { a, b as c }` lists.
 *
 * Fixes:
 * - `export async function f` was previously missed (the `async` keyword
 *   broke the declaration pattern).
 * - `export { a as b }` produced the bogus single name "a as b"; the name
 *   visible to consumers is the alias, so "b" is now reported.
 *
 * @param {string} content
 * @returns {string[]} de-duplicated export names
 */
function extractExports(content) {
  const declPattern = /export\s+(?:default\s+)?(?:async\s+)?(?:class|function|interface|type|const)\s+(\w+)/g;
  const listPattern = /export\s*\{\s*([^}]+)\s*\}/g;
  const exports = [];

  for (const match of content.matchAll(declPattern)) {
    exports.push(match[1]);
  }

  for (const match of content.matchAll(listPattern)) {
    const names = match[1]
      .split(",")
      .map((n) => n.trim())
      .filter((n) => n)
      .map((n) => {
        // `local as exported` — take the exported alias.
        const parts = n.split(/\s+as\s+/);
        return parts[parts.length - 1];
      });
    exports.push(...names);
  }

  return [...new Set(exports)];
}
|
|
154
|
+
/**
 * Helper: Extract API route paths registered via `router.get(...)` /
 * `app.post(...)` style calls, or declared as `route: "..."` properties.
 * Returns de-duplicated paths in first-seen order.
 */
function extractEndpoints(content) {
  const patterns = [
    /(?:router|app)\.(?:get|post|put|delete|patch)\s*\(\s*['"`]([^'"`]+)/gi,
    /route:\s*['"`]([^'"`]+)/gi,
  ];
  const found = new Set();
  for (const pattern of patterns) {
    for (const match of content.matchAll(pattern)) {
      found.add(match[1]);
    }
  }
  return [...found];
}
|
|
171
|
+
/**
 * Helper: Detect interface/type field removals between two versions of a
 * file. Each removed field is reported as a high-severity "type_changed".
 */
function detectTypeChanges(filePath, baseline, current) {
  const changes = [];
  const currentInterfaces = extractInterfaces(current);

  for (const [name, baselineFields] of extractInterfaces(baseline).entries()) {
    if (!currentInterfaces.has(name)) continue;
    const kept = new Set(currentInterfaces.get(name));
    for (const field of baselineFields) {
      if (kept.has(field)) continue;
      changes.push({
        type: "type_changed",
        severity: "high",
        location: `${filePath}:interface:${name}`,
        description: `Interface field removed: ${name}.${field}`,
        affectedModules: [filePath],
        suggestedFix: `Mark field as optional or deprecated: ${field}?`,
      });
    }
  }
  return changes;
}
|
|
198
|
+
/**
 * Helper: Extract interface/type-alias names and their field names.
 *
 * Fix: the old field pattern (`/\s*(\w+)\s*[?:]?/g`) matched every word in
 * the body, so type names ("string", "number", ...) were reported as
 * fields alongside the real ones. Fields are now identified by a trailing
 * ":" (optionally "?:"), which is how a field declaration actually looks.
 *
 * Note: bodies are matched up to the first "}", so nested object types
 * are not descended into (same limitation as before).
 *
 * @param {string} content
 * @returns {Map<string, string[]>} interface name -> declared field names
 */
function extractInterfaces(content) {
  const interfaces = new Map();
  const pattern = /(?:interface|type)\s+(\w+)\s*(?:extends|=)?\s*\{([^}]+)\}/g;
  const fieldPattern = /(\w+)\??\s*:/g;
  let match;
  while ((match = pattern.exec(content)) !== null) {
    const [, name, body] = match;
    const fields = [...body.matchAll(fieldPattern)].map((m) => m[1]);
    interfaces.set(name, fields);
  }
  return interfaces;
}
|
|
220
|
+
/**
 * Helper: Flag functions that gained new REQUIRED parameters between the
 * baseline and current file maps.
 *
 * Fix: parameters that are optional ("p?") or carry a default ("p = x")
 * were previously reported as required additions. extractFunctions keeps
 * those markers in the param name (it only strips the ":type" suffix), so
 * such safe additions are now filtered out — they do not break callers.
 *
 * @param {Map<string, string>} baseline - relPath -> file content
 * @param {Map<string, string>} current - relPath -> file content
 * @returns {object[]} breaking-change records
 */
function detectNewRequiredParams(baseline, current) {
  const changes = [];
  // "p?" (optional) and "p = x" (default) do not break existing call sites.
  const isOptional = (param) => param.endsWith("?") || param.includes("=");

  for (const [filePath, currentContent] of current.entries()) {
    if (!baseline.has(filePath)) continue;
    const baseFunctions = extractFunctions(baseline.get(filePath));
    const currentFunctions = extractFunctions(currentContent);

    for (const [funcName, baselineParams] of baseFunctions.entries()) {
      const currentParams = currentFunctions.get(funcName);
      if (!currentParams || currentParams.length <= baselineParams.length) continue;

      const newParams = currentParams
        .slice(baselineParams.length)
        .filter((p) => !isOptional(p));
      if (newParams.length === 0) continue;

      changes.push({
        type: "required_parameter_added",
        severity: "high",
        location: `${filePath}:function:${funcName}`,
        description: `Required parameters added: ${newParams.join(", ")}`,
        affectedModules: [filePath],
        suggestedFix: `Make new parameters optional with defaults`,
      });
    }
  }
  return changes;
}
|
|
251
|
+
/**
 * Helper: Map function names to their parameter-name lists.
 *
 * Each param is the text before any ":" type annotation, so an optional
 * marker ("p?") or a default value ("p = 1") remains part of the name.
 */
function extractFunctions(content) {
  const signature = /(?:async\s+)?(?:export\s+)?function\s+(\w+)\s*\(([^)]*)\)/g;
  const functions = new Map();
  for (const [, name, rawParams] of content.matchAll(signature)) {
    const params = rawParams
      .split(",")
      .map((piece) => piece.trim().split(":")[0])
      .filter(Boolean);
    functions.set(name, params);
  }
  return functions;
}
|
|
269
|
+
/**
 * Helper: Weighted overall risk score in [0, 100].
 * Weights: critical 25, high 10, medium 5, low 1; capped at 100.
 */
function calculateRiskScore(changes) {
  if (changes.length === 0) return 0;
  const WEIGHTS = { critical: 25, high: 10, medium: 5, low: 1 };
  const score = changes.reduce(
    (total, change) => total + (WEIGHTS[change.severity] ?? 0),
    0
  );
  return Math.min(score, 100);
}
|
|
285
|
+
/**
 * Helper: Turn the change list into human-readable next steps.
 */
function generateRecommendations(changes) {
  const recommendations = [];
  const hasSeverity = (sev) => changes.some((c) => c.severity === sev);

  if (hasSeverity("critical")) {
    recommendations.push("🚨 CRITICAL: Major breaking changes detected. Bump major version.");
    recommendations.push("Consider deprecation period (3+ months) before removal.");
  }
  if (hasSeverity("high")) {
    recommendations.push("⚠️ HIGH: Multiple breaking changes. Request code review.");
  }
  if (changes.length > 5) {
    recommendations.push("💡 Consider splitting this into multiple commits.");
  }
  if (!hasSeverity("critical") && !hasSeverity("high")) {
    recommendations.push("✅ No critical breaking changes detected.");
  }
  return recommendations;
}
|
|
306
|
+
/**
 * Helper: Render the report as markdown and write it to `outputPath`,
 * creating parent directories as needed.
 */
async function writeReport(report, outputPath) {
  const parts = [];
  parts.push(`# Breaking Change Analysis Report\n\n`);
  parts.push(`Generated: ${report.timestamp}\n\n`);
  parts.push(`## Summary\n`);
  parts.push(`- **Total Changes:** ${report.totalChanges}\n`);
  parts.push(`- **Critical:** ${report.criticalChanges}\n`);
  parts.push(`- **High:** ${report.highChanges}\n`);
  parts.push(`- **Risk Score:** ${report.riskScore}/100\n\n`);
  parts.push(`## Changes\n`);
  for (const change of report.changes) {
    parts.push(`### ${change.type} - ${change.severity}\n`);
    parts.push(`**Location:** ${change.location}\n`);
    parts.push(`${change.description}\n`);
    if (change.suggestedFix) {
      parts.push(`**Fix:** ${change.suggestedFix}\n`);
    }
    parts.push(`\n`);
  }
  parts.push(`## Recommendations\n`);
  for (const rec of report.recommendations) {
    parts.push(`- ${rec}\n`);
  }
  await fs.mkdir(path.dirname(outputPath), { recursive: true });
  await fs.writeFile(outputPath, parts.join(""), "utf8");
}
|
|
334
|
+
export default analyzeBreakingChanges;
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
/**
 * Primary function: Load a config file, scan the source tree for usage of
 * its fields, and report dead fields plus environment inconsistencies.
 *
 * @param {{configPath: string, srcRoot: string, environments?: string[], output?: string}} options
 * @returns {Promise<object>} compliance result with field lists and stats
 * @throws {Error} when the config file cannot be read
 */
export async function checkConfigCompliance(options) {
  const { configPath, srcRoot, environments = ["dev", "test", "prod"], output } = options;

  let configContent;
  try {
    configContent = await fs.readFile(configPath, "utf8");
  } catch (err) {
    throw new Error(`Failed to read config at ${configPath}: ${err}`);
  }

  const configFields = flattenConfig(parseConfigFile(configContent));
  const usageMap = await scanSourceFiles(srcRoot);

  // Annotate every flattened field with whether/how often it is referenced.
  const checkedFields = configFields.map((field) => ({
    ...field,
    used: hasAnyUsage(field, usageMap),
    usageCount: countUsage(field, usageMap),
  }));

  const deadFields = checkedFields.filter((f) => !f.used);
  const usedCount = checkedFields.length - deadFields.length;
  const envInconsistencies = findEnvironmentInconsistencies(configContent, environments);

  const result = {
    totalFields: configFields.length,
    usedFields: usedCount,
    deadFields,
    inconsistentAcrossEnv: envInconsistencies,
    stats: {
      usage: usedCount,
      deadConfig: deadFields.length,
      envInconsistency: envInconsistencies.length,
    },
  };

  if (output) {
    await writeComplianceReport(result, output);
  }
  return result;
}
|
|
46
|
+
/**
 * Helper: Parse config file content. Tries JSON first, then falls back to
 * extracting a `const DEFAULT_CONFIG = {...}` object literal from
 * TypeScript source; returns {} when neither works.
 */
function parseConfigFile(content) {
  try {
    // Try JSON first
    return JSON.parse(content);
  } catch {
    // Fallback: pull a DEFAULT_CONFIG object literal out of TS source.
    // (Simplified extraction — a real implementation would use an AST.)
    const literal = content.match(/const\s+DEFAULT_CONFIG[^=]*=\s*({[\s\S]*?})\s*;/)?.[1];
    return literal ? parseObjectLiteral(literal) : {};
  }
}
|
|
65
|
+
/**
 * Helper: Flatten a nested config object into a list of leaf fields with
 * dot-notation paths. null/undefined values are skipped; arrays count as
 * leaf values.
 */
function flattenConfig(obj, prefix = "") {
  // Coarse type name for a leaf value (typeof semantics).
  const leafType = (v) => {
    if (Array.isArray(v)) return "array";
    if (typeof v === "object") return "object";
    if (typeof v === "string") return "string";
    if (typeof v === "number") return "number";
    if (typeof v === "boolean") return "boolean";
    return "unknown";
  };

  const fields = [];
  for (const [key, value] of Object.entries(obj)) {
    if (value == null) continue; // skip null and undefined
    const fullPath = prefix === "" ? key : `${prefix}.${key}`;
    if (typeof value === "object" && !Array.isArray(value)) {
      fields.push(...flattenConfig(value, fullPath));
    } else {
      fields.push({
        path: fullPath,
        value,
        type: leafType(value),
        used: false,
        usageCount: 0,
        environments: new Set(),
      });
    }
  }
  return fields;
}
|
|
91
|
+
/**
 * Helper: Walk the source tree and count references of the form
 * `config.<key>`.
 *
 * Fix: directory reads are now wrapped in try/catch, so an unreadable
 * directory (permissions, dangling symlink) skips that subtree instead of
 * aborting the whole scan — matching the analyzer's file scanner.
 *
 * @param {string} srcRoot - root of the source tree
 * @returns {Promise<Map<string, number>>} top-level config key -> occurrence count
 */
async function scanSourceFiles(srcRoot) {
  const SKIP_DIRS = [".git", "node_modules", "dist", "build", ".next"];
  const SOURCE_EXTS = [".ts", ".js", ".tsx", ".jsx"];
  const usageMap = new Map();

  async function walkDir(dir) {
    let entries;
    try {
      entries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
      return; // Skip inaccessible directories
    }
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        if (!SKIP_DIRS.includes(entry.name)) {
          await walkDir(fullPath);
        }
      } else if (entry.isFile() && SOURCE_EXTS.some((ext) => entry.name.endsWith(ext))) {
        const content = await fs.readFile(fullPath, "utf8");
        for (const match of content.match(/config\.\w+/g) ?? []) {
          const key = match.slice("config.".length);
          usageMap.set(key, (usageMap.get(key) ?? 0) + 1);
        }
      }
    }
  }

  await walkDir(srcRoot);
  return usageMap;
}
|
|
122
|
+
/**
 * Helper: True when a field's exact dot-path — or its top-level prefix
 * (e.g. "drift" for "drift.weights.entropy") — appears in the usage map.
 */
function hasAnyUsage(field, usageMap) {
  if (usageMap.has(field.path)) {
    return true;
  }
  const [topLevelKey] = field.path.split(".");
  return usageMap.has(topLevelKey);
}
|
|
133
|
+
/**
 * Helper: Usage count for a field — the exact path's count if present,
 * otherwise the count of its top-level prefix, otherwise 0.
 */
function countUsage(field, usageMap) {
  const exact = usageMap.get(field.path);
  if (exact !== undefined) {
    return exact;
  }
  const [topLevelKey] = field.path.split(".");
  return usageMap.get(topLevelKey) ?? 0;
}
|
|
139
|
+
/**
 * Helper: Find environment-specific inconsistencies in the raw config text.
 *
 * NOTE(review): this is still a stub — it detects env-specific logic but
 * never records anything, so it always returns an empty list.
 */
function findEnvironmentInconsistencies(configContent, environments) {
  const inconsistencies = [];
  for (const env of environments) {
    const envPattern = new RegExp(`(${env}|process\\.env\\.${env.toUpperCase()})`, "gi");
    if (envPattern.test(configContent)) {
      // Env-specific logic detected.
      // TODO: Compare values across envs for consistency
    }
  }
  return inconsistencies;
}
|
|
155
|
+
/**
 * Helper: Coarse runtime type name for a config value. Note `null`
 * reports "object" (typeof semantics); callers skip nulls beforehand.
 */
function getType(value) {
  if (Array.isArray(value)) {
    return "array";
  }
  switch (typeof value) {
    case "object":
      return "object";
    case "string":
      return "string";
    case "number":
      return "number";
    case "boolean":
      return "boolean";
    default:
      return "unknown";
  }
}
|
|
171
|
+
/**
 * Helper: Best-effort parse of a TS/JS object literal string (already
 * including its outer braces) into a plain object.
 *
 * Fixes:
 * - The captured literal already includes "{...}", but the old code
 *   wrapped it in another pair of braces ("{{...}}"), which is never
 *   valid JSON — so this function always returned {}.
 * - Unquoted identifier keys (`port: 1`) are now quoted so JSON.parse
 *   can accept them.
 *
 * Still a simplified parser (no comments, no expressions); returns {}
 * on anything it cannot handle.
 */
function parseObjectLiteral(str) {
  try {
    const sanitized = str
      .replace(/'/g, '"') // Convert single quotes to double
      .replace(/([{,]\s*)([A-Za-z_$][\w$]*)\s*:/g, '$1"$2":') // Quote bare keys
      .replace(/,\s*}/g, "}") // Remove trailing commas
      .replace(/,\s*]/g, "]");
    return JSON.parse(sanitized);
  } catch {
    return {};
  }
}
|
|
188
|
+
/**
 * Helper: Render the compliance result as markdown and write it to
 * `outputPath`, creating parent directories as needed.
 *
 * Fix: the used-fields percentage is now guarded so an empty config
 * (totalFields === 0) reports "0.0%" instead of "NaN%".
 */
async function writeComplianceReport(result, outputPath) {
  const timestamp = new Date().toISOString();
  const usedPct =
    result.totalFields > 0
      ? ((result.usedFields / result.totalFields) * 100).toFixed(1)
      : "0.0";

  let report = `# Configuration Compliance Report\n\n`;
  report += `Generated: ${timestamp}\n\n`;
  report += `## Summary\n`;
  report += `- Total Config Fields: ${result.totalFields}\n`;
  report += `- Used Fields: ${result.usedFields} (${usedPct}%)\n`;
  report += `- Dead Config: ${result.stats.deadConfig}\n`;
  report += `- Environment Inconsistencies: ${result.stats.envInconsistency}\n\n`;

  if (result.deadFields.length > 0) {
    report += `## ⚠️ Dead Configuration Fields\n`;
    report += `These fields are defined but never used:\n\n`;
    for (const field of result.deadFields) {
      report += `- \`${field.path}\` = ${JSON.stringify(field.value)}\n`;
    }
    report += `\n`;
  }

  if (result.inconsistentAcrossEnv.length > 0) {
    report += `## ⚠️ Environment Inconsistencies\n`;
    for (const field of result.inconsistentAcrossEnv) {
      report += `- \`${field.path}\` differs across environments\n`;
    }
    report += `\n`;
  }

  await fs.mkdir(path.dirname(outputPath), { recursive: true });
  await fs.writeFile(outputPath, report, "utf8");
  console.log(`📋 Config compliance report written to ${outputPath}`);
}
|
|
219
|
+
export default checkConfigCompliance;
|