archrisk-engine 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/aiDiagnosis.d.ts +24 -0
- package/dist/aiDiagnosis.js +125 -0
- package/dist/analyzer.d.ts +18 -0
- package/dist/analyzer.js +136 -0
- package/dist/archScanner.d.ts +46 -0
- package/dist/archScanner.js +235 -0
- package/dist/deepAnalysis.d.ts +10 -0
- package/dist/deepAnalysis.js +101 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +21 -0
- package/dist/repoAnalyzer.d.ts +43 -0
- package/dist/repoAnalyzer.js +299 -0
- package/package.json +21 -0
- package/src/aiDiagnosis.ts +159 -0
- package/src/analyzer.ts +124 -0
- package/src/archScanner.ts +227 -0
- package/src/deepAnalysis.ts +85 -0
- package/src/index.ts +5 -0
- package/src/repoAnalyzer.ts +298 -0
- package/tsconfig.json +22 -0
package/dist/aiDiagnosis.d.ts ADDED
@@ -0,0 +1,24 @@
+/**
+ * [The Brain] AI Diagnosis Engine (TypeScript Version)
+ *
+ * Transforms errors and architecture issues into actionable solutions using Gemini API.
+ */
+import { ArchIssue } from "./archScanner.js";
+export interface DiagnosisResult {
+    severity: 'error' | 'warning';
+    issue: string;
+    suggestion: string;
+    fixedCode: string;
+    confidence: number;
+}
+/**
+ * Diagnose code error using Gemini AI
+ */
+export declare function diagnoseCodeError(file: string, line: number, errorType: string, errorMessage: string, codeContext: string): Promise<DiagnosisResult>;
+/**
+ * Diagnose architecture issue using Gemini AI
+ */
+export declare function diagnoseArchIssue(issue: ArchIssue, fileContexts: {
+    path: string;
+    content: string;
+}[]): Promise<DiagnosisResult>;
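
The declaration file above only exposes the two diagnosis entry points and the `DiagnosisResult` shape. For orientation, here is a minimal consumer sketch based solely on those signatures; the deep-import path and the sample error values are assumptions, not documented usage:

```ts
// Hypothetical consumer sketch; only the function signature and result shape
// come from the .d.ts above. Assumes GEMINI_API_KEY is set, otherwise the
// package falls back to its mock diagnosis.
import { diagnoseCodeError, DiagnosisResult } from "archrisk-engine/dist/aiDiagnosis.js";

async function reportSyntaxError(): Promise<void> {
  const result: DiagnosisResult = await diagnoseCodeError(
    "app/main.py",             // file (illustrative)
    42,                        // line
    "SyntaxError",             // errorType
    "invalid syntax",          // errorMessage
    "def handler(:\n    pass"  // codeContext
  );
  console.log(`${result.severity}: ${result.issue} (confidence ${result.confidence})`);
  console.log(result.suggestion);
}

reportSyntaxError();
```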

package/dist/aiDiagnosis.js ADDED
@@ -0,0 +1,125 @@
+"use strict";
+/**
+ * [The Brain] AI Diagnosis Engine (TypeScript Version)
+ *
+ * Transforms errors and architecture issues into actionable solutions using Gemini API.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.diagnoseCodeError = diagnoseCodeError;
+exports.diagnoseArchIssue = diagnoseArchIssue;
+const diagnosisCache = new Map();
+/**
+ * Call Gemini API with structured prompt
+ */
+async function callGeminiAPI(prompt) {
+    const apiKey = process.env.GEMINI_API_KEY;
+    if (!apiKey) {
+        console.warn('[Brain] Gemini API key not configured. Returning mock diagnosis.');
+        return mockDiagnosis();
+    }
+    const model = process.env.AI_DIAGNOSIS_MODEL || 'gemini-2.0-flash';
+    const url = `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${apiKey}`;
+    try {
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: { 'Content-Type': 'application/json' },
+            body: JSON.stringify({
+                contents: [{
+                        parts: [{ text: prompt }]
+                    }],
+                generationConfig: {
+                    temperature: 0.2,
+                    maxOutputTokens: parseInt(process.env.AI_DIAGNOSIS_MAX_TOKENS || '1024')
+                }
+            })
+        });
+        const data = await response.json();
+        if (data.error) {
+            throw new Error(data.error.message);
+        }
+        const text = data.candidates[0].content.parts[0].text;
+        const jsonMatch = text.match(/```json\n([\s\S]*?)\n```/) || text.match(/\{[\s\S]*\}/);
+        if (jsonMatch) {
+            return JSON.parse(jsonMatch[1] || jsonMatch[0]);
+        }
+        throw new Error('Failed to parse JSON from Gemini response');
+    }
+    catch (error) {
+        console.error('[Brain] Gemini API error:', error.message);
+        return mockDiagnosis('AI Diagnosis limited - using fallback', error.message);
+    }
+}
+function mockDiagnosis(issue = 'API key not configured - using mock diagnosis', suggestion = 'Set GEMINI_API_KEY in environment variables') {
+    return {
+        severity: 'error',
+        issue: issue,
+        suggestion: suggestion,
+        fixedCode: '# Gemini API quota exceeded or key missing. Using static fallback analysis.',
+        confidence: 0.0
+    };
+}
+/**
+ * Diagnose code error using Gemini AI
+ */
+async function diagnoseCodeError(file, line, errorType, errorMessage, codeContext) {
+    const cacheKey = `${file}:${line}:${errorType}`;
+    if (diagnosisCache.has(cacheKey)) {
+        return diagnosisCache.get(cacheKey);
+    }
+    const prompt = `당신은 AI-to-Job 컴파일러입니다. Python 에러를 실행 가능한 작업 정의로 변환하는 역할입니다.
+
+**에러 정보:**
+- 타입: ${errorType}
+- 라인: ${line}
+- 파일: ${file}
+- 에러 메시지: ${errorMessage}
+
+**코드 컨텍스트:**
+```python
+${codeContext}
+```
+
+**작업:**
+이 에러를 분석하고 다음의 정확한 구조로 JSON 응답만 제공하세요:
+{
+"severity": "error" | "warning",
+"issue": "문제에 대한 간단한 설명 (한글)",
+"suggestion": "실행 가능한 수정 지침 (한글)",
+"fixedCode": "수정된 코드 스니펫",
+"confidence": 0.0 ~ 1.0 사이의 값
+}
+
+**중요**: 유효한 JSON만 응답하세요. 설명이나 JSON 블록 밖의 마크다운은 작성하지 마세요.`;
+    const diagnosis = await callGeminiAPI(prompt);
+    diagnosisCache.set(cacheKey, diagnosis);
+    return diagnosis;
+}
+/**
+ * Diagnose architecture issue using Gemini AI
+ */
+async function diagnoseArchIssue(issue, fileContexts) {
+    const contexts = fileContexts.map(f => `--- File: ${f.path} ---\n${f.content.slice(0, 2000)}...`).join('\n\n');
+    const prompt = `당신은 Senior Software Architect AI입니다. 현재 프로젝트의 아키텍처 결함을 분석하고 리팩토링 방안을 제시하는 역할입니다.
+
+**탐지된 이슈 정보:**
+- 타입(RuleId): ${issue.ruleId}
+- 제목: ${issue.title}
+- 상세 설명: ${issue.details}
+- 관련 경로: ${issue.relatedPaths?.join(', ') || 'N/A'}
+
+**코드 컨텍스트 (일부):**
+${contexts}
+
+**작업:**
+이 아키텍처 이슈를 분석하고 다음의 정확한 구조로 JSON 응답만 제공하세요:
+{
+"severity": "error" | "warning",
+"issue": "아키텍처 문제에 대한 구조적 분석 (한글)",
+"suggestion": "구체적인 리팩토링 가이드 및 단계별 조치 사항 (한글)",
+"fixedCode": "리팩토링에 도움이 되는 코드 스니펫 또는 인터페이스 설계 예시",
+"confidence": 0.0 ~ 1.0 사이의 값
+}
+
+**중요**: 유효한 JSON만 응답하세요. 설명이나 JSON 블록 밖의 마크다운은 작성하지 마세요.`;
+    return await callGeminiAPI(prompt);
+}

package/dist/analyzer.d.ts ADDED
@@ -0,0 +1,18 @@
+/**
+ * [The Eye] Error Capture Engine (TypeScript Version)
+ *
+ * Intercepts Python syntax/runtime errors.
+ */
+export interface AnalysisResult {
+    hasError: boolean;
+    error?: string;
+    line?: number;
+    type?: string;
+    file?: string;
+}
+/**
+ * Analyze Python code string for syntax errors and security risks
+ * @param code - Python source code
+ * @param fileName - Original filename for context
+ */
+export declare function analyzePythonCode(code: string, fileName: string): Promise<AnalysisResult>;
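
Based only on the `AnalysisResult` shape and the `analyzePythonCode` signature declared above, a hypothetical call might look like the sketch below; the import path and the sample Python snippet are assumptions:

```ts
// Sketch derived from the declared signature above (import path is an assumption).
import { analyzePythonCode } from "archrisk-engine/dist/analyzer.js";

const snippet = 'import os\nos.system("rm -rf /tmp/cache")\n';

analyzePythonCode(snippet, "scripts/cleanup.py").then((result) => {
  if (result.hasError) {
    // Per the implementation below, the risk scan should flag os.system()
    // before the python3 syntax check is even attempted.
    console.log(`[${result.type}] ${result.file}:${result.line} ${result.error}`);
  } else {
    console.log("No syntax errors or flagged risks.");
  }
});
```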
package/dist/analyzer.js ADDED
@@ -0,0 +1,136 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.analyzePythonCode = analyzePythonCode;
+const child_process_1 = require("child_process");
+const path = __importStar(require("path"));
+const fs = __importStar(require("fs"));
+const os_1 = require("os");
+const crypto_1 = require("crypto");
+/**
+ * Analyze Python code string for syntax errors and security risks
+ * @param code - Python source code
+ * @param fileName - Original filename for context
+ */
+async function analyzePythonCode(code, fileName) {
+    // 1. Architecture Health Scan (God Module / Large File)
+    const lines = code.split('\n');
+    if (lines.length > 800) {
+        return {
+            hasError: true,
+            error: `Large File Risk: ${lines.length} lines. 이 파일의 수정은 시스템 전반에 예측 불가능한 영향을 미칠 수 있습니다.`,
+            line: 1,
+            type: 'ProductionRisk',
+            file: fileName
+        };
+    }
+    // 2. Risk Scan (Operational & Production Readiness)
+    const riskResult = scanForRisks(code, fileName);
+    if (riskResult.hasError) {
+        return riskResult;
+    }
+    // 3. Syntax Check
+    const flatFileName = fileName.replace(/[\/\\]/g, '_');
+    const tempFilePath = path.join((0, os_1.tmpdir)(), `arch_risk_${(0, crypto_1.randomBytes)(4).toString('hex')}_${flatFileName}`);
+    try {
+        fs.writeFileSync(tempFilePath, code);
+        return new Promise((resolve) => {
+            const proc = (0, child_process_1.spawn)('python3', ['-m', 'py_compile', tempFilePath]);
+            let stderr = '';
+            proc.stderr.on('data', (data) => {
+                stderr += data.toString();
+            });
+            proc.on('close', (code) => {
+                if (code !== 0) {
+                    const errorInfo = parseErrorMessage(stderr, fileName);
+                    resolve({
+                        hasError: true,
+                        ...errorInfo
+                    });
+                }
+                else {
+                    resolve({ hasError: false });
+                }
+                // Cleanup temp file
+                if (fs.existsSync(tempFilePath))
+                    fs.unlinkSync(tempFilePath);
+            });
+            setTimeout(() => {
+                proc.kill();
+                resolve({ hasError: false, error: 'Analysis timeout' });
+                if (fs.existsSync(tempFilePath))
+                    fs.unlinkSync(tempFilePath);
+            }, 5000);
+        });
+    }
+    catch (error) {
+        return { hasError: true, error: error.message };
+    }
+}
+function scanForRisks(code, fileName) {
+    const risks = [
+        { pattern: /os\.system\(/, type: 'SecurityRisk', message: '[보안] os.system() 사용이 감지되었습니다. 외부 공격에 노출될 위험이 있습니다.' },
+        { pattern: /subprocess\.(popen|run|call|check_output)\(.*shell\s*=\s*True/, type: 'SecurityRisk', message: '[보안] shell=True 옵션은 명령어 주입 공격의 통로가 될 수 있습니다.' },
+        { pattern: /eval\(|exec\(/, type: 'SecurityRisk', message: '[보안] 동적 코드 실행 함수 사용은 잠재적인 보안 홀을 형성합니다.' },
+        { pattern: /requests\.(get|post|put|delete|patch)\((?!.*timeout=)/, type: 'ProductionRisk', message: '[운영] 외부 API 호출 시 timeout 설정이 없습니다. 장애 발생 시 서비스가 무한 대기에 빠질 수 있습니다.' },
+        { pattern: /aiohttp\.ClientSession\(\).*(get|post|put|delete|patch)\((?!.*timeout=)/, type: 'ProductionRisk', message: '[운영] 비동기 호출 시 timeout 설정이 없습니다. 시스템 리소스 고갈의 원인이 됩니다.' },
+        { pattern: /(?:api_key|password|secret|token)\s*=\s*['"][a-zA-Z0-9_-]{10,}['"]/, type: 'ProductionRisk', message: '[운영] 민감 정보(API Key/Password)가 코드 내에 하드코딩 되어 있습니다. 보안 사고의 직접적인 원인입니다.' },
+    ];
+    const lines = code.split('\n');
+    for (let i = 0; i < lines.length; i++) {
+        for (const risk of risks) {
+            if (risk.pattern.test(lines[i])) {
+                return {
+                    hasError: true,
+                    error: risk.message,
+                    line: i + 1,
+                    type: risk.type,
+                    file: fileName
+                };
+            }
+        }
+    }
+    return { hasError: false };
+}
+function parseErrorMessage(stderr, fileName) {
+    const lineMatch = stderr.match(/line (\d+)/);
+    const typeMatch = stderr.match(/(\w+Error):/);
+    return {
+        error: stderr.trim(),
+        line: lineMatch ? parseInt(lineMatch[1]) : undefined,
+        type: typeMatch ? typeMatch[1] : 'SyntaxError',
+        file: fileName
+    };
+}

package/dist/archScanner.d.ts ADDED
@@ -0,0 +1,46 @@
+/**
+ * [Phase 8] Architecture Scanner - Ported from Code Observatory
+ */
+export interface ArchIssue {
+    id: string;
+    severity: 'WARN' | 'ERROR';
+    ruleId: string;
+    title: string;
+    details: string;
+    relatedPaths?: string[];
+    metrics?: any;
+}
+export interface ArchNode {
+    id: string;
+    kind: 'project' | 'folder' | 'file' | 'hotspot' | 'cycle';
+    label: string;
+    health: 'OK' | 'WARN' | 'ERROR';
+    path: string;
+    metrics: any;
+    issues?: string[];
+}
+export interface ArchEdge {
+    id: string;
+    source: string;
+    target: string;
+    type: 'contains' | 'imports';
+}
+export interface ArchScanResult {
+    scanId: string;
+    rootPath: string;
+    createdAt: number;
+    health: 'OK' | 'WARN' | 'ERROR';
+    nodes: ArchNode[];
+    edges: ArchEdge[];
+    issues: ArchIssue[];
+    phase: 'FAST' | 'DEPS' | 'DONE';
+}
+export declare function fastScan(rootPath: string): ArchScanResult;
+export declare function depsScan(rootPath: string): {
+    adj: Map<string, Set<string>>;
+    cycles: string[][];
+};
+/**
+ * Note: Full DEPS scan requires complex dependency resolution logic.
+ * For now, we provide the FAST scan as the baseline for the GitHub App.
+ */
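
A minimal sketch of driving the scanner, using only the `fastScan`/`depsScan` signatures declared above; the deep-import path is an assumption:

```ts
// Sketch based on the declarations above; fastScan returns the FAST-phase result,
// depsScan returns an adjacency map of resolved relative imports plus any cycles.
import { fastScan, depsScan } from "archrisk-engine/dist/archScanner.js";

const scan = fastScan(process.cwd());
console.log(`health=${scan.health}, issues=${scan.issues.length}, nodes=${scan.nodes.length}`);
for (const issue of scan.issues) {
  console.log(`- [${issue.severity}] ${issue.ruleId}: ${issue.title}`);
}

const { adj, cycles } = depsScan(process.cwd());
console.log(`files with imports: ${adj.size}, import cycles detected: ${cycles.length}`);
```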

package/dist/archScanner.js ADDED
@@ -0,0 +1,235 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fastScan = fastScan;
+exports.depsScan = depsScan;
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+function safeId(prefix = 'id') {
+    return `${prefix}_${Date.now()}_${Math.random().toString(16).slice(2)}`;
+}
+function isTextFile(p) {
+    const ext = path.extname(p).toLowerCase();
+    return ['.js', '.jsx', '.ts', '.tsx', '.mjs', '.cjs', '.json', '.py'].includes(ext);
+}
+function isSkippableDir(name) {
+    return ['node_modules', '.git', '.next', 'dist', 'build', 'out', '.turbo', '__pycache__', 'venv', '.venv'].includes(name);
+}
+function suspiciousFolderName(name) {
+    const n = name.toLowerCase();
+    return ['temp', 'tmp', 'backup', 'bak', 'old', 'new', 'new2', 'final', 'final_final', 'copy'].some(k => n.includes(k));
+}
+function countLOC(text) {
+    return text.split(/\r?\n/).length;
+}
+function fastScan(rootPath) {
+    const start = Date.now();
+    let fileCount = 0;
+    let dirCount = 0;
+    let rootFiles = 0;
+    let maxDepth = 0;
+    let suspiciousDirs = [];
+    const hotspots = [];
+    function walk(dir, depth) {
+        maxDepth = Math.max(maxDepth, depth);
+        let entries;
+        try {
+            entries = fs.readdirSync(dir, { withFileTypes: true });
+        }
+        catch {
+            return;
+        }
+        for (const ent of entries) {
+            const full = path.join(dir, ent.name);
+            if (ent.isDirectory()) {
+                if (isSkippableDir(ent.name))
+                    continue;
+                dirCount++;
+                if (suspiciousFolderName(ent.name))
+                    suspiciousDirs.push(full);
+                walk(full, depth + 1);
+            }
+            else if (ent.isFile()) {
+                fileCount++;
+                if (dir === rootPath)
+                    rootFiles++;
+                if (isTextFile(full)) {
+                    try {
+                        const txt = fs.readFileSync(full, 'utf8');
+                        const loc = countLOC(txt);
+                        if (loc >= 800)
+                            hotspots.push({ filePath: full, loc });
+                    }
+                    catch { }
+                }
+            }
+        }
+    }
+    walk(rootPath, 0);
+    const scanId = safeId('scan');
+    const issues = [];
+    const nodes = [];
+    const edges = [];
+    let overall = 'OK';
+    if (rootFiles > 60) {
+        overall = 'WARN';
+        issues.push({ id: safeId('issue'), severity: 'WARN', ruleId: 'root-files', title: 'Root is cluttered', details: `Root directory has ${rootFiles} files. Consider moving code under src/.`, metrics: { rootFiles } });
+    }
+    if (maxDepth > 10) {
+        if (overall === 'OK')
+            overall = 'WARN';
+        issues.push({ id: safeId('issue'), severity: 'WARN', ruleId: 'deep-nesting', title: 'Deep directory nesting', details: `Max directory depth is ${maxDepth}.`, metrics: { maxDepth } });
+    }
+    if (suspiciousDirs.length >= 2) {
+        if (overall === 'OK')
+            overall = 'WARN';
+        issues.push({ id: safeId('issue'), severity: 'WARN', ruleId: 'suspicious-folders', title: 'Suspicious folders found', details: `Found folders like temp/backup/etc.`, relatedPaths: suspiciousDirs.slice(0, 10).map(p => path.relative(rootPath, p)), metrics: { count: suspiciousDirs.length } });
+    }
+    if (hotspots.length >= 3) {
+        if (overall === 'OK')
+            overall = 'WARN';
+        issues.push({ id: safeId('issue'), severity: 'WARN', ruleId: 'hotspots', title: 'Large files (hotspots)', details: `Found ${hotspots.length} files with LOC >= 800.`, relatedPaths: hotspots.slice(0, 10).map(h => path.relative(rootPath, h.filePath)), metrics: { hotspots: hotspots.length } });
+    }
+    nodes.push({ id: 'ARCH_PROJECT', kind: 'project', label: path.basename(rootPath) || 'PROJECT', health: overall, path: rootPath, metrics: { fileCount, dirCount, rootFiles, maxDepth, ms: Date.now() - start }, issues: issues.map(i => i.id) });
+    return { scanId, rootPath, createdAt: Date.now(), health: overall, nodes, edges, issues, phase: 'FAST' };
+}
+// --- DEPS SCAN ---
+function parseRelativeImports(fileText) {
+    const imports = [];
+    const re1 = /import\s+[^'"]*['"]([^'"]+)['"]/g;
+    const re2 = /require\(\s*['"]([^'"]+)['"]\s*\)/g;
+    const re3 = /from\s+['"]([^'"]+)['"]\s+import/g; // Python style
+    let m;
+    while ((m = re1.exec(fileText)))
+        imports.push(m[1]);
+    while ((m = re2.exec(fileText)))
+        imports.push(m[1]);
+    while ((m = re3.exec(fileText)))
+        imports.push(m[1]);
+    return imports.filter(s => s.startsWith('./') || s.startsWith('../') || s.includes('.'));
+}
+function resolveImport(fromFile, spec) {
+    const base = path.resolve(path.dirname(fromFile), spec);
+    const candidates = [
+        base,
+        base + '.py', base + '.ts', base + '.tsx', base + '.js', base + '.jsx',
+        path.join(base, '__init__.py'), path.join(base, 'index.ts'), path.join(base, 'index.js')
+    ];
+    for (const c of candidates) {
+        if (fs.existsSync(c) && fs.statSync(c).isFile())
+            return c;
+    }
+    return null;
+}
+function depsScan(rootPath) {
+    const files = [];
+    function walk(dir) {
+        let entries;
+        try {
+            entries = fs.readdirSync(dir, { withFileTypes: true });
+        }
+        catch {
+            return;
+        }
+        for (const ent of entries) {
+            const full = path.join(dir, ent.name);
+            if (ent.isDirectory()) {
+                if (isSkippableDir(ent.name))
+                    continue;
+                walk(full);
+            }
+            else if (ent.isFile() && isTextFile(full))
+                files.push(full);
+        }
+    }
+    walk(rootPath);
+    const adj = new Map();
+    for (const f of files) {
+        let txt;
+        try {
+            txt = fs.readFileSync(f, 'utf8');
+        }
+        catch {
+            continue;
+        }
+        const specs = parseRelativeImports(txt);
+        for (const spec of specs) {
+            const resolved = resolveImport(f, spec);
+            if (!resolved)
+                continue;
+            if (!adj.has(f))
+                adj.set(f, new Set());
+            adj.get(f).add(resolved);
+        }
+    }
+    const visited = new Set();
+    const stack = new Set();
+    const parent = new Map();
+    const cycles = [];
+    function dfs(u) {
+        visited.add(u);
+        stack.add(u);
+        const nbrs = adj.get(u);
+        if (nbrs) {
+            for (const v of nbrs) {
+                if (!visited.has(v)) {
+                    parent.set(v, u);
+                    dfs(v);
+                }
+                else if (stack.has(v)) {
+                    const cycle = [v];
+                    let cur = u;
+                    while (cur && cur !== v && cycle.length < 50) {
+                        cycle.push(cur);
+                        cur = parent.get(cur);
+                    }
+                    cycle.push(v);
+                    cycle.reverse();
+                    cycles.push(cycle);
+                }
+            }
+        }
+        stack.delete(u);
+    }
+    for (const f of adj.keys()) {
+        if (!visited.has(f))
+            dfs(f);
+    }
+    return { adj, cycles };
+}
+/**
+ * Note: Full DEPS scan requires complex dependency resolution logic.
+ * For now, we provide the FAST scan as the baseline for the GitHub App.
+ */

package/dist/deepAnalysis.d.ts ADDED
@@ -0,0 +1,10 @@
+export interface DeepAnalysisResult {
+    summary: string;
+    refactoringGuides: {
+        file: string;
+        description: string;
+        suggestion: string;
+    }[];
+    techDebtScore: number;
+}
+export declare function runDeepAnalysis(repoDir: string, provider: string, apiKey: string): Promise<DeepAnalysisResult>;
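
Finally, a hedged sketch of calling `runDeepAnalysis` as declared above; the provider string, import path, and repo path are illustrative assumptions, not documented values:

```ts
// Sketch using only the runDeepAnalysis signature and DeepAnalysisResult shape above.
import { runDeepAnalysis } from "archrisk-engine/dist/deepAnalysis.js";

async function main(): Promise<void> {
  const result = await runDeepAnalysis(
    "./my-repo",                       // repoDir (illustrative)
    "gemini",                          // provider (assumed value)
    process.env.GEMINI_API_KEY ?? ""   // apiKey
  );
  console.log(result.summary);
  console.log(`tech debt score: ${result.techDebtScore}`);
  for (const guide of result.refactoringGuides) {
    console.log(`${guide.file}: ${guide.description} -> ${guide.suggestion}`);
  }
}

main();
```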