@reinforcedai/hardhat-security-review 2511.26.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +0 -0
- package/package.json +51 -0
- package/src/AutoFixManager.ts +208 -0
- package/src/ContractUnflattener.ts +151 -0
- package/src/ReinforcedHardhatRuntimeEnvironmentField.ts +155 -0
- package/src/consts.ts +3 -0
- package/src/index.ts +257 -0
- package/src/type-extensions.ts +27 -0
- package/src/types.ts +64 -0
package/README.md
ADDED
|
File without changes
|
package/package.json
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@reinforcedai/hardhat-security-review",
|
|
3
|
+
"main": "dist/index.js",
|
|
4
|
+
"types": "dist/index.d.ts",
|
|
5
|
+
"type": "commonjs",
|
|
6
|
+
"version": "2511.26.1",
|
|
7
|
+
"scripts": {
|
|
8
|
+
"lint:fix": "prettier --write 'src/**/*.{js,ts}' 'test/**/*.{js,ts}' && tslint --fix --config tslint.json --project tsconfig.json",
|
|
9
|
+
"lint": "tslint --config tslint.json --project tsconfig.json",
|
|
10
|
+
"test": "mocha",
|
|
11
|
+
"clean": "rm -rf dist && cd test/fixture-projects/hardhat-project && npx hardhat clean",
|
|
12
|
+
"build": "npm run clean && npm run test && tsc",
|
|
13
|
+
"watch": "tsc -w",
|
|
14
|
+
"prepublishOnly": "npm run build"
|
|
15
|
+
},
|
|
16
|
+
"files": [
|
|
17
|
+
"dist/",
|
|
18
|
+
"src/",
|
|
19
|
+
"LICENSE"
|
|
20
|
+
],
|
|
21
|
+
"dependencies": {
|
|
22
|
+
"eslint-plugin-mocha": "^11.1.0",
|
|
23
|
+
"nanospinner": "^1.2.2",
|
|
24
|
+
"readline-sync": "^1.4.10",
|
|
25
|
+
"tsx": "^4.7.3",
|
|
26
|
+
"tty-table": "^4.2.3"
|
|
27
|
+
},
|
|
28
|
+
"devDependencies": {
|
|
29
|
+
"@nomiclabs/hardhat-ethers": "^2.0.0",
|
|
30
|
+
"@types/chai": "5.2.2",
|
|
31
|
+
"@types/fs-extra": "11.0.4",
|
|
32
|
+
"@types/mocha": "10.0.10",
|
|
33
|
+
"@types/node": "24.0.12",
|
|
34
|
+
"@types/readline-sync": "^1.4.8",
|
|
35
|
+
"@typescript-eslint/eslint-plugin": "^8.37.0",
|
|
36
|
+
"@typescript-eslint/parser": "^8.37.0",
|
|
37
|
+
"chai": "5.2.1",
|
|
38
|
+
"eslint": "^9.31.0",
|
|
39
|
+
"hardhat": "^2.0.0",
|
|
40
|
+
"mocha": "11.7.1",
|
|
41
|
+
"prettier": "^2.2.0",
|
|
42
|
+
"ts-node": "^10.8.0",
|
|
43
|
+
"tslint": "^5.16.0",
|
|
44
|
+
"tslint-config-prettier": "^1.18.0",
|
|
45
|
+
"tslint-plugin-prettier": "^2.0.1",
|
|
46
|
+
"typescript": "5.8.3"
|
|
47
|
+
},
|
|
48
|
+
"peerDependencies": {
|
|
49
|
+
"hardhat": "^2.0.0"
|
|
50
|
+
}
|
|
51
|
+
}
|
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
import * as path from 'path';
|
|
2
|
+
import * as readlineSync from 'readline-sync';
|
|
3
|
+
import { ContractUnflattener, VulnerabilityMapping } from './ContractUnflattener';
|
|
4
|
+
import { AutoFixSelection, CodeReplacementResult, Vulnerability, VulnerabilityWithIndex } from './types';
|
|
5
|
+
|
|
6
|
+
/**
 * Interactive manager for the auto-fix workflow: presents auto-fixable
 * vulnerabilities on the console, collects the user's selection via
 * readline-sync, maps flattened-source line ranges back to the original
 * contract files through ContractUnflattener, and applies the
 * miner-suggested replacement lines in place.
 */
export class AutoFixManager {

  constructor() {
  }

  /**
   * Print the auto-fixable vulnerabilities and prompt the user to choose
   * which ones to fix (comma-separated numbers, "all", or "none"/empty).
   *
   * Returns one AutoFixSelection per input vulnerability, in the same order;
   * `shouldFix` is true only for entries the user picked that also carry a
   * non-empty `fixed_lines` suggestion. Returns [] when there is nothing
   * fixable or the user declines.
   */
  public promptUserForVulnerabilitySelection(vulnerabilities: Vulnerability[]): AutoFixSelection[] {
    if(vulnerabilities.length === 0) {
      return [];
    }

    console.log('\nš Found vulnerabilities that can be auto-fixed:');
    console.log('ā'.repeat(80));

    // Keep each vulnerability's position in the ORIGINAL array as `index`,
    // then drop entries without a usable miner-suggested fix.
    const vulnerabilitiesWithIndex: VulnerabilityWithIndex[] = vulnerabilities
      .map((vuln, index) => ({...vuln, index}))
      .filter(vuln => vuln.fixed_lines !== null && vuln.fixed_lines.trim().length > 0);

    if(vulnerabilitiesWithIndex.length === 0) {
      console.log('ā¹ļø No auto-fixable vulnerabilities found (no miner suggestions available).');
      return [];
    }

    // Menu entries are numbered by original-array position + 1, so the
    // numbers typed by the user map straight back to `vulnerabilities`.
    vulnerabilitiesWithIndex.forEach((vuln) => {
      console.log(`\n[${vuln.index + 1}] ${vuln.vulnerability_class}`);

      // Prefer the original-file location when the flattened range was
      // already mapped back; fall back to flattened-source line numbers.
      console.log(`   Lines ${vuln.original_from_line || vuln.from_line}-${vuln.original_to_line || vuln.to_line}${vuln.original_file ? ` in ${vuln.original_file.split('/').pop()}` : ''}`);
      console.log(`   Miners consensus: ${vuln.miners_select_count}/${vuln.miners_participated_count}`);
      console.log(`   Description: ${vuln.description.substring(0, 100)}...`);
      console.log(`   Has fix: ${vuln.fixed_lines ? 'Yes' : 'No'}`);
    });

    console.log('ā'.repeat(80));
    console.log('Select vulnerabilities to auto-fix:');
    console.log('⢠Enter numbers separated by commas (e.g., 1,3,5)');
    console.log('⢠Enter "all" to fix all vulnerabilities');
    console.log('⢠Enter "none" or press Enter to skip auto-fixing');

    // Blocking synchronous prompt.
    const userInput = readlineSync.question('\nYour selection: ').trim();

    if(userInput.toLowerCase() === 'none' || userInput === '') {
      return [];
    }

    let selectedIndices: number[] = [];

    if(userInput.toLowerCase() === 'all') {
      selectedIndices = vulnerabilitiesWithIndex.map(v => v.index);
    } else {
      // Parse "1,3,5"-style input; silently drop non-numeric or
      // out-of-range entries.
      const inputNumbers = userInput.split(',')
        .map(s => parseInt(s.trim()))
        .filter(n => !isNaN(n) && n >= 1 && n <= vulnerabilities.length);

      selectedIndices = inputNumbers.map(n => n - 1);
    }

    // Build a selection entry for EVERY vulnerability; `shouldFix` also
    // re-checks fixed_lines so selecting a non-fixable number is a no-op.
    const selections: AutoFixSelection[] = vulnerabilities.map((_, index) => ({
      vulnerabilityIndex: index,
      shouldFix: selectedIndices.includes(index) &&
        vulnerabilities[index].fixed_lines !== null &&
        vulnerabilities[index].fixed_lines!.trim().length > 0,
    }));

    const selectedCount = selections.filter(s => s.shouldFix).length;
    console.log(`\nā
Selected ${selectedCount} vulnerabilities for auto-fixing.`);

    return selections;
  }

  /**
   * Apply the selected fixes to the ORIGINAL contract files on disk.
   *
   * The flattened `sourceCode` is parsed to recover which original file each
   * flattened line range came from; fixes are then grouped per file and
   * applied bottom-up (descending line order) so earlier splices do not
   * shift the line numbers of later ones.
   *
   * NOTE(review): mutates the `vulnerabilities` entries it maps
   * (original_file/original_from_line/original_to_line are written back).
   */
  public applyAutoFixes(
    vulnerabilities: Vulnerability[],
    selections: AutoFixSelection[],
    sourceCode: string,
    sourcePaths: string[],
  ): CodeReplacementResult {

    const selectedVulns = selections
      .filter(s => s.shouldFix)
      .map(s => ({...vulnerabilities[s.vulnerabilityIndex], index: s.vulnerabilityIndex}))
      .sort((a, b) => b.to_line - a.to_line); // Sort by line number descending to avoid offset issues

    if(selectedVulns.length === 0) {
      return {
        success: true,
        appliedFixes: [],
      };
    }

    try {
      const unflattener = new ContractUnflattener();
      unflattener.parseFlattened(sourceCode, sourcePaths);

      // Group fixes by the original file they land in.
      const vulnerabilitiesByFile = new Map<string, Array<{
        vulnerability: typeof selectedVulns[0],
        mapping: VulnerabilityMapping
      }>>();

      for(const vuln of selectedVulns) {
        const mapping = unflattener.mapVulnerabilityToOriginal(vuln.from_line, vuln.to_line);
        if(mapping) {
          const filePath = mapping.originalFile;
          if(!vulnerabilitiesByFile.has(filePath)) {
            vulnerabilitiesByFile.set(filePath, []);
          }
          vulnerabilitiesByFile.get(filePath)!.push({vulnerability: vuln, mapping});

          // Record the recovered location on the vulnerability itself so
          // later reporting can show original-file line numbers.
          vuln.original_file = filePath;
          vuln.original_from_line = mapping.originalFromLine;
          vuln.original_to_line = mapping.originalToLine;
        }
      }

      if(vulnerabilitiesByFile.size === 0) {
        return {
          success: false,
          error: 'Could not map vulnerabilities to original files',
          appliedFixes: [],
        };
      }

      const appliedFixes: number[] = [];
      const modifiedFiles: string[] = [];

      console.log('\nš§ Applying auto-fixes to original files...');

      for(const [filePath, vulnMappings] of vulnerabilitiesByFile.entries()) {
        // Bottom-up within each file, for the same splice-offset reason.
        vulnMappings.sort((a, b) => b.mapping.originalToLine - a.mapping.originalToLine);

        console.log(`  š Processing ${path.basename(filePath)} (${vulnMappings.length} fixes)`);

        for(const {vulnerability: vuln, mapping} of vulnMappings) {
          if(!vuln.fixed_lines) continue;

          console.log(`   ⢠Fixing ${vuln.vulnerability_class} at lines ${mapping.originalFromLine}-${mapping.originalToLine}`);

          const fixResult = unflattener.applyFixToOriginalFile(mapping, vuln.fixed_lines);

          if(fixResult.success) {
            appliedFixes.push(vuln.index);
            if(!modifiedFiles.includes(filePath)) {
              modifiedFiles.push(filePath);
            }
          } else {
            // A single failed splice does not abort the rest of the fixes.
            console.warn(`   ā ļø Failed to apply fix: ${fixResult.error}`);
          }
        }
      }

      console.log(`ā
Applied ${appliedFixes.length} fixes to ${modifiedFiles.length} files`);

      return {
        success: true,
        appliedFixes,
        modifiedFiles,
      };

    } catch (error) {
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
        appliedFixes: [],
      };
    }
  }

  /**
   * Print a human-readable summary of an applyAutoFixes() run: which
   * vulnerabilities were fixed, at which (original-file) locations, and
   * which files were modified on disk.
   */
  public displayAutoFixSummary(result: CodeReplacementResult, vulnerabilities: Vulnerability[]) {
    console.log('\n' + '='.repeat(80));
    console.log('AUTO-FIX SUMMARY');
    console.log('='.repeat(80));

    if(!result.success) {
      console.log(`ā Auto-fix failed: ${result.error}`);
      return;
    }

    if(result.appliedFixes.length === 0) {
      console.log('ā¹ļø No fixes were applied.');
      return;
    }

    console.log(`ā
Successfully applied ${result.appliedFixes.length} auto-fixes:`);

    result.appliedFixes.forEach(index => {
      const vuln = vulnerabilities[index];
      // Show original-file coordinates when the mapping succeeded,
      // otherwise the flattened-source line range.
      const originalInfo = vuln.original_file && vuln.original_from_line && vuln.original_to_line
        ? ` in ${path.basename(vuln.original_file)} (lines ${vuln.original_from_line}-${vuln.original_to_line})`
        : ` (lines ${vuln.from_line}-${vuln.to_line})`;
      console.log(`  ⢠${vuln.vulnerability_class}${originalInfo}`);
    });

    if(result.modifiedFiles && result.modifiedFiles.length > 0) {
      console.log('\nš Modified files:');
      result.modifiedFiles.forEach(filePath => {
        console.log(`  ⢠${path.basename(filePath)}`);
      });
    } else if(result.filePath) {
      console.log(`\nš Fixed code saved to: ${path.basename(result.filePath)}`);
    }

    console.log('\nā ļø IMPORTANT: Please review the changes carefully before deploying!');
    console.log('  The fixes have been applied directly to your original contract files.');
    console.log('='.repeat(80));
  }
}
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
|
|
4
|
+
/**
 * One contiguous region of the flattened source attributed to a single
 * original contract file.
 */
export interface FileMapping {
  // Absolute/resolved path of the original source file.
  filePath: string;
  // First flattened-source line (1-based) belonging to this file's section.
  startLine: number;
  // Last flattened-source line of this file's section.
  endLine: number;
  // Full text of the original file at parse time ('' if unreadable).
  originalContent: string;
}

/**
 * A vulnerability's line range translated from flattened-source coordinates
 * back to the original file it came from.
 */
export interface VulnerabilityMapping {
  // Original file the flattened range maps into.
  originalFile: string;
  // Start line (1-based) within the original file.
  originalFromLine: number;
  // End line (1-based) within the original file.
  originalToLine: number;
  // The flattened-source range this mapping was derived from.
  flattenedFromLine: number;
  flattenedToLine: number;
}
|
|
18
|
+
|
|
19
|
+
/**
 * Reverses Hardhat's source flattening just enough to locate code: parses
 * the `// File <path>` markers the flattener emits, records which flattened
 * line range belongs to which original file, and can translate a flattened
 * line range back to original-file coordinates or splice replacement lines
 * into the original file on disk.
 */
export class ContractUnflattener {
  // Populated by parseFlattened(); consumed by mapVulnerabilityToOriginal().
  private fileMappings: FileMapping[] = [];

  /**
   * Scan the flattened source for `// File <path>` section markers and build
   * one FileMapping per original file. Also stores the result on the
   * instance for later mapping calls.
   *
   * NOTE(review): the `i - 2` end-line and `nextNonEmptyLine + 1` start-line
   * adjustments assume the exact blank-line layout the Hardhat flattener
   * emits around its file markers — confirm against a real flattened file.
   */
  public parseFlattened(flattenedSource: string, sourcePaths: string[]): FileMapping[] {
    const lines = flattenedSource.split('\n');
    const mappings: FileMapping[] = [];
    let currentFile: string | null = null;
    let currentStartLine = 0;
    let currentLines: string[] = [];

    for(let i = 0; i < lines.length; i++) {
      const line = lines[i];

      // Section header inserted by the flattener before each file's code.
      const fileMarker = line.match(/^\/\/ File (.+)$/);

      if(fileMarker) {
        // Close out the previous file's section, if any.
        if(currentFile && currentLines.length > 0) {
          const originalContent = this.findOriginalContent(currentFile);
          mappings.push({
            filePath: currentFile,
            startLine: currentStartLine,
            endLine: i - 2,
            originalContent,
          });
        }

        // Resolve the marker's (possibly relative) path against the known
        // project source paths; unresolvable sections are skipped.
        const fullPath = this.findFullPath(fileMarker[1], sourcePaths);

        if(fullPath) {
          currentFile = fullPath;
          // Skip blank lines between the marker and the first code line.
          let nextNonEmptyLine = i + 1;
          while(nextNonEmptyLine < lines.length && lines[nextNonEmptyLine].trim() === '') {
            nextNonEmptyLine++;
          }
          currentStartLine = nextNonEmptyLine + 1;
          currentLines = [];
        }
      } else if(currentFile) {
        if(line.startsWith('// Original license:')) {
          // The flattener rewrites SPDX headers as a comment; recover the
          // original text so line content matches the source file.
          const group = line.match(/^\/\/ Original license:(.*)$/);
          if(group && group[1]) {
            currentLines.push(group[1].trim());
          }
        } else {
          currentLines.push(line);
        }
      }
    }

    // Close the final file's section (no trailing marker follows it).
    if(currentFile && currentLines.length > 0) {
      mappings.push({
        filePath: currentFile,
        startLine: currentStartLine,
        endLine: lines.length,
        originalContent: this.findOriginalContent(currentFile),
      });
    }

    this.fileMappings = mappings;
    return mappings;
  }


  /**
   * Translate a flattened-source line range to original-file coordinates.
   * Returns null when the range does not fall entirely within one parsed
   * file section (ranges spanning two files cannot be mapped).
   */
  public mapVulnerabilityToOriginal(
    flattenedFromLine: number,
    flattenedToLine: number,
  ): VulnerabilityMapping | null {
    for(const mapping of this.fileMappings) {
      if(flattenedFromLine >= mapping.startLine && flattenedToLine <= mapping.endLine) {
        return {
          originalFile: mapping.filePath,
          // Offset within the section, re-based to 1 for the original file.
          originalFromLine: flattenedFromLine - mapping.startLine + 1,
          originalToLine: flattenedToLine - mapping.startLine + 1,
          flattenedFromLine,
          flattenedToLine,
        };
      }
    }

    return null;
  }


  /**
   * Replace lines originalFromLine..originalToLine (inclusive, 1-based) of
   * the original file with `fixedLines`, writing the file back in place.
   * Never throws; failures are reported via the returned object.
   */
  public applyFixToOriginalFile(
    mapping: VulnerabilityMapping,
    fixedLines: string,
  ): { success: boolean; error?: string } {
    try {
      if(!fs.existsSync(mapping.originalFile)) {
        return {success: false, error: `Original file not found: ${mapping.originalFile}`};
      }
      const originalLines = fs.readFileSync(mapping.originalFile, 'utf8').split('\n');

      // Convert 1-based inclusive range to 0-based splice arguments.
      const startIndex = mapping.originalFromLine - 1;
      const endIndex = mapping.originalToLine - 1;

      originalLines.splice(startIndex, endIndex - startIndex + 1, ...fixedLines.split('\n'));

      fs.writeFileSync(mapping.originalFile, originalLines.join('\n'));

      return {success: true};
    } catch (error) {
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    }
  }

  // Resolve a flattener marker path against the project's source paths:
  // exact match first, then basename match, then suffix match.
  private findFullPath(relativePath: string, sourcePaths: string[]): string | null {
    const exactMatch = sourcePaths.find(sp => sp === relativePath);
    if(exactMatch) return exactMatch;

    const byFilename = sourcePaths.find(sp => path.basename(sp) === path.basename(relativePath));
    if(byFilename) return byFilename;

    const pathMatch = sourcePaths.find(sp => sp.endsWith(relativePath));
    if(pathMatch) return pathMatch;

    return null;
  }

  // Best-effort read of the original file; '' when missing or unreadable.
  private findOriginalContent(filePath: string): string {
    try {
      if(fs.existsSync(filePath)) {
        return fs.readFileSync(filePath, 'utf8');
      }
    } catch (error) {
      console.warn(`Could not read original content for ${filePath}:`, error);
    }
    return '';
  }
}
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import { createSpinner } from 'nanospinner';
|
|
3
|
+
import { AUDIT_URL, MAX_RETRIES, STATUS_CHECK_INTERVAL } from './consts';
|
|
4
|
+
import { AuditResult, AuditStatus, HttpError, JRPCResponse, PdfReportResult } from './types';
|
|
5
|
+
|
|
6
|
+
export class ReinforcedHardhatRuntimeEnvironmentField {
|
|
7
|
+
public async auditContract(sourceCode: string, apiKey: string, statusCheckInterval?: number, maxRetries?: number, reportFolder?: string): Promise<AuditResult> {
|
|
8
|
+
const taskId = await this.requestAuditWithRetry(statusCheckInterval, maxRetries, sourceCode, apiKey);
|
|
9
|
+
let spinner = null;
|
|
10
|
+
while(true) {
|
|
11
|
+
const status = await this.checkAuditStatusWithRetry(statusCheckInterval, maxRetries, taskId, apiKey);
|
|
12
|
+
if(status == 'pending' || status == 'processing') {
|
|
13
|
+
if(!spinner)
|
|
14
|
+
spinner = createSpinner('Processing...').start();
|
|
15
|
+
await sleep(statusCheckInterval ?? STATUS_CHECK_INTERVAL);
|
|
16
|
+
continue;
|
|
17
|
+
}
|
|
18
|
+
break;
|
|
19
|
+
}
|
|
20
|
+
if(spinner)
|
|
21
|
+
spinner.success();
|
|
22
|
+
const result = this.getAuditResultWithRetry(statusCheckInterval, maxRetries, taskId, apiKey);
|
|
23
|
+
fs.mkdirSync(reportFolder ?? './reinforcedai_reports', {recursive: true});
|
|
24
|
+
|
|
25
|
+
try {
|
|
26
|
+
const report = await this.getPdfReport(taskId, apiKey);
|
|
27
|
+
console.log(`š PDF report generated: ${report.filename} (${(report.size_bytes / 1024).toFixed(2)} KB)`);
|
|
28
|
+
const bin = Buffer.from(report.pdf_base64, 'base64');
|
|
29
|
+
if(report.size_bytes !== bin.length) {
|
|
30
|
+
console.warn(`Warning: PDF size mismatch. Expected ${report.size_bytes} bytes, but got ${bin.length} bytes.`);
|
|
31
|
+
}
|
|
32
|
+
fs.writeFileSync(`${reportFolder ?? './reinforcedai_reports'}/${report.filename}`, bin);
|
|
33
|
+
} catch (e) {
|
|
34
|
+
console.error(`Failed to generate PDF report: ${(e as Error).message}`);
|
|
35
|
+
}
|
|
36
|
+
return result;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
private async checkAuditStatusWithRetry (retryDelay?: number, maxRetries?: number, ...args: Parameters<typeof this.checkAuditStatus>) {
|
|
40
|
+
return await retryHttpRequest(() => this.checkAuditStatus(...args), retryDelay, maxRetries);
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
private async getAuditResultWithRetry(retryDelay?: number, maxRetries?: number, ...args: Parameters<typeof this.getAuditResult>) {
|
|
44
|
+
return await retryHttpRequest(() => this.getAuditResult(...args), retryDelay, maxRetries);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
private async requestAuditWithRetry(retryDelay?: number, maxRetries?: number, ...args: Parameters<typeof this.requestAudit>) {
|
|
48
|
+
return await retryHttpRequest(() => this.requestAudit(...args), retryDelay, maxRetries);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
private async fetchAudit(payload: any, apiKey: string): Promise<Response> {
|
|
52
|
+
return await fetch(AUDIT_URL, {
|
|
53
|
+
method: 'POST',
|
|
54
|
+
headers: auditHeaders(apiKey),
|
|
55
|
+
body: JSON.stringify(payload),
|
|
56
|
+
});
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
private async requestAudit(sourceCode: string, apiKey: string): Promise<string> {
|
|
60
|
+
const payload = {
|
|
61
|
+
'jsonrpc': '2.0',
|
|
62
|
+
'method': 'audit.get',
|
|
63
|
+
'params': {'code': sourceCode},
|
|
64
|
+
'id': 1,
|
|
65
|
+
};
|
|
66
|
+
const response = await this.fetchAudit(payload, apiKey);
|
|
67
|
+
if(!response.ok) {
|
|
68
|
+
throw new HttpError(`HTTP ${response.status} ${response.statusText}`, response.status);
|
|
69
|
+
}
|
|
70
|
+
return (await response.json()).result.task_id;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
private async checkAuditStatus(taskId: string, apiKey: string): Promise<AuditStatus> {
|
|
74
|
+
const payload = {
|
|
75
|
+
'jsonrpc': '2.0',
|
|
76
|
+
'method': 'task.status',
|
|
77
|
+
'params': {'task_id': taskId},
|
|
78
|
+
'id': 2,
|
|
79
|
+
};
|
|
80
|
+
const response = await this.fetchAudit(payload, apiKey);
|
|
81
|
+
if(!response.ok) {
|
|
82
|
+
throw new HttpError(`HTTP ${response.status} ${response.statusText}`, response.status);
|
|
83
|
+
}
|
|
84
|
+
const auditResponse = (await response.json()) as JRPCResponse<AuditResult>;
|
|
85
|
+
if(auditResponse.result.error_message) {
|
|
86
|
+
throw new Error(`Error on task.status ${auditResponse.result.error_message}`);
|
|
87
|
+
}
|
|
88
|
+
return auditResponse.result.status;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
private async getAuditResult(taskId: string, apiKey: string): Promise<AuditResult> {
|
|
92
|
+
const payload = {
|
|
93
|
+
'jsonrpc': '2.0',
|
|
94
|
+
'method': 'task.result',
|
|
95
|
+
'params': {'task_id': taskId},
|
|
96
|
+
'id': 3,
|
|
97
|
+
};
|
|
98
|
+
const response = await this.fetchAudit(payload, apiKey);
|
|
99
|
+
if(!response.ok) {
|
|
100
|
+
throw new HttpError(`HTTP ${response.status} ${response.statusText}`, response.status);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
const auditResponse = (await response.json()) as JRPCResponse<AuditResult>;
|
|
104
|
+
if(auditResponse.result.error_message) {
|
|
105
|
+
throw new Error(`Error on task.result ${auditResponse.result.error_message}`);
|
|
106
|
+
}
|
|
107
|
+
return auditResponse.result;
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
private async getPdfReport(taskId: string, apiKey: string): Promise<PdfReportResult> {
|
|
111
|
+
const payload = {
|
|
112
|
+
'jsonrpc': '2.0',
|
|
113
|
+
'method': 'task.generate_pdf',
|
|
114
|
+
'params': {'task_id': taskId},
|
|
115
|
+
'id': 4,
|
|
116
|
+
};
|
|
117
|
+
const response = await this.fetchAudit(payload, apiKey);
|
|
118
|
+
if(!response.ok) {
|
|
119
|
+
throw new HttpError(`HTTP ${response.status} ${response.statusText}`, response.status);
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
return ((await response.json()) as JRPCResponse<PdfReportResult>).result;
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
async function retryHttpRequest<T>(request: () => Promise<T>, retryDelay: number = STATUS_CHECK_INTERVAL, maxRetries: number = MAX_RETRIES): Promise<T> {
|
|
127
|
+
let attempt = 0;
|
|
128
|
+
while(true) {
|
|
129
|
+
try {
|
|
130
|
+
return await request();
|
|
131
|
+
} catch (e: unknown) {
|
|
132
|
+
attempt++;
|
|
133
|
+
if(attempt <= maxRetries && e instanceof HttpError && (e.statusCode == 429 || e.statusCode >= 500)) {
|
|
134
|
+
const base = retryDelay * 2 ** (attempt - 1);
|
|
135
|
+
const delay = Math.random() * base;
|
|
136
|
+
console.warn(`Retry #${attempt} in ${Math.round(delay)} ms`, e);
|
|
137
|
+
await sleep(delay);
|
|
138
|
+
continue;
|
|
139
|
+
} else {
|
|
140
|
+
throw e;
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
function sleep(ms: number): Promise<void> {
|
|
147
|
+
return new Promise(resolve => setTimeout(resolve, ms));
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
function auditHeaders(apiKey: string) {
|
|
151
|
+
return {
|
|
152
|
+
'Content-Type': 'application/json',
|
|
153
|
+
'X-Auth-Token': 'Bearer ' + apiKey,
|
|
154
|
+
};
|
|
155
|
+
}
|
package/src/consts.ts
ADDED
package/src/index.ts
ADDED
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import { TASK_COMPILE, TASK_COMPILE_SOLIDITY, TASK_FLATTEN_GET_FLATTENED_SOURCE } from 'hardhat/builtin-tasks/task-names';
|
|
3
|
+
import { extendEnvironment, subtask, task } from 'hardhat/config';
|
|
4
|
+
import { parseFullyQualifiedName } from 'hardhat/utils/contract-names';
|
|
5
|
+
|
|
6
|
+
// This import is needed to let the TypeScript compiler know that it should include your type
|
|
7
|
+
// extensions in your npm package's types file.
|
|
8
|
+
import { HardhatRuntimeEnvironment } from 'hardhat/types';
|
|
9
|
+
import Table from 'tty-table';
|
|
10
|
+
import { Vulnerability } from 'types';
|
|
11
|
+
import { AutoFixManager } from './AutoFixManager';
|
|
12
|
+
import { ContractUnflattener } from './ContractUnflattener';
|
|
13
|
+
import { ReinforcedHardhatRuntimeEnvironmentField } from './ReinforcedHardhatRuntimeEnvironmentField';
|
|
14
|
+
import './type-extensions';
|
|
15
|
+
|
|
16
|
+
// Hook into Hardhat's solidity-compile pipeline: after the normal compile
// succeeds, run a Reinforced audit — but only when the user opted in via
// `reinforced.compilationHookEnabled` in hardhat.config.
subtask(TASK_COMPILE_SOLIDITY, async (args, hre, runSuper) => {
  await runSuper(args); // run regular compilation first
  if(!hre.config.reinforced?.compilationHookEnabled) {
    console.log('Reinforced verification not enabled.');
    return;
  }
  await auditAllContracts(hre);
  console.log('ā Audit complete before deployment.');
});
|
|
25
|
+
|
|
26
|
+
// `npx hardhat check-vulnerabilities` — compile (cached, quiet), then audit
// all project contracts once.
task('check-vulnerabilities', 'Run pre-deployment audit', async (_, hre) => {
  await hre.run(TASK_COMPILE_SOLIDITY, {force: false, quiet: true});
  await auditAllContracts(hre);
  console.log('ā Audit complete');
});
|
|
31
|
+
|
|
32
|
+
// `npx hardhat check-vulnerabilities:autofix` — like check-vulnerabilities,
// but always offers the interactive auto-fix flow afterwards.
task('check-vulnerabilities:autofix', 'Run audit and apply auto-fixes for vulnerabilities', async (_, hre) => {
  await hre.run(TASK_COMPILE_SOLIDITY, {force: false, quiet: true});
  await auditAllContractsWithAutoFix(hre);
  console.log('ā Audit and auto-fix complete');
});
|
|
37
|
+
|
|
38
|
+
/**
 * Flatten the project's sources, send them to the Reinforced audit service,
 * print the findings, and then ALWAYS offer the interactive auto-fix flow
 * (unlike auditAllContracts, which consults the autoFixEnabled setting).
 * No-ops with a message when no API key is configured; audit errors are
 * logged, not thrown.
 */
async function auditAllContractsWithAutoFix(hre: HardhatRuntimeEnvironment) {
  const apiKey = hre.config.reinforced?.apiKey;
  if(!apiKey) {
    console.log('Reinforced API key not set.');
    return;
  }

  // Single flattened blob covering every contract in the project.
  const flatSource = await hre.run(TASK_FLATTEN_GET_FLATTENED_SOURCE, {force: false, quiet: true});

  // Collect the distinct source paths behind the compiled artifacts.
  const qualifiedContractNames = await hre.artifacts.getAllFullyQualifiedNames();
  const sourcePaths: string[] = [];

  for(const qualifiedContractName of qualifiedContractNames) {
    const {sourceName} = parseFullyQualifiedName(qualifiedContractName);
    if(sourcePaths.includes(sourceName))
      continue;
    sourcePaths.push(sourceName);
  }
  if(sourcePaths.length > 0) {
    try {
      if(sourcePaths.length > 1) {
        console.log('Auditing:');
        for(const path of sourcePaths)
          console.log(' ', path);
      } else {
        console.log('Auditing ', sourcePaths[0]);
      }
      const auditResult = await hre.reinforced.auditContract(flatSource, apiKey, hre.config.reinforced?.statusCheckInterval);
      if(auditResult.error_message)
        console.log('Error processing contract:', auditResult.error_message);
      else if(!auditResult.result || auditResult.result.length == 0)
        console.log('No vulnerabilities found');
      else {
        // Translate flattened-source line ranges back to original files
        // before showing anything to the user.
        const enrichedVulnerabilities = enrichVulnerabilitiesWithOriginalMapping(
          auditResult.result,
          flatSource,
          sourcePaths.map(sp => sp.trim()),
        );
        console.log(formatAuditResult(enrichedVulnerabilities, flatSource));

        // Interactive selection + in-place fixing of the chosen findings.
        const autoFixManager = new AutoFixManager();
        const selections = autoFixManager.promptUserForVulnerabilitySelection(enrichedVulnerabilities);

        if(selections.some(s => s.shouldFix)) {
          const result = autoFixManager.applyAutoFixes(
            enrichedVulnerabilities,
            selections,
            flatSource,
            sourcePaths.map(sp => sp.trim()),
          );
          autoFixManager.displayAutoFixSummary(result, enrichedVulnerabilities);
        }
      }
    } catch (error) {
      console.error('Error auditing contracts:', error);
    }
  }
}
|
|
96
|
+
|
|
97
|
+
function enrichVulnerabilitiesWithOriginalMapping(
|
|
98
|
+
vulnerabilities: Vulnerability[],
|
|
99
|
+
flattenedSource: string,
|
|
100
|
+
sourcePaths: string[],
|
|
101
|
+
): Vulnerability[] {
|
|
102
|
+
const unflattener = new ContractUnflattener();
|
|
103
|
+
unflattener.parseFlattened(flattenedSource, sourcePaths);
|
|
104
|
+
|
|
105
|
+
return vulnerabilities.map(vuln => {
|
|
106
|
+
const mapping = unflattener.mapVulnerabilityToOriginal(vuln.from_line, vuln.to_line);
|
|
107
|
+
console.log('Mapping for vulnerability:', mapping);
|
|
108
|
+
if(mapping) {
|
|
109
|
+
return {
|
|
110
|
+
...vuln,
|
|
111
|
+
original_file: mapping.originalFile,
|
|
112
|
+
original_from_line: mapping.originalFromLine,
|
|
113
|
+
original_to_line: mapping.originalToLine,
|
|
114
|
+
};
|
|
115
|
+
}
|
|
116
|
+
return vuln;
|
|
117
|
+
});
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
async function auditAllContracts(hre: HardhatRuntimeEnvironment) {
|
|
121
|
+
const apiKey = hre.config.reinforced?.apiKey;
|
|
122
|
+
if(!apiKey) {
|
|
123
|
+
console.log('Reinforced API key not set.');
|
|
124
|
+
return;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
const flatSource = await hre.run(TASK_FLATTEN_GET_FLATTENED_SOURCE, {force: false, quiet: true});
|
|
128
|
+
|
|
129
|
+
const qualifiedContractNames = await hre.artifacts.getAllFullyQualifiedNames();
|
|
130
|
+
const sourcePaths: string[] = [];
|
|
131
|
+
|
|
132
|
+
for(const qualifiedContractName of qualifiedContractNames) {
|
|
133
|
+
const {sourceName} = parseFullyQualifiedName(qualifiedContractName);
|
|
134
|
+
if(sourcePaths.includes(sourceName))
|
|
135
|
+
continue;
|
|
136
|
+
sourcePaths.push(sourceName);
|
|
137
|
+
}
|
|
138
|
+
if(sourcePaths.length > 0) {
|
|
139
|
+
try {
|
|
140
|
+
if(sourcePaths.length > 1) {
|
|
141
|
+
console.log('Auditing:');
|
|
142
|
+
for(const path of sourcePaths)
|
|
143
|
+
console.log(' ', path);
|
|
144
|
+
} else {
|
|
145
|
+
console.log('Auditing ', sourcePaths[0]);
|
|
146
|
+
}
|
|
147
|
+
const auditResult = await hre.reinforced.auditContract(flatSource, apiKey, hre.config.reinforced?.statusCheckInterval);
|
|
148
|
+
if(auditResult.error_message)
|
|
149
|
+
console.log('Error processing contract:', auditResult.error_message);
|
|
150
|
+
else if(!auditResult.result || auditResult.result.length == 0)
|
|
151
|
+
console.log('No vulnerabilities found');
|
|
152
|
+
else {
|
|
153
|
+
const enrichedVulnerabilities = enrichVulnerabilitiesWithOriginalMapping(
|
|
154
|
+
auditResult.result,
|
|
155
|
+
flatSource,
|
|
156
|
+
sourcePaths.map(sp => sp.trim()),
|
|
157
|
+
);
|
|
158
|
+
console.log('Enriched vulnerabilities:', enrichedVulnerabilities);
|
|
159
|
+
console.log(formatAuditResult(enrichedVulnerabilities, flatSource));
|
|
160
|
+
|
|
161
|
+
if(hre.config.reinforced?.autoFixEnabled !== false) {
|
|
162
|
+
const autoFixManager = new AutoFixManager();
|
|
163
|
+
const selections = autoFixManager.promptUserForVulnerabilitySelection(enrichedVulnerabilities);
|
|
164
|
+
|
|
165
|
+
if(selections.some(s => s.shouldFix)) {
|
|
166
|
+
const result = autoFixManager.applyAutoFixes(
|
|
167
|
+
enrichedVulnerabilities,
|
|
168
|
+
selections,
|
|
169
|
+
flatSource,
|
|
170
|
+
sourcePaths.map(sp => sp.trim()),
|
|
171
|
+
);
|
|
172
|
+
autoFixManager.displayAutoFixSummary(result, enrichedVulnerabilities);
|
|
173
|
+
}
|
|
174
|
+
} else {
|
|
175
|
+
console.log('\nš” Auto-fix is disabled. You can enable it by setting "autoFixEnabled: true" in your hardhat.config.ts');
|
|
176
|
+
console.log(' Or run "npx hardhat audit:autofix" to run with auto-fix enabled.');
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
} catch (error) {
|
|
180
|
+
console.error('Error auditing contracts:', error);
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
function formatAuditResult(vulnerabilities: Vulnerability[], sourceCode: string): string {
|
|
186
|
+
let output = '\n';
|
|
187
|
+
for(let i = 0; i < vulnerabilities.length; i++) {
|
|
188
|
+
const vulnerability = vulnerabilities[i];
|
|
189
|
+
|
|
190
|
+
output += `Possible vulnerability: ${vulnerability.vulnerability_class}${vulnerability.original_file ? ` in ${vulnerability.original_file.split('/').pop()}` : ''}`;
|
|
191
|
+
const headers = [{
|
|
192
|
+
value: '',
|
|
193
|
+
width: '20%',
|
|
194
|
+
},
|
|
195
|
+
{
|
|
196
|
+
value: '',
|
|
197
|
+
width: '80%',
|
|
198
|
+
}];
|
|
199
|
+
|
|
200
|
+
let affectedCode: string;
|
|
201
|
+
if(vulnerability.original_file && vulnerability.original_from_line && vulnerability.original_to_line) {
|
|
202
|
+
try {
|
|
203
|
+
const originalFileContent = fs.readFileSync(vulnerability.original_file, 'utf8');
|
|
204
|
+
affectedCode = originalFileContent.split('\n')
|
|
205
|
+
.slice(vulnerability.original_from_line - 1, vulnerability.original_to_line)
|
|
206
|
+
.map((line, index) => {
|
|
207
|
+
const lineNumber = vulnerability.original_from_line! + index;
|
|
208
|
+
return `${lineNumber.toString().padStart(4, ' ')}: ${line}`;
|
|
209
|
+
})
|
|
210
|
+
.join('\n');
|
|
211
|
+
} catch (error) {
|
|
212
|
+
affectedCode = sourceCode.split('\n')
|
|
213
|
+
.slice(vulnerability.from_line - 1, vulnerability.to_line)
|
|
214
|
+
.map((line, index) => {
|
|
215
|
+
const lineNumber = vulnerability.from_line + index;
|
|
216
|
+
return `${lineNumber.toString().padStart(4, ' ')}: ${line}`;
|
|
217
|
+
})
|
|
218
|
+
.join('\n');
|
|
219
|
+
}
|
|
220
|
+
} else {
|
|
221
|
+
affectedCode = sourceCode.split('\n')
|
|
222
|
+
.slice(vulnerability.from_line - 1, vulnerability.to_line)
|
|
223
|
+
.map((line, index) => {
|
|
224
|
+
const lineNumber = vulnerability.from_line + index;
|
|
225
|
+
return `${lineNumber.toString().padStart(4, ' ')}: ${line}`;
|
|
226
|
+
})
|
|
227
|
+
.join('\n');
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
const rows = [
|
|
231
|
+
['Possibility ', `${vulnerability.miners_select_count}/${vulnerability.miners_participated_count}`],
|
|
232
|
+
['Description ', vulnerability.description],
|
|
233
|
+
['Affected code ', affectedCode],
|
|
234
|
+
['Possible fix ', vulnerability.fixed_lines || 'No fix provided'],
|
|
235
|
+
['Test case ', vulnerability.test_case || 'No test case provided'],
|
|
236
|
+
['Is suggestion', vulnerability.is_suggestion ? 'Yes' : 'No'],
|
|
237
|
+
];
|
|
238
|
+
const options = {
|
|
239
|
+
width: '100%',
|
|
240
|
+
borderStyle: 'solid',
|
|
241
|
+
align: 'left',
|
|
242
|
+
};
|
|
243
|
+
const table = Table(
|
|
244
|
+
headers,
|
|
245
|
+
rows,
|
|
246
|
+
options,
|
|
247
|
+
);
|
|
248
|
+
|
|
249
|
+
output += table.render();
|
|
250
|
+
output += '\n\n';
|
|
251
|
+
}
|
|
252
|
+
return output;
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
// Runs once per Hardhat Runtime Environment: attaches the plugin's entry
// point so tasks and scripts can call `hre.reinforced.*` (the matching
// HardhatRuntimeEnvironment type augmentation lives in type-extensions.ts).
extendEnvironment((hre) => {
  hre.reinforced = new ReinforcedHardhatRuntimeEnvironmentField();
});
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { ReinforcedHardhatRuntimeEnvironmentField } from 'ReinforcedHardhatRuntimeEnvironmentField';
|
|
2
|
+
import 'hardhat/types/config';
|
|
3
|
+
|
|
4
|
+
export interface ReinforcedUserConfig {
|
|
5
|
+
apiKey?: string;
|
|
6
|
+
compilationHookEnabled?: boolean;
|
|
7
|
+
statusCheckInterval?: number;
|
|
8
|
+
maxRetries?: number;
|
|
9
|
+
autoFixEnabled?: boolean;
|
|
10
|
+
reportFolder?: string;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
declare module 'hardhat/types/config' {
|
|
14
|
+
interface HardhatUserConfig {
|
|
15
|
+
reinforced?: ReinforcedUserConfig;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
interface HardhatConfig {
|
|
19
|
+
reinforced?: ReinforcedUserConfig;
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
declare module 'hardhat/types/runtime' {
|
|
24
|
+
export interface HardhatRuntimeEnvironment {
|
|
25
|
+
reinforced: ReinforcedHardhatRuntimeEnvironmentField;
|
|
26
|
+
}
|
|
27
|
+
}
|
package/src/types.ts
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// Lifecycle states the audit service reports for a submitted task.
export type AuditStatus = 'pending' | 'processing' | 'failed' | 'completed';
|
3
|
+
export interface AuditResult {
|
|
4
|
+
error_message: string | null,
|
|
5
|
+
result: Vulnerability [] | null,
|
|
6
|
+
status: AuditStatus,
|
|
7
|
+
task_id: string
|
|
8
|
+
};
|
|
9
|
+
|
|
10
|
+
export interface JRPCResponse<T> {
|
|
11
|
+
id: number,
|
|
12
|
+
jsonrpc: string,
|
|
13
|
+
result: T
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export interface PdfReportResult {
|
|
17
|
+
task_id: string,
|
|
18
|
+
filename: string,
|
|
19
|
+
pdf_base64: string,
|
|
20
|
+
size_bytes: number
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export interface Vulnerability {
|
|
24
|
+
description: string
|
|
25
|
+
fixed_lines: string | null,
|
|
26
|
+
from_line: number,
|
|
27
|
+
is_suggestion: boolean,
|
|
28
|
+
miners_select_count: number,
|
|
29
|
+
miners_participated_count: number,
|
|
30
|
+
prior_art: any [],
|
|
31
|
+
test_case: string | null,
|
|
32
|
+
to_line: number,
|
|
33
|
+
vulnerability_class: string,
|
|
34
|
+
original_file?: string,
|
|
35
|
+
original_from_line?: number,
|
|
36
|
+
original_to_line?: number,
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
export interface VulnerabilityWithIndex extends Vulnerability {
|
|
40
|
+
index: number;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
export interface AutoFixSelection {
|
|
44
|
+
vulnerabilityIndex: number;
|
|
45
|
+
shouldFix: boolean;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export interface CodeReplacementResult {
|
|
49
|
+
success: boolean;
|
|
50
|
+
filePath?: string;
|
|
51
|
+
error?: string;
|
|
52
|
+
appliedFixes: number[];
|
|
53
|
+
modifiedFiles?: string[];
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
export class HttpError extends Error {
|
|
57
|
+
public statusCode: number;
|
|
58
|
+
|
|
59
|
+
constructor(message: string, statusCode: number) {
|
|
60
|
+
super(message);
|
|
61
|
+
this.name = 'HttpError';
|
|
62
|
+
this.statusCode = statusCode;
|
|
63
|
+
}
|
|
64
|
+
}
|