recon-generate 0.0.34 → 0.0.36
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/db.d.ts +4 -0
- package/dist/db.js +185 -0
- package/dist/index.js +15 -0
- package/dist/ingestion/db.d.ts +4 -0
- package/dist/ingestion/db.js +25 -0
- package/dist/ingestion/handlers.d.ts +64 -0
- package/dist/ingestion/handlers.js +357 -0
- package/dist/ingestion/index.d.ts +19 -0
- package/dist/ingestion/index.js +339 -0
- package/dist/ingestion/references.d.ts +4 -0
- package/dist/ingestion/references.js +171 -0
- package/dist/ingestion/schema.d.ts +1 -0
- package/dist/ingestion/schema.js +515 -0
- package/dist/ingestion/yul.d.ts +11 -0
- package/dist/ingestion/yul.js +96 -0
- package/dist/link2.js +51 -25
- package/package.json +4 -1
package/dist/db.d.ts
ADDED
package/dist/db.js
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.runDb = void 0;
|
|
37
|
+
const child_process_1 = require("child_process");
|
|
38
|
+
const fs = __importStar(require("fs"));
|
|
39
|
+
const path = __importStar(require("path"));
|
|
40
|
+
const db_1 = require("./ingestion/db");
|
|
41
|
+
const ingestion_1 = require("./ingestion");
|
|
42
|
+
const utils_1 = require("./utils");
|
|
43
|
+
/**
|
|
44
|
+
* Normalize build-info JSON into compiler output + version.
|
|
45
|
+
*/
|
|
46
|
+
/**
 * Normalize build-info JSON into compiler output + version.
 *
 * Accepts either:
 *  - raw standard solc output (top-level `sources`, no `output`) — version unknown;
 *  - Foundry build-info JSON (`output.sources` present) — version read from
 *    `solcVersion`, falling back to `solcLongVersion`;
 *  - any other object with an object-valued `output` — same version fallbacks.
 *
 * @param data parsed JSON from a build-info file or a solc run
 * @returns `{ output, compilerVersion }` where `compilerVersion` may be null
 * @throws Error when the shape matches none of the recognized formats
 */
function normalizeCompilerOutput(data) {
    // Raw solc standard JSON output: the AST lives directly under `sources`.
    if (data.sources && !data.output) {
        return { output: data, compilerVersion: null };
    }
    // Foundry build-info wrapper: compiler output nested under `output`.
    if (data.output && data.output.sources) {
        return {
            output: data.output,
            compilerVersion: data.solcVersion || data.solcLongVersion || null,
        };
    }
    // Lenient fallback: any object-valued `output` (e.g. partial build-info).
    // Fix: also fall back to `solcLongVersion` here, consistent with the
    // `output.sources` branch above.
    if (data.output && typeof data.output === 'object') {
        return {
            output: data.output,
            compilerVersion: data.solcVersion || data.solcLongVersion || null,
        };
    }
    throw new Error('Unrecognized JSON format. Expected either:\n' +
        ' - Standard solc compiler output (with \'sources\' key)\n' +
        ' - Foundry build-info JSON (with \'output.sources\')\n' +
        'Run: solc --combined-json ast <file.sol>\n' +
        ' or: forge build --build-info');
}
|
|
68
|
+
/**
 * Execute a shell command in `cwd`, resolving on success and rejecting with a
 * stderr-derived Error on failure.
 *
 * PATH is replaced via getEnvPath() so externally-installed tools (e.g.
 * `forge`) are found; all other environment variables are inherited.
 */
function runCmd(cmd, cwd) {
    const env = { ...process.env, PATH: (0, utils_1.getEnvPath)() };
    return new Promise((resolve, reject) => {
        (0, child_process_1.exec)(cmd, { cwd, env }, (err, _stdout, stderr) => {
            if (!err) {
                resolve();
                return;
            }
            // Prefer stderr (usually the actionable tool output) over the
            // generic exec error message.
            reject(new Error(stderr || err.message));
        });
    });
}
|
|
80
|
+
/**
|
|
81
|
+
* Build into .recon/out with --build-info, skipping test/script dirs.
|
|
82
|
+
* Reuses existing build if .recon/out already exists (unless forceBuild).
|
|
83
|
+
*/
|
|
84
|
+
/**
 * Build into .recon/out with --build-info, skipping test/script dirs.
 * Reuses an existing build when .recon/out is already present, unless
 * `forceBuild` is set.
 */
async function ensureBuild(foundryRoot, forceBuild) {
    const outDir = path.join(foundryRoot, '.recon', 'out');
    const reuse = !forceBuild && await (0, utils_1.fileExists)(outDir);
    if (reuse) {
        console.log('[recon-generate] Using existing .recon/out build');
        return;
    }
    // Make sure .recon/ exists before forge writes into it.
    const reconDir = path.join(foundryRoot, '.recon');
    if (!fs.existsSync(reconDir)) {
        fs.mkdirSync(reconDir, { recursive: true });
    }
    // Skip the configured test/script directories; the Set dedupes them when
    // both settings point at the same directory.
    const config = await (0, utils_1.getFoundryConfig)(foundryRoot);
    const globs = [...new Set([config.test, config.script])].map((dir) => `*/${dir}/**`);
    const skipArg = globs.length > 0 ? `--skip ${globs.join(' ')}` : '';
    // Collapse the double space left behind when skipArg is empty.
    const cmd = `forge build --build-info ${skipArg} --out .recon/out`
        .replace(/\s+/g, ' ')
        .trim();
    console.log(`[recon-generate] Building: ${cmd}`);
    await runCmd(cmd, foundryRoot);
}
|
|
102
|
+
/**
|
|
103
|
+
* Load the latest build-info JSON file from .recon/out/build-info.
|
|
104
|
+
*/
|
|
105
|
+
/**
 * Load the latest build-info JSON file from .recon/out/build-info.
 *
 * Picks the most recently modified *.json file, parses it, and normalizes it
 * into `{ raw, output, compilerVersion }`. Falls back to '0.8.0' when no
 * compiler version can be detected.
 */
function loadBuildInfoRaw(foundryRoot) {
    const buildInfoDir = path.join(foundryRoot, '.recon', 'out', 'build-info');
    let candidates;
    try {
        candidates = fs
            .readdirSync(buildInfoDir)
            .filter((name) => name.endsWith('.json'))
            .map((name) => {
                const fullPath = path.join(buildInfoDir, name);
                return { name, fullPath, mtime: fs.statSync(fullPath).mtime };
            });
    }
    catch {
        // readdirSync throws when the directory does not exist.
        throw new Error(`No build-info directory found at ${buildInfoDir}. Run: forge build --build-info`);
    }
    if (candidates.length === 0) {
        throw new Error(`No build-info JSON files found in ${buildInfoDir}. Run: forge build --build-info`);
    }
    // Newest first — the most recent forge build wins.
    candidates.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
    const latest = candidates[0];
    console.log(`[recon-generate] Using build-info: ${latest.name}`);
    const sizeBytes = fs.statSync(latest.fullPath).size;
    console.log(`[recon-generate] Reading build-info (${(sizeBytes / 1024 / 1024).toFixed(1)} MB)...`);
    const rawContent = fs.readFileSync(latest.fullPath, 'utf-8');
    console.log(`[recon-generate] Parsing JSON...`);
    const parsed = JSON.parse(rawContent);
    const { output, compilerVersion: detected } = normalizeCompilerOutput(parsed);
    return { raw: parsed, output, compilerVersion: detected || '0.8.0' };
}
|
|
133
|
+
/**
|
|
134
|
+
* Extract source contents from build-info input.sources
|
|
135
|
+
*/
|
|
136
|
+
/**
 * Extract source contents from build-info input.sources
 *
 * Returns a Map of file path -> inline Solidity source text; entries without
 * inline content are skipped, and a missing `input` yields an empty Map.
 */
function extractSourceContents(raw) {
    const contents = new Map();
    const sources = raw.input?.sources;
    if (sources) {
        for (const [filePath, entry] of Object.entries(sources)) {
            if (entry?.content) {
                contents.set(filePath, entry.content);
            }
        }
    }
    return contents;
}
|
|
148
|
+
/**
 * Build (if needed), load the latest build-info, and ingest the compiler
 * output into a fresh SQLite database (default: .recon/db.sqlite).
 *
 * @param foundryRoot directory containing foundry.toml
 * @param options.output optional DB path (relative paths resolve against foundryRoot)
 * @param options.forceBuild when true, rebuild even if .recon/out exists
 */
const runDb = async (foundryRoot, options = {}) => {
    // Build into .recon/out (skipping test/script), reuse if already present
    await ensureBuild(foundryRoot, !!options.forceBuild);
    // Load latest build-info from .recon/out
    const { raw, output, compilerVersion } = loadBuildInfoRaw(foundryRoot);
    const sourceContents = extractSourceContents(raw);
    console.log(`[recon-generate] Compiler version: ${compilerVersion}`);
    if (sourceContents.size > 0) {
        console.log(`[recon-generate] Found ${sourceContents.size} source content entries`);
    }
    // Resolve the DB location, creating .recon/ when absent.
    const reconDir = path.join(foundryRoot, '.recon');
    if (!fs.existsSync(reconDir)) {
        fs.mkdirSync(reconDir, { recursive: true });
    }
    let dbPath;
    if (options.output) {
        dbPath = path.isAbsolute(options.output)
            ? options.output
            : path.join(foundryRoot, options.output);
    }
    else {
        dbPath = path.join(reconDir, 'db.sqlite');
    }
    // Remove existing DB to start fresh
    if (fs.existsSync(dbPath)) {
        fs.unlinkSync(dbPath);
    }
    console.log(`[recon-generate] Creating database at ${dbPath}`);
    const db = (0, db_1.createDatabase)(dbPath);
    try {
        const sourceUnits = (0, ingestion_1.ingest)(db, output, {
            compilerVersion,
            sourceContents,
            compilerContracts: output.contracts,
        });
        console.log(`[recon-generate] Ingested ${sourceUnits.length} source unit(s)`);
    }
    finally {
        // Always release the SQLite handle, even when ingestion throws.
        db.close();
    }
    console.log(`[recon-generate] Done. Database written to ${dbPath}`);
};
exports.runDb = runDb;
|
package/dist/index.js
CHANGED
|
@@ -47,6 +47,7 @@ const link_1 = require("./link");
|
|
|
47
47
|
const link2_1 = require("./link2");
|
|
48
48
|
const sourcemap_1 = require("./sourcemap");
|
|
49
49
|
const dictionary_1 = require("./dictionary");
|
|
50
|
+
const db_1 = require("./db");
|
|
50
51
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
|
51
52
|
const packageJson = require('../package.json');
|
|
52
53
|
function parseFilter(input) {
|
|
@@ -286,6 +287,20 @@ async function main() {
|
|
|
286
287
|
: undefined;
|
|
287
288
|
await (0, info_1.runInfo)(foundryRoot, contractName, { outputPath, json: !!opts.json });
|
|
288
289
|
});
|
|
290
|
+
program
|
|
291
|
+
.command('db')
|
|
292
|
+
.description('Ingest Solidity AST from build-info into a SQLite database (.recon/db.sqlite)')
|
|
293
|
+
.option('-o, --output <path>', 'Custom output path for the database file')
|
|
294
|
+
.option('--force-build', 'Delete .recon/out to force a fresh forge build before ingesting')
|
|
295
|
+
.option('--foundry-config <path>', 'Path to foundry.toml (defaults to ./foundry.toml)')
|
|
296
|
+
.action(async function () {
|
|
297
|
+
// @ts-ignore - Commander types are complex
|
|
298
|
+
const opts = this.opts();
|
|
299
|
+
const workspaceRoot = process.cwd();
|
|
300
|
+
const foundryConfig = (0, utils_1.getFoundryConfigPath)(workspaceRoot, opts.foundryConfig);
|
|
301
|
+
const foundryRoot = path.dirname(foundryConfig);
|
|
302
|
+
await (0, db_1.runDb)(foundryRoot, { output: opts.output, forceBuild: !!opts.forceBuild });
|
|
303
|
+
});
|
|
289
304
|
program
|
|
290
305
|
.command('wake')
|
|
291
306
|
.description('Generate Python fuzzing suite using Wake framework')
|
|
package/dist/ingestion/db.js
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.openDatabase = openDatabase;
|
|
7
|
+
exports.initializeSchema = initializeSchema;
|
|
8
|
+
exports.createDatabase = createDatabase;
|
|
9
|
+
const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
|
|
10
|
+
const schema_1 = require("./schema");
|
|
11
|
+
/**
 * Open (or create) the SQLite database at `dbPath` with pragmas suited to
 * bulk ingestion: WAL journaling, enforced foreign keys, NORMAL synchronous.
 */
function openDatabase(dbPath) {
    const db = new better_sqlite3_1.default(dbPath);
    const pragmas = ["journal_mode = WAL", "foreign_keys = ON", "synchronous = NORMAL"];
    for (const pragma of pragmas) {
        db.pragma(pragma);
    }
    return db;
}
|
|
18
|
+
/** Apply the full ingestion DDL (SCHEMA_SQL) to an open database handle. */
function initializeSchema(db) {
    db.exec(schema_1.SCHEMA_SQL);
}
|
|
21
|
+
/** Convenience wrapper: open `dbPath` and initialize the schema in one call. */
function createDatabase(dbPath) {
    const handle = openDatabase(dbPath);
    initializeSchema(handle);
    return handle;
}
|
|
package/dist/ingestion/handlers.d.ts
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import type Database from "better-sqlite3";
import type { ASTNode } from "solc-typed-ast";
/**
 * One prepared INSERT statement per table in the ingestion schema.
 *
 * Statements are prepared once via {@link createPreparedStatements} and then
 * reused for every AST node, so ingestion avoids re-parsing SQL per row.
 */
export interface PreparedStatements {
    insertSourceUnit: Database.Statement;
    insertAstNode: Database.Statement;
    insertEdge: Database.Statement;
    insertContract: Database.Statement;
    insertFunction: Database.Statement;
    insertModifier: Database.Statement;
    insertVariable: Database.Statement;
    insertStruct: Database.Statement;
    insertEnum: Database.Statement;
    insertEnumValue: Database.Statement;
    insertEvent: Database.Statement;
    insertError: Database.Statement;
    insertUserDefinedValueType: Database.Statement;
    insertIdentifier: Database.Statement;
    insertLiteral: Database.Statement;
    insertBinaryOperation: Database.Statement;
    insertUnaryOperation: Database.Statement;
    insertAssignment: Database.Statement;
    insertConditional: Database.Statement;
    insertFunctionCall: Database.Statement;
    insertFunctionCallOptions: Database.Statement;
    insertMemberAccess: Database.Statement;
    insertIndexAccess: Database.Statement;
    insertIndexRangeAccess: Database.Statement;
    insertTupleExpression: Database.Statement;
    insertNewExpression: Database.Statement;
    insertElementaryTypeNameExpression: Database.Statement;
    insertIfStatement: Database.Statement;
    insertForStatement: Database.Statement;
    insertWhileStatement: Database.Statement;
    insertDoWhileStatement: Database.Statement;
    insertTryStatement: Database.Statement;
    insertTryCatchClause: Database.Statement;
    insertVariableDeclarationStatement: Database.Statement;
    insertEmitStatement: Database.Statement;
    insertRevertStatement: Database.Statement;
    insertInlineAssembly: Database.Statement;
    insertBlock: Database.Statement;
    insertExpressionStatement: Database.Statement;
    insertReturnStatement: Database.Statement;
    insertElementaryTypeName: Database.Statement;
    insertUserDefinedTypeName: Database.Statement;
    insertFunctionTypeName: Database.Statement;
    insertMappingTypeName: Database.Statement;
    insertArrayTypeName: Database.Statement;
    insertPragma: Database.Statement;
    insertImport: Database.Statement;
    insertInheritanceSpecifier: Database.Statement;
    insertModifierInvocation: Database.Statement;
    insertParameterList: Database.Statement;
    insertIdentifierPath: Database.Statement;
    insertStructuredDoc: Database.Statement;
    insertUsingForDirective: Database.Statement;
    insertOverrideSpecifier: Database.Statement;
    insertYulNode: Database.Statement;
    insertYulEdge: Database.Statement;
}
/**
 * Prepare all INSERT statements against `db`.
 *
 * NOTE(review): presumably the database must already have the ingestion
 * schema applied before calling this — confirm against the implementation.
 */
export declare function createPreparedStatements(db: Database.Database): PreparedStatements;
/** A handler persists one AST node using the shared prepared statements. */
type Handler = (node: ASTNode, stmts: PreparedStatements) => void;
/** Registry mapping AST node type names to their persistence handlers. */
declare const handlers: Map<string, Handler>;
export { handlers };
|