hardhat-deploy 2.0.0-next.7 → 2.0.0-next.70
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +120 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +197 -0
- package/dist/cli.js.map +1 -0
- package/dist/config/default.d.ts.map +1 -0
- package/dist/config/default.js +5 -0
- package/dist/config/default.js.map +1 -0
- package/dist/config/get-config.d.ts.map +1 -0
- package/dist/config/get-config.js +9 -0
- package/dist/config/get-config.js.map +1 -0
- package/dist/config/validation.d.ts.map +1 -0
- package/dist/config/validation.js +17 -0
- package/dist/config/validation.js.map +1 -0
- package/dist/generate-types.d.ts +5 -0
- package/dist/generate-types.d.ts.map +1 -0
- package/dist/generate-types.js +244 -0
- package/dist/generate-types.js.map +1 -0
- package/dist/helpers.d.ts +34 -0
- package/dist/helpers.d.ts.map +1 -0
- package/dist/{esm/helpers.js → helpers.js} +106 -73
- package/dist/helpers.js.map +1 -0
- package/dist/hook-handlers/config.d.ts.map +1 -0
- package/dist/hook-handlers/config.js +68 -0
- package/dist/hook-handlers/config.js.map +1 -0
- package/dist/hook-handlers/solidity.d.ts.map +1 -0
- package/dist/hook-handlers/solidity.js +21 -0
- package/dist/hook-handlers/solidity.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/{esm/index.js → index.js} +20 -3
- package/dist/index.js.map +1 -0
- package/dist/postinstall.d.ts +3 -0
- package/dist/postinstall.d.ts.map +1 -0
- package/dist/postinstall.js +148 -0
- package/dist/postinstall.js.map +1 -0
- package/dist/{esm/tasks → tasks}/deploy.d.ts +1 -0
- package/dist/tasks/deploy.d.ts.map +1 -0
- package/dist/tasks/deploy.js +32 -0
- package/dist/tasks/deploy.js.map +1 -0
- package/dist/{esm/type-extensions.d.ts → type-extensions.d.ts} +2 -2
- package/dist/type-extensions.d.ts.map +1 -0
- package/dist/type-extensions.js.map +1 -0
- package/dist/types.d.ts +13 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js.map +1 -0
- package/dist/utils/files.d.ts +9 -0
- package/dist/utils/files.d.ts.map +1 -0
- package/dist/utils/files.js +25 -0
- package/dist/utils/files.js.map +1 -0
- package/dist/v1-detection.d.ts +12 -0
- package/dist/v1-detection.d.ts.map +1 -0
- package/dist/v1-detection.js +118 -0
- package/dist/v1-detection.js.map +1 -0
- package/dist/v1-entry.cjs +92 -0
- package/package.json +58 -26
- package/src/cli.ts +246 -0
- package/src/config/default.ts +6 -0
- package/src/config/get-config.ts +12 -0
- package/src/config/validation.ts +25 -0
- package/src/generate-types.ts +281 -0
- package/src/helpers.ts +311 -0
- package/src/hook-handlers/config.ts +80 -0
- package/src/hook-handlers/solidity.ts +33 -0
- package/src/index.ts +53 -0
- package/src/postinstall.ts +166 -0
- package/src/tasks/deploy.ts +43 -0
- package/src/type-extensions.ts +12 -0
- package/src/types.ts +9 -0
- package/src/utils/files.ts +37 -0
- package/src/v1-entry.cjs +92 -0
- package/templates/basic/README.md +44 -0
- package/templates/basic/contracts/Counter.sol +19 -0
- package/templates/basic/contracts/Counter.t.sol +29 -0
- package/templates/basic/deploy/01_deploy_counter.ts +13 -0
- package/templates/basic/hardhat.config.ts +50 -0
- package/templates/basic/package.json +31 -0
- package/templates/basic/pnpm-lock.yaml +1643 -0
- package/templates/basic/rocketh/config.ts +65 -0
- package/templates/basic/rocketh/deploy.ts +20 -0
- package/templates/basic/rocketh/environment.ts +22 -0
- package/templates/basic/test/Counter.ts +61 -0
- package/templates/basic/tsconfig.json +13 -0
- package/dist/esm/config/default.d.ts.map +0 -1
- package/dist/esm/config/default.js +0 -8
- package/dist/esm/config/default.js.map +0 -1
- package/dist/esm/config/get-config.d.ts.map +0 -1
- package/dist/esm/config/get-config.js +0 -8
- package/dist/esm/config/get-config.js.map +0 -1
- package/dist/esm/config/validation.d.ts.map +0 -1
- package/dist/esm/config/validation.js +0 -16
- package/dist/esm/config/validation.js.map +0 -1
- package/dist/esm/generate-types.d.ts +0 -6
- package/dist/esm/generate-types.d.ts.map +0 -1
- package/dist/esm/generate-types.js +0 -198
- package/dist/esm/generate-types.js.map +0 -1
- package/dist/esm/helpers.d.ts +0 -18
- package/dist/esm/helpers.d.ts.map +0 -1
- package/dist/esm/helpers.js.map +0 -1
- package/dist/esm/hook-handlers/config.d.ts.map +0 -1
- package/dist/esm/hook-handlers/config.js +0 -16
- package/dist/esm/hook-handlers/config.js.map +0 -1
- package/dist/esm/hook-handlers/solidity.d.ts.map +0 -1
- package/dist/esm/hook-handlers/solidity.js +0 -15
- package/dist/esm/hook-handlers/solidity.js.map +0 -1
- package/dist/esm/index.d.ts +0 -5
- package/dist/esm/index.d.ts.map +0 -1
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/tasks/deploy.d.ts.map +0 -1
- package/dist/esm/tasks/deploy.js +0 -21
- package/dist/esm/tasks/deploy.js.map +0 -1
- package/dist/esm/type-extensions.d.ts.map +0 -1
- package/dist/esm/type-extensions.js.map +0 -1
- package/dist/esm/types.d.ts +0 -15
- package/dist/esm/types.d.ts.map +0 -1
- package/dist/esm/types.js.map +0 -1
- /package/dist/{esm/config → config}/default.d.ts +0 -0
- /package/dist/{esm/config → config}/get-config.d.ts +0 -0
- /package/dist/{esm/config → config}/validation.d.ts +0 -0
- /package/dist/{esm/hook-handlers → hook-handlers}/config.d.ts +0 -0
- /package/dist/{esm/hook-handlers → hook-handlers}/solidity.d.ts +0 -0
- /package/dist/{esm/type-extensions.js → type-extensions.js} +0 -0
- /package/dist/{esm/types.js → types.js} +0 -0
package/package.json
CHANGED
|
@@ -1,54 +1,86 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "hardhat-deploy",
|
|
3
|
-
"version": "2.0.0-next.
|
|
4
|
-
"description": "
|
|
3
|
+
"version": "2.0.0-next.70",
|
|
4
|
+
"description": "Hardhat plugin for replicable smart contract deployments and easy testing across multiple EVM chains, with support for proxies, diamonds, named accounts, and deployment fixtures",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"hardhat",
|
|
7
|
+
"ethereum",
|
|
8
|
+
"solidity",
|
|
9
|
+
"deployment",
|
|
10
|
+
"test",
|
|
11
|
+
"cli"
|
|
12
|
+
],
|
|
13
|
+
"author": "Ronan Sandford",
|
|
14
|
+
"license": "MIT",
|
|
15
|
+
"homepage": "https://github.com/wighawag/hardhat-deploy#readme",
|
|
16
|
+
"repository": {
|
|
17
|
+
"type": "git",
|
|
18
|
+
"url": "git+https://github.com/wighawag/hardhat-deploy.git"
|
|
19
|
+
},
|
|
20
|
+
"bugs": {
|
|
21
|
+
"url": "https://github.com/wighawag/hardhat-deploy/issues"
|
|
22
|
+
},
|
|
5
23
|
"publishConfig": {
|
|
6
24
|
"access": "public"
|
|
7
25
|
},
|
|
8
26
|
"type": "module",
|
|
9
|
-
"main": "dist/
|
|
10
|
-
"module": "./dist/
|
|
11
|
-
"types": "./dist/
|
|
27
|
+
"main": "dist/index.js",
|
|
28
|
+
"module": "./dist/index.js",
|
|
29
|
+
"types": "./dist/index.d.ts",
|
|
12
30
|
"exports": {
|
|
13
31
|
".": {
|
|
14
32
|
"import": {
|
|
15
|
-
"types": "./dist/
|
|
16
|
-
"default": "./dist/
|
|
17
|
-
}
|
|
33
|
+
"types": "./dist/index.d.ts",
|
|
34
|
+
"default": "./dist/index.js"
|
|
35
|
+
},
|
|
36
|
+
"require": "./dist/v1-entry.cjs"
|
|
18
37
|
},
|
|
19
38
|
"./helpers": {
|
|
20
39
|
"import": {
|
|
21
|
-
"types": "./dist/
|
|
22
|
-
"default": "./dist/
|
|
40
|
+
"types": "./dist/helpers.d.ts",
|
|
41
|
+
"default": "./dist/helpers.js"
|
|
23
42
|
}
|
|
24
43
|
}
|
|
25
44
|
},
|
|
26
45
|
"files": [
|
|
27
|
-
"dist"
|
|
46
|
+
"dist",
|
|
47
|
+
"src",
|
|
48
|
+
"bin",
|
|
49
|
+
"templates"
|
|
28
50
|
],
|
|
51
|
+
"bin": {
|
|
52
|
+
"hardhat-deploy": "./dist/cli.js"
|
|
53
|
+
},
|
|
54
|
+
"engines": {
|
|
55
|
+
"node": ">=22.0.0"
|
|
56
|
+
},
|
|
29
57
|
"devDependencies": {
|
|
30
|
-
"@changesets/cli": "^2.
|
|
31
|
-
"@
|
|
32
|
-
"
|
|
33
|
-
"
|
|
34
|
-
"
|
|
35
|
-
"
|
|
58
|
+
"@changesets/cli": "^2.29.8",
|
|
59
|
+
"@rocketh/node": "0.17.21",
|
|
60
|
+
"@types/node": "^25.0.10",
|
|
61
|
+
"as-soon": "^0.1.5",
|
|
62
|
+
"hardhat": "3.1.5",
|
|
63
|
+
"rimraf": "^6.1.2",
|
|
64
|
+
"rocketh": "0.17.18",
|
|
36
65
|
"set-defaults": "^0.0.5",
|
|
37
|
-
"typescript": "^5.
|
|
66
|
+
"typescript": "^5.9.3"
|
|
38
67
|
},
|
|
39
68
|
"peerDependencies": {
|
|
40
|
-
"
|
|
41
|
-
"
|
|
69
|
+
"@rocketh/node": "^0.17.21",
|
|
70
|
+
"hardhat": "^3.1.5"
|
|
42
71
|
},
|
|
43
72
|
"dependencies": {
|
|
44
|
-
"@nomicfoundation/hardhat-
|
|
45
|
-
"@nomicfoundation/hardhat-zod-utils": "3.0.0-next.0",
|
|
73
|
+
"@nomicfoundation/hardhat-zod-utils": "3.0.1",
|
|
46
74
|
"@types/debug": "^4.1.12",
|
|
47
|
-
"
|
|
48
|
-
"
|
|
75
|
+
"commander": "^14.0.2",
|
|
76
|
+
"debug": "^4.4.3",
|
|
77
|
+
"named-logs-console": "^0.5.1",
|
|
78
|
+
"slash": "^5.1.0",
|
|
79
|
+
"zod": "^4.3.6"
|
|
49
80
|
},
|
|
50
81
|
"scripts": {
|
|
51
|
-
"build": "tsc --project tsconfig.json",
|
|
52
|
-
"dev": "as-soon -w src pnpm build"
|
|
82
|
+
"build": "tsc --project tsconfig.json && cp src/v1-entry.cjs dist/",
|
|
83
|
+
"dev": "as-soon -w src pnpm build",
|
|
84
|
+
"postinstall": "node ./dist/postinstall.js"
|
|
53
85
|
}
|
|
54
86
|
}
|
package/src/cli.ts
ADDED
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import { Command } from 'commander';
|
|
4
|
+
import { readFileSync, readdirSync, mkdirSync, copyFileSync, existsSync, writeFileSync, statSync } from 'fs';
|
|
5
|
+
import { join, dirname, basename } from 'path';
|
|
6
|
+
import { fileURLToPath } from 'url';
|
|
7
|
+
import * as readline from 'readline';
|
|
8
|
+
import pkg from '../package.json' with { type: 'json' };
|
|
9
|
+
|
|
10
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
11
|
+
const __dirname = dirname(__filename);
|
|
12
|
+
|
|
13
|
+
const program = new Command();
|
|
14
|
+
|
|
15
|
+
// Get the current version of hardhat-deploy
|
|
16
|
+
const getHardhatDeployVersion = (): string => {
|
|
17
|
+
return pkg.version;
|
|
18
|
+
};
|
|
19
|
+
|
|
20
|
+
const askFolder = async (): Promise<string> => {
|
|
21
|
+
const rl = readline.createInterface({
|
|
22
|
+
input: process.stdin,
|
|
23
|
+
output: process.stdout,
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
return new Promise((resolve) => {
|
|
27
|
+
rl.question('Enter folder path (default: ./): ', (answer) => {
|
|
28
|
+
rl.close();
|
|
29
|
+
resolve(answer.trim() || './');
|
|
30
|
+
});
|
|
31
|
+
});
|
|
32
|
+
};
|
|
33
|
+
|
|
34
|
+
const askAutoInstall = async (): Promise<boolean> => {
|
|
35
|
+
const rl = readline.createInterface({
|
|
36
|
+
input: process.stdin,
|
|
37
|
+
output: process.stdout,
|
|
38
|
+
});
|
|
39
|
+
|
|
40
|
+
return new Promise((resolve) => {
|
|
41
|
+
rl.question('Auto-install dependencies with pnpm? (Y/n): ', (answer) => {
|
|
42
|
+
rl.close();
|
|
43
|
+
const trimmed = answer.trim().toLowerCase();
|
|
44
|
+
resolve(trimmed === '' || trimmed === 'y' || trimmed === 'yes');
|
|
45
|
+
});
|
|
46
|
+
});
|
|
47
|
+
};
|
|
48
|
+
|
|
49
|
+
const isFolderEmpty = (folderPath: string): boolean => {
|
|
50
|
+
if (!existsSync(folderPath)) {
|
|
51
|
+
return true;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
try {
|
|
55
|
+
const files = readdirSync(folderPath);
|
|
56
|
+
return files.length === 0;
|
|
57
|
+
} catch (error) {
|
|
58
|
+
// If we can't read the directory, treat it as not empty
|
|
59
|
+
return false;
|
|
60
|
+
}
|
|
61
|
+
};
|
|
62
|
+
|
|
63
|
+
const copyFile = (
|
|
64
|
+
source: string,
|
|
65
|
+
target: string,
|
|
66
|
+
replacements: Record<string, string> = {},
|
|
67
|
+
gitignorePatterns: string[] = []
|
|
68
|
+
): void => {
|
|
69
|
+
const fileName = basename(source);
|
|
70
|
+
|
|
71
|
+
// Check if file should be skipped based on gitignore patterns
|
|
72
|
+
for (const pattern of gitignorePatterns) {
|
|
73
|
+
if (fileName === pattern || fileName.endsWith(pattern.replace('*', ''))) {
|
|
74
|
+
return; // Skip this file
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
let content = readFileSync(source, 'utf-8');
|
|
79
|
+
|
|
80
|
+
// Apply replacements
|
|
81
|
+
for (const [search, replace] of Object.entries(replacements)) {
|
|
82
|
+
content = content.replaceAll(search, replace);
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
mkdirSync(dirname(target), { recursive: true });
|
|
86
|
+
|
|
87
|
+
// For binary files, just copy as-is
|
|
88
|
+
if (source.endsWith('.lock') || source.endsWith('.so') || source.endsWith('.wasm')) {
|
|
89
|
+
copyFileSync(source, target);
|
|
90
|
+
} else {
|
|
91
|
+
writeFileSync(target, content, 'utf-8');
|
|
92
|
+
}
|
|
93
|
+
};
|
|
94
|
+
|
|
95
|
+
const parseGitignore = (gitignorePath: string): string[] => {
|
|
96
|
+
if (!existsSync(gitignorePath)) {
|
|
97
|
+
return [];
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
const content = readFileSync(gitignorePath, 'utf-8');
|
|
101
|
+
return content
|
|
102
|
+
.split('\n')
|
|
103
|
+
.map((line: string) => line.trim())
|
|
104
|
+
.filter((line: string) => line && !line.startsWith('#'));
|
|
105
|
+
};
|
|
106
|
+
|
|
107
|
+
const copyFolder = (
|
|
108
|
+
source: string,
|
|
109
|
+
target: string,
|
|
110
|
+
replacements: Record<string, string> = {},
|
|
111
|
+
gitignorePatterns: string[] = []
|
|
112
|
+
): void => {
|
|
113
|
+
if (!existsSync(target)) {
|
|
114
|
+
mkdirSync(target, { recursive: true });
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
const files = readdirSync(source);
|
|
118
|
+
|
|
119
|
+
files.forEach((file) => {
|
|
120
|
+
const sourcePath = join(source, file);
|
|
121
|
+
const targetPath = join(target, file);
|
|
122
|
+
|
|
123
|
+
const stat = statSync(sourcePath);
|
|
124
|
+
|
|
125
|
+
if (stat.isDirectory()) {
|
|
126
|
+
// Check if directory should be skipped based on gitignore patterns
|
|
127
|
+
const shouldSkip = gitignorePatterns.some(pattern =>
|
|
128
|
+
file === pattern.replace('/', '') || pattern.startsWith('/') && file === pattern.slice(1)
|
|
129
|
+
);
|
|
130
|
+
|
|
131
|
+
if (!shouldSkip) {
|
|
132
|
+
copyFolder(sourcePath, targetPath, replacements, gitignorePatterns);
|
|
133
|
+
}
|
|
134
|
+
} else {
|
|
135
|
+
copyFile(sourcePath, targetPath, replacements, gitignorePatterns);
|
|
136
|
+
}
|
|
137
|
+
});
|
|
138
|
+
};
|
|
139
|
+
|
|
140
|
+
const generateProject = (targetFolder: string, projectName?: string): void => {
|
|
141
|
+
// find template in published package
|
|
142
|
+
const templatePath = join(__dirname, '../templates/basic');
|
|
143
|
+
const gitignorePath = join(templatePath, '.gitignore');
|
|
144
|
+
|
|
145
|
+
// Parse gitignore patterns
|
|
146
|
+
const gitignorePatterns = parseGitignore(gitignorePath);
|
|
147
|
+
|
|
148
|
+
// Determine project name from folder or use placeholder
|
|
149
|
+
const folderName = projectName || basename(targetFolder === './' ? process.cwd() : targetFolder);
|
|
150
|
+
|
|
151
|
+
// Get the current version of hardhat-deploy
|
|
152
|
+
const hardhatDeployVersion = getHardhatDeployVersion();
|
|
153
|
+
|
|
154
|
+
const replacements: Record<string, string> = {
|
|
155
|
+
'template-hardhat-node-test-runner': `${folderName}`,
|
|
156
|
+
'workspace:*': hardhatDeployVersion,
|
|
157
|
+
};
|
|
158
|
+
|
|
159
|
+
console.log(`Generating project in: ${targetFolder}`);
|
|
160
|
+
copyFolder(templatePath, targetFolder, replacements, gitignorePatterns);
|
|
161
|
+
console.log('✓ Project initialized successfully!');
|
|
162
|
+
};
|
|
163
|
+
|
|
164
|
+
const runPnpmInstall = async (folderPath: string): Promise<void> => {
|
|
165
|
+
console.log(`Installing dependencies...`);
|
|
166
|
+
const { spawn } = await import('child_process');
|
|
167
|
+
|
|
168
|
+
return new Promise((resolve, reject) => {
|
|
169
|
+
// Use --ignore-workspace to ensure dependencies are installed locally
|
|
170
|
+
// This prevents pnpm from treating the target folder as part of a parent workspace
|
|
171
|
+
const pnpm = spawn('pnpm', ['install', '--ignore-workspace', `--no-frozen-lockfile`], {
|
|
172
|
+
cwd: folderPath,
|
|
173
|
+
stdio: 'inherit',
|
|
174
|
+
});
|
|
175
|
+
|
|
176
|
+
pnpm.on('close', (code) => {
|
|
177
|
+
if (code === 0) {
|
|
178
|
+
console.log('✓ Dependencies installed successfully!');
|
|
179
|
+
resolve();
|
|
180
|
+
} else {
|
|
181
|
+
reject(new Error(`pnpm install failed with exit code ${code}`));
|
|
182
|
+
}
|
|
183
|
+
});
|
|
184
|
+
|
|
185
|
+
pnpm.on('error', (error) => {
|
|
186
|
+
reject(error);
|
|
187
|
+
});
|
|
188
|
+
});
|
|
189
|
+
};
|
|
190
|
+
|
|
191
|
+
// CLI definition (commander). Note: program.parse() at the bottom runs at
// module load, so importing this file executes the CLI.
program
  .name('hardhat-deploy')
  .description('CLI for hardhat-deploy')
  .version(pkg.version);

// `hardhat-deploy init [folder] [--install]`: scaffold a new project from the
// bundled template, optionally running `pnpm install` afterwards.
program
  .command('init')
  .argument('[folder]', 'folder to initialize the project in')
  .option('--install', 'auto-install dependencies with pnpm')
  .description('Initialize a new hardhat-deploy project')
  .action(async (folder?: string, options?: { install?: boolean }) => {
    let targetFolder = folder;
    let autoInstall = options?.install ?? false;

    // If no folder specified, ask user
    if (!targetFolder) {
      targetFolder = await askFolder();
      // If we prompted for folder, also prompt for auto-install
      // (interactive mode ignores the --install flag's default).
      autoInstall = await askAutoInstall();
    }

    // Normalize path
    targetFolder = targetFolder.trim();

    // Check if folder is empty — refuse to scaffold over existing files.
    if (!isFolderEmpty(targetFolder)) {
      console.error(`Error: Folder "${targetFolder}" is not empty. Please specify an empty folder or a new folder path.`);
      process.exit(1);
    }

    // Generate project
    generateProject(targetFolder);

    // Auto-install if requested
    if (autoInstall) {
      try {
        await runPnpmInstall(targetFolder);
      } catch (error) {
        // Installation failure is not fatal to the scaffold: tell the user
        // how to finish manually, but exit non-zero so scripts notice.
        console.error('Failed to install dependencies:', error);
        console.log('\nYou can install dependencies manually:');
        console.log(`  cd ${targetFolder === './' ? '.' : targetFolder}`);
        console.log('  pnpm install');
        process.exit(1);
      }
    }

    // Show next steps
    console.log(`\nNext steps:`);
    console.log(`  cd ${targetFolder === './' ? '.' : targetFolder}`);
    if (!autoInstall) {
      console.log(`  pnpm install`);
    }
    console.log(`  pnpm hardhat test`);
  });

program.parse();
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type {ArtifactGenerationConfig, ArtifactGenerationUserConfig} from '../types.js';
|
|
2
|
+
|
|
3
|
+
import {DEFAULT_CONFIG} from './default.js';
|
|
4
|
+
|
|
5
|
+
export function getConfig(userConfig: ArtifactGenerationUserConfig | undefined): ArtifactGenerationConfig {
|
|
6
|
+
return {
|
|
7
|
+
destinations:
|
|
8
|
+
userConfig?.destinations?.map((v) => ({mode: v.mode || 'javascript', folder: v.folder || './generated'})) ||
|
|
9
|
+
DEFAULT_CONFIG.destinations,
|
|
10
|
+
// externalArtifacts: userConfig?.externalArtifacts || DEFAULT_CONFIG.externalArtifacts,
|
|
11
|
+
};
|
|
12
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import type {HardhatUserConfigValidationError} from '@nomicfoundation/hardhat-zod-utils';
|
|
2
|
+
import type {HardhatUserConfig} from 'hardhat/config';
|
|
3
|
+
|
|
4
|
+
import {validateUserConfigZodType} from '@nomicfoundation/hardhat-zod-utils';
|
|
5
|
+
import {z} from 'zod';
|
|
6
|
+
|
|
7
|
+
// Zod schema for the `generateTypedArtifacts` user-config section: an
// optional object with an optional list of destinations, each with an
// optional mode ('javascript' | 'typescript') and an optional output folder.
const artifactGenerationUserConfigSchema = z
  .object({
    // externalArtifacts: z.array(z.string()).optional(),
    destinations: z
      .array(
        z.object({
          mode: z.union([z.literal('javascript'), z.literal('typescript')]).optional(),
          folder: z.string().optional(),
        })
      )
      .optional(),
  })
  .optional();

// Validate the `generateTypedArtifacts` section of the Hardhat user config
// against the schema above; returns the list of validation errors (empty
// when valid). NOTE(review): name says "Typechain" but this validates the
// hardhat-deploy artifact-generation config — likely a leftover name.
export async function validateTypechainUserConfig(
  userConfig: HardhatUserConfig
): Promise<HardhatUserConfigValidationError[]> {
  return validateUserConfigZodType(userConfig.generateTypedArtifacts, artifactGenerationUserConfigSchema);
}
|
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
import type {ArtifactGenerationConfig} from './types.js';
|
|
2
|
+
import debug from 'debug';
|
|
3
|
+
import fs from 'node:fs';
|
|
4
|
+
import path, {basename, dirname} from 'node:path';
|
|
5
|
+
import slash from 'slash';
|
|
6
|
+
import {FileTraversed, traverse} from './utils/files.js';
|
|
7
|
+
|
|
8
|
+
const log = debug('hardhat-deploy:generate-types');
|
|
9
|
+
|
|
10
|
+
function writeIfDifferent(filePath: string, newTextContent: string) {
|
|
11
|
+
// Ensure we're working with a string
|
|
12
|
+
const contentToWrite = String(newTextContent);
|
|
13
|
+
|
|
14
|
+
try {
|
|
15
|
+
let existingContent;
|
|
16
|
+
|
|
17
|
+
try {
|
|
18
|
+
existingContent = fs.readFileSync(filePath, 'utf8');
|
|
19
|
+
} catch (error) {
|
|
20
|
+
// console.log(`do not exist? => writing ${filePath}`);
|
|
21
|
+
// File doesn't exist, write and return
|
|
22
|
+
fs.writeFileSync(filePath, contentToWrite);
|
|
23
|
+
return {written: true, reason: 'File did not exist'};
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
// Simple string comparison
|
|
27
|
+
if (contentToWrite !== existingContent) {
|
|
28
|
+
// console.log(`content different => writing ${filePath}`);
|
|
29
|
+
fs.writeFileSync(filePath, contentToWrite);
|
|
30
|
+
return {written: true, reason: 'Content was different'};
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
return {written: false, reason: 'Content was identical'};
|
|
34
|
+
} catch (error) {
|
|
35
|
+
console.error('Error in writeIfDifferent:', error);
|
|
36
|
+
throw error;
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
function ensureDirExistsSync(folderPath: string) {
|
|
41
|
+
// Check if directory already exists
|
|
42
|
+
if (fs.existsSync(folderPath)) {
|
|
43
|
+
return {created: false, reason: 'Directory already exists'};
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// console.log(`do not exist? => mkdir ${folderPath}`);
|
|
47
|
+
// Directory doesn't exist, create it
|
|
48
|
+
fs.mkdirSync(folderPath, {recursive: true});
|
|
49
|
+
return {created: true, reason: 'Directory was created'};
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
type Artifact = {
|
|
53
|
+
contractName: string;
|
|
54
|
+
abi: any[];
|
|
55
|
+
// ...
|
|
56
|
+
};
|
|
57
|
+
|
|
58
|
+
type Artifacts = {[key: string]: Artifact};
|
|
59
|
+
|
|
60
|
+
function writeArtifactToFile(folder: string, canonicalName: string, data: Artifact, mode: 'typescript' | 'javascript') {
|
|
61
|
+
const name = canonicalName.split('/').pop();
|
|
62
|
+
const artifactName = `Artifact_${name}`;
|
|
63
|
+
const tsFilepath = path.join(folder, 'artifacts', canonicalName) + '.ts';
|
|
64
|
+
const folderPath = path.dirname(tsFilepath);
|
|
65
|
+
ensureDirExistsSync(folderPath);
|
|
66
|
+
if (mode === 'typescript') {
|
|
67
|
+
const newContent = `export const ${artifactName}: ${JSON.stringify(data, null, 2)} = ${JSON.stringify(data, null, 2)} as const;`;
|
|
68
|
+
writeIfDifferent(tsFilepath, newContent);
|
|
69
|
+
} else if (mode === 'javascript') {
|
|
70
|
+
const newContent = `export const ${artifactName} = /** @type {const} **/ (${JSON.stringify(data, null, 2)});`;
|
|
71
|
+
const dtsContent = `export declare const ${artifactName}: ${JSON.stringify(data, null, 2)};`;
|
|
72
|
+
const jsFilepath = path.join(folder, 'artifacts', canonicalName) + '.js';
|
|
73
|
+
writeIfDifferent(jsFilepath, newContent);
|
|
74
|
+
writeIfDifferent(jsFilepath.replace(/\.js$/, '.d.ts'), dtsContent);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
function writeArtifactIndexToFile(folder: string, data: Artifacts, mode: 'typescript' | 'javascript') {
|
|
79
|
+
const tsFilepath = path.join(folder, 'artifacts', 'index') + '.ts';
|
|
80
|
+
const folderPath = path.dirname(tsFilepath);
|
|
81
|
+
ensureDirExistsSync(folderPath);
|
|
82
|
+
if (mode === 'typescript') {
|
|
83
|
+
let newContent = '';
|
|
84
|
+
for (const canonicalName of Object.keys(data)) {
|
|
85
|
+
const transformedName = canonicalName.replaceAll('/', '_').replaceAll('.', '_');
|
|
86
|
+
const name = canonicalName.split('/').pop();
|
|
87
|
+
const artifactName = `Artifact_${name}`;
|
|
88
|
+
const importNaming =
|
|
89
|
+
canonicalName != name ? `${artifactName} as ${transformedName}` : `${artifactName} as ${name}`;
|
|
90
|
+
newContent += `export {${importNaming}} from './${canonicalName}.js';\n`;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
writeIfDifferent(tsFilepath, newContent);
|
|
94
|
+
} else if (mode === 'javascript') {
|
|
95
|
+
let newContent = '';
|
|
96
|
+
for (const canonicalName of Object.keys(data)) {
|
|
97
|
+
const transformedName = canonicalName.replaceAll('/', '_').replaceAll('.', '_');
|
|
98
|
+
const name = canonicalName.split('/').pop();
|
|
99
|
+
const artifactName = `Artifact_${name}`;
|
|
100
|
+
const importNaming =
|
|
101
|
+
canonicalName != name ? `${artifactName} as ${transformedName}` : `${artifactName} as ${name}`;
|
|
102
|
+
newContent += `export {${importNaming}} from './${canonicalName}.js';\n`;
|
|
103
|
+
}
|
|
104
|
+
const jsFilepath = path.join(folder, 'artifacts', 'index') + '.js';
|
|
105
|
+
writeIfDifferent(jsFilepath, newContent);
|
|
106
|
+
writeIfDifferent(jsFilepath.replace(/\.js$/, '.d.ts'), newContent);
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
function writeABIDefinitionToFile(
|
|
111
|
+
folder: string,
|
|
112
|
+
canonicalName: string,
|
|
113
|
+
data: Artifact,
|
|
114
|
+
mode: 'typescript' | 'javascript',
|
|
115
|
+
) {
|
|
116
|
+
const nameAsPath = canonicalName.split('/');
|
|
117
|
+
const name = nameAsPath[nameAsPath.length - 1];
|
|
118
|
+
const abiName = `Abi_${name}`;
|
|
119
|
+
const artifactName = `Artifact_${name}`;
|
|
120
|
+
const relativePath = `../`.repeat(nameAsPath.length);
|
|
121
|
+
const tsFilepath = path.join(folder, 'abis', canonicalName) + '.ts';
|
|
122
|
+
const folderPath = path.dirname(tsFilepath);
|
|
123
|
+
ensureDirExistsSync(folderPath);
|
|
124
|
+
if (mode === 'typescript') {
|
|
125
|
+
const newContent = `import {${artifactName}} from '${relativePath}artifacts/${canonicalName}.js';
|
|
126
|
+
export type ${abiName} = (typeof ${artifactName})['abi'];\n`;
|
|
127
|
+
writeIfDifferent(tsFilepath, newContent);
|
|
128
|
+
} else if (mode === 'javascript') {
|
|
129
|
+
const jsFilepath = path.join(folder, 'abis', canonicalName) + '.js';
|
|
130
|
+
const newContent = `export {};\n`;
|
|
131
|
+
const dtsContent = `import {${artifactName}} from '${relativePath}artifacts/${canonicalName}.js';
|
|
132
|
+
export type ${abiName} = (typeof ${artifactName})['abi'];\n`;
|
|
133
|
+
writeIfDifferent(jsFilepath, newContent);
|
|
134
|
+
writeIfDifferent(jsFilepath.replace(/\.js$/, '.d.ts'), dtsContent);
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
function writeABIDefinitionIndexToFile(folder: string, data: Artifacts, mode: 'typescript' | 'javascript') {
|
|
138
|
+
const tsFilepath = path.join(folder, 'abis', 'index') + '.ts';
|
|
139
|
+
const folderPath = path.dirname(tsFilepath);
|
|
140
|
+
ensureDirExistsSync(folderPath);
|
|
141
|
+
if (mode === 'typescript') {
|
|
142
|
+
let newContent = '';
|
|
143
|
+
for (const canonicalName of Object.keys(data)) {
|
|
144
|
+
const transformedName = canonicalName.replaceAll('/', '_').replaceAll('.', '_');
|
|
145
|
+
const name = canonicalName.split('/').pop();
|
|
146
|
+
const abiName = `Abi_${name}`;
|
|
147
|
+
const importNaming = canonicalName != name ? `${abiName} as ${transformedName}` : `${abiName} as ${name}`;
|
|
148
|
+
newContent += `export {${importNaming}} from "./${canonicalName}.js"\n`;
|
|
149
|
+
}
|
|
150
|
+
writeIfDifferent(tsFilepath, newContent);
|
|
151
|
+
} else if (mode === 'javascript') {
|
|
152
|
+
const jsFilepath = path.join(folder, 'abis', 'index') + '.js';
|
|
153
|
+
let newContent = '';
|
|
154
|
+
for (const canonicalName of Object.keys(data)) {
|
|
155
|
+
const transformedName = canonicalName.replaceAll('/', '_').replaceAll('.', '_');
|
|
156
|
+
const name = canonicalName.split('/').pop();
|
|
157
|
+
const abiName = `Abi_${name}`;
|
|
158
|
+
const importNaming = canonicalName != name ? `${abiName} as ${transformedName}` : `${abiName} as ${name}`;
|
|
159
|
+
newContent += `export {${importNaming}} from "./${canonicalName}.js"\n`;
|
|
160
|
+
}
|
|
161
|
+
writeIfDifferent(jsFilepath, newContent);
|
|
162
|
+
writeIfDifferent(jsFilepath.replace(/\.js$/, '.d.ts'), newContent);
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
export async function generateTypes(paths: {artifacts: string[]}, config: ArtifactGenerationConfig): Promise<void> {
|
|
167
|
+
const buildInfoCache = new Map<string, any>();
|
|
168
|
+
const allArtifacts: {[name: string]: any} = {};
|
|
169
|
+
const shortNameDict: {[shortName: string]: boolean} = {};
|
|
170
|
+
|
|
171
|
+
for (const artifactsPath of paths.artifacts) {
|
|
172
|
+
const files: FileTraversed[] = traverse(
|
|
173
|
+
artifactsPath,
|
|
174
|
+
[],
|
|
175
|
+
artifactsPath,
|
|
176
|
+
(name) => name != 'build-info' && !name.endsWith('.t.sol') && !name.endsWith('.dbg.json'),
|
|
177
|
+
);
|
|
178
|
+
|
|
179
|
+
// console.log('--------------------------');
|
|
180
|
+
// console.log(files);
|
|
181
|
+
// console.log('--------------------------');
|
|
182
|
+
|
|
183
|
+
for (const file of files) {
|
|
184
|
+
const filepath = file.path;
|
|
185
|
+
if (file.directory || !filepath.endsWith('.json')) {
|
|
186
|
+
continue;
|
|
187
|
+
}
|
|
188
|
+
const filename = slash(path.basename(filepath));
|
|
189
|
+
const dirname = slash(path.dirname(file.relativePath));
|
|
190
|
+
|
|
191
|
+
// const namePath = dirname.replace('.sol', '');
|
|
192
|
+
const contractName = filename.replace('.json', '');
|
|
193
|
+
// const shortName = artifact.artifactsEmitted[i];
|
|
194
|
+
// console.log({path: filepath});
|
|
195
|
+
const content = fs.readFileSync(filepath, 'utf-8');
|
|
196
|
+
const parsed = JSON.parse(content);
|
|
197
|
+
|
|
198
|
+
if (!parsed.buildInfoId) continue;
|
|
199
|
+
|
|
200
|
+
// TODO read config for artifacts folder
|
|
201
|
+
let buildInfoFilepath = path.join(artifactsPath, 'build-info', `${parsed.buildInfoId}.output.json`);
|
|
202
|
+
|
|
203
|
+
if (!parsed.buildInfoId) {
|
|
204
|
+
// support hardhat v2 artifacts files
|
|
205
|
+
if (fs.existsSync(filepath.replace('.json', '.dbg.json'))) {
|
|
206
|
+
// console.warn(`Artifact ${filepath} does not have a buildInfoId, but found a .dbg.json file. Using that instead.`);
|
|
207
|
+
const dbgContent = fs.readFileSync(filepath.replace('.json', '.dbg.json'), 'utf-8');
|
|
208
|
+
const dbgParsed = JSON.parse(dbgContent);
|
|
209
|
+
const buildInfoRelativePath = dbgParsed.buildInfo;
|
|
210
|
+
parsed.buildInfoId = path.basename(buildInfoRelativePath, '.json');
|
|
211
|
+
// console.log({buildInfoRelativePath, buildInfoId: parsed.buildInfoId});
|
|
212
|
+
buildInfoFilepath = path.join(artifactsPath, 'build-info', `${parsed.buildInfoId}.json`);
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
// const backupBuildInfoFilepath = path.join(
|
|
217
|
+
// './generated',
|
|
218
|
+
// buildInfoFilepath.slice(buildInfoFilepath.indexOf('/', 1))
|
|
219
|
+
// );
|
|
220
|
+
let buildInfoFilepathToUse = buildInfoFilepath;
|
|
221
|
+
// if (!fs.existsSync(buildInfoFilepathToUse)) {
|
|
222
|
+
// buildInfoFilepathToUse = backupBuildInfoFilepath;
|
|
223
|
+
// }
|
|
224
|
+
let parsedBuildInfo;
|
|
225
|
+
if (!buildInfoCache.has(buildInfoFilepathToUse)) {
|
|
226
|
+
if (!fs.existsSync(buildInfoFilepathToUse)) continue;
|
|
227
|
+
const buildInfoContent = fs.readFileSync(buildInfoFilepathToUse, 'utf-8');
|
|
228
|
+
parsedBuildInfo = JSON.parse(buildInfoContent);
|
|
229
|
+
buildInfoCache.set(buildInfoFilepathToUse, parsedBuildInfo);
|
|
230
|
+
} else {
|
|
231
|
+
parsedBuildInfo = buildInfoCache.get(buildInfoFilepathToUse);
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
const solidityOutput = parsedBuildInfo.output.contracts[parsed.inputSourceName][contractName];
|
|
235
|
+
const hardhatArtifactObject = {...parsed, ...solidityOutput};
|
|
236
|
+
const {buildInfoId, _format, ...artifactObject} = hardhatArtifactObject;
|
|
237
|
+
const fullName = `${dirname}/${contractName}`;
|
|
238
|
+
allArtifacts[fullName] = artifactObject;
|
|
239
|
+
if (shortNameDict[contractName]) {
|
|
240
|
+
delete allArtifacts[contractName];
|
|
241
|
+
} else {
|
|
242
|
+
allArtifacts[contractName] = artifactObject;
|
|
243
|
+
shortNameDict[contractName] = true;
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
for (const key of Object.keys(allArtifacts)) {
|
|
249
|
+
if (key.indexOf('/') >= 0) {
|
|
250
|
+
const split = key.split('/');
|
|
251
|
+
if (split.length > 1) {
|
|
252
|
+
const shortName = split[split.length - 1];
|
|
253
|
+
if (allArtifacts[shortName]) {
|
|
254
|
+
delete allArtifacts[key];
|
|
255
|
+
}
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
// for (const key of Object.keys(allArtifacts)) {
|
|
261
|
+
// const artifact = allArtifacts[key];
|
|
262
|
+
// writeFiles(key, artifact, config);
|
|
263
|
+
// }
|
|
264
|
+
// // const json = hre.config.generateTypedArtifacts.json || [];
|
|
265
|
+
// // json.push('./generated/_artifacts.json');
|
|
266
|
+
// // writeFiles(undefined, allArtifacts, {...hre.config.generateTypedArtifacts, json: json});
|
|
267
|
+
|
|
268
|
+
// writeFiles(undefined, allArtifacts, config);
|
|
269
|
+
|
|
270
|
+
for (const destination of config.destinations) {
|
|
271
|
+
const generatedFolder = destination.folder;
|
|
272
|
+
const mode = destination.mode;
|
|
273
|
+
for (const key of Object.keys(allArtifacts)) {
|
|
274
|
+
writeABIDefinitionToFile(generatedFolder, key, allArtifacts[key], mode);
|
|
275
|
+
writeArtifactToFile(generatedFolder, key, allArtifacts[key], mode);
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
writeArtifactIndexToFile(generatedFolder, allArtifacts, mode);
|
|
279
|
+
writeABIDefinitionIndexToFile(generatedFolder, allArtifacts, mode);
|
|
280
|
+
}
|
|
281
|
+
}
|