@solidusnetwork/hardhat-deploy-metadata 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,25 @@
1
+ {
2
+ "version": "0.2.0",
3
+ "configurations": [
4
+ {
5
+ "name": "Debug Mocha Tests",
6
+ "type": "node",
7
+ "request": "launch",
8
+ // Points to the Mocha executable in node_modules
9
+ "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
10
+ "args": [
11
+ // Any glob or file path pattern to find your tests
12
+ "test/**/*.js",
13
+ // Increase timeout for slower tests
14
+ "--timeout",
15
+ "40000",
16
+ // Show colors in the console
17
+ "--colors"
18
+ ],
19
+ // Use an integrated terminal so you can see interactive logs
20
+ "console": "integratedTerminal",
21
+ // Ensure we are in the root of the plugin workspace
22
+ "cwd": "${workspaceFolder}"
23
+ }
24
+ ]
25
+ }
package/README.md ADDED
@@ -0,0 +1,31 @@
1
+ # @solidusnetwork/hardhat-deploy-metadata
2
+
3
+ A Hardhat plugin that allows users to push contract metadata to IPFS.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ npm install @solidusnetwork/hardhat-deploy-metadata
9
+ ```
10
+
11
+ ## Usage
12
+
13
+ In `hardhat.config.js`:
14
+
15
+ ```js
16
+ require("@solidusnetwork/hardhat-deploy-metadata");
17
+ ```
18
+
19
+ In your deploy script:
20
+
21
+ ```js
22
+ const { artifacts, deployMetadata } = require("hardhat");
23
+ const contract = await artifacts.readArtifact("ContractName");
24
+ const ipfsHash = await deployMetadata.pushMetadataToIPFS("ContractName");
25
+
26
+ const factory = new solidus.ContractFactory(contract.abi, contract.bytecode, wallet, ipfsHash);
27
+ ```
28
+
29
+ ## License
30
+
31
+ MIT
package/index.js ADDED
@@ -0,0 +1,168 @@
1
+ const fs = require("fs");
2
+ const path = require("path");
3
+ const { parseFullyQualifiedName } = require("hardhat/utils/contract-names");
4
+ const Hash = require('ipfs-only-hash')
5
+ const CBOR = require('cbor-x')
6
+ const bs58 = require('bs58')
7
+ const { splitAuxdata, AuxdataStyle } = require('@ethereum-sourcify/bytecode-utils');
8
+ const { arrayify } = require('@ethersproject/bytes')
9
+ const { extendEnvironment } = require("hardhat/config");
10
+ const IPFS_URL = 'https://ipfs.solidus.network/api/v0'
11
+
12
+ async function pushMetadataToIPFS(hre, contractName) {
13
+
14
+ // Load the artifact for the specified contract
15
+ const { artifacts } = hre; // The Hardhat 'artifacts' object
16
+ const artifact = await artifacts.readArtifact(contractName);
17
+ const ipfsHash = await pushMetadataToIPFSWithBytecode(hre, artifact.deployedBytecode)
18
+ return ipfsHash
19
+ }
20
+
21
+ async function pushMetadataToIPFSWithBytecode(hre, bytecode) {
22
+ const { artifacts } = hre; // The Hardhat 'artifacts' object
23
+
24
+ let metadataSections = decodeMultipleMetadataSections(bytecode)
25
+
26
+ let ipfsEntries = []
27
+ const fqNames = await artifacts.getAllFullyQualifiedNames();
28
+ for (let fqn of fqNames) {
29
+ const buildInfo = await artifacts.getBuildInfo(fqn);
30
+ const { sourceName, contractName: name } = parseFullyQualifiedName(fqn);
31
+ const contractOutput = buildInfo.output.contracts[sourceName][name];
32
+ if (!contractOutput) {
33
+ throw new Error(`CompilerOutputContract not found for ${name}.`);
34
+ }
35
+ const hash = await Hash.of(contractOutput.metadata)
36
+ for (let metadata of metadataSections) {
37
+ if (hash === metadata.ipfs) {
38
+ ipfsEntries.push({ipfs: metadata.ipfs, name, metadata: contractOutput.metadata})
39
+ }
40
+ }
41
+ }
42
+ if (ipfsEntries.length === 0) {
43
+ throw new Error(
44
+ "Hardhat plugin error: No IPFS hash found in bytecode metadata. You must set `bytecodeHash: 'ipfs'` in your hardhat.config.js compiler settings:\n\n" +
45
+ " solidity: {\n" +
46
+ " compilers: [\n" +
47
+ " {\n" +
48
+ " version: '0.8.19',\n" +
49
+ " settings: {\n" +
50
+ " metadata: {\n" +
51
+ " bytecodeHash: 'ipfs',\n" +
52
+ " ...\n" +
53
+ " }\n" +
54
+ " }\n" +
55
+ " }\n" +
56
+ " ]\n" +
57
+ " }"
58
+ );
59
+ }
60
+ const { create } = await import('kubo-rpc-client')
61
+ const client = create({
62
+ url: IPFS_URL,
63
+ });
64
+ try {
65
+ for (let data of ipfsEntries) {
66
+ const { cid } = await client.add(data.metadata);
67
+ await client.pin.add(cid);
68
+ console.log('File added with CID:', cid.toString());
69
+ console.log("Original IPFS hash found in bytecode:", data.ipfs);
70
+ if(data.ipfs !== cid.toString()) {
71
+ throw new Error("IPFS hash in bytecode does not match the CID of the added metadata, want " + data.ipfs + " got " + cid.toString())
72
+ }
73
+ const outputPath = path.join(
74
+ hre.config.paths.root,
75
+ "metadata",
76
+ `${data.name}_metadata.json`
77
+ );
78
+ const outputDir = path.dirname(outputPath);
79
+ fs.mkdirSync(outputDir, { recursive: true });
80
+ fs.writeFileSync(outputPath, JSON.stringify(data.metadata, null, 2));
81
+
82
+ console.log(`Metadata JSON for ${data.name} saved to ${outputPath}`);
83
+ }
84
+
85
+ } catch (error) {
86
+ console.error('Error adding file:', error);
87
+ return ""
88
+ }
89
+ return ipfsEntries[0].ipfs
90
+ }
91
+
92
+ const decodeMultipleMetadataSections = (bytecode) => {
93
+ if (!bytecode || bytecode.length === 0) {
94
+ throw new Error("Bytecode cannot be empty");
95
+ }
96
+ bytecode = ensureHexPrefix(bytecode);
97
+
98
+ const metadataSections = [];
99
+ let remainingBytecode = bytecode;
100
+
101
+ while (remainingBytecode.length > 0) {
102
+ try {
103
+ const [executionBytecode, auxdata] = splitAuxdata(remainingBytecode, AuxdataStyle.SOLIDITY);
104
+
105
+ if (auxdata) {
106
+ const decodedMetadata = CBOR.decode(arrayify(`0x${auxdata}`));
107
+ metadataSections.push(decodedMetadata);
108
+ remainingBytecode = executionBytecode;
109
+ } else {
110
+ break;
111
+ }
112
+ } catch (error) {
113
+ console.log(error)
114
+ break;
115
+ }
116
+ }
117
+
118
+ return metadataSections.map((metadata) => ({
119
+ ...metadata,
120
+ ipfs: metadata.ipfs ? bs58.default.encode(metadata.ipfs) : undefined,
121
+ }));
122
+ };
123
+ const ensureHexPrefix = (bytecode) => {
124
+ return bytecode.startsWith('0x') ? bytecode : `0x${bytecode}`;
125
+ };
126
+
127
+ // Extend Hardhat's environment
128
+ extendEnvironment((hre) => {
129
+ // attach the function to the HRE
130
+ hre.deployMetadata = {
131
+ pushMetadataToIPFS: (contractName) => pushMetadataToIPFS(hre, contractName),
132
+ pushMetadataToIPFSWithBytecode: (bytecode) => pushMetadataToIPFSWithBytecode(hre, bytecode),
133
+ };
134
+ })
135
+
136
+
137
+ function ensureBytecodeHashIsIPFS(hre) {
138
+ // Read the Hardhat config
139
+ const compilers = hre.config.solidity.compilers || [];
140
+
141
+ // Check each compiler’s settings
142
+ const hasIPFSBytecode = compilers.some((compiler) => {
143
+ const settings = compiler.settings || {};
144
+ const metadata = settings.metadata || {};
145
+ return metadata.bytecodeHash === 'ipfs';
146
+ });
147
+
148
+ if (!hasIPFSBytecode) {
149
+ throw new Error(
150
+ "Hardhat plugin error: You must set `bytecodeHash: 'ipfs'` in your hardhat.config.js compiler settings:\n\n" +
151
+ " solidity: {\n" +
152
+ " compilers: [\n" +
153
+ " {\n" +
154
+ " version: '0.8.19',\n" +
155
+ " settings: {\n" +
156
+ " metadata: {\n" +
157
+ " bytecodeHash: 'ipfs',\n" +
158
+ " ...\n" +
159
+ " }\n" +
160
+ " }\n" +
161
+ " }\n" +
162
+ " ]\n" +
163
+ " }"
164
+ );
165
+ }
166
+ }
167
+
168
+ module.exports.pushMetadataToIPFS = pushMetadataToIPFS;
package/package.json ADDED
@@ -0,0 +1,48 @@
1
+ {
2
+ "name": "@solidusnetwork/hardhat-deploy-metadata",
3
+ "version": "1.0.8",
4
+ "description": "A Hardhat plugin that allows users to push contract metadata to IPFS.",
5
+ "main": "index.js",
6
+ "author": "Solidus Network",
7
+ "license": "MIT",
8
+ "keywords": [
9
+ "hardhat",
10
+ "plugin",
11
+ "ipfs",
12
+ "solidity",
13
+ "metadata",
14
+ "solidus"
15
+ ],
16
+ "repository": {
17
+ "type": "git",
18
+ "url": "https://github.com/solidus-network/hardhat-deploy-metadata.git"
19
+ },
20
+ "publishConfig": {
21
+ "access": "public"
22
+ },
23
+ "scripts": {
24
+ "lint": "eslint .",
25
+ "test": "mocha --timeout 40000"
26
+ },
27
+ "peerDependencies": {
28
+ "hardhat": "^2.22.0"
29
+ },
30
+ "dependencies": {
31
+ "@ethereum-sourcify/bytecode-utils": "1.3.1",
32
+ "@ethersproject/bytes": "^5.7.0",
33
+ "@openzeppelin/contracts": "^5.0.0",
34
+ "arrayify": "^1.0.0",
35
+ "bs58": "^6.0.0",
36
+ "cbor-x": "^1.6.0",
37
+ "chai": "^4.1.2",
38
+ "crypto": "^1.0.1",
39
+ "dotenv": "^16.4.5",
40
+ "hardhat": "^2.22.0",
41
+ "ipfs-only-hash": "^4.0.0",
42
+ "kubo-rpc-client": "^5.0.2",
43
+ "mocha": "^11.0.1"
44
+ },
45
+ "devDependencies": {
46
+ "eslint": "^8.48.0"
47
+ }
48
+ }
@@ -0,0 +1,28 @@
1
+ const path = require("path");
2
+ const fs = require("fs-extra");
3
+ const { resetHardhatContext } = require("hardhat/plugins-testing");
4
+
5
+ function useEnvironment(projectSubdir) {
6
+ beforeEach("Loading hardhat environment", function () {
7
+ const projectPath = path.join(__dirname, projectSubdir);
8
+ if (!fs.existsSync(projectPath)) {
9
+ throw new Error(`Test project folder '${projectSubdir}' not found in test/`);
10
+ }
11
+
12
+ // Change directory into the test fixture project
13
+ process.chdir(projectPath);
14
+ // Clear any require cache that might be left over
15
+ resetHardhatContext();
16
+
17
+ // Load a new Hardhat instance from this fixture
18
+ this.env = require("hardhat");
19
+ });
20
+
21
+ afterEach("Reset hardhat context", function () {
22
+ resetHardhatContext();
23
+ // Move back out to the main plugin directory
24
+ process.chdir(path.join(__dirname, "..", ".."));
25
+ });
26
+ }
27
+
28
+ module.exports = { useEnvironment };
@@ -0,0 +1,7 @@
1
+ pragma solidity ^0.8.20;
2
+
3
+ contract TestContract {
4
+ function helloWorld() public pure returns (string memory) {
5
+ return "Hello World";
6
+ }
7
+ }
@@ -0,0 +1,18 @@
1
+ require('../../index.js');
2
+
3
+ module.exports = {
4
+ solidity: {
5
+ compilers: [
6
+ {
7
+ version: "0.8.20",
8
+ settings: {
9
+ optimizer: { enabled: false },
10
+ evmVersion: 'london',
11
+ metadata: {
12
+ bytecodeHash: 'ipfs',
13
+ }
14
+ }
15
+ }
16
+ ]
17
+ }
18
+ };
@@ -0,0 +1,7 @@
1
+ pragma solidity ^0.8.20;
2
+
3
+ contract TestContract {
4
+ function helloWorld() public pure returns (string memory) {
5
+ return "Hello World";
6
+ }
7
+ }
@@ -0,0 +1,18 @@
1
+ require('../../index.js');
2
+
3
+ module.exports = {
4
+ solidity: {
5
+ compilers: [
6
+ {
7
+ version: "0.8.20",
8
+ settings: {
9
+ optimizer: { enabled: false },
10
+ metadata: {
11
+ useLiteralContent: true
12
+ },
13
+ evmVersion: 'london',
14
+ }
15
+ }
16
+ ]
17
+ }
18
+ };
@@ -0,0 +1,65 @@
1
+ const { expect } = require("chai");
2
+ const { useEnvironment } = require("./helpers");
3
+
4
+ describe("Hardhat IPFS Metadata Plugin", function () {
5
+ // 1) Test using a project that DOES have `bytecodeHash: "ipfs"`
6
+ describe("when using the ipfs-project fixture", function () {
7
+ useEnvironment("ipfs-project");
8
+ it("should attach 'deployMetadata.pushMetadataToIPFS' to the Hardhat environment", function () {
9
+ // this.env is the Hardhat Runtime Environment loaded from helpers.js
10
+ expect(this.env.deployMetadata).to.be.an("object");
11
+ expect(this.env.deployMetadata.pushMetadataToIPFS).to.be.a("function");
12
+ expect(this.env.deployMetadata.pushMetadataToIPFSWithBytecode).to.be.a("function");
13
+ });
14
+
15
+ it("should not throw an error when 'bytecodeHash' is set to 'ipfs'", async function () {
16
+ // In the fixture, we set 'bytecodeHash' to 'ipfs' so it should pass.
17
+ // Let's compile a sample contract to ensure everything runs.
18
+ await this.env.run("compile");
19
+
20
+ // If there's a contract named "TestContract", let's call the plugin function
21
+ const ipfsHash = await this.env.deployMetadata.pushMetadataToIPFS("TestContract");
22
+
23
+ // The plugin returns the IPFS hash from the metadata in the bytecode
24
+ expect(ipfsHash).to.be.a("string");
25
+ expect(ipfsHash.length).to.be.equal(46);
26
+ console.log(`Retrieved IPFS hash: ${ipfsHash}`);
27
+ });
28
+
29
+ it("should deploy metadata to IPFS from provided bytecode", async function () {
30
+ // In the fixture, we set 'bytecodeHash' to 'ipfs' so it should pass.
31
+ // Let's compile a sample contract to ensure everything runs.
32
+ await this.env.run("compile");
33
+
34
+ // If there's a contract named "TestContract", let's call the plugin function
35
+ const { artifacts } = this.env; // The Hardhat 'artifacts' object
36
+ const artifact = await artifacts.readArtifact("TestContract");
37
+ const ipfsHash = await this.env.deployMetadata.pushMetadataToIPFSWithBytecode(artifact.deployedBytecode);
38
+
39
+ // The plugin returns the IPFS hash from the metadata in the bytecode
40
+ expect(ipfsHash).to.be.a("string");
41
+ expect(ipfsHash.length).to.be.equal(46);
42
+ console.log(`Retrieved IPFS hash: ${ipfsHash}`);
43
+ });
44
+ });
45
+
46
+ // 2) Optionally, test a fixture that does NOT have 'bytecodeHash' set
47
+ // to ensure your plugin properly throws an error.
48
+ /*describe("when using a project with no ipfs setting", function () {
49
+ // Suppose you have another fixture called "no-ipfs-project"
50
+ // that sets `bytecodeHash: "none"` or leaves it out.
51
+ useEnvironment("no-ipfs-project");
52
+
53
+ it("should throw an error if 'bytecodeHash' is not set to 'ipfs'", async function () {
54
+ let errorThrown = false;
55
+ try {
56
+ await this.env.run("compile");
57
+ await this.env.deployMetadata.pushMetadataToIPFS("TestContract");
58
+ } catch (err) {
59
+ errorThrown = true;
60
+ expect(err.message).to.contain("You must set `bytecodeHash: 'ipfs'`");
61
+ }
62
+ expect(errorThrown).to.equal(true, "Expected error for missing IPFS setting");
63
+ });
64
+ });*/
65
+ });