@1inch/solidity-utils 5.3.0 → 6.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/docgen/templates.js +1 -2
- package/dist/docgen/templates.js.map +1 -1
- package/dist/hardhat-setup/networks.js +4 -4
- package/dist/hardhat-setup/networks.js.map +1 -1
- package/dist/src/bySig.js +4 -4
- package/dist/src/bySig.js.map +1 -1
- package/dist/src/expect.d.ts +2 -2
- package/dist/src/expect.js +2 -3
- package/dist/src/expect.js.map +1 -1
- package/dist/src/permit.js +14 -14
- package/dist/src/permit.js.map +1 -1
- package/dist/src/prelude.js +2 -2
- package/dist/src/prelude.js.map +1 -1
- package/dist/src/profileEVM.d.ts +0 -2
- package/dist/src/profileEVM.js +3 -3
- package/dist/src/profileEVM.js.map +1 -1
- package/dist/src/utils.js +11 -12
- package/dist/src/utils.js.map +1 -1
- package/dist/typechain-types/@openzeppelin/contracts/utils/Errors.d.ts +20 -0
- package/dist/typechain-types/@openzeppelin/contracts/utils/{math/Math.js → Errors.js} +1 -1
- package/dist/typechain-types/@openzeppelin/contracts/utils/Errors.js.map +1 -0
- package/dist/typechain-types/@openzeppelin/contracts/utils/index.d.ts +1 -0
- package/dist/typechain-types/@openzeppelin/contracts/utils/math/{Math.d.ts → SafeCast.d.ts} +4 -4
- package/dist/typechain-types/@openzeppelin/contracts/utils/math/SafeCast.js +3 -0
- package/dist/typechain-types/@openzeppelin/contracts/utils/math/SafeCast.js.map +1 -0
- package/dist/typechain-types/@openzeppelin/contracts/utils/math/index.d.ts +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Address__factory.d.ts +1 -13
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Address__factory.js +1 -17
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Address__factory.js.map +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Errors__factory.d.ts +50 -0
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Errors__factory.js +77 -0
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Errors__factory.js.map +1 -0
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/ShortStrings__factory.d.ts +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/ShortStrings__factory.js +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Strings__factory.d.ts +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/Strings__factory.js +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/cryptography/ECDSA__factory.d.ts +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/cryptography/ECDSA__factory.js +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/index.d.ts +1 -0
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/index.js +3 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/index.js.map +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/SafeCast__factory.d.ts +62 -0
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/SafeCast__factory.js +94 -0
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/SafeCast__factory.js.map +1 -0
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/index.d.ts +1 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/index.js +3 -3
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/index.js.map +1 -1
- package/dist/typechain-types/factories/contracts/libraries/SafeERC20__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/libraries/SafeERC20__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/libraries/UniERC20__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/libraries/UniERC20__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/mixins/BySig__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/mixins/BySig__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/mixins/BySig__factory.js.map +1 -1
- package/dist/typechain-types/factories/contracts/mocks/ERC20PermitMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/mocks/ERC20PermitMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/mocks/TokenCustomDecimalsMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/mocks/TokenCustomDecimalsMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/mocks/TokenMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/mocks/TokenMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/ECDSATest__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/ECDSATest__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/DaiLikePermitMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/DaiLikePermitMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/ERC1271WalletMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/ERC1271WalletMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/PermitAndCallMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/PermitAndCallMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/PermitableMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/PermitableMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20NoReturnMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20NoReturnMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20PermitNoRevertMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20PermitNoRevertMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20ReturnFalseMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20ReturnFalseMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20ReturnTrueMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20ReturnTrueMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20ThroughZeroApprove__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20ThroughZeroApprove__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20WithSafeBalance__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/ERC20WithSafeBalance__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/Permit2ReturnTrueMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/Permit2ReturnTrueMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/SafeERC20Wrapper__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/SafeERC20Wrapper__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/SafeWETHWrapper__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/SafeERC20Helper.sol/SafeWETHWrapper__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/TokenWithBySig__factory.d.ts +2 -2
- package/dist/typechain-types/factories/contracts/tests/mocks/TokenWithBySig__factory.js +2 -2
- package/dist/typechain-types/factories/contracts/tests/mocks/TokenWithBySig__factory.js.map +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/USDCLikePermitMock__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/USDCLikePermitMock__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ERC20Capitals__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ERC20Capitals__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ERC20bytes32Capitals__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ERC20bytes32Capitals__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ERC20bytes32__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ERC20bytes32__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ETHBadReceiver__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/ETHBadReceiver__factory.js +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/UniERC20Wrapper__factory.d.ts +1 -1
- package/dist/typechain-types/factories/contracts/tests/mocks/UniERC20Helper.sol/UniERC20Wrapper__factory.js +1 -1
- package/dist/typechain-types/index.d.ts +4 -2
- package/dist/typechain-types/index.js +6 -4
- package/dist/typechain-types/index.js.map +1 -1
- package/package.json +32 -38
- package/dist/typechain-types/@openzeppelin/contracts/utils/math/Math.js.map +0 -1
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/Math__factory.d.ts +0 -26
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/Math__factory.js +0 -45
- package/dist/typechain-types/factories/@openzeppelin/contracts/utils/math/Math__factory.js.map +0 -1
- package/utils/README.md +0 -198
- package/utils/acquit-markdown.js +0 -53
- package/utils/docify.utils.js +0 -99
- package/utils/file-dependencies.js +0 -203
- package/utils/solidity-docgen-helpers.js +0 -31
- package/utils/test-docgen.js +0 -117
package/utils/README.md
DELETED
@@ -1,198 +0,0 @@
-### UTILS
-
-#### Docify
-
-Generates documentation in markdown format from natspec docs
-
-##### Usage
-Add to `package.json` file solidity compiler version (add version you use), solidity-docgen 0.6 util and shortcut to run command
-
-`devDependencies` section
-
-```
-"solc": "0.8.23",
-"solidity-docgen": "0.6.0-beta.36",
-```
-
-`scripts` section
-```
-"docify": "yarn hardhat docgen; npx solidity-utils-docify"
-```
-
-You can set output directory with ENV variable:
-```
-"docify": "DOCGEN_OUTPUT_DIR=./docs npx solidity-utils-docify"
-```
-
-Then set appopriate settings for docgen in `hardhat.config.js` file
-
-```JavaScript
-require('solidity-docgen');
-
-// You can use 1inch templates built-in templates
-const { oneInchTemplates } = require('@1inch/solidity-utils/docgen');
-
-module.exports = {
-    ...
-    docgen: {
-        outputDir: 'docs', // Can be omitted, docs used by default
-        templates: oneInchTemplates(), // 1inch templates
-        pages: 'files', // Doc output format for 1inch templates
-        exclude: ['mocks', 'test'], // Directories to exclude from generation
-    }
-}
-
-```
-
-#### Dependencies list (imports-list)
-
-Lists all imports recursively for the given solidity contract file.
-
-##### Usage
-```
-npx imports-list -i <solidity file> [-a <alias list>]
-```
-
-Available parameters
-```
-Options:
-  -i, --input <input>     file to get dependencies for
-  -a, --alias [alias...]  projects alias list
-  -h, --help              display help for command
-```
-Aliases are used to provide source code for third-party projects.
-For example, your contract uses imports from your other project and import is defined as
-```
-import "@1inch/otherproject/contracts/dependency.sol";
-```
-and you've got source code for `@1inch/otherproject` locally. Then you provide local path for the project to rip dependencies from `dependency.sol` as well.
-If there are several dependencies they should be provided using space as separator.
-
-##### Example
-File imports
-```Solidity
-#rootFile.sol
-import '@1inch/otherproject/contracts/dependency.sol'
-
-#@1inch/otherproject/contracts/dependency.sol
-import 'helpers/helper.sol'
-```
-File and folder structure
-```
-rootFolder/
-
--- mainProject/
----- contracts/
------- rootFile.sol
-
--- dependencyProject/
----- helpers/
------- helper.sol
----- dependency.sol
-```
-Command
-```
-rootFolder/mainProject % npx imports-list -i './contracts/rootFile.sol' -a '@1inch/otherproject' '../dependencyProject'
-```
-Output
-```
-Project => root
-not set
-
-Project => @1inch/otherproject
-../otherproject/contracts/dependency.sol
-../otherproject/contracts/helpers/helper.sol
-```
-
-#### Test documentation generator (test-docgen)
-Script generates documentation for tests in markdown format.
-Give descriptions for `describe` and `it` sections and build documentation using these descriptions.
-
-##### Example
-Test described as shown below
-
-```JavaScript
-// Test suite
-describe('My feature', function() {
-    // Nested test suite
-    describe("My subfeature", function() {
-        /*
-            **Test case 1**
-            Test case should work
-        */
-        it("My case", function() {
-            // code here
-        })
-    })
-})
-```
-will generated the following output
-```Markdown
-
-# My feature
-
-Test suite
-
-## My subfeature
-
-Nested test suite
-
-### My case
-
-**Test case 1**
-Test case should work
-```
-
-##### Installation
-- Before use install documentation parser
-```
-yarn add acquit --dev
-```
-- Optionally configure script for default usage. Add to `script` section in `package.json`
-```
-"test:docs": "npx test-docgen"
-```
-- Optionally configure script for generating test list only. Add to `script` section in `package.json`
-```
-"test:docs": "npx test-docgen -l"
-```
-
-##### Usage
-If script configured
-```
-yarn test:docs
-```
-or
-```
-npx test-docgen
-```
-
-Available parameters
-```
-Options:
-  -i, --input <input>      tests directory (default: "test")
-  -x, --exclude [exclude]  exclude directories and files. omit argument to exclude all subdirectories (default: false)
-  -o, --output <output>    file to write output (default: "TESTS.md")
-  -c, --code               include code (default: false)
-  -l, --list               list tests only, do not include description (default: false)
-  -d, --debug              debug mode (default: false)
-  -h, --help               display help for command
-```
-##### Examples
-Generate docs with default input and output
-```
-npx test-docgen
-```
-
-Generate docs for files in folders `tests/mocks` and `tests/utils`
-```
-npx test-docgen -i "tests/mocks;tests/utils"
-```
-Exclude from docs file `test/mock-exclude.js` and `test/utils folder`
-```
-npx test-docgen -x "tests/mock-exclude.js;tests/utils"
-```
-Generate list of tests only
-```
-npx test-docgen -l
-```
package/utils/acquit-markdown.js
DELETED
@@ -1,53 +0,0 @@
-'use strict';
-
-module.exports = plugin;
-
-function plugin(instance, options) {
-    if (instance) {
-        instance.output(markdown(options, instance));
-    } else {
-        const acquit = require('acquit');
-        acquit.output(markdown(options, acquit));
-    }
-};
-
-plugin.markdown = markdown;
-
-function markdown(options, acquit) {
-    return function(res) {
-        return recurse(res, 0, options, acquit);
-    };
-}
-
-function recurse(blocks, level, options, acquit) {
-    var str = '';
-    var hashes = getHashes(level + 1);
-    for (var i = 0; i < blocks.length; ++i) {
-        if (blocks[i].contents) {
-            str += hashes + ' ' + (blocks[i].type === 'it' ? (!options || !options.it ? 'It ': '') : '') +
-                blocks[i].contents;
-        }
-        str += '\n\n';
-        for (var j = 0; j < blocks[i].comments.length; ++j) {
-            str += acquit.trimEachLine(blocks[i].comments[j]);
-            str += '\n\n';
-        }
-        if (blocks[i].type === 'describe') {
-            str += recurse(blocks[i].blocks, level + 1, options, acquit);
-        } else if (blocks[i].code.trim() && options.code) {
-            str += ['```javascript', blocks[i].code, '```'].join('\n');
-        }
-        if (i + 1 < blocks.length) {
-            str += '\n\n';
-        }
-    }
-    return str;
-}
-
-function getHashes(level) {
-    var str = '';
-    for (var i = 0; i < level; ++i) {
-        str += '#';
-    }
-    return str;
-}
package/utils/docify.utils.js
DELETED
@@ -1,99 +0,0 @@
-#!/usr/bin/env node
-const BASE_DIR = 'docs';
-const OUTPUT_DIR = process.env.DOCGEN_OUTPUT_DIR || `${BASE_DIR}`;
-
-const fs = require('fs');
-const path = require('path');
-
-function getFileNameWithoutExtension (fileName) {
-    return fileName.substr(0, fileName.lastIndexOf('.'));
-}
-
-function getReadmes (targetPath) {
-    let result = [];
-    const readmePath = path.join(targetPath, 'README.md');
-    if (!fs.existsSync(readmePath)) {
-        const content = `# ${path.basename(targetPath)}\n`;
-        result.push({ path: readmePath, content });
-    }
-    const childDirs = fs.readdirSync(targetPath, { withFileTypes: true }).filter(item => item.isDirectory());
-    for (const dir of childDirs) {
-        result = result.concat(getReadmes(path.join(targetPath, dir.name)));
-    }
-    return result;
-}
-
-function generateReadmes (readmes) {
-    for (const readme of readmes) {
-        fs.writeFileSync(readme.path, readme.content);
-    }
-}
-
-function getSummary (targetPath) {
-    function getSummaryRoot (summaryTargetPath, indentation) {
-        function specialCaseRoot (item) {
-            if (item.indentation >= 0) {
-                return item;
-            }
-            return ({
-                name: 'Main Readme',
-                path: item.path,
-                indentation: 0,
-            });
-        }
-
-        const items = fs.readdirSync(summaryTargetPath, { withFileTypes: true });
-        let result = [specialCaseRoot({
-            name: path.basename(summaryTargetPath),
-            path: path.relative(targetPath, path.join(summaryTargetPath, 'README.md')).replaceAll('\\', '/'),
-            indentation: indentation - 1,
-        })];
-        for (const dir of items.filter(item => item.isDirectory())) {
-            result = result.concat(getSummaryRoot(path.join(summaryTargetPath, dir.name), indentation + 1));
-        }
-        result = result
-            .concat(items
-                .filter(item => !item.isDirectory() &&
-                    !item.name.endsWith('README.md') &&
-                    !item.name.endsWith('SUMMARY.md'))
-                .map(file => ({
-                    name: getFileNameWithoutExtension(file.name),
-                    path: path.relative(targetPath, path.join(summaryTargetPath, file.name)).replaceAll('\\', '/'),
-                    indentation,
-                })));
-        return result;
-    }
-
-    function generateContent (summaryTree) {
-        const lines = summaryTree.map(x => `${'\t'.repeat(x.indentation)}* [${x.name}](${x.path})`).join('\n');
-        return `# Table of contents\n\n${lines}`;
-    }
-
-    return generateContent(getSummaryRoot(targetPath, 0));
-}
-
-function generateSummary (targetPath, summary) {
-    fs.writeFileSync(path.join(targetPath, 'SUMMARY.md'), summary);
-}
-
-function generateGitbookFiles () {
-    if (fs.existsSync(path.join(BASE_DIR, 'README.md'))){
-        fs.copyFileSync(path.join(BASE_DIR, 'README.md'), path.join(OUTPUT_DIR, 'README.md'));
-    }
-
-    const readmesToGenerate = getReadmes(OUTPUT_DIR);
-    const summary = getSummary(OUTPUT_DIR);
-
-    generateReadmes(readmesToGenerate);
-    generateSummary(OUTPUT_DIR, summary);
-}
-
-function removeUnwantedDocs () {
-    const unwantedDirs = ['mocks', 'tests'];
-    for (const unwantedDir of unwantedDirs) {
-        fs.rmSync(path.join(OUTPUT_DIR, unwantedDir), { force: true, recursive: true });
-    }
-}
-
-generateGitbookFiles();
-removeUnwantedDocs();
package/utils/file-dependencies.js
DELETED
@@ -1,203 +0,0 @@
-#!/usr/bin/env node
-
-const fs = require('fs');
-const path = require('path');
-const commander = require('commander');
-const program = new commander.Command();
-
-program
-    .name("imports-list")
-    .usage("-i <root file> [options]")
-    .requiredOption("-i, --input <input>", "file to get dependencies for")
-    .option("-a, --alias [alias...]", "alias list")
-
-
-program.parse(process.argv);
-
-const options = program.opts();
-// process input parameter
-const rootPath = options.input;
-// process alias parameter
-const aliases = {};
-const alias_opt = options.alias;
-if (alias_opt != undefined && alias_opt.length > 0){
-    if (alias_opt.length % 2 == 0){
-        for(let i=0; i<alias_opt.length; i+=2){
-            aliases[alias_opt[i]] = alias_opt[i+1];
-        }
-    }
-    else{
-        let warning_notice = "\nWARNING:Skipping alias parameter, there are odd number of arguments.";
-        warning_notice += "\nAliases should be set in the form of pairs seperated by space.";
-        warning_notice += "\nUsage: -a alias1 path1 alias2 path2";
-        console.warn(warning_notice);
-    }
-}
-
-// Script body
-const regexp = /^import ({.+} from )?"(?<source>.+)";/gm;
-
-const rootString = path.relative(process.cwd(), rootPath);
-
-let rootNode = new Node(null, new NodeItem('root', rootPath, path.resolve(rootPath)));
-let rootImports = extractImports(rootNode);
-rootImports = flattenResults(rootImports);
-
-// Scripts
-
-function Node(parentItem, nodeItem){
-    this.parent = parentItem;
-    this.import = nodeItem;
-}
-
-function NodeItem(name, source, path){
-    this.name = name; // @1inch/solidity-utils/contracts/EthReceiver.sol
-    this.source = source; // ./contracts/EthReceiver.sol
-    this.path = path; // /usrs/james/Desktop/1inch/solidity-utils/contracts/EthReceiver.sol
-}
-
-function extractFileImports(file){
-    let imports = [];
-
-    if (file == null)
-        return imports;
-
-    let content = fs.readFileSync(file);
-    let result = content.toString().matchAll(regexp);
-
-    for (const match of result) {
-        let source = match.groups['source'];
-        if (source != null){
-            imports.push(source);
-        }
-        else{
-            console.log('No source found for import: ', match);
-        }
-    }
-
-    return imports;
-}
-
-function getAliasNameIfExists(source){
-    for(let alias in aliases){
-        if(source.startsWith(alias)){
-            return alias;
-        }
-    }
-
-    return false;
-}
-
-function getAliasIfExists(source){
-    for(let alias in aliases){
-        if(source.startsWith(alias)){
-            source = source.replace(alias, aliases[alias]);
-            return source;
-        }
-    }
-
-    return false;
-}
-
-function isAlias(dependencyLink){
-    return dependencyLink.startsWith('@');
-}
-
-function resolveProject(parentProject, dependencyLink){
-    if (isAlias(dependencyLink)) {
-        let alias = getAliasNameIfExists(dependencyLink);
-        if (alias){
-            return alias;
-        }
-        else{
-            return null;
-        }
-    }
-    else{
-        return parentProject;
-    }
-}
-
-function resolvePath(parentNode, dependencyLink){
-    if (isAlias(dependencyLink)) {
-        let alias = getAliasIfExists(dependencyLink)
-        if( alias ){
-            return path.join(process.cwd(), alias);
-        }
-        else{
-            return null;
-        }
-    }
-    let source_dir = path.dirname(parentNode.path);
-    return path.join(source_dir, dependencyLink);
-}
-
-function resolveLinks(parentNode, dependencyLinks, nodes = []){
-    for (let dependencyLink of dependencyLinks){
-        let project = resolveProject(parentNode.name, dependencyLink);
-        let dependencyPath = resolvePath(parentNode, dependencyLink);
-        let node = new Node(parentNode, new NodeItem(project, dependencyLink, dependencyPath));
-        nodes.push(node);
-    }
-
-    return nodes;
-}
-
-function extractImports(node, dependencies = []){
-    let dependencyLinks = extractFileImports(node.import.path);
-    let nodesToImport = resolveLinks(node.import, dependencyLinks);
-    for (let nodeToImport of nodesToImport){
-        let found = dependencies.find(item => item.import.path == nodeToImport.import.path);
-
-        if (!found){
-            dependencies.push(nodeToImport);
-            extractImports(nodeToImport, dependencies);
-        }
-    }
-    return dependencies;
-}
-
-function aliasCompare( a, b ) {
-    if ( a.import.name < b.import.name ){
-        return 1;
-    }
-    if ( a.import.name > b.import.name ){
-        return -1;
-    }
-    if( a.import.path < b.import.path ){
-        return -1;
-    }
-    if (a.import.path > b.import.path ){
-        return 1;
-    }
-    if ( a.import.source < b.import.source ){
-        return -1;
-    }
-    if ( a.import.source > b.import.source ){
-        return 1;
-    }
-    return 0;
-}
-
-function flattenResults(dependencyNodes){
-    dependencyNodes.sort(aliasCompare);
-
-    let prevProject = null;
-    for(let dependencyNode of dependencyNodes){
-        if (dependencyNode.import.name != prevProject){
-            console.log('\nProject =>', dependencyNode.import.name);
-            prevProject = dependencyNode.import.name;
-        }
-
-        let relativePath = 'not set';
-        if (dependencyNode.import.path != null){
-            relativePath = path.relative(process.cwd(), dependencyNode.import.path);
-        }
-        else{
-            relativePath = dependencyNode.import.source;
-        }
-
-
-        console.log(relativePath);
-    }
-}
package/utils/solidity-docgen-helpers.js
DELETED
@@ -1,31 +0,0 @@
-module.exports = {
-    removeNewlines (str) {
-        return str.replace(/\r?\n/g, ' ');
-    },
-    withoutFirstElement (arr) {
-        return arr.splice(1);
-    },
-    getRelativeDocPath (contractName, contractsDir, sources) {
-        function getRelativePath (contractPath) {
-            if (contractPath){
-                if (contractPath.startsWith(contractsDir)) {
-                    return contractPath.substr(contractsDir.length + 1).replace('.sol', '.md');
-                }
-                if (contractPath.startsWith('@openzeppelin')) {
-                    const regexMatch = contractPath.match(/@openzeppelin\/contracts\/(.+)\/([^/]+)\.sol/);
-                    return `https://docs.openzeppelin.com/contracts/3.x/api/${regexMatch[1]}#${regexMatch[2]}`;
-                }
-            }
-            return null;
-        }
-
-        const sourcesKeys = Object.keys(sources);
-        const contractPath = sourcesKeys.find(x => x.includes(contractName));
-
-        if (!contractPath){
-            console.log('WARNING: file ' + contractName + '.sol not found in source files.');
-        }
-
-        return getRelativePath(contractPath);
-    },
-};
package/utils/test-docgen.js
DELETED
@@ -1,117 +0,0 @@
-#!/usr/bin/env node
-
-const commander = require('commander');
-const fs = require('fs');
-const path = require('path');
-const program = new commander.Command();
-
-
-program
-    .option('-i, --input <input>', 'tests directory', 'test')
-    .option('-x, --exclude [exclude]', 'exclude directories and files. omit argument to exclude all subdirectories', false)
-    .option('-o, --output <output>', 'file to write output', 'TESTS.md')
-    .option('-c, --code', 'include code', false)
-    .option('-l, --list', 'list tests only, do not include description', false)
-    .option('-d, --debug', 'debug mode', false);
-
-
-program.parse(process.argv);
-
-const options = program.opts();
-const debugMode = options.debug;
-const includeCode = options.code ? true : false;
-const listOnly = options.list ? true : false;
-const includeSubs = !(options.exclude === true);
-const inputDir = options.input.split(';');
-const outputFile = options.output;
-const excludeDirs = (typeof options.exclude == 'boolean') ? [] : options.exclude.split(';');
-
-if (debugMode){
-    console.log('----- DEBUG MODE -----');
-    console.log('options:', options);
-    console.log();
-    console.log('parsed options:');
-    console.log(
-        ' includeCode:', includeCode,
-        '\n listOnly:', listOnly,
-        '\n inputDir:', inputDir,
-        '\n outputFile:', outputFile,
-        '\n includeSubs:', includeSubs,
-        '\n excludeDirs:', excludeDirs,
-        '\n debugMode:', debugMode
-    );
-    console.log('\nRemaining arguments: ', program.args);
-    console.log('\nFiles and directories found:');
-}
-
-let files = [];
-function throughDirectory (directory, includeSubs, excludeDirs) {
-    if (!fs.existsSync(directory)) {
-        console.log('WARNING! Directory does not exist:', directory, '=> skipped');
-        return;
-    }
-
-    fs.readdirSync(directory).forEach(file => {
-        const absolute = path.join(directory, file);
-        if (debugMode) console.log(absolute);
-        if (!excludeDirs.includes(absolute)) {
-            if (fs.statSync(absolute).isDirectory()){
-                if (includeSubs) throughDirectory(absolute, includeSubs, excludeDirs);
-            }
-            else files.push(absolute);
-        }
-    });
-}
-
-inputDir.forEach(dir => {
-    throughDirectory(dir, includeSubs, excludeDirs);
-});
-
-if (debugMode) console.log('\nfiles:', files);
-
-
-
-//Script
-const acquitMd = require('acquit')();
-const acquitJson = require('acquit')();
-require('./acquit-markdown.js')(acquitMd, { code: includeCode, it: true });
-
-const legend = {};
-let content;
-let markdown = '';
-let legendMd = '';
-
-if (debugMode) console.log('\nFiles processed:');
-files.forEach((file) => {
-    content = fs.readFileSync(file).toString();
-    legend.blocks = acquitJson.parse(content);
-    legend.contents = file;
-    legendMd += buildLegend(legend, 1, listOnly);
-    markdown += acquitMd.parse(content).toString();
-    markdown += '\n';
-    if (debugMode) console.log(' ', file, '=> done');
-});
-
-content = listOnly ? legendMd : legendMd + markdown;
-
-fs.writeFileSync(outputFile, content);
-console.log('done');
-
-function buildLegend (block, depth, listOnly) {
-    // console.log(depth, block.contents);
-    const url = (block.contents == null)
-        ? ''
-        : block.contents.toLowerCase().trim()
-            .split(' ').join('-')
-            .split(/,|\+|\/|:|\(|\)/).join('')
-            .replace('--', '-');
-    let legend = listOnly
-        ? Array(depth).join(' ') + '* ' + block.contents + '\n'
-        : Array(depth).join(' ') + '* [' + block.contents + '](#' + url + ')\n';
-    if (block.blocks) {
-        legend += block.blocks.map(function (child) {
-            return buildLegend(child, depth + 1, listOnly);
-        }).join('');
-    }
-    return legend;
-}