@devtion/actions 0.0.0-8bb9489 → 0.0.0-9239207
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +322 -262
- package/dist/index.node.js +323 -261
- package/dist/types/src/helpers/constants.d.ts +5 -2
- package/dist/types/src/helpers/constants.d.ts.map +1 -1
- package/dist/types/src/helpers/contracts.d.ts.map +1 -1
- package/dist/types/src/helpers/crypto.d.ts +1 -0
- package/dist/types/src/helpers/crypto.d.ts.map +1 -1
- package/dist/types/src/helpers/database.d.ts +8 -0
- package/dist/types/src/helpers/database.d.ts.map +1 -1
- package/dist/types/src/helpers/security.d.ts +1 -1
- package/dist/types/src/helpers/security.d.ts.map +1 -1
- package/dist/types/src/helpers/storage.d.ts +1 -1
- package/dist/types/src/helpers/storage.d.ts.map +1 -1
- package/dist/types/src/helpers/utils.d.ts +34 -20
- package/dist/types/src/helpers/utils.d.ts.map +1 -1
- package/dist/types/src/helpers/verification.d.ts +3 -2
- package/dist/types/src/helpers/verification.d.ts.map +1 -1
- package/dist/types/src/helpers/vm.d.ts.map +1 -1
- package/dist/types/src/index.d.ts +2 -2
- package/dist/types/src/index.d.ts.map +1 -1
- package/dist/types/src/types/index.d.ts +9 -3
- package/dist/types/src/types/index.d.ts.map +1 -1
- package/package.json +3 -4
- package/src/helpers/constants.ts +40 -32
- package/src/helpers/contracts.ts +3 -3
- package/src/helpers/database.ts +13 -0
- package/src/helpers/security.ts +8 -5
- package/src/helpers/services.ts +2 -2
- package/src/helpers/storage.ts +3 -3
- package/src/helpers/utils.ts +299 -254
- package/src/helpers/verification.ts +6 -6
- package/src/helpers/vm.ts +9 -4
- package/src/index.ts +3 -1
- package/src/types/index.ts +23 -3
package/dist/index.mjs
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @module @p0tion/actions
|
|
3
|
-
* @version 1.
|
|
3
|
+
* @version 1.2.5
|
|
4
4
|
* @file A set of actions and helpers for CLI commands
|
|
5
5
|
* @copyright Ethereum Foundation 2022
|
|
6
6
|
* @license MIT
|
|
@@ -15,7 +15,6 @@ import { onSnapshot, query, collection, getDocs, doc, getDoc, where, Timestamp,
|
|
|
15
15
|
import { zKey, groth16 } from 'snarkjs';
|
|
16
16
|
import crypto from 'crypto';
|
|
17
17
|
import blake from 'blakejs';
|
|
18
|
-
import { utils } from 'ffjavascript';
|
|
19
18
|
import winston from 'winston';
|
|
20
19
|
import { pipeline } from 'stream';
|
|
21
20
|
import { promisify } from 'util';
|
|
@@ -27,10 +26,10 @@ import { EC2Client, RunInstancesCommand, DescribeInstanceStatusCommand, StartIns
|
|
|
27
26
|
import { SSMClient, SendCommandCommand, GetCommandInvocationCommand } from '@aws-sdk/client-ssm';
|
|
28
27
|
import dotenv from 'dotenv';
|
|
29
28
|
|
|
30
|
-
// Main part for the
|
|
31
|
-
const potFileDownloadMainUrl = `https://
|
|
32
|
-
// Main part for the
|
|
33
|
-
const potFilenameTemplate = `
|
|
29
|
+
// Main part for the PPoT Phase 1 Trusted Setup URLs to download PoT files.
|
|
30
|
+
const potFileDownloadMainUrl = `https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/`;
|
|
31
|
+
// Main part for the PPoT Phase 1 Trusted Setup PoT files to be downloaded.
|
|
32
|
+
const potFilenameTemplate = `ppot_0080_`;
|
|
34
33
|
// The genesis zKey index.
|
|
35
34
|
const genesisZkeyIndex = `00000`;
|
|
36
35
|
// The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
|
|
@@ -47,6 +46,8 @@ const verifierSmartContractAcronym = "verifier";
|
|
|
47
46
|
const ec2InstanceTag = "p0tionec2instance";
|
|
48
47
|
// The name of the VM startup script file.
|
|
49
48
|
const vmBootstrapScriptFilename = "bootstrap.sh";
|
|
49
|
+
// Match hash output by snarkjs in transcript log
|
|
50
|
+
const contribHashRegex = new RegExp("Contribution.+Hash.+\n\t\t.+\n\t\t.+\n.+\n\t\t.+\r?\n");
|
|
50
51
|
/**
|
|
51
52
|
* Define the supported VM configuration types.
|
|
52
53
|
* @dev the VM configurations can be retrieved at https://aws.amazon.com/ec2/instance-types/
|
|
@@ -104,112 +105,116 @@ const vmConfigurationTypes = {
|
|
|
104
105
|
*/
|
|
105
106
|
const powersOfTauFiles = [
|
|
106
107
|
{
|
|
107
|
-
ref: "https://
|
|
108
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_01.ptau",
|
|
108
109
|
size: 0.000084
|
|
109
110
|
},
|
|
110
111
|
{
|
|
111
|
-
ref: "https://
|
|
112
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_02.ptau",
|
|
112
113
|
size: 0.000086
|
|
113
114
|
},
|
|
114
115
|
{
|
|
115
|
-
ref: "https://
|
|
116
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_03.ptau",
|
|
116
117
|
size: 0.000091
|
|
117
118
|
},
|
|
118
119
|
{
|
|
119
|
-
ref: "https://
|
|
120
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_04.ptau",
|
|
120
121
|
size: 0.0001
|
|
121
122
|
},
|
|
122
123
|
{
|
|
123
|
-
ref: "https://
|
|
124
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_05.ptau",
|
|
124
125
|
size: 0.000117
|
|
125
126
|
},
|
|
126
127
|
{
|
|
127
|
-
ref: "https://
|
|
128
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_06.ptau",
|
|
128
129
|
size: 0.000153
|
|
129
130
|
},
|
|
130
131
|
{
|
|
131
|
-
ref: "https://
|
|
132
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_07.ptau",
|
|
132
133
|
size: 0.000225
|
|
133
134
|
},
|
|
134
135
|
{
|
|
135
|
-
ref: "https://
|
|
136
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_08.ptau",
|
|
136
137
|
size: 0.0004
|
|
137
138
|
},
|
|
138
139
|
{
|
|
139
|
-
ref: "https://
|
|
140
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_09.ptau",
|
|
140
141
|
size: 0.000658
|
|
141
142
|
},
|
|
142
143
|
{
|
|
143
|
-
ref: "https://
|
|
144
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_10.ptau",
|
|
144
145
|
size: 0.0013
|
|
145
146
|
},
|
|
146
147
|
{
|
|
147
|
-
ref: "https://
|
|
148
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_11.ptau",
|
|
148
149
|
size: 0.0023
|
|
149
150
|
},
|
|
150
151
|
{
|
|
151
|
-
ref: "https://
|
|
152
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_12.ptau",
|
|
152
153
|
size: 0.0046
|
|
153
154
|
},
|
|
154
155
|
{
|
|
155
|
-
ref: "https://
|
|
156
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_13.ptau",
|
|
156
157
|
size: 0.0091
|
|
157
158
|
},
|
|
158
159
|
{
|
|
159
|
-
ref: "https://
|
|
160
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_14.ptau",
|
|
160
161
|
size: 0.0181
|
|
161
162
|
},
|
|
162
163
|
{
|
|
163
|
-
ref: "https://
|
|
164
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_15.ptau",
|
|
164
165
|
size: 0.0361
|
|
165
166
|
},
|
|
166
167
|
{
|
|
167
|
-
ref: "https://
|
|
168
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_16.ptau",
|
|
168
169
|
size: 0.0721
|
|
169
170
|
},
|
|
170
171
|
{
|
|
171
|
-
ref: "https://
|
|
172
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_17.ptau",
|
|
172
173
|
size: 0.144
|
|
173
174
|
},
|
|
174
175
|
{
|
|
175
|
-
ref: "https://
|
|
176
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_18.ptau",
|
|
176
177
|
size: 0.288
|
|
177
178
|
},
|
|
178
179
|
{
|
|
179
|
-
ref: "https://
|
|
180
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_19.ptau",
|
|
180
181
|
size: 0.576
|
|
181
182
|
},
|
|
182
183
|
{
|
|
183
|
-
ref: "https://
|
|
184
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_20.ptau",
|
|
184
185
|
size: 1.1
|
|
185
186
|
},
|
|
186
187
|
{
|
|
187
|
-
ref: "https://
|
|
188
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_21.ptau",
|
|
188
189
|
size: 2.3
|
|
189
190
|
},
|
|
190
191
|
{
|
|
191
|
-
ref: "https://
|
|
192
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_22.ptau",
|
|
192
193
|
size: 4.5
|
|
193
194
|
},
|
|
194
195
|
{
|
|
195
|
-
ref: "https://
|
|
196
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_23.ptau",
|
|
196
197
|
size: 9.0
|
|
197
198
|
},
|
|
198
199
|
{
|
|
199
|
-
ref: "https://
|
|
200
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_24.ptau",
|
|
200
201
|
size: 18.0
|
|
201
202
|
},
|
|
202
203
|
{
|
|
203
|
-
ref: "https://
|
|
204
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_25.ptau",
|
|
204
205
|
size: 36.0
|
|
205
206
|
},
|
|
206
207
|
{
|
|
207
|
-
ref: "https://
|
|
208
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_26.ptau",
|
|
208
209
|
size: 72.0
|
|
209
210
|
},
|
|
210
211
|
{
|
|
211
|
-
ref: "https://
|
|
212
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_27.ptau",
|
|
212
213
|
size: 144.0
|
|
214
|
+
},
|
|
215
|
+
{
|
|
216
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_final.ptau",
|
|
217
|
+
size: 288.0
|
|
213
218
|
}
|
|
214
219
|
];
|
|
215
220
|
/**
|
|
@@ -339,7 +344,9 @@ const commonTerms = {
|
|
|
339
344
|
finalizeCircuit: "finalizeCircuit",
|
|
340
345
|
finalizeCeremony: "finalizeCeremony",
|
|
341
346
|
downloadCircuitArtifacts: "downloadCircuitArtifacts",
|
|
342
|
-
transferObject: "transferObject"
|
|
347
|
+
transferObject: "transferObject",
|
|
348
|
+
bandadaValidateProof: "bandadaValidateProof",
|
|
349
|
+
checkNonceOfSIWEAddress: "checkNonceOfSIWEAddress"
|
|
343
350
|
}
|
|
344
351
|
};
|
|
345
352
|
|
|
@@ -705,8 +712,8 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
|
|
|
705
712
|
// @ts-ignore
|
|
706
713
|
const response = await fetch(chunksWithUrls[i].preSignedUrl, {
|
|
707
714
|
retryOptions: {
|
|
708
|
-
retryInitialDelay: 500,
|
|
709
|
-
socketTimeout: 60000,
|
|
715
|
+
retryInitialDelay: 500, // 500 ms.
|
|
716
|
+
socketTimeout: 60000, // 60 seconds.
|
|
710
717
|
retryMaxDuration: 300000 // 5 minutes.
|
|
711
718
|
},
|
|
712
719
|
method: "PUT",
|
|
@@ -1003,6 +1010,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
|
|
|
1003
1010
|
]);
|
|
1004
1011
|
return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
|
|
1005
1012
|
};
|
|
1013
|
+
/**
|
|
1014
|
+
* Query all ceremonies
|
|
1015
|
+
* @notice get all ceremonies from the database.
|
|
1016
|
+
* @dev this is a helper for the CLI ceremony methods.
|
|
1017
|
+
* @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
|
|
1018
|
+
* @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
|
|
1019
|
+
*/
|
|
1020
|
+
const getAllCeremonies = async (firestoreDatabase) => {
|
|
1021
|
+
const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
|
|
1022
|
+
return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
|
|
1023
|
+
};
|
|
1006
1024
|
|
|
1007
1025
|
/**
|
|
1008
1026
|
* @hidden
|
|
@@ -1051,193 +1069,22 @@ const compareHashes = async (path1, path2) => {
|
|
|
1051
1069
|
};
|
|
1052
1070
|
|
|
1053
1071
|
/**
|
|
1054
|
-
*
|
|
1055
|
-
* @
|
|
1056
|
-
* @
|
|
1057
|
-
* @param cleanup <boolean> - whether to delete the r1cs file after parsing
|
|
1058
|
-
* @returns any - the data to pass to the cloud function for setup and the circuit artifacts
|
|
1072
|
+
* Return a string with double digits if the provided input is one digit only.
|
|
1073
|
+
* @param in <number> - the input number to be converted.
|
|
1074
|
+
* @returns <string> - the two digits stringified number derived from the conversion.
|
|
1059
1075
|
*/
|
|
1060
|
-
const
|
|
1061
|
-
|
|
1062
|
-
|
|
1063
|
-
|
|
1064
|
-
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
|
|
1072
|
-
if (!data.circuits || data.circuits.length === 0)
|
|
1073
|
-
throw new Error("You need to provide the data for at least 1 circuit.");
|
|
1074
|
-
// validate that the end date is in the future
|
|
1075
|
-
let endDate;
|
|
1076
|
-
let startDate;
|
|
1077
|
-
try {
|
|
1078
|
-
endDate = new Date(data.endDate);
|
|
1079
|
-
startDate = new Date(data.startDate);
|
|
1080
|
-
}
|
|
1081
|
-
catch (error) {
|
|
1082
|
-
throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
|
|
1083
|
-
}
|
|
1084
|
-
if (endDate <= startDate)
|
|
1085
|
-
throw new Error("The end date should be greater than the start date.");
|
|
1086
|
-
const currentDate = new Date();
|
|
1087
|
-
if (endDate <= currentDate || startDate <= currentDate)
|
|
1088
|
-
throw new Error("The start and end dates should be in the future.");
|
|
1089
|
-
// validate penalty
|
|
1090
|
-
if (data.penalty <= 0)
|
|
1091
|
-
throw new Error("The penalty should be greater than zero.");
|
|
1092
|
-
const circuits = [];
|
|
1093
|
-
const urlPattern = /(https?:\/\/[^\s]+)/g;
|
|
1094
|
-
const commitHashPattern = /^[a-f0-9]{40}$/i;
|
|
1095
|
-
const circuitArtifacts = [];
|
|
1096
|
-
for (let i = 0; i < data.circuits.length; i++) {
|
|
1097
|
-
const circuitData = data.circuits[i];
|
|
1098
|
-
const artifacts = circuitData.artifacts;
|
|
1099
|
-
circuitArtifacts.push({
|
|
1100
|
-
artifacts: artifacts
|
|
1101
|
-
});
|
|
1102
|
-
// where we storing the r1cs downloaded
|
|
1103
|
-
const localR1csPath = `./${circuitData.name}.r1cs`;
|
|
1104
|
-
// where we storing the wasm downloaded
|
|
1105
|
-
const localWasmPath = `./${circuitData.name}.wasm`;
|
|
1106
|
-
// download the r1cs to extract the metadata
|
|
1107
|
-
const streamPipeline = promisify(pipeline);
|
|
1108
|
-
// Make the call.
|
|
1109
|
-
const responseR1CS = await fetch(artifacts.r1csStoragePath);
|
|
1110
|
-
// Handle errors.
|
|
1111
|
-
if (!responseR1CS.ok && responseR1CS.status !== 200)
|
|
1112
|
-
throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
|
|
1113
|
-
await streamPipeline(responseR1CS.body, createWriteStream(localR1csPath));
|
|
1114
|
-
// Write the file locally
|
|
1115
|
-
// extract the metadata from the r1cs
|
|
1116
|
-
const metadata = getR1CSInfo(localR1csPath);
|
|
1117
|
-
// download wasm too to ensure it's available
|
|
1118
|
-
const responseWASM = await fetch(artifacts.wasmStoragePath);
|
|
1119
|
-
if (!responseWASM.ok && responseWASM.status !== 200)
|
|
1120
|
-
throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
|
|
1121
|
-
await streamPipeline(responseWASM.body, createWriteStream(localWasmPath));
|
|
1122
|
-
// validate that the circuit hash and template links are valid
|
|
1123
|
-
const template = circuitData.template;
|
|
1124
|
-
const URLMatch = template.source.match(urlPattern);
|
|
1125
|
-
if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
|
|
1126
|
-
throw new Error("You should provide the URL to the circuits templates on GitHub.");
|
|
1127
|
-
const hashMatch = template.commitHash.match(commitHashPattern);
|
|
1128
|
-
if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
|
|
1129
|
-
throw new Error("You should provide a valid commit hash of the circuit templates.");
|
|
1130
|
-
// calculate the hash of the r1cs file
|
|
1131
|
-
const r1csBlake2bHash = await blake512FromPath(localR1csPath);
|
|
1132
|
-
const circuitPrefix = extractPrefix(circuitData.name);
|
|
1133
|
-
// filenames
|
|
1134
|
-
const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
|
|
1135
|
-
const r1csCompleteFilename = `${circuitData.name}.r1cs`;
|
|
1136
|
-
const wasmCompleteFilename = `${circuitData.name}.wasm`;
|
|
1137
|
-
const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
|
|
1138
|
-
const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
|
|
1139
|
-
// storage paths
|
|
1140
|
-
const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
|
|
1141
|
-
const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
|
|
1142
|
-
const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
|
|
1143
|
-
const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
|
|
1144
|
-
const files = {
|
|
1145
|
-
potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
|
|
1146
|
-
r1csFilename: r1csCompleteFilename,
|
|
1147
|
-
wasmFilename: wasmCompleteFilename,
|
|
1148
|
-
initialZkeyFilename: firstZkeyCompleteFilename,
|
|
1149
|
-
potStoragePath: potStorageFilePath,
|
|
1150
|
-
r1csStoragePath: r1csStorageFilePath,
|
|
1151
|
-
wasmStoragePath: wasmStorageFilePath,
|
|
1152
|
-
initialZkeyStoragePath: zkeyStorageFilePath,
|
|
1153
|
-
r1csBlake2bHash: r1csBlake2bHash
|
|
1154
|
-
};
|
|
1155
|
-
// validate that the compiler hash is a valid hash
|
|
1156
|
-
const compiler = circuitData.compiler;
|
|
1157
|
-
const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
|
|
1158
|
-
if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
|
|
1159
|
-
throw new Error("You should provide a valid commit hash of the circuit compiler.");
|
|
1160
|
-
// validate that the verification options are valid
|
|
1161
|
-
const verification = circuitData.verification;
|
|
1162
|
-
if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
|
|
1163
|
-
throw new Error("Please enter a valid verification mechanism: either CF or VM");
|
|
1164
|
-
// @todo VM parameters verification
|
|
1165
|
-
// if (verification['cfOrVM'] === "VM") {}
|
|
1166
|
-
// check that the timeout is provided for the correct configuration
|
|
1167
|
-
let dynamicThreshold;
|
|
1168
|
-
let fixedTimeWindow;
|
|
1169
|
-
let circuit = {};
|
|
1170
|
-
if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
|
|
1171
|
-
if (circuitData.dynamicThreshold <= 0)
|
|
1172
|
-
throw new Error("The dynamic threshold should be > 0.");
|
|
1173
|
-
dynamicThreshold = circuitData.dynamicThreshold;
|
|
1174
|
-
// the Circuit data for the ceremony setup
|
|
1175
|
-
circuit = {
|
|
1176
|
-
name: circuitData.name,
|
|
1177
|
-
description: circuitData.description,
|
|
1178
|
-
prefix: circuitPrefix,
|
|
1179
|
-
sequencePosition: i + 1,
|
|
1180
|
-
metadata: metadata,
|
|
1181
|
-
files: files,
|
|
1182
|
-
template: template,
|
|
1183
|
-
compiler: compiler,
|
|
1184
|
-
verification: verification,
|
|
1185
|
-
dynamicThreshold: dynamicThreshold,
|
|
1186
|
-
avgTimings: {
|
|
1187
|
-
contributionComputation: 0,
|
|
1188
|
-
fullContribution: 0,
|
|
1189
|
-
verifyCloudFunction: 0
|
|
1190
|
-
}
|
|
1191
|
-
};
|
|
1192
|
-
}
|
|
1193
|
-
if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
|
|
1194
|
-
if (circuitData.fixedTimeWindow <= 0)
|
|
1195
|
-
throw new Error("The fixed time window threshold should be > 0.");
|
|
1196
|
-
fixedTimeWindow = circuitData.fixedTimeWindow;
|
|
1197
|
-
// the Circuit data for the ceremony setup
|
|
1198
|
-
circuit = {
|
|
1199
|
-
name: circuitData.name,
|
|
1200
|
-
description: circuitData.description,
|
|
1201
|
-
prefix: circuitPrefix,
|
|
1202
|
-
sequencePosition: i + 1,
|
|
1203
|
-
metadata: metadata,
|
|
1204
|
-
files: files,
|
|
1205
|
-
template: template,
|
|
1206
|
-
compiler: compiler,
|
|
1207
|
-
verification: verification,
|
|
1208
|
-
fixedTimeWindow: fixedTimeWindow,
|
|
1209
|
-
avgTimings: {
|
|
1210
|
-
contributionComputation: 0,
|
|
1211
|
-
fullContribution: 0,
|
|
1212
|
-
verifyCloudFunction: 0
|
|
1213
|
-
}
|
|
1214
|
-
};
|
|
1215
|
-
}
|
|
1216
|
-
circuits.push(circuit);
|
|
1217
|
-
// remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
|
|
1218
|
-
if (cleanup)
|
|
1219
|
-
fs.unlinkSync(localR1csPath);
|
|
1220
|
-
fs.unlinkSync(localWasmPath);
|
|
1221
|
-
}
|
|
1222
|
-
const setupData = {
|
|
1223
|
-
ceremonyInputData: {
|
|
1224
|
-
title: data.title,
|
|
1225
|
-
description: data.description,
|
|
1226
|
-
startDate: startDate.valueOf(),
|
|
1227
|
-
endDate: endDate.valueOf(),
|
|
1228
|
-
timeoutMechanismType: data.timeoutMechanismType,
|
|
1229
|
-
penalty: data.penalty
|
|
1230
|
-
},
|
|
1231
|
-
ceremonyPrefix: extractPrefix(data.title),
|
|
1232
|
-
circuits: circuits,
|
|
1233
|
-
circuitArtifacts: circuitArtifacts
|
|
1234
|
-
};
|
|
1235
|
-
return setupData;
|
|
1236
|
-
}
|
|
1237
|
-
catch (error) {
|
|
1238
|
-
throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
|
|
1239
|
-
}
|
|
1240
|
-
};
|
|
1076
|
+
const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
|
|
1077
|
+
/**
|
|
1078
|
+
* Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
|
|
1079
|
+
* @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
|
|
1080
|
+
* @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
|
|
1081
|
+
* NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
|
|
1082
|
+
* @param str <string> - the arbitrary string from which to extract the prefix.
|
|
1083
|
+
* @returns <string> - the resulting prefix.
|
|
1084
|
+
*/
|
|
1085
|
+
const extractPrefix = (str) =>
|
|
1086
|
+
// eslint-disable-next-line no-useless-escape
|
|
1087
|
+
str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
|
|
1241
1088
|
/**
|
|
1242
1089
|
* Extract data from a R1CS metadata file generated with a custom file-based logger.
|
|
1243
1090
|
* @notice useful for extracting metadata circuits contained in the generated file using a logger
|
|
@@ -1294,17 +1141,6 @@ const formatZkeyIndex = (progress) => {
|
|
|
1294
1141
|
* @returns <number> - the amount of powers.
|
|
1295
1142
|
*/
|
|
1296
1143
|
const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
|
|
1297
|
-
/**
|
|
1298
|
-
* Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
|
|
1299
|
-
* @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
|
|
1300
|
-
* @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
|
|
1301
|
-
* NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
|
|
1302
|
-
* @param str <string> - the arbitrary string from which to extract the prefix.
|
|
1303
|
-
* @returns <string> - the resulting prefix.
|
|
1304
|
-
*/
|
|
1305
|
-
const extractPrefix = (str) =>
|
|
1306
|
-
// eslint-disable-next-line no-useless-escape
|
|
1307
|
-
str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
|
|
1308
1144
|
/**
|
|
1309
1145
|
* Automate the generation of an entropy for a contribution.
|
|
1310
1146
|
* @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
|
|
@@ -1444,6 +1280,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
|
|
|
1444
1280
|
// Return the read bytes.
|
|
1445
1281
|
return buffer;
|
|
1446
1282
|
};
|
|
1283
|
+
/**
|
|
1284
|
+
* Given a buffer in little endian format, convert it to bigint
|
|
1285
|
+
* @param buffer
|
|
1286
|
+
* @returns
|
|
1287
|
+
*/
|
|
1288
|
+
function leBufferToBigint(buffer) {
|
|
1289
|
+
return BigInt(`0x${buffer.reverse().toString("hex")}`);
|
|
1290
|
+
}
|
|
1291
|
+
/**
|
|
1292
|
+
* Given an input containing string values, convert them
|
|
1293
|
+
* to bigint
|
|
1294
|
+
* @param input - The input to convert
|
|
1295
|
+
* @returns the input with string values converted to bigint
|
|
1296
|
+
*/
|
|
1297
|
+
const unstringifyBigInts = (input) => {
|
|
1298
|
+
if (typeof input === "string" && /^[0-9]+$/.test(input)) {
|
|
1299
|
+
return BigInt(input);
|
|
1300
|
+
}
|
|
1301
|
+
if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
|
|
1302
|
+
return BigInt(input);
|
|
1303
|
+
}
|
|
1304
|
+
if (Array.isArray(input)) {
|
|
1305
|
+
return input.map(unstringifyBigInts);
|
|
1306
|
+
}
|
|
1307
|
+
if (input === null) {
|
|
1308
|
+
return null;
|
|
1309
|
+
}
|
|
1310
|
+
if (typeof input === "object") {
|
|
1311
|
+
return Object.entries(input).reduce((acc, [key, value]) => {
|
|
1312
|
+
acc[key] = unstringifyBigInts(value);
|
|
1313
|
+
return acc;
|
|
1314
|
+
}, {});
|
|
1315
|
+
}
|
|
1316
|
+
return input;
|
|
1317
|
+
};
|
|
1447
1318
|
/**
|
|
1448
1319
|
* Return the info about the R1CS file.ù
|
|
1449
1320
|
* @dev this method was built taking inspiration from
|
|
@@ -1504,17 +1375,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1504
1375
|
let constraints = 0;
|
|
1505
1376
|
try {
|
|
1506
1377
|
// Get 'number of section' (jump magic r1cs and version1 data).
|
|
1507
|
-
const numberOfSections =
|
|
1378
|
+
const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
|
|
1508
1379
|
// Jump to first section.
|
|
1509
1380
|
pointer = 12;
|
|
1510
1381
|
// For each section
|
|
1511
1382
|
for (let i = 0; i < numberOfSections; i++) {
|
|
1512
1383
|
// Read section type.
|
|
1513
|
-
const sectionType =
|
|
1384
|
+
const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
|
|
1514
1385
|
// Jump to section size.
|
|
1515
1386
|
pointer += 4;
|
|
1516
1387
|
// Read section size
|
|
1517
|
-
const sectionSize = Number(
|
|
1388
|
+
const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
|
|
1518
1389
|
// If at header section (0x00000001 : Header Section).
|
|
1519
1390
|
if (sectionType === BigInt(1)) {
|
|
1520
1391
|
// Read info from header section.
|
|
@@ -1546,22 +1417,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1546
1417
|
*/
|
|
1547
1418
|
pointer += sectionSize - 20;
|
|
1548
1419
|
// Read R1CS info.
|
|
1549
|
-
wires = Number(
|
|
1420
|
+
wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1550
1421
|
pointer += 4;
|
|
1551
|
-
publicOutputs = Number(
|
|
1422
|
+
publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1552
1423
|
pointer += 4;
|
|
1553
|
-
publicInputs = Number(
|
|
1424
|
+
publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1554
1425
|
pointer += 4;
|
|
1555
|
-
privateInputs = Number(
|
|
1426
|
+
privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1556
1427
|
pointer += 4;
|
|
1557
|
-
labels = Number(
|
|
1428
|
+
labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
|
|
1558
1429
|
pointer += 8;
|
|
1559
|
-
constraints = Number(
|
|
1430
|
+
constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1560
1431
|
}
|
|
1561
1432
|
pointer += 8 + Number(sectionSize);
|
|
1562
1433
|
}
|
|
1563
1434
|
return {
|
|
1564
|
-
curve: "bn-128",
|
|
1435
|
+
curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
|
|
1565
1436
|
wires,
|
|
1566
1437
|
constraints,
|
|
1567
1438
|
privateInputs,
|
|
@@ -1576,11 +1447,194 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1576
1447
|
}
|
|
1577
1448
|
};
|
|
1578
1449
|
/**
|
|
1579
|
-
*
|
|
1580
|
-
* @
|
|
1581
|
-
* @
|
|
1450
|
+
* Parse and validate that the ceremony configuration is correct
|
|
1451
|
+
* @notice this does not upload any files to storage
|
|
1452
|
+
* @param path <string> - the path to the configuration file
|
|
1453
|
+
* @param cleanup <boolean> - whether to delete the r1cs file after parsing
|
|
1454
|
+
* @returns any - the data to pass to the cloud function for setup and the circuit artifacts
|
|
1582
1455
|
*/
|
|
1583
|
-
const
|
|
1456
|
+
const parseCeremonyFile = async (path, cleanup = false) => {
|
|
1457
|
+
// check that the path exists
|
|
1458
|
+
if (!fs.existsSync(path))
|
|
1459
|
+
throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
|
|
1460
|
+
try {
|
|
1461
|
+
// read the data
|
|
1462
|
+
const data = JSON.parse(fs.readFileSync(path).toString());
|
|
1463
|
+
// verify that the data is correct
|
|
1464
|
+
if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
|
|
1465
|
+
data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
|
|
1466
|
+
throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
|
|
1467
|
+
// validate that we have at least 1 circuit input data
|
|
1468
|
+
if (!data.circuits || data.circuits.length === 0)
|
|
1469
|
+
throw new Error("You need to provide the data for at least 1 circuit.");
|
|
1470
|
+
// validate that the end date is in the future
|
|
1471
|
+
let endDate;
|
|
1472
|
+
let startDate;
|
|
1473
|
+
try {
|
|
1474
|
+
endDate = new Date(data.endDate);
|
|
1475
|
+
startDate = new Date(data.startDate);
|
|
1476
|
+
}
|
|
1477
|
+
catch (error) {
|
|
1478
|
+
throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
|
|
1479
|
+
}
|
|
1480
|
+
if (endDate <= startDate)
|
|
1481
|
+
throw new Error("The end date should be greater than the start date.");
|
|
1482
|
+
const currentDate = new Date();
|
|
1483
|
+
if (endDate <= currentDate || startDate <= currentDate)
|
|
1484
|
+
throw new Error("The start and end dates should be in the future.");
|
|
1485
|
+
// validate penalty
|
|
1486
|
+
if (data.penalty <= 0)
|
|
1487
|
+
throw new Error("The penalty should be greater than zero.");
|
|
1488
|
+
const circuits = [];
|
|
1489
|
+
const urlPattern = /(https?:\/\/[^\s]+)/g;
|
|
1490
|
+
const commitHashPattern = /^[a-f0-9]{40}$/i;
|
|
1491
|
+
const circuitArtifacts = [];
|
|
1492
|
+
for (let i = 0; i < data.circuits.length; i++) {
|
|
1493
|
+
const circuitData = data.circuits[i];
|
|
1494
|
+
const { artifacts } = circuitData;
|
|
1495
|
+
circuitArtifacts.push({
|
|
1496
|
+
artifacts
|
|
1497
|
+
});
|
|
1498
|
+
// where we storing the r1cs downloaded
|
|
1499
|
+
const localR1csPath = `./${circuitData.name}.r1cs`;
|
|
1500
|
+
// where we storing the wasm downloaded
|
|
1501
|
+
const localWasmPath = `./${circuitData.name}.wasm`;
|
|
1502
|
+
// download the r1cs to extract the metadata
|
|
1503
|
+
const streamPipeline = promisify(pipeline);
|
|
1504
|
+
// Make the call.
|
|
1505
|
+
const responseR1CS = await fetch(artifacts.r1csStoragePath);
|
|
1506
|
+
// Handle errors.
|
|
1507
|
+
if (!responseR1CS.ok && responseR1CS.status !== 200)
|
|
1508
|
+
throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
|
|
1509
|
+
await streamPipeline(responseR1CS.body, createWriteStream(localR1csPath));
|
|
1510
|
+
// Write the file locally
|
|
1511
|
+
// extract the metadata from the r1cs
|
|
1512
|
+
const metadata = getR1CSInfo(localR1csPath);
|
|
1513
|
+
// download wasm too to ensure it's available
|
|
1514
|
+
const responseWASM = await fetch(artifacts.wasmStoragePath);
|
|
1515
|
+
if (!responseWASM.ok && responseWASM.status !== 200)
|
|
1516
|
+
throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
|
|
1517
|
+
await streamPipeline(responseWASM.body, createWriteStream(localWasmPath));
|
|
1518
|
+
// validate that the circuit hash and template links are valid
|
|
1519
|
+
const { template } = circuitData;
|
|
1520
|
+
const URLMatch = template.source.match(urlPattern);
|
|
1521
|
+
if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
|
|
1522
|
+
throw new Error("You should provide the URL to the circuits templates on GitHub.");
|
|
1523
|
+
const hashMatch = template.commitHash.match(commitHashPattern);
|
|
1524
|
+
if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
|
|
1525
|
+
throw new Error("You should provide a valid commit hash of the circuit templates.");
|
|
1526
|
+
// calculate the hash of the r1cs file
|
|
1527
|
+
const r1csBlake2bHash = await blake512FromPath(localR1csPath);
|
|
1528
|
+
const circuitPrefix = extractPrefix(circuitData.name);
|
|
1529
|
+
// filenames
|
|
1530
|
+
const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
|
|
1531
|
+
const r1csCompleteFilename = `${circuitData.name}.r1cs`;
|
|
1532
|
+
const wasmCompleteFilename = `${circuitData.name}.wasm`;
|
|
1533
|
+
const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
|
|
1534
|
+
const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
|
|
1535
|
+
// storage paths
|
|
1536
|
+
const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
|
|
1537
|
+
const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
|
|
1538
|
+
const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
|
|
1539
|
+
const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
|
|
1540
|
+
const files = {
|
|
1541
|
+
potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
|
|
1542
|
+
r1csFilename: r1csCompleteFilename,
|
|
1543
|
+
wasmFilename: wasmCompleteFilename,
|
|
1544
|
+
initialZkeyFilename: firstZkeyCompleteFilename,
|
|
1545
|
+
potStoragePath: potStorageFilePath,
|
|
1546
|
+
r1csStoragePath: r1csStorageFilePath,
|
|
1547
|
+
wasmStoragePath: wasmStorageFilePath,
|
|
1548
|
+
initialZkeyStoragePath: zkeyStorageFilePath,
|
|
1549
|
+
r1csBlake2bHash
|
|
1550
|
+
};
|
|
1551
|
+
// validate that the compiler hash is a valid hash
|
|
1552
|
+
const { compiler } = circuitData;
|
|
1553
|
+
const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
|
|
1554
|
+
if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
|
|
1555
|
+
throw new Error("You should provide a valid commit hash of the circuit compiler.");
|
|
1556
|
+
// validate that the verification options are valid
|
|
1557
|
+
const { verification } = circuitData;
|
|
1558
|
+
if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
|
|
1559
|
+
throw new Error("Please enter a valid verification mechanism: either CF or VM");
|
|
1560
|
+
// @todo VM parameters verification
|
|
1561
|
+
// if (verification['cfOrVM'] === "VM") {}
|
|
1562
|
+
// check that the timeout is provided for the correct configuration
|
|
1563
|
+
let dynamicThreshold;
|
|
1564
|
+
let fixedTimeWindow;
|
|
1565
|
+
let circuit = {};
|
|
1566
|
+
if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
|
|
1567
|
+
if (circuitData.dynamicThreshold <= 0)
|
|
1568
|
+
throw new Error("The dynamic threshold should be > 0.");
|
|
1569
|
+
dynamicThreshold = circuitData.dynamicThreshold;
|
|
1570
|
+
// the Circuit data for the ceremony setup
|
|
1571
|
+
circuit = {
|
|
1572
|
+
name: circuitData.name,
|
|
1573
|
+
description: circuitData.description,
|
|
1574
|
+
prefix: circuitPrefix,
|
|
1575
|
+
sequencePosition: i + 1,
|
|
1576
|
+
metadata,
|
|
1577
|
+
files,
|
|
1578
|
+
template,
|
|
1579
|
+
compiler,
|
|
1580
|
+
verification,
|
|
1581
|
+
dynamicThreshold,
|
|
1582
|
+
avgTimings: {
|
|
1583
|
+
contributionComputation: 0,
|
|
1584
|
+
fullContribution: 0,
|
|
1585
|
+
verifyCloudFunction: 0
|
|
1586
|
+
}
|
|
1587
|
+
};
|
|
1588
|
+
}
|
|
1589
|
+
if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
|
|
1590
|
+
if (circuitData.fixedTimeWindow <= 0)
|
|
1591
|
+
throw new Error("The fixed time window threshold should be > 0.");
|
|
1592
|
+
fixedTimeWindow = circuitData.fixedTimeWindow;
|
|
1593
|
+
// the Circuit data for the ceremony setup
|
|
1594
|
+
circuit = {
|
|
1595
|
+
name: circuitData.name,
|
|
1596
|
+
description: circuitData.description,
|
|
1597
|
+
prefix: circuitPrefix,
|
|
1598
|
+
sequencePosition: i + 1,
|
|
1599
|
+
metadata,
|
|
1600
|
+
files,
|
|
1601
|
+
template,
|
|
1602
|
+
compiler,
|
|
1603
|
+
verification,
|
|
1604
|
+
fixedTimeWindow,
|
|
1605
|
+
avgTimings: {
|
|
1606
|
+
contributionComputation: 0,
|
|
1607
|
+
fullContribution: 0,
|
|
1608
|
+
verifyCloudFunction: 0
|
|
1609
|
+
}
|
|
1610
|
+
};
|
|
1611
|
+
}
|
|
1612
|
+
circuits.push(circuit);
|
|
1613
|
+
// remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
|
|
1614
|
+
if (cleanup) {
|
|
1615
|
+
fs.unlinkSync(localR1csPath);
|
|
1616
|
+
fs.unlinkSync(localWasmPath);
|
|
1617
|
+
}
|
|
1618
|
+
}
|
|
1619
|
+
const setupData = {
|
|
1620
|
+
ceremonyInputData: {
|
|
1621
|
+
title: data.title,
|
|
1622
|
+
description: data.description,
|
|
1623
|
+
startDate: startDate.valueOf(),
|
|
1624
|
+
endDate: endDate.valueOf(),
|
|
1625
|
+
timeoutMechanismType: data.timeoutMechanismType,
|
|
1626
|
+
penalty: data.penalty
|
|
1627
|
+
},
|
|
1628
|
+
ceremonyPrefix: extractPrefix(data.title),
|
|
1629
|
+
circuits,
|
|
1630
|
+
circuitArtifacts
|
|
1631
|
+
};
|
|
1632
|
+
return setupData;
|
|
1633
|
+
}
|
|
1634
|
+
catch (error) {
|
|
1635
|
+
throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
|
|
1636
|
+
}
|
|
1637
|
+
};
|
|
1584
1638
|
|
|
1585
1639
|
/**
|
|
1586
1640
|
* Verify that a zKey is valid
|
|
@@ -1838,14 +1892,14 @@ const getAWSVariables = () => {
|
|
|
1838
1892
|
if (!process.env.AWS_ACCESS_KEY_ID ||
|
|
1839
1893
|
!process.env.AWS_SECRET_ACCESS_KEY ||
|
|
1840
1894
|
!process.env.AWS_REGION ||
|
|
1841
|
-
!process.env.
|
|
1895
|
+
!process.env.AWS_INSTANCE_PROFILE_ARN ||
|
|
1842
1896
|
!process.env.AWS_AMI_ID)
|
|
1843
1897
|
throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
|
|
1844
1898
|
return {
|
|
1845
1899
|
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
|
|
1846
1900
|
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
|
|
1847
1901
|
region: process.env.AWS_REGION || "us-east-1",
|
|
1848
|
-
|
|
1902
|
+
instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
|
|
1849
1903
|
amiId: process.env.AWS_AMI_ID
|
|
1850
1904
|
};
|
|
1851
1905
|
};
|
|
@@ -1926,11 +1980,11 @@ const p256 = (proofPart) => {
|
|
|
1926
1980
|
*/
|
|
1927
1981
|
const formatSolidityCalldata = (circuitInput, _proof) => {
|
|
1928
1982
|
try {
|
|
1929
|
-
const proof =
|
|
1983
|
+
const proof = unstringifyBigInts(_proof);
|
|
1930
1984
|
// format the public inputs to the circuit
|
|
1931
1985
|
const formattedCircuitInput = [];
|
|
1932
1986
|
for (const cInput of circuitInput) {
|
|
1933
|
-
formattedCircuitInput.push(p256(
|
|
1987
|
+
formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
|
|
1934
1988
|
}
|
|
1935
1989
|
// construct calldata
|
|
1936
1990
|
const calldata = {
|
|
@@ -2098,7 +2152,8 @@ const getGitHubStats = async (user) => {
|
|
|
2098
2152
|
following: jsonData.following,
|
|
2099
2153
|
followers: jsonData.followers,
|
|
2100
2154
|
publicRepos: jsonData.public_repos,
|
|
2101
|
-
avatarUrl: jsonData.avatar_url
|
|
2155
|
+
avatarUrl: jsonData.avatar_url,
|
|
2156
|
+
age: jsonData.created_at
|
|
2102
2157
|
};
|
|
2103
2158
|
return data;
|
|
2104
2159
|
};
|
|
@@ -2110,20 +2165,21 @@ const getGitHubStats = async (user) => {
|
|
|
2110
2165
|
* @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
|
|
2111
2166
|
* @returns <any> Return an object with a boolean `reputable` flag and the user's avatar URL (empty string when the user is not reputable)
|
|
2112
2167
|
*/
|
|
2113
|
-
/**
 * Check whether a GitHub user is reputable enough to contribute, based on
 * following/followers counts, public repo count, and account age.
 * @param userLogin <string> The GitHub login (username) of the user to check.
 * @param minimumAmountOfFollowing <number> The minimum amount of accounts the user should follow.
 * @param minimumAmountOfFollowers <number> The minimum amount of followers the user should have.
 * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have.
 * @param minimumAge <number> The minimum account age in milliseconds (defaults to 0, which disables the age check; presumably ms since it is subtracted from Date.now() — confirm against callers).
 * @returns <any> An object with a boolean `reputable` flag and the user's `avatarUrl` (empty string when not reputable).
 */
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge = 0) => {
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registration, and then try again.");
    const { following, followers, publicRepos, avatarUrl, age } = await getGitHubStats(userLogin);
    // The account is "too new" when it was created after (now - minimumAge).
    // NOTE: without a default, legacy 4-argument callers made this subtraction NaN
    // (Invalid Date), silently skipping the age check; defaulting minimumAge to 0
    // keeps that check effectively disabled for them, but explicitly.
    const accountTooNew = new Date(age) > new Date(Date.now() - minimumAge);
    if (following < minimumAmountOfFollowing ||
        publicRepos < minimumAmountOfPublicRepos ||
        followers < minimumAmountOfFollowers ||
        accountTooNew)
        return {
            reputable: false,
            avatarUrl: ""
        };
    return {
        reputable: true,
        avatarUrl
    };
};
|
|
2129
2185
|
|
|
@@ -2310,8 +2366,8 @@ const createSSMClient = async () => {
|
|
|
2310
2366
|
* @returns <Array<string>> - the list of startup commands to be executed.
|
|
2311
2367
|
*/
|
|
2312
2368
|
/**
 * Build the list of startup (user-data) commands that bootstrap a VM:
 * fetch the bootstrap script from the given S3 bucket, then run it.
 * @param bucketName <string> The name of the S3 bucket holding the bootstrap script.
 * @returns <Array<string>> - the list of startup commands to be executed.
 */
const vmBootstrapCommand = (bucketName) => {
    // Full S3 URI of the bootstrap script inside the ceremony bucket.
    const bootstrapScriptS3Uri = `s3://${bucketName}/${vmBootstrapScriptFilename}`;
    return [
        "#!/bin/bash", // shebang.
        `aws s3 cp ${bootstrapScriptS3Uri} ${vmBootstrapScriptFilename}`, // copy file from S3 bucket to VM.
        `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}` // grant permission and execute.
    ];
};
|
|
2317
2373
|
/**
|
|
@@ -2384,7 +2440,7 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
|
|
|
2384
2440
|
*/
|
|
2385
2441
|
const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
|
|
2386
2442
|
// Get the AWS variables.
|
|
2387
|
-
const { amiId,
|
|
2443
|
+
const { amiId, instanceProfileArn } = getAWSVariables();
|
|
2388
2444
|
// Parametrize the VM EC2 instance.
|
|
2389
2445
|
const params = {
|
|
2390
2446
|
ImageId: amiId,
|
|
@@ -2393,7 +2449,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2393
2449
|
MinCount: 1,
|
|
2394
2450
|
// nb. to find this: iam -> roles -> role_name.
|
|
2395
2451
|
IamInstanceProfile: {
|
|
2396
|
-
Arn:
|
|
2452
|
+
Arn: instanceProfileArn
|
|
2397
2453
|
},
|
|
2398
2454
|
// nb. for running commands at the startup.
|
|
2399
2455
|
UserData: Buffer.from(commands.join("\n")).toString("base64"),
|
|
@@ -2402,7 +2458,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2402
2458
|
DeviceName: "/dev/xvda",
|
|
2403
2459
|
Ebs: {
|
|
2404
2460
|
DeleteOnTermination: true,
|
|
2405
|
-
VolumeSize: volumeSize,
|
|
2461
|
+
VolumeSize: volumeSize, // disk size in GB.
|
|
2406
2462
|
VolumeType: diskType
|
|
2407
2463
|
}
|
|
2408
2464
|
}
|
|
@@ -2419,6 +2475,10 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2419
2475
|
{
|
|
2420
2476
|
Key: "Initialized",
|
|
2421
2477
|
Value: "false"
|
|
2478
|
+
},
|
|
2479
|
+
{
|
|
2480
|
+
Key: "ProjectName",
|
|
2481
|
+
Value: process.env.AWS_TAG_VALUE
|
|
2422
2482
|
}
|
|
2423
2483
|
]
|
|
2424
2484
|
}
|
|
@@ -2588,4 +2648,4 @@ const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
|
|
|
2588
2648
|
}
|
|
2589
2649
|
};
|
|
2590
2650
|
|
|
2591
|
-
export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
|
|
2651
|
+
export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, contribHashRegex, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCeremonies, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
|