@devtion/actions 0.0.0-92056fa → 0.0.0-9239207
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.mjs +346 -277
- package/dist/index.node.js +345 -274
- package/dist/types/src/helpers/constants.d.ts +5 -2
- package/dist/types/src/helpers/constants.d.ts.map +1 -1
- package/dist/types/src/helpers/contracts.d.ts.map +1 -1
- package/dist/types/src/helpers/crypto.d.ts +1 -0
- package/dist/types/src/helpers/crypto.d.ts.map +1 -1
- package/dist/types/src/helpers/database.d.ts +8 -0
- package/dist/types/src/helpers/database.d.ts.map +1 -1
- package/dist/types/src/helpers/security.d.ts +1 -1
- package/dist/types/src/helpers/security.d.ts.map +1 -1
- package/dist/types/src/helpers/storage.d.ts +5 -2
- package/dist/types/src/helpers/storage.d.ts.map +1 -1
- package/dist/types/src/helpers/utils.d.ts +34 -20
- package/dist/types/src/helpers/utils.d.ts.map +1 -1
- package/dist/types/src/helpers/verification.d.ts +3 -2
- package/dist/types/src/helpers/verification.d.ts.map +1 -1
- package/dist/types/src/helpers/vm.d.ts.map +1 -1
- package/dist/types/src/index.d.ts +2 -2
- package/dist/types/src/index.d.ts.map +1 -1
- package/dist/types/src/types/index.d.ts +9 -3
- package/dist/types/src/types/index.d.ts.map +1 -1
- package/package.json +3 -8
- package/src/helpers/constants.ts +39 -31
- package/src/helpers/contracts.ts +3 -3
- package/src/helpers/database.ts +13 -0
- package/src/helpers/functions.ts +1 -1
- package/src/helpers/security.ts +11 -10
- package/src/helpers/services.ts +3 -3
- package/src/helpers/storage.ts +15 -3
- package/src/helpers/utils.ts +316 -272
- package/src/helpers/verification.ts +6 -6
- package/src/helpers/vm.ts +18 -7
- package/src/index.ts +5 -3
- package/src/types/index.ts +32 -8
package/dist/index.node.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @module @devtion/actions
|
|
3
|
-
* @version 1.
|
|
3
|
+
* @version 1.2.5
|
|
4
4
|
* @file A set of actions and helpers for CLI commands
|
|
5
5
|
* @copyright Ethereum Foundation 2022
|
|
6
6
|
* @license MIT
|
|
@@ -17,9 +17,7 @@ var firestore = require('firebase/firestore');
|
|
|
17
17
|
var snarkjs = require('snarkjs');
|
|
18
18
|
var crypto = require('crypto');
|
|
19
19
|
var blake = require('blakejs');
|
|
20
|
-
var ffjavascript = require('ffjavascript');
|
|
21
20
|
var winston = require('winston');
|
|
22
|
-
var clientS3 = require('@aws-sdk/client-s3');
|
|
23
21
|
var stream = require('stream');
|
|
24
22
|
var util = require('util');
|
|
25
23
|
var app = require('firebase/app');
|
|
@@ -30,10 +28,10 @@ var clientEc2 = require('@aws-sdk/client-ec2');
|
|
|
30
28
|
var clientSsm = require('@aws-sdk/client-ssm');
|
|
31
29
|
var dotenv = require('dotenv');
|
|
32
30
|
|
|
33
|
-
// Main part for the
|
|
34
|
-
const potFileDownloadMainUrl = `https://
|
|
35
|
-
// Main part for the
|
|
36
|
-
const potFilenameTemplate = `
|
|
31
|
+
// Main part for the PPoT Phase 1 Trusted Setup URLs to download PoT files.
|
|
32
|
+
const potFileDownloadMainUrl = `https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/`;
|
|
33
|
+
// Main part for the PPoT Phase 1 Trusted Setup PoT files to be downloaded.
|
|
34
|
+
const potFilenameTemplate = `ppot_0080_`;
|
|
37
35
|
// The genesis zKey index.
|
|
38
36
|
const genesisZkeyIndex = `00000`;
|
|
39
37
|
// The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
|
|
@@ -50,6 +48,8 @@ const verifierSmartContractAcronym = "verifier";
|
|
|
50
48
|
const ec2InstanceTag = "p0tionec2instance";
|
|
51
49
|
// The name of the VM startup script file.
|
|
52
50
|
const vmBootstrapScriptFilename = "bootstrap.sh";
|
|
51
|
+
// Match hash output by snarkjs in transcript log
|
|
52
|
+
const contribHashRegex = new RegExp("Contribution.+Hash.+\n\t\t.+\n\t\t.+\n.+\n\t\t.+\r?\n");
|
|
53
53
|
/**
|
|
54
54
|
* Define the supported VM configuration types.
|
|
55
55
|
* @dev the VM configurations can be retrieved at https://aws.amazon.com/ec2/instance-types/
|
|
@@ -107,112 +107,116 @@ const vmConfigurationTypes = {
|
|
|
107
107
|
*/
|
|
108
108
|
const powersOfTauFiles = [
|
|
109
109
|
{
|
|
110
|
-
ref: "https://
|
|
110
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_01.ptau",
|
|
111
111
|
size: 0.000084
|
|
112
112
|
},
|
|
113
113
|
{
|
|
114
|
-
ref: "https://
|
|
114
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_02.ptau",
|
|
115
115
|
size: 0.000086
|
|
116
116
|
},
|
|
117
117
|
{
|
|
118
|
-
ref: "https://
|
|
118
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_03.ptau",
|
|
119
119
|
size: 0.000091
|
|
120
120
|
},
|
|
121
121
|
{
|
|
122
|
-
ref: "https://
|
|
122
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_04.ptau",
|
|
123
123
|
size: 0.0001
|
|
124
124
|
},
|
|
125
125
|
{
|
|
126
|
-
ref: "https://
|
|
126
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_05.ptau",
|
|
127
127
|
size: 0.000117
|
|
128
128
|
},
|
|
129
129
|
{
|
|
130
|
-
ref: "https://
|
|
130
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_06.ptau",
|
|
131
131
|
size: 0.000153
|
|
132
132
|
},
|
|
133
133
|
{
|
|
134
|
-
ref: "https://
|
|
134
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_07.ptau",
|
|
135
135
|
size: 0.000225
|
|
136
136
|
},
|
|
137
137
|
{
|
|
138
|
-
ref: "https://
|
|
138
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_08.ptau",
|
|
139
139
|
size: 0.0004
|
|
140
140
|
},
|
|
141
141
|
{
|
|
142
|
-
ref: "https://
|
|
142
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_09.ptau",
|
|
143
143
|
size: 0.000658
|
|
144
144
|
},
|
|
145
145
|
{
|
|
146
|
-
ref: "https://
|
|
146
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_10.ptau",
|
|
147
147
|
size: 0.0013
|
|
148
148
|
},
|
|
149
149
|
{
|
|
150
|
-
ref: "https://
|
|
150
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_11.ptau",
|
|
151
151
|
size: 0.0023
|
|
152
152
|
},
|
|
153
153
|
{
|
|
154
|
-
ref: "https://
|
|
154
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_12.ptau",
|
|
155
155
|
size: 0.0046
|
|
156
156
|
},
|
|
157
157
|
{
|
|
158
|
-
ref: "https://
|
|
158
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_13.ptau",
|
|
159
159
|
size: 0.0091
|
|
160
160
|
},
|
|
161
161
|
{
|
|
162
|
-
ref: "https://
|
|
162
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_14.ptau",
|
|
163
163
|
size: 0.0181
|
|
164
164
|
},
|
|
165
165
|
{
|
|
166
|
-
ref: "https://
|
|
166
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_15.ptau",
|
|
167
167
|
size: 0.0361
|
|
168
168
|
},
|
|
169
169
|
{
|
|
170
|
-
ref: "https://
|
|
170
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_16.ptau",
|
|
171
171
|
size: 0.0721
|
|
172
172
|
},
|
|
173
173
|
{
|
|
174
|
-
ref: "https://
|
|
174
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_17.ptau",
|
|
175
175
|
size: 0.144
|
|
176
176
|
},
|
|
177
177
|
{
|
|
178
|
-
ref: "https://
|
|
178
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_18.ptau",
|
|
179
179
|
size: 0.288
|
|
180
180
|
},
|
|
181
181
|
{
|
|
182
|
-
ref: "https://
|
|
182
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_19.ptau",
|
|
183
183
|
size: 0.576
|
|
184
184
|
},
|
|
185
185
|
{
|
|
186
|
-
ref: "https://
|
|
186
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_20.ptau",
|
|
187
187
|
size: 1.1
|
|
188
188
|
},
|
|
189
189
|
{
|
|
190
|
-
ref: "https://
|
|
190
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_21.ptau",
|
|
191
191
|
size: 2.3
|
|
192
192
|
},
|
|
193
193
|
{
|
|
194
|
-
ref: "https://
|
|
194
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_22.ptau",
|
|
195
195
|
size: 4.5
|
|
196
196
|
},
|
|
197
197
|
{
|
|
198
|
-
ref: "https://
|
|
198
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_23.ptau",
|
|
199
199
|
size: 9.0
|
|
200
200
|
},
|
|
201
201
|
{
|
|
202
|
-
ref: "https://
|
|
202
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_24.ptau",
|
|
203
203
|
size: 18.0
|
|
204
204
|
},
|
|
205
205
|
{
|
|
206
|
-
ref: "https://
|
|
206
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_25.ptau",
|
|
207
207
|
size: 36.0
|
|
208
208
|
},
|
|
209
209
|
{
|
|
210
|
-
ref: "https://
|
|
210
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_26.ptau",
|
|
211
211
|
size: 72.0
|
|
212
212
|
},
|
|
213
213
|
{
|
|
214
|
-
ref: "https://
|
|
214
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_27.ptau",
|
|
215
215
|
size: 144.0
|
|
216
|
+
},
|
|
217
|
+
{
|
|
218
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_final.ptau",
|
|
219
|
+
size: 288.0
|
|
216
220
|
}
|
|
217
221
|
];
|
|
218
222
|
/**
|
|
@@ -343,6 +347,8 @@ const commonTerms = {
|
|
|
343
347
|
finalizeCeremony: "finalizeCeremony",
|
|
344
348
|
downloadCircuitArtifacts: "downloadCircuitArtifacts",
|
|
345
349
|
transferObject: "transferObject",
|
|
350
|
+
bandadaValidateProof: "bandadaValidateProof",
|
|
351
|
+
checkNonceOfSIWEAddress: "checkNonceOfSIWEAddress"
|
|
346
352
|
}
|
|
347
353
|
};
|
|
348
354
|
|
|
@@ -693,19 +699,23 @@ const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey,
|
|
|
693
699
|
* @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
|
|
694
700
|
* @param ceremonyId <string> - the unique identifier of the ceremony.
|
|
695
701
|
* @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
|
|
702
|
+
* @param logger <GenericBar> - an optional logger to show progress.
|
|
696
703
|
* @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
|
|
697
704
|
*/
|
|
698
|
-
const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
|
|
705
|
+
const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks, logger) => {
|
|
699
706
|
// Keep track of uploaded chunks.
|
|
700
707
|
const uploadedChunks = alreadyUploadedChunks || [];
|
|
708
|
+
// if we were passed a logger, start it
|
|
709
|
+
if (logger)
|
|
710
|
+
logger.start(chunksWithUrls.length, 0);
|
|
701
711
|
// Loop through remaining chunks.
|
|
702
712
|
for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
|
|
703
713
|
// Consume the pre-signed url to upload the chunk.
|
|
704
714
|
// @ts-ignore
|
|
705
715
|
const response = await fetch(chunksWithUrls[i].preSignedUrl, {
|
|
706
716
|
retryOptions: {
|
|
707
|
-
retryInitialDelay: 500,
|
|
708
|
-
socketTimeout: 60000,
|
|
717
|
+
retryInitialDelay: 500, // 500 ms.
|
|
718
|
+
socketTimeout: 60000, // 60 seconds.
|
|
709
719
|
retryMaxDuration: 300000 // 5 minutes.
|
|
710
720
|
},
|
|
711
721
|
method: "PUT",
|
|
@@ -729,6 +739,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
|
|
|
729
739
|
// nb. this must be done only when contributing (not finalizing).
|
|
730
740
|
if (!!ceremonyId && !!cloudFunctions)
|
|
731
741
|
await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
|
|
742
|
+
// increment the count on the logger
|
|
743
|
+
if (logger)
|
|
744
|
+
logger.increment();
|
|
732
745
|
}
|
|
733
746
|
return uploadedChunks;
|
|
734
747
|
};
|
|
@@ -749,8 +762,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
|
|
|
749
762
|
* @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
|
|
750
763
|
* @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
|
|
751
764
|
* @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
|
|
765
|
+
* @param logger <GenericBar> - an optional logger to show progress.
|
|
752
766
|
*/
|
|
753
|
-
const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
|
|
767
|
+
const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload, logger) => {
|
|
754
768
|
// The unique identifier of the multi-part upload.
|
|
755
769
|
let multiPartUploadId = "";
|
|
756
770
|
// The list of already uploaded chunks.
|
|
@@ -774,7 +788,7 @@ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFileP
|
|
|
774
788
|
const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
|
|
775
789
|
// Step (2).
|
|
776
790
|
const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
|
|
777
|
-
cloudFunctions, ceremonyId, alreadyUploadedChunks);
|
|
791
|
+
cloudFunctions, ceremonyId, alreadyUploadedChunks, logger);
|
|
778
792
|
// Step (3).
|
|
779
793
|
await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
|
|
780
794
|
};
|
|
@@ -998,6 +1012,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
|
|
|
998
1012
|
]);
|
|
999
1013
|
return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
|
|
1000
1014
|
};
|
|
1015
|
+
/**
|
|
1016
|
+
* Query all ceremonies
|
|
1017
|
+
* @notice get all ceremonies from the database.
|
|
1018
|
+
* @dev this is a helper for the CLI ceremony methods.
|
|
1019
|
+
* @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
|
|
1020
|
+
* @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
|
|
1021
|
+
*/
|
|
1022
|
+
const getAllCeremonies = async (firestoreDatabase) => {
|
|
1023
|
+
const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
|
|
1024
|
+
return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
|
|
1025
|
+
};
|
|
1001
1026
|
|
|
1002
1027
|
/**
|
|
1003
1028
|
* @hidden
|
|
@@ -1046,199 +1071,22 @@ const compareHashes = async (path1, path2) => {
|
|
|
1046
1071
|
};
|
|
1047
1072
|
|
|
1048
1073
|
/**
|
|
1049
|
-
*
|
|
1050
|
-
* @
|
|
1051
|
-
* @
|
|
1052
|
-
* @param cleanup <boolean> - whether to delete the r1cs file after parsing
|
|
1053
|
-
* @returns any - the data to pass to the cloud function for setup and the circuit artifacts
|
|
1074
|
+
* Return a string with double digits if the provided input is one digit only.
|
|
1075
|
+
* @param in <number> - the input number to be converted.
|
|
1076
|
+
* @returns <string> - the two digits stringified number derived from the conversion.
|
|
1054
1077
|
*/
|
|
1055
|
-
const
|
|
1056
|
-
|
|
1057
|
-
|
|
1058
|
-
|
|
1059
|
-
|
|
1060
|
-
|
|
1061
|
-
|
|
1062
|
-
|
|
1063
|
-
|
|
1064
|
-
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
throw new Error("You need to provide the data for at least 1 circuit.");
|
|
1068
|
-
// validate that the end date is in the future
|
|
1069
|
-
let endDate;
|
|
1070
|
-
let startDate;
|
|
1071
|
-
try {
|
|
1072
|
-
endDate = new Date(data.endDate);
|
|
1073
|
-
startDate = new Date(data.startDate);
|
|
1074
|
-
}
|
|
1075
|
-
catch (error) {
|
|
1076
|
-
throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
|
|
1077
|
-
}
|
|
1078
|
-
if (endDate <= startDate)
|
|
1079
|
-
throw new Error("The end date should be greater than the start date.");
|
|
1080
|
-
const currentDate = new Date();
|
|
1081
|
-
if (endDate <= currentDate || startDate <= currentDate)
|
|
1082
|
-
throw new Error("The start and end dates should be in the future.");
|
|
1083
|
-
// validate penalty
|
|
1084
|
-
if (data.penalty <= 0)
|
|
1085
|
-
throw new Error("The penalty should be greater than zero.");
|
|
1086
|
-
const circuits = [];
|
|
1087
|
-
const urlPattern = /(https?:\/\/[^\s]+)/g;
|
|
1088
|
-
const commitHashPattern = /^[a-f0-9]{40}$/i;
|
|
1089
|
-
const circuitArtifacts = [];
|
|
1090
|
-
for (let i = 0; i < data.circuits.length; i++) {
|
|
1091
|
-
const circuitData = data.circuits[i];
|
|
1092
|
-
const artifacts = circuitData.artifacts;
|
|
1093
|
-
circuitArtifacts.push({
|
|
1094
|
-
artifacts: artifacts
|
|
1095
|
-
});
|
|
1096
|
-
// where we storing the r1cs downloaded
|
|
1097
|
-
const localR1csPath = `./${circuitData.name}.r1cs`;
|
|
1098
|
-
// where we storing the wasm downloaded
|
|
1099
|
-
const localWasmPath = `./${circuitData.name}.wasm`;
|
|
1100
|
-
// check that the artifacts exist in S3
|
|
1101
|
-
// we don't need any privileges to download this
|
|
1102
|
-
// just the correct region
|
|
1103
|
-
const s3 = new clientS3.S3Client({
|
|
1104
|
-
region: artifacts.region,
|
|
1105
|
-
credentials: undefined
|
|
1106
|
-
});
|
|
1107
|
-
// download the r1cs to extract the metadata
|
|
1108
|
-
const command = new clientS3.GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
|
|
1109
|
-
const response = await s3.send(command);
|
|
1110
|
-
const streamPipeline = util.promisify(stream.pipeline);
|
|
1111
|
-
if (response.$metadata.httpStatusCode !== 200)
|
|
1112
|
-
throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
|
|
1113
|
-
if (response.Body instanceof stream.Readable)
|
|
1114
|
-
await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
|
|
1115
|
-
// extract the metadata from the r1cs
|
|
1116
|
-
const metadata = getR1CSInfo(localR1csPath);
|
|
1117
|
-
// download wasm too to ensure it's available
|
|
1118
|
-
const wasmCommand = new clientS3.GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.wasmStoragePath });
|
|
1119
|
-
const wasmResponse = await s3.send(wasmCommand);
|
|
1120
|
-
if (wasmResponse.$metadata.httpStatusCode !== 200)
|
|
1121
|
-
throw new Error(`There was an error while trying to download the wasm file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
|
|
1122
|
-
if (wasmResponse.Body instanceof stream.Readable)
|
|
1123
|
-
await streamPipeline(wasmResponse.Body, fs.createWriteStream(localWasmPath));
|
|
1124
|
-
// validate that the circuit hash and template links are valid
|
|
1125
|
-
const template = circuitData.template;
|
|
1126
|
-
const URLMatch = template.source.match(urlPattern);
|
|
1127
|
-
if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
|
|
1128
|
-
throw new Error("You should provide the URL to the circuits templates on GitHub.");
|
|
1129
|
-
const hashMatch = template.commitHash.match(commitHashPattern);
|
|
1130
|
-
if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
|
|
1131
|
-
throw new Error("You should provide a valid commit hash of the circuit templates.");
|
|
1132
|
-
// calculate the hash of the r1cs file
|
|
1133
|
-
const r1csBlake2bHash = await blake512FromPath(localR1csPath);
|
|
1134
|
-
const circuitPrefix = extractPrefix(circuitData.name);
|
|
1135
|
-
// filenames
|
|
1136
|
-
const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
|
|
1137
|
-
const r1csCompleteFilename = `${circuitData.name}.r1cs`;
|
|
1138
|
-
const wasmCompleteFilename = `${circuitData.name}.wasm`;
|
|
1139
|
-
const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
|
|
1140
|
-
const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
|
|
1141
|
-
// storage paths
|
|
1142
|
-
const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
|
|
1143
|
-
const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
|
|
1144
|
-
const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
|
|
1145
|
-
const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
|
|
1146
|
-
const files = {
|
|
1147
|
-
potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
|
|
1148
|
-
r1csFilename: r1csCompleteFilename,
|
|
1149
|
-
wasmFilename: wasmCompleteFilename,
|
|
1150
|
-
initialZkeyFilename: firstZkeyCompleteFilename,
|
|
1151
|
-
potStoragePath: potStorageFilePath,
|
|
1152
|
-
r1csStoragePath: r1csStorageFilePath,
|
|
1153
|
-
wasmStoragePath: wasmStorageFilePath,
|
|
1154
|
-
initialZkeyStoragePath: zkeyStorageFilePath,
|
|
1155
|
-
r1csBlake2bHash: r1csBlake2bHash
|
|
1156
|
-
};
|
|
1157
|
-
// validate that the compiler hash is a valid hash
|
|
1158
|
-
const compiler = circuitData.compiler;
|
|
1159
|
-
const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
|
|
1160
|
-
if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
|
|
1161
|
-
throw new Error("You should provide a valid commit hash of the circuit compiler.");
|
|
1162
|
-
// validate that the verification options are valid
|
|
1163
|
-
const verification = circuitData.verification;
|
|
1164
|
-
if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
|
|
1165
|
-
throw new Error("Please enter a valid verification mechanism: either CF or VM");
|
|
1166
|
-
// @todo VM parameters verification
|
|
1167
|
-
// if (verification['cfOrVM'] === "VM") {}
|
|
1168
|
-
// check that the timeout is provided for the correct configuration
|
|
1169
|
-
let dynamicThreshold;
|
|
1170
|
-
let fixedTimeWindow;
|
|
1171
|
-
let circuit = {};
|
|
1172
|
-
if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
|
|
1173
|
-
if (circuitData.dynamicThreshold <= 0)
|
|
1174
|
-
throw new Error("The dynamic threshold should be > 0.");
|
|
1175
|
-
dynamicThreshold = circuitData.dynamicThreshold;
|
|
1176
|
-
// the Circuit data for the ceremony setup
|
|
1177
|
-
circuit = {
|
|
1178
|
-
name: circuitData.name,
|
|
1179
|
-
description: circuitData.description,
|
|
1180
|
-
prefix: circuitPrefix,
|
|
1181
|
-
sequencePosition: i + 1,
|
|
1182
|
-
metadata: metadata,
|
|
1183
|
-
files: files,
|
|
1184
|
-
template: template,
|
|
1185
|
-
compiler: compiler,
|
|
1186
|
-
verification: verification,
|
|
1187
|
-
dynamicThreshold: dynamicThreshold,
|
|
1188
|
-
avgTimings: {
|
|
1189
|
-
contributionComputation: 0,
|
|
1190
|
-
fullContribution: 0,
|
|
1191
|
-
verifyCloudFunction: 0
|
|
1192
|
-
},
|
|
1193
|
-
};
|
|
1194
|
-
}
|
|
1195
|
-
if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
|
|
1196
|
-
if (circuitData.fixedTimeWindow <= 0)
|
|
1197
|
-
throw new Error("The fixed time window threshold should be > 0.");
|
|
1198
|
-
fixedTimeWindow = circuitData.fixedTimeWindow;
|
|
1199
|
-
// the Circuit data for the ceremony setup
|
|
1200
|
-
circuit = {
|
|
1201
|
-
name: circuitData.name,
|
|
1202
|
-
description: circuitData.description,
|
|
1203
|
-
prefix: circuitPrefix,
|
|
1204
|
-
sequencePosition: i + 1,
|
|
1205
|
-
metadata: metadata,
|
|
1206
|
-
files: files,
|
|
1207
|
-
template: template,
|
|
1208
|
-
compiler: compiler,
|
|
1209
|
-
verification: verification,
|
|
1210
|
-
fixedTimeWindow: fixedTimeWindow,
|
|
1211
|
-
avgTimings: {
|
|
1212
|
-
contributionComputation: 0,
|
|
1213
|
-
fullContribution: 0,
|
|
1214
|
-
verifyCloudFunction: 0
|
|
1215
|
-
},
|
|
1216
|
-
};
|
|
1217
|
-
}
|
|
1218
|
-
circuits.push(circuit);
|
|
1219
|
-
// remove the local r1cs download (if used for verifying the config only vs setup)
|
|
1220
|
-
if (cleanup)
|
|
1221
|
-
fs.unlinkSync(localR1csPath);
|
|
1222
|
-
}
|
|
1223
|
-
const setupData = {
|
|
1224
|
-
ceremonyInputData: {
|
|
1225
|
-
title: data.title,
|
|
1226
|
-
description: data.description,
|
|
1227
|
-
startDate: startDate.valueOf(),
|
|
1228
|
-
endDate: endDate.valueOf(),
|
|
1229
|
-
timeoutMechanismType: data.timeoutMechanismType,
|
|
1230
|
-
penalty: data.penalty
|
|
1231
|
-
},
|
|
1232
|
-
ceremonyPrefix: extractPrefix(data.title),
|
|
1233
|
-
circuits: circuits,
|
|
1234
|
-
circuitArtifacts: circuitArtifacts
|
|
1235
|
-
};
|
|
1236
|
-
return setupData;
|
|
1237
|
-
}
|
|
1238
|
-
catch (error) {
|
|
1239
|
-
throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
|
|
1240
|
-
}
|
|
1241
|
-
};
|
|
1078
|
+
const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
|
|
1079
|
+
/**
|
|
1080
|
+
* Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
|
|
1081
|
+
* @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
|
|
1082
|
+
* @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
|
|
1083
|
+
* NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
|
|
1084
|
+
* @param str <string> - the arbitrary string from which to extract the prefix.
|
|
1085
|
+
* @returns <string> - the resulting prefix.
|
|
1086
|
+
*/
|
|
1087
|
+
const extractPrefix = (str) =>
|
|
1088
|
+
// eslint-disable-next-line no-useless-escape
|
|
1089
|
+
str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
|
|
1242
1090
|
/**
|
|
1243
1091
|
* Extract data from a R1CS metadata file generated with a custom file-based logger.
|
|
1244
1092
|
* @notice useful for extracting metadata circuits contained in the generated file using a logger
|
|
@@ -1295,17 +1143,6 @@ const formatZkeyIndex = (progress) => {
|
|
|
1295
1143
|
* @returns <number> - the amount of powers.
|
|
1296
1144
|
*/
|
|
1297
1145
|
const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
|
|
1298
|
-
/**
|
|
1299
|
-
* Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
|
|
1300
|
-
* @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
|
|
1301
|
-
* @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
|
|
1302
|
-
* NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
|
|
1303
|
-
* @param str <string> - the arbitrary string from which to extract the prefix.
|
|
1304
|
-
* @returns <string> - the resulting prefix.
|
|
1305
|
-
*/
|
|
1306
|
-
const extractPrefix = (str) =>
|
|
1307
|
-
// eslint-disable-next-line no-useless-escape
|
|
1308
|
-
str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
|
|
1309
1146
|
/**
|
|
1310
1147
|
* Automate the generation of an entropy for a contribution.
|
|
1311
1148
|
* @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
|
|
@@ -1372,7 +1209,9 @@ const getContributionsValidityForContributor = async (firestoreDatabase, circuit
|
|
|
1372
1209
|
* @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
|
|
1373
1210
|
* @returns <string> - the public attestation preamble.
|
|
1374
1211
|
*/
|
|
1375
|
-
const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}
|
|
1212
|
+
const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}${ceremonyName.toLowerCase().includes("trusted setup") || ceremonyName.toLowerCase().includes("ceremony")
|
|
1213
|
+
? "."
|
|
1214
|
+
: " MPC Phase2 Trusted Setup ceremony."}\nThe following are my contribution signatures:`;
|
|
1376
1215
|
/**
|
|
1377
1216
|
* Check and prepare public attestation for the contributor made only of its valid contributions.
|
|
1378
1217
|
* @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
|
|
@@ -1443,6 +1282,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
|
|
|
1443
1282
|
// Return the read bytes.
|
|
1444
1283
|
return buffer;
|
|
1445
1284
|
};
|
|
1285
|
+
/**
|
|
1286
|
+
* Given a buffer in little endian format, convert it to bigint
|
|
1287
|
+
* @param buffer
|
|
1288
|
+
* @returns
|
|
1289
|
+
*/
|
|
1290
|
+
function leBufferToBigint(buffer) {
|
|
1291
|
+
return BigInt(`0x${buffer.reverse().toString("hex")}`);
|
|
1292
|
+
}
|
|
1293
|
+
/**
|
|
1294
|
+
* Given an input containing string values, convert them
|
|
1295
|
+
* to bigint
|
|
1296
|
+
* @param input - The input to convert
|
|
1297
|
+
* @returns the input with string values converted to bigint
|
|
1298
|
+
*/
|
|
1299
|
+
const unstringifyBigInts = (input) => {
|
|
1300
|
+
if (typeof input === "string" && /^[0-9]+$/.test(input)) {
|
|
1301
|
+
return BigInt(input);
|
|
1302
|
+
}
|
|
1303
|
+
if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
|
|
1304
|
+
return BigInt(input);
|
|
1305
|
+
}
|
|
1306
|
+
if (Array.isArray(input)) {
|
|
1307
|
+
return input.map(unstringifyBigInts);
|
|
1308
|
+
}
|
|
1309
|
+
if (input === null) {
|
|
1310
|
+
return null;
|
|
1311
|
+
}
|
|
1312
|
+
if (typeof input === "object") {
|
|
1313
|
+
return Object.entries(input).reduce((acc, [key, value]) => {
|
|
1314
|
+
acc[key] = unstringifyBigInts(value);
|
|
1315
|
+
return acc;
|
|
1316
|
+
}, {});
|
|
1317
|
+
}
|
|
1318
|
+
return input;
|
|
1319
|
+
};
|
|
1446
1320
|
/**
|
|
1447
1321
|
* Return the info about the R1CS file.ù
|
|
1448
1322
|
* @dev this method was built taking inspiration from
|
|
@@ -1503,17 +1377,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1503
1377
|
let constraints = 0;
|
|
1504
1378
|
try {
|
|
1505
1379
|
// Get 'number of section' (jump magic r1cs and version1 data).
|
|
1506
|
-
const numberOfSections =
|
|
1380
|
+
const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
|
|
1507
1381
|
// Jump to first section.
|
|
1508
1382
|
pointer = 12;
|
|
1509
1383
|
// For each section
|
|
1510
1384
|
for (let i = 0; i < numberOfSections; i++) {
|
|
1511
1385
|
// Read section type.
|
|
1512
|
-
const sectionType =
|
|
1386
|
+
const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
|
|
1513
1387
|
// Jump to section size.
|
|
1514
1388
|
pointer += 4;
|
|
1515
1389
|
// Read section size
|
|
1516
|
-
const sectionSize = Number(
|
|
1390
|
+
const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
|
|
1517
1391
|
// If at header section (0x00000001 : Header Section).
|
|
1518
1392
|
if (sectionType === BigInt(1)) {
|
|
1519
1393
|
// Read info from header section.
|
|
@@ -1545,22 +1419,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1545
1419
|
*/
|
|
1546
1420
|
pointer += sectionSize - 20;
|
|
1547
1421
|
// Read R1CS info.
|
|
1548
|
-
wires = Number(
|
|
1422
|
+
wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1549
1423
|
pointer += 4;
|
|
1550
|
-
publicOutputs = Number(
|
|
1424
|
+
publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1551
1425
|
pointer += 4;
|
|
1552
|
-
publicInputs = Number(
|
|
1426
|
+
publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1553
1427
|
pointer += 4;
|
|
1554
|
-
privateInputs = Number(
|
|
1428
|
+
privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1555
1429
|
pointer += 4;
|
|
1556
|
-
labels = Number(
|
|
1430
|
+
labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
|
|
1557
1431
|
pointer += 8;
|
|
1558
|
-
constraints = Number(
|
|
1432
|
+
constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1559
1433
|
}
|
|
1560
1434
|
pointer += 8 + Number(sectionSize);
|
|
1561
1435
|
}
|
|
1562
1436
|
return {
|
|
1563
|
-
curve: "bn-128",
|
|
1437
|
+
curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
|
|
1564
1438
|
wires,
|
|
1565
1439
|
constraints,
|
|
1566
1440
|
privateInputs,
|
|
@@ -1575,11 +1449,194 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1575
1449
|
}
|
|
1576
1450
|
};
|
|
1577
1451
|
/**
 * Parse and validate that the ceremony configuration is correct
 * @notice this does not upload any files to storage
 * @param path <string> - the path to the configuration file
 * @param cleanup <boolean> - whether to delete the r1cs file after parsing
 * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
 */
const parseCeremonyFile = async (path, cleanup = false) => {
    // check that the path exists
    if (!fs.existsSync(path))
        throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
    try {
        // read the data
        const data = JSON.parse(fs.readFileSync(path).toString());
        // verify that the data is correct
        if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
            data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
            throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
        // validate that we have at least 1 circuit input data
        if (!data.circuits || data.circuits.length === 0)
            throw new Error("You need to provide the data for at least 1 circuit.");
        // validate the dates.
        // @dev new Date(...) never throws on malformed input - it yields an "Invalid Date"
        // whose valueOf() is NaN, and every comparison against NaN is false. So the format
        // must be checked explicitly, otherwise garbage dates silently pass all checks below.
        const endDate = new Date(data.endDate);
        const startDate = new Date(data.startDate);
        if (Number.isNaN(startDate.valueOf()) || Number.isNaN(endDate.valueOf()))
            throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
        if (endDate <= startDate)
            throw new Error("The end date should be greater than the start date.");
        const currentDate = new Date();
        if (endDate <= currentDate || startDate <= currentDate)
            throw new Error("The start and end dates should be in the future.");
        // validate penalty
        if (data.penalty <= 0)
            throw new Error("The penalty should be greater than zero.");
        const circuits = [];
        const urlPattern = /(https?:\/\/[^\s]+)/g;
        const commitHashPattern = /^[a-f0-9]{40}$/i;
        const circuitArtifacts = [];
        for (let i = 0; i < data.circuits.length; i++) {
            const circuitData = data.circuits[i];
            const { artifacts } = circuitData;
            circuitArtifacts.push({
                artifacts
            });
            // where we storing the r1cs downloaded
            const localR1csPath = `./${circuitData.name}.r1cs`;
            // where we storing the wasm downloaded
            const localWasmPath = `./${circuitData.name}.wasm`;
            // download the r1cs to extract the metadata
            const streamPipeline = util.promisify(stream.pipeline);
            // Make the call.
            const responseR1CS = await fetch(artifacts.r1csStoragePath);
            // Handle errors.
            if (!responseR1CS.ok && responseR1CS.status !== 200)
                throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            // Write the file locally.
            await streamPipeline(responseR1CS.body, fs.createWriteStream(localR1csPath));
            // extract the metadata from the r1cs
            const metadata = getR1CSInfo(localR1csPath);
            // download wasm too to ensure it's available
            const responseWASM = await fetch(artifacts.wasmStoragePath);
            if (!responseWASM.ok && responseWASM.status !== 200)
                throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            await streamPipeline(responseWASM.body, fs.createWriteStream(localWasmPath));
            // validate that the circuit hash and template links are valid
            const { template } = circuitData;
            const URLMatch = template.source.match(urlPattern);
            if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
                throw new Error("You should provide the URL to the circuits templates on GitHub.");
            const hashMatch = template.commitHash.match(commitHashPattern);
            if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit templates.");
            // calculate the hash of the r1cs file
            const r1csBlake2bHash = await blake512FromPath(localR1csPath);
            const circuitPrefix = extractPrefix(circuitData.name);
            // filenames
            const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
            const r1csCompleteFilename = `${circuitData.name}.r1cs`;
            const wasmCompleteFilename = `${circuitData.name}.wasm`;
            const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
            const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
            // storage paths
            const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
            const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
            const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
            const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
            const files = {
                potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
                r1csFilename: r1csCompleteFilename,
                wasmFilename: wasmCompleteFilename,
                initialZkeyFilename: firstZkeyCompleteFilename,
                potStoragePath: potStorageFilePath,
                r1csStoragePath: r1csStorageFilePath,
                wasmStoragePath: wasmStorageFilePath,
                initialZkeyStoragePath: zkeyStorageFilePath,
                r1csBlake2bHash
            };
            // validate that the compiler hash is a valid hash
            const { compiler } = circuitData;
            const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
            if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit compiler.");
            // validate that the verification options are valid
            const { verification } = circuitData;
            if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
                throw new Error("Please enter a valid verification mechanism: either CF or VM");
            // @todo VM parameters verification
            // if (verification['cfOrVM'] === "VM") {}
            // check that the timeout is provided for the correct configuration
            let dynamicThreshold;
            let fixedTimeWindow;
            let circuit = {};
            if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
                if (circuitData.dynamicThreshold <= 0)
                    throw new Error("The dynamic threshold should be > 0.");
                dynamicThreshold = circuitData.dynamicThreshold;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    dynamicThreshold,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
                if (circuitData.fixedTimeWindow <= 0)
                    throw new Error("The fixed time window threshold should be > 0.");
                fixedTimeWindow = circuitData.fixedTimeWindow;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    fixedTimeWindow,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            circuits.push(circuit);
            // remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
            if (cleanup) {
                fs.unlinkSync(localR1csPath);
                fs.unlinkSync(localWasmPath);
            }
        }
        const setupData = {
            ceremonyInputData: {
                title: data.title,
                description: data.description,
                startDate: startDate.valueOf(),
                endDate: endDate.valueOf(),
                timeoutMechanismType: data.timeoutMechanismType,
                penalty: data.penalty
            },
            ceremonyPrefix: extractPrefix(data.title),
            circuits,
            circuitArtifacts
        };
        return setupData;
    }
    catch (error) {
        // Wrap every validation/IO failure with context about which phase failed.
        throw new Error(`Error while parsing the ceremony setup file. ${error.message}`);
    }
};
|
|
1583
1640
|
|
|
1584
1641
|
/**
|
|
1585
1642
|
* Verify that a zKey is valid
|
|
@@ -1828,7 +1885,7 @@ const getFirestoreDatabase = (app) => firestore.getFirestore(app);
|
|
|
1828
1885
|
* @param app <FirebaseApp> - the Firebase application.
|
|
1829
1886
|
* @returns <Functions> - the Cloud Functions associated to the application.
|
|
1830
1887
|
*/
|
|
1831
|
-
const getFirebaseFunctions = (app) => functions.getFunctions(app,
|
|
1888
|
+
// @note region defaults to "europe-west1", where the ceremony cloud functions are deployed;
// callers may override it without breaking the previous single-argument usage.
const getFirebaseFunctions = (app, region = "europe-west1") => functions.getFunctions(app, region);
|
|
1832
1889
|
/**
|
|
1833
1890
|
* Retrieve the configuration variables for the AWS services (S3, EC2).
|
|
1834
1891
|
* @returns <AWSVariables> - the values of the AWS services configuration variables.
|
|
@@ -1837,14 +1894,14 @@ const getAWSVariables = () => {
|
|
|
1837
1894
|
if (!process.env.AWS_ACCESS_KEY_ID ||
|
|
1838
1895
|
!process.env.AWS_SECRET_ACCESS_KEY ||
|
|
1839
1896
|
!process.env.AWS_REGION ||
|
|
1840
|
-
!process.env.
|
|
1897
|
+
!process.env.AWS_INSTANCE_PROFILE_ARN ||
|
|
1841
1898
|
!process.env.AWS_AMI_ID)
|
|
1842
1899
|
throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
|
|
1843
1900
|
return {
|
|
1844
1901
|
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
|
|
1845
1902
|
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
|
|
1846
1903
|
region: process.env.AWS_REGION || "us-east-1",
|
|
1847
|
-
|
|
1904
|
+
instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
|
|
1848
1905
|
amiId: process.env.AWS_AMI_ID
|
|
1849
1906
|
};
|
|
1850
1907
|
};
|
|
@@ -1925,11 +1982,11 @@ const p256 = (proofPart) => {
|
|
|
1925
1982
|
*/
|
|
1926
1983
|
const formatSolidityCalldata = (circuitInput, _proof) => {
|
|
1927
1984
|
try {
|
|
1928
|
-
const proof =
|
|
1985
|
+
const proof = unstringifyBigInts(_proof);
|
|
1929
1986
|
// format the public inputs to the circuit
|
|
1930
1987
|
const formattedCircuitInput = [];
|
|
1931
1988
|
for (const cInput of circuitInput) {
|
|
1932
|
-
formattedCircuitInput.push(p256(
|
|
1989
|
+
formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
|
|
1933
1990
|
}
|
|
1934
1991
|
// construct calldata
|
|
1935
1992
|
const calldata = {
|
|
@@ -2097,7 +2154,8 @@ const getGitHubStats = async (user) => {
|
|
|
2097
2154
|
following: jsonData.following,
|
|
2098
2155
|
followers: jsonData.followers,
|
|
2099
2156
|
publicRepos: jsonData.public_repos,
|
|
2100
|
-
avatarUrl: jsonData.avatar_url
|
|
2157
|
+
avatarUrl: jsonData.avatar_url,
|
|
2158
|
+
age: jsonData.created_at
|
|
2101
2159
|
};
|
|
2102
2160
|
return data;
|
|
2103
2161
|
};
|
|
@@ -2109,20 +2167,21 @@ const getGitHubStats = async (user) => {
|
|
|
2109
2167
|
* @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
|
|
2110
2168
|
* @returns <any> Return the avatar URL of the user if the user is reputable, false otherwise
|
|
2111
2169
|
*/
|
|
2112
|
-
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos) => {
|
|
2170
|
+
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge) => {
    // A GitHub token is required to query the stats API for the anti-sybil check.
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registation, and then try again.");
    // `age` is the account creation timestamp returned by getGitHubStats (GitHub `created_at`).
    const { following, followers, publicRepos, avatarUrl, age } = await getGitHubStats(userLogin);
    // Reject if any threshold is unmet, or if the account was created AFTER
    // (now - minimumAge), i.e. the account is too young.
    // NOTE(review): minimumAge is subtracted from Date.now(), so it is presumably a
    // duration in milliseconds - confirm the unit against callers.
    if (following < minimumAmountOfFollowing ||
        publicRepos < minimumAmountOfPublicRepos ||
        followers < minimumAmountOfFollowers ||
        new Date(age) > new Date(Date.now() - minimumAge))
        // Not reputable: no avatar URL is exposed.
        return {
            reputable: false,
            avatarUrl: ""
        };
    return {
        reputable: true,
        avatarUrl
    };
};
|
|
2128
2187
|
|
|
@@ -2309,8 +2368,8 @@ const createSSMClient = async () => {
|
|
|
2309
2368
|
* @returns <Array<string>> - the list of startup commands to be executed.
|
|
2310
2369
|
*/
|
|
2311
2370
|
const vmBootstrapCommand = (bucketName) => [
    "#!/bin/bash", // shebang so the EC2 user-data script runs under bash.
    `aws s3 cp s3://${bucketName}/${vmBootstrapScriptFilename} ${vmBootstrapScriptFilename}`, // copy the bootstrap script from the S3 bucket to the VM.
    `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}` // grant permission and execute.
];
|
|
2316
2375
|
/**
|
|
@@ -2331,8 +2390,13 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
|
|
|
2331
2390
|
// eslint-disable-next-line no-template-curly-in-string
|
|
2332
2391
|
"touch ${MARKER_FILE}",
|
|
2333
2392
|
"sudo yum update -y",
|
|
2334
|
-
"curl -
|
|
2335
|
-
"
|
|
2393
|
+
"curl -O https://nodejs.org/dist/v16.13.0/node-v16.13.0-linux-x64.tar.xz",
|
|
2394
|
+
"tar -xf node-v16.13.0-linux-x64.tar.xz",
|
|
2395
|
+
"mv node-v16.13.0-linux-x64 nodejs",
|
|
2396
|
+
"sudo mv nodejs /opt/",
|
|
2397
|
+
"echo 'export NODEJS_HOME=/opt/nodejs' >> /etc/profile",
|
|
2398
|
+
"echo 'export PATH=$NODEJS_HOME/bin:$PATH' >> /etc/profile",
|
|
2399
|
+
"source /etc/profile",
|
|
2336
2400
|
"npm install -g snarkjs",
|
|
2337
2401
|
`aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
|
|
2338
2402
|
`aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
|
|
@@ -2351,6 +2415,7 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
|
|
|
2351
2415
|
* @returns Array<string> - the list of commands for contribution verification.
|
|
2352
2416
|
*/
|
|
2353
2417
|
const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => [
|
|
2418
|
+
`source /etc/profile`,
|
|
2354
2419
|
`aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`,
|
|
2355
2420
|
`snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`,
|
|
2356
2421
|
`aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
|
|
@@ -2377,7 +2442,7 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
|
|
|
2377
2442
|
*/
|
|
2378
2443
|
const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
|
|
2379
2444
|
// Get the AWS variables.
|
|
2380
|
-
const { amiId,
|
|
2445
|
+
const { amiId, instanceProfileArn } = getAWSVariables();
|
|
2381
2446
|
// Parametrize the VM EC2 instance.
|
|
2382
2447
|
const params = {
|
|
2383
2448
|
ImageId: amiId,
|
|
@@ -2386,7 +2451,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2386
2451
|
MinCount: 1,
|
|
2387
2452
|
// nb. to find this: iam -> roles -> role_name.
|
|
2388
2453
|
IamInstanceProfile: {
|
|
2389
|
-
Arn:
|
|
2454
|
+
Arn: instanceProfileArn
|
|
2390
2455
|
},
|
|
2391
2456
|
// nb. for running commands at the startup.
|
|
2392
2457
|
UserData: Buffer.from(commands.join("\n")).toString("base64"),
|
|
@@ -2395,7 +2460,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2395
2460
|
DeviceName: "/dev/xvda",
|
|
2396
2461
|
Ebs: {
|
|
2397
2462
|
DeleteOnTermination: true,
|
|
2398
|
-
VolumeSize: volumeSize,
|
|
2463
|
+
VolumeSize: volumeSize, // disk size in GB.
|
|
2399
2464
|
VolumeType: diskType
|
|
2400
2465
|
}
|
|
2401
2466
|
}
|
|
@@ -2412,6 +2477,10 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2412
2477
|
{
|
|
2413
2478
|
Key: "Initialized",
|
|
2414
2479
|
Value: "false"
|
|
2480
|
+
},
|
|
2481
|
+
{
|
|
2482
|
+
Key: "ProjectName",
|
|
2483
|
+
Value: process.env.AWS_TAG_VALUE
|
|
2415
2484
|
}
|
|
2416
2485
|
]
|
|
2417
2486
|
}
|
|
@@ -2595,6 +2664,7 @@ exports.completeMultiPartUpload = completeMultiPartUpload;
|
|
|
2595
2664
|
exports.computeDiskSizeForVM = computeDiskSizeForVM;
|
|
2596
2665
|
exports.computeSHA256ToHex = computeSHA256ToHex;
|
|
2597
2666
|
exports.computeSmallestPowersOfTauForCircuit = computeSmallestPowersOfTauForCircuit;
|
|
2667
|
+
exports.contribHashRegex = contribHashRegex;
|
|
2598
2668
|
exports.convertBytesOrKbToGb = convertBytesOrKbToGb;
|
|
2599
2669
|
exports.convertToDoubleDigits = convertToDoubleDigits;
|
|
2600
2670
|
exports.createCustomLoggerForFile = createCustomLoggerForFile;
|
|
@@ -2623,6 +2693,7 @@ exports.generatePreSignedUrlsParts = generatePreSignedUrlsParts;
|
|
|
2623
2693
|
exports.generateValidContributionsAttestation = generateValidContributionsAttestation;
|
|
2624
2694
|
exports.generateZkeyFromScratch = generateZkeyFromScratch;
|
|
2625
2695
|
exports.genesisZkeyIndex = genesisZkeyIndex;
|
|
2696
|
+
exports.getAllCeremonies = getAllCeremonies;
|
|
2626
2697
|
exports.getAllCollectionDocs = getAllCollectionDocs;
|
|
2627
2698
|
exports.getBucketName = getBucketName;
|
|
2628
2699
|
exports.getCeremonyCircuits = getCeremonyCircuits;
|