@devtion/actions 0.0.0-5d170d3 → 0.0.0-5fad82d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.mjs +368 -329
- package/dist/index.node.js +369 -328
- package/dist/types/src/helpers/constants.d.ts +11 -2
- package/dist/types/src/helpers/constants.d.ts.map +1 -1
- package/dist/types/src/helpers/contracts.d.ts.map +1 -1
- package/dist/types/src/helpers/crypto.d.ts +1 -0
- package/dist/types/src/helpers/crypto.d.ts.map +1 -1
- package/dist/types/src/helpers/database.d.ts +8 -0
- package/dist/types/src/helpers/database.d.ts.map +1 -1
- package/dist/types/src/helpers/security.d.ts +2 -2
- package/dist/types/src/helpers/security.d.ts.map +1 -1
- package/dist/types/src/helpers/storage.d.ts +5 -2
- package/dist/types/src/helpers/storage.d.ts.map +1 -1
- package/dist/types/src/helpers/utils.d.ts +34 -20
- package/dist/types/src/helpers/utils.d.ts.map +1 -1
- package/dist/types/src/helpers/verification.d.ts +3 -2
- package/dist/types/src/helpers/verification.d.ts.map +1 -1
- package/dist/types/src/helpers/vm.d.ts.map +1 -1
- package/dist/types/src/index.d.ts +2 -2
- package/dist/types/src/index.d.ts.map +1 -1
- package/dist/types/src/types/index.d.ts +9 -3
- package/dist/types/src/types/index.d.ts.map +1 -1
- package/package.json +3 -8
- package/src/helpers/constants.ts +45 -31
- package/src/helpers/contracts.ts +3 -3
- package/src/helpers/database.ts +13 -0
- package/src/helpers/functions.ts +1 -1
- package/src/helpers/security.ts +33 -52
- package/src/helpers/services.ts +3 -3
- package/src/helpers/storage.ts +15 -3
- package/src/helpers/utils.ts +316 -277
- package/src/helpers/verification.ts +6 -6
- package/src/helpers/vm.ts +14 -7
- package/src/index.ts +5 -3
- package/src/types/index.ts +32 -8
package/dist/index.mjs
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @module @p0tion/actions
|
|
3
|
-
* @version 1.
|
|
3
|
+
* @version 1.2.5
|
|
4
4
|
* @file A set of actions and helpers for CLI commands
|
|
5
5
|
* @copyright Ethereum Foundation 2022
|
|
6
6
|
* @license MIT
|
|
@@ -15,10 +15,8 @@ import { onSnapshot, query, collection, getDocs, doc, getDoc, where, Timestamp,
|
|
|
15
15
|
import { zKey, groth16 } from 'snarkjs';
|
|
16
16
|
import crypto from 'crypto';
|
|
17
17
|
import blake from 'blakejs';
|
|
18
|
-
import { utils } from 'ffjavascript';
|
|
19
18
|
import winston from 'winston';
|
|
20
|
-
import {
|
|
21
|
-
import { pipeline, Readable } from 'stream';
|
|
19
|
+
import { pipeline } from 'stream';
|
|
22
20
|
import { promisify } from 'util';
|
|
23
21
|
import { initializeApp } from 'firebase/app';
|
|
24
22
|
import { signInWithCredential, initializeAuth, getAuth } from 'firebase/auth';
|
|
@@ -28,10 +26,10 @@ import { EC2Client, RunInstancesCommand, DescribeInstanceStatusCommand, StartIns
|
|
|
28
26
|
import { SSMClient, SendCommandCommand, GetCommandInvocationCommand } from '@aws-sdk/client-ssm';
|
|
29
27
|
import dotenv from 'dotenv';
|
|
30
28
|
|
|
31
|
-
// Main part for the
|
|
32
|
-
const potFileDownloadMainUrl = `https://
|
|
33
|
-
// Main part for the
|
|
34
|
-
const potFilenameTemplate = `
|
|
29
|
+
// Main part for the PPoT Phase 1 Trusted Setup URLs to download PoT files.
|
|
30
|
+
const potFileDownloadMainUrl = `https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/`;
|
|
31
|
+
// Main part for the PPoT Phase 1 Trusted Setup PoT files to be downloaded.
|
|
32
|
+
const potFilenameTemplate = `ppot_0080_`;
|
|
35
33
|
// The genesis zKey index.
|
|
36
34
|
const genesisZkeyIndex = `00000`;
|
|
37
35
|
// The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
|
|
@@ -48,6 +46,8 @@ const verifierSmartContractAcronym = "verifier";
|
|
|
48
46
|
const ec2InstanceTag = "p0tionec2instance";
|
|
49
47
|
// The name of the VM startup script file.
|
|
50
48
|
const vmBootstrapScriptFilename = "bootstrap.sh";
|
|
49
|
+
// Match hash output by snarkjs in transcript log
|
|
50
|
+
const contribHashRegex = new RegExp("Contribution.+Hash.+\n\t\t.+\n\t\t.+\n.+\n\t\t.+\r?\n");
|
|
51
51
|
/**
|
|
52
52
|
* Define the supported VM configuration types.
|
|
53
53
|
* @dev the VM configurations can be retrieved at https://aws.amazon.com/ec2/instance-types/
|
|
@@ -105,112 +105,116 @@ const vmConfigurationTypes = {
|
|
|
105
105
|
*/
|
|
106
106
|
const powersOfTauFiles = [
|
|
107
107
|
{
|
|
108
|
-
ref: "https://
|
|
108
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_01.ptau",
|
|
109
109
|
size: 0.000084
|
|
110
110
|
},
|
|
111
111
|
{
|
|
112
|
-
ref: "https://
|
|
112
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_02.ptau",
|
|
113
113
|
size: 0.000086
|
|
114
114
|
},
|
|
115
115
|
{
|
|
116
|
-
ref: "https://
|
|
116
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_03.ptau",
|
|
117
117
|
size: 0.000091
|
|
118
118
|
},
|
|
119
119
|
{
|
|
120
|
-
ref: "https://
|
|
120
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_04.ptau",
|
|
121
121
|
size: 0.0001
|
|
122
122
|
},
|
|
123
123
|
{
|
|
124
|
-
ref: "https://
|
|
124
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_05.ptau",
|
|
125
125
|
size: 0.000117
|
|
126
126
|
},
|
|
127
127
|
{
|
|
128
|
-
ref: "https://
|
|
128
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_06.ptau",
|
|
129
129
|
size: 0.000153
|
|
130
130
|
},
|
|
131
131
|
{
|
|
132
|
-
ref: "https://
|
|
132
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_07.ptau",
|
|
133
133
|
size: 0.000225
|
|
134
134
|
},
|
|
135
135
|
{
|
|
136
|
-
ref: "https://
|
|
136
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_08.ptau",
|
|
137
137
|
size: 0.0004
|
|
138
138
|
},
|
|
139
139
|
{
|
|
140
|
-
ref: "https://
|
|
140
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_09.ptau",
|
|
141
141
|
size: 0.000658
|
|
142
142
|
},
|
|
143
143
|
{
|
|
144
|
-
ref: "https://
|
|
144
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_10.ptau",
|
|
145
145
|
size: 0.0013
|
|
146
146
|
},
|
|
147
147
|
{
|
|
148
|
-
ref: "https://
|
|
148
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_11.ptau",
|
|
149
149
|
size: 0.0023
|
|
150
150
|
},
|
|
151
151
|
{
|
|
152
|
-
ref: "https://
|
|
152
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_12.ptau",
|
|
153
153
|
size: 0.0046
|
|
154
154
|
},
|
|
155
155
|
{
|
|
156
|
-
ref: "https://
|
|
156
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_13.ptau",
|
|
157
157
|
size: 0.0091
|
|
158
158
|
},
|
|
159
159
|
{
|
|
160
|
-
ref: "https://
|
|
160
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_14.ptau",
|
|
161
161
|
size: 0.0181
|
|
162
162
|
},
|
|
163
163
|
{
|
|
164
|
-
ref: "https://
|
|
164
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_15.ptau",
|
|
165
165
|
size: 0.0361
|
|
166
166
|
},
|
|
167
167
|
{
|
|
168
|
-
ref: "https://
|
|
168
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_16.ptau",
|
|
169
169
|
size: 0.0721
|
|
170
170
|
},
|
|
171
171
|
{
|
|
172
|
-
ref: "https://
|
|
172
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_17.ptau",
|
|
173
173
|
size: 0.144
|
|
174
174
|
},
|
|
175
175
|
{
|
|
176
|
-
ref: "https://
|
|
176
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_18.ptau",
|
|
177
177
|
size: 0.288
|
|
178
178
|
},
|
|
179
179
|
{
|
|
180
|
-
ref: "https://
|
|
180
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_19.ptau",
|
|
181
181
|
size: 0.576
|
|
182
182
|
},
|
|
183
183
|
{
|
|
184
|
-
ref: "https://
|
|
184
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_20.ptau",
|
|
185
185
|
size: 1.1
|
|
186
186
|
},
|
|
187
187
|
{
|
|
188
|
-
ref: "https://
|
|
188
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_21.ptau",
|
|
189
189
|
size: 2.3
|
|
190
190
|
},
|
|
191
191
|
{
|
|
192
|
-
ref: "https://
|
|
192
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_22.ptau",
|
|
193
193
|
size: 4.5
|
|
194
194
|
},
|
|
195
195
|
{
|
|
196
|
-
ref: "https://
|
|
196
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_23.ptau",
|
|
197
197
|
size: 9.0
|
|
198
198
|
},
|
|
199
199
|
{
|
|
200
|
-
ref: "https://
|
|
200
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_24.ptau",
|
|
201
201
|
size: 18.0
|
|
202
202
|
},
|
|
203
203
|
{
|
|
204
|
-
ref: "https://
|
|
204
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_25.ptau",
|
|
205
205
|
size: 36.0
|
|
206
206
|
},
|
|
207
207
|
{
|
|
208
|
-
ref: "https://
|
|
208
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_26.ptau",
|
|
209
209
|
size: 72.0
|
|
210
210
|
},
|
|
211
211
|
{
|
|
212
|
-
ref: "https://
|
|
212
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_27.ptau",
|
|
213
213
|
size: 144.0
|
|
214
|
+
},
|
|
215
|
+
{
|
|
216
|
+
ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_final.ptau",
|
|
217
|
+
size: 288.0
|
|
214
218
|
}
|
|
215
219
|
];
|
|
216
220
|
/**
|
|
@@ -244,6 +248,12 @@ const commonTerms = {
|
|
|
244
248
|
verificationStartedAt: "verificationStartedAt"
|
|
245
249
|
}
|
|
246
250
|
},
|
|
251
|
+
avatars: {
|
|
252
|
+
name: "avatars",
|
|
253
|
+
fields: {
|
|
254
|
+
avatarUrl: "avatarUrl"
|
|
255
|
+
}
|
|
256
|
+
},
|
|
247
257
|
ceremonies: {
|
|
248
258
|
name: "ceremonies",
|
|
249
259
|
fields: {
|
|
@@ -335,6 +345,8 @@ const commonTerms = {
|
|
|
335
345
|
finalizeCeremony: "finalizeCeremony",
|
|
336
346
|
downloadCircuitArtifacts: "downloadCircuitArtifacts",
|
|
337
347
|
transferObject: "transferObject",
|
|
348
|
+
bandadaValidateProof: "bandadaValidateProof",
|
|
349
|
+
checkNonceOfSIWEAddress: "checkNonceOfSIWEAddress"
|
|
338
350
|
}
|
|
339
351
|
};
|
|
340
352
|
|
|
@@ -685,19 +697,23 @@ const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey,
|
|
|
685
697
|
* @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
|
|
686
698
|
* @param ceremonyId <string> - the unique identifier of the ceremony.
|
|
687
699
|
* @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
|
|
700
|
+
* @param logger <GenericBar> - an optional logger to show progress.
|
|
688
701
|
* @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
|
|
689
702
|
*/
|
|
690
|
-
const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
|
|
703
|
+
const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks, logger) => {
|
|
691
704
|
// Keep track of uploaded chunks.
|
|
692
705
|
const uploadedChunks = alreadyUploadedChunks || [];
|
|
706
|
+
// if we were passed a logger, start it
|
|
707
|
+
if (logger)
|
|
708
|
+
logger.start(chunksWithUrls.length, 0);
|
|
693
709
|
// Loop through remaining chunks.
|
|
694
710
|
for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
|
|
695
711
|
// Consume the pre-signed url to upload the chunk.
|
|
696
712
|
// @ts-ignore
|
|
697
713
|
const response = await fetch(chunksWithUrls[i].preSignedUrl, {
|
|
698
714
|
retryOptions: {
|
|
699
|
-
retryInitialDelay: 500,
|
|
700
|
-
socketTimeout: 60000,
|
|
715
|
+
retryInitialDelay: 500, // 500 ms.
|
|
716
|
+
socketTimeout: 60000, // 60 seconds.
|
|
701
717
|
retryMaxDuration: 300000 // 5 minutes.
|
|
702
718
|
},
|
|
703
719
|
method: "PUT",
|
|
@@ -721,6 +737,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
|
|
|
721
737
|
// nb. this must be done only when contributing (not finalizing).
|
|
722
738
|
if (!!ceremonyId && !!cloudFunctions)
|
|
723
739
|
await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
|
|
740
|
+
// increment the count on the logger
|
|
741
|
+
if (logger)
|
|
742
|
+
logger.increment();
|
|
724
743
|
}
|
|
725
744
|
return uploadedChunks;
|
|
726
745
|
};
|
|
@@ -741,8 +760,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
|
|
|
741
760
|
* @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
|
|
742
761
|
* @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
|
|
743
762
|
* @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
|
|
763
|
+
* @param logger <GenericBar> - an optional logger to show progress.
|
|
744
764
|
*/
|
|
745
|
-
const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
|
|
765
|
+
const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload, logger) => {
|
|
746
766
|
// The unique identifier of the multi-part upload.
|
|
747
767
|
let multiPartUploadId = "";
|
|
748
768
|
// The list of already uploaded chunks.
|
|
@@ -766,7 +786,7 @@ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFileP
|
|
|
766
786
|
const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
|
|
767
787
|
// Step (2).
|
|
768
788
|
const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
|
|
769
|
-
cloudFunctions, ceremonyId, alreadyUploadedChunks);
|
|
789
|
+
cloudFunctions, ceremonyId, alreadyUploadedChunks, logger);
|
|
770
790
|
// Step (3).
|
|
771
791
|
await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
|
|
772
792
|
};
|
|
@@ -990,6 +1010,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
|
|
|
990
1010
|
]);
|
|
991
1011
|
return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
|
|
992
1012
|
};
|
|
1013
|
+
/**
|
|
1014
|
+
* Query all ceremonies
|
|
1015
|
+
* @notice get all ceremonies from the database.
|
|
1016
|
+
* @dev this is a helper for the CLI ceremony methods.
|
|
1017
|
+
* @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
|
|
1018
|
+
* @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
|
|
1019
|
+
*/
|
|
1020
|
+
const getAllCeremonies = async (firestoreDatabase) => {
|
|
1021
|
+
const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
|
|
1022
|
+
return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
|
|
1023
|
+
};
|
|
993
1024
|
|
|
994
1025
|
/**
|
|
995
1026
|
* @hidden
|
|
@@ -1038,207 +1069,22 @@ const compareHashes = async (path1, path2) => {
|
|
|
1038
1069
|
};
|
|
1039
1070
|
|
|
1040
1071
|
/**
|
|
1041
|
-
*
|
|
1042
|
-
* @
|
|
1043
|
-
* @
|
|
1044
|
-
* @param cleanup <boolean> - whether to delete the r1cs file after parsing
|
|
1045
|
-
* @returns any - the data to pass to the cloud function for setup and the circuit artifacts
|
|
1072
|
+
* Return a string with double digits if the provided input is one digit only.
|
|
1073
|
+
* @param in <number> - the input number to be converted.
|
|
1074
|
+
* @returns <string> - the two digits stringified number derived from the conversion.
|
|
1046
1075
|
*/
|
|
1047
|
-
const
|
|
1048
|
-
|
|
1049
|
-
|
|
1050
|
-
|
|
1051
|
-
|
|
1052
|
-
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
|
|
1057
|
-
|
|
1058
|
-
|
|
1059
|
-
throw new Error("You need to provide the data for at least 1 circuit.");
|
|
1060
|
-
// validate that the end date is in the future
|
|
1061
|
-
let endDate;
|
|
1062
|
-
let startDate;
|
|
1063
|
-
try {
|
|
1064
|
-
endDate = new Date(data.endDate);
|
|
1065
|
-
startDate = new Date(data.startDate);
|
|
1066
|
-
}
|
|
1067
|
-
catch (error) {
|
|
1068
|
-
throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
|
|
1069
|
-
}
|
|
1070
|
-
if (endDate <= startDate)
|
|
1071
|
-
throw new Error("The end date should be greater than the start date.");
|
|
1072
|
-
const currentDate = new Date();
|
|
1073
|
-
if (endDate <= currentDate || startDate <= currentDate)
|
|
1074
|
-
throw new Error("The start and end dates should be in the future.");
|
|
1075
|
-
// validate penalty
|
|
1076
|
-
if (data.penalty <= 0)
|
|
1077
|
-
throw new Error("The penalty should be greater than zero.");
|
|
1078
|
-
const circuits = [];
|
|
1079
|
-
const urlPattern = /(https?:\/\/[^\s]+)/g;
|
|
1080
|
-
const commitHashPattern = /^[a-f0-9]{40}$/i;
|
|
1081
|
-
const circuitArtifacts = [];
|
|
1082
|
-
for (let i = 0; i < data.circuits.length; i++) {
|
|
1083
|
-
const circuitData = data.circuits[i];
|
|
1084
|
-
const artifacts = circuitData.artifacts;
|
|
1085
|
-
circuitArtifacts.push({
|
|
1086
|
-
artifacts: artifacts
|
|
1087
|
-
});
|
|
1088
|
-
const r1csPath = artifacts.r1csStoragePath;
|
|
1089
|
-
const wasmPath = artifacts.wasmStoragePath;
|
|
1090
|
-
// where we storing the r1cs downloaded
|
|
1091
|
-
const localR1csPath = `./${circuitData.name}.r1cs`;
|
|
1092
|
-
// check that the artifacts exist in S3
|
|
1093
|
-
// we don't need any privileges to download this
|
|
1094
|
-
// just the correct region
|
|
1095
|
-
const s3 = new S3Client({ region: artifacts.region });
|
|
1096
|
-
try {
|
|
1097
|
-
await s3.send(new HeadObjectCommand({
|
|
1098
|
-
Bucket: artifacts.bucket,
|
|
1099
|
-
Key: r1csPath
|
|
1100
|
-
}));
|
|
1101
|
-
}
|
|
1102
|
-
catch (error) {
|
|
1103
|
-
throw new Error(`The r1cs file (${r1csPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
|
|
1104
|
-
}
|
|
1105
|
-
try {
|
|
1106
|
-
await s3.send(new HeadObjectCommand({
|
|
1107
|
-
Bucket: artifacts.bucket,
|
|
1108
|
-
Key: wasmPath
|
|
1109
|
-
}));
|
|
1110
|
-
}
|
|
1111
|
-
catch (error) {
|
|
1112
|
-
throw new Error(`The wasm file (${wasmPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
|
|
1113
|
-
}
|
|
1114
|
-
// download the r1cs to extract the metadata
|
|
1115
|
-
const command = new GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
|
|
1116
|
-
const response = await s3.send(command);
|
|
1117
|
-
const streamPipeline = promisify(pipeline);
|
|
1118
|
-
if (response.$metadata.httpStatusCode !== 200)
|
|
1119
|
-
throw new Error("There was an error while trying to download the r1cs file. Please check that the file has the correct permissions (public) set.");
|
|
1120
|
-
if (response.Body instanceof Readable)
|
|
1121
|
-
await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
|
|
1122
|
-
// extract the metadata from the r1cs
|
|
1123
|
-
const metadata = getR1CSInfo(localR1csPath);
|
|
1124
|
-
// validate that the circuit hash and template links are valid
|
|
1125
|
-
const template = circuitData.template;
|
|
1126
|
-
const URLMatch = template.source.match(urlPattern);
|
|
1127
|
-
if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
|
|
1128
|
-
throw new Error("You should provide the URL to the circuits templates on GitHub.");
|
|
1129
|
-
const hashMatch = template.commitHash.match(commitHashPattern);
|
|
1130
|
-
if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
|
|
1131
|
-
throw new Error("You should provide a valid commit hash of the circuit templates.");
|
|
1132
|
-
// calculate the hash of the r1cs file
|
|
1133
|
-
const r1csBlake2bHash = await blake512FromPath(localR1csPath);
|
|
1134
|
-
const circuitPrefix = extractPrefix(circuitData.name);
|
|
1135
|
-
// filenames
|
|
1136
|
-
const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
|
|
1137
|
-
const r1csCompleteFilename = `${circuitData.name}.r1cs`;
|
|
1138
|
-
const wasmCompleteFilename = `${circuitData.name}.wasm`;
|
|
1139
|
-
const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
|
|
1140
|
-
const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
|
|
1141
|
-
// storage paths
|
|
1142
|
-
const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
|
|
1143
|
-
const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
|
|
1144
|
-
const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
|
|
1145
|
-
const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
|
|
1146
|
-
const files = {
|
|
1147
|
-
potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
|
|
1148
|
-
r1csFilename: r1csCompleteFilename,
|
|
1149
|
-
wasmFilename: wasmCompleteFilename,
|
|
1150
|
-
initialZkeyFilename: firstZkeyCompleteFilename,
|
|
1151
|
-
potStoragePath: potStorageFilePath,
|
|
1152
|
-
r1csStoragePath: r1csStorageFilePath,
|
|
1153
|
-
wasmStoragePath: wasmStorageFilePath,
|
|
1154
|
-
initialZkeyStoragePath: zkeyStorageFilePath,
|
|
1155
|
-
r1csBlake2bHash: r1csBlake2bHash
|
|
1156
|
-
};
|
|
1157
|
-
// validate that the compiler hash is a valid hash
|
|
1158
|
-
const compiler = circuitData.compiler;
|
|
1159
|
-
const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
|
|
1160
|
-
if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
|
|
1161
|
-
throw new Error("You should provide a valid commit hash of the circuit compiler.");
|
|
1162
|
-
// validate that the verification options are valid
|
|
1163
|
-
const verification = circuitData.verification;
|
|
1164
|
-
if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
|
|
1165
|
-
throw new Error("Please enter a valid verification mechanism: either CF or VM");
|
|
1166
|
-
// @todo VM parameters verification
|
|
1167
|
-
// if (verification['cfOrVM'] === "VM") {}
|
|
1168
|
-
// check that the timeout is provided for the correct configuration
|
|
1169
|
-
let dynamicThreshold;
|
|
1170
|
-
let fixedTimeWindow;
|
|
1171
|
-
let circuit = {};
|
|
1172
|
-
if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
|
|
1173
|
-
if (circuitData.dynamicThreshold <= 0)
|
|
1174
|
-
throw new Error("The dynamic threshold should be > 0.");
|
|
1175
|
-
dynamicThreshold = circuitData.dynamicThreshold;
|
|
1176
|
-
// the Circuit data for the ceremony setup
|
|
1177
|
-
circuit = {
|
|
1178
|
-
name: circuitData.name,
|
|
1179
|
-
description: circuitData.description,
|
|
1180
|
-
prefix: circuitPrefix,
|
|
1181
|
-
sequencePosition: i + 1,
|
|
1182
|
-
metadata: metadata,
|
|
1183
|
-
files: files,
|
|
1184
|
-
template: template,
|
|
1185
|
-
compiler: compiler,
|
|
1186
|
-
verification: verification,
|
|
1187
|
-
dynamicThreshold: dynamicThreshold,
|
|
1188
|
-
avgTimings: {
|
|
1189
|
-
contributionComputation: 0,
|
|
1190
|
-
fullContribution: 0,
|
|
1191
|
-
verifyCloudFunction: 0
|
|
1192
|
-
},
|
|
1193
|
-
};
|
|
1194
|
-
}
|
|
1195
|
-
if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
|
|
1196
|
-
if (circuitData.fixedTimeWindow <= 0)
|
|
1197
|
-
throw new Error("The fixed time window threshold should be > 0.");
|
|
1198
|
-
fixedTimeWindow = circuitData.fixedTimeWindow;
|
|
1199
|
-
// the Circuit data for the ceremony setup
|
|
1200
|
-
circuit = {
|
|
1201
|
-
name: circuitData.name,
|
|
1202
|
-
description: circuitData.description,
|
|
1203
|
-
prefix: circuitPrefix,
|
|
1204
|
-
sequencePosition: i + 1,
|
|
1205
|
-
metadata: metadata,
|
|
1206
|
-
files: files,
|
|
1207
|
-
template: template,
|
|
1208
|
-
compiler: compiler,
|
|
1209
|
-
verification: verification,
|
|
1210
|
-
fixedTimeWindow: fixedTimeWindow,
|
|
1211
|
-
avgTimings: {
|
|
1212
|
-
contributionComputation: 0,
|
|
1213
|
-
fullContribution: 0,
|
|
1214
|
-
verifyCloudFunction: 0
|
|
1215
|
-
},
|
|
1216
|
-
};
|
|
1217
|
-
}
|
|
1218
|
-
circuits.push(circuit);
|
|
1219
|
-
// remove the local r1cs download (if used for verifying the config only vs setup)
|
|
1220
|
-
if (cleanup)
|
|
1221
|
-
fs.unlinkSync(localR1csPath);
|
|
1222
|
-
}
|
|
1223
|
-
const setupData = {
|
|
1224
|
-
ceremonyInputData: {
|
|
1225
|
-
title: data.title,
|
|
1226
|
-
description: data.description,
|
|
1227
|
-
startDate: startDate.valueOf(),
|
|
1228
|
-
endDate: endDate.valueOf(),
|
|
1229
|
-
timeoutMechanismType: data.timeoutMechanismType,
|
|
1230
|
-
penalty: data.penalty
|
|
1231
|
-
},
|
|
1232
|
-
ceremonyPrefix: extractPrefix(data.title),
|
|
1233
|
-
circuits: circuits,
|
|
1234
|
-
circuitArtifacts: circuitArtifacts
|
|
1235
|
-
};
|
|
1236
|
-
return setupData;
|
|
1237
|
-
}
|
|
1238
|
-
catch (error) {
|
|
1239
|
-
throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
|
|
1240
|
-
}
|
|
1241
|
-
};
|
|
1076
|
+
const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
|
|
1077
|
+
/**
|
|
1078
|
+
* Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
|
|
1079
|
+
* @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
|
|
1080
|
+
* @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
|
|
1081
|
+
* NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
|
|
1082
|
+
* @param str <string> - the arbitrary string from which to extract the prefix.
|
|
1083
|
+
* @returns <string> - the resulting prefix.
|
|
1084
|
+
*/
|
|
1085
|
+
const extractPrefix = (str) =>
|
|
1086
|
+
// eslint-disable-next-line no-useless-escape
|
|
1087
|
+
str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
|
|
1242
1088
|
/**
|
|
1243
1089
|
* Extract data from a R1CS metadata file generated with a custom file-based logger.
|
|
1244
1090
|
* @notice useful for extracting metadata circuits contained in the generated file using a logger
|
|
@@ -1295,17 +1141,6 @@ const formatZkeyIndex = (progress) => {
|
|
|
1295
1141
|
* @returns <number> - the amount of powers.
|
|
1296
1142
|
*/
|
|
1297
1143
|
const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
|
|
1298
|
-
/**
|
|
1299
|
-
* Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
|
|
1300
|
-
* @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
|
|
1301
|
-
* @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
|
|
1302
|
-
* NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
|
|
1303
|
-
* @param str <string> - the arbitrary string from which to extract the prefix.
|
|
1304
|
-
* @returns <string> - the resulting prefix.
|
|
1305
|
-
*/
|
|
1306
|
-
const extractPrefix = (str) =>
|
|
1307
|
-
// eslint-disable-next-line no-useless-escape
|
|
1308
|
-
str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
|
|
1309
1144
|
/**
|
|
1310
1145
|
* Automate the generation of an entropy for a contribution.
|
|
1311
1146
|
* @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
|
|
@@ -1372,7 +1207,9 @@ const getContributionsValidityForContributor = async (firestoreDatabase, circuit
|
|
|
1372
1207
|
* @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
|
|
1373
1208
|
* @returns <string> - the public attestation preamble.
|
|
1374
1209
|
*/
|
|
1375
|
-
const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}
|
|
1210
|
+
const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}${ceremonyName.toLowerCase().includes("trusted setup") || ceremonyName.toLowerCase().includes("ceremony")
|
|
1211
|
+
? "."
|
|
1212
|
+
: " MPC Phase2 Trusted Setup ceremony."}\nThe following are my contribution signatures:`;
|
|
1376
1213
|
/**
|
|
1377
1214
|
* Check and prepare public attestation for the contributor made only of its valid contributions.
|
|
1378
1215
|
* @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
|
|
@@ -1443,6 +1280,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
|
|
|
1443
1280
|
// Return the read bytes.
|
|
1444
1281
|
return buffer;
|
|
1445
1282
|
};
|
|
1283
|
+
/**
|
|
1284
|
+
* Given a buffer in little endian format, convert it to bigint
|
|
1285
|
+
* @param buffer
|
|
1286
|
+
* @returns
|
|
1287
|
+
*/
|
|
1288
|
+
function leBufferToBigint(buffer) {
|
|
1289
|
+
return BigInt(`0x${buffer.reverse().toString("hex")}`);
|
|
1290
|
+
}
|
|
1291
|
+
/**
|
|
1292
|
+
* Given an input containing string values, convert them
|
|
1293
|
+
* to bigint
|
|
1294
|
+
* @param input - The input to convert
|
|
1295
|
+
* @returns the input with string values converted to bigint
|
|
1296
|
+
*/
|
|
1297
|
+
const unstringifyBigInts = (input) => {
|
|
1298
|
+
if (typeof input === "string" && /^[0-9]+$/.test(input)) {
|
|
1299
|
+
return BigInt(input);
|
|
1300
|
+
}
|
|
1301
|
+
if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
|
|
1302
|
+
return BigInt(input);
|
|
1303
|
+
}
|
|
1304
|
+
if (Array.isArray(input)) {
|
|
1305
|
+
return input.map(unstringifyBigInts);
|
|
1306
|
+
}
|
|
1307
|
+
if (input === null) {
|
|
1308
|
+
return null;
|
|
1309
|
+
}
|
|
1310
|
+
if (typeof input === "object") {
|
|
1311
|
+
return Object.entries(input).reduce((acc, [key, value]) => {
|
|
1312
|
+
acc[key] = unstringifyBigInts(value);
|
|
1313
|
+
return acc;
|
|
1314
|
+
}, {});
|
|
1315
|
+
}
|
|
1316
|
+
return input;
|
|
1317
|
+
};
|
|
1446
1318
|
/**
|
|
1447
1319
|
* Return the info about the R1CS file.ù
|
|
1448
1320
|
* @dev this method was built taking inspiration from
|
|
@@ -1503,17 +1375,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1503
1375
|
let constraints = 0;
|
|
1504
1376
|
try {
|
|
1505
1377
|
// Get 'number of section' (jump magic r1cs and version1 data).
|
|
1506
|
-
const numberOfSections =
|
|
1378
|
+
const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
|
|
1507
1379
|
// Jump to first section.
|
|
1508
1380
|
pointer = 12;
|
|
1509
1381
|
// For each section
|
|
1510
1382
|
for (let i = 0; i < numberOfSections; i++) {
|
|
1511
1383
|
// Read section type.
|
|
1512
|
-
const sectionType =
|
|
1384
|
+
const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
|
|
1513
1385
|
// Jump to section size.
|
|
1514
1386
|
pointer += 4;
|
|
1515
1387
|
// Read section size
|
|
1516
|
-
const sectionSize = Number(
|
|
1388
|
+
const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
|
|
1517
1389
|
// If at header section (0x00000001 : Header Section).
|
|
1518
1390
|
if (sectionType === BigInt(1)) {
|
|
1519
1391
|
// Read info from header section.
|
|
@@ -1545,22 +1417,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1545
1417
|
*/
|
|
1546
1418
|
pointer += sectionSize - 20;
|
|
1547
1419
|
// Read R1CS info.
|
|
1548
|
-
wires = Number(
|
|
1420
|
+
wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1549
1421
|
pointer += 4;
|
|
1550
|
-
publicOutputs = Number(
|
|
1422
|
+
publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1551
1423
|
pointer += 4;
|
|
1552
|
-
publicInputs = Number(
|
|
1424
|
+
publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1553
1425
|
pointer += 4;
|
|
1554
|
-
privateInputs = Number(
|
|
1426
|
+
privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1555
1427
|
pointer += 4;
|
|
1556
|
-
labels = Number(
|
|
1428
|
+
labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
|
|
1557
1429
|
pointer += 8;
|
|
1558
|
-
constraints = Number(
|
|
1430
|
+
constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
|
|
1559
1431
|
}
|
|
1560
1432
|
pointer += 8 + Number(sectionSize);
|
|
1561
1433
|
}
|
|
1562
1434
|
return {
|
|
1563
|
-
curve: "bn-128",
|
|
1435
|
+
curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
|
|
1564
1436
|
wires,
|
|
1565
1437
|
constraints,
|
|
1566
1438
|
privateInputs,
|
|
@@ -1575,11 +1447,194 @@ const getR1CSInfo = (localR1CSFilePath) => {
|
|
|
1575
1447
|
}
|
|
1576
1448
|
};
|
|
1577
1449
|
/**
 * Parse and validate that the ceremony configuration is correct.
 * @notice this does not upload any files to storage; the r1cs and wasm artifacts
 * are only downloaded locally to extract metadata and verify they are reachable.
 * @param path <string> - the path to the configuration file (JSON).
 * @param cleanup <boolean> - whether to delete the downloaded r1cs/wasm files after parsing.
 * @returns any - the data to pass to the cloud function for setup and the circuit artifacts.
 * @throws Error - when the path does not exist, or any validation of the ceremony /
 * circuit data fails (the original error message is wrapped and rethrown).
 */
const parseCeremonyFile = async (path, cleanup = false) => {
    // check that the path exists
    if (!fs.existsSync(path))
        throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
    try {
        // read the data
        const data = JSON.parse(fs.readFileSync(path).toString());
        // verify that the data is correct
        if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
            data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
            throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
        // validate that we have at least 1 circuit input data
        if (!data.circuits || data.circuits.length === 0)
            throw new Error("You need to provide the data for at least 1 circuit.");
        // validate that the end date is in the future
        let endDate;
        let startDate;
        // NOTE(review): `new Date(...)` never throws — an unparseable string yields an
        // Invalid Date instead, which makes every comparison below false. This catch
        // block is therefore effectively unreachable; confirm whether invalid dates
        // should be detected explicitly (e.g. via Number.isNaN(date.valueOf())).
        try {
            endDate = new Date(data.endDate);
            startDate = new Date(data.startDate);
        }
        catch (error) {
            throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
        }
        if (endDate <= startDate)
            throw new Error("The end date should be greater than the start date.");
        const currentDate = new Date();
        if (endDate <= currentDate || startDate <= currentDate)
            throw new Error("The start and end dates should be in the future.");
        // validate penalty
        if (data.penalty <= 0)
            throw new Error("The penalty should be greater than zero.");
        const circuits = [];
        const urlPattern = /(https?:\/\/[^\s]+)/g;
        const commitHashPattern = /^[a-f0-9]{40}$/i; // full-length (40 hex chars) git commit hash.
        const circuitArtifacts = [];
        // Process circuits sequentially on purpose: each iteration downloads files to disk.
        for (let i = 0; i < data.circuits.length; i++) {
            const circuitData = data.circuits[i];
            const { artifacts } = circuitData;
            circuitArtifacts.push({
                artifacts
            });
            // where we storing the r1cs downloaded
            const localR1csPath = `./${circuitData.name}.r1cs`;
            // where we storing the wasm downloaded
            const localWasmPath = `./${circuitData.name}.wasm`;
            // download the r1cs to extract the metadata
            const streamPipeline = promisify(pipeline);
            // Make the call.
            const responseR1CS = await fetch(artifacts.r1csStoragePath);
            // Handle errors.
            if (!responseR1CS.ok && responseR1CS.status !== 200)
                throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            await streamPipeline(responseR1CS.body, createWriteStream(localR1csPath));
            // Write the file locally
            // extract the metadata from the r1cs
            const metadata = getR1CSInfo(localR1csPath);
            // download wasm too to ensure it's available
            const responseWASM = await fetch(artifacts.wasmStoragePath);
            if (!responseWASM.ok && responseWASM.status !== 200)
                throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            await streamPipeline(responseWASM.body, createWriteStream(localWasmPath));
            // validate that the circuit hash and template links are valid
            const { template } = circuitData;
            const URLMatch = template.source.match(urlPattern);
            // exactly one URL must appear in the template source field.
            if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
                throw new Error("You should provide the URL to the circuits templates on GitHub.");
            const hashMatch = template.commitHash.match(commitHashPattern);
            if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit templates.");
            // calculate the hash of the r1cs file
            const r1csBlake2bHash = await blake512FromPath(localR1csPath);
            const circuitPrefix = extractPrefix(circuitData.name);
            // filenames
            const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
            const r1csCompleteFilename = `${circuitData.name}.r1cs`;
            const wasmCompleteFilename = `${circuitData.name}.wasm`;
            const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
            const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
            // storage paths
            const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
            const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
            const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
            const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
            // filenames and bucket paths the setup cloud function expects for this circuit.
            const files = {
                potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
                r1csFilename: r1csCompleteFilename,
                wasmFilename: wasmCompleteFilename,
                initialZkeyFilename: firstZkeyCompleteFilename,
                potStoragePath: potStorageFilePath,
                r1csStoragePath: r1csStorageFilePath,
                wasmStoragePath: wasmStorageFilePath,
                initialZkeyStoragePath: zkeyStorageFilePath,
                r1csBlake2bHash
            };
            // validate that the compiler hash is a valid hash
            const { compiler } = circuitData;
            const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
            if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit compiler.");
            // validate that the verification options are valid
            const { verification } = circuitData;
            if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
                throw new Error("Please enter a valid verification mechanism: either CF or VM");
            // @todo VM parameters verification
            // if (verification['cfOrVM'] === "VM") {}
            // check that the timeout is provided for the correct configuration
            let dynamicThreshold;
            let fixedTimeWindow;
            let circuit = {};
            if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
                if (circuitData.dynamicThreshold <= 0)
                    throw new Error("The dynamic threshold should be > 0.");
                dynamicThreshold = circuitData.dynamicThreshold;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    dynamicThreshold,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
                if (circuitData.fixedTimeWindow <= 0)
                    throw new Error("The fixed time window threshold should be > 0.");
                fixedTimeWindow = circuitData.fixedTimeWindow;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    fixedTimeWindow,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            circuits.push(circuit);
            // remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
            if (cleanup) {
                fs.unlinkSync(localR1csPath);
                fs.unlinkSync(localWasmPath);
            }
        }
        // assemble the payload for the setup cloud function.
        const setupData = {
            ceremonyInputData: {
                title: data.title,
                description: data.description,
                startDate: startDate.valueOf(),
                endDate: endDate.valueOf(),
                timeoutMechanismType: data.timeoutMechanismType,
                penalty: data.penalty
            },
            ceremonyPrefix: extractPrefix(data.title),
            circuits,
            circuitArtifacts
        };
        return setupData;
    }
    catch (error) {
        throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
    }
};
|
|
1583
1638
|
|
|
1584
1639
|
/**
|
|
1585
1640
|
* Verify that a zKey is valid
|
|
@@ -1828,7 +1883,7 @@ const getFirestoreDatabase = (app) => getFirestore(app);
|
|
|
1828
1883
|
* @param app <FirebaseApp> - the Firebase application.
|
|
1829
1884
|
* @returns <Functions> - the Cloud Functions associated to the application.
|
|
1830
1885
|
*/
|
|
1831
|
-
// nb. the region is pinned to "europe-west1" — presumably where the Cloud Functions
// are deployed; confirm it matches the deployment region before changing it.
const getFirebaseFunctions = (app) => getFunctions(app, "europe-west1");
|
|
1832
1887
|
/**
|
|
1833
1888
|
* Retrieve the configuration variables for the AWS services (S3, EC2).
|
|
1834
1889
|
* @returns <AWSVariables> - the values of the AWS services configuration variables.
|
|
@@ -1837,14 +1892,14 @@ const getAWSVariables = () => {
|
|
|
1837
1892
|
if (!process.env.AWS_ACCESS_KEY_ID ||
|
|
1838
1893
|
!process.env.AWS_SECRET_ACCESS_KEY ||
|
|
1839
1894
|
!process.env.AWS_REGION ||
|
|
1840
|
-
!process.env.
|
|
1895
|
+
!process.env.AWS_INSTANCE_PROFILE_ARN ||
|
|
1841
1896
|
!process.env.AWS_AMI_ID)
|
|
1842
1897
|
throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
|
|
1843
1898
|
return {
|
|
1844
1899
|
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
|
|
1845
1900
|
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
|
|
1846
1901
|
region: process.env.AWS_REGION || "us-east-1",
|
|
1847
|
-
|
|
1902
|
+
instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
|
|
1848
1903
|
amiId: process.env.AWS_AMI_ID
|
|
1849
1904
|
};
|
|
1850
1905
|
};
|
|
@@ -1925,11 +1980,11 @@ const p256 = (proofPart) => {
|
|
|
1925
1980
|
*/
|
|
1926
1981
|
const formatSolidityCalldata = (circuitInput, _proof) => {
|
|
1927
1982
|
try {
|
|
1928
|
-
const proof =
|
|
1983
|
+
const proof = unstringifyBigInts(_proof);
|
|
1929
1984
|
// format the public inputs to the circuit
|
|
1930
1985
|
const formattedCircuitInput = [];
|
|
1931
1986
|
for (const cInput of circuitInput) {
|
|
1932
|
-
formattedCircuitInput.push(p256(
|
|
1987
|
+
formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
|
|
1933
1988
|
}
|
|
1934
1989
|
// construct calldata
|
|
1935
1990
|
const calldata = {
|
|
@@ -2079,55 +2134,28 @@ const verifyCeremony = async (functions, firestore, ceremonyPrefix, outputDirect
|
|
|
2079
2134
|
};
|
|
2080
2135
|
|
|
2081
2136
|
/**
 * This function queries the GitHub API to fetch users statistics
 * @param user {string} the user uid
 * @returns {any} the stats from the GitHub API
 */
const getGitHubStats = async (user) => {
    // Authenticated request using the coordinator-provided token.
    const response = await fetch(`https://api.github.com/user/${user}`, {
        method: "GET",
        headers: {
            Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}`
        }
    });
    // Anything other than a 200 means the stats could not be retrieved.
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the user's statistic. Please try again.");
    // Keep only the fields the reputation checks need, renaming GitHub's
    // snake_case fields to camelCase (`created_at` doubles as the account "age").
    const { following, followers, public_repos: publicRepos, avatar_url: avatarUrl, created_at: age } = await response.json();
    return { following, followers, publicRepos, avatarUrl, age };
};
|
|
2132
2160
|
/**
|
|
2133
2161
|
* This function will check if the user is reputable enough to be able to use the app
|
|
@@ -2135,19 +2163,24 @@ const getNumberOfFollowingGitHub = async (user) => {
|
|
|
2135
2163
|
* @param minimumAmountOfFollowing <number> The minimum amount of following the user should have
|
|
2136
2164
|
* @param minimumAmountOfFollowers <number> The minimum amount of followers the user should have
|
|
2137
2165
|
* @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
|
|
2138
|
-
* @returns <
|
|
2166
|
+
* @returns <any> Return the avatar URL of the user if the user is reputable, false otherwise
|
|
2139
2167
|
*/
|
|
2140
|
-
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge) => {
    // The token is required because getGitHubStats authenticates its API call with it.
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registation, and then try again.");
    // Fetch the user's public stats: counts, avatar URL and account creation date.
    const { following, followers, publicRepos, avatarUrl, age } = await getGitHubStats(userLogin);
    // Reject when any threshold is unmet, or when the account is too young:
    // `age` is the account's created_at timestamp, so an account created after
    // (now - minimumAge) has not existed for the required minimum duration.
    // NOTE(review): minimumAge is subtracted from Date.now(), so it is presumably
    // expressed in milliseconds — confirm against callers.
    if (following < minimumAmountOfFollowing ||
        publicRepos < minimumAmountOfPublicRepos ||
        followers < minimumAmountOfFollowers ||
        new Date(age) > new Date(Date.now() - minimumAge))
        // Not reputable: avatar is deliberately withheld.
        return {
            reputable: false,
            avatarUrl: ""
        };
    // All checks passed — expose the avatar URL for display.
    return {
        reputable: true,
        avatarUrl
    };
};
|
|
2152
2185
|
|
|
2153
2186
|
/**
|
|
@@ -2333,8 +2366,8 @@ const createSSMClient = async () => {
|
|
|
2333
2366
|
* @returns <Array<string>> - the list of startup commands to be executed.
|
|
2334
2367
|
*/
|
|
2335
2368
|
const vmBootstrapCommand = (bucketName) => {
    // Location of the bootstrap script inside the ceremony bucket.
    const scriptOnS3 = `s3://${bucketName}/${vmBootstrapScriptFilename}`;
    return [
        "#!/bin/bash", // shebang.
        `aws s3 cp ${scriptOnS3} ${vmBootstrapScriptFilename}`, // copy file from S3 bucket to VM.
        `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}` // grant permission and execute.
    ];
};
|
|
2340
2373
|
/**
|
|
@@ -2355,8 +2388,13 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
|
|
|
2355
2388
|
// eslint-disable-next-line no-template-curly-in-string
|
|
2356
2389
|
"touch ${MARKER_FILE}",
|
|
2357
2390
|
"sudo yum update -y",
|
|
2358
|
-
"curl -
|
|
2359
|
-
"
|
|
2391
|
+
"curl -O https://nodejs.org/dist/v16.13.0/node-v16.13.0-linux-x64.tar.xz",
|
|
2392
|
+
"tar -xf node-v16.13.0-linux-x64.tar.xz",
|
|
2393
|
+
"mv node-v16.13.0-linux-x64 nodejs",
|
|
2394
|
+
"sudo mv nodejs /opt/",
|
|
2395
|
+
"echo 'export NODEJS_HOME=/opt/nodejs' >> /etc/profile",
|
|
2396
|
+
"echo 'export PATH=$NODEJS_HOME/bin:$PATH' >> /etc/profile",
|
|
2397
|
+
"source /etc/profile",
|
|
2360
2398
|
"npm install -g snarkjs",
|
|
2361
2399
|
`aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
|
|
2362
2400
|
`aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
|
|
@@ -2375,6 +2413,7 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
|
|
|
2375
2413
|
* @returns Array<string> - the list of commands for contribution verification.
|
|
2376
2414
|
*/
|
|
2377
2415
|
const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => [
|
|
2416
|
+
`source /etc/profile`,
|
|
2378
2417
|
`aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`,
|
|
2379
2418
|
`snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`,
|
|
2380
2419
|
`aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
|
|
@@ -2401,7 +2440,7 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
|
|
|
2401
2440
|
*/
|
|
2402
2441
|
const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
|
|
2403
2442
|
// Get the AWS variables.
|
|
2404
|
-
const { amiId,
|
|
2443
|
+
const { amiId, instanceProfileArn } = getAWSVariables();
|
|
2405
2444
|
// Parametrize the VM EC2 instance.
|
|
2406
2445
|
const params = {
|
|
2407
2446
|
ImageId: amiId,
|
|
@@ -2410,7 +2449,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2410
2449
|
MinCount: 1,
|
|
2411
2450
|
// nb. to find this: iam -> roles -> role_name.
|
|
2412
2451
|
IamInstanceProfile: {
|
|
2413
|
-
Arn:
|
|
2452
|
+
Arn: instanceProfileArn
|
|
2414
2453
|
},
|
|
2415
2454
|
// nb. for running commands at the startup.
|
|
2416
2455
|
UserData: Buffer.from(commands.join("\n")).toString("base64"),
|
|
@@ -2419,7 +2458,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
|
|
|
2419
2458
|
DeviceName: "/dev/xvda",
|
|
2420
2459
|
Ebs: {
|
|
2421
2460
|
DeleteOnTermination: true,
|
|
2422
|
-
VolumeSize: volumeSize,
|
|
2461
|
+
VolumeSize: volumeSize, // disk size in GB.
|
|
2423
2462
|
VolumeType: diskType
|
|
2424
2463
|
}
|
|
2425
2464
|
}
|
|
@@ -2605,4 +2644,4 @@ const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
|
|
|
2605
2644
|
}
|
|
2606
2645
|
};
|
|
2607
2646
|
|
|
2608
|
-
export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
|
|
2647
|
+
export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, contribHashRegex, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCeremonies, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
|