@devtion/actions 0.0.0-9c50f66 → 0.0.0-9d46256

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. package/README.md +1 -1
  2. package/dist/index.mjs +373 -284
  3. package/dist/index.node.js +374 -283
  4. package/dist/types/src/helpers/constants.d.ts +5 -2
  5. package/dist/types/src/helpers/constants.d.ts.map +1 -1
  6. package/dist/types/src/helpers/contracts.d.ts.map +1 -1
  7. package/dist/types/src/helpers/crypto.d.ts +1 -0
  8. package/dist/types/src/helpers/crypto.d.ts.map +1 -1
  9. package/dist/types/src/helpers/database.d.ts +8 -0
  10. package/dist/types/src/helpers/database.d.ts.map +1 -1
  11. package/dist/types/src/helpers/security.d.ts +1 -1
  12. package/dist/types/src/helpers/security.d.ts.map +1 -1
  13. package/dist/types/src/helpers/storage.d.ts +5 -2
  14. package/dist/types/src/helpers/storage.d.ts.map +1 -1
  15. package/dist/types/src/helpers/utils.d.ts +34 -20
  16. package/dist/types/src/helpers/utils.d.ts.map +1 -1
  17. package/dist/types/src/helpers/verification.d.ts +3 -2
  18. package/dist/types/src/helpers/verification.d.ts.map +1 -1
  19. package/dist/types/src/helpers/vm.d.ts.map +1 -1
  20. package/dist/types/src/index.d.ts +2 -2
  21. package/dist/types/src/index.d.ts.map +1 -1
  22. package/dist/types/src/types/index.d.ts +9 -3
  23. package/dist/types/src/types/index.d.ts.map +1 -1
  24. package/package.json +3 -8
  25. package/src/helpers/constants.ts +39 -31
  26. package/src/helpers/contracts.ts +3 -3
  27. package/src/helpers/database.ts +13 -0
  28. package/src/helpers/functions.ts +1 -1
  29. package/src/helpers/security.ts +11 -10
  30. package/src/helpers/services.ts +3 -3
  31. package/src/helpers/storage.ts +15 -3
  32. package/src/helpers/utils.ts +336 -277
  33. package/src/helpers/verification.ts +6 -6
  34. package/src/helpers/vm.ts +28 -7
  35. package/src/index.ts +5 -3
  36. package/src/types/index.ts +32 -8
package/dist/index.mjs CHANGED
@@ -1,6 +1,6 @@
1
1
  /**
2
2
  * @module @p0tion/actions
3
- * @version 1.0.5
3
+ * @version 1.2.8
4
4
  * @file A set of actions and helpers for CLI commands
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -15,10 +15,8 @@ import { onSnapshot, query, collection, getDocs, doc, getDoc, where, Timestamp,
15
15
  import { zKey, groth16 } from 'snarkjs';
16
16
  import crypto from 'crypto';
17
17
  import blake from 'blakejs';
18
- import { utils } from 'ffjavascript';
19
18
  import winston from 'winston';
20
- import { S3Client, HeadObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';
21
- import { pipeline, Readable } from 'stream';
19
+ import { pipeline } from 'stream';
22
20
  import { promisify } from 'util';
23
21
  import { initializeApp } from 'firebase/app';
24
22
  import { signInWithCredential, initializeAuth, getAuth } from 'firebase/auth';
@@ -28,10 +26,10 @@ import { EC2Client, RunInstancesCommand, DescribeInstanceStatusCommand, StartIns
28
26
  import { SSMClient, SendCommandCommand, GetCommandInvocationCommand } from '@aws-sdk/client-ssm';
29
27
  import dotenv from 'dotenv';
30
28
 
31
- // Main part for the Hermez Phase 1 Trusted Setup URLs to download PoT files.
32
- const potFileDownloadMainUrl = `https://hermez.s3-eu-west-1.amazonaws.com/`;
33
- // Main part for the Hermez Phase 1 Trusted Setup PoT files to be downloaded.
34
- const potFilenameTemplate = `powersOfTau28_hez_final_`;
29
+ // Main part for the PPoT Phase 1 Trusted Setup URLs to download PoT files.
30
+ const potFileDownloadMainUrl = `https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/`;
31
+ // Main part for the PPoT Phase 1 Trusted Setup PoT files to be downloaded.
32
+ const potFilenameTemplate = `ppot_0080_`;
35
33
  // The genesis zKey index.
36
34
  const genesisZkeyIndex = `00000`;
37
35
  // The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
@@ -48,6 +46,8 @@ const verifierSmartContractAcronym = "verifier";
48
46
  const ec2InstanceTag = "p0tionec2instance";
49
47
  // The name of the VM startup script file.
50
48
  const vmBootstrapScriptFilename = "bootstrap.sh";
49
+ // Match hash output by snarkjs in transcript log
50
+ const contribHashRegex = /Contribution.+Hash.+\s+.+\s+.+\s+.+\s+.+\s*/;
51
51
  /**
52
52
  * Define the supported VM configuration types.
53
53
  * @dev the VM configurations can be retrieved at https://aws.amazon.com/ec2/instance-types/
@@ -105,112 +105,116 @@ const vmConfigurationTypes = {
105
105
  */
106
106
  const powersOfTauFiles = [
107
107
  {
108
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_01.ptau",
108
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_01.ptau",
109
109
  size: 0.000084
110
110
  },
111
111
  {
112
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_02.ptau",
112
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_02.ptau",
113
113
  size: 0.000086
114
114
  },
115
115
  {
116
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_03.ptau",
116
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_03.ptau",
117
117
  size: 0.000091
118
118
  },
119
119
  {
120
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_04.ptau",
120
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_04.ptau",
121
121
  size: 0.0001
122
122
  },
123
123
  {
124
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_05.ptau",
124
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_05.ptau",
125
125
  size: 0.000117
126
126
  },
127
127
  {
128
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_06.ptau",
128
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_06.ptau",
129
129
  size: 0.000153
130
130
  },
131
131
  {
132
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_07.ptau",
132
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_07.ptau",
133
133
  size: 0.000225
134
134
  },
135
135
  {
136
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_08.ptau",
136
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_08.ptau",
137
137
  size: 0.0004
138
138
  },
139
139
  {
140
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_09.ptau",
140
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_09.ptau",
141
141
  size: 0.000658
142
142
  },
143
143
  {
144
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_10.ptau",
144
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_10.ptau",
145
145
  size: 0.0013
146
146
  },
147
147
  {
148
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_11.ptau",
148
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_11.ptau",
149
149
  size: 0.0023
150
150
  },
151
151
  {
152
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_12.ptau",
152
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_12.ptau",
153
153
  size: 0.0046
154
154
  },
155
155
  {
156
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_13.ptau",
156
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_13.ptau",
157
157
  size: 0.0091
158
158
  },
159
159
  {
160
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_14.ptau",
160
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_14.ptau",
161
161
  size: 0.0181
162
162
  },
163
163
  {
164
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_15.ptau",
164
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_15.ptau",
165
165
  size: 0.0361
166
166
  },
167
167
  {
168
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_16.ptau",
168
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_16.ptau",
169
169
  size: 0.0721
170
170
  },
171
171
  {
172
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_17.ptau",
172
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_17.ptau",
173
173
  size: 0.144
174
174
  },
175
175
  {
176
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_18.ptau",
176
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_18.ptau",
177
177
  size: 0.288
178
178
  },
179
179
  {
180
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_19.ptau",
180
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_19.ptau",
181
181
  size: 0.576
182
182
  },
183
183
  {
184
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_20.ptau",
184
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_20.ptau",
185
185
  size: 1.1
186
186
  },
187
187
  {
188
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_21.ptau",
188
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_21.ptau",
189
189
  size: 2.3
190
190
  },
191
191
  {
192
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_22.ptau",
192
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_22.ptau",
193
193
  size: 4.5
194
194
  },
195
195
  {
196
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_23.ptau",
196
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_23.ptau",
197
197
  size: 9.0
198
198
  },
199
199
  {
200
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_24.ptau",
200
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_24.ptau",
201
201
  size: 18.0
202
202
  },
203
203
  {
204
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_25.ptau",
204
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_25.ptau",
205
205
  size: 36.0
206
206
  },
207
207
  {
208
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_26.ptau",
208
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_26.ptau",
209
209
  size: 72.0
210
210
  },
211
211
  {
212
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_27.ptau",
212
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_27.ptau",
213
213
  size: 144.0
214
+ },
215
+ {
216
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_final.ptau",
217
+ size: 288.0
214
218
  }
215
219
  ];
216
220
  /**
@@ -341,6 +345,8 @@ const commonTerms = {
341
345
  finalizeCeremony: "finalizeCeremony",
342
346
  downloadCircuitArtifacts: "downloadCircuitArtifacts",
343
347
  transferObject: "transferObject",
348
+ bandadaValidateProof: "bandadaValidateProof",
349
+ checkNonceOfSIWEAddress: "checkNonceOfSIWEAddress"
344
350
  }
345
351
  };
346
352
 
@@ -691,19 +697,23 @@ const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey,
691
697
  * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
692
698
  * @param ceremonyId <string> - the unique identifier of the ceremony.
693
699
  * @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
700
+ * @param logger <GenericBar> - an optional logger to show progress.
694
701
  * @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
695
702
  */
696
- const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
703
+ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks, logger) => {
697
704
  // Keep track of uploaded chunks.
698
705
  const uploadedChunks = alreadyUploadedChunks || [];
706
+ // if we were passed a logger, start it
707
+ if (logger)
708
+ logger.start(chunksWithUrls.length, 0);
699
709
  // Loop through remaining chunks.
700
710
  for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
701
711
  // Consume the pre-signed url to upload the chunk.
702
712
  // @ts-ignore
703
713
  const response = await fetch(chunksWithUrls[i].preSignedUrl, {
704
714
  retryOptions: {
705
- retryInitialDelay: 500,
706
- socketTimeout: 60000,
715
+ retryInitialDelay: 500, // 500 ms.
716
+ socketTimeout: 60000, // 60 seconds.
707
717
  retryMaxDuration: 300000 // 5 minutes.
708
718
  },
709
719
  method: "PUT",
@@ -727,6 +737,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
727
737
  // nb. this must be done only when contributing (not finalizing).
728
738
  if (!!ceremonyId && !!cloudFunctions)
729
739
  await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
740
+ // increment the count on the logger
741
+ if (logger)
742
+ logger.increment();
730
743
  }
731
744
  return uploadedChunks;
732
745
  };
@@ -747,8 +760,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
747
760
  * @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
748
761
  * @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
749
762
  * @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
763
+ * @param logger <GenericBar> - an optional logger to show progress.
750
764
  */
751
- const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
765
+ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload, logger) => {
752
766
  // The unique identifier of the multi-part upload.
753
767
  let multiPartUploadId = "";
754
768
  // The list of already uploaded chunks.
@@ -772,7 +786,7 @@ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFileP
772
786
  const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
773
787
  // Step (2).
774
788
  const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
775
- cloudFunctions, ceremonyId, alreadyUploadedChunks);
789
+ cloudFunctions, ceremonyId, alreadyUploadedChunks, logger);
776
790
  // Step (3).
777
791
  await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
778
792
  };
@@ -996,6 +1010,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
996
1010
  ]);
997
1011
  return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
998
1012
  };
1013
+ /**
1014
+ * Query all ceremonies
1015
+ * @notice get all ceremonies from the database.
1016
+ * @dev this is a helper for the CLI ceremony methods.
1017
+ * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
1018
+ * @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
1019
+ */
1020
+ const getAllCeremonies = async (firestoreDatabase) => {
1021
+ const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
1022
+ return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
1023
+ };
999
1024
 
1000
1025
  /**
1001
1026
  * @hidden
@@ -1044,207 +1069,22 @@ const compareHashes = async (path1, path2) => {
1044
1069
  };
1045
1070
 
1046
1071
  /**
1047
- * Parse and validate that the ceremony configuration is correct
1048
- * @notice this does not upload any files to storage
1049
- * @param path <string> - the path to the configuration file
1050
- * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1051
- * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1072
+ * Return a string with double digits if the provided input is one digit only.
1073
+ * @param in <number> - the input number to be converted.
1074
+ * @returns <string> - the two digits stringified number derived from the conversion.
1052
1075
  */
1053
- const parseCeremonyFile = async (path, cleanup = false) => {
1054
- // check that the path exists
1055
- if (!fs.existsSync(path))
1056
- throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
1057
- try {
1058
- // read the data
1059
- const data = JSON.parse(fs.readFileSync(path).toString());
1060
- // verify that the data is correct
1061
- if (data['timeoutMechanismType'] !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ && data['timeoutMechanismType'] !== "FIXED" /* CeremonyTimeoutType.FIXED */)
1062
- throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
1063
- // validate that we have at least 1 circuit input data
1064
- if (!data.circuits || data.circuits.length === 0)
1065
- throw new Error("You need to provide the data for at least 1 circuit.");
1066
- // validate that the end date is in the future
1067
- let endDate;
1068
- let startDate;
1069
- try {
1070
- endDate = new Date(data.endDate);
1071
- startDate = new Date(data.startDate);
1072
- }
1073
- catch (error) {
1074
- throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
1075
- }
1076
- if (endDate <= startDate)
1077
- throw new Error("The end date should be greater than the start date.");
1078
- const currentDate = new Date();
1079
- if (endDate <= currentDate || startDate <= currentDate)
1080
- throw new Error("The start and end dates should be in the future.");
1081
- // validate penalty
1082
- if (data.penalty <= 0)
1083
- throw new Error("The penalty should be greater than zero.");
1084
- const circuits = [];
1085
- const urlPattern = /(https?:\/\/[^\s]+)/g;
1086
- const commitHashPattern = /^[a-f0-9]{40}$/i;
1087
- const circuitArtifacts = [];
1088
- for (let i = 0; i < data.circuits.length; i++) {
1089
- const circuitData = data.circuits[i];
1090
- const artifacts = circuitData.artifacts;
1091
- circuitArtifacts.push({
1092
- artifacts: artifacts
1093
- });
1094
- const r1csPath = artifacts.r1csStoragePath;
1095
- const wasmPath = artifacts.wasmStoragePath;
1096
- // where we storing the r1cs downloaded
1097
- const localR1csPath = `./${circuitData.name}.r1cs`;
1098
- // check that the artifacts exist in S3
1099
- // we don't need any privileges to download this
1100
- // just the correct region
1101
- const s3 = new S3Client({ region: artifacts.region });
1102
- try {
1103
- await s3.send(new HeadObjectCommand({
1104
- Bucket: artifacts.bucket,
1105
- Key: r1csPath
1106
- }));
1107
- }
1108
- catch (error) {
1109
- throw new Error(`The r1cs file (${r1csPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1110
- }
1111
- try {
1112
- await s3.send(new HeadObjectCommand({
1113
- Bucket: artifacts.bucket,
1114
- Key: wasmPath
1115
- }));
1116
- }
1117
- catch (error) {
1118
- throw new Error(`The wasm file (${wasmPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1119
- }
1120
- // download the r1cs to extract the metadata
1121
- const command = new GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
1122
- const response = await s3.send(command);
1123
- const streamPipeline = promisify(pipeline);
1124
- if (response.$metadata.httpStatusCode !== 200)
1125
- throw new Error("There was an error while trying to download the r1cs file. Please check that the file has the correct permissions (public) set.");
1126
- if (response.Body instanceof Readable)
1127
- await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
1128
- // extract the metadata from the r1cs
1129
- const metadata = getR1CSInfo(localR1csPath);
1130
- // validate that the circuit hash and template links are valid
1131
- const template = circuitData.template;
1132
- const URLMatch = template.source.match(urlPattern);
1133
- if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
1134
- throw new Error("You should provide the URL to the circuits templates on GitHub.");
1135
- const hashMatch = template.commitHash.match(commitHashPattern);
1136
- if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
1137
- throw new Error("You should provide a valid commit hash of the circuit templates.");
1138
- // calculate the hash of the r1cs file
1139
- const r1csBlake2bHash = await blake512FromPath(localR1csPath);
1140
- const circuitPrefix = extractPrefix(circuitData.name);
1141
- // filenames
1142
- const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
1143
- const r1csCompleteFilename = `${circuitData.name}.r1cs`;
1144
- const wasmCompleteFilename = `${circuitData.name}.wasm`;
1145
- const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
1146
- const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
1147
- // storage paths
1148
- const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
1149
- const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
1150
- const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
1151
- const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
1152
- const files = {
1153
- potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
1154
- r1csFilename: r1csCompleteFilename,
1155
- wasmFilename: wasmCompleteFilename,
1156
- initialZkeyFilename: firstZkeyCompleteFilename,
1157
- potStoragePath: potStorageFilePath,
1158
- r1csStoragePath: r1csStorageFilePath,
1159
- wasmStoragePath: wasmStorageFilePath,
1160
- initialZkeyStoragePath: zkeyStorageFilePath,
1161
- r1csBlake2bHash: r1csBlake2bHash
1162
- };
1163
- // validate that the compiler hash is a valid hash
1164
- const compiler = circuitData.compiler;
1165
- const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
1166
- if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
1167
- throw new Error("You should provide a valid commit hash of the circuit compiler.");
1168
- // validate that the verification options are valid
1169
- const verification = circuitData.verification;
1170
- if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
1171
- throw new Error("Please enter a valid verification mechanism: either CF or VM");
1172
- // @todo VM parameters verification
1173
- // if (verification['cfOrVM'] === "VM") {}
1174
- // check that the timeout is provided for the correct configuration
1175
- let dynamicThreshold;
1176
- let fixedTimeWindow;
1177
- let circuit = {};
1178
- if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
1179
- if (circuitData.dynamicThreshold <= 0)
1180
- throw new Error("The dynamic threshold should be > 0.");
1181
- dynamicThreshold = circuitData.dynamicThreshold;
1182
- // the Circuit data for the ceremony setup
1183
- circuit = {
1184
- name: circuitData.name,
1185
- description: circuitData.description,
1186
- prefix: circuitPrefix,
1187
- sequencePosition: i + 1,
1188
- metadata: metadata,
1189
- files: files,
1190
- template: template,
1191
- compiler: compiler,
1192
- verification: verification,
1193
- dynamicThreshold: dynamicThreshold,
1194
- avgTimings: {
1195
- contributionComputation: 0,
1196
- fullContribution: 0,
1197
- verifyCloudFunction: 0
1198
- },
1199
- };
1200
- }
1201
- if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
1202
- if (circuitData.fixedTimeWindow <= 0)
1203
- throw new Error("The fixed time window threshold should be > 0.");
1204
- fixedTimeWindow = circuitData.fixedTimeWindow;
1205
- // the Circuit data for the ceremony setup
1206
- circuit = {
1207
- name: circuitData.name,
1208
- description: circuitData.description,
1209
- prefix: circuitPrefix,
1210
- sequencePosition: i + 1,
1211
- metadata: metadata,
1212
- files: files,
1213
- template: template,
1214
- compiler: compiler,
1215
- verification: verification,
1216
- fixedTimeWindow: fixedTimeWindow,
1217
- avgTimings: {
1218
- contributionComputation: 0,
1219
- fullContribution: 0,
1220
- verifyCloudFunction: 0
1221
- },
1222
- };
1223
- }
1224
- circuits.push(circuit);
1225
- // remove the local r1cs download (if used for verifying the config only vs setup)
1226
- if (cleanup)
1227
- fs.unlinkSync(localR1csPath);
1228
- }
1229
- const setupData = {
1230
- ceremonyInputData: {
1231
- title: data.title,
1232
- description: data.description,
1233
- startDate: startDate.valueOf(),
1234
- endDate: endDate.valueOf(),
1235
- timeoutMechanismType: data.timeoutMechanismType,
1236
- penalty: data.penalty
1237
- },
1238
- ceremonyPrefix: extractPrefix(data.title),
1239
- circuits: circuits,
1240
- circuitArtifacts: circuitArtifacts
1241
- };
1242
- return setupData;
1243
- }
1244
- catch (error) {
1245
- throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
1246
- }
1247
- };
1076
+ const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
1077
+ /**
1078
+ * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1079
+ * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1080
+ * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1081
+ * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1082
+ * @param str <string> - the arbitrary string from which to extract the prefix.
1083
+ * @returns <string> - the resulting prefix.
1084
+ */
1085
+ const extractPrefix = (str) =>
1086
+ // eslint-disable-next-line no-useless-escape
1087
+ str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1248
1088
  /**
1249
1089
  * Extract data from a R1CS metadata file generated with a custom file-based logger.
1250
1090
  * @notice useful for extracting metadata circuits contained in the generated file using a logger
@@ -1301,17 +1141,6 @@ const formatZkeyIndex = (progress) => {
1301
1141
  * @returns <number> - the amount of powers.
1302
1142
  */
1303
1143
  const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
1304
- /**
1305
- * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1306
- * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1307
- * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1308
- * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1309
- * @param str <string> - the arbitrary string from which to extract the prefix.
1310
- * @returns <string> - the resulting prefix.
1311
- */
1312
- const extractPrefix = (str) =>
1313
- // eslint-disable-next-line no-useless-escape
1314
- str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1315
1144
  /**
1316
1145
  * Automate the generation of an entropy for a contribution.
1317
1146
  * @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
@@ -1378,7 +1207,9 @@ const getContributionsValidityForContributor = async (firestoreDatabase, circuit
1378
1207
  * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
1379
1208
  * @returns <string> - the public attestation preamble.
1380
1209
  */
1381
- const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName} MPC Phase2 Trusted Setup ceremony.\nThe following are my contribution signatures:`;
1210
+ const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}${ceremonyName.toLowerCase().includes("trusted setup") || ceremonyName.toLowerCase().includes("ceremony")
1211
+ ? "."
1212
+ : " MPC Phase2 Trusted Setup ceremony."}\nThe following are my contribution signatures:`;
1382
1213
  /**
1383
1214
  * Check and prepare public attestation for the contributor made only of its valid contributions.
1384
1215
  * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
@@ -1449,6 +1280,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
1449
1280
  // Return the read bytes.
1450
1281
  return buffer;
1451
1282
  };
1283
+ /**
1284
+ * Given a buffer in little endian format, convert it to bigint
1285
+ * @param buffer
1286
+ * @returns
1287
+ */
1288
+ function leBufferToBigint(buffer) {
1289
+ return BigInt(`0x${buffer.reverse().toString("hex")}`);
1290
+ }
1291
+ /**
1292
+ * Given an input containing string values, convert them
1293
+ * to bigint
1294
+ * @param input - The input to convert
1295
+ * @returns the input with string values converted to bigint
1296
+ */
1297
+ const unstringifyBigInts = (input) => {
1298
+ if (typeof input === "string" && /^[0-9]+$/.test(input)) {
1299
+ return BigInt(input);
1300
+ }
1301
+ if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
1302
+ return BigInt(input);
1303
+ }
1304
+ if (Array.isArray(input)) {
1305
+ return input.map(unstringifyBigInts);
1306
+ }
1307
+ if (input === null) {
1308
+ return null;
1309
+ }
1310
+ if (typeof input === "object") {
1311
+ return Object.entries(input).reduce((acc, [key, value]) => {
1312
+ acc[key] = unstringifyBigInts(value);
1313
+ return acc;
1314
+ }, {});
1315
+ }
1316
+ return input;
1317
+ };
1452
1318
  /**
1453
1319
  * Return the info about the R1CS file.ù
1454
1320
  * @dev this method was built taking inspiration from
@@ -1509,17 +1375,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
1509
1375
  let constraints = 0;
1510
1376
  try {
1511
1377
  // Get 'number of section' (jump magic r1cs and version1 data).
1512
- const numberOfSections = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1378
+ const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1513
1379
  // Jump to first section.
1514
1380
  pointer = 12;
1515
1381
  // For each section
1516
1382
  for (let i = 0; i < numberOfSections; i++) {
1517
1383
  // Read section type.
1518
- const sectionType = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1384
+ const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1519
1385
  // Jump to section size.
1520
1386
  pointer += 4;
1521
1387
  // Read section size
1522
- const sectionSize = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1388
+ const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1523
1389
  // If at header section (0x00000001 : Header Section).
1524
1390
  if (sectionType === BigInt(1)) {
1525
1391
  // Read info from header section.
@@ -1551,22 +1417,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
1551
1417
  */
1552
1418
  pointer += sectionSize - 20;
1553
1419
  // Read R1CS info.
1554
- wires = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1420
+ wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1555
1421
  pointer += 4;
1556
- publicOutputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1422
+ publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1557
1423
  pointer += 4;
1558
- publicInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1424
+ publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1559
1425
  pointer += 4;
1560
- privateInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1426
+ privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1561
1427
  pointer += 4;
1562
- labels = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1428
+ labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1563
1429
  pointer += 8;
1564
- constraints = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1430
+ constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1565
1431
  }
1566
1432
  pointer += 8 + Number(sectionSize);
1567
1433
  }
1568
1434
  return {
1569
- curve: "bn-128",
1435
+ curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
1570
1436
  wires,
1571
1437
  constraints,
1572
1438
  privateInputs,
@@ -1581,11 +1447,212 @@ const getR1CSInfo = (localR1CSFilePath) => {
1581
1447
  }
1582
1448
  };
1583
1449
/**
 * Parse and validate that the ceremony configuration is correct.
 * @notice this does not upload any files to storage.
 * @param path <string> - the path to the configuration file.
 * @param cleanup <boolean> - whether to delete the downloaded r1cs/wasm files after parsing.
 * @returns any - the data to pass to the cloud function for setup and the circuit artifacts.
 */
const parseCeremonyFile = async (path, cleanup = false) => {
    // check that the path exists
    if (!fs.existsSync(path))
        throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
    try {
        // read the data
        const data = JSON.parse(fs.readFileSync(path).toString());
        // verify that the data is correct
        if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
            data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
            throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
        // validate that we have at least 1 circuit input data
        if (!data.circuits || data.circuits.length === 0)
            throw new Error("You need to provide the data for at least 1 circuit.");
        // parse the ceremony dates.
        // @note the Date constructor never throws on malformed input: it yields an
        // "Invalid Date" whose time value is NaN, and every comparison against NaN is
        // false — so validity must be checked explicitly or bad dates slip through.
        const endDate = new Date(data.endDate);
        const startDate = new Date(data.startDate);
        if (Number.isNaN(startDate.valueOf()) || Number.isNaN(endDate.valueOf()))
            throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
        // validate that the end date is after the start date and both are in the future.
        if (endDate <= startDate)
            throw new Error("The end date should be greater than the start date.");
        const currentDate = new Date();
        if (endDate <= currentDate || startDate <= currentDate)
            throw new Error("The start and end dates should be in the future.");
        // validate penalty
        if (data.penalty <= 0)
            throw new Error("The penalty should be greater than zero.");
        const circuits = [];
        const urlPattern = /(https?:\/\/[^\s]+)/g;
        const commitHashPattern = /^[a-f0-9]{40}$/i;
        const circuitArtifacts = [];
        for (let i = 0; i < data.circuits.length; i++) {
            const circuitData = data.circuits[i];
            const { artifacts } = circuitData;
            circuitArtifacts.push({
                artifacts
            });
            // local destinations for the downloaded artifacts.
            const localR1csPath = `./${circuitData.name}.r1cs`;
            const localWasmPath = `./${circuitData.name}.wasm`;
            // download the r1cs to extract the metadata
            const streamPipeline = promisify(pipeline);
            // Check if r1cs file already exists (avoid re-downloading).
            let r1csExists = false;
            if (fs.existsSync(localR1csPath)) {
                console.log(`Found existing r1cs file for circuit ${circuitData.name}. Skipping download.`);
                r1csExists = true;
            }
            if (!r1csExists) {
                // Make the call to download r1cs.
                const responseR1CS = await fetch(artifacts.r1csStoragePath);
                // Handle errors.
                if (!responseR1CS.ok && responseR1CS.status !== 200)
                    throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
                // Write the file locally
                await streamPipeline(responseR1CS.body, createWriteStream(localR1csPath));
                console.log(`Downloaded r1cs file for circuit ${circuitData.name}.`);
            }
            // extract the metadata from the r1cs
            const metadata = getR1CSInfo(localR1csPath);
            // Check if wasm file already exists (avoid re-downloading).
            let wasmExists = false;
            if (fs.existsSync(localWasmPath)) {
                console.log(`Found existing wasm file for circuit ${circuitData.name}. Skipping download.`);
                wasmExists = true;
            }
            if (!wasmExists) {
                // download wasm if it's not available
                const responseWASM = await fetch(artifacts.wasmStoragePath);
                if (!responseWASM.ok && responseWASM.status !== 200)
                    throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
                await streamPipeline(responseWASM.body, createWriteStream(localWasmPath));
                console.log(`Downloaded wasm file for circuit ${circuitData.name}.`);
            }
            // validate that the circuit hash and template links are valid
            const { template } = circuitData;
            const URLMatch = template.source.match(urlPattern);
            if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
                throw new Error("You should provide the URL to the circuits templates on GitHub.");
            const hashMatch = template.commitHash.match(commitHashPattern);
            if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit templates.");
            // calculate the hash of the r1cs file
            const r1csBlake2bHash = await blake512FromPath(localR1csPath);
            const circuitPrefix = extractPrefix(circuitData.name);
            // filenames
            const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
            const r1csCompleteFilename = `${circuitData.name}.r1cs`;
            const wasmCompleteFilename = `${circuitData.name}.wasm`;
            const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
            const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
            // storage paths
            const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
            const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
            const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
            const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
            const files = {
                potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
                r1csFilename: r1csCompleteFilename,
                wasmFilename: wasmCompleteFilename,
                initialZkeyFilename: firstZkeyCompleteFilename,
                potStoragePath: potStorageFilePath,
                r1csStoragePath: r1csStorageFilePath,
                wasmStoragePath: wasmStorageFilePath,
                initialZkeyStoragePath: zkeyStorageFilePath,
                r1csBlake2bHash
            };
            // validate that the compiler hash is a valid hash
            const { compiler } = circuitData;
            const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
            if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit compiler.");
            // validate that the verification options are valid
            const { verification } = circuitData;
            if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
                throw new Error("Please enter a valid verification mechanism: either CF or VM");
            // @todo VM parameters verification
            // if (verification['cfOrVM'] === "VM") {}
            // check that the timeout is provided for the correct configuration
            let dynamicThreshold;
            let fixedTimeWindow;
            let circuit = {};
            if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
                if (circuitData.dynamicThreshold <= 0)
                    throw new Error("The dynamic threshold should be > 0.");
                dynamicThreshold = circuitData.dynamicThreshold;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    dynamicThreshold,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
                if (circuitData.fixedTimeWindow <= 0)
                    throw new Error("The fixed time window threshold should be > 0.");
                fixedTimeWindow = circuitData.fixedTimeWindow;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    fixedTimeWindow,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            circuits.push(circuit);
            // remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
            if (cleanup) {
                fs.unlinkSync(localR1csPath);
                fs.unlinkSync(localWasmPath);
            }
        }
        const setupData = {
            ceremonyInputData: {
                title: data.title,
                description: data.description,
                startDate: startDate.valueOf(),
                endDate: endDate.valueOf(),
                timeoutMechanismType: data.timeoutMechanismType,
                penalty: data.penalty
            },
            ceremonyPrefix: extractPrefix(data.title),
            circuits,
            circuitArtifacts
        };
        return setupData;
    }
    catch (error) {
        // Fixed message typo: "parsing up" -> "parsing".
        throw new Error(`Error while parsing the ceremony setup file. ${error.message}`);
    }
};
1589
1656
 
1590
1657
  /**
1591
1658
  * Verify that a zKey is valid
@@ -1834,7 +1901,7 @@ const getFirestoreDatabase = (app) => getFirestore(app);
1834
1901
/**
 * Retrieve the Cloud Functions service instance associated to the given Firebase application.
 * @param app <FirebaseApp> - the Firebase application.
 * @returns <Functions> - the Cloud Functions associated to the application.
 */
// NOTE(review): region hard-coded to "europe-west1" — confirm this matches the deployment region of the functions.
const getFirebaseFunctions = (app) => getFunctions(app, "europe-west1");
1838
1905
  /**
1839
1906
  * Retrieve the configuration variables for the AWS services (S3, EC2).
1840
1907
  * @returns <AWSVariables> - the values of the AWS services configuration variables.
@@ -1843,14 +1910,14 @@ const getAWSVariables = () => {
1843
1910
  if (!process.env.AWS_ACCESS_KEY_ID ||
1844
1911
  !process.env.AWS_SECRET_ACCESS_KEY ||
1845
1912
  !process.env.AWS_REGION ||
1846
- !process.env.AWS_ROLE_ARN ||
1913
+ !process.env.AWS_INSTANCE_PROFILE_ARN ||
1847
1914
  !process.env.AWS_AMI_ID)
1848
1915
  throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
1849
1916
  return {
1850
1917
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
1851
1918
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
1852
1919
  region: process.env.AWS_REGION || "us-east-1",
1853
- roleArn: process.env.AWS_ROLE_ARN,
1920
+ instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
1854
1921
  amiId: process.env.AWS_AMI_ID
1855
1922
  };
1856
1923
  };
@@ -1931,11 +1998,11 @@ const p256 = (proofPart) => {
1931
1998
  */
1932
1999
  const formatSolidityCalldata = (circuitInput, _proof) => {
1933
2000
  try {
1934
- const proof = utils.unstringifyBigInts(_proof);
2001
+ const proof = unstringifyBigInts(_proof);
1935
2002
  // format the public inputs to the circuit
1936
2003
  const formattedCircuitInput = [];
1937
2004
  for (const cInput of circuitInput) {
1938
- formattedCircuitInput.push(p256(utils.unstringifyBigInts(cInput)));
2005
+ formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
1939
2006
  }
1940
2007
  // construct calldata
1941
2008
  const calldata = {
@@ -2103,7 +2170,8 @@ const getGitHubStats = async (user) => {
2103
2170
  following: jsonData.following,
2104
2171
  followers: jsonData.followers,
2105
2172
  publicRepos: jsonData.public_repos,
2106
- avatarUrl: jsonData.avatar_url
2173
+ avatarUrl: jsonData.avatar_url,
2174
+ age: jsonData.created_at
2107
2175
  };
2108
2176
  return data;
2109
2177
  };
@@ -2115,20 +2183,21 @@ const getGitHubStats = async (user) => {
2115
2183
  * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
2116
2184
  * @returns <any> Return the avatar URL of the user if the user is reputable, false otherwise
2117
2185
  */
2118
/**
 * Check whether a GitHub user meets the anti-sybil reputation thresholds.
 * @param userLogin <string> - the GitHub login of the user.
 * @param minimumAmountOfFollowing <number> - the minimum amount of accounts the user must follow.
 * @param minimumAmountOfFollowers <number> - the minimum amount of followers the user must have.
 * @param minimumAmountOfPublicRepos <number> - the minimum amount of public repos the user must have.
 * @param minimumAge <number> - the minimum account age (compared in milliseconds against the creation date).
 * @returns <any> - { reputable, avatarUrl }; avatarUrl is empty when the user is not reputable.
 */
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge) => {
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registation, and then try again.");
    const stats = await getGitHubStats(userLogin);
    const { following, followers, publicRepos, avatarUrl, age } = stats;
    const tooFewFollowing = following < minimumAmountOfFollowing;
    const tooFewRepos = publicRepos < minimumAmountOfPublicRepos;
    const tooFewFollowers = followers < minimumAmountOfFollowers;
    // Account created more recently than `minimumAge` ms ago fails the check.
    const tooYoung = new Date(age) > new Date(Date.now() - minimumAge);
    if (tooFewFollowing || tooFewRepos || tooFewFollowers || tooYoung) {
        return {
            reputable: false,
            avatarUrl: ""
        };
    }
    return {
        reputable: true,
        avatarUrl
    };
};
2134
2203
 
@@ -2315,8 +2384,8 @@ const createSSMClient = async () => {
/**
 * Build the startup (user data) command list that bootstraps the verification VM.
 * @param bucketName <string> - the name of the S3 bucket holding the bootstrap script.
 * @returns <Array<string>> - the list of startup commands to be executed.
 */
const vmBootstrapCommand = (bucketName) => {
    const commands = [
        "#!/bin/bash", // shebang.
        `aws s3 cp s3://${bucketName}/${vmBootstrapScriptFilename} ${vmBootstrapScriptFilename}`, // fetch the bootstrap script from the S3 bucket.
        `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}` // make it executable and run it.
    ];
    return commands;
};
2322
2391
  /**
@@ -2337,8 +2406,13 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2337
2406
  // eslint-disable-next-line no-template-curly-in-string
2338
2407
  "touch ${MARKER_FILE}",
2339
2408
  "sudo yum update -y",
2340
- "curl -sL https://rpm.nodesource.com/setup_16.x | sudo bash - ",
2341
- "sudo yum install -y nodejs",
2409
+ "curl -O https://nodejs.org/dist/v16.13.0/node-v16.13.0-linux-x64.tar.xz",
2410
+ "tar -xf node-v16.13.0-linux-x64.tar.xz",
2411
+ "mv node-v16.13.0-linux-x64 nodejs",
2412
+ "sudo mv nodejs /opt/",
2413
+ "echo 'export NODEJS_HOME=/opt/nodejs' >> /etc/profile",
2414
+ "echo 'export PATH=$NODEJS_HOME/bin:$PATH' >> /etc/profile",
2415
+ "source /etc/profile",
2342
2416
  "npm install -g snarkjs",
2343
2417
  `aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
2344
2418
  `aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
@@ -2357,6 +2431,7 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2357
2431
  * @returns Array<string> - the list of commands for contribution verification.
2358
2432
  */
2359
2433
  const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => [
2434
+ `source /etc/profile`,
2360
2435
  `aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`,
2361
2436
  `snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`,
2362
2437
  `aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
@@ -2383,8 +2458,9 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
2383
2458
  */
2384
2459
  const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
2385
2460
  // Get the AWS variables.
2386
- const { amiId, roleArn } = getAWSVariables();
2461
+ const { amiId, instanceProfileArn } = getAWSVariables();
2387
2462
  // Parametrize the VM EC2 instance.
2463
+ console.log("\nLAUNCHING AWS EC2 INSTANCE\n");
2388
2464
  const params = {
2389
2465
  ImageId: amiId,
2390
2466
  InstanceType: instanceType,
@@ -2392,7 +2468,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2392
2468
  MinCount: 1,
2393
2469
  // nb. to find this: iam -> roles -> role_name.
2394
2470
  IamInstanceProfile: {
2395
- Arn: roleArn
2471
+ Arn: instanceProfileArn
2396
2472
  },
2397
2473
  // nb. for running commands at the startup.
2398
2474
  UserData: Buffer.from(commands.join("\n")).toString("base64"),
@@ -2401,7 +2477,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2401
2477
  DeviceName: "/dev/xvda",
2402
2478
  Ebs: {
2403
2479
  DeleteOnTermination: true,
2404
- VolumeSize: volumeSize,
2480
+ VolumeSize: volumeSize, // disk size in GB.
2405
2481
  VolumeType: diskType
2406
2482
  }
2407
2483
  }
@@ -2418,6 +2494,19 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2418
2494
  {
2419
2495
  Key: "Initialized",
2420
2496
  Value: "false"
2497
+ },
2498
+ {
2499
+ Key: "Project",
2500
+ Value: "trusted-setup"
2501
+ }
2502
+ ]
2503
+ },
2504
+ {
2505
+ ResourceType: "volume",
2506
+ Tags: [
2507
+ {
2508
+ Key: "Project",
2509
+ Value: "trusted-setup"
2421
2510
  }
2422
2511
  ]
2423
2512
  }
@@ -2587,4 +2676,4 @@ const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
2587
2676
  }
2588
2677
  };
2589
2678
 
2590
- export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
2679
+ export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, contribHashRegex, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCeremonies, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };