@devtion/actions 0.0.0-9c50f66 → 0.0.0-a7b749e

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/README.md +1 -1
  2. package/dist/index.mjs +341 -284
  3. package/dist/index.node.js +342 -283
  4. package/dist/types/src/helpers/constants.d.ts +5 -2
  5. package/dist/types/src/helpers/constants.d.ts.map +1 -1
  6. package/dist/types/src/helpers/contracts.d.ts.map +1 -1
  7. package/dist/types/src/helpers/crypto.d.ts +1 -0
  8. package/dist/types/src/helpers/crypto.d.ts.map +1 -1
  9. package/dist/types/src/helpers/database.d.ts +8 -0
  10. package/dist/types/src/helpers/database.d.ts.map +1 -1
  11. package/dist/types/src/helpers/security.d.ts +1 -1
  12. package/dist/types/src/helpers/security.d.ts.map +1 -1
  13. package/dist/types/src/helpers/storage.d.ts +5 -2
  14. package/dist/types/src/helpers/storage.d.ts.map +1 -1
  15. package/dist/types/src/helpers/utils.d.ts +34 -20
  16. package/dist/types/src/helpers/utils.d.ts.map +1 -1
  17. package/dist/types/src/helpers/verification.d.ts +3 -2
  18. package/dist/types/src/helpers/verification.d.ts.map +1 -1
  19. package/dist/types/src/helpers/vm.d.ts.map +1 -1
  20. package/dist/types/src/index.d.ts +2 -2
  21. package/dist/types/src/index.d.ts.map +1 -1
  22. package/dist/types/src/types/index.d.ts +9 -3
  23. package/dist/types/src/types/index.d.ts.map +1 -1
  24. package/package.json +3 -8
  25. package/src/helpers/constants.ts +39 -31
  26. package/src/helpers/contracts.ts +3 -3
  27. package/src/helpers/database.ts +13 -0
  28. package/src/helpers/functions.ts +1 -1
  29. package/src/helpers/security.ts +11 -10
  30. package/src/helpers/services.ts +3 -3
  31. package/src/helpers/storage.ts +15 -3
  32. package/src/helpers/utils.ts +316 -277
  33. package/src/helpers/verification.ts +6 -6
  34. package/src/helpers/vm.ts +14 -7
  35. package/src/index.ts +5 -3
  36. package/src/types/index.ts +32 -8
@@ -1,6 +1,6 @@
1
1
  /**
2
- * @module @p0tion/actions
3
- * @version 1.0.5
2
+ * @module @devtion/actions
3
+ * @version 1.2.5
4
4
  * @file A set of actions and helpers for CLI commands
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -17,9 +17,7 @@ var firestore = require('firebase/firestore');
17
17
  var snarkjs = require('snarkjs');
18
18
  var crypto = require('crypto');
19
19
  var blake = require('blakejs');
20
- var ffjavascript = require('ffjavascript');
21
20
  var winston = require('winston');
22
- var clientS3 = require('@aws-sdk/client-s3');
23
21
  var stream = require('stream');
24
22
  var util = require('util');
25
23
  var app = require('firebase/app');
@@ -30,10 +28,10 @@ var clientEc2 = require('@aws-sdk/client-ec2');
30
28
  var clientSsm = require('@aws-sdk/client-ssm');
31
29
  var dotenv = require('dotenv');
32
30
 
33
- // Main part for the Hermez Phase 1 Trusted Setup URLs to download PoT files.
34
- const potFileDownloadMainUrl = `https://hermez.s3-eu-west-1.amazonaws.com/`;
35
- // Main part for the Hermez Phase 1 Trusted Setup PoT files to be downloaded.
36
- const potFilenameTemplate = `powersOfTau28_hez_final_`;
31
+ // Main part for the PPoT Phase 1 Trusted Setup URLs to download PoT files.
32
+ const potFileDownloadMainUrl = `https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/`;
33
+ // Main part for the PPoT Phase 1 Trusted Setup PoT files to be downloaded.
34
+ const potFilenameTemplate = `ppot_0080_`;
37
35
  // The genesis zKey index.
38
36
  const genesisZkeyIndex = `00000`;
39
37
  // The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
@@ -50,6 +48,8 @@ const verifierSmartContractAcronym = "verifier";
50
48
  const ec2InstanceTag = "p0tionec2instance";
51
49
  // The name of the VM startup script file.
52
50
  const vmBootstrapScriptFilename = "bootstrap.sh";
51
+ // Match hash output by snarkjs in transcript log
52
+ const contribHashRegex = new RegExp("Contribution.+Hash.+\n\t\t.+\n\t\t.+\n.+\n\t\t.+\r?\n");
53
53
  /**
54
54
  * Define the supported VM configuration types.
55
55
  * @dev the VM configurations can be retrieved at https://aws.amazon.com/ec2/instance-types/
@@ -107,112 +107,116 @@ const vmConfigurationTypes = {
107
107
  */
108
108
  const powersOfTauFiles = [
109
109
  {
110
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_01.ptau",
110
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_01.ptau",
111
111
  size: 0.000084
112
112
  },
113
113
  {
114
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_02.ptau",
114
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_02.ptau",
115
115
  size: 0.000086
116
116
  },
117
117
  {
118
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_03.ptau",
118
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_03.ptau",
119
119
  size: 0.000091
120
120
  },
121
121
  {
122
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_04.ptau",
122
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_04.ptau",
123
123
  size: 0.0001
124
124
  },
125
125
  {
126
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_05.ptau",
126
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_05.ptau",
127
127
  size: 0.000117
128
128
  },
129
129
  {
130
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_06.ptau",
130
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_06.ptau",
131
131
  size: 0.000153
132
132
  },
133
133
  {
134
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_07.ptau",
134
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_07.ptau",
135
135
  size: 0.000225
136
136
  },
137
137
  {
138
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_08.ptau",
138
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_08.ptau",
139
139
  size: 0.0004
140
140
  },
141
141
  {
142
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_09.ptau",
142
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_09.ptau",
143
143
  size: 0.000658
144
144
  },
145
145
  {
146
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_10.ptau",
146
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_10.ptau",
147
147
  size: 0.0013
148
148
  },
149
149
  {
150
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_11.ptau",
150
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_11.ptau",
151
151
  size: 0.0023
152
152
  },
153
153
  {
154
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_12.ptau",
154
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_12.ptau",
155
155
  size: 0.0046
156
156
  },
157
157
  {
158
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_13.ptau",
158
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_13.ptau",
159
159
  size: 0.0091
160
160
  },
161
161
  {
162
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_14.ptau",
162
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_14.ptau",
163
163
  size: 0.0181
164
164
  },
165
165
  {
166
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_15.ptau",
166
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_15.ptau",
167
167
  size: 0.0361
168
168
  },
169
169
  {
170
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_16.ptau",
170
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_16.ptau",
171
171
  size: 0.0721
172
172
  },
173
173
  {
174
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_17.ptau",
174
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_17.ptau",
175
175
  size: 0.144
176
176
  },
177
177
  {
178
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_18.ptau",
178
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_18.ptau",
179
179
  size: 0.288
180
180
  },
181
181
  {
182
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_19.ptau",
182
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_19.ptau",
183
183
  size: 0.576
184
184
  },
185
185
  {
186
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_20.ptau",
186
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_20.ptau",
187
187
  size: 1.1
188
188
  },
189
189
  {
190
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_21.ptau",
190
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_21.ptau",
191
191
  size: 2.3
192
192
  },
193
193
  {
194
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_22.ptau",
194
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_22.ptau",
195
195
  size: 4.5
196
196
  },
197
197
  {
198
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_23.ptau",
198
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_23.ptau",
199
199
  size: 9.0
200
200
  },
201
201
  {
202
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_24.ptau",
202
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_24.ptau",
203
203
  size: 18.0
204
204
  },
205
205
  {
206
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_25.ptau",
206
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_25.ptau",
207
207
  size: 36.0
208
208
  },
209
209
  {
210
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_26.ptau",
210
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_26.ptau",
211
211
  size: 72.0
212
212
  },
213
213
  {
214
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_27.ptau",
214
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_27.ptau",
215
215
  size: 144.0
216
+ },
217
+ {
218
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_final.ptau",
219
+ size: 288.0
216
220
  }
217
221
  ];
218
222
  /**
@@ -343,6 +347,8 @@ const commonTerms = {
343
347
  finalizeCeremony: "finalizeCeremony",
344
348
  downloadCircuitArtifacts: "downloadCircuitArtifacts",
345
349
  transferObject: "transferObject",
350
+ bandadaValidateProof: "bandadaValidateProof",
351
+ checkNonceOfSIWEAddress: "checkNonceOfSIWEAddress"
346
352
  }
347
353
  };
348
354
 
@@ -693,19 +699,23 @@ const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey,
693
699
  * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
694
700
  * @param ceremonyId <string> - the unique identifier of the ceremony.
695
701
  * @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
702
+ * @param logger <GenericBar> - an optional logger to show progress.
696
703
  * @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
697
704
  */
698
- const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
705
+ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks, logger) => {
699
706
  // Keep track of uploaded chunks.
700
707
  const uploadedChunks = alreadyUploadedChunks || [];
708
+ // if we were passed a logger, start it
709
+ if (logger)
710
+ logger.start(chunksWithUrls.length, 0);
701
711
  // Loop through remaining chunks.
702
712
  for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
703
713
  // Consume the pre-signed url to upload the chunk.
704
714
  // @ts-ignore
705
715
  const response = await fetch(chunksWithUrls[i].preSignedUrl, {
706
716
  retryOptions: {
707
- retryInitialDelay: 500,
708
- socketTimeout: 60000,
717
+ retryInitialDelay: 500, // 500 ms.
718
+ socketTimeout: 60000, // 60 seconds.
709
719
  retryMaxDuration: 300000 // 5 minutes.
710
720
  },
711
721
  method: "PUT",
@@ -729,6 +739,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
729
739
  // nb. this must be done only when contributing (not finalizing).
730
740
  if (!!ceremonyId && !!cloudFunctions)
731
741
  await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
742
+ // increment the count on the logger
743
+ if (logger)
744
+ logger.increment();
732
745
  }
733
746
  return uploadedChunks;
734
747
  };
@@ -749,8 +762,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
749
762
  * @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
750
763
  * @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
751
764
  * @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
765
+ * @param logger <GenericBar> - an optional logger to show progress.
752
766
  */
753
- const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
767
+ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload, logger) => {
754
768
  // The unique identifier of the multi-part upload.
755
769
  let multiPartUploadId = "";
756
770
  // The list of already uploaded chunks.
@@ -774,7 +788,7 @@ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFileP
774
788
  const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
775
789
  // Step (2).
776
790
  const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
777
- cloudFunctions, ceremonyId, alreadyUploadedChunks);
791
+ cloudFunctions, ceremonyId, alreadyUploadedChunks, logger);
778
792
  // Step (3).
779
793
  await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
780
794
  };
@@ -998,6 +1012,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
998
1012
  ]);
999
1013
  return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
1000
1014
  };
1015
+ /**
1016
+ * Query all ceremonies
1017
+ * @notice get all ceremonies from the database.
1018
+ * @dev this is a helper for the CLI ceremony methods.
1019
+ * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
1020
+ * @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
1021
+ */
1022
+ const getAllCeremonies = async (firestoreDatabase) => {
1023
+ const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
1024
+ return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
1025
+ };
1001
1026
 
1002
1027
  /**
1003
1028
  * @hidden
@@ -1046,207 +1071,22 @@ const compareHashes = async (path1, path2) => {
1046
1071
  };
1047
1072
 
1048
1073
  /**
1049
- * Parse and validate that the ceremony configuration is correct
1050
- * @notice this does not upload any files to storage
1051
- * @param path <string> - the path to the configuration file
1052
- * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1053
- * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1074
+ * Return a string with double digits if the provided input is one digit only.
1075
+ * @param in <number> - the input number to be converted.
1076
+ * @returns <string> - the two digits stringified number derived from the conversion.
1054
1077
  */
1055
- const parseCeremonyFile = async (path, cleanup = false) => {
1056
- // check that the path exists
1057
- if (!fs.existsSync(path))
1058
- throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
1059
- try {
1060
- // read the data
1061
- const data = JSON.parse(fs.readFileSync(path).toString());
1062
- // verify that the data is correct
1063
- if (data['timeoutMechanismType'] !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ && data['timeoutMechanismType'] !== "FIXED" /* CeremonyTimeoutType.FIXED */)
1064
- throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
1065
- // validate that we have at least 1 circuit input data
1066
- if (!data.circuits || data.circuits.length === 0)
1067
- throw new Error("You need to provide the data for at least 1 circuit.");
1068
- // validate that the end date is in the future
1069
- let endDate;
1070
- let startDate;
1071
- try {
1072
- endDate = new Date(data.endDate);
1073
- startDate = new Date(data.startDate);
1074
- }
1075
- catch (error) {
1076
- throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
1077
- }
1078
- if (endDate <= startDate)
1079
- throw new Error("The end date should be greater than the start date.");
1080
- const currentDate = new Date();
1081
- if (endDate <= currentDate || startDate <= currentDate)
1082
- throw new Error("The start and end dates should be in the future.");
1083
- // validate penalty
1084
- if (data.penalty <= 0)
1085
- throw new Error("The penalty should be greater than zero.");
1086
- const circuits = [];
1087
- const urlPattern = /(https?:\/\/[^\s]+)/g;
1088
- const commitHashPattern = /^[a-f0-9]{40}$/i;
1089
- const circuitArtifacts = [];
1090
- for (let i = 0; i < data.circuits.length; i++) {
1091
- const circuitData = data.circuits[i];
1092
- const artifacts = circuitData.artifacts;
1093
- circuitArtifacts.push({
1094
- artifacts: artifacts
1095
- });
1096
- const r1csPath = artifacts.r1csStoragePath;
1097
- const wasmPath = artifacts.wasmStoragePath;
1098
- // where we storing the r1cs downloaded
1099
- const localR1csPath = `./${circuitData.name}.r1cs`;
1100
- // check that the artifacts exist in S3
1101
- // we don't need any privileges to download this
1102
- // just the correct region
1103
- const s3 = new clientS3.S3Client({ region: artifacts.region });
1104
- try {
1105
- await s3.send(new clientS3.HeadObjectCommand({
1106
- Bucket: artifacts.bucket,
1107
- Key: r1csPath
1108
- }));
1109
- }
1110
- catch (error) {
1111
- throw new Error(`The r1cs file (${r1csPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1112
- }
1113
- try {
1114
- await s3.send(new clientS3.HeadObjectCommand({
1115
- Bucket: artifacts.bucket,
1116
- Key: wasmPath
1117
- }));
1118
- }
1119
- catch (error) {
1120
- throw new Error(`The wasm file (${wasmPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1121
- }
1122
- // download the r1cs to extract the metadata
1123
- const command = new clientS3.GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
1124
- const response = await s3.send(command);
1125
- const streamPipeline = util.promisify(stream.pipeline);
1126
- if (response.$metadata.httpStatusCode !== 200)
1127
- throw new Error("There was an error while trying to download the r1cs file. Please check that the file has the correct permissions (public) set.");
1128
- if (response.Body instanceof stream.Readable)
1129
- await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
1130
- // extract the metadata from the r1cs
1131
- const metadata = getR1CSInfo(localR1csPath);
1132
- // validate that the circuit hash and template links are valid
1133
- const template = circuitData.template;
1134
- const URLMatch = template.source.match(urlPattern);
1135
- if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
1136
- throw new Error("You should provide the URL to the circuits templates on GitHub.");
1137
- const hashMatch = template.commitHash.match(commitHashPattern);
1138
- if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
1139
- throw new Error("You should provide a valid commit hash of the circuit templates.");
1140
- // calculate the hash of the r1cs file
1141
- const r1csBlake2bHash = await blake512FromPath(localR1csPath);
1142
- const circuitPrefix = extractPrefix(circuitData.name);
1143
- // filenames
1144
- const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
1145
- const r1csCompleteFilename = `${circuitData.name}.r1cs`;
1146
- const wasmCompleteFilename = `${circuitData.name}.wasm`;
1147
- const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
1148
- const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
1149
- // storage paths
1150
- const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
1151
- const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
1152
- const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
1153
- const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
1154
- const files = {
1155
- potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
1156
- r1csFilename: r1csCompleteFilename,
1157
- wasmFilename: wasmCompleteFilename,
1158
- initialZkeyFilename: firstZkeyCompleteFilename,
1159
- potStoragePath: potStorageFilePath,
1160
- r1csStoragePath: r1csStorageFilePath,
1161
- wasmStoragePath: wasmStorageFilePath,
1162
- initialZkeyStoragePath: zkeyStorageFilePath,
1163
- r1csBlake2bHash: r1csBlake2bHash
1164
- };
1165
- // validate that the compiler hash is a valid hash
1166
- const compiler = circuitData.compiler;
1167
- const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
1168
- if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
1169
- throw new Error("You should provide a valid commit hash of the circuit compiler.");
1170
- // validate that the verification options are valid
1171
- const verification = circuitData.verification;
1172
- if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
1173
- throw new Error("Please enter a valid verification mechanism: either CF or VM");
1174
- // @todo VM parameters verification
1175
- // if (verification['cfOrVM'] === "VM") {}
1176
- // check that the timeout is provided for the correct configuration
1177
- let dynamicThreshold;
1178
- let fixedTimeWindow;
1179
- let circuit = {};
1180
- if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
1181
- if (circuitData.dynamicThreshold <= 0)
1182
- throw new Error("The dynamic threshold should be > 0.");
1183
- dynamicThreshold = circuitData.dynamicThreshold;
1184
- // the Circuit data for the ceremony setup
1185
- circuit = {
1186
- name: circuitData.name,
1187
- description: circuitData.description,
1188
- prefix: circuitPrefix,
1189
- sequencePosition: i + 1,
1190
- metadata: metadata,
1191
- files: files,
1192
- template: template,
1193
- compiler: compiler,
1194
- verification: verification,
1195
- dynamicThreshold: dynamicThreshold,
1196
- avgTimings: {
1197
- contributionComputation: 0,
1198
- fullContribution: 0,
1199
- verifyCloudFunction: 0
1200
- },
1201
- };
1202
- }
1203
- if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
1204
- if (circuitData.fixedTimeWindow <= 0)
1205
- throw new Error("The fixed time window threshold should be > 0.");
1206
- fixedTimeWindow = circuitData.fixedTimeWindow;
1207
- // the Circuit data for the ceremony setup
1208
- circuit = {
1209
- name: circuitData.name,
1210
- description: circuitData.description,
1211
- prefix: circuitPrefix,
1212
- sequencePosition: i + 1,
1213
- metadata: metadata,
1214
- files: files,
1215
- template: template,
1216
- compiler: compiler,
1217
- verification: verification,
1218
- fixedTimeWindow: fixedTimeWindow,
1219
- avgTimings: {
1220
- contributionComputation: 0,
1221
- fullContribution: 0,
1222
- verifyCloudFunction: 0
1223
- },
1224
- };
1225
- }
1226
- circuits.push(circuit);
1227
- // remove the local r1cs download (if used for verifying the config only vs setup)
1228
- if (cleanup)
1229
- fs.unlinkSync(localR1csPath);
1230
- }
1231
- const setupData = {
1232
- ceremonyInputData: {
1233
- title: data.title,
1234
- description: data.description,
1235
- startDate: startDate.valueOf(),
1236
- endDate: endDate.valueOf(),
1237
- timeoutMechanismType: data.timeoutMechanismType,
1238
- penalty: data.penalty
1239
- },
1240
- ceremonyPrefix: extractPrefix(data.title),
1241
- circuits: circuits,
1242
- circuitArtifacts: circuitArtifacts
1243
- };
1244
- return setupData;
1245
- }
1246
- catch (error) {
1247
- throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
1248
- }
1249
- };
1078
+ const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
1079
+ /**
1080
+ * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1081
+ * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1082
+ * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1083
+ * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1084
+ * @param str <string> - the arbitrary string from which to extract the prefix.
1085
+ * @returns <string> - the resulting prefix.
1086
+ */
1087
+ const extractPrefix = (str) =>
1088
+ // eslint-disable-next-line no-useless-escape
1089
+ str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1250
1090
  /**
1251
1091
  * Extract data from a R1CS metadata file generated with a custom file-based logger.
1252
1092
  * @notice useful for extracting metadata circuits contained in the generated file using a logger
@@ -1303,17 +1143,6 @@ const formatZkeyIndex = (progress) => {
1303
1143
  * @returns <number> - the amount of powers.
1304
1144
  */
1305
1145
  const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
1306
- /**
1307
- * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1308
- * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1309
- * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1310
- * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1311
- * @param str <string> - the arbitrary string from which to extract the prefix.
1312
- * @returns <string> - the resulting prefix.
1313
- */
1314
- const extractPrefix = (str) =>
1315
- // eslint-disable-next-line no-useless-escape
1316
- str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1317
1146
  /**
1318
1147
  * Automate the generation of an entropy for a contribution.
1319
1148
  * @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
@@ -1380,7 +1209,9 @@ const getContributionsValidityForContributor = async (firestoreDatabase, circuit
1380
1209
  * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
1381
1210
  * @returns <string> - the public attestation preamble.
1382
1211
  */
1383
- const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName} MPC Phase2 Trusted Setup ceremony.\nThe following are my contribution signatures:`;
1212
+ const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}${ceremonyName.toLowerCase().includes("trusted setup") || ceremonyName.toLowerCase().includes("ceremony")
1213
+ ? "."
1214
+ : " MPC Phase2 Trusted Setup ceremony."}\nThe following are my contribution signatures:`;
1384
1215
  /**
1385
1216
  * Check and prepare public attestation for the contributor made only of its valid contributions.
1386
1217
  * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
@@ -1451,6 +1282,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
1451
1282
  // Return the read bytes.
1452
1283
  return buffer;
1453
1284
  };
1285
+ /**
1286
+ * Given a buffer in little endian format, convert it to bigint
1287
+ * @param buffer
1288
+ * @returns
1289
+ */
1290
+ function leBufferToBigint(buffer) {
1291
+ return BigInt(`0x${buffer.reverse().toString("hex")}`);
1292
+ }
1293
+ /**
1294
+ * Given an input containing string values, convert them
1295
+ * to bigint
1296
+ * @param input - The input to convert
1297
+ * @returns the input with string values converted to bigint
1298
+ */
1299
+ const unstringifyBigInts = (input) => {
1300
+ if (typeof input === "string" && /^[0-9]+$/.test(input)) {
1301
+ return BigInt(input);
1302
+ }
1303
+ if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
1304
+ return BigInt(input);
1305
+ }
1306
+ if (Array.isArray(input)) {
1307
+ return input.map(unstringifyBigInts);
1308
+ }
1309
+ if (input === null) {
1310
+ return null;
1311
+ }
1312
+ if (typeof input === "object") {
1313
+ return Object.entries(input).reduce((acc, [key, value]) => {
1314
+ acc[key] = unstringifyBigInts(value);
1315
+ return acc;
1316
+ }, {});
1317
+ }
1318
+ return input;
1319
+ };
1454
1320
  /**
1455
1321
  * Return the info about the R1CS file.ù
1456
1322
  * @dev this method was built taking inspiration from
@@ -1511,17 +1377,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
1511
1377
  let constraints = 0;
1512
1378
  try {
1513
1379
  // Get 'number of section' (jump magic r1cs and version1 data).
1514
- const numberOfSections = ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1380
+ const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1515
1381
  // Jump to first section.
1516
1382
  pointer = 12;
1517
1383
  // For each section
1518
1384
  for (let i = 0; i < numberOfSections; i++) {
1519
1385
  // Read section type.
1520
- const sectionType = ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1386
+ const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1521
1387
  // Jump to section size.
1522
1388
  pointer += 4;
1523
1389
  // Read section size
1524
- const sectionSize = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1390
+ const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1525
1391
  // If at header section (0x00000001 : Header Section).
1526
1392
  if (sectionType === BigInt(1)) {
1527
1393
  // Read info from header section.
@@ -1553,22 +1419,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
1553
1419
  */
1554
1420
  pointer += sectionSize - 20;
1555
1421
  // Read R1CS info.
1556
- wires = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1422
+ wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1557
1423
  pointer += 4;
1558
- publicOutputs = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1424
+ publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1559
1425
  pointer += 4;
1560
- publicInputs = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1426
+ publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1561
1427
  pointer += 4;
1562
- privateInputs = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1428
+ privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1563
1429
  pointer += 4;
1564
- labels = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1430
+ labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1565
1431
  pointer += 8;
1566
- constraints = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1432
+ constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1567
1433
  }
1568
1434
  pointer += 8 + Number(sectionSize);
1569
1435
  }
1570
1436
  return {
1571
- curve: "bn-128",
1437
+ curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
1572
1438
  wires,
1573
1439
  constraints,
1574
1440
  privateInputs,
@@ -1583,11 +1449,194 @@ const getR1CSInfo = (localR1CSFilePath) => {
1583
1449
  }
1584
1450
  };
1585
1451
  /**
1586
- * Return a string with double digits if the provided input is one digit only.
1587
- * @param in <number> - the input number to be converted.
1588
- * @returns <string> - the two digits stringified number derived from the conversion.
1452
+ * Parse and validate that the ceremony configuration is correct
1453
+ * @notice this does not upload any files to storage
1454
+ * @param path <string> - the path to the configuration file
1455
+ * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1456
+ * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1589
1457
  */
1590
- const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
1458
+ const parseCeremonyFile = async (path, cleanup = false) => {
1459
+ // check that the path exists
1460
+ if (!fs.existsSync(path))
1461
+ throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
1462
+ try {
1463
+ // read the data
1464
+ const data = JSON.parse(fs.readFileSync(path).toString());
1465
+ // verify that the data is correct
1466
+ if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
1467
+ data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
1468
+ throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
1469
+ // validate that we have at least 1 circuit input data
1470
+ if (!data.circuits || data.circuits.length === 0)
1471
+ throw new Error("You need to provide the data for at least 1 circuit.");
1472
+ // validate that the end date is in the future
1473
+ let endDate;
1474
+ let startDate;
1475
+ try {
1476
+ endDate = new Date(data.endDate);
1477
+ startDate = new Date(data.startDate);
1478
+ }
1479
+ catch (error) {
1480
+ throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
1481
+ }
1482
+ if (endDate <= startDate)
1483
+ throw new Error("The end date should be greater than the start date.");
1484
+ const currentDate = new Date();
1485
+ if (endDate <= currentDate || startDate <= currentDate)
1486
+ throw new Error("The start and end dates should be in the future.");
1487
+ // validate penalty
1488
+ if (data.penalty <= 0)
1489
+ throw new Error("The penalty should be greater than zero.");
1490
+ const circuits = [];
1491
+ const urlPattern = /(https?:\/\/[^\s]+)/g;
1492
+ const commitHashPattern = /^[a-f0-9]{40}$/i;
1493
+ const circuitArtifacts = [];
1494
+ for (let i = 0; i < data.circuits.length; i++) {
1495
+ const circuitData = data.circuits[i];
1496
+ const { artifacts } = circuitData;
1497
+ circuitArtifacts.push({
1498
+ artifacts
1499
+ });
1500
+ // where we storing the r1cs downloaded
1501
+ const localR1csPath = `./${circuitData.name}.r1cs`;
1502
+ // where we storing the wasm downloaded
1503
+ const localWasmPath = `./${circuitData.name}.wasm`;
1504
+ // download the r1cs to extract the metadata
1505
+ const streamPipeline = util.promisify(stream.pipeline);
1506
+ // Make the call.
1507
+ const responseR1CS = await fetch(artifacts.r1csStoragePath);
1508
+ // Handle errors.
1509
+ if (!responseR1CS.ok && responseR1CS.status !== 200)
1510
+ throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
1511
+ await streamPipeline(responseR1CS.body, fs.createWriteStream(localR1csPath));
1512
+ // Write the file locally
1513
+ // extract the metadata from the r1cs
1514
+ const metadata = getR1CSInfo(localR1csPath);
1515
+ // download wasm too to ensure it's available
1516
+ const responseWASM = await fetch(artifacts.wasmStoragePath);
1517
+ if (!responseWASM.ok && responseWASM.status !== 200)
1518
+ throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
1519
+ await streamPipeline(responseWASM.body, fs.createWriteStream(localWasmPath));
1520
+ // validate that the circuit hash and template links are valid
1521
+ const { template } = circuitData;
1522
+ const URLMatch = template.source.match(urlPattern);
1523
+ if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
1524
+ throw new Error("You should provide the URL to the circuits templates on GitHub.");
1525
+ const hashMatch = template.commitHash.match(commitHashPattern);
1526
+ if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
1527
+ throw new Error("You should provide a valid commit hash of the circuit templates.");
1528
+ // calculate the hash of the r1cs file
1529
+ const r1csBlake2bHash = await blake512FromPath(localR1csPath);
1530
+ const circuitPrefix = extractPrefix(circuitData.name);
1531
+ // filenames
1532
+ const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
1533
+ const r1csCompleteFilename = `${circuitData.name}.r1cs`;
1534
+ const wasmCompleteFilename = `${circuitData.name}.wasm`;
1535
+ const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
1536
+ const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
1537
+ // storage paths
1538
+ const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
1539
+ const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
1540
+ const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
1541
+ const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
1542
+ const files = {
1543
+ potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
1544
+ r1csFilename: r1csCompleteFilename,
1545
+ wasmFilename: wasmCompleteFilename,
1546
+ initialZkeyFilename: firstZkeyCompleteFilename,
1547
+ potStoragePath: potStorageFilePath,
1548
+ r1csStoragePath: r1csStorageFilePath,
1549
+ wasmStoragePath: wasmStorageFilePath,
1550
+ initialZkeyStoragePath: zkeyStorageFilePath,
1551
+ r1csBlake2bHash
1552
+ };
1553
+ // validate that the compiler hash is a valid hash
1554
+ const { compiler } = circuitData;
1555
+ const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
1556
+ if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
1557
+ throw new Error("You should provide a valid commit hash of the circuit compiler.");
1558
+ // validate that the verification options are valid
1559
+ const { verification } = circuitData;
1560
+ if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
1561
+ throw new Error("Please enter a valid verification mechanism: either CF or VM");
1562
+ // @todo VM parameters verification
1563
+ // if (verification['cfOrVM'] === "VM") {}
1564
+ // check that the timeout is provided for the correct configuration
1565
+ let dynamicThreshold;
1566
+ let fixedTimeWindow;
1567
+ let circuit = {};
1568
+ if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
1569
+ if (circuitData.dynamicThreshold <= 0)
1570
+ throw new Error("The dynamic threshold should be > 0.");
1571
+ dynamicThreshold = circuitData.dynamicThreshold;
1572
+ // the Circuit data for the ceremony setup
1573
+ circuit = {
1574
+ name: circuitData.name,
1575
+ description: circuitData.description,
1576
+ prefix: circuitPrefix,
1577
+ sequencePosition: i + 1,
1578
+ metadata,
1579
+ files,
1580
+ template,
1581
+ compiler,
1582
+ verification,
1583
+ dynamicThreshold,
1584
+ avgTimings: {
1585
+ contributionComputation: 0,
1586
+ fullContribution: 0,
1587
+ verifyCloudFunction: 0
1588
+ }
1589
+ };
1590
+ }
1591
+ if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
1592
+ if (circuitData.fixedTimeWindow <= 0)
1593
+ throw new Error("The fixed time window threshold should be > 0.");
1594
+ fixedTimeWindow = circuitData.fixedTimeWindow;
1595
+ // the Circuit data for the ceremony setup
1596
+ circuit = {
1597
+ name: circuitData.name,
1598
+ description: circuitData.description,
1599
+ prefix: circuitPrefix,
1600
+ sequencePosition: i + 1,
1601
+ metadata,
1602
+ files,
1603
+ template,
1604
+ compiler,
1605
+ verification,
1606
+ fixedTimeWindow,
1607
+ avgTimings: {
1608
+ contributionComputation: 0,
1609
+ fullContribution: 0,
1610
+ verifyCloudFunction: 0
1611
+ }
1612
+ };
1613
+ }
1614
+ circuits.push(circuit);
1615
+ // remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
1616
+ if (cleanup) {
1617
+ fs.unlinkSync(localR1csPath);
1618
+ fs.unlinkSync(localWasmPath);
1619
+ }
1620
+ }
1621
+ const setupData = {
1622
+ ceremonyInputData: {
1623
+ title: data.title,
1624
+ description: data.description,
1625
+ startDate: startDate.valueOf(),
1626
+ endDate: endDate.valueOf(),
1627
+ timeoutMechanismType: data.timeoutMechanismType,
1628
+ penalty: data.penalty
1629
+ },
1630
+ ceremonyPrefix: extractPrefix(data.title),
1631
+ circuits,
1632
+ circuitArtifacts
1633
+ };
1634
+ return setupData;
1635
+ }
1636
+ catch (error) {
1637
+ throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
1638
+ }
1639
+ };
1591
1640
 
1592
1641
  /**
1593
1642
  * Verify that a zKey is valid
@@ -1836,7 +1885,7 @@ const getFirestoreDatabase = (app) => firestore.getFirestore(app);
1836
1885
  * @param app <FirebaseApp> - the Firebase application.
1837
1886
  * @returns <Functions> - the Cloud Functions associated to the application.
1838
1887
  */
1839
- const getFirebaseFunctions = (app) => functions.getFunctions(app, 'europe-west1');
1888
+ const getFirebaseFunctions = (app) => functions.getFunctions(app, "europe-west1");
1840
1889
  /**
1841
1890
  * Retrieve the configuration variables for the AWS services (S3, EC2).
1842
1891
  * @returns <AWSVariables> - the values of the AWS services configuration variables.
@@ -1845,14 +1894,14 @@ const getAWSVariables = () => {
1845
1894
  if (!process.env.AWS_ACCESS_KEY_ID ||
1846
1895
  !process.env.AWS_SECRET_ACCESS_KEY ||
1847
1896
  !process.env.AWS_REGION ||
1848
- !process.env.AWS_ROLE_ARN ||
1897
+ !process.env.AWS_INSTANCE_PROFILE_ARN ||
1849
1898
  !process.env.AWS_AMI_ID)
1850
1899
  throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
1851
1900
  return {
1852
1901
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
1853
1902
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
1854
1903
  region: process.env.AWS_REGION || "us-east-1",
1855
- roleArn: process.env.AWS_ROLE_ARN,
1904
+ instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
1856
1905
  amiId: process.env.AWS_AMI_ID
1857
1906
  };
1858
1907
  };
@@ -1933,11 +1982,11 @@ const p256 = (proofPart) => {
1933
1982
  */
1934
1983
  const formatSolidityCalldata = (circuitInput, _proof) => {
1935
1984
  try {
1936
- const proof = ffjavascript.utils.unstringifyBigInts(_proof);
1985
+ const proof = unstringifyBigInts(_proof);
1937
1986
  // format the public inputs to the circuit
1938
1987
  const formattedCircuitInput = [];
1939
1988
  for (const cInput of circuitInput) {
1940
- formattedCircuitInput.push(p256(ffjavascript.utils.unstringifyBigInts(cInput)));
1989
+ formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
1941
1990
  }
1942
1991
  // construct calldata
1943
1992
  const calldata = {
@@ -2105,7 +2154,8 @@ const getGitHubStats = async (user) => {
2105
2154
  following: jsonData.following,
2106
2155
  followers: jsonData.followers,
2107
2156
  publicRepos: jsonData.public_repos,
2108
- avatarUrl: jsonData.avatar_url
2157
+ avatarUrl: jsonData.avatar_url,
2158
+ age: jsonData.created_at
2109
2159
  };
2110
2160
  return data;
2111
2161
  };
@@ -2117,20 +2167,21 @@ const getGitHubStats = async (user) => {
2117
2167
  * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
2118
2168
  * @returns <any> Return the avatar URL of the user if the user is reputable, false otherwise
2119
2169
  */
2120
- const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos) => {
2170
+ const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge) => {
2121
2171
  if (!process.env.GITHUB_ACCESS_TOKEN)
2122
2172
  throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registation, and then try again.");
2123
- const { following, followers, publicRepos, avatarUrl } = await getGitHubStats(userLogin);
2173
+ const { following, followers, publicRepos, avatarUrl, age } = await getGitHubStats(userLogin);
2124
2174
  if (following < minimumAmountOfFollowing ||
2125
2175
  publicRepos < minimumAmountOfPublicRepos ||
2126
- followers < minimumAmountOfFollowers)
2176
+ followers < minimumAmountOfFollowers ||
2177
+ new Date(age) > new Date(Date.now() - minimumAge))
2127
2178
  return {
2128
2179
  reputable: false,
2129
2180
  avatarUrl: ""
2130
2181
  };
2131
2182
  return {
2132
2183
  reputable: true,
2133
- avatarUrl: avatarUrl
2184
+ avatarUrl
2134
2185
  };
2135
2186
  };
2136
2187
 
@@ -2317,8 +2368,8 @@ const createSSMClient = async () => {
2317
2368
  * @returns <Array<string>> - the list of startup commands to be executed.
2318
2369
  */
2319
2370
  const vmBootstrapCommand = (bucketName) => [
2320
- "#!/bin/bash",
2321
- `aws s3 cp s3://${bucketName}/${vmBootstrapScriptFilename} ${vmBootstrapScriptFilename}`,
2371
+ "#!/bin/bash", // shabang.
2372
+ `aws s3 cp s3://${bucketName}/${vmBootstrapScriptFilename} ${vmBootstrapScriptFilename}`, // copy file from S3 bucket to VM.
2322
2373
  `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}` // grant permission and execute.
2323
2374
  ];
2324
2375
  /**
@@ -2339,8 +2390,13 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2339
2390
  // eslint-disable-next-line no-template-curly-in-string
2340
2391
  "touch ${MARKER_FILE}",
2341
2392
  "sudo yum update -y",
2342
- "curl -sL https://rpm.nodesource.com/setup_16.x | sudo bash - ",
2343
- "sudo yum install -y nodejs",
2393
+ "curl -O https://nodejs.org/dist/v16.13.0/node-v16.13.0-linux-x64.tar.xz",
2394
+ "tar -xf node-v16.13.0-linux-x64.tar.xz",
2395
+ "mv node-v16.13.0-linux-x64 nodejs",
2396
+ "sudo mv nodejs /opt/",
2397
+ "echo 'export NODEJS_HOME=/opt/nodejs' >> /etc/profile",
2398
+ "echo 'export PATH=$NODEJS_HOME/bin:$PATH' >> /etc/profile",
2399
+ "source /etc/profile",
2344
2400
  "npm install -g snarkjs",
2345
2401
  `aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
2346
2402
  `aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
@@ -2359,6 +2415,7 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2359
2415
  * @returns Array<string> - the list of commands for contribution verification.
2360
2416
  */
2361
2417
  const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => [
2418
+ `source /etc/profile`,
2362
2419
  `aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`,
2363
2420
  `snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`,
2364
2421
  `aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
@@ -2385,7 +2442,7 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
2385
2442
  */
2386
2443
  const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
2387
2444
  // Get the AWS variables.
2388
- const { amiId, roleArn } = getAWSVariables();
2445
+ const { amiId, instanceProfileArn } = getAWSVariables();
2389
2446
  // Parametrize the VM EC2 instance.
2390
2447
  const params = {
2391
2448
  ImageId: amiId,
@@ -2394,7 +2451,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2394
2451
  MinCount: 1,
2395
2452
  // nb. to find this: iam -> roles -> role_name.
2396
2453
  IamInstanceProfile: {
2397
- Arn: roleArn
2454
+ Arn: instanceProfileArn
2398
2455
  },
2399
2456
  // nb. for running commands at the startup.
2400
2457
  UserData: Buffer.from(commands.join("\n")).toString("base64"),
@@ -2403,7 +2460,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2403
2460
  DeviceName: "/dev/xvda",
2404
2461
  Ebs: {
2405
2462
  DeleteOnTermination: true,
2406
- VolumeSize: volumeSize,
2463
+ VolumeSize: volumeSize, // disk size in GB.
2407
2464
  VolumeType: diskType
2408
2465
  }
2409
2466
  }
@@ -2603,6 +2660,7 @@ exports.completeMultiPartUpload = completeMultiPartUpload;
2603
2660
  exports.computeDiskSizeForVM = computeDiskSizeForVM;
2604
2661
  exports.computeSHA256ToHex = computeSHA256ToHex;
2605
2662
  exports.computeSmallestPowersOfTauForCircuit = computeSmallestPowersOfTauForCircuit;
2663
+ exports.contribHashRegex = contribHashRegex;
2606
2664
  exports.convertBytesOrKbToGb = convertBytesOrKbToGb;
2607
2665
  exports.convertToDoubleDigits = convertToDoubleDigits;
2608
2666
  exports.createCustomLoggerForFile = createCustomLoggerForFile;
@@ -2631,6 +2689,7 @@ exports.generatePreSignedUrlsParts = generatePreSignedUrlsParts;
2631
2689
  exports.generateValidContributionsAttestation = generateValidContributionsAttestation;
2632
2690
  exports.generateZkeyFromScratch = generateZkeyFromScratch;
2633
2691
  exports.genesisZkeyIndex = genesisZkeyIndex;
2692
+ exports.getAllCeremonies = getAllCeremonies;
2634
2693
  exports.getAllCollectionDocs = getAllCollectionDocs;
2635
2694
  exports.getBucketName = getBucketName;
2636
2695
  exports.getCeremonyCircuits = getCeremonyCircuits;