@devtion/actions 0.0.0-92056fa → 0.0.0-9843891

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. package/README.md +1 -1
  2. package/dist/index.mjs +338 -275
  3. package/dist/index.node.js +336 -272
  4. package/dist/types/src/helpers/constants.d.ts +4 -2
  5. package/dist/types/src/helpers/constants.d.ts.map +1 -1
  6. package/dist/types/src/helpers/contracts.d.ts.map +1 -1
  7. package/dist/types/src/helpers/crypto.d.ts +1 -0
  8. package/dist/types/src/helpers/crypto.d.ts.map +1 -1
  9. package/dist/types/src/helpers/database.d.ts +8 -0
  10. package/dist/types/src/helpers/database.d.ts.map +1 -1
  11. package/dist/types/src/helpers/security.d.ts +1 -1
  12. package/dist/types/src/helpers/security.d.ts.map +1 -1
  13. package/dist/types/src/helpers/storage.d.ts +5 -2
  14. package/dist/types/src/helpers/storage.d.ts.map +1 -1
  15. package/dist/types/src/helpers/utils.d.ts +34 -20
  16. package/dist/types/src/helpers/utils.d.ts.map +1 -1
  17. package/dist/types/src/helpers/verification.d.ts +3 -2
  18. package/dist/types/src/helpers/verification.d.ts.map +1 -1
  19. package/dist/types/src/helpers/vm.d.ts.map +1 -1
  20. package/dist/types/src/index.d.ts +1 -1
  21. package/dist/types/src/index.d.ts.map +1 -1
  22. package/dist/types/src/types/index.d.ts +9 -3
  23. package/dist/types/src/types/index.d.ts.map +1 -1
  24. package/package.json +3 -8
  25. package/src/helpers/constants.ts +35 -29
  26. package/src/helpers/contracts.ts +3 -3
  27. package/src/helpers/database.ts +13 -0
  28. package/src/helpers/functions.ts +1 -1
  29. package/src/helpers/security.ts +11 -10
  30. package/src/helpers/services.ts +3 -3
  31. package/src/helpers/storage.ts +15 -3
  32. package/src/helpers/utils.ts +316 -272
  33. package/src/helpers/verification.ts +6 -6
  34. package/src/helpers/vm.ts +14 -7
  35. package/src/index.ts +3 -2
  36. package/src/types/index.ts +32 -8
package/dist/index.mjs CHANGED
@@ -1,6 +1,6 @@
1
1
  /**
2
- * @module @devtion/actions
3
- * @version 1.0.6
2
+ * @module @p0tion/actions
3
+ * @version 1.2.0
4
4
  * @file A set of actions and helpers for CLI commands
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -15,10 +15,8 @@ import { onSnapshot, query, collection, getDocs, doc, getDoc, where, Timestamp,
15
15
  import { zKey, groth16 } from 'snarkjs';
16
16
  import crypto from 'crypto';
17
17
  import blake from 'blakejs';
18
- import { utils } from 'ffjavascript';
19
18
  import winston from 'winston';
20
- import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3';
21
- import { pipeline, Readable } from 'stream';
19
+ import { pipeline } from 'stream';
22
20
  import { promisify } from 'util';
23
21
  import { initializeApp } from 'firebase/app';
24
22
  import { signInWithCredential, initializeAuth, getAuth } from 'firebase/auth';
@@ -29,9 +27,9 @@ import { SSMClient, SendCommandCommand, GetCommandInvocationCommand } from '@aws
29
27
  import dotenv from 'dotenv';
30
28
 
31
29
  // Main part for the Hermez Phase 1 Trusted Setup URLs to download PoT files.
32
- const potFileDownloadMainUrl = `https://hermez.s3-eu-west-1.amazonaws.com/`;
30
+ const potFileDownloadMainUrl = `https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/`;
33
31
  // Main part for the Hermez Phase 1 Trusted Setup PoT files to be downloaded.
34
- const potFilenameTemplate = `powersOfTau28_hez_final_`;
32
+ const potFilenameTemplate = `ppot_0080_`;
35
33
  // The genesis zKey index.
36
34
  const genesisZkeyIndex = `00000`;
37
35
  // The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
@@ -105,112 +103,116 @@ const vmConfigurationTypes = {
105
103
  */
106
104
  const powersOfTauFiles = [
107
105
  {
108
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_01.ptau",
106
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_01.ptau",
109
107
  size: 0.000084
110
108
  },
111
109
  {
112
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_02.ptau",
110
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_02.ptau",
113
111
  size: 0.000086
114
112
  },
115
113
  {
116
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_03.ptau",
114
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_03.ptau",
117
115
  size: 0.000091
118
116
  },
119
117
  {
120
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_04.ptau",
118
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_04.ptau",
121
119
  size: 0.0001
122
120
  },
123
121
  {
124
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_05.ptau",
122
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_05.ptau",
125
123
  size: 0.000117
126
124
  },
127
125
  {
128
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_06.ptau",
126
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_06.ptau",
129
127
  size: 0.000153
130
128
  },
131
129
  {
132
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_07.ptau",
130
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_07.ptau",
133
131
  size: 0.000225
134
132
  },
135
133
  {
136
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_08.ptau",
134
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_08.ptau",
137
135
  size: 0.0004
138
136
  },
139
137
  {
140
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_09.ptau",
138
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_09.ptau",
141
139
  size: 0.000658
142
140
  },
143
141
  {
144
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_10.ptau",
142
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_10.ptau",
145
143
  size: 0.0013
146
144
  },
147
145
  {
148
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_11.ptau",
146
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_11.ptau",
149
147
  size: 0.0023
150
148
  },
151
149
  {
152
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_12.ptau",
150
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_12.ptau",
153
151
  size: 0.0046
154
152
  },
155
153
  {
156
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_13.ptau",
154
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_13.ptau",
157
155
  size: 0.0091
158
156
  },
159
157
  {
160
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_14.ptau",
158
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_14.ptau",
161
159
  size: 0.0181
162
160
  },
163
161
  {
164
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_15.ptau",
162
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_15.ptau",
165
163
  size: 0.0361
166
164
  },
167
165
  {
168
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_16.ptau",
166
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_16.ptau",
169
167
  size: 0.0721
170
168
  },
171
169
  {
172
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_17.ptau",
170
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_17.ptau",
173
171
  size: 0.144
174
172
  },
175
173
  {
176
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_18.ptau",
174
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_18.ptau",
177
175
  size: 0.288
178
176
  },
179
177
  {
180
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_19.ptau",
178
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_19.ptau",
181
179
  size: 0.576
182
180
  },
183
181
  {
184
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_20.ptau",
182
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_20.ptau",
185
183
  size: 1.1
186
184
  },
187
185
  {
188
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_21.ptau",
186
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_21.ptau",
189
187
  size: 2.3
190
188
  },
191
189
  {
192
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_22.ptau",
190
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_22.ptau",
193
191
  size: 4.5
194
192
  },
195
193
  {
196
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_23.ptau",
194
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_23.ptau",
197
195
  size: 9.0
198
196
  },
199
197
  {
200
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_24.ptau",
198
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_24.ptau",
201
199
  size: 18.0
202
200
  },
203
201
  {
204
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_25.ptau",
202
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_25.ptau",
205
203
  size: 36.0
206
204
  },
207
205
  {
208
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_26.ptau",
206
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_26.ptau",
209
207
  size: 72.0
210
208
  },
211
209
  {
212
- ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_27.ptau",
210
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_27.ptau",
213
211
  size: 144.0
212
+ },
213
+ {
214
+ ref: "https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_final.ptau",
215
+ size: 288.0
214
216
  }
215
217
  ];
216
218
  /**
@@ -341,6 +343,8 @@ const commonTerms = {
341
343
  finalizeCeremony: "finalizeCeremony",
342
344
  downloadCircuitArtifacts: "downloadCircuitArtifacts",
343
345
  transferObject: "transferObject",
346
+ bandadaValidateProof: "bandadaValidateProof",
347
+ checkNonceOfSIWEAddress: "checkNonceOfSIWEAddress"
344
348
  }
345
349
  };
346
350
 
@@ -691,19 +695,23 @@ const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey,
691
695
  * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
692
696
  * @param ceremonyId <string> - the unique identifier of the ceremony.
693
697
  * @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
698
+ * @param logger <GenericBar> - an optional logger to show progress.
694
699
  * @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
695
700
  */
696
- const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
701
+ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks, logger) => {
697
702
  // Keep track of uploaded chunks.
698
703
  const uploadedChunks = alreadyUploadedChunks || [];
704
+ // if we were passed a logger, start it
705
+ if (logger)
706
+ logger.start(chunksWithUrls.length, 0);
699
707
  // Loop through remaining chunks.
700
708
  for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
701
709
  // Consume the pre-signed url to upload the chunk.
702
710
  // @ts-ignore
703
711
  const response = await fetch(chunksWithUrls[i].preSignedUrl, {
704
712
  retryOptions: {
705
- retryInitialDelay: 500,
706
- socketTimeout: 60000,
713
+ retryInitialDelay: 500, // 500 ms.
714
+ socketTimeout: 60000, // 60 seconds.
707
715
  retryMaxDuration: 300000 // 5 minutes.
708
716
  },
709
717
  method: "PUT",
@@ -727,6 +735,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
727
735
  // nb. this must be done only when contributing (not finalizing).
728
736
  if (!!ceremonyId && !!cloudFunctions)
729
737
  await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
738
+ // increment the count on the logger
739
+ if (logger)
740
+ logger.increment();
730
741
  }
731
742
  return uploadedChunks;
732
743
  };
@@ -747,8 +758,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
747
758
  * @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
748
759
  * @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
749
760
  * @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
761
+ * @param logger <GenericBar> - an optional logger to show progress.
750
762
  */
751
- const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
763
+ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload, logger) => {
752
764
  // The unique identifier of the multi-part upload.
753
765
  let multiPartUploadId = "";
754
766
  // The list of already uploaded chunks.
@@ -772,7 +784,7 @@ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFileP
772
784
  const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
773
785
  // Step (2).
774
786
  const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
775
- cloudFunctions, ceremonyId, alreadyUploadedChunks);
787
+ cloudFunctions, ceremonyId, alreadyUploadedChunks, logger);
776
788
  // Step (3).
777
789
  await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
778
790
  };
@@ -996,6 +1008,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
996
1008
  ]);
997
1009
  return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
998
1010
  };
1011
+ /**
1012
+ * Query all ceremonies
1013
+ * @notice get all ceremonies from the database.
1014
+ * @dev this is a helper for the CLI ceremony methods.
1015
+ * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
1016
+ * @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
1017
+ */
1018
+ const getAllCeremonies = async (firestoreDatabase) => {
1019
+ const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
1020
+ return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
1021
+ };
999
1022
 
1000
1023
  /**
1001
1024
  * @hidden
@@ -1044,199 +1067,22 @@ const compareHashes = async (path1, path2) => {
1044
1067
  };
1045
1068
 
1046
1069
  /**
1047
- * Parse and validate that the ceremony configuration is correct
1048
- * @notice this does not upload any files to storage
1049
- * @param path <string> - the path to the configuration file
1050
- * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1051
- * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1070
+ * Return a string with double digits if the provided input is one digit only.
1071
+ * @param in <number> - the input number to be converted.
1072
+ * @returns <string> - the two digits stringified number derived from the conversion.
1052
1073
  */
1053
- const parseCeremonyFile = async (path, cleanup = false) => {
1054
- // check that the path exists
1055
- if (!fs.existsSync(path))
1056
- throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
1057
- try {
1058
- // read the data
1059
- const data = JSON.parse(fs.readFileSync(path).toString());
1060
- // verify that the data is correct
1061
- if (data['timeoutMechanismType'] !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ && data['timeoutMechanismType'] !== "FIXED" /* CeremonyTimeoutType.FIXED */)
1062
- throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
1063
- // validate that we have at least 1 circuit input data
1064
- if (!data.circuits || data.circuits.length === 0)
1065
- throw new Error("You need to provide the data for at least 1 circuit.");
1066
- // validate that the end date is in the future
1067
- let endDate;
1068
- let startDate;
1069
- try {
1070
- endDate = new Date(data.endDate);
1071
- startDate = new Date(data.startDate);
1072
- }
1073
- catch (error) {
1074
- throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
1075
- }
1076
- if (endDate <= startDate)
1077
- throw new Error("The end date should be greater than the start date.");
1078
- const currentDate = new Date();
1079
- if (endDate <= currentDate || startDate <= currentDate)
1080
- throw new Error("The start and end dates should be in the future.");
1081
- // validate penalty
1082
- if (data.penalty <= 0)
1083
- throw new Error("The penalty should be greater than zero.");
1084
- const circuits = [];
1085
- const urlPattern = /(https?:\/\/[^\s]+)/g;
1086
- const commitHashPattern = /^[a-f0-9]{40}$/i;
1087
- const circuitArtifacts = [];
1088
- for (let i = 0; i < data.circuits.length; i++) {
1089
- const circuitData = data.circuits[i];
1090
- const artifacts = circuitData.artifacts;
1091
- circuitArtifacts.push({
1092
- artifacts: artifacts
1093
- });
1094
- // where we storing the r1cs downloaded
1095
- const localR1csPath = `./${circuitData.name}.r1cs`;
1096
- // where we storing the wasm downloaded
1097
- const localWasmPath = `./${circuitData.name}.wasm`;
1098
- // check that the artifacts exist in S3
1099
- // we don't need any privileges to download this
1100
- // just the correct region
1101
- const s3 = new S3Client({
1102
- region: artifacts.region,
1103
- credentials: undefined
1104
- });
1105
- // download the r1cs to extract the metadata
1106
- const command = new GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
1107
- const response = await s3.send(command);
1108
- const streamPipeline = promisify(pipeline);
1109
- if (response.$metadata.httpStatusCode !== 200)
1110
- throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
1111
- if (response.Body instanceof Readable)
1112
- await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
1113
- // extract the metadata from the r1cs
1114
- const metadata = getR1CSInfo(localR1csPath);
1115
- // download wasm too to ensure it's available
1116
- const wasmCommand = new GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.wasmStoragePath });
1117
- const wasmResponse = await s3.send(wasmCommand);
1118
- if (wasmResponse.$metadata.httpStatusCode !== 200)
1119
- throw new Error(`There was an error while trying to download the wasm file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
1120
- if (wasmResponse.Body instanceof Readable)
1121
- await streamPipeline(wasmResponse.Body, fs.createWriteStream(localWasmPath));
1122
- // validate that the circuit hash and template links are valid
1123
- const template = circuitData.template;
1124
- const URLMatch = template.source.match(urlPattern);
1125
- if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
1126
- throw new Error("You should provide the URL to the circuits templates on GitHub.");
1127
- const hashMatch = template.commitHash.match(commitHashPattern);
1128
- if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
1129
- throw new Error("You should provide a valid commit hash of the circuit templates.");
1130
- // calculate the hash of the r1cs file
1131
- const r1csBlake2bHash = await blake512FromPath(localR1csPath);
1132
- const circuitPrefix = extractPrefix(circuitData.name);
1133
- // filenames
1134
- const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
1135
- const r1csCompleteFilename = `${circuitData.name}.r1cs`;
1136
- const wasmCompleteFilename = `${circuitData.name}.wasm`;
1137
- const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
1138
- const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
1139
- // storage paths
1140
- const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
1141
- const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
1142
- const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
1143
- const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
1144
- const files = {
1145
- potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
1146
- r1csFilename: r1csCompleteFilename,
1147
- wasmFilename: wasmCompleteFilename,
1148
- initialZkeyFilename: firstZkeyCompleteFilename,
1149
- potStoragePath: potStorageFilePath,
1150
- r1csStoragePath: r1csStorageFilePath,
1151
- wasmStoragePath: wasmStorageFilePath,
1152
- initialZkeyStoragePath: zkeyStorageFilePath,
1153
- r1csBlake2bHash: r1csBlake2bHash
1154
- };
1155
- // validate that the compiler hash is a valid hash
1156
- const compiler = circuitData.compiler;
1157
- const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
1158
- if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
1159
- throw new Error("You should provide a valid commit hash of the circuit compiler.");
1160
- // validate that the verification options are valid
1161
- const verification = circuitData.verification;
1162
- if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
1163
- throw new Error("Please enter a valid verification mechanism: either CF or VM");
1164
- // @todo VM parameters verification
1165
- // if (verification['cfOrVM'] === "VM") {}
1166
- // check that the timeout is provided for the correct configuration
1167
- let dynamicThreshold;
1168
- let fixedTimeWindow;
1169
- let circuit = {};
1170
- if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
1171
- if (circuitData.dynamicThreshold <= 0)
1172
- throw new Error("The dynamic threshold should be > 0.");
1173
- dynamicThreshold = circuitData.dynamicThreshold;
1174
- // the Circuit data for the ceremony setup
1175
- circuit = {
1176
- name: circuitData.name,
1177
- description: circuitData.description,
1178
- prefix: circuitPrefix,
1179
- sequencePosition: i + 1,
1180
- metadata: metadata,
1181
- files: files,
1182
- template: template,
1183
- compiler: compiler,
1184
- verification: verification,
1185
- dynamicThreshold: dynamicThreshold,
1186
- avgTimings: {
1187
- contributionComputation: 0,
1188
- fullContribution: 0,
1189
- verifyCloudFunction: 0
1190
- },
1191
- };
1192
- }
1193
- if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
1194
- if (circuitData.fixedTimeWindow <= 0)
1195
- throw new Error("The fixed time window threshold should be > 0.");
1196
- fixedTimeWindow = circuitData.fixedTimeWindow;
1197
- // the Circuit data for the ceremony setup
1198
- circuit = {
1199
- name: circuitData.name,
1200
- description: circuitData.description,
1201
- prefix: circuitPrefix,
1202
- sequencePosition: i + 1,
1203
- metadata: metadata,
1204
- files: files,
1205
- template: template,
1206
- compiler: compiler,
1207
- verification: verification,
1208
- fixedTimeWindow: fixedTimeWindow,
1209
- avgTimings: {
1210
- contributionComputation: 0,
1211
- fullContribution: 0,
1212
- verifyCloudFunction: 0
1213
- },
1214
- };
1215
- }
1216
- circuits.push(circuit);
1217
- // remove the local r1cs download (if used for verifying the config only vs setup)
1218
- if (cleanup)
1219
- fs.unlinkSync(localR1csPath);
1220
- }
1221
- const setupData = {
1222
- ceremonyInputData: {
1223
- title: data.title,
1224
- description: data.description,
1225
- startDate: startDate.valueOf(),
1226
- endDate: endDate.valueOf(),
1227
- timeoutMechanismType: data.timeoutMechanismType,
1228
- penalty: data.penalty
1229
- },
1230
- ceremonyPrefix: extractPrefix(data.title),
1231
- circuits: circuits,
1232
- circuitArtifacts: circuitArtifacts
1233
- };
1234
- return setupData;
1235
- }
1236
- catch (error) {
1237
- throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
1238
- }
1239
- };
1074
+ const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
1075
+ /**
1076
+ * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1077
+ * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1078
+ * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1079
+ * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1080
+ * @param str <string> - the arbitrary string from which to extract the prefix.
1081
+ * @returns <string> - the resulting prefix.
1082
+ */
1083
+ const extractPrefix = (str) =>
1084
+ // eslint-disable-next-line no-useless-escape
1085
+ str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1240
1086
  /**
1241
1087
  * Extract data from a R1CS metadata file generated with a custom file-based logger.
1242
1088
  * @notice useful for extracting metadata circuits contained in the generated file using a logger
@@ -1293,17 +1139,6 @@ const formatZkeyIndex = (progress) => {
1293
1139
  * @returns <number> - the amount of powers.
1294
1140
  */
1295
1141
  const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
1296
- /**
1297
- * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1298
- * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1299
- * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1300
- * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1301
- * @param str <string> - the arbitrary string from which to extract the prefix.
1302
- * @returns <string> - the resulting prefix.
1303
- */
1304
- const extractPrefix = (str) =>
1305
- // eslint-disable-next-line no-useless-escape
1306
- str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1307
1142
  /**
1308
1143
  * Automate the generation of an entropy for a contribution.
1309
1144
  * @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
@@ -1370,7 +1205,9 @@ const getContributionsValidityForContributor = async (firestoreDatabase, circuit
1370
1205
  * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
1371
1206
  * @returns <string> - the public attestation preamble.
1372
1207
  */
1373
- const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName} MPC Phase2 Trusted Setup ceremony.\nThe following are my contribution signatures:`;
1208
+ const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}${ceremonyName.toLowerCase().includes("trusted setup") || ceremonyName.toLowerCase().includes("ceremony")
1209
+ ? "."
1210
+ : " MPC Phase2 Trusted Setup ceremony."}\nThe following are my contribution signatures:`;
1374
1211
  /**
1375
1212
  * Check and prepare public attestation for the contributor made only of its valid contributions.
1376
1213
  * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
@@ -1441,6 +1278,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
1441
1278
  // Return the read bytes.
1442
1279
  return buffer;
1443
1280
  };
1281
+ /**
1282
+ * Given a buffer in little endian format, convert it to bigint
1283
+ * @param buffer
1284
+ * @returns
1285
+ */
1286
+ function leBufferToBigint(buffer) {
1287
+ return BigInt(`0x${buffer.reverse().toString("hex")}`);
1288
+ }
1289
+ /**
1290
+ * Given an input containing string values, convert them
1291
+ * to bigint
1292
+ * @param input - The input to convert
1293
+ * @returns the input with string values converted to bigint
1294
+ */
1295
+ const unstringifyBigInts = (input) => {
1296
+ if (typeof input === "string" && /^[0-9]+$/.test(input)) {
1297
+ return BigInt(input);
1298
+ }
1299
+ if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
1300
+ return BigInt(input);
1301
+ }
1302
+ if (Array.isArray(input)) {
1303
+ return input.map(unstringifyBigInts);
1304
+ }
1305
+ if (input === null) {
1306
+ return null;
1307
+ }
1308
+ if (typeof input === "object") {
1309
+ return Object.entries(input).reduce((acc, [key, value]) => {
1310
+ acc[key] = unstringifyBigInts(value);
1311
+ return acc;
1312
+ }, {});
1313
+ }
1314
+ return input;
1315
+ };
1444
1316
  /**
1445
1317
  * Return the info about the R1CS file.ù
1446
1318
  * @dev this method was built taking inspiration from
@@ -1501,17 +1373,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
1501
1373
  let constraints = 0;
1502
1374
  try {
1503
1375
  // Get 'number of section' (jump magic r1cs and version1 data).
1504
- const numberOfSections = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1376
+ const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1505
1377
  // Jump to first section.
1506
1378
  pointer = 12;
1507
1379
  // For each section
1508
1380
  for (let i = 0; i < numberOfSections; i++) {
1509
1381
  // Read section type.
1510
- const sectionType = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1382
+ const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1511
1383
  // Jump to section size.
1512
1384
  pointer += 4;
1513
1385
  // Read section size
1514
- const sectionSize = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1386
+ const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1515
1387
  // If at header section (0x00000001 : Header Section).
1516
1388
  if (sectionType === BigInt(1)) {
1517
1389
  // Read info from header section.
@@ -1543,22 +1415,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
1543
1415
  */
1544
1416
  pointer += sectionSize - 20;
1545
1417
  // Read R1CS info.
1546
- wires = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1418
+ wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1547
1419
  pointer += 4;
1548
- publicOutputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1420
+ publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1549
1421
  pointer += 4;
1550
- publicInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1422
+ publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1551
1423
  pointer += 4;
1552
- privateInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1424
+ privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1553
1425
  pointer += 4;
1554
- labels = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1426
+ labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1555
1427
  pointer += 8;
1556
- constraints = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1428
+ constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1557
1429
  }
1558
1430
  pointer += 8 + Number(sectionSize);
1559
1431
  }
1560
1432
  return {
1561
- curve: "bn-128",
1433
+ curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
1562
1434
  wires,
1563
1435
  constraints,
1564
1436
  privateInputs,
@@ -1573,11 +1445,194 @@ const getR1CSInfo = (localR1CSFilePath) => {
1573
1445
  }
1574
1446
  };
1575
1447
  /**
1576
- * Return a string with double digits if the provided input is one digit only.
1577
- * @param in <number> - the input number to be converted.
1578
- * @returns <string> - the two digits stringified number derived from the conversion.
1448
+ * Parse and validate that the ceremony configuration is correct
1449
+ * @notice this does not upload any files to storage
1450
+ * @param path <string> - the path to the configuration file
1451
+ * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1452
+ * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1579
1453
  */
1580
/**
 * Return a string with double digits if the provided input is one digit only.
 * @param amount <number> - the input number to be converted.
 * @returns <string> - the two digits stringified number derived from the conversion.
 */
const convertToDoubleDigits = (amount) => {
    // Single-digit values get a leading zero; everything else is stringified as-is.
    if (amount < 10) {
        return `0${amount}`;
    }
    return amount.toString();
};
1454
/**
 * Parse and validate that the ceremony configuration is correct.
 * @notice this does not upload any files to storage.
 * @param path <string> - the path to the configuration file.
 * @param cleanup <boolean> - whether to delete the downloaded r1cs/wasm files after parsing.
 * @returns <any> - the data to pass to the cloud function for setup and the circuit artifacts.
 */
const parseCeremonyFile = async (path, cleanup = false) => {
    // check that the path exists
    if (!fs.existsSync(path))
        throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
    try {
        // read the data
        const data = JSON.parse(fs.readFileSync(path).toString());
        // verify that the timeout mechanism is one of the two supported types
        if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
            data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
            throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
        // validate that we have at least 1 circuit input data
        if (!data.circuits || data.circuits.length === 0)
            throw new Error("You need to provide the data for at least 1 circuit.");
        // Parse and validate the dates.
        // @note `new Date(<invalid string>)` never throws - it yields an "Invalid Date"
        // whose numeric value is NaN and whose comparisons are always false, so a
        // try/catch around the constructor can never fire and malformed dates would
        // silently pass the checks below. Validate valueOf() explicitly instead.
        const endDate = new Date(data.endDate);
        const startDate = new Date(data.startDate);
        if (Number.isNaN(startDate.valueOf()) || Number.isNaN(endDate.valueOf()))
            throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
        if (endDate <= startDate)
            throw new Error("The end date should be greater than the start date.");
        const currentDate = new Date();
        if (endDate <= currentDate || startDate <= currentDate)
            throw new Error("The start and end dates should be in the future.");
        // validate penalty
        if (data.penalty <= 0)
            throw new Error("The penalty should be greater than zero.");
        const circuits = [];
        const urlPattern = /(https?:\/\/[^\s]+)/g;
        const commitHashPattern = /^[a-f0-9]{40}$/i;
        const circuitArtifacts = [];
        // The promisified stream pipeline is loop-invariant - build it once.
        const streamPipeline = promisify(pipeline);
        for (let i = 0; i < data.circuits.length; i++) {
            const circuitData = data.circuits[i];
            const { artifacts } = circuitData;
            circuitArtifacts.push({
                artifacts
            });
            // where we storing the r1cs downloaded
            const localR1csPath = `./${circuitData.name}.r1cs`;
            // where we storing the wasm downloaded
            const localWasmPath = `./${circuitData.name}.wasm`;
            // download the r1cs to extract the metadata
            const responseR1CS = await fetch(artifacts.r1csStoragePath);
            // Handle errors (`ok` already covers every non-2xx status).
            if (!responseR1CS.ok)
                throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            // Write the file locally.
            await streamPipeline(responseR1CS.body, createWriteStream(localR1csPath));
            // extract the metadata from the r1cs
            const metadata = getR1CSInfo(localR1csPath);
            // download wasm too to ensure it's available
            const responseWASM = await fetch(artifacts.wasmStoragePath);
            if (!responseWASM.ok)
                throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            await streamPipeline(responseWASM.body, createWriteStream(localWasmPath));
            // validate that the circuit hash and template links are valid
            const { template } = circuitData;
            const URLMatch = template.source.match(urlPattern);
            if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
                throw new Error("You should provide the URL to the circuits templates on GitHub.");
            const hashMatch = template.commitHash.match(commitHashPattern);
            if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit templates.");
            // calculate the hash of the r1cs file
            const r1csBlake2bHash = await blake512FromPath(localR1csPath);
            const circuitPrefix = extractPrefix(circuitData.name);
            // filenames
            const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
            const r1csCompleteFilename = `${circuitData.name}.r1cs`;
            const wasmCompleteFilename = `${circuitData.name}.wasm`;
            const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
            const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
            // storage paths
            const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
            const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
            const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
            const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
            const files = {
                potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
                r1csFilename: r1csCompleteFilename,
                wasmFilename: wasmCompleteFilename,
                initialZkeyFilename: firstZkeyCompleteFilename,
                potStoragePath: potStorageFilePath,
                r1csStoragePath: r1csStorageFilePath,
                wasmStoragePath: wasmStorageFilePath,
                initialZkeyStoragePath: zkeyStorageFilePath,
                r1csBlake2bHash
            };
            // validate that the compiler hash is a valid hash
            const { compiler } = circuitData;
            const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
            if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit compiler.");
            // validate that the verification options are valid
            const { verification } = circuitData;
            if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
                throw new Error("Please enter a valid verification mechanism: either CF or VM");
            // @todo VM parameters verification
            // if (verification['cfOrVM'] === "VM") {}
            // check that the timeout is provided for the correct configuration
            let dynamicThreshold;
            let fixedTimeWindow;
            let circuit = {};
            if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
                if (circuitData.dynamicThreshold <= 0)
                    throw new Error("The dynamic threshold should be > 0.");
                dynamicThreshold = circuitData.dynamicThreshold;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    dynamicThreshold,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
                if (circuitData.fixedTimeWindow <= 0)
                    throw new Error("The fixed time window threshold should be > 0.");
                fixedTimeWindow = circuitData.fixedTimeWindow;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata,
                    files,
                    template,
                    compiler,
                    verification,
                    fixedTimeWindow,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    }
                };
            }
            circuits.push(circuit);
            // remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
            if (cleanup) {
                fs.unlinkSync(localR1csPath);
                fs.unlinkSync(localWasmPath);
            }
        }
        const setupData = {
            ceremonyInputData: {
                title: data.title,
                description: data.description,
                startDate: startDate.valueOf(),
                endDate: endDate.valueOf(),
                timeoutMechanismType: data.timeoutMechanismType,
                penalty: data.penalty
            },
            ceremonyPrefix: extractPrefix(data.title),
            circuits,
            circuitArtifacts
        };
        return setupData;
    }
    catch (error) {
        throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
    }
};
1581
1636
 
1582
1637
  /**
1583
1638
  * Verify that a zKey is valid
@@ -1826,7 +1881,7 @@ const getFirestoreDatabase = (app) => getFirestore(app);
1826
1881
  * @param app <FirebaseApp> - the Firebase application.
1827
1882
  * @returns <Functions> - the Cloud Functions associated to the application.
1828
1883
  */
1829
/**
 * Get the Cloud Functions instance associated to the given Firebase application.
 * @param app <FirebaseApp> - the Firebase application.
 * @returns <Functions> - the Cloud Functions associated to the application.
 */
const getFirebaseFunctions = (app) => {
    // The region must match where the cloud functions are deployed.
    return getFunctions(app, "europe-west1");
};
1830
1885
  /**
1831
1886
  * Retrieve the configuration variables for the AWS services (S3, EC2).
1832
1887
  * @returns <AWSVariables> - the values of the AWS services configuration variables.
@@ -1835,14 +1890,14 @@ const getAWSVariables = () => {
1835
1890
  if (!process.env.AWS_ACCESS_KEY_ID ||
1836
1891
  !process.env.AWS_SECRET_ACCESS_KEY ||
1837
1892
  !process.env.AWS_REGION ||
1838
- !process.env.AWS_ROLE_ARN ||
1893
+ !process.env.AWS_INSTANCE_PROFILE_ARN ||
1839
1894
  !process.env.AWS_AMI_ID)
1840
1895
  throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
1841
1896
  return {
1842
1897
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
1843
1898
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
1844
1899
  region: process.env.AWS_REGION || "us-east-1",
1845
- roleArn: process.env.AWS_ROLE_ARN,
1900
+ instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
1846
1901
  amiId: process.env.AWS_AMI_ID
1847
1902
  };
1848
1903
  };
@@ -1923,11 +1978,11 @@ const p256 = (proofPart) => {
1923
1978
  */
1924
1979
  const formatSolidityCalldata = (circuitInput, _proof) => {
1925
1980
  try {
1926
- const proof = utils.unstringifyBigInts(_proof);
1981
+ const proof = unstringifyBigInts(_proof);
1927
1982
  // format the public inputs to the circuit
1928
1983
  const formattedCircuitInput = [];
1929
1984
  for (const cInput of circuitInput) {
1930
- formattedCircuitInput.push(p256(utils.unstringifyBigInts(cInput)));
1985
+ formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
1931
1986
  }
1932
1987
  // construct calldata
1933
1988
  const calldata = {
@@ -2095,7 +2150,8 @@ const getGitHubStats = async (user) => {
2095
2150
  following: jsonData.following,
2096
2151
  followers: jsonData.followers,
2097
2152
  publicRepos: jsonData.public_repos,
2098
- avatarUrl: jsonData.avatar_url
2153
+ avatarUrl: jsonData.avatar_url,
2154
+ age: jsonData.created_at
2099
2155
  };
2100
2156
  return data;
2101
2157
  };
@@ -2107,20 +2163,21 @@ const getGitHubStats = async (user) => {
2107
2163
  * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
2108
2164
  * @returns <any> Return the avatar URL of the user if the user is reputable, false otherwise
2109
2165
  */
2110
/**
 * Check a GitHub user's reputation against minimum anti-sybil thresholds.
 * @param userLogin <string> - the GitHub login of the user.
 * @param minimumAmountOfFollowing <number> - the minimum amount of users the user should follow.
 * @param minimumAmountOfFollowers <number> - the minimum amount of followers the user should have.
 * @param minimumAmountOfPublicRepos <number> - the minimum amount of public repos the user should have.
 * @param minimumAge <number> - the minimum account age in milliseconds (defaults to 0, i.e. no age requirement).
 * @returns <any> - the avatar URL of the user if the user is reputable, false otherwise.
 */
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge = 0) => {
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registation, and then try again.");
    const { following, followers, publicRepos, avatarUrl, age } = await getGitHubStats(userLogin);
    // The account must have been created at least `minimumAge` ms before now.
    // @note defaulting to 0 makes the age check a no-op for callers that omit
    // the parameter. Previously a missing value produced `Date.now() - undefined
    // === NaN` -> Invalid Date -> a comparison that is always false, so the
    // check was skipped only by accident; the default makes that explicit.
    const latestAllowedCreationDate = new Date(Date.now() - minimumAge);
    if (following < minimumAmountOfFollowing ||
        publicRepos < minimumAmountOfPublicRepos ||
        followers < minimumAmountOfFollowers ||
        new Date(age) > latestAllowedCreationDate)
        return {
            reputable: false,
            avatarUrl: ""
        };
    return {
        reputable: true,
        avatarUrl
    };
};
2125
2182
  };
2126
2183
 
@@ -2307,8 +2364,8 @@ const createSSMClient = async () => {
2307
2364
  * @returns <Array<string>> - the list of startup commands to be executed.
2308
2365
  */
2309
2366
/**
 * Build the startup commands that bootstrap a fresh VM from the S3-hosted script.
 * @param bucketName <string> - the name of the S3 bucket holding the bootstrap script.
 * @returns <Array<string>> - the list of startup commands to be executed.
 */
const vmBootstrapCommand = (bucketName) => {
    const script = vmBootstrapScriptFilename;
    return [
        "#!/bin/bash", // shabang.
        `aws s3 cp s3://${bucketName}/${script} ${script}`, // copy file from S3 bucket to VM.
        `chmod +x ${script} && bash ${script}` // grant permission and execute.
    ];
};
2314
2371
  /**
@@ -2329,8 +2386,13 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2329
2386
  // eslint-disable-next-line no-template-curly-in-string
2330
2387
  "touch ${MARKER_FILE}",
2331
2388
  "sudo yum update -y",
2332
- "curl -sL https://rpm.nodesource.com/setup_16.x | sudo bash - ",
2333
- "sudo yum install -y nodejs",
2389
+ "curl -O https://nodejs.org/dist/v16.13.0/node-v16.13.0-linux-x64.tar.xz",
2390
+ "tar -xf node-v16.13.0-linux-x64.tar.xz",
2391
+ "mv node-v16.13.0-linux-x64 nodejs",
2392
+ "sudo mv nodejs /opt/",
2393
+ "echo 'export NODEJS_HOME=/opt/nodejs' >> /etc/profile",
2394
+ "echo 'export PATH=$NODEJS_HOME/bin:$PATH' >> /etc/profile",
2395
+ "source /etc/profile",
2334
2396
  "npm install -g snarkjs",
2335
2397
  `aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
2336
2398
  `aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
@@ -2349,6 +2411,7 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2349
2411
  * @returns Array<string> - the list of commands for contribution verification.
2350
2412
  */
2351
2413
  const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => [
2414
+ `source /etc/profile`,
2352
2415
  `aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`,
2353
2416
  `snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`,
2354
2417
  `aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
@@ -2375,7 +2438,7 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
2375
2438
  */
2376
2439
  const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
2377
2440
  // Get the AWS variables.
2378
- const { amiId, roleArn } = getAWSVariables();
2441
+ const { amiId, instanceProfileArn } = getAWSVariables();
2379
2442
  // Parametrize the VM EC2 instance.
2380
2443
  const params = {
2381
2444
  ImageId: amiId,
@@ -2384,7 +2447,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2384
2447
  MinCount: 1,
2385
2448
  // nb. to find this: iam -> roles -> role_name.
2386
2449
  IamInstanceProfile: {
2387
- Arn: roleArn
2450
+ Arn: instanceProfileArn
2388
2451
  },
2389
2452
  // nb. for running commands at the startup.
2390
2453
  UserData: Buffer.from(commands.join("\n")).toString("base64"),
@@ -2393,7 +2456,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2393
2456
  DeviceName: "/dev/xvda",
2394
2457
  Ebs: {
2395
2458
  DeleteOnTermination: true,
2396
- VolumeSize: volumeSize,
2459
+ VolumeSize: volumeSize, // disk size in GB.
2397
2460
  VolumeType: diskType
2398
2461
  }
2399
2462
  }
@@ -2579,4 +2642,4 @@ const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
2579
2642
  }
2580
2643
  };
2581
2644
 
2582
- export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
2645
+ export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCeremonies, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };