@devtion/actions 0.0.0-270e9e0 → 0.0.0-2cb7418

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. package/README.md +1 -1
  2. package/dist/index.mjs +331 -280
  3. package/dist/index.node.js +331 -279
  4. package/dist/types/src/helpers/constants.d.ts +8 -0
  5. package/dist/types/src/helpers/constants.d.ts.map +1 -1
  6. package/dist/types/src/helpers/contracts.d.ts.map +1 -1
  7. package/dist/types/src/helpers/crypto.d.ts +1 -0
  8. package/dist/types/src/helpers/crypto.d.ts.map +1 -1
  9. package/dist/types/src/helpers/database.d.ts +8 -0
  10. package/dist/types/src/helpers/database.d.ts.map +1 -1
  11. package/dist/types/src/helpers/security.d.ts +2 -2
  12. package/dist/types/src/helpers/security.d.ts.map +1 -1
  13. package/dist/types/src/helpers/storage.d.ts +5 -2
  14. package/dist/types/src/helpers/storage.d.ts.map +1 -1
  15. package/dist/types/src/helpers/utils.d.ts +34 -20
  16. package/dist/types/src/helpers/utils.d.ts.map +1 -1
  17. package/dist/types/src/helpers/verification.d.ts +3 -2
  18. package/dist/types/src/helpers/verification.d.ts.map +1 -1
  19. package/dist/types/src/helpers/vm.d.ts.map +1 -1
  20. package/dist/types/src/index.d.ts +1 -1
  21. package/dist/types/src/index.d.ts.map +1 -1
  22. package/dist/types/src/types/index.d.ts +9 -3
  23. package/dist/types/src/types/index.d.ts.map +1 -1
  24. package/package.json +3 -8
  25. package/src/helpers/constants.ts +8 -0
  26. package/src/helpers/contracts.ts +3 -3
  27. package/src/helpers/database.ts +13 -0
  28. package/src/helpers/functions.ts +1 -1
  29. package/src/helpers/security.ts +33 -52
  30. package/src/helpers/services.ts +3 -3
  31. package/src/helpers/storage.ts +15 -3
  32. package/src/helpers/utils.ts +316 -277
  33. package/src/helpers/verification.ts +6 -6
  34. package/src/helpers/vm.ts +14 -7
  35. package/src/index.ts +3 -2
  36. package/src/types/index.ts +32 -8
package/dist/index.mjs CHANGED
@@ -1,6 +1,6 @@
1
1
  /**
2
2
  * @module @p0tion/actions
3
- * @version 1.0.4
3
+ * @version 1.2.0
4
4
  * @file A set of actions and helpers for CLI commands
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -15,10 +15,8 @@ import { onSnapshot, query, collection, getDocs, doc, getDoc, where, Timestamp,
15
15
  import { zKey, groth16 } from 'snarkjs';
16
16
  import crypto from 'crypto';
17
17
  import blake from 'blakejs';
18
- import { utils } from 'ffjavascript';
19
18
  import winston from 'winston';
20
- import { S3Client, HeadObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';
21
- import { pipeline, Readable } from 'stream';
19
+ import { pipeline } from 'stream';
22
20
  import { promisify } from 'util';
23
21
  import { initializeApp } from 'firebase/app';
24
22
  import { signInWithCredential, initializeAuth, getAuth } from 'firebase/auth';
@@ -244,6 +242,12 @@ const commonTerms = {
244
242
  verificationStartedAt: "verificationStartedAt"
245
243
  }
246
244
  },
245
+ avatars: {
246
+ name: "avatars",
247
+ fields: {
248
+ avatarUrl: "avatarUrl"
249
+ }
250
+ },
247
251
  ceremonies: {
248
252
  name: "ceremonies",
249
253
  fields: {
@@ -335,6 +339,8 @@ const commonTerms = {
335
339
  finalizeCeremony: "finalizeCeremony",
336
340
  downloadCircuitArtifacts: "downloadCircuitArtifacts",
337
341
  transferObject: "transferObject",
342
+ bandadaValidateProof: "bandadaValidateProof",
343
+ checkNonceOfSIWEAddress: "checkNonceOfSIWEAddress"
338
344
  }
339
345
  };
340
346
 
@@ -685,19 +691,23 @@ const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey,
685
691
  * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
686
692
  * @param ceremonyId <string> - the unique identifier of the ceremony.
687
693
  * @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
694
+ * @param logger <GenericBar> - an optional logger to show progress.
688
695
  * @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
689
696
  */
690
- const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
697
+ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks, logger) => {
691
698
  // Keep track of uploaded chunks.
692
699
  const uploadedChunks = alreadyUploadedChunks || [];
700
+ // if we were passed a logger, start it
701
+ if (logger)
702
+ logger.start(chunksWithUrls.length, 0);
693
703
  // Loop through remaining chunks.
694
704
  for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
695
705
  // Consume the pre-signed url to upload the chunk.
696
706
  // @ts-ignore
697
707
  const response = await fetch(chunksWithUrls[i].preSignedUrl, {
698
708
  retryOptions: {
699
- retryInitialDelay: 500,
700
- socketTimeout: 60000,
709
+ retryInitialDelay: 500, // 500 ms.
710
+ socketTimeout: 60000, // 60 seconds.
701
711
  retryMaxDuration: 300000 // 5 minutes.
702
712
  },
703
713
  method: "PUT",
@@ -721,6 +731,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
721
731
  // nb. this must be done only when contributing (not finalizing).
722
732
  if (!!ceremonyId && !!cloudFunctions)
723
733
  await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
734
+ // increment the count on the logger
735
+ if (logger)
736
+ logger.increment();
724
737
  }
725
738
  return uploadedChunks;
726
739
  };
@@ -741,8 +754,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
741
754
  * @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
742
755
  * @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
743
756
  * @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
757
+ * @param logger <GenericBar> - an optional logger to show progress.
744
758
  */
745
- const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
759
+ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload, logger) => {
746
760
  // The unique identifier of the multi-part upload.
747
761
  let multiPartUploadId = "";
748
762
  // The list of already uploaded chunks.
@@ -766,7 +780,7 @@ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFileP
766
780
  const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
767
781
  // Step (2).
768
782
  const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
769
- cloudFunctions, ceremonyId, alreadyUploadedChunks);
783
+ cloudFunctions, ceremonyId, alreadyUploadedChunks, logger);
770
784
  // Step (3).
771
785
  await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
772
786
  };
@@ -990,6 +1004,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
990
1004
  ]);
991
1005
  return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
992
1006
  };
1007
+ /**
1008
+ * Query all ceremonies
1009
+ * @notice get all ceremonies from the database.
1010
+ * @dev this is a helper for the CLI ceremony methods.
1011
+ * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
1012
+ * @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
1013
+ */
1014
+ const getAllCeremonies = async (firestoreDatabase) => {
1015
+ const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
1016
+ return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
1017
+ };
993
1018
 
994
1019
  /**
995
1020
  * @hidden
@@ -1038,189 +1063,22 @@ const compareHashes = async (path1, path2) => {
1038
1063
  };
1039
1064
 
1040
1065
  /**
1041
- * Parse and validate that the ceremony configuration is correct
1042
- * @notice this does not upload any files to storage
1043
- * @param path <string> - the path to the configuration file
1044
- * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1045
- * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1066
+ * Return a string with double digits if the provided input is one digit only.
1067
+ * @param in <number> - the input number to be converted.
1068
+ * @returns <string> - the two digits stringified number derived from the conversion.
1046
1069
  */
1047
- const parseCeremonyFile = async (path, cleanup = false) => {
1048
- // check that the path exists
1049
- if (!fs.existsSync(path))
1050
- throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
1051
- try {
1052
- // read the data
1053
- const data = JSON.parse(fs.readFileSync(path).toString());
1054
- // verify that the data is correct
1055
- if (data['timeoutMechanismType'] !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ && data['timeoutMechanismType'] !== "FIXED" /* CeremonyTimeoutType.FIXED */)
1056
- throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
1057
- // validate that we have at least 1 circuit input data
1058
- if (!data.circuits || data.circuits.length === 0)
1059
- throw new Error("You need to provide the data for at least 1 circuit.");
1060
- // validate that the end date is in the future
1061
- let endDate;
1062
- let startDate;
1063
- try {
1064
- endDate = new Date(data.endDate);
1065
- startDate = new Date(data.startDate);
1066
- }
1067
- catch (error) {
1068
- throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
1069
- }
1070
- if (endDate <= startDate)
1071
- throw new Error("The end date should be greater than the start date.");
1072
- const currentDate = new Date();
1073
- if (endDate <= currentDate || startDate <= currentDate)
1074
- throw new Error("The start and end dates should be in the future.");
1075
- // validate penalty
1076
- if (data.penalty <= 0)
1077
- throw new Error("The penalty should be greater than zero.");
1078
- const circuits = [];
1079
- const urlPattern = /(https?:\/\/[^\s]+)/g;
1080
- const commitHashPattern = /^[a-f0-9]{40}$/i;
1081
- const circuitArtifacts = [];
1082
- for (let i = 0; i < data.circuits.length; i++) {
1083
- const circuitData = data.circuits[i];
1084
- const artifacts = circuitData.artifacts;
1085
- circuitArtifacts.push({
1086
- artifacts: artifacts
1087
- });
1088
- const r1csPath = artifacts.r1csStoragePath;
1089
- const wasmPath = artifacts.wasmStoragePath;
1090
- // where we storing the r1cs downloaded
1091
- const localR1csPath = `./${circuitData.name}.r1cs`;
1092
- // check that the artifacts exist in S3
1093
- // we don't need any privileges to download this
1094
- // just the correct region
1095
- const s3 = new S3Client({ region: artifacts.region });
1096
- try {
1097
- await s3.send(new HeadObjectCommand({
1098
- Bucket: artifacts.bucket,
1099
- Key: r1csPath
1100
- }));
1101
- }
1102
- catch (error) {
1103
- throw new Error(`The r1cs file (${r1csPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1104
- }
1105
- try {
1106
- await s3.send(new HeadObjectCommand({
1107
- Bucket: artifacts.bucket,
1108
- Key: wasmPath
1109
- }));
1110
- }
1111
- catch (error) {
1112
- throw new Error(`The wasm file (${wasmPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1113
- }
1114
- // download the r1cs to extract the metadata
1115
- const command = new GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
1116
- const response = await s3.send(command);
1117
- const streamPipeline = promisify(pipeline);
1118
- if (response.$metadata.httpStatusCode !== 200)
1119
- throw new Error("There was an error while trying to download the r1cs file. Please check that the file has the correct permissions (public) set.");
1120
- if (response.Body instanceof Readable)
1121
- await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
1122
- // extract the metadata from the r1cs
1123
- const metadata = getR1CSInfo(localR1csPath);
1124
- // validate that the circuit hash and template links are valid
1125
- const template = circuitData.template;
1126
- const URLMatch = template.source.match(urlPattern);
1127
- if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
1128
- throw new Error("You should provide the URL to the circuits templates on GitHub.");
1129
- const hashMatch = template.commitHash.match(commitHashPattern);
1130
- if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
1131
- throw new Error("You should provide a valid commit hash of the circuit templates.");
1132
- // calculate the hash of the r1cs file
1133
- const r1csBlake2bHash = await blake512FromPath(localR1csPath);
1134
- const circuitPrefix = extractPrefix(circuitData.name);
1135
- // filenames
1136
- const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
1137
- const r1csCompleteFilename = `${circuitData.name}.r1cs`;
1138
- const wasmCompleteFilename = `${circuitData.name}.wasm`;
1139
- const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
1140
- const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
1141
- // storage paths
1142
- const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
1143
- const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
1144
- const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
1145
- const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
1146
- const files = {
1147
- potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
1148
- r1csFilename: r1csCompleteFilename,
1149
- wasmFilename: wasmCompleteFilename,
1150
- initialZkeyFilename: firstZkeyCompleteFilename,
1151
- potStoragePath: potStorageFilePath,
1152
- r1csStoragePath: r1csStorageFilePath,
1153
- wasmStoragePath: wasmStorageFilePath,
1154
- initialZkeyStoragePath: zkeyStorageFilePath,
1155
- r1csBlake2bHash: r1csBlake2bHash
1156
- };
1157
- // validate that the compiler hash is a valid hash
1158
- const compiler = circuitData.compiler;
1159
- const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
1160
- if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
1161
- throw new Error("You should provide a valid commit hash of the circuit compiler.");
1162
- // validate that the verification options are valid
1163
- const verification = circuitData.verification;
1164
- if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
1165
- throw new Error("Please enter a valid verification mechanism: either CF or VM");
1166
- // @todo VM parameters verification
1167
- // if (verification['cfOrVM'] === "VM") {}
1168
- // check that the timeout is provided for the correct configuration
1169
- let dynamicThreshold;
1170
- let fixedTimeWindow;
1171
- if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
1172
- if (circuitData.dynamicThreshold <= 0)
1173
- throw new Error("The dynamic threshold should be > 0.");
1174
- dynamicThreshold = circuitData.dynamicThreshold;
1175
- }
1176
- if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
1177
- if (circuitData.fixedTimeWindow <= 0)
1178
- throw new Error("The fixed time window threshold should be > 0.");
1179
- fixedTimeWindow = circuitData.fixedTimeWindow;
1180
- }
1181
- // the Circuit data for the ceremony setup
1182
- const circuit = {
1183
- name: circuitData.name,
1184
- description: circuitData.description,
1185
- prefix: circuitPrefix,
1186
- sequencePosition: i + 1,
1187
- metadata: metadata,
1188
- files: files,
1189
- template: template,
1190
- compiler: compiler,
1191
- verification: verification,
1192
- fixedTimeWindow: fixedTimeWindow,
1193
- // dynamicThreshold: dynamicThreshold,
1194
- avgTimings: {
1195
- contributionComputation: 0,
1196
- fullContribution: 0,
1197
- verifyCloudFunction: 0
1198
- },
1199
- };
1200
- circuits.push(circuit);
1201
- // remove the local r1cs download (if used for verifying the config only vs setup)
1202
- if (cleanup)
1203
- fs.unlinkSync(localR1csPath);
1204
- }
1205
- const setupData = {
1206
- ceremonyInputData: {
1207
- title: data.title,
1208
- description: data.description,
1209
- startDate: startDate.valueOf(),
1210
- endDate: endDate.valueOf(),
1211
- timeoutMechanismType: data.timeoutMechanismType,
1212
- penalty: data.penalty
1213
- },
1214
- ceremonyPrefix: extractPrefix(data.title),
1215
- circuits: circuits,
1216
- circuitArtifacts: circuitArtifacts
1217
- };
1218
- return setupData;
1219
- }
1220
- catch (error) {
1221
- throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
1222
- }
1223
- };
1070
+ const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
1071
+ /**
1072
+ * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1073
+ * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1074
+ * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1075
+ * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1076
+ * @param str <string> - the arbitrary string from which to extract the prefix.
1077
+ * @returns <string> - the resulting prefix.
1078
+ */
1079
+ const extractPrefix = (str) =>
1080
+ // eslint-disable-next-line no-useless-escape
1081
+ str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1224
1082
  /**
1225
1083
  * Extract data from a R1CS metadata file generated with a custom file-based logger.
1226
1084
  * @notice useful for extracting metadata circuits contained in the generated file using a logger
@@ -1277,17 +1135,6 @@ const formatZkeyIndex = (progress) => {
1277
1135
  * @returns <number> - the amount of powers.
1278
1136
  */
1279
1137
  const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
1280
- /**
1281
- * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1282
- * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1283
- * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1284
- * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1285
- * @param str <string> - the arbitrary string from which to extract the prefix.
1286
- * @returns <string> - the resulting prefix.
1287
- */
1288
- const extractPrefix = (str) =>
1289
- // eslint-disable-next-line no-useless-escape
1290
- str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1291
1138
  /**
1292
1139
  * Automate the generation of an entropy for a contribution.
1293
1140
  * @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
@@ -1354,7 +1201,9 @@ const getContributionsValidityForContributor = async (firestoreDatabase, circuit
1354
1201
  * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
1355
1202
  * @returns <string> - the public attestation preamble.
1356
1203
  */
1357
- const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName} MPC Phase2 Trusted Setup ceremony.\nThe following are my contribution signatures:`;
1204
+ const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}${ceremonyName.toLowerCase().includes("trusted setup") || ceremonyName.toLowerCase().includes("ceremony")
1205
+ ? "."
1206
+ : " MPC Phase2 Trusted Setup ceremony."}\nThe following are my contribution signatures:`;
1358
1207
  /**
1359
1208
  * Check and prepare public attestation for the contributor made only of its valid contributions.
1360
1209
  * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
@@ -1425,6 +1274,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
1425
1274
  // Return the read bytes.
1426
1275
  return buffer;
1427
1276
  };
1277
+ /**
1278
+ * Given a buffer in little endian format, convert it to bigint
1279
+ * @param buffer
1280
+ * @returns
1281
+ */
1282
+ function leBufferToBigint(buffer) {
1283
+ return BigInt(`0x${buffer.reverse().toString("hex")}`);
1284
+ }
1285
+ /**
1286
+ * Given an input containing string values, convert them
1287
+ * to bigint
1288
+ * @param input - The input to convert
1289
+ * @returns the input with string values converted to bigint
1290
+ */
1291
+ const unstringifyBigInts = (input) => {
1292
+ if (typeof input === "string" && /^[0-9]+$/.test(input)) {
1293
+ return BigInt(input);
1294
+ }
1295
+ if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
1296
+ return BigInt(input);
1297
+ }
1298
+ if (Array.isArray(input)) {
1299
+ return input.map(unstringifyBigInts);
1300
+ }
1301
+ if (input === null) {
1302
+ return null;
1303
+ }
1304
+ if (typeof input === "object") {
1305
+ return Object.entries(input).reduce((acc, [key, value]) => {
1306
+ acc[key] = unstringifyBigInts(value);
1307
+ return acc;
1308
+ }, {});
1309
+ }
1310
+ return input;
1311
+ };
1428
1312
  /**
1429
1313
  * Return the info about the R1CS file.ù
1430
1314
  * @dev this method was built taking inspiration from
@@ -1485,17 +1369,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
1485
1369
  let constraints = 0;
1486
1370
  try {
1487
1371
  // Get 'number of section' (jump magic r1cs and version1 data).
1488
- const numberOfSections = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1372
+ const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1489
1373
  // Jump to first section.
1490
1374
  pointer = 12;
1491
1375
  // For each section
1492
1376
  for (let i = 0; i < numberOfSections; i++) {
1493
1377
  // Read section type.
1494
- const sectionType = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1378
+ const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1495
1379
  // Jump to section size.
1496
1380
  pointer += 4;
1497
1381
  // Read section size
1498
- const sectionSize = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1382
+ const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1499
1383
  // If at header section (0x00000001 : Header Section).
1500
1384
  if (sectionType === BigInt(1)) {
1501
1385
  // Read info from header section.
@@ -1527,22 +1411,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
1527
1411
  */
1528
1412
  pointer += sectionSize - 20;
1529
1413
  // Read R1CS info.
1530
- wires = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1414
+ wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1531
1415
  pointer += 4;
1532
- publicOutputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1416
+ publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1533
1417
  pointer += 4;
1534
- publicInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1418
+ publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1535
1419
  pointer += 4;
1536
- privateInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1420
+ privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1537
1421
  pointer += 4;
1538
- labels = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1422
+ labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1539
1423
  pointer += 8;
1540
- constraints = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1424
+ constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1541
1425
  }
1542
1426
  pointer += 8 + Number(sectionSize);
1543
1427
  }
1544
1428
  return {
1545
- curve: "bn-128",
1429
+ curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
1546
1430
  wires,
1547
1431
  constraints,
1548
1432
  privateInputs,
@@ -1557,11 +1441,194 @@ const getR1CSInfo = (localR1CSFilePath) => {
1557
1441
  }
1558
1442
  };
1559
1443
  /**
1560
- * Return a string with double digits if the provided input is one digit only.
1561
- * @param in <number> - the input number to be converted.
1562
- * @returns <string> - the two digits stringified number derived from the conversion.
1444
+ * Parse and validate that the ceremony configuration is correct
1445
+ * @notice this does not upload any files to storage
1446
+ * @param path <string> - the path to the configuration file
1447
+ * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1448
+ * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1563
1449
  */
1564
- const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
1450
+ const parseCeremonyFile = async (path, cleanup = false) => {
1451
+ // check that the path exists
1452
+ if (!fs.existsSync(path))
1453
+ throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
1454
+ try {
1455
+ // read the data
1456
+ const data = JSON.parse(fs.readFileSync(path).toString());
1457
+ // verify that the data is correct
1458
+ if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
1459
+ data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
1460
+ throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
1461
+ // validate that we have at least 1 circuit input data
1462
+ if (!data.circuits || data.circuits.length === 0)
1463
+ throw new Error("You need to provide the data for at least 1 circuit.");
1464
+ // validate that the end date is in the future
1465
+ let endDate;
1466
+ let startDate;
1467
+ try {
1468
+ endDate = new Date(data.endDate);
1469
+ startDate = new Date(data.startDate);
1470
+ }
1471
+ catch (error) {
1472
+ throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
1473
+ }
1474
+ if (endDate <= startDate)
1475
+ throw new Error("The end date should be greater than the start date.");
1476
+ const currentDate = new Date();
1477
+ if (endDate <= currentDate || startDate <= currentDate)
1478
+ throw new Error("The start and end dates should be in the future.");
1479
+ // validate penalty
1480
+ if (data.penalty <= 0)
1481
+ throw new Error("The penalty should be greater than zero.");
1482
+ const circuits = [];
1483
+ const urlPattern = /(https?:\/\/[^\s]+)/g;
1484
+ const commitHashPattern = /^[a-f0-9]{40}$/i;
1485
+ const circuitArtifacts = [];
1486
+ for (let i = 0; i < data.circuits.length; i++) {
1487
+ const circuitData = data.circuits[i];
1488
+ const { artifacts } = circuitData;
1489
+ circuitArtifacts.push({
1490
+ artifacts
1491
+ });
1492
+ // where we storing the r1cs downloaded
1493
+ const localR1csPath = `./${circuitData.name}.r1cs`;
1494
+ // where we storing the wasm downloaded
1495
+ const localWasmPath = `./${circuitData.name}.wasm`;
1496
+ // download the r1cs to extract the metadata
1497
+ const streamPipeline = promisify(pipeline);
1498
+ // Make the call.
1499
+ const responseR1CS = await fetch(artifacts.r1csStoragePath);
1500
+ // Handle errors.
1501
+ if (!responseR1CS.ok && responseR1CS.status !== 200)
1502
+ throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
1503
+ await streamPipeline(responseR1CS.body, createWriteStream(localR1csPath));
1504
+ // Write the file locally
1505
+ // extract the metadata from the r1cs
1506
+ const metadata = getR1CSInfo(localR1csPath);
1507
+ // download wasm too to ensure it's available
1508
+ const responseWASM = await fetch(artifacts.wasmStoragePath);
1509
+ if (!responseWASM.ok && responseWASM.status !== 200)
1510
+ throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
1511
+ await streamPipeline(responseWASM.body, createWriteStream(localWasmPath));
1512
+ // validate that the circuit hash and template links are valid
1513
+ const { template } = circuitData;
1514
+ const URLMatch = template.source.match(urlPattern);
1515
+ if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
1516
+ throw new Error("You should provide the URL to the circuits templates on GitHub.");
1517
+ const hashMatch = template.commitHash.match(commitHashPattern);
1518
+ if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
1519
+ throw new Error("You should provide a valid commit hash of the circuit templates.");
1520
+ // calculate the hash of the r1cs file
1521
+ const r1csBlake2bHash = await blake512FromPath(localR1csPath);
1522
+ const circuitPrefix = extractPrefix(circuitData.name);
1523
+ // filenames
1524
+ const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
1525
+ const r1csCompleteFilename = `${circuitData.name}.r1cs`;
1526
+ const wasmCompleteFilename = `${circuitData.name}.wasm`;
1527
+ const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
1528
+ const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
1529
+ // storage paths
1530
+ const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
1531
+ const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
1532
+ const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
1533
+ const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
1534
+ const files = {
1535
+ potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
1536
+ r1csFilename: r1csCompleteFilename,
1537
+ wasmFilename: wasmCompleteFilename,
1538
+ initialZkeyFilename: firstZkeyCompleteFilename,
1539
+ potStoragePath: potStorageFilePath,
1540
+ r1csStoragePath: r1csStorageFilePath,
1541
+ wasmStoragePath: wasmStorageFilePath,
1542
+ initialZkeyStoragePath: zkeyStorageFilePath,
1543
+ r1csBlake2bHash
1544
+ };
1545
+ // validate that the compiler hash is a valid hash
1546
+ const { compiler } = circuitData;
1547
+ const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
1548
+ if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
1549
+ throw new Error("You should provide a valid commit hash of the circuit compiler.");
1550
+ // validate that the verification options are valid
1551
+ const { verification } = circuitData;
1552
+ if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
1553
+ throw new Error("Please enter a valid verification mechanism: either CF or VM");
1554
+ // @todo VM parameters verification
1555
+ // if (verification['cfOrVM'] === "VM") {}
1556
+ // check that the timeout is provided for the correct configuration
1557
+ let dynamicThreshold;
1558
+ let fixedTimeWindow;
1559
+ let circuit = {};
1560
+ if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
1561
+ if (circuitData.dynamicThreshold <= 0)
1562
+ throw new Error("The dynamic threshold should be > 0.");
1563
+ dynamicThreshold = circuitData.dynamicThreshold;
1564
+ // the Circuit data for the ceremony setup
1565
+ circuit = {
1566
+ name: circuitData.name,
1567
+ description: circuitData.description,
1568
+ prefix: circuitPrefix,
1569
+ sequencePosition: i + 1,
1570
+ metadata,
1571
+ files,
1572
+ template,
1573
+ compiler,
1574
+ verification,
1575
+ dynamicThreshold,
1576
+ avgTimings: {
1577
+ contributionComputation: 0,
1578
+ fullContribution: 0,
1579
+ verifyCloudFunction: 0
1580
+ }
1581
+ };
1582
+ }
1583
+ if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
1584
+ if (circuitData.fixedTimeWindow <= 0)
1585
+ throw new Error("The fixed time window threshold should be > 0.");
1586
+ fixedTimeWindow = circuitData.fixedTimeWindow;
1587
+ // the Circuit data for the ceremony setup
1588
+ circuit = {
1589
+ name: circuitData.name,
1590
+ description: circuitData.description,
1591
+ prefix: circuitPrefix,
1592
+ sequencePosition: i + 1,
1593
+ metadata,
1594
+ files,
1595
+ template,
1596
+ compiler,
1597
+ verification,
1598
+ fixedTimeWindow,
1599
+ avgTimings: {
1600
+ contributionComputation: 0,
1601
+ fullContribution: 0,
1602
+ verifyCloudFunction: 0
1603
+ }
1604
+ };
1605
+ }
1606
+ circuits.push(circuit);
1607
+ // remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
1608
+ if (cleanup) {
1609
+ fs.unlinkSync(localR1csPath);
1610
+ fs.unlinkSync(localWasmPath);
1611
+ }
1612
+ }
1613
+ const setupData = {
1614
+ ceremonyInputData: {
1615
+ title: data.title,
1616
+ description: data.description,
1617
+ startDate: startDate.valueOf(),
1618
+ endDate: endDate.valueOf(),
1619
+ timeoutMechanismType: data.timeoutMechanismType,
1620
+ penalty: data.penalty
1621
+ },
1622
+ ceremonyPrefix: extractPrefix(data.title),
1623
+ circuits,
1624
+ circuitArtifacts
1625
+ };
1626
+ return setupData;
1627
+ }
1628
+ catch (error) {
1629
+ throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
1630
+ }
1631
+ };
1565
1632
 
1566
1633
  /**
1567
1634
  * Verify that a zKey is valid
@@ -1810,7 +1877,7 @@ const getFirestoreDatabase = (app) => getFirestore(app);
1810
1877
  * @param app <FirebaseApp> - the Firebase application.
1811
1878
  * @returns <Functions> - the Cloud Functions associated to the application.
1812
1879
  */
1813
const getFirebaseFunctions = (app) => {
    // Cloud Functions for this project are deployed to the europe-west1 region.
    return getFunctions(app, "europe-west1");
};
1814
1881
  /**
1815
1882
  * Retrieve the configuration variables for the AWS services (S3, EC2).
1816
1883
  * @returns <AWSVariables> - the values of the AWS services configuration variables.
const getAWSVariables = () => {
    // All five variables are mandatory: fail fast with a single descriptive error
    // instead of letting downstream AWS SDK calls fail with obscure messages.
    if (!process.env.AWS_ACCESS_KEY_ID ||
        !process.env.AWS_SECRET_ACCESS_KEY ||
        !process.env.AWS_REGION ||
        !process.env.AWS_INSTANCE_PROFILE_ARN ||
        !process.env.AWS_AMI_ID)
        throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
    return {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        // nb. the guard above guarantees AWS_REGION is a non-empty string, so the
        // previous `|| "us-east-1"` fallback was unreachable and has been removed.
        region: process.env.AWS_REGION,
        instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
        amiId: process.env.AWS_AMI_ID
    };
};
@@ -1907,11 +1974,11 @@ const p256 = (proofPart) => {
1907
1974
  */
1908
1975
  const formatSolidityCalldata = (circuitInput, _proof) => {
1909
1976
  try {
1910
- const proof = utils.unstringifyBigInts(_proof);
1977
+ const proof = unstringifyBigInts(_proof);
1911
1978
  // format the public inputs to the circuit
1912
1979
  const formattedCircuitInput = [];
1913
1980
  for (const cInput of circuitInput) {
1914
- formattedCircuitInput.push(p256(utils.unstringifyBigInts(cInput)));
1981
+ formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
1915
1982
  }
1916
1983
  // construct calldata
1917
1984
  const calldata = {
@@ -2061,55 +2128,28 @@ const verifyCeremony = async (functions, firestore, ceremonyPrefix, outputDirect
2061
2128
  };
2062
2129
 
2063
2130
/**
 * This function queries the GitHub API to fetch users statistics
 * @param user {string} the user uid
 * @returns {any} the stats from the GitHub API
 */
const getGitHubStats = async (user) => {
    // A single call to the user endpoint yields following/followers/public repo
    // counts plus profile data, replacing three separate list requests.
    const response = await fetch(`https://api.github.com/user/${user}`, {
        method: "GET",
        headers: {
            Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}`
        }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the user's statistic. Please try again.");
    // Map the snake_case API payload onto the camelCase shape used by callers.
    const {
        following,
        followers,
        public_repos: publicRepos,
        avatar_url: avatarUrl,
        created_at: age
    } = await response.json();
    return { following, followers, publicRepos, avatarUrl, age };
};
2114
2154
/**
 * This function will check if the user is reputable enough to be able to use the app
 * @param userLogin <string> The GitHub user id used to query the GitHub API
 * @param minimumAmountOfFollowing <number> The minimum amount of following the user should have
 * @param minimumAmountOfFollowers <number> The minimum amount of followers the user should have
 * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
 * @param minimumAge <number> The minimum account age in milliseconds (accounts created after now - minimumAge are rejected)
 * @returns <any> { reputable, avatarUrl } - reputable is true and the avatar URL is set only when every threshold is met
 */
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge) => {
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registration, and then try again.");
    const { following, followers, publicRepos, avatarUrl, age } = await getGitHubStats(userLogin);
    // Reject when any threshold is unmet, or when the account creation date is
    // later than (now - minimumAge), i.e. the account is too young.
    if (following < minimumAmountOfFollowing ||
        publicRepos < minimumAmountOfPublicRepos ||
        followers < minimumAmountOfFollowers ||
        new Date(age) > new Date(Date.now() - minimumAge))
        return {
            reputable: false,
            avatarUrl: ""
        };
    return {
        reputable: true,
        avatarUrl
    };
};
2134
2179
 
2135
2180
  /**
@@ -2315,8 +2360,8 @@ const createSSMClient = async () => {
2315
2360
  * @returns <Array<string>> - the list of startup commands to be executed.
2316
2361
  */
2317
2362
const vmBootstrapCommand = (bucketName) => {
    // Location of the bootstrap script inside the ceremony S3 bucket.
    const scriptOnS3 = `s3://${bucketName}/${vmBootstrapScriptFilename}`;
    return [
        "#!/bin/bash", // shebang.
        `aws s3 cp ${scriptOnS3} ${vmBootstrapScriptFilename}`, // copy file from S3 bucket to VM.
        `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}` // grant permission and execute.
    ];
};
2322
2367
  /**
@@ -2337,8 +2382,13 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2337
2382
  // eslint-disable-next-line no-template-curly-in-string
2338
2383
  "touch ${MARKER_FILE}",
2339
2384
  "sudo yum update -y",
2340
- "curl -sL https://rpm.nodesource.com/setup_16.x | sudo bash - ",
2341
- "sudo yum install -y nodejs",
2385
+ "curl -O https://nodejs.org/dist/v16.13.0/node-v16.13.0-linux-x64.tar.xz",
2386
+ "tar -xf node-v16.13.0-linux-x64.tar.xz",
2387
+ "mv node-v16.13.0-linux-x64 nodejs",
2388
+ "sudo mv nodejs /opt/",
2389
+ "echo 'export NODEJS_HOME=/opt/nodejs' >> /etc/profile",
2390
+ "echo 'export PATH=$NODEJS_HOME/bin:$PATH' >> /etc/profile",
2391
+ "source /etc/profile",
2342
2392
  "npm install -g snarkjs",
2343
2393
  `aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
2344
2394
  `aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
@@ -2357,6 +2407,7 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2357
2407
  * @returns Array<string> - the list of commands for contribution verification.
2358
2408
  */
2359
2409
  const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => [
2410
+ `source /etc/profile`,
2360
2411
  `aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`,
2361
2412
  `snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`,
2362
2413
  `aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
@@ -2383,7 +2434,7 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
2383
2434
  */
2384
2435
  const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
2385
2436
  // Get the AWS variables.
2386
- const { amiId, roleArn } = getAWSVariables();
2437
+ const { amiId, instanceProfileArn } = getAWSVariables();
2387
2438
  // Parametrize the VM EC2 instance.
2388
2439
  const params = {
2389
2440
  ImageId: amiId,
@@ -2392,7 +2443,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2392
2443
  MinCount: 1,
2393
2444
  // nb. to find this: iam -> roles -> role_name.
2394
2445
  IamInstanceProfile: {
2395
- Arn: roleArn
2446
+ Arn: instanceProfileArn
2396
2447
  },
2397
2448
  // nb. for running commands at the startup.
2398
2449
  UserData: Buffer.from(commands.join("\n")).toString("base64"),
@@ -2401,7 +2452,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2401
2452
  DeviceName: "/dev/xvda",
2402
2453
  Ebs: {
2403
2454
  DeleteOnTermination: true,
2404
- VolumeSize: volumeSize,
2455
+ VolumeSize: volumeSize, // disk size in GB.
2405
2456
  VolumeType: diskType
2406
2457
  }
2407
2458
  }
@@ -2587,4 +2638,4 @@ const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
2587
2638
  }
2588
2639
  };
2589
2640
 
2590
- export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
2641
+ export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCeremonies, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };