@devtion/actions 0.0.0-5d170d3 → 0.0.0-67a4629

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. package/README.md +1 -1
  2. package/dist/index.mjs +330 -298
  3. package/dist/index.node.js +330 -297
  4. package/dist/types/src/helpers/constants.d.ts +7 -0
  5. package/dist/types/src/helpers/constants.d.ts.map +1 -1
  6. package/dist/types/src/helpers/contracts.d.ts.map +1 -1
  7. package/dist/types/src/helpers/crypto.d.ts +1 -0
  8. package/dist/types/src/helpers/crypto.d.ts.map +1 -1
  9. package/dist/types/src/helpers/database.d.ts +8 -0
  10. package/dist/types/src/helpers/database.d.ts.map +1 -1
  11. package/dist/types/src/helpers/security.d.ts +2 -2
  12. package/dist/types/src/helpers/security.d.ts.map +1 -1
  13. package/dist/types/src/helpers/storage.d.ts +5 -2
  14. package/dist/types/src/helpers/storage.d.ts.map +1 -1
  15. package/dist/types/src/helpers/utils.d.ts +34 -20
  16. package/dist/types/src/helpers/utils.d.ts.map +1 -1
  17. package/dist/types/src/helpers/verification.d.ts +3 -2
  18. package/dist/types/src/helpers/verification.d.ts.map +1 -1
  19. package/dist/types/src/helpers/vm.d.ts.map +1 -1
  20. package/dist/types/src/index.d.ts +1 -1
  21. package/dist/types/src/index.d.ts.map +1 -1
  22. package/dist/types/src/types/index.d.ts +9 -3
  23. package/dist/types/src/types/index.d.ts.map +1 -1
  24. package/package.json +3 -8
  25. package/src/helpers/constants.ts +7 -0
  26. package/src/helpers/contracts.ts +3 -3
  27. package/src/helpers/database.ts +13 -0
  28. package/src/helpers/functions.ts +1 -1
  29. package/src/helpers/security.ts +33 -52
  30. package/src/helpers/services.ts +3 -3
  31. package/src/helpers/storage.ts +15 -3
  32. package/src/helpers/utils.ts +316 -277
  33. package/src/helpers/verification.ts +6 -6
  34. package/src/helpers/vm.ts +14 -7
  35. package/src/index.ts +3 -2
  36. package/src/types/index.ts +32 -8
package/dist/index.mjs CHANGED
@@ -1,6 +1,6 @@
1
1
  /**
2
2
  * @module @p0tion/actions
3
- * @version 1.0.5
3
+ * @version 1.1.1
4
4
  * @file A set of actions and helpers for CLI commands
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -15,10 +15,8 @@ import { onSnapshot, query, collection, getDocs, doc, getDoc, where, Timestamp,
15
15
  import { zKey, groth16 } from 'snarkjs';
16
16
  import crypto from 'crypto';
17
17
  import blake from 'blakejs';
18
- import { utils } from 'ffjavascript';
19
18
  import winston from 'winston';
20
- import { S3Client, HeadObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';
21
- import { pipeline, Readable } from 'stream';
19
+ import { pipeline } from 'stream';
22
20
  import { promisify } from 'util';
23
21
  import { initializeApp } from 'firebase/app';
24
22
  import { signInWithCredential, initializeAuth, getAuth } from 'firebase/auth';
@@ -244,6 +242,12 @@ const commonTerms = {
244
242
  verificationStartedAt: "verificationStartedAt"
245
243
  }
246
244
  },
245
+ avatars: {
246
+ name: "avatars",
247
+ fields: {
248
+ avatarUrl: "avatarUrl"
249
+ }
250
+ },
247
251
  ceremonies: {
248
252
  name: "ceremonies",
249
253
  fields: {
@@ -335,6 +339,7 @@ const commonTerms = {
335
339
  finalizeCeremony: "finalizeCeremony",
336
340
  downloadCircuitArtifacts: "downloadCircuitArtifacts",
337
341
  transferObject: "transferObject",
342
+ bandadaValidateProof: "bandadaValidateProof"
338
343
  }
339
344
  };
340
345
 
@@ -685,19 +690,23 @@ const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey,
685
690
  * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
686
691
  * @param ceremonyId <string> - the unique identifier of the ceremony.
687
692
  * @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
693
+ * @param logger <GenericBar> - an optional logger to show progress.
688
694
  * @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
689
695
  */
690
- const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
696
+ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks, logger) => {
691
697
  // Keep track of uploaded chunks.
692
698
  const uploadedChunks = alreadyUploadedChunks || [];
699
+ // if we were passed a logger, start it
700
+ if (logger)
701
+ logger.start(chunksWithUrls.length, 0);
693
702
  // Loop through remaining chunks.
694
703
  for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
695
704
  // Consume the pre-signed url to upload the chunk.
696
705
  // @ts-ignore
697
706
  const response = await fetch(chunksWithUrls[i].preSignedUrl, {
698
707
  retryOptions: {
699
- retryInitialDelay: 500,
700
- socketTimeout: 60000,
708
+ retryInitialDelay: 500, // 500 ms.
709
+ socketTimeout: 60000, // 60 seconds.
701
710
  retryMaxDuration: 300000 // 5 minutes.
702
711
  },
703
712
  method: "PUT",
@@ -721,6 +730,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
721
730
  // nb. this must be done only when contributing (not finalizing).
722
731
  if (!!ceremonyId && !!cloudFunctions)
723
732
  await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
733
+ // increment the count on the logger
734
+ if (logger)
735
+ logger.increment();
724
736
  }
725
737
  return uploadedChunks;
726
738
  };
@@ -741,8 +753,9 @@ const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremony
741
753
  * @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
742
754
  * @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
743
755
  * @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
756
+ * @param logger <GenericBar> - an optional logger to show progress.
744
757
  */
745
- const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
758
+ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload, logger) => {
746
759
  // The unique identifier of the multi-part upload.
747
760
  let multiPartUploadId = "";
748
761
  // The list of already uploaded chunks.
@@ -766,7 +779,7 @@ const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFileP
766
779
  const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
767
780
  // Step (2).
768
781
  const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
769
- cloudFunctions, ceremonyId, alreadyUploadedChunks);
782
+ cloudFunctions, ceremonyId, alreadyUploadedChunks, logger);
770
783
  // Step (3).
771
784
  await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
772
785
  };
@@ -990,6 +1003,17 @@ const getClosedCeremonies = async (firestoreDatabase) => {
990
1003
  ]);
991
1004
  return fromQueryToFirebaseDocumentInfo(closedCeremoniesQuerySnap.docs);
992
1005
  };
1006
+ /**
1007
+ * Query all ceremonies
1008
+ * @notice get all ceremonies from the database.
1009
+ * @dev this is a helper for the CLI ceremony methods.
1010
+ * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
1011
+ * @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of all ceremonies.
1012
+ */
1013
+ const getAllCeremonies = async (firestoreDatabase) => {
1014
+ const ceremoniesQuerySnap = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, []);
1015
+ return fromQueryToFirebaseDocumentInfo(ceremoniesQuerySnap.docs);
1016
+ };
993
1017
 
994
1018
  /**
995
1019
  * @hidden
@@ -1038,207 +1062,22 @@ const compareHashes = async (path1, path2) => {
1038
1062
  };
1039
1063
 
1040
1064
  /**
1041
- * Parse and validate that the ceremony configuration is correct
1042
- * @notice this does not upload any files to storage
1043
- * @param path <string> - the path to the configuration file
1044
- * @param cleanup <boolean> - whether to delete the r1cs file after parsing
1045
- * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
1065
+ * Return a string with double digits if the provided input is one digit only.
1066
+ * @param in <number> - the input number to be converted.
1067
+ * @returns <string> - the two digits stringified number derived from the conversion.
1046
1068
  */
1047
- const parseCeremonyFile = async (path, cleanup = false) => {
1048
- // check that the path exists
1049
- if (!fs.existsSync(path))
1050
- throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
1051
- try {
1052
- // read the data
1053
- const data = JSON.parse(fs.readFileSync(path).toString());
1054
- // verify that the data is correct
1055
- if (data['timeoutMechanismType'] !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ && data['timeoutMechanismType'] !== "FIXED" /* CeremonyTimeoutType.FIXED */)
1056
- throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
1057
- // validate that we have at least 1 circuit input data
1058
- if (!data.circuits || data.circuits.length === 0)
1059
- throw new Error("You need to provide the data for at least 1 circuit.");
1060
- // validate that the end date is in the future
1061
- let endDate;
1062
- let startDate;
1063
- try {
1064
- endDate = new Date(data.endDate);
1065
- startDate = new Date(data.startDate);
1066
- }
1067
- catch (error) {
1068
- throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
1069
- }
1070
- if (endDate <= startDate)
1071
- throw new Error("The end date should be greater than the start date.");
1072
- const currentDate = new Date();
1073
- if (endDate <= currentDate || startDate <= currentDate)
1074
- throw new Error("The start and end dates should be in the future.");
1075
- // validate penalty
1076
- if (data.penalty <= 0)
1077
- throw new Error("The penalty should be greater than zero.");
1078
- const circuits = [];
1079
- const urlPattern = /(https?:\/\/[^\s]+)/g;
1080
- const commitHashPattern = /^[a-f0-9]{40}$/i;
1081
- const circuitArtifacts = [];
1082
- for (let i = 0; i < data.circuits.length; i++) {
1083
- const circuitData = data.circuits[i];
1084
- const artifacts = circuitData.artifacts;
1085
- circuitArtifacts.push({
1086
- artifacts: artifacts
1087
- });
1088
- const r1csPath = artifacts.r1csStoragePath;
1089
- const wasmPath = artifacts.wasmStoragePath;
1090
- // where we storing the r1cs downloaded
1091
- const localR1csPath = `./${circuitData.name}.r1cs`;
1092
- // check that the artifacts exist in S3
1093
- // we don't need any privileges to download this
1094
- // just the correct region
1095
- const s3 = new S3Client({ region: artifacts.region });
1096
- try {
1097
- await s3.send(new HeadObjectCommand({
1098
- Bucket: artifacts.bucket,
1099
- Key: r1csPath
1100
- }));
1101
- }
1102
- catch (error) {
1103
- throw new Error(`The r1cs file (${r1csPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1104
- }
1105
- try {
1106
- await s3.send(new HeadObjectCommand({
1107
- Bucket: artifacts.bucket,
1108
- Key: wasmPath
1109
- }));
1110
- }
1111
- catch (error) {
1112
- throw new Error(`The wasm file (${wasmPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
1113
- }
1114
- // download the r1cs to extract the metadata
1115
- const command = new GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
1116
- const response = await s3.send(command);
1117
- const streamPipeline = promisify(pipeline);
1118
- if (response.$metadata.httpStatusCode !== 200)
1119
- throw new Error("There was an error while trying to download the r1cs file. Please check that the file has the correct permissions (public) set.");
1120
- if (response.Body instanceof Readable)
1121
- await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
1122
- // extract the metadata from the r1cs
1123
- const metadata = getR1CSInfo(localR1csPath);
1124
- // validate that the circuit hash and template links are valid
1125
- const template = circuitData.template;
1126
- const URLMatch = template.source.match(urlPattern);
1127
- if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
1128
- throw new Error("You should provide the URL to the circuits templates on GitHub.");
1129
- const hashMatch = template.commitHash.match(commitHashPattern);
1130
- if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
1131
- throw new Error("You should provide a valid commit hash of the circuit templates.");
1132
- // calculate the hash of the r1cs file
1133
- const r1csBlake2bHash = await blake512FromPath(localR1csPath);
1134
- const circuitPrefix = extractPrefix(circuitData.name);
1135
- // filenames
1136
- const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
1137
- const r1csCompleteFilename = `${circuitData.name}.r1cs`;
1138
- const wasmCompleteFilename = `${circuitData.name}.wasm`;
1139
- const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
1140
- const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
1141
- // storage paths
1142
- const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
1143
- const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
1144
- const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
1145
- const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
1146
- const files = {
1147
- potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
1148
- r1csFilename: r1csCompleteFilename,
1149
- wasmFilename: wasmCompleteFilename,
1150
- initialZkeyFilename: firstZkeyCompleteFilename,
1151
- potStoragePath: potStorageFilePath,
1152
- r1csStoragePath: r1csStorageFilePath,
1153
- wasmStoragePath: wasmStorageFilePath,
1154
- initialZkeyStoragePath: zkeyStorageFilePath,
1155
- r1csBlake2bHash: r1csBlake2bHash
1156
- };
1157
- // validate that the compiler hash is a valid hash
1158
- const compiler = circuitData.compiler;
1159
- const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
1160
- if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
1161
- throw new Error("You should provide a valid commit hash of the circuit compiler.");
1162
- // validate that the verification options are valid
1163
- const verification = circuitData.verification;
1164
- if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
1165
- throw new Error("Please enter a valid verification mechanism: either CF or VM");
1166
- // @todo VM parameters verification
1167
- // if (verification['cfOrVM'] === "VM") {}
1168
- // check that the timeout is provided for the correct configuration
1169
- let dynamicThreshold;
1170
- let fixedTimeWindow;
1171
- let circuit = {};
1172
- if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
1173
- if (circuitData.dynamicThreshold <= 0)
1174
- throw new Error("The dynamic threshold should be > 0.");
1175
- dynamicThreshold = circuitData.dynamicThreshold;
1176
- // the Circuit data for the ceremony setup
1177
- circuit = {
1178
- name: circuitData.name,
1179
- description: circuitData.description,
1180
- prefix: circuitPrefix,
1181
- sequencePosition: i + 1,
1182
- metadata: metadata,
1183
- files: files,
1184
- template: template,
1185
- compiler: compiler,
1186
- verification: verification,
1187
- dynamicThreshold: dynamicThreshold,
1188
- avgTimings: {
1189
- contributionComputation: 0,
1190
- fullContribution: 0,
1191
- verifyCloudFunction: 0
1192
- },
1193
- };
1194
- }
1195
- if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
1196
- if (circuitData.fixedTimeWindow <= 0)
1197
- throw new Error("The fixed time window threshold should be > 0.");
1198
- fixedTimeWindow = circuitData.fixedTimeWindow;
1199
- // the Circuit data for the ceremony setup
1200
- circuit = {
1201
- name: circuitData.name,
1202
- description: circuitData.description,
1203
- prefix: circuitPrefix,
1204
- sequencePosition: i + 1,
1205
- metadata: metadata,
1206
- files: files,
1207
- template: template,
1208
- compiler: compiler,
1209
- verification: verification,
1210
- fixedTimeWindow: fixedTimeWindow,
1211
- avgTimings: {
1212
- contributionComputation: 0,
1213
- fullContribution: 0,
1214
- verifyCloudFunction: 0
1215
- },
1216
- };
1217
- }
1218
- circuits.push(circuit);
1219
- // remove the local r1cs download (if used for verifying the config only vs setup)
1220
- if (cleanup)
1221
- fs.unlinkSync(localR1csPath);
1222
- }
1223
- const setupData = {
1224
- ceremonyInputData: {
1225
- title: data.title,
1226
- description: data.description,
1227
- startDate: startDate.valueOf(),
1228
- endDate: endDate.valueOf(),
1229
- timeoutMechanismType: data.timeoutMechanismType,
1230
- penalty: data.penalty
1231
- },
1232
- ceremonyPrefix: extractPrefix(data.title),
1233
- circuits: circuits,
1234
- circuitArtifacts: circuitArtifacts
1235
- };
1236
- return setupData;
1237
- }
1238
- catch (error) {
1239
- throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
1240
- }
1241
- };
1069
+ const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : amount.toString());
1070
+ /**
1071
+ * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1072
+ * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1073
+ * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1074
+ * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1075
+ * @param str <string> - the arbitrary string from which to extract the prefix.
1076
+ * @returns <string> - the resulting prefix.
1077
+ */
1078
+ const extractPrefix = (str) =>
1079
+ // eslint-disable-next-line no-useless-escape
1080
+ str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1242
1081
  /**
1243
1082
  * Extract data from a R1CS metadata file generated with a custom file-based logger.
1244
1083
  * @notice useful for extracting metadata circuits contained in the generated file using a logger
@@ -1295,17 +1134,6 @@ const formatZkeyIndex = (progress) => {
1295
1134
  * @returns <number> - the amount of powers.
1296
1135
  */
1297
1136
  const extractPoTFromFilename = (potCompleteFilename) => Number(potCompleteFilename.split("_").pop()?.split(".").at(0));
1298
- /**
1299
- * Extract a prefix consisting of alphanumeric and underscore characters from a string with arbitrary characters.
1300
- * @dev replaces all special symbols and whitespaces with an underscore char ('_'). Convert all uppercase chars to lowercase.
1301
- * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier_2_2_4.zkey'.
1302
- * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
1303
- * @param str <string> - the arbitrary string from which to extract the prefix.
1304
- * @returns <string> - the resulting prefix.
1305
- */
1306
- const extractPrefix = (str) =>
1307
- // eslint-disable-next-line no-useless-escape
1308
- str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-").toLowerCase();
1309
1137
  /**
1310
1138
  * Automate the generation of an entropy for a contribution.
1311
1139
  * @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
@@ -1372,7 +1200,9 @@ const getContributionsValidityForContributor = async (firestoreDatabase, circuit
1372
1200
  * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
1373
1201
  * @returns <string> - the public attestation preamble.
1374
1202
  */
1375
- const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName} MPC Phase2 Trusted Setup ceremony.\nThe following are my contribution signatures:`;
1203
+ const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => `Hey, I'm ${contributorIdentifier} and I have ${isFinalizing ? "finalized" : "contributed to"} the ${ceremonyName}${ceremonyName.toLowerCase().includes("trusted setup") || ceremonyName.toLowerCase().includes("ceremony")
1204
+ ? "."
1205
+ : " MPC Phase2 Trusted Setup ceremony."}\nThe following are my contribution signatures:`;
1376
1206
  /**
1377
1207
  * Check and prepare public attestation for the contributor made only of its valid contributions.
1378
1208
  * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
@@ -1443,6 +1273,41 @@ const readBytesFromFile = (localFilePath, offset, length, position) => {
1443
1273
  // Return the read bytes.
1444
1274
  return buffer;
1445
1275
  };
1276
+ /**
1277
+ * Given a buffer in little endian format, convert it to bigint
1278
+ * @param buffer
1279
+ * @returns
1280
+ */
1281
+ function leBufferToBigint(buffer) {
1282
+ return BigInt(`0x${buffer.reverse().toString("hex")}`);
1283
+ }
1284
+ /**
1285
+ * Given an input containing string values, convert them
1286
+ * to bigint
1287
+ * @param input - The input to convert
1288
+ * @returns the input with string values converted to bigint
1289
+ */
1290
+ const unstringifyBigInts = (input) => {
1291
+ if (typeof input === "string" && /^[0-9]+$/.test(input)) {
1292
+ return BigInt(input);
1293
+ }
1294
+ if (typeof input === "string" && /^0x[0-9a-fA-F]+$/.test(input)) {
1295
+ return BigInt(input);
1296
+ }
1297
+ if (Array.isArray(input)) {
1298
+ return input.map(unstringifyBigInts);
1299
+ }
1300
+ if (input === null) {
1301
+ return null;
1302
+ }
1303
+ if (typeof input === "object") {
1304
+ return Object.entries(input).reduce((acc, [key, value]) => {
1305
+ acc[key] = unstringifyBigInts(value);
1306
+ return acc;
1307
+ }, {});
1308
+ }
1309
+ return input;
1310
+ };
1446
1311
  /**
1447
1312
  * Return the info about the R1CS file.ù
1448
1313
  * @dev this method was built taking inspiration from
@@ -1503,17 +1368,17 @@ const getR1CSInfo = (localR1CSFilePath) => {
1503
1368
  let constraints = 0;
1504
1369
  try {
1505
1370
  // Get 'number of section' (jump magic r1cs and version1 data).
1506
- const numberOfSections = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1371
+ const numberOfSections = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
1507
1372
  // Jump to first section.
1508
1373
  pointer = 12;
1509
1374
  // For each section
1510
1375
  for (let i = 0; i < numberOfSections; i++) {
1511
1376
  // Read section type.
1512
- const sectionType = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1377
+ const sectionType = leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
1513
1378
  // Jump to section size.
1514
1379
  pointer += 4;
1515
1380
  // Read section size
1516
- const sectionSize = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1381
+ const sectionSize = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1517
1382
  // If at header section (0x00000001 : Header Section).
1518
1383
  if (sectionType === BigInt(1)) {
1519
1384
  // Read info from header section.
@@ -1545,22 +1410,22 @@ const getR1CSInfo = (localR1CSFilePath) => {
1545
1410
  */
1546
1411
  pointer += sectionSize - 20;
1547
1412
  // Read R1CS info.
1548
- wires = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1413
+ wires = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1549
1414
  pointer += 4;
1550
- publicOutputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1415
+ publicOutputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1551
1416
  pointer += 4;
1552
- publicInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1417
+ publicInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1553
1418
  pointer += 4;
1554
- privateInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1419
+ privateInputs = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1555
1420
  pointer += 4;
1556
- labels = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1421
+ labels = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
1557
1422
  pointer += 8;
1558
- constraints = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1423
+ constraints = Number(leBufferToBigint(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
1559
1424
  }
1560
1425
  pointer += 8 + Number(sectionSize);
1561
1426
  }
1562
1427
  return {
1563
- curve: "bn-128",
1428
+ curve: "bn-128", /// @note currently default to bn-128 as we support only Groth16 proving system.
1564
1429
  wires,
1565
1430
  constraints,
1566
1431
  privateInputs,
@@ -1575,11 +1440,194 @@ const getR1CSInfo = (localR1CSFilePath) => {
1575
1440
  }
1576
1441
  };
1577
1442
/**
 * Parse and validate that the ceremony configuration is correct.
 * @notice this does not upload any files to storage.
 * @param path <string> - the path to the configuration file.
 * @param cleanup <boolean> - whether to delete the locally downloaded r1cs/wasm files after parsing.
 * @returns any - the data to pass to the cloud function for setup and the circuit artifacts.
 */
const parseCeremonyFile = async (path, cleanup = false) => {
    // check that the path exists
    if (!fs.existsSync(path))
        throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
    try {
        // read the data
        const data = JSON.parse(fs.readFileSync(path).toString());
        // verify that the data is correct
        if (data.timeoutMechanismType !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ &&
            data.timeoutMechanismType !== "FIXED" /* CeremonyTimeoutType.FIXED */)
            throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
        // validate that we have at least 1 circuit input data
        if (!data.circuits || data.circuits.length === 0)
            throw new Error("You need to provide the data for at least 1 circuit.");
        // validate the dates.
        // nb. the Date constructor never throws on malformed input - it yields an
        // "Invalid Date" whose valueOf() is NaN (and every comparison against it
        // is false), so the format must be checked explicitly, not via try/catch.
        const startDate = new Date(data.startDate);
        const endDate = new Date(data.endDate);
        if (Number.isNaN(startDate.valueOf()) || Number.isNaN(endDate.valueOf()))
            throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
        if (endDate <= startDate)
            throw new Error("The end date should be greater than the start date.");
        const currentDate = new Date();
        if (endDate <= currentDate || startDate <= currentDate)
            throw new Error("The start and end dates should be in the future.");
        // validate penalty
        if (data.penalty <= 0)
            throw new Error("The penalty should be greater than zero.");
        const circuits = [];
        const urlPattern = /(https?:\/\/[^\s]+)/g;
        const commitHashPattern = /^[a-f0-9]{40}$/i;
        const circuitArtifacts = [];
        // promisified stream pipeline used for all artifact downloads (loop-invariant).
        const streamPipeline = promisify(pipeline);
        for (let i = 0; i < data.circuits.length; i++) {
            const circuitData = data.circuits[i];
            const { artifacts } = circuitData;
            circuitArtifacts.push({
                artifacts
            });
            // where we storing the r1cs downloaded
            const localR1csPath = `./${circuitData.name}.r1cs`;
            // where we storing the wasm downloaded
            const localWasmPath = `./${circuitData.name}.wasm`;
            // download the r1cs to extract the metadata.
            const responseR1CS = await fetch(artifacts.r1csStoragePath);
            // nb. response.ok already implies status is 2xx, which covers the 200 check.
            if (!responseR1CS.ok)
                throw new Error(`There was an error while trying to download the r1cs file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            // Write the file locally.
            await streamPipeline(responseR1CS.body, createWriteStream(localR1csPath));
            // extract the metadata from the r1cs
            const metadata = getR1CSInfo(localR1csPath);
            // download wasm too to ensure it's available
            const responseWASM = await fetch(artifacts.wasmStoragePath);
            if (!responseWASM.ok)
                throw new Error(`There was an error while trying to download the WASM file for circuit ${circuitData.name}. Please check that the file has the correct permissions (public) set.`);
            await streamPipeline(responseWASM.body, createWriteStream(localWasmPath));
            // validate that the circuit hash and template links are valid
            const { template } = circuitData;
            const URLMatch = template.source.match(urlPattern);
            if (!URLMatch || URLMatch.length !== 1)
                throw new Error("You should provide the URL to the circuits templates on GitHub.");
            const hashMatch = template.commitHash.match(commitHashPattern);
            if (!hashMatch || hashMatch.length !== 1)
                throw new Error("You should provide a valid commit hash of the circuit templates.");
            // calculate the hash of the r1cs file
            const r1csBlake2bHash = await blake512FromPath(localR1csPath);
            const circuitPrefix = extractPrefix(circuitData.name);
            // filenames
            const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
            const r1csCompleteFilename = `${circuitData.name}.r1cs`;
            const wasmCompleteFilename = `${circuitData.name}.wasm`;
            const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
            const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
            // storage paths
            const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
            const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
            const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
            const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
            const files = {
                potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
                r1csFilename: r1csCompleteFilename,
                wasmFilename: wasmCompleteFilename,
                initialZkeyFilename: firstZkeyCompleteFilename,
                potStoragePath: potStorageFilePath,
                r1csStoragePath: r1csStorageFilePath,
                wasmStoragePath: wasmStorageFilePath,
                initialZkeyStoragePath: zkeyStorageFilePath,
                r1csBlake2bHash
            };
            // validate that the compiler hash is a valid hash
            const { compiler } = circuitData;
            const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
            if (!compilerHashMatch || compilerHashMatch.length !== 1)
                throw new Error("You should provide a valid commit hash of the circuit compiler.");
            // validate that the verification options are valid
            const { verification } = circuitData;
            if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
                throw new Error("Please enter a valid verification mechanism: either CF or VM");
            // @todo VM parameters verification
            // if (verification['cfOrVM'] === "VM") {}
            // check that the timeout is provided for the correct configuration.
            let timeoutFields = {};
            if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
                if (circuitData.dynamicThreshold <= 0)
                    throw new Error("The dynamic threshold should be > 0.");
                timeoutFields = { dynamicThreshold: circuitData.dynamicThreshold };
            }
            if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
                if (circuitData.fixedTimeWindow <= 0)
                    throw new Error("The fixed time window threshold should be > 0.");
                timeoutFields = { fixedTimeWindow: circuitData.fixedTimeWindow };
            }
            // the Circuit data for the ceremony setup (identical for both timeout
            // mechanisms apart from the threshold/window field spread in above).
            const circuit = {
                name: circuitData.name,
                description: circuitData.description,
                prefix: circuitPrefix,
                sequencePosition: i + 1,
                metadata,
                files,
                template,
                compiler,
                verification,
                ...timeoutFields,
                avgTimings: {
                    contributionComputation: 0,
                    fullContribution: 0,
                    verifyCloudFunction: 0
                }
            };
            circuits.push(circuit);
            // remove the local r1cs and wasm downloads (if used for verifying the config only vs setup)
            if (cleanup) {
                fs.unlinkSync(localR1csPath);
                fs.unlinkSync(localWasmPath);
            }
        }
        // assemble the payload for the setup cloud function.
        return {
            ceremonyInputData: {
                title: data.title,
                description: data.description,
                startDate: startDate.valueOf(),
                endDate: endDate.valueOf(),
                timeoutMechanismType: data.timeoutMechanismType,
                penalty: data.penalty
            },
            ceremonyPrefix: extractPrefix(data.title),
            circuits,
            circuitArtifacts
        };
    }
    catch (error) {
        throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
    }
};
1583
1631
 
1584
1632
  /**
1585
1633
  * Verify that a zKey is valid
@@ -1828,7 +1876,7 @@ const getFirestoreDatabase = (app) => getFirestore(app);
1828
1876
  * @param app <FirebaseApp> - the Firebase application.
1829
1877
  * @returns <Functions> - the Cloud Functions associated to the application.
1830
1878
  */
1831
const getFirebaseFunctions = (app) => {
    // All of the project's cloud functions are deployed to the europe-west1 region.
    return getFunctions(app, "europe-west1");
};
1832
1880
  /**
1833
1881
  * Retrieve the configuration variables for the AWS services (S3, EC2).
1834
1882
  * @returns <AWSVariables> - the values of the AWS services configuration variables.
@@ -1837,14 +1885,14 @@ const getAWSVariables = () => {
1837
1885
  if (!process.env.AWS_ACCESS_KEY_ID ||
1838
1886
  !process.env.AWS_SECRET_ACCESS_KEY ||
1839
1887
  !process.env.AWS_REGION ||
1840
- !process.env.AWS_ROLE_ARN ||
1888
+ !process.env.AWS_INSTANCE_PROFILE_ARN ||
1841
1889
  !process.env.AWS_AMI_ID)
1842
1890
  throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
1843
1891
  return {
1844
1892
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
1845
1893
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
1846
1894
  region: process.env.AWS_REGION || "us-east-1",
1847
- roleArn: process.env.AWS_ROLE_ARN,
1895
+ instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
1848
1896
  amiId: process.env.AWS_AMI_ID
1849
1897
  };
1850
1898
  };
@@ -1925,11 +1973,11 @@ const p256 = (proofPart) => {
1925
1973
  */
1926
1974
  const formatSolidityCalldata = (circuitInput, _proof) => {
1927
1975
  try {
1928
- const proof = utils.unstringifyBigInts(_proof);
1976
+ const proof = unstringifyBigInts(_proof);
1929
1977
  // format the public inputs to the circuit
1930
1978
  const formattedCircuitInput = [];
1931
1979
  for (const cInput of circuitInput) {
1932
- formattedCircuitInput.push(p256(utils.unstringifyBigInts(cInput)));
1980
+ formattedCircuitInput.push(p256(unstringifyBigInts(cInput)));
1933
1981
  }
1934
1982
  // construct calldata
1935
1983
  const calldata = {
@@ -2079,55 +2127,28 @@ const verifyCeremony = async (functions, firestore, ceremonyPrefix, outputDirect
2079
2127
  };
2080
2128
 
2081
2129
/**
 * This function queries the GitHub API to fetch users statistics
 * @param user {string} the user uid
 * @returns {any} the stats from the GitHub API
 */
const getGitHubStats = async (user) => {
    // Query the user endpoint with the configured access token.
    const response = await fetch(`https://api.github.com/user/${user}`, {
        method: "GET",
        headers: {
            Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}`
        }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the user's statistic. Please try again.");
    // Map GitHub's snake_case payload onto the field names the reputation checks consume.
    const {
        following,
        followers,
        public_repos: publicRepos,
        avatar_url: avatarUrl,
        created_at: age
    } = await response.json();
    return { following, followers, publicRepos, avatarUrl, age };
};
2132
2153
/**
 * This function will check if the user is reputable enough to be able to use the app
 * @param userLogin <string> The username of the user
 * @param minimumAmountOfFollowing <number> The minimum amount of following the user should have
 * @param minimumAmountOfFollowers <number> The minimum amount of followers the user should have
 * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
 * @param minimumAge <number> The minimum account age in milliseconds (compared against Date.now())
 * @returns <any> Return the avatar URL of the user if the user is reputable, false otherwise
 */
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos, minimumAge) => {
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registation, and then try again.");
    const { following, followers, publicRepos, avatarUrl, age } = await getGitHubStats(userLogin);
    // The account fails the age check when it was created after (now - minimumAge).
    const tooRecent = new Date(age) > new Date(Date.now() - minimumAge);
    // nb. comparison directions deliberately match the stats, so missing/NaN
    // values fall through exactly as before.
    const belowThresholds = following < minimumAmountOfFollowing ||
        publicRepos < minimumAmountOfPublicRepos ||
        followers < minimumAmountOfFollowers;
    if (belowThresholds || tooRecent)
        return {
            reputable: false,
            avatarUrl: ""
        };
    return {
        reputable: true,
        avatarUrl
    };
};
2152
2178
 
2153
2179
  /**
@@ -2333,8 +2359,8 @@ const createSSMClient = async () => {
2333
2359
  * @returns <Array<string>> - the list of startup commands to be executed.
2334
2360
  */
2335
2361
/**
 * Build the EC2 user-data startup commands that bootstrap a verification VM.
 * @param bucketName <string> - the name of the ceremony S3 bucket holding the bootstrap script.
 * @returns <Array<string>> - the list of startup commands to be executed.
 */
const vmBootstrapCommand = (bucketName) => [
    // Shebang so the user-data payload runs under bash.
    "#!/bin/bash",
    // Pull the bootstrap script from the S3 bucket onto the VM.
    `aws s3 cp s3://${bucketName}/${vmBootstrapScriptFilename} ${vmBootstrapScriptFilename}`,
    // Make it executable, then run it.
    `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}`
];
2340
2366
  /**
@@ -2355,8 +2381,13 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2355
2381
  // eslint-disable-next-line no-template-curly-in-string
2356
2382
  "touch ${MARKER_FILE}",
2357
2383
  "sudo yum update -y",
2358
- "curl -sL https://rpm.nodesource.com/setup_16.x | sudo bash - ",
2359
- "sudo yum install -y nodejs",
2384
+ "curl -O https://nodejs.org/dist/v16.13.0/node-v16.13.0-linux-x64.tar.xz",
2385
+ "tar -xf node-v16.13.0-linux-x64.tar.xz",
2386
+ "mv node-v16.13.0-linux-x64 nodejs",
2387
+ "sudo mv nodejs /opt/",
2388
+ "echo 'export NODEJS_HOME=/opt/nodejs' >> /etc/profile",
2389
+ "echo 'export PATH=$NODEJS_HOME/bin:$PATH' >> /etc/profile",
2390
+ "source /etc/profile",
2360
2391
  "npm install -g snarkjs",
2361
2392
  `aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
2362
2393
  `aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
@@ -2375,6 +2406,7 @@ const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, reg
2375
2406
  * @returns Array<string> - the list of commands for contribution verification.
2376
2407
  */
2377
2408
  const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => [
2409
+ `source /etc/profile`,
2378
2410
  `aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`,
2379
2411
  `snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`,
2380
2412
  `aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
@@ -2401,7 +2433,7 @@ const computeDiskSizeForVM = (zKeySizeInBytes, pot) => Math.ceil(2 * convertByte
2401
2433
  */
2402
2434
  const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
2403
2435
  // Get the AWS variables.
2404
- const { amiId, roleArn } = getAWSVariables();
2436
+ const { amiId, instanceProfileArn } = getAWSVariables();
2405
2437
  // Parametrize the VM EC2 instance.
2406
2438
  const params = {
2407
2439
  ImageId: amiId,
@@ -2410,7 +2442,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2410
2442
  MinCount: 1,
2411
2443
  // nb. to find this: iam -> roles -> role_name.
2412
2444
  IamInstanceProfile: {
2413
- Arn: roleArn
2445
+ Arn: instanceProfileArn
2414
2446
  },
2415
2447
  // nb. for running commands at the startup.
2416
2448
  UserData: Buffer.from(commands.join("\n")).toString("base64"),
@@ -2419,7 +2451,7 @@ const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskTy
2419
2451
  DeviceName: "/dev/xvda",
2420
2452
  Ebs: {
2421
2453
  DeleteOnTermination: true,
2422
- VolumeSize: volumeSize,
2454
+ VolumeSize: volumeSize, // disk size in GB.
2423
2455
  VolumeType: diskType
2424
2456
  }
2425
2457
  }
@@ -2605,4 +2637,4 @@ const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
2605
2637
  }
2606
2638
  };
2607
2639
 
2608
- export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };
2640
+ export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCeremonies, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };