@devtion/backend 0.0.0-8bb9489 → 0.0.0-9239207

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  /**
2
2
  * @module @p0tion/backend
3
- * @version 1.0.9
3
+ * @version 1.2.4
4
4
  * @file MPC Phase 2 backend for Firebase services management
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -27,10 +27,13 @@ var path = require('path');
27
27
  var os = require('os');
28
28
  var clientSsm = require('@aws-sdk/client-ssm');
29
29
  var clientEc2 = require('@aws-sdk/client-ec2');
30
+ var ethers = require('ethers');
30
31
  var functionsV1 = require('firebase-functions/v1');
31
32
  var functionsV2 = require('firebase-functions/v2');
32
33
  var timerNode = require('timer-node');
33
34
  var snarkjs = require('snarkjs');
35
+ var apiSdk = require('@bandada/api-sdk');
36
+ var auth = require('firebase-admin/auth');
34
37
 
35
38
  function _interopNamespaceDefault(e) {
36
39
  var n = Object.create(null);
@@ -72,7 +75,7 @@ var LogLevel;
72
75
  * @notice the set of Firebase Functions status codes. The codes are the same at the
73
76
  * ones exposed by {@link https://github.com/grpc/grpc/blob/master/doc/statuscodes.md | gRPC}.
74
77
  * @param errorCode <FunctionsErrorCode> - the set of possible error codes.
75
- * @param message <string> - the error messge.
78
+ * @param message <string> - the error message.
76
79
  * @param [details] <string> - the details of the error (optional).
77
80
  * @returns <HttpsError>
78
81
  */
@@ -164,6 +167,8 @@ const COMMON_ERRORS = {
164
167
  CM_INVALID_COMMAND_EXECUTION: makeError("unknown", "There was an error while executing the command on the VM", "Please, contact the coordinator if the error persists.")
165
168
  };
166
169
 
170
+ dotenv.config();
171
+ let provider;
167
172
  /**
168
173
  * Return a configured and connected instance of the AWS S3 client.
169
174
  * @dev this method check and utilize the environment variables to configure the connection
@@ -186,6 +191,36 @@ const getS3Client = async () => {
186
191
  region: process.env.AWS_REGION
187
192
  });
188
193
  };
194
+ /**
195
+ * Returns a Prvider, connected via a configured JSON URL or else
196
+ * the ethers.js default provider, using configured API keys.
197
+ * @returns <ethers.providers.Provider> An Eth node provider
198
+ */
199
+ const setEthProvider = () => {
200
+ if (provider)
201
+ return provider;
202
+ console.log(`setting new provider`);
203
+ // Use JSON URL if defined
204
+ // if ((hardhat as any).ethers) {
205
+ // console.log(`using hardhat.ethers provider`)
206
+ // provider = (hardhat as any).ethers.provider
207
+ // } else
208
+ if (process.env.ETH_PROVIDER_JSON_URL) {
209
+ console.log(`JSON URL provider at ${process.env.ETH_PROVIDER_JSON_URL}`);
210
+ provider = new ethers.providers.JsonRpcProvider({
211
+ url: process.env.ETH_PROVIDER_JSON_URL,
212
+ skipFetchSetup: true
213
+ });
214
+ }
215
+ else {
216
+ // Otherwise, connect the default provider with ALchemy, Infura, or both
217
+ provider = ethers.providers.getDefaultProvider("homestead", {
218
+ alchemy: process.env.ETH_PROVIDER_ALCHEMY_API_KEY,
219
+ infura: process.env.ETH_PROVIDER_INFURA_API_KEY
220
+ });
221
+ }
222
+ return provider;
223
+ };
189
224
 
190
225
  dotenv.config();
191
226
  /**
@@ -453,12 +488,14 @@ const htmlEncodeCircuitData = (circuitDocument) => ({
453
488
  const getGitHubVariables = () => {
454
489
  if (!process.env.GITHUB_MINIMUM_FOLLOWERS ||
455
490
  !process.env.GITHUB_MINIMUM_FOLLOWING ||
456
- !process.env.GITHUB_MINIMUM_PUBLIC_REPOS)
491
+ !process.env.GITHUB_MINIMUM_PUBLIC_REPOS ||
492
+ !process.env.GITHUB_MINIMUM_AGE)
457
493
  logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
458
494
  return {
459
495
  minimumFollowers: Number(process.env.GITHUB_MINIMUM_FOLLOWERS),
460
496
  minimumFollowing: Number(process.env.GITHUB_MINIMUM_FOLLOWING),
461
- minimumPublicRepos: Number(process.env.GITHUB_MINIMUM_PUBLIC_REPOS)
497
+ minimumPublicRepos: Number(process.env.GITHUB_MINIMUM_PUBLIC_REPOS),
498
+ minimumAge: Number(process.env.GITHUB_MINIMUM_AGE)
462
499
  };
463
500
  };
464
501
  /**
@@ -468,7 +505,7 @@ const getGitHubVariables = () => {
468
505
  const getAWSVariables = () => {
469
506
  if (!process.env.AWS_ACCESS_KEY_ID ||
470
507
  !process.env.AWS_SECRET_ACCESS_KEY ||
471
- !process.env.AWS_ROLE_ARN ||
508
+ !process.env.AWS_INSTANCE_PROFILE_ARN ||
472
509
  !process.env.AWS_AMI_ID ||
473
510
  !process.env.AWS_SNS_TOPIC_ARN)
474
511
  logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
@@ -476,7 +513,7 @@ const getAWSVariables = () => {
476
513
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
477
514
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
478
515
  region: process.env.AWS_REGION || "eu-central-1",
479
- roleArn: process.env.AWS_ROLE_ARN,
516
+ instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
480
517
  amiId: process.env.AWS_AMI_ID,
481
518
  snsTopic: process.env.AWS_SNS_TOPIC_ARN
482
519
  };
@@ -522,7 +559,7 @@ dotenv.config();
522
559
  const registerAuthUser = functions__namespace
523
560
  .region("europe-west1")
524
561
  .runWith({
525
- memory: "512MB"
562
+ memory: "1GB"
526
563
  })
527
564
  .auth.user()
528
565
  .onCreate(async (user) => {
@@ -554,11 +591,11 @@ const registerAuthUser = functions__namespace
554
591
  email === process.env.CUSTOM_CLAIMS_COORDINATOR_EMAIL_ADDRESS_OR_DOMAIN)) {
555
592
  const auth = admin.auth();
556
593
  // if provider == github.com let's use our functions to check the user's reputation
557
- if (user.providerData[0].providerId === "github.com") {
594
+ if (user.providerData.length > 0 && user.providerData[0].providerId === "github.com") {
558
595
  const vars = getGitHubVariables();
559
596
  // this return true or false
560
597
  try {
561
- const { reputable, avatarUrl: avatarURL } = await actions.githubReputation(user.providerData[0].uid, vars.minimumFollowing, vars.minimumFollowers, vars.minimumPublicRepos);
598
+ const { reputable, avatarUrl: avatarURL } = await actions.githubReputation(user.providerData[0].uid, vars.minimumFollowing, vars.minimumFollowers, vars.minimumPublicRepos, vars.minimumAge);
562
599
  if (!reputable) {
563
600
  // Delete user
564
601
  await auth.deleteUser(user.uid);
@@ -586,7 +623,7 @@ const registerAuthUser = functions__namespace
586
623
  encodedDisplayName,
587
624
  // Metadata.
588
625
  creationTime,
589
- lastSignInTime,
626
+ lastSignInTime: lastSignInTime || creationTime,
590
627
  // Optional.
591
628
  email: email || "",
592
629
  emailVerified: emailVerified || false,
@@ -609,7 +646,7 @@ const registerAuthUser = functions__namespace
609
646
  const processSignUpWithCustomClaims = functions__namespace
610
647
  .region("europe-west1")
611
648
  .runWith({
612
- memory: "512MB"
649
+ memory: "1GB"
613
650
  })
614
651
  .auth.user()
615
652
  .onCreate(async (user) => {
@@ -650,7 +687,7 @@ dotenv.config();
650
687
  const startCeremony = functions__namespace
651
688
  .region("europe-west1")
652
689
  .runWith({
653
- memory: "512MB"
690
+ memory: "1GB"
654
691
  })
655
692
  .pubsub.schedule(`every 30 minutes`)
656
693
  .onRun(async () => {
@@ -672,7 +709,7 @@ const startCeremony = functions__namespace
672
709
  const stopCeremony = functions__namespace
673
710
  .region("europe-west1")
674
711
  .runWith({
675
- memory: "512MB"
712
+ memory: "1GB"
676
713
  })
677
714
  .pubsub.schedule(`every 30 minutes`)
678
715
  .onRun(async () => {
@@ -694,7 +731,7 @@ const stopCeremony = functions__namespace
694
731
  const setupCeremony = functions__namespace
695
732
  .region("europe-west1")
696
733
  .runWith({
697
- memory: "512MB"
734
+ memory: "1GB"
698
735
  })
699
736
  .https.onCall(async (data, context) => {
700
737
  // Check if the user has the coordinator claim.
@@ -819,7 +856,7 @@ const initEmptyWaitingQueueForCircuit = functions__namespace
819
856
  const finalizeCeremony = functions__namespace
820
857
  .region("europe-west1")
821
858
  .runWith({
822
- memory: "512MB"
859
+ memory: "1GB"
823
860
  })
824
861
  .https.onCall(async (data, context) => {
825
862
  if (!context.auth || !context.auth.token.coordinator)
@@ -840,7 +877,7 @@ const finalizeCeremony = functions__namespace
840
877
  // Get ceremony circuits.
841
878
  const circuits = await getCeremonyCircuits(ceremonyId);
842
879
  // Get final contribution for each circuit.
843
- // nb. the `getFinalContributionDocument` checks the existance of the final contribution document (if not present, throws).
880
+ // nb. the `getFinalContributionDocument` checks the existence of the final contribution document (if not present, throws).
844
881
  // Therefore, we just need to call the method without taking any data to verify the pre-condition of having already computed
845
882
  // the final contributions for each ceremony circuit.
846
883
  for await (const circuit of circuits)
@@ -895,7 +932,7 @@ dotenv.config();
895
932
  const checkParticipantForCeremony = functions__namespace
896
933
  .region("europe-west1")
897
934
  .runWith({
898
- memory: "512MB"
935
+ memory: "1GB"
899
936
  })
900
937
  .https.onCall(async (data, context) => {
901
938
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -964,7 +1001,7 @@ const checkParticipantForCeremony = functions__namespace
964
1001
  participantDoc.ref.update({
965
1002
  status: "EXHUMED" /* ParticipantStatus.EXHUMED */,
966
1003
  contributions,
967
- tempContributionData: tempContributionData ? tempContributionData : firestore.FieldValue.delete(),
1004
+ tempContributionData: tempContributionData || firestore.FieldValue.delete(),
968
1005
  contributionStep: "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */,
969
1006
  contributionStartedAt: 0,
970
1007
  verificationStartedAt: firestore.FieldValue.delete(),
@@ -999,7 +1036,7 @@ const checkParticipantForCeremony = functions__namespace
999
1036
  const progressToNextCircuitForContribution = functions__namespace
1000
1037
  .region("europe-west1")
1001
1038
  .runWith({
1002
- memory: "512MB"
1039
+ memory: "1GB"
1003
1040
  })
1004
1041
  .https.onCall(async (data, context) => {
1005
1042
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1046,7 +1083,7 @@ const progressToNextCircuitForContribution = functions__namespace
1046
1083
  const progressToNextContributionStep = functions__namespace
1047
1084
  .region("europe-west1")
1048
1085
  .runWith({
1049
- memory: "512MB"
1086
+ memory: "1GB"
1050
1087
  })
1051
1088
  .https.onCall(async (data, context) => {
1052
1089
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1097,7 +1134,7 @@ const progressToNextContributionStep = functions__namespace
1097
1134
  const permanentlyStoreCurrentContributionTimeAndHash = functions__namespace
1098
1135
  .region("europe-west1")
1099
1136
  .runWith({
1100
- memory: "512MB"
1137
+ memory: "1GB"
1101
1138
  })
1102
1139
  .https.onCall(async (data, context) => {
1103
1140
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1139,7 +1176,7 @@ const permanentlyStoreCurrentContributionTimeAndHash = functions__namespace
1139
1176
  const temporaryStoreCurrentContributionMultiPartUploadId = functions__namespace
1140
1177
  .region("europe-west1")
1141
1178
  .runWith({
1142
- memory: "512MB"
1179
+ memory: "1GB"
1143
1180
  })
1144
1181
  .https.onCall(async (data, context) => {
1145
1182
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1177,7 +1214,7 @@ const temporaryStoreCurrentContributionMultiPartUploadId = functions__namespace
1177
1214
  const temporaryStoreCurrentContributionUploadedChunkData = functions__namespace
1178
1215
  .region("europe-west1")
1179
1216
  .runWith({
1180
- memory: "512MB"
1217
+ memory: "1GB"
1181
1218
  })
1182
1219
  .https.onCall(async (data, context) => {
1183
1220
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1219,7 +1256,7 @@ const temporaryStoreCurrentContributionUploadedChunkData = functions__namespace
1219
1256
  const checkAndPrepareCoordinatorForFinalization = functions__namespace
1220
1257
  .region("europe-west1")
1221
1258
  .runWith({
1222
- memory: "512MB"
1259
+ memory: "1GB"
1223
1260
  })
1224
1261
  .https.onCall(async (data, context) => {
1225
1262
  if (!context.auth || !context.auth.token.coordinator)
@@ -1459,7 +1496,7 @@ const waitForVMCommandExecution = (ssm, vmInstanceId, commandId) => new Promise(
1459
1496
  const coordinateCeremonyParticipant = functionsV1__namespace
1460
1497
  .region("europe-west1")
1461
1498
  .runWith({
1462
- memory: "512MB"
1499
+ memory: "1GB"
1463
1500
  })
1464
1501
  .firestore.document(`${actions.commonTerms.collections.ceremonies.name}/{ceremonyId}/${actions.commonTerms.collections.participants.name}/{participantId}`)
1465
1502
  .onUpdate(async (participantChanges) => {
@@ -1528,11 +1565,9 @@ const checkIfVMRunning = async (ec2, vmInstanceId, attempts = 5) => {
1528
1565
  const isVMRunning = await actions.checkIfRunning(ec2, vmInstanceId);
1529
1566
  if (!isVMRunning) {
1530
1567
  printLog(`VM not running, ${attempts - 1} attempts remaining. Retrying in 1 minute...`, LogLevel.DEBUG);
1531
- return await checkIfVMRunning(ec2, vmInstanceId, attempts - 1);
1532
- }
1533
- else {
1534
- return true;
1568
+ return checkIfVMRunning(ec2, vmInstanceId, attempts - 1);
1535
1569
  }
1570
+ return true;
1536
1571
  };
1537
1572
  /**
1538
1573
  * Verify the contribution of a participant computed while contributing to a specific circuit of a ceremony.
@@ -1561,296 +1596,301 @@ const checkIfVMRunning = async (ec2, vmInstanceId, attempts = 5) => {
1561
1596
  * 2) Send all updates atomically to the Firestore database.
1562
1597
  */
1563
1598
  const verifycontribution = functionsV2__namespace.https.onCall({ memory: "16GiB", timeoutSeconds: 3600, region: "europe-west1" }, async (request) => {
1564
- if (!request.auth || (!request.auth.token.participant && !request.auth.token.coordinator))
1565
- logAndThrowError(SPECIFIC_ERRORS.SE_AUTH_NO_CURRENT_AUTH_USER);
1566
- if (!request.data.ceremonyId ||
1567
- !request.data.circuitId ||
1568
- !request.data.contributorOrCoordinatorIdentifier ||
1569
- !request.data.bucketName)
1570
- logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA);
1571
- if (!process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME ||
1572
- !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION ||
1573
- !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1574
- logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
1575
- // Step (0).
1576
- // Prepare and start timer.
1577
- const verifyContributionTimer = new timerNode.Timer({ label: actions.commonTerms.cloudFunctionsNames.verifyContribution });
1578
- verifyContributionTimer.start();
1579
- // Get DB.
1580
- const firestore = admin.firestore();
1581
- // Prepare batch of txs.
1582
- const batch = firestore.batch();
1583
- // Extract data.
1584
- const { ceremonyId, circuitId, contributorOrCoordinatorIdentifier, bucketName } = request.data;
1585
- const userId = request.auth?.uid;
1586
- // Look for the ceremony, circuit and participant document.
1587
- const ceremonyDoc = await getDocumentById(actions.commonTerms.collections.ceremonies.name, ceremonyId);
1588
- const circuitDoc = await getDocumentById(actions.getCircuitsCollectionPath(ceremonyId), circuitId);
1589
- const participantDoc = await getDocumentById(actions.getParticipantsCollectionPath(ceremonyId), userId);
1590
- if (!ceremonyDoc.data() || !circuitDoc.data() || !participantDoc.data())
1591
- logAndThrowError(COMMON_ERRORS.CM_INEXISTENT_DOCUMENT_DATA);
1592
- // Extract documents data.
1593
- const { state } = ceremonyDoc.data();
1594
- const { status, contributions, verificationStartedAt, contributionStartedAt } = participantDoc.data();
1595
- const { waitingQueue, prefix, avgTimings, verification, files } = circuitDoc.data();
1596
- const { completedContributions, failedContributions } = waitingQueue;
1597
- const { contributionComputation: avgContributionComputationTime, fullContribution: avgFullContributionTime, verifyCloudFunction: avgVerifyCloudFunctionTime } = avgTimings;
1598
- const { cfOrVm, vm } = verification;
1599
- // we might not have it if the circuit is not using VM.
1600
- let vmInstanceId = "";
1601
- if (vm)
1602
- vmInstanceId = vm.vmInstanceId;
1603
- // Define pre-conditions.
1604
- const isFinalizing = state === "CLOSED" /* CeremonyState.CLOSED */ && request.auth && request.auth.token.coordinator; // true only when the coordinator verifies the final contributions.
1605
- const isContributing = status === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1606
- const isUsingVM = cfOrVm === "VM" /* CircuitContributionVerificationMechanism.VM */ && !!vmInstanceId;
1607
- // Prepare state.
1608
- let isContributionValid = false;
1609
- let verifyCloudFunctionExecutionTime = 0; // time spent while executing the verify contribution cloud function.
1610
- let verifyCloudFunctionTime = 0; // time spent while executing the core business logic of this cloud function.
1611
- let fullContributionTime = 0; // time spent while doing non-verification contributions tasks (download, compute, upload).
1612
- let contributionComputationTime = 0; // time spent while computing the contribution.
1613
- let lastZkeyBlake2bHash = ""; // the Blake2B hash of the last zKey.
1614
- let verificationTranscriptTemporaryLocalPath = ""; // the local temporary path for the verification transcript.
1615
- let transcriptBlake2bHash = ""; // the Blake2B hash of the verification transcript.
1616
- let commandId = ""; // the unique identifier of the VM command.
1617
- // Derive necessary data.
1618
- const lastZkeyIndex = actions.formatZkeyIndex(completedContributions + 1);
1619
- const verificationTranscriptCompleteFilename = `${prefix}_${isFinalizing
1620
- ? `${contributorOrCoordinatorIdentifier}_${actions.finalContributionIndex}_verification_transcript.log`
1621
- : `${lastZkeyIndex}_${contributorOrCoordinatorIdentifier}_verification_transcript.log`}`;
1622
- const lastZkeyFilename = `${prefix}_${isFinalizing ? actions.finalContributionIndex : lastZkeyIndex}.zkey`;
1623
- // Prepare state for VM verification (if needed).
1624
- const ec2 = await createEC2Client();
1625
- const ssm = await createSSMClient();
1626
- // Step (1.A.1).
1627
- // Get storage paths.
1628
- const verificationTranscriptStoragePathAndFilename = actions.getTranscriptStorageFilePath(prefix, verificationTranscriptCompleteFilename);
1629
- // the zKey storage path is required to be sent to the VM api
1630
- const lastZkeyStoragePath = actions.getZkeyStorageFilePath(prefix, `${prefix}_${isFinalizing ? actions.finalContributionIndex : lastZkeyIndex}.zkey`);
1631
- const verificationTaskTimer = new timerNode.Timer({ label: `${ceremonyId}-${circuitId}-${participantDoc.id}` });
1632
- const completeVerification = async () => {
1633
- // Stop verification task timer.
1634
- printLog("Completing verification", LogLevel.DEBUG);
1635
- verificationTaskTimer.stop();
1636
- verifyCloudFunctionExecutionTime = verificationTaskTimer.ms();
1637
- if (isUsingVM) {
1638
- // Create temporary path.
1639
- verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.log`);
1640
- await sleep(1000); // wait 1s for file creation.
1641
- // Download from bucket.
1642
- // nb. the transcript MUST be uploaded from the VM by verification commands.
1643
- await downloadArtifactFromS3Bucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath);
1644
- // Read the verification trascript and validate data by checking for core info ("ZKey Ok!").
1645
- const content = fs.readFileSync(verificationTranscriptTemporaryLocalPath, "utf-8");
1646
- if (content.includes("ZKey Ok!"))
1647
- isContributionValid = true;
1648
- // If the contribution is valid, then format and store the trascript.
1599
+ try {
1600
+ if (!request.auth || (!request.auth.token.participant && !request.auth.token.coordinator))
1601
+ logAndThrowError(SPECIFIC_ERRORS.SE_AUTH_NO_CURRENT_AUTH_USER);
1602
+ if (!request.data.ceremonyId ||
1603
+ !request.data.circuitId ||
1604
+ !request.data.contributorOrCoordinatorIdentifier ||
1605
+ !request.data.bucketName)
1606
+ logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA);
1607
+ if (!process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME ||
1608
+ !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION ||
1609
+ !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1610
+ logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
1611
+ // Step (0).
1612
+ // Prepare and start timer.
1613
+ const verifyContributionTimer = new timerNode.Timer({ label: actions.commonTerms.cloudFunctionsNames.verifyContribution });
1614
+ verifyContributionTimer.start();
1615
+ // Get DB.
1616
+ const firestore = admin.firestore();
1617
+ // Prepare batch of txs.
1618
+ const batch = firestore.batch();
1619
+ // Extract data.
1620
+ const { ceremonyId, circuitId, contributorOrCoordinatorIdentifier, bucketName } = request.data;
1621
+ const userId = request.auth?.uid;
1622
+ // Look for the ceremony, circuit and participant document.
1623
+ const ceremonyDoc = await getDocumentById(actions.commonTerms.collections.ceremonies.name, ceremonyId);
1624
+ const circuitDoc = await getDocumentById(actions.getCircuitsCollectionPath(ceremonyId), circuitId);
1625
+ const participantDoc = await getDocumentById(actions.getParticipantsCollectionPath(ceremonyId), userId);
1626
+ if (!ceremonyDoc.data() || !circuitDoc.data() || !participantDoc.data())
1627
+ logAndThrowError(COMMON_ERRORS.CM_INEXISTENT_DOCUMENT_DATA);
1628
+ // Extract documents data.
1629
+ const { state } = ceremonyDoc.data();
1630
+ const { status, contributions, verificationStartedAt, contributionStartedAt } = participantDoc.data();
1631
+ const { waitingQueue, prefix, avgTimings, verification, files } = circuitDoc.data();
1632
+ const { completedContributions, failedContributions } = waitingQueue;
1633
+ const { contributionComputation: avgContributionComputationTime, fullContribution: avgFullContributionTime, verifyCloudFunction: avgVerifyCloudFunctionTime } = avgTimings;
1634
+ const { cfOrVm, vm } = verification;
1635
+ // we might not have it if the circuit is not using VM.
1636
+ let vmInstanceId = "";
1637
+ if (vm)
1638
+ vmInstanceId = vm.vmInstanceId;
1639
+ // Define pre-conditions.
1640
+ const isFinalizing = state === "CLOSED" /* CeremonyState.CLOSED */ && request.auth && request.auth.token.coordinator; // true only when the coordinator verifies the final contributions.
1641
+ const isContributing = status === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1642
+ const isUsingVM = cfOrVm === "VM" /* CircuitContributionVerificationMechanism.VM */ && !!vmInstanceId;
1643
+ // Prepare state.
1644
+ let isContributionValid = false;
1645
+ let verifyCloudFunctionExecutionTime = 0; // time spent while executing the verify contribution cloud function.
1646
+ let verifyCloudFunctionTime = 0; // time spent while executing the core business logic of this cloud function.
1647
+ let fullContributionTime = 0; // time spent while doing non-verification contributions tasks (download, compute, upload).
1648
+ let contributionComputationTime = 0; // time spent while computing the contribution.
1649
+ let lastZkeyBlake2bHash = ""; // the Blake2B hash of the last zKey.
1650
+ let verificationTranscriptTemporaryLocalPath = ""; // the local temporary path for the verification transcript.
1651
+ let transcriptBlake2bHash = ""; // the Blake2B hash of the verification transcript.
1652
+ let commandId = ""; // the unique identifier of the VM command.
1653
+ // Derive necessary data.
1654
+ const lastZkeyIndex = actions.formatZkeyIndex(completedContributions + 1);
1655
+ const verificationTranscriptCompleteFilename = `${prefix}_${isFinalizing
1656
+ ? `${contributorOrCoordinatorIdentifier}_${actions.finalContributionIndex}_verification_transcript.log`
1657
+ : `${lastZkeyIndex}_${contributorOrCoordinatorIdentifier}_verification_transcript.log`}`;
1658
+ const lastZkeyFilename = `${prefix}_${isFinalizing ? actions.finalContributionIndex : lastZkeyIndex}.zkey`;
1659
+ // Prepare state for VM verification (if needed).
1660
+ const ec2 = await createEC2Client();
1661
+ const ssm = await createSSMClient();
1662
+ // Step (1.A.1).
1663
+ // Get storage paths.
1664
+ const verificationTranscriptStoragePathAndFilename = actions.getTranscriptStorageFilePath(prefix, verificationTranscriptCompleteFilename);
1665
+ // the zKey storage path is required to be sent to the VM api
1666
+ const lastZkeyStoragePath = actions.getZkeyStorageFilePath(prefix, `${prefix}_${isFinalizing ? actions.finalContributionIndex : lastZkeyIndex}.zkey`);
1667
+ const verificationTaskTimer = new timerNode.Timer({ label: `${ceremonyId}-${circuitId}-${participantDoc.id}` });
1668
+ const completeVerification = async () => {
1669
+ // Stop verification task timer.
1670
+ printLog("Completing verification", LogLevel.DEBUG);
1671
+ verificationTaskTimer.stop();
1672
+ verifyCloudFunctionExecutionTime = verificationTaskTimer.ms();
1673
+ if (isUsingVM) {
1674
+ // Create temporary path.
1675
+ verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.log`);
1676
+ await sleep(1000); // wait 1s for file creation.
1677
+ // Download from bucket.
1678
+ // nb. the transcript MUST be uploaded from the VM by verification commands.
1679
+ await downloadArtifactFromS3Bucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath);
1680
+ // Read the verification trascript and validate data by checking for core info ("ZKey Ok!").
1681
+ const content = fs.readFileSync(verificationTranscriptTemporaryLocalPath, "utf-8");
1682
+ if (content.includes("ZKey Ok!"))
1683
+ isContributionValid = true;
1684
+ // If the contribution is valid, then format and store the trascript.
1685
+ if (isContributionValid) {
1686
+ // eslint-disable-next-line no-control-regex
1687
+ const updated = content.replace(/\x1b[[0-9;]*m/g, "");
1688
+ fs.writeFileSync(verificationTranscriptTemporaryLocalPath, updated);
1689
+ }
1690
+ }
1691
+ printLog(`The contribution has been verified - Result ${isContributionValid}`, LogLevel.DEBUG);
1692
+ // Create a new contribution document.
1693
+ const contributionDoc = await firestore
1694
+ .collection(actions.getContributionsCollectionPath(ceremonyId, circuitId))
1695
+ .doc()
1696
+ .get();
1697
+ // Step (1.A.4).
1649
1698
  if (isContributionValid) {
1650
- // eslint-disable-next-line no-control-regex
1651
- const updated = content.replace(/\x1b[[0-9;]*m/g, "");
1652
- fs.writeFileSync(verificationTranscriptTemporaryLocalPath, updated);
1699
+ // Sleep ~3 seconds to wait for verification transcription.
1700
+ await sleep(3000);
1701
+ // Step (1.A.4.A.1).
1702
+ if (isUsingVM) {
1703
+ // Retrieve the contribution hash from the command output.
1704
+ lastZkeyBlake2bHash = await actions.retrieveCommandOutput(ssm, vmInstanceId, commandId);
1705
+ const hashRegex = /[a-fA-F0-9]{64}/;
1706
+ const match = lastZkeyBlake2bHash.match(hashRegex);
1707
+ lastZkeyBlake2bHash = match.at(0);
1708
+ // re upload the formatted verification transcript
1709
+ await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1710
+ }
1711
+ else {
1712
+ // Upload verification transcript.
1713
+ /// nb. do not use multi-part upload here due to small file size.
1714
+ await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1715
+ }
1716
+ // Compute verification transcript hash.
1717
+ transcriptBlake2bHash = await actions.blake512FromPath(verificationTranscriptTemporaryLocalPath);
1718
+ // Free resources by unlinking transcript temporary file.
1719
+ fs.unlinkSync(verificationTranscriptTemporaryLocalPath);
1720
+ // Filter participant contributions to find the data related to the one verified.
1721
+ const participantContributions = contributions.filter((contribution) => !!contribution.hash && !!contribution.computationTime && !contribution.doc);
1722
+ /// @dev (there must be only one contribution with an empty 'doc' field).
1723
+ if (participantContributions.length !== 1)
1724
+ logAndThrowError(SPECIFIC_ERRORS.SE_VERIFICATION_NO_PARTICIPANT_CONTRIBUTION_DATA);
1725
+ // Get contribution computation time.
1726
+ contributionComputationTime = contributions.at(0).computationTime;
1727
+ // Step (1.A.4.A.2).
1728
+ batch.create(contributionDoc.ref, {
1729
+ participantId: participantDoc.id,
1730
+ contributionComputationTime,
1731
+ verificationComputationTime: verifyCloudFunctionExecutionTime,
1732
+ zkeyIndex: isFinalizing ? actions.finalContributionIndex : lastZkeyIndex,
1733
+ files: {
1734
+ transcriptFilename: verificationTranscriptCompleteFilename,
1735
+ lastZkeyFilename,
1736
+ transcriptStoragePath: verificationTranscriptStoragePathAndFilename,
1737
+ lastZkeyStoragePath,
1738
+ transcriptBlake2bHash,
1739
+ lastZkeyBlake2bHash
1740
+ },
1741
+ verificationSoftware: {
1742
+ name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1743
+ version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1744
+ commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1745
+ },
1746
+ valid: isContributionValid,
1747
+ lastUpdated: getCurrentServerTimestampInMillis()
1748
+ });
1749
+ verifyContributionTimer.stop();
1750
+ verifyCloudFunctionTime = verifyContributionTimer.ms();
1653
1751
  }
1654
- }
1655
- printLog(`The contribution has been verified - Result ${isContributionValid}`, LogLevel.DEBUG);
1656
- // Create a new contribution document.
1657
- const contributionDoc = await firestore
1658
- .collection(actions.getContributionsCollectionPath(ceremonyId, circuitId))
1659
- .doc()
1660
- .get();
1661
- // Step (1.A.4).
1662
- if (isContributionValid) {
1663
- // Sleep ~3 seconds to wait for verification transcription.
1664
- await sleep(3000);
1665
- // Step (1.A.4.A.1).
1752
+ else {
1753
+ // Step (1.A.4.B).
1754
+ // Free-up storage by deleting invalid contribution.
1755
+ await deleteObject(bucketName, lastZkeyStoragePath);
1756
+ // Step (1.A.4.B.1).
1757
+ batch.create(contributionDoc.ref, {
1758
+ participantId: participantDoc.id,
1759
+ verificationComputationTime: verifyCloudFunctionExecutionTime,
1760
+ zkeyIndex: isFinalizing ? actions.finalContributionIndex : lastZkeyIndex,
1761
+ verificationSoftware: {
1762
+ name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1763
+ version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1764
+ commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1765
+ },
1766
+ valid: isContributionValid,
1767
+ lastUpdated: getCurrentServerTimestampInMillis()
1768
+ });
1769
+ }
1770
+ // Stop VM instance
1666
1771
  if (isUsingVM) {
1667
- // Retrieve the contribution hash from the command output.
1668
- lastZkeyBlake2bHash = await actions.retrieveCommandOutput(ssm, vmInstanceId, commandId);
1669
- const hashRegex = /[a-fA-F0-9]{64}/;
1670
- const match = lastZkeyBlake2bHash.match(hashRegex);
1671
- lastZkeyBlake2bHash = match.at(0);
1672
- // re upload the formatted verification transcript
1673
- await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1772
+ // using try and catch as the VM stopping function can throw
1773
+ // however we want to continue without stopping as the
1774
+ // verification was valid, and inform the coordinator
1775
+ try {
1776
+ await actions.stopEC2Instance(ec2, vmInstanceId);
1777
+ }
1778
+ catch (error) {
1779
+ printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${error}`, LogLevel.WARN);
1780
+ }
1674
1781
  }
1675
- else {
1676
- // Upload verification transcript.
1677
- /// nb. do not use multi-part upload here due to small file size.
1678
- await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1782
+ // Step (1.A.4.C)
1783
+ if (!isFinalizing) {
1784
+ // Step (1.A.4.C.1)
1785
+ // Compute new average contribution/verification time.
1786
+ fullContributionTime = Number(verificationStartedAt) - Number(contributionStartedAt);
1787
+ const newAvgContributionComputationTime = avgContributionComputationTime > 0
1788
+ ? (avgContributionComputationTime + contributionComputationTime) / 2
1789
+ : contributionComputationTime;
1790
+ const newAvgFullContributionTime = avgFullContributionTime > 0
1791
+ ? (avgFullContributionTime + fullContributionTime) / 2
1792
+ : fullContributionTime;
1793
+ const newAvgVerifyCloudFunctionTime = avgVerifyCloudFunctionTime > 0
1794
+ ? (avgVerifyCloudFunctionTime + verifyCloudFunctionTime) / 2
1795
+ : verifyCloudFunctionTime;
1796
+ // Prepare tx to update circuit average contribution/verification time.
1797
+ const updatedCircuitDoc = await getDocumentById(actions.getCircuitsCollectionPath(ceremonyId), circuitId);
1798
+ const { waitingQueue: updatedWaitingQueue } = updatedCircuitDoc.data();
1799
+ /// @dev this must happen only for valid contributions.
1800
+ batch.update(circuitDoc.ref, {
1801
+ avgTimings: {
1802
+ contributionComputation: isContributionValid
1803
+ ? newAvgContributionComputationTime
1804
+ : avgContributionComputationTime,
1805
+ fullContribution: isContributionValid ? newAvgFullContributionTime : avgFullContributionTime,
1806
+ verifyCloudFunction: isContributionValid
1807
+ ? newAvgVerifyCloudFunctionTime
1808
+ : avgVerifyCloudFunctionTime
1809
+ },
1810
+ waitingQueue: {
1811
+ ...updatedWaitingQueue,
1812
+ completedContributions: isContributionValid
1813
+ ? completedContributions + 1
1814
+ : completedContributions,
1815
+ failedContributions: isContributionValid ? failedContributions : failedContributions + 1
1816
+ },
1817
+ lastUpdated: getCurrentServerTimestampInMillis()
1818
+ });
1679
1819
  }
1680
- // Compute verification transcript hash.
1681
- transcriptBlake2bHash = await actions.blake512FromPath(verificationTranscriptTemporaryLocalPath);
1682
- // Free resources by unlinking transcript temporary file.
1683
- fs.unlinkSync(verificationTranscriptTemporaryLocalPath);
1684
- // Filter participant contributions to find the data related to the one verified.
1685
- const participantContributions = contributions.filter((contribution) => !!contribution.hash && !!contribution.computationTime && !contribution.doc);
1686
- /// @dev (there must be only one contribution with an empty 'doc' field).
1687
- if (participantContributions.length !== 1)
1688
- logAndThrowError(SPECIFIC_ERRORS.SE_VERIFICATION_NO_PARTICIPANT_CONTRIBUTION_DATA);
1689
- // Get contribution computation time.
1690
- contributionComputationTime = contributions.at(0).computationTime;
1691
- // Step (1.A.4.A.2).
1692
- batch.create(contributionDoc.ref, {
1693
- participantId: participantDoc.id,
1694
- contributionComputationTime,
1695
- verificationComputationTime: verifyCloudFunctionExecutionTime,
1696
- zkeyIndex: isFinalizing ? actions.finalContributionIndex : lastZkeyIndex,
1697
- files: {
1698
- transcriptFilename: verificationTranscriptCompleteFilename,
1699
- lastZkeyFilename,
1700
- transcriptStoragePath: verificationTranscriptStoragePathAndFilename,
1701
- lastZkeyStoragePath,
1702
- transcriptBlake2bHash,
1703
- lastZkeyBlake2bHash
1704
- },
1705
- verificationSoftware: {
1706
- name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1707
- version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1708
- commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1709
- },
1710
- valid: isContributionValid,
1711
- lastUpdated: getCurrentServerTimestampInMillis()
1712
- });
1713
- verifyContributionTimer.stop();
1714
- verifyCloudFunctionTime = verifyContributionTimer.ms();
1715
- }
1716
- else {
1717
- // Step (1.A.4.B).
1718
- // Free-up storage by deleting invalid contribution.
1719
- await deleteObject(bucketName, lastZkeyStoragePath);
1720
- // Step (1.A.4.B.1).
1721
- batch.create(contributionDoc.ref, {
1722
- participantId: participantDoc.id,
1723
- verificationComputationTime: verifyCloudFunctionExecutionTime,
1724
- zkeyIndex: isFinalizing ? actions.finalContributionIndex : lastZkeyIndex,
1725
- verificationSoftware: {
1726
- name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1727
- version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1728
- commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1729
- },
1730
- valid: isContributionValid,
1731
- lastUpdated: getCurrentServerTimestampInMillis()
1732
- });
1733
- }
1734
- // Stop VM instance
1735
- if (isUsingVM) {
1736
- // using try and catch as the VM stopping function can throw
1737
- // however we want to continue without stopping as the
1738
- // verification was valid, and inform the coordinator
1820
+ // Step (2).
1821
+ await batch.commit();
1822
+ printLog(`The contribution #${isFinalizing ? actions.finalContributionIndex : lastZkeyIndex} of circuit ${circuitId} (ceremony ${ceremonyId}) has been verified as ${isContributionValid ? "valid" : "invalid"} for the participant ${participantDoc.id}`, LogLevel.DEBUG);
1823
+ };
1824
+ // Step (1).
1825
+ if (isContributing || isFinalizing) {
1826
+ // Prepare timer.
1827
+ verificationTaskTimer.start();
1828
+ // Step (1.A.3.0).
1829
+ if (isUsingVM) {
1830
+ printLog(`Starting the VM mechanism`, LogLevel.DEBUG);
1831
+ // Prepare for VM execution.
1832
+ let isVMRunning = false; // true when the VM is up, otherwise false.
1833
+ // Step (1.A.3.1).
1834
+ await actions.startEC2Instance(ec2, vmInstanceId);
1835
+ await sleep(60000); // nb. wait for VM startup (1 mins + retry).
1836
+ // Check if the startup is running.
1837
+ isVMRunning = await checkIfVMRunning(ec2, vmInstanceId);
1838
+ printLog(`VM running: ${isVMRunning}`, LogLevel.DEBUG);
1839
+ // Step (1.A.3.2).
1840
+ // Prepare.
1841
+ const verificationCommand = actions.vmContributionVerificationCommand(bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename);
1842
+ // Run.
1843
+ commandId = await actions.runCommandUsingSSM(ssm, vmInstanceId, verificationCommand);
1844
+ printLog(`Starting the execution of command ${commandId}`, LogLevel.DEBUG);
1845
+ // Step (1.A.3.3).
1846
+ return waitForVMCommandExecution(ssm, vmInstanceId, commandId)
1847
+ .then(async () => {
1848
+ // Command execution successfully completed.
1849
+ printLog(`Command ${commandId} execution has been successfully completed`, LogLevel.DEBUG);
1850
+ await completeVerification();
1851
+ })
1852
+ .catch((error) => {
1853
+ // Command execution aborted.
1854
+ printLog(`Command ${commandId} execution has been aborted - Error ${error}`, LogLevel.DEBUG);
1855
+ logAndThrowError(COMMON_ERRORS.CM_INVALID_COMMAND_EXECUTION);
1856
+ });
1857
+ }
1858
+ // CF approach.
1859
+ printLog(`CF mechanism`, LogLevel.DEBUG);
1860
+ const potStoragePath = actions.getPotStorageFilePath(files.potFilename);
1861
+ const firstZkeyStoragePath = actions.getZkeyStorageFilePath(prefix, `${prefix}_${actions.genesisZkeyIndex}.zkey`);
1862
+ // Prepare temporary file paths.
1863
+ // (nb. these are needed to download the necessary artifacts for verification from AWS S3).
1864
+ verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(verificationTranscriptCompleteFilename);
1865
+ const potTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.pot`);
1866
+ const firstZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_genesis.zkey`);
1867
+ const lastZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_last.zkey`);
1868
+ // Create and populate transcript.
1869
+ const transcriptLogger = actions.createCustomLoggerForFile(verificationTranscriptTemporaryLocalPath);
1870
+ transcriptLogger.info(`${isFinalizing ? `Final verification` : `Verification`} transcript for ${prefix} circuit Phase 2 contribution.\n${isFinalizing ? `Coordinator ` : `Contributor # ${Number(lastZkeyIndex)}`} (${contributorOrCoordinatorIdentifier})\n`);
1871
+ // Step (1.A.2).
1872
+ await downloadArtifactFromS3Bucket(bucketName, potStoragePath, potTempFilePath);
1873
+ await downloadArtifactFromS3Bucket(bucketName, firstZkeyStoragePath, firstZkeyTempFilePath);
1874
+ await downloadArtifactFromS3Bucket(bucketName, lastZkeyStoragePath, lastZkeyTempFilePath);
1875
+ // Step (1.A.4).
1876
+ isContributionValid = await snarkjs.zKey.verifyFromInit(firstZkeyTempFilePath, potTempFilePath, lastZkeyTempFilePath, transcriptLogger);
1877
+ // Compute contribution hash.
1878
+ lastZkeyBlake2bHash = await actions.blake512FromPath(lastZkeyTempFilePath);
1879
+ // Free resources by unlinking temporary folders.
1880
+ // Do not free-up verification transcript path here.
1739
1881
  try {
1740
- await actions.stopEC2Instance(ec2, vmInstanceId);
1882
+ fs.unlinkSync(potTempFilePath);
1883
+ fs.unlinkSync(firstZkeyTempFilePath);
1884
+ fs.unlinkSync(lastZkeyTempFilePath);
1741
1885
  }
1742
1886
  catch (error) {
1743
- printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${error}`, LogLevel.WARN);
1887
+ printLog(`Error while unlinking temporary files - Error ${error}`, LogLevel.WARN);
1744
1888
  }
1889
+ await completeVerification();
1745
1890
  }
1746
- // Step (1.A.4.C)
1747
- if (!isFinalizing) {
1748
- // Step (1.A.4.C.1)
1749
- // Compute new average contribution/verification time.
1750
- fullContributionTime = Number(verificationStartedAt) - Number(contributionStartedAt);
1751
- const newAvgContributionComputationTime = avgContributionComputationTime > 0
1752
- ? (avgContributionComputationTime + contributionComputationTime) / 2
1753
- : contributionComputationTime;
1754
- const newAvgFullContributionTime = avgFullContributionTime > 0
1755
- ? (avgFullContributionTime + fullContributionTime) / 2
1756
- : fullContributionTime;
1757
- const newAvgVerifyCloudFunctionTime = avgVerifyCloudFunctionTime > 0
1758
- ? (avgVerifyCloudFunctionTime + verifyCloudFunctionTime) / 2
1759
- : verifyCloudFunctionTime;
1760
- // Prepare tx to update circuit average contribution/verification time.
1761
- const updatedCircuitDoc = await getDocumentById(actions.getCircuitsCollectionPath(ceremonyId), circuitId);
1762
- const { waitingQueue: updatedWaitingQueue } = updatedCircuitDoc.data();
1763
- /// @dev this must happen only for valid contributions.
1764
- batch.update(circuitDoc.ref, {
1765
- avgTimings: {
1766
- contributionComputation: isContributionValid
1767
- ? newAvgContributionComputationTime
1768
- : avgContributionComputationTime,
1769
- fullContribution: isContributionValid ? newAvgFullContributionTime : avgFullContributionTime,
1770
- verifyCloudFunction: isContributionValid
1771
- ? newAvgVerifyCloudFunctionTime
1772
- : avgVerifyCloudFunctionTime
1773
- },
1774
- waitingQueue: {
1775
- ...updatedWaitingQueue,
1776
- completedContributions: isContributionValid
1777
- ? completedContributions + 1
1778
- : completedContributions,
1779
- failedContributions: isContributionValid ? failedContributions : failedContributions + 1
1780
- },
1781
- lastUpdated: getCurrentServerTimestampInMillis()
1782
- });
1783
- }
1784
- // Step (2).
1785
- await batch.commit();
1786
- printLog(`The contribution #${isFinalizing ? actions.finalContributionIndex : lastZkeyIndex} of circuit ${circuitId} (ceremony ${ceremonyId}) has been verified as ${isContributionValid ? "valid" : "invalid"} for the participant ${participantDoc.id}`, LogLevel.DEBUG);
1787
- };
1788
- // Step (1).
1789
- if (isContributing || isFinalizing) {
1790
- // Prepare timer.
1791
- verificationTaskTimer.start();
1792
- // Step (1.A.3.0).
1793
- if (isUsingVM) {
1794
- printLog(`Starting the VM mechanism`, LogLevel.DEBUG);
1795
- // Prepare for VM execution.
1796
- let isVMRunning = false; // true when the VM is up, otherwise false.
1797
- // Step (1.A.3.1).
1798
- await actions.startEC2Instance(ec2, vmInstanceId);
1799
- await sleep(60000); // nb. wait for VM startup (1 mins + retry).
1800
- // Check if the startup is running.
1801
- isVMRunning = await checkIfVMRunning(ec2, vmInstanceId);
1802
- printLog(`VM running: ${isVMRunning}`, LogLevel.DEBUG);
1803
- // Step (1.A.3.2).
1804
- // Prepare.
1805
- const verificationCommand = actions.vmContributionVerificationCommand(bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename);
1806
- // Run.
1807
- commandId = await actions.runCommandUsingSSM(ssm, vmInstanceId, verificationCommand);
1808
- printLog(`Starting the execution of command ${commandId}`, LogLevel.DEBUG);
1809
- // Step (1.A.3.3).
1810
- return waitForVMCommandExecution(ssm, vmInstanceId, commandId)
1811
- .then(async () => {
1812
- // Command execution successfully completed.
1813
- printLog(`Command ${commandId} execution has been successfully completed`, LogLevel.DEBUG);
1814
- await completeVerification();
1815
- })
1816
- .catch((error) => {
1817
- // Command execution aborted.
1818
- printLog(`Command ${commandId} execution has been aborted - Error ${error}`, LogLevel.DEBUG);
1819
- logAndThrowError(COMMON_ERRORS.CM_INVALID_COMMAND_EXECUTION);
1820
- });
1821
- }
1822
- // CF approach.
1823
- printLog(`CF mechanism`, LogLevel.DEBUG);
1824
- const potStoragePath = actions.getPotStorageFilePath(files.potFilename);
1825
- const firstZkeyStoragePath = actions.getZkeyStorageFilePath(prefix, `${prefix}_${actions.genesisZkeyIndex}.zkey`);
1826
- // Prepare temporary file paths.
1827
- // (nb. these are needed to download the necessary artifacts for verification from AWS S3).
1828
- verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(verificationTranscriptCompleteFilename);
1829
- const potTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.pot`);
1830
- const firstZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_genesis.zkey`);
1831
- const lastZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_last.zkey`);
1832
- // Create and populate transcript.
1833
- const transcriptLogger = actions.createCustomLoggerForFile(verificationTranscriptTemporaryLocalPath);
1834
- transcriptLogger.info(`${isFinalizing ? `Final verification` : `Verification`} transcript for ${prefix} circuit Phase 2 contribution.\n${isFinalizing ? `Coordinator ` : `Contributor # ${Number(lastZkeyIndex)}`} (${contributorOrCoordinatorIdentifier})\n`);
1835
- // Step (1.A.2).
1836
- await downloadArtifactFromS3Bucket(bucketName, potStoragePath, potTempFilePath);
1837
- await downloadArtifactFromS3Bucket(bucketName, firstZkeyStoragePath, firstZkeyTempFilePath);
1838
- await downloadArtifactFromS3Bucket(bucketName, lastZkeyStoragePath, lastZkeyTempFilePath);
1839
- // Step (1.A.4).
1840
- isContributionValid = await snarkjs.zKey.verifyFromInit(firstZkeyTempFilePath, potTempFilePath, lastZkeyTempFilePath, transcriptLogger);
1841
- // Compute contribution hash.
1842
- lastZkeyBlake2bHash = await actions.blake512FromPath(lastZkeyTempFilePath);
1843
- // Free resources by unlinking temporary folders.
1844
- // Do not free-up verification transcript path here.
1845
- try {
1846
- fs.unlinkSync(potTempFilePath);
1847
- fs.unlinkSync(firstZkeyTempFilePath);
1848
- fs.unlinkSync(lastZkeyTempFilePath);
1849
- }
1850
- catch (error) {
1851
- printLog(`Error while unlinking temporary files - Error ${error}`, LogLevel.WARN);
1852
- }
1853
- await completeVerification();
1891
+ }
1892
+ catch (error) {
1893
+ logAndThrowError(makeError("unknown", error));
1854
1894
  }
1855
1895
  });
1856
1896
  /**
@@ -1861,7 +1901,7 @@ const verifycontribution = functionsV2__namespace.https.onCall({ memory: "16GiB"
1861
1901
  const refreshParticipantAfterContributionVerification = functionsV1__namespace
1862
1902
  .region("europe-west1")
1863
1903
  .runWith({
1864
- memory: "512MB"
1904
+ memory: "1GB"
1865
1905
  })
1866
1906
  .firestore.document(`/${actions.commonTerms.collections.ceremonies.name}/{ceremony}/${actions.commonTerms.collections.circuits.name}/{circuit}/${actions.commonTerms.collections.contributions.name}/{contributions}`)
1867
1907
  .onCreate(async (createdContribution) => {
@@ -1922,7 +1962,7 @@ const refreshParticipantAfterContributionVerification = functionsV1__namespace
1922
1962
  const finalizeCircuit = functionsV1__namespace
1923
1963
  .region("europe-west1")
1924
1964
  .runWith({
1925
- memory: "512MB"
1965
+ memory: "1GB"
1926
1966
  })
1927
1967
  .https.onCall(async (data, context) => {
1928
1968
  if (!context.auth || !context.auth.token.coordinator)
@@ -2066,7 +2106,7 @@ const checkIfBucketIsDedicatedToCeremony = async (bucketName) => {
2066
2106
  const createBucket = functions__namespace
2067
2107
  .region("europe-west1")
2068
2108
  .runWith({
2069
- memory: "512MB"
2109
+ memory: "1GB"
2070
2110
  })
2071
2111
  .https.onCall(async (data, context) => {
2072
2112
  // Check if the user has the coordinator claim.
@@ -2156,7 +2196,7 @@ const createBucket = functions__namespace
2156
2196
  const checkIfObjectExist = functions__namespace
2157
2197
  .region("europe-west1")
2158
2198
  .runWith({
2159
- memory: "512MB"
2199
+ memory: "1GB"
2160
2200
  })
2161
2201
  .https.onCall(async (data, context) => {
2162
2202
  // Check if the user has the coordinator claim.
@@ -2202,7 +2242,7 @@ const checkIfObjectExist = functions__namespace
2202
2242
  const generateGetObjectPreSignedUrl = functions__namespace
2203
2243
  .region("europe-west1")
2204
2244
  .runWith({
2205
- memory: "512MB"
2245
+ memory: "1GB"
2206
2246
  })
2207
2247
  .https.onCall(async (data, context) => {
2208
2248
  if (!context.auth)
@@ -2242,7 +2282,7 @@ const generateGetObjectPreSignedUrl = functions__namespace
2242
2282
  const startMultiPartUpload = functions__namespace
2243
2283
  .region("europe-west1")
2244
2284
  .runWith({
2245
- memory: "512MB"
2285
+ memory: "2GB"
2246
2286
  })
2247
2287
  .https.onCall(async (data, context) => {
2248
2288
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2297,7 +2337,7 @@ const startMultiPartUpload = functions__namespace
2297
2337
  const generatePreSignedUrlsParts = functions__namespace
2298
2338
  .region("europe-west1")
2299
2339
  .runWith({
2300
- memory: "512MB",
2340
+ memory: "1GB",
2301
2341
  timeoutSeconds: 300
2302
2342
  })
2303
2343
  .https.onCall(async (data, context) => {
@@ -2358,7 +2398,7 @@ const generatePreSignedUrlsParts = functions__namespace
2358
2398
  const completeMultiPartUpload = functions__namespace
2359
2399
  .region("europe-west1")
2360
2400
  .runWith({
2361
- memory: "512MB"
2401
+ memory: "2GB"
2362
2402
  })
2363
2403
  .https.onCall(async (data, context) => {
2364
2404
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2407,6 +2447,216 @@ const completeMultiPartUpload = functions__namespace
2407
2447
  }
2408
2448
  });
2409
2449
 
2450
// Hardcoded Groth16 verification key (bn128 curve, 3 public signals) consumed by
// `bandadaValidateProof` via `snarkjs.groth16.verify`.
// NOTE(review): this key is circuit-specific — it must match the zkey the clients
// prove against; confirm it corresponds to the deployed Bandada membership circuit.
const VKEY_DATA = {
    protocol: "groth16",
    curve: "bn128",
    nPublic: 3,
    // G1 point (affine coordinates as decimal strings, projective "1" suffix).
    vk_alpha_1: [
        "20491192805390485299153009773594534940189261866228447918068658471970481763042",
        "9383485363053290200918347156157836566562967994039712273449902621266178545958",
        "1"
    ],
    // G2 points: pairs of Fp2 coordinates.
    vk_beta_2: [
        [
            "6375614351688725206403948262868962793625744043794305715222011528459656738731",
            "4252822878758300859123897981450591353533073413197771768651442665752259397132"
        ],
        [
            "10505242626370262277552901082094356697409835680220590971873171140371331206856",
            "21847035105528745403288232691147584728191162732299865338377159692350059136679"
        ],
        ["1", "0"]
    ],
    vk_gamma_2: [
        [
            "10857046999023057135944570762232829481370756359578518086990519993285655852781",
            "11559732032986387107991004021392285783925812861821192530917403151452391805634"
        ],
        [
            "8495653923123431417604973247489272438418190587263600148770280649306958101930",
            "4082367875863433681332203403145435568316851327593401208105741076214120093531"
        ],
        ["1", "0"]
    ],
    vk_delta_2: [
        [
            "3697618915467790705869942236922063775466274665053173890632463796679068973252",
            "14948341351907992175709156460547989243732741534604949238422596319735704165658"
        ],
        [
            "3028459181652799888716942141752307629938889957960373621898607910203491239368",
            "11380736494786911280692284374675752681598754560757720296073023058533044108340"
        ],
        ["1", "0"]
    ],
    // Precomputed e(alpha, beta) pairing value (Fp12 element).
    vk_alphabeta_12: [
        [
            [
                "2029413683389138792403550203267699914886160938906632433982220835551125967885",
                "21072700047562757817161031222997517981543347628379360635925549008442030252106"
            ],
            [
                "5940354580057074848093997050200682056184807770593307860589430076672439820312",
                "12156638873931618554171829126792193045421052652279363021382169897324752428276"
            ],
            [
                "7898200236362823042373859371574133993780991612861777490112507062703164551277",
                "7074218545237549455313236346927434013100842096812539264420499035217050630853"
            ]
        ],
        [
            [
                "7077479683546002997211712695946002074877511277312570035766170199895071832130",
                "10093483419865920389913245021038182291233451549023025229112148274109565435465"
            ],
            [
                "4595479056700221319381530156280926371456704509942304414423590385166031118820",
                "19831328484489333784475432780421641293929726139240675179672856274388269393268"
            ],
            [
                "11934129596455521040620786944827826205713621633706285934057045369193958244500",
                "8037395052364110730298837004334506829870972346962140206007064471173334027475"
            ]
        ]
    ],
    // One IC (input commitment) point per public signal plus one (nPublic + 1 = 4).
    IC: [
        [
            "12951059800758687233303204819298121944551181861362200875212570257618182506154",
            "5751958719396509176593242305268064754837298673622815112953832050159760501392",
            "1"
        ],
        [
            "9561588427935871983444704959674198910445823619407211599507208879011862515257",
            "14576201570478094842467636169770180675293504492823217349086195663150934064643",
            "1"
        ],
        [
            "4811967233483727873912563574622036989372099129165459921963463310078093941559",
            "1874883809855039536107616044787862082553628089593740724610117059083415551067",
            "1"
        ],
        [
            "12252730267779308452229639835051322390696643456253768618882001876621526827161",
            "7899194018737016222260328309937800777948677569409898603827268776967707173231",
            "1"
        ]
    ]
};
2545
dotenv.config();
const { BANDADA_API_URL, BANDADA_GROUP_ID } = process.env;
const bandadaApi = new apiSdk.ApiSdk(BANDADA_API_URL);
/**
 * Validate a Groth16 membership proof against the configured Bandada group and,
 * on success, mint a Firebase custom auth token for the prover.
 * @dev flow: (1) verify the zk proof with the hardcoded VKEY_DATA; (2) check the
 * commitment (publicSignals[1]) is a member of BANDADA_GROUP_ID; (3) create (or
 * reuse) a Firebase user keyed by the commitment and issue a custom token.
 * @param data <Object> - expects `{ proof, publicSignals }` as produced by snarkjs.
 * @returns `{ valid, message, token }` - `token` is empty unless `valid` is true.
 * @throws Error - when BANDADA_GROUP_ID is missing or user creation fails for a
 * reason other than the uid already existing.
 */
const bandadaValidateProof = functions__namespace
    .region("europe-west1")
    .runWith({
    memory: "512MB"
})
    .https.onCall(async (data) => {
    if (!BANDADA_GROUP_ID)
        throw new Error("BANDADA_GROUP_ID is not defined in .env");
    const { proof, publicSignals } = data;
    // nb. groth16.verify is async; without `await` the result is a (truthy)
    // Promise and invalid proofs would be accepted.
    const isCorrect = await snarkjs.groth16.verify(VKEY_DATA, publicSignals, proof);
    if (!isCorrect)
        return {
            valid: false,
            message: "Invalid proof",
            token: ""
        };
    // The identity commitment is the second public signal
    // (assumed ordering of the circuit outputs — confirm against the circuit).
    const commitment = publicSignals[1];
    const isMember = await bandadaApi.isGroupMember(BANDADA_GROUP_ID, commitment);
    if (!isMember)
        return {
            valid: false,
            message: "Not a member of the group",
            token: ""
        };
    const auth$1 = auth.getAuth();
    try {
        // Use the commitment itself as the Firebase uid so re-authentication
        // by the same identity maps to the same user.
        await admin.auth().createUser({
            uid: commitment
        });
    }
    catch (error) {
        // An already-existing user is expected on repeat logins; anything else
        // is a real failure — rethrow the original error (preserves stack/code).
        if (error.code !== "auth/uid-already-exists") {
            throw error;
        }
    }
    const token = await auth$1.createCustomToken(commitment);
    return {
        valid: true,
        message: "Valid proof and group member",
        token
    };
});
2591
+
2592
dotenv.config();
/**
 * Sign-In-With-Ethereum gate: resolve the caller's Ethereum address from an
 * Auth0 device-flow token, optionally enforce a minimum on-chain transaction
 * nonce, then mint a Firebase custom auth token for that address.
 * @param data <Object> - expects `{ auth0Token }` (Auth0 access token).
 * @returns `{ valid, token }` on success, otherwise `{ valid: false, message }`.
 * @dev never throws to the caller — all failures are reported via the returned
 * `{ valid: false, message }` payload (outer try/catch).
 */
const checkNonceOfSIWEAddress = functions__namespace
    .region("europe-west1")
    .runWith({ memory: "1GB" })
    .https.onCall(async (data) => {
    try {
        const { auth0Token } = data;
        // Resolve the authenticated subject from Auth0.
        const result = (await fetch(`${process.env.AUTH0_APPLICATION_URL}/userinfo`, {
            method: "GET",
            headers: {
                "content-type": "application/json",
                authorization: `Bearer ${auth0Token}`
            }
        }).then((_res) => _res.json()));
        if (!result.sub) {
            return {
                valid: false,
                message: "No user detected. Please check device flow token"
            };
        }
        const auth$1 = auth.getAuth();
        // Extract the Ethereum address from the Auth0 subject.
        // Assumed shape: "<provider>|<connection>|<did:...:...:address>"
        // (CAIP-10 style, address is the third colon-separated field) — confirm
        // against the configured Auth0 SIWE connection.
        const parts = result.sub.split("|");
        const address = decodeURIComponent(parts[2]).split(":")[2];
        const minimumNonce = Number(process.env.ETH_MINIMUM_NONCE);
        const nonceBlockHeight = "latest"; // process.env.ETH_NONCE_BLOCK_HEIGHT
        // Nonce check is opt-in: skipped when ETH_MINIMUM_NONCE is unset, 0, or
        // not a number (Number(...) then yields NaN and NaN > 0 is false).
        let nonceOk = true;
        if (minimumNonce > 0) {
            const provider = setEthProvider();
            console.log(`got provider - block # ${await provider.getBlockNumber()}`);
            const nonce = await provider.getTransactionCount(address, nonceBlockHeight);
            console.log(`nonce ${nonce}`);
            nonceOk = nonce >= minimumNonce;
        }
        console.log(`checking nonce ${nonceOk}`);
        if (!nonceOk) {
            return {
                valid: false,
                message: "Eth address does not meet the nonce requirements"
            };
        }
        try {
            // Use the address as the Firebase uid so repeat sign-ins map to the
            // same user.
            await admin.auth().createUser({
                displayName: address,
                uid: address
            });
        }
        catch (error) {
            // An already-existing user is fine on repeat sign-ins; rethrow the
            // original error otherwise (preserves stack and error.code).
            if (error.code !== "auth/uid-already-exists") {
                throw error;
            }
        }
        const token = await auth$1.createCustomToken(address);
        return {
            valid: true,
            token
        };
    }
    catch (error) {
        return {
            valid: false,
            message: `Something went wrong ${error}`
        };
    }
});
2659
+
2410
2660
  dotenv.config();
2411
2661
  /**
2412
2662
  * Check and remove the current contributor if it doesn't complete the contribution on the specified amount of time.
@@ -2429,7 +2679,7 @@ dotenv.config();
2429
2679
  const checkAndRemoveBlockingContributor = functions__namespace
2430
2680
  .region("europe-west1")
2431
2681
  .runWith({
2432
- memory: "512MB"
2682
+ memory: "1GB"
2433
2683
  })
2434
2684
  .pubsub.schedule("every 1 minutes")
2435
2685
  .onRun(async () => {
@@ -2448,7 +2698,7 @@ const checkAndRemoveBlockingContributor = functions__namespace
2448
2698
  // Get ceremony circuits.
2449
2699
  const circuits = await getCeremonyCircuits(ceremony.id);
2450
2700
  // Extract ceremony data.
2451
- const { timeoutMechanismType, penalty } = ceremony.data();
2701
+ const { timeoutType: timeoutMechanismType, penalty } = ceremony.data();
2452
2702
  for (const circuit of circuits) {
2453
2703
  if (!circuit.data())
2454
2704
  // Do not use `logAndThrowError` method to avoid the function to exit before checking every ceremony.
@@ -2498,7 +2748,8 @@ const checkAndRemoveBlockingContributor = functions__namespace
2498
2748
  if (timeoutExpirationDateInMsForBlockingContributor < currentServerTimestamp &&
2499
2749
  (contributionStep === "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */ ||
2500
2750
  contributionStep === "COMPUTING" /* ParticipantContributionStep.COMPUTING */ ||
2501
- contributionStep === "UPLOADING" /* ParticipantContributionStep.UPLOADING */))
2751
+ contributionStep === "UPLOADING" /* ParticipantContributionStep.UPLOADING */ ||
2752
+ contributionStep === "COMPLETED" /* ParticipantContributionStep.COMPLETED */))
2502
2753
  timeoutType = "BLOCKING_CONTRIBUTION" /* TimeoutType.BLOCKING_CONTRIBUTION */;
2503
2754
  if (timeoutExpirationDateInMsForVerificationCloudFunction > 0 &&
2504
2755
  timeoutExpirationDateInMsForVerificationCloudFunction < currentServerTimestamp &&
@@ -2575,7 +2826,7 @@ const checkAndRemoveBlockingContributor = functions__namespace
2575
2826
  const resumeContributionAfterTimeoutExpiration = functions__namespace
2576
2827
  .region("europe-west1")
2577
2828
  .runWith({
2578
- memory: "512MB"
2829
+ memory: "1GB"
2579
2830
  })
2580
2831
  .https.onCall(async (data, context) => {
2581
2832
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2608,9 +2859,11 @@ const resumeContributionAfterTimeoutExpiration = functions__namespace
2608
2859
 
2609
2860
  admin.initializeApp();
2610
2861
 
2862
+ exports.bandadaValidateProof = bandadaValidateProof;
2611
2863
  exports.checkAndPrepareCoordinatorForFinalization = checkAndPrepareCoordinatorForFinalization;
2612
2864
  exports.checkAndRemoveBlockingContributor = checkAndRemoveBlockingContributor;
2613
2865
  exports.checkIfObjectExist = checkIfObjectExist;
2866
+ exports.checkNonceOfSIWEAddress = checkNonceOfSIWEAddress;
2614
2867
  exports.checkParticipantForCeremony = checkParticipantForCeremony;
2615
2868
  exports.completeMultiPartUpload = completeMultiPartUpload;
2616
2869
  exports.coordinateCeremonyParticipant = coordinateCeremonyParticipant;