@devtion/backend 0.0.0-8bb9489 → 0.0.0-9239207

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  /**
2
2
  * @module @p0tion/backend
3
- * @version 1.0.9
3
+ * @version 1.2.4
4
4
  * @file MPC Phase 2 backend for Firebase services management
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -9,7 +9,7 @@
9
9
  import admin from 'firebase-admin';
10
10
  import * as functions from 'firebase-functions';
11
11
  import dotenv from 'dotenv';
12
- import { getCircuitsCollectionPath, getTimeoutsCollectionPath, commonTerms, finalContributionIndex, getContributionsCollectionPath, githubReputation, getBucketName, vmBootstrapCommand, vmDependenciesAndCacheArtifactsCommand, vmBootstrapScriptFilename, computeDiskSizeForVM, createEC2Instance, getParticipantsCollectionPath, terminateEC2Instance, formatZkeyIndex, getTranscriptStorageFilePath, getZkeyStorageFilePath, startEC2Instance, vmContributionVerificationCommand, runCommandUsingSSM, getPotStorageFilePath, genesisZkeyIndex, createCustomLoggerForFile, blake512FromPath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, computeSHA256ToHex, checkIfRunning, retrieveCommandOutput, stopEC2Instance, verificationKeyAcronym, verifierSmartContractAcronym, retrieveCommandStatus } from '@p0tion/actions';
12
+ import { getCircuitsCollectionPath, getTimeoutsCollectionPath, commonTerms, finalContributionIndex, getContributionsCollectionPath, githubReputation, getBucketName, vmBootstrapCommand, vmDependenciesAndCacheArtifactsCommand, vmBootstrapScriptFilename, computeDiskSizeForVM, createEC2Instance, getParticipantsCollectionPath, terminateEC2Instance, formatZkeyIndex, getTranscriptStorageFilePath, getZkeyStorageFilePath, retrieveCommandOutput, blake512FromPath, stopEC2Instance, startEC2Instance, vmContributionVerificationCommand, runCommandUsingSSM, getPotStorageFilePath, genesisZkeyIndex, createCustomLoggerForFile, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, computeSHA256ToHex, checkIfRunning, verificationKeyAcronym, verifierSmartContractAcronym, retrieveCommandStatus } from '@p0tion/actions';
13
13
  import { encode } from 'html-entities';
14
14
  import { Timestamp, FieldValue } from 'firebase-admin/firestore';
15
15
  import { S3Client, GetObjectCommand, PutObjectCommand, DeleteObjectCommand, HeadBucketCommand, CreateBucketCommand, PutPublicAccessBlockCommand, PutBucketCorsCommand, HeadObjectCommand, CreateMultipartUploadCommand, UploadPartCommand, CompleteMultipartUploadCommand } from '@aws-sdk/client-s3';
@@ -25,10 +25,13 @@ import path from 'path';
25
25
  import os from 'os';
26
26
  import { SSMClient, CommandInvocationStatus } from '@aws-sdk/client-ssm';
27
27
  import { EC2Client } from '@aws-sdk/client-ec2';
28
+ import ethers from 'ethers';
28
29
  import * as functionsV1 from 'firebase-functions/v1';
29
30
  import * as functionsV2 from 'firebase-functions/v2';
30
31
  import { Timer } from 'timer-node';
31
- import { zKey } from 'snarkjs';
32
+ import { zKey, groth16 } from 'snarkjs';
33
+ import { ApiSdk } from '@bandada/api-sdk';
34
+ import { getAuth } from 'firebase-admin/auth';
32
35
 
33
36
  /**
34
37
  * Log levels.
@@ -49,7 +52,7 @@ var LogLevel;
49
52
  * @notice the set of Firebase Functions status codes. The codes are the same at the
50
53
  * ones exposed by {@link https://github.com/grpc/grpc/blob/master/doc/statuscodes.md | gRPC}.
51
54
  * @param errorCode <FunctionsErrorCode> - the set of possible error codes.
52
- * @param message <string> - the error messge.
55
+ * @param message <string> - the error message.
53
56
  * @param [details] <string> - the details of the error (optional).
54
57
  * @returns <HttpsError>
55
58
  */
@@ -141,6 +144,8 @@ const COMMON_ERRORS = {
141
144
  CM_INVALID_COMMAND_EXECUTION: makeError("unknown", "There was an error while executing the command on the VM", "Please, contact the coordinator if the error persists.")
142
145
  };
143
146
 
147
+ dotenv.config();
148
+ let provider;
144
149
  /**
145
150
  * Return a configured and connected instance of the AWS S3 client.
146
151
  * @dev this method check and utilize the environment variables to configure the connection
@@ -163,6 +168,36 @@ const getS3Client = async () => {
163
168
  region: process.env.AWS_REGION
164
169
  });
165
170
  };
171
+ /**
172
+ * Returns a Prvider, connected via a configured JSON URL or else
173
+ * the ethers.js default provider, using configured API keys.
174
+ * @returns <ethers.providers.Provider> An Eth node provider
175
+ */
176
+ const setEthProvider = () => {
177
+ if (provider)
178
+ return provider;
179
+ console.log(`setting new provider`);
180
+ // Use JSON URL if defined
181
+ // if ((hardhat as any).ethers) {
182
+ // console.log(`using hardhat.ethers provider`)
183
+ // provider = (hardhat as any).ethers.provider
184
+ // } else
185
+ if (process.env.ETH_PROVIDER_JSON_URL) {
186
+ console.log(`JSON URL provider at ${process.env.ETH_PROVIDER_JSON_URL}`);
187
+ provider = new ethers.providers.JsonRpcProvider({
188
+ url: process.env.ETH_PROVIDER_JSON_URL,
189
+ skipFetchSetup: true
190
+ });
191
+ }
192
+ else {
193
+ // Otherwise, connect the default provider with ALchemy, Infura, or both
194
+ provider = ethers.providers.getDefaultProvider("homestead", {
195
+ alchemy: process.env.ETH_PROVIDER_ALCHEMY_API_KEY,
196
+ infura: process.env.ETH_PROVIDER_INFURA_API_KEY
197
+ });
198
+ }
199
+ return provider;
200
+ };
166
201
 
167
202
  dotenv.config();
168
203
  /**
@@ -430,12 +465,14 @@ const htmlEncodeCircuitData = (circuitDocument) => ({
430
465
  const getGitHubVariables = () => {
431
466
  if (!process.env.GITHUB_MINIMUM_FOLLOWERS ||
432
467
  !process.env.GITHUB_MINIMUM_FOLLOWING ||
433
- !process.env.GITHUB_MINIMUM_PUBLIC_REPOS)
468
+ !process.env.GITHUB_MINIMUM_PUBLIC_REPOS ||
469
+ !process.env.GITHUB_MINIMUM_AGE)
434
470
  logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
435
471
  return {
436
472
  minimumFollowers: Number(process.env.GITHUB_MINIMUM_FOLLOWERS),
437
473
  minimumFollowing: Number(process.env.GITHUB_MINIMUM_FOLLOWING),
438
- minimumPublicRepos: Number(process.env.GITHUB_MINIMUM_PUBLIC_REPOS)
474
+ minimumPublicRepos: Number(process.env.GITHUB_MINIMUM_PUBLIC_REPOS),
475
+ minimumAge: Number(process.env.GITHUB_MINIMUM_AGE)
439
476
  };
440
477
  };
441
478
  /**
@@ -445,7 +482,7 @@ const getGitHubVariables = () => {
445
482
  const getAWSVariables = () => {
446
483
  if (!process.env.AWS_ACCESS_KEY_ID ||
447
484
  !process.env.AWS_SECRET_ACCESS_KEY ||
448
- !process.env.AWS_ROLE_ARN ||
485
+ !process.env.AWS_INSTANCE_PROFILE_ARN ||
449
486
  !process.env.AWS_AMI_ID ||
450
487
  !process.env.AWS_SNS_TOPIC_ARN)
451
488
  logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
@@ -453,7 +490,7 @@ const getAWSVariables = () => {
453
490
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
454
491
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
455
492
  region: process.env.AWS_REGION || "eu-central-1",
456
- roleArn: process.env.AWS_ROLE_ARN,
493
+ instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
457
494
  amiId: process.env.AWS_AMI_ID,
458
495
  snsTopic: process.env.AWS_SNS_TOPIC_ARN
459
496
  };
@@ -499,7 +536,7 @@ dotenv.config();
499
536
  const registerAuthUser = functions
500
537
  .region("europe-west1")
501
538
  .runWith({
502
- memory: "512MB"
539
+ memory: "1GB"
503
540
  })
504
541
  .auth.user()
505
542
  .onCreate(async (user) => {
@@ -531,11 +568,11 @@ const registerAuthUser = functions
531
568
  email === process.env.CUSTOM_CLAIMS_COORDINATOR_EMAIL_ADDRESS_OR_DOMAIN)) {
532
569
  const auth = admin.auth();
533
570
  // if provider == github.com let's use our functions to check the user's reputation
534
- if (user.providerData[0].providerId === "github.com") {
571
+ if (user.providerData.length > 0 && user.providerData[0].providerId === "github.com") {
535
572
  const vars = getGitHubVariables();
536
573
  // this return true or false
537
574
  try {
538
- const { reputable, avatarUrl: avatarURL } = await githubReputation(user.providerData[0].uid, vars.minimumFollowing, vars.minimumFollowers, vars.minimumPublicRepos);
575
+ const { reputable, avatarUrl: avatarURL } = await githubReputation(user.providerData[0].uid, vars.minimumFollowing, vars.minimumFollowers, vars.minimumPublicRepos, vars.minimumAge);
539
576
  if (!reputable) {
540
577
  // Delete user
541
578
  await auth.deleteUser(user.uid);
@@ -563,7 +600,7 @@ const registerAuthUser = functions
563
600
  encodedDisplayName,
564
601
  // Metadata.
565
602
  creationTime,
566
- lastSignInTime,
603
+ lastSignInTime: lastSignInTime || creationTime,
567
604
  // Optional.
568
605
  email: email || "",
569
606
  emailVerified: emailVerified || false,
@@ -586,7 +623,7 @@ const registerAuthUser = functions
586
623
  const processSignUpWithCustomClaims = functions
587
624
  .region("europe-west1")
588
625
  .runWith({
589
- memory: "512MB"
626
+ memory: "1GB"
590
627
  })
591
628
  .auth.user()
592
629
  .onCreate(async (user) => {
@@ -627,7 +664,7 @@ dotenv.config();
627
664
  const startCeremony = functions
628
665
  .region("europe-west1")
629
666
  .runWith({
630
- memory: "512MB"
667
+ memory: "1GB"
631
668
  })
632
669
  .pubsub.schedule(`every 30 minutes`)
633
670
  .onRun(async () => {
@@ -649,7 +686,7 @@ const startCeremony = functions
649
686
  const stopCeremony = functions
650
687
  .region("europe-west1")
651
688
  .runWith({
652
- memory: "512MB"
689
+ memory: "1GB"
653
690
  })
654
691
  .pubsub.schedule(`every 30 minutes`)
655
692
  .onRun(async () => {
@@ -671,7 +708,7 @@ const stopCeremony = functions
671
708
  const setupCeremony = functions
672
709
  .region("europe-west1")
673
710
  .runWith({
674
- memory: "512MB"
711
+ memory: "1GB"
675
712
  })
676
713
  .https.onCall(async (data, context) => {
677
714
  // Check if the user has the coordinator claim.
@@ -796,7 +833,7 @@ const initEmptyWaitingQueueForCircuit = functions
796
833
  const finalizeCeremony = functions
797
834
  .region("europe-west1")
798
835
  .runWith({
799
- memory: "512MB"
836
+ memory: "1GB"
800
837
  })
801
838
  .https.onCall(async (data, context) => {
802
839
  if (!context.auth || !context.auth.token.coordinator)
@@ -817,7 +854,7 @@ const finalizeCeremony = functions
817
854
  // Get ceremony circuits.
818
855
  const circuits = await getCeremonyCircuits(ceremonyId);
819
856
  // Get final contribution for each circuit.
820
- // nb. the `getFinalContributionDocument` checks the existance of the final contribution document (if not present, throws).
857
+ // nb. the `getFinalContributionDocument` checks the existence of the final contribution document (if not present, throws).
821
858
  // Therefore, we just need to call the method without taking any data to verify the pre-condition of having already computed
822
859
  // the final contributions for each ceremony circuit.
823
860
  for await (const circuit of circuits)
@@ -872,7 +909,7 @@ dotenv.config();
872
909
  const checkParticipantForCeremony = functions
873
910
  .region("europe-west1")
874
911
  .runWith({
875
- memory: "512MB"
912
+ memory: "1GB"
876
913
  })
877
914
  .https.onCall(async (data, context) => {
878
915
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -941,7 +978,7 @@ const checkParticipantForCeremony = functions
941
978
  participantDoc.ref.update({
942
979
  status: "EXHUMED" /* ParticipantStatus.EXHUMED */,
943
980
  contributions,
944
- tempContributionData: tempContributionData ? tempContributionData : FieldValue.delete(),
981
+ tempContributionData: tempContributionData || FieldValue.delete(),
945
982
  contributionStep: "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */,
946
983
  contributionStartedAt: 0,
947
984
  verificationStartedAt: FieldValue.delete(),
@@ -976,7 +1013,7 @@ const checkParticipantForCeremony = functions
976
1013
  const progressToNextCircuitForContribution = functions
977
1014
  .region("europe-west1")
978
1015
  .runWith({
979
- memory: "512MB"
1016
+ memory: "1GB"
980
1017
  })
981
1018
  .https.onCall(async (data, context) => {
982
1019
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1023,7 +1060,7 @@ const progressToNextCircuitForContribution = functions
1023
1060
  const progressToNextContributionStep = functions
1024
1061
  .region("europe-west1")
1025
1062
  .runWith({
1026
- memory: "512MB"
1063
+ memory: "1GB"
1027
1064
  })
1028
1065
  .https.onCall(async (data, context) => {
1029
1066
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1074,7 +1111,7 @@ const progressToNextContributionStep = functions
1074
1111
  const permanentlyStoreCurrentContributionTimeAndHash = functions
1075
1112
  .region("europe-west1")
1076
1113
  .runWith({
1077
- memory: "512MB"
1114
+ memory: "1GB"
1078
1115
  })
1079
1116
  .https.onCall(async (data, context) => {
1080
1117
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1116,7 +1153,7 @@ const permanentlyStoreCurrentContributionTimeAndHash = functions
1116
1153
  const temporaryStoreCurrentContributionMultiPartUploadId = functions
1117
1154
  .region("europe-west1")
1118
1155
  .runWith({
1119
- memory: "512MB"
1156
+ memory: "1GB"
1120
1157
  })
1121
1158
  .https.onCall(async (data, context) => {
1122
1159
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1154,7 +1191,7 @@ const temporaryStoreCurrentContributionMultiPartUploadId = functions
1154
1191
  const temporaryStoreCurrentContributionUploadedChunkData = functions
1155
1192
  .region("europe-west1")
1156
1193
  .runWith({
1157
- memory: "512MB"
1194
+ memory: "1GB"
1158
1195
  })
1159
1196
  .https.onCall(async (data, context) => {
1160
1197
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1196,7 +1233,7 @@ const temporaryStoreCurrentContributionUploadedChunkData = functions
1196
1233
  const checkAndPrepareCoordinatorForFinalization = functions
1197
1234
  .region("europe-west1")
1198
1235
  .runWith({
1199
- memory: "512MB"
1236
+ memory: "1GB"
1200
1237
  })
1201
1238
  .https.onCall(async (data, context) => {
1202
1239
  if (!context.auth || !context.auth.token.coordinator)
@@ -1436,7 +1473,7 @@ const waitForVMCommandExecution = (ssm, vmInstanceId, commandId) => new Promise(
1436
1473
  const coordinateCeremonyParticipant = functionsV1
1437
1474
  .region("europe-west1")
1438
1475
  .runWith({
1439
- memory: "512MB"
1476
+ memory: "1GB"
1440
1477
  })
1441
1478
  .firestore.document(`${commonTerms.collections.ceremonies.name}/{ceremonyId}/${commonTerms.collections.participants.name}/{participantId}`)
1442
1479
  .onUpdate(async (participantChanges) => {
@@ -1505,11 +1542,9 @@ const checkIfVMRunning = async (ec2, vmInstanceId, attempts = 5) => {
1505
1542
  const isVMRunning = await checkIfRunning(ec2, vmInstanceId);
1506
1543
  if (!isVMRunning) {
1507
1544
  printLog(`VM not running, ${attempts - 1} attempts remaining. Retrying in 1 minute...`, LogLevel.DEBUG);
1508
- return await checkIfVMRunning(ec2, vmInstanceId, attempts - 1);
1509
- }
1510
- else {
1511
- return true;
1545
+ return checkIfVMRunning(ec2, vmInstanceId, attempts - 1);
1512
1546
  }
1547
+ return true;
1513
1548
  };
1514
1549
  /**
1515
1550
  * Verify the contribution of a participant computed while contributing to a specific circuit of a ceremony.
@@ -1538,296 +1573,301 @@ const checkIfVMRunning = async (ec2, vmInstanceId, attempts = 5) => {
1538
1573
  * 2) Send all updates atomically to the Firestore database.
1539
1574
  */
1540
1575
  const verifycontribution = functionsV2.https.onCall({ memory: "16GiB", timeoutSeconds: 3600, region: "europe-west1" }, async (request) => {
1541
- if (!request.auth || (!request.auth.token.participant && !request.auth.token.coordinator))
1542
- logAndThrowError(SPECIFIC_ERRORS.SE_AUTH_NO_CURRENT_AUTH_USER);
1543
- if (!request.data.ceremonyId ||
1544
- !request.data.circuitId ||
1545
- !request.data.contributorOrCoordinatorIdentifier ||
1546
- !request.data.bucketName)
1547
- logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA);
1548
- if (!process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME ||
1549
- !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION ||
1550
- !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1551
- logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
1552
- // Step (0).
1553
- // Prepare and start timer.
1554
- const verifyContributionTimer = new Timer({ label: commonTerms.cloudFunctionsNames.verifyContribution });
1555
- verifyContributionTimer.start();
1556
- // Get DB.
1557
- const firestore = admin.firestore();
1558
- // Prepare batch of txs.
1559
- const batch = firestore.batch();
1560
- // Extract data.
1561
- const { ceremonyId, circuitId, contributorOrCoordinatorIdentifier, bucketName } = request.data;
1562
- const userId = request.auth?.uid;
1563
- // Look for the ceremony, circuit and participant document.
1564
- const ceremonyDoc = await getDocumentById(commonTerms.collections.ceremonies.name, ceremonyId);
1565
- const circuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1566
- const participantDoc = await getDocumentById(getParticipantsCollectionPath(ceremonyId), userId);
1567
- if (!ceremonyDoc.data() || !circuitDoc.data() || !participantDoc.data())
1568
- logAndThrowError(COMMON_ERRORS.CM_INEXISTENT_DOCUMENT_DATA);
1569
- // Extract documents data.
1570
- const { state } = ceremonyDoc.data();
1571
- const { status, contributions, verificationStartedAt, contributionStartedAt } = participantDoc.data();
1572
- const { waitingQueue, prefix, avgTimings, verification, files } = circuitDoc.data();
1573
- const { completedContributions, failedContributions } = waitingQueue;
1574
- const { contributionComputation: avgContributionComputationTime, fullContribution: avgFullContributionTime, verifyCloudFunction: avgVerifyCloudFunctionTime } = avgTimings;
1575
- const { cfOrVm, vm } = verification;
1576
- // we might not have it if the circuit is not using VM.
1577
- let vmInstanceId = "";
1578
- if (vm)
1579
- vmInstanceId = vm.vmInstanceId;
1580
- // Define pre-conditions.
1581
- const isFinalizing = state === "CLOSED" /* CeremonyState.CLOSED */ && request.auth && request.auth.token.coordinator; // true only when the coordinator verifies the final contributions.
1582
- const isContributing = status === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1583
- const isUsingVM = cfOrVm === "VM" /* CircuitContributionVerificationMechanism.VM */ && !!vmInstanceId;
1584
- // Prepare state.
1585
- let isContributionValid = false;
1586
- let verifyCloudFunctionExecutionTime = 0; // time spent while executing the verify contribution cloud function.
1587
- let verifyCloudFunctionTime = 0; // time spent while executing the core business logic of this cloud function.
1588
- let fullContributionTime = 0; // time spent while doing non-verification contributions tasks (download, compute, upload).
1589
- let contributionComputationTime = 0; // time spent while computing the contribution.
1590
- let lastZkeyBlake2bHash = ""; // the Blake2B hash of the last zKey.
1591
- let verificationTranscriptTemporaryLocalPath = ""; // the local temporary path for the verification transcript.
1592
- let transcriptBlake2bHash = ""; // the Blake2B hash of the verification transcript.
1593
- let commandId = ""; // the unique identifier of the VM command.
1594
- // Derive necessary data.
1595
- const lastZkeyIndex = formatZkeyIndex(completedContributions + 1);
1596
- const verificationTranscriptCompleteFilename = `${prefix}_${isFinalizing
1597
- ? `${contributorOrCoordinatorIdentifier}_${finalContributionIndex}_verification_transcript.log`
1598
- : `${lastZkeyIndex}_${contributorOrCoordinatorIdentifier}_verification_transcript.log`}`;
1599
- const lastZkeyFilename = `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`;
1600
- // Prepare state for VM verification (if needed).
1601
- const ec2 = await createEC2Client();
1602
- const ssm = await createSSMClient();
1603
- // Step (1.A.1).
1604
- // Get storage paths.
1605
- const verificationTranscriptStoragePathAndFilename = getTranscriptStorageFilePath(prefix, verificationTranscriptCompleteFilename);
1606
- // the zKey storage path is required to be sent to the VM api
1607
- const lastZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`);
1608
- const verificationTaskTimer = new Timer({ label: `${ceremonyId}-${circuitId}-${participantDoc.id}` });
1609
- const completeVerification = async () => {
1610
- // Stop verification task timer.
1611
- printLog("Completing verification", LogLevel.DEBUG);
1612
- verificationTaskTimer.stop();
1613
- verifyCloudFunctionExecutionTime = verificationTaskTimer.ms();
1614
- if (isUsingVM) {
1615
- // Create temporary path.
1616
- verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.log`);
1617
- await sleep(1000); // wait 1s for file creation.
1618
- // Download from bucket.
1619
- // nb. the transcript MUST be uploaded from the VM by verification commands.
1620
- await downloadArtifactFromS3Bucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath);
1621
- // Read the verification trascript and validate data by checking for core info ("ZKey Ok!").
1622
- const content = fs.readFileSync(verificationTranscriptTemporaryLocalPath, "utf-8");
1623
- if (content.includes("ZKey Ok!"))
1624
- isContributionValid = true;
1625
- // If the contribution is valid, then format and store the trascript.
1576
+ try {
1577
+ if (!request.auth || (!request.auth.token.participant && !request.auth.token.coordinator))
1578
+ logAndThrowError(SPECIFIC_ERRORS.SE_AUTH_NO_CURRENT_AUTH_USER);
1579
+ if (!request.data.ceremonyId ||
1580
+ !request.data.circuitId ||
1581
+ !request.data.contributorOrCoordinatorIdentifier ||
1582
+ !request.data.bucketName)
1583
+ logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA);
1584
+ if (!process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME ||
1585
+ !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION ||
1586
+ !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1587
+ logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
1588
+ // Step (0).
1589
+ // Prepare and start timer.
1590
+ const verifyContributionTimer = new Timer({ label: commonTerms.cloudFunctionsNames.verifyContribution });
1591
+ verifyContributionTimer.start();
1592
+ // Get DB.
1593
+ const firestore = admin.firestore();
1594
+ // Prepare batch of txs.
1595
+ const batch = firestore.batch();
1596
+ // Extract data.
1597
+ const { ceremonyId, circuitId, contributorOrCoordinatorIdentifier, bucketName } = request.data;
1598
+ const userId = request.auth?.uid;
1599
+ // Look for the ceremony, circuit and participant document.
1600
+ const ceremonyDoc = await getDocumentById(commonTerms.collections.ceremonies.name, ceremonyId);
1601
+ const circuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1602
+ const participantDoc = await getDocumentById(getParticipantsCollectionPath(ceremonyId), userId);
1603
+ if (!ceremonyDoc.data() || !circuitDoc.data() || !participantDoc.data())
1604
+ logAndThrowError(COMMON_ERRORS.CM_INEXISTENT_DOCUMENT_DATA);
1605
+ // Extract documents data.
1606
+ const { state } = ceremonyDoc.data();
1607
+ const { status, contributions, verificationStartedAt, contributionStartedAt } = participantDoc.data();
1608
+ const { waitingQueue, prefix, avgTimings, verification, files } = circuitDoc.data();
1609
+ const { completedContributions, failedContributions } = waitingQueue;
1610
+ const { contributionComputation: avgContributionComputationTime, fullContribution: avgFullContributionTime, verifyCloudFunction: avgVerifyCloudFunctionTime } = avgTimings;
1611
+ const { cfOrVm, vm } = verification;
1612
+ // we might not have it if the circuit is not using VM.
1613
+ let vmInstanceId = "";
1614
+ if (vm)
1615
+ vmInstanceId = vm.vmInstanceId;
1616
+ // Define pre-conditions.
1617
+ const isFinalizing = state === "CLOSED" /* CeremonyState.CLOSED */ && request.auth && request.auth.token.coordinator; // true only when the coordinator verifies the final contributions.
1618
+ const isContributing = status === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1619
+ const isUsingVM = cfOrVm === "VM" /* CircuitContributionVerificationMechanism.VM */ && !!vmInstanceId;
1620
+ // Prepare state.
1621
+ let isContributionValid = false;
1622
+ let verifyCloudFunctionExecutionTime = 0; // time spent while executing the verify contribution cloud function.
1623
+ let verifyCloudFunctionTime = 0; // time spent while executing the core business logic of this cloud function.
1624
+ let fullContributionTime = 0; // time spent while doing non-verification contributions tasks (download, compute, upload).
1625
+ let contributionComputationTime = 0; // time spent while computing the contribution.
1626
+ let lastZkeyBlake2bHash = ""; // the Blake2B hash of the last zKey.
1627
+ let verificationTranscriptTemporaryLocalPath = ""; // the local temporary path for the verification transcript.
1628
+ let transcriptBlake2bHash = ""; // the Blake2B hash of the verification transcript.
1629
+ let commandId = ""; // the unique identifier of the VM command.
1630
+ // Derive necessary data.
1631
+ const lastZkeyIndex = formatZkeyIndex(completedContributions + 1);
1632
+ const verificationTranscriptCompleteFilename = `${prefix}_${isFinalizing
1633
+ ? `${contributorOrCoordinatorIdentifier}_${finalContributionIndex}_verification_transcript.log`
1634
+ : `${lastZkeyIndex}_${contributorOrCoordinatorIdentifier}_verification_transcript.log`}`;
1635
+ const lastZkeyFilename = `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`;
1636
+ // Prepare state for VM verification (if needed).
1637
+ const ec2 = await createEC2Client();
1638
+ const ssm = await createSSMClient();
1639
+ // Step (1.A.1).
1640
+ // Get storage paths.
1641
+ const verificationTranscriptStoragePathAndFilename = getTranscriptStorageFilePath(prefix, verificationTranscriptCompleteFilename);
1642
+ // the zKey storage path is required to be sent to the VM api
1643
+ const lastZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`);
1644
+ const verificationTaskTimer = new Timer({ label: `${ceremonyId}-${circuitId}-${participantDoc.id}` });
1645
+ const completeVerification = async () => {
1646
+ // Stop verification task timer.
1647
+ printLog("Completing verification", LogLevel.DEBUG);
1648
+ verificationTaskTimer.stop();
1649
+ verifyCloudFunctionExecutionTime = verificationTaskTimer.ms();
1650
+ if (isUsingVM) {
1651
+ // Create temporary path.
1652
+ verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.log`);
1653
+ await sleep(1000); // wait 1s for file creation.
1654
+ // Download from bucket.
1655
+ // nb. the transcript MUST be uploaded from the VM by verification commands.
1656
+ await downloadArtifactFromS3Bucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath);
1657
+ // Read the verification trascript and validate data by checking for core info ("ZKey Ok!").
1658
+ const content = fs.readFileSync(verificationTranscriptTemporaryLocalPath, "utf-8");
1659
+ if (content.includes("ZKey Ok!"))
1660
+ isContributionValid = true;
1661
+ // If the contribution is valid, then format and store the trascript.
1662
+ if (isContributionValid) {
1663
+ // eslint-disable-next-line no-control-regex
1664
+ const updated = content.replace(/\x1b[[0-9;]*m/g, "");
1665
+ fs.writeFileSync(verificationTranscriptTemporaryLocalPath, updated);
1666
+ }
1667
+ }
1668
+ printLog(`The contribution has been verified - Result ${isContributionValid}`, LogLevel.DEBUG);
1669
+ // Create a new contribution document.
1670
+ const contributionDoc = await firestore
1671
+ .collection(getContributionsCollectionPath(ceremonyId, circuitId))
1672
+ .doc()
1673
+ .get();
1674
+ // Step (1.A.4).
1626
1675
  if (isContributionValid) {
1627
- // eslint-disable-next-line no-control-regex
1628
- const updated = content.replace(/\x1b[[0-9;]*m/g, "");
1629
- fs.writeFileSync(verificationTranscriptTemporaryLocalPath, updated);
1676
+ // Sleep ~3 seconds to wait for verification transcription.
1677
+ await sleep(3000);
1678
+ // Step (1.A.4.A.1).
1679
+ if (isUsingVM) {
1680
+ // Retrieve the contribution hash from the command output.
1681
+ lastZkeyBlake2bHash = await retrieveCommandOutput(ssm, vmInstanceId, commandId);
1682
+ const hashRegex = /[a-fA-F0-9]{64}/;
1683
+ const match = lastZkeyBlake2bHash.match(hashRegex);
1684
+ lastZkeyBlake2bHash = match.at(0);
1685
+ // re upload the formatted verification transcript
1686
+ await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1687
+ }
1688
+ else {
1689
+ // Upload verification transcript.
1690
+ /// nb. do not use multi-part upload here due to small file size.
1691
+ await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1692
+ }
1693
+ // Compute verification transcript hash.
1694
+ transcriptBlake2bHash = await blake512FromPath(verificationTranscriptTemporaryLocalPath);
1695
+ // Free resources by unlinking transcript temporary file.
1696
+ fs.unlinkSync(verificationTranscriptTemporaryLocalPath);
1697
+ // Filter participant contributions to find the data related to the one verified.
1698
+ const participantContributions = contributions.filter((contribution) => !!contribution.hash && !!contribution.computationTime && !contribution.doc);
1699
+ /// @dev (there must be only one contribution with an empty 'doc' field).
1700
+ if (participantContributions.length !== 1)
1701
+ logAndThrowError(SPECIFIC_ERRORS.SE_VERIFICATION_NO_PARTICIPANT_CONTRIBUTION_DATA);
1702
+ // Get contribution computation time.
1703
+ contributionComputationTime = contributions.at(0).computationTime;
1704
+ // Step (1.A.4.A.2).
1705
+ batch.create(contributionDoc.ref, {
1706
+ participantId: participantDoc.id,
1707
+ contributionComputationTime,
1708
+ verificationComputationTime: verifyCloudFunctionExecutionTime,
1709
+ zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1710
+ files: {
1711
+ transcriptFilename: verificationTranscriptCompleteFilename,
1712
+ lastZkeyFilename,
1713
+ transcriptStoragePath: verificationTranscriptStoragePathAndFilename,
1714
+ lastZkeyStoragePath,
1715
+ transcriptBlake2bHash,
1716
+ lastZkeyBlake2bHash
1717
+ },
1718
+ verificationSoftware: {
1719
+ name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1720
+ version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1721
+ commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1722
+ },
1723
+ valid: isContributionValid,
1724
+ lastUpdated: getCurrentServerTimestampInMillis()
1725
+ });
1726
+ verifyContributionTimer.stop();
1727
+ verifyCloudFunctionTime = verifyContributionTimer.ms();
1630
1728
  }
1631
- }
1632
- printLog(`The contribution has been verified - Result ${isContributionValid}`, LogLevel.DEBUG);
1633
- // Create a new contribution document.
1634
- const contributionDoc = await firestore
1635
- .collection(getContributionsCollectionPath(ceremonyId, circuitId))
1636
- .doc()
1637
- .get();
1638
- // Step (1.A.4).
1639
- if (isContributionValid) {
1640
- // Sleep ~3 seconds to wait for verification transcription.
1641
- await sleep(3000);
1642
- // Step (1.A.4.A.1).
1729
+ else {
1730
+ // Step (1.A.4.B).
1731
+ // Free-up storage by deleting invalid contribution.
1732
+ await deleteObject(bucketName, lastZkeyStoragePath);
1733
+ // Step (1.A.4.B.1).
1734
+ batch.create(contributionDoc.ref, {
1735
+ participantId: participantDoc.id,
1736
+ verificationComputationTime: verifyCloudFunctionExecutionTime,
1737
+ zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1738
+ verificationSoftware: {
1739
+ name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1740
+ version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1741
+ commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1742
+ },
1743
+ valid: isContributionValid,
1744
+ lastUpdated: getCurrentServerTimestampInMillis()
1745
+ });
1746
+ }
1747
+ // Stop VM instance
1643
1748
  if (isUsingVM) {
1644
- // Retrieve the contribution hash from the command output.
1645
- lastZkeyBlake2bHash = await retrieveCommandOutput(ssm, vmInstanceId, commandId);
1646
- const hashRegex = /[a-fA-F0-9]{64}/;
1647
- const match = lastZkeyBlake2bHash.match(hashRegex);
1648
- lastZkeyBlake2bHash = match.at(0);
1649
- // re upload the formatted verification transcript
1650
- await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1749
+ // using try and catch as the VM stopping function can throw
1750
+ // however we want to continue without stopping as the
1751
+ // verification was valid, and inform the coordinator
1752
+ try {
1753
+ await stopEC2Instance(ec2, vmInstanceId);
1754
+ }
1755
+ catch (error) {
1756
+ printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${error}`, LogLevel.WARN);
1757
+ }
1651
1758
  }
1652
- else {
1653
- // Upload verification transcript.
1654
- /// nb. do not use multi-part upload here due to small file size.
1655
- await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1759
+ // Step (1.A.4.C)
1760
+ if (!isFinalizing) {
1761
+ // Step (1.A.4.C.1)
1762
+ // Compute new average contribution/verification time.
1763
+ fullContributionTime = Number(verificationStartedAt) - Number(contributionStartedAt);
1764
+ const newAvgContributionComputationTime = avgContributionComputationTime > 0
1765
+ ? (avgContributionComputationTime + contributionComputationTime) / 2
1766
+ : contributionComputationTime;
1767
+ const newAvgFullContributionTime = avgFullContributionTime > 0
1768
+ ? (avgFullContributionTime + fullContributionTime) / 2
1769
+ : fullContributionTime;
1770
+ const newAvgVerifyCloudFunctionTime = avgVerifyCloudFunctionTime > 0
1771
+ ? (avgVerifyCloudFunctionTime + verifyCloudFunctionTime) / 2
1772
+ : verifyCloudFunctionTime;
1773
+ // Prepare tx to update circuit average contribution/verification time.
1774
+ const updatedCircuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1775
+ const { waitingQueue: updatedWaitingQueue } = updatedCircuitDoc.data();
1776
+ /// @dev this must happen only for valid contributions.
1777
+ batch.update(circuitDoc.ref, {
1778
+ avgTimings: {
1779
+ contributionComputation: isContributionValid
1780
+ ? newAvgContributionComputationTime
1781
+ : avgContributionComputationTime,
1782
+ fullContribution: isContributionValid ? newAvgFullContributionTime : avgFullContributionTime,
1783
+ verifyCloudFunction: isContributionValid
1784
+ ? newAvgVerifyCloudFunctionTime
1785
+ : avgVerifyCloudFunctionTime
1786
+ },
1787
+ waitingQueue: {
1788
+ ...updatedWaitingQueue,
1789
+ completedContributions: isContributionValid
1790
+ ? completedContributions + 1
1791
+ : completedContributions,
1792
+ failedContributions: isContributionValid ? failedContributions : failedContributions + 1
1793
+ },
1794
+ lastUpdated: getCurrentServerTimestampInMillis()
1795
+ });
1656
1796
  }
1657
- // Compute verification transcript hash.
1658
- transcriptBlake2bHash = await blake512FromPath(verificationTranscriptTemporaryLocalPath);
1659
- // Free resources by unlinking transcript temporary file.
1660
- fs.unlinkSync(verificationTranscriptTemporaryLocalPath);
1661
- // Filter participant contributions to find the data related to the one verified.
1662
- const participantContributions = contributions.filter((contribution) => !!contribution.hash && !!contribution.computationTime && !contribution.doc);
1663
- /// @dev (there must be only one contribution with an empty 'doc' field).
1664
- if (participantContributions.length !== 1)
1665
- logAndThrowError(SPECIFIC_ERRORS.SE_VERIFICATION_NO_PARTICIPANT_CONTRIBUTION_DATA);
1666
- // Get contribution computation time.
1667
- contributionComputationTime = contributions.at(0).computationTime;
1668
- // Step (1.A.4.A.2).
1669
- batch.create(contributionDoc.ref, {
1670
- participantId: participantDoc.id,
1671
- contributionComputationTime,
1672
- verificationComputationTime: verifyCloudFunctionExecutionTime,
1673
- zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1674
- files: {
1675
- transcriptFilename: verificationTranscriptCompleteFilename,
1676
- lastZkeyFilename,
1677
- transcriptStoragePath: verificationTranscriptStoragePathAndFilename,
1678
- lastZkeyStoragePath,
1679
- transcriptBlake2bHash,
1680
- lastZkeyBlake2bHash
1681
- },
1682
- verificationSoftware: {
1683
- name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1684
- version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1685
- commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1686
- },
1687
- valid: isContributionValid,
1688
- lastUpdated: getCurrentServerTimestampInMillis()
1689
- });
1690
- verifyContributionTimer.stop();
1691
- verifyCloudFunctionTime = verifyContributionTimer.ms();
1692
- }
1693
- else {
1694
- // Step (1.A.4.B).
1695
- // Free-up storage by deleting invalid contribution.
1696
- await deleteObject(bucketName, lastZkeyStoragePath);
1697
- // Step (1.A.4.B.1).
1698
- batch.create(contributionDoc.ref, {
1699
- participantId: participantDoc.id,
1700
- verificationComputationTime: verifyCloudFunctionExecutionTime,
1701
- zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1702
- verificationSoftware: {
1703
- name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1704
- version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1705
- commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1706
- },
1707
- valid: isContributionValid,
1708
- lastUpdated: getCurrentServerTimestampInMillis()
1709
- });
1710
- }
1711
- // Stop VM instance
1712
- if (isUsingVM) {
1713
- // using try and catch as the VM stopping function can throw
1714
- // however we want to continue without stopping as the
1715
- // verification was valid, and inform the coordinator
1797
+ // Step (2).
1798
+ await batch.commit();
1799
+ printLog(`The contribution #${isFinalizing ? finalContributionIndex : lastZkeyIndex} of circuit ${circuitId} (ceremony ${ceremonyId}) has been verified as ${isContributionValid ? "valid" : "invalid"} for the participant ${participantDoc.id}`, LogLevel.DEBUG);
1800
+ };
1801
+ // Step (1).
1802
+ if (isContributing || isFinalizing) {
1803
+ // Prepare timer.
1804
+ verificationTaskTimer.start();
1805
+ // Step (1.A.3.0).
1806
+ if (isUsingVM) {
1807
+ printLog(`Starting the VM mechanism`, LogLevel.DEBUG);
1808
+ // Prepare for VM execution.
1809
+ let isVMRunning = false; // true when the VM is up, otherwise false.
1810
+ // Step (1.A.3.1).
1811
+ await startEC2Instance(ec2, vmInstanceId);
1812
+ await sleep(60000); // nb. wait for VM startup (1 mins + retry).
1813
+ // Check if the startup is running.
1814
+ isVMRunning = await checkIfVMRunning(ec2, vmInstanceId);
1815
+ printLog(`VM running: ${isVMRunning}`, LogLevel.DEBUG);
1816
+ // Step (1.A.3.2).
1817
+ // Prepare.
1818
+ const verificationCommand = vmContributionVerificationCommand(bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename);
1819
+ // Run.
1820
+ commandId = await runCommandUsingSSM(ssm, vmInstanceId, verificationCommand);
1821
+ printLog(`Starting the execution of command ${commandId}`, LogLevel.DEBUG);
1822
+ // Step (1.A.3.3).
1823
+ return waitForVMCommandExecution(ssm, vmInstanceId, commandId)
1824
+ .then(async () => {
1825
+ // Command execution successfully completed.
1826
+ printLog(`Command ${commandId} execution has been successfully completed`, LogLevel.DEBUG);
1827
+ await completeVerification();
1828
+ })
1829
+ .catch((error) => {
1830
+ // Command execution aborted.
1831
+ printLog(`Command ${commandId} execution has been aborted - Error ${error}`, LogLevel.DEBUG);
1832
+ logAndThrowError(COMMON_ERRORS.CM_INVALID_COMMAND_EXECUTION);
1833
+ });
1834
+ }
1835
+ // CF approach.
1836
+ printLog(`CF mechanism`, LogLevel.DEBUG);
1837
+ const potStoragePath = getPotStorageFilePath(files.potFilename);
1838
+ const firstZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${genesisZkeyIndex}.zkey`);
1839
+ // Prepare temporary file paths.
1840
+ // (nb. these are needed to download the necessary artifacts for verification from AWS S3).
1841
+ verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(verificationTranscriptCompleteFilename);
1842
+ const potTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.pot`);
1843
+ const firstZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_genesis.zkey`);
1844
+ const lastZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_last.zkey`);
1845
+ // Create and populate transcript.
1846
+ const transcriptLogger = createCustomLoggerForFile(verificationTranscriptTemporaryLocalPath);
1847
+ transcriptLogger.info(`${isFinalizing ? `Final verification` : `Verification`} transcript for ${prefix} circuit Phase 2 contribution.\n${isFinalizing ? `Coordinator ` : `Contributor # ${Number(lastZkeyIndex)}`} (${contributorOrCoordinatorIdentifier})\n`);
1848
+ // Step (1.A.2).
1849
+ await downloadArtifactFromS3Bucket(bucketName, potStoragePath, potTempFilePath);
1850
+ await downloadArtifactFromS3Bucket(bucketName, firstZkeyStoragePath, firstZkeyTempFilePath);
1851
+ await downloadArtifactFromS3Bucket(bucketName, lastZkeyStoragePath, lastZkeyTempFilePath);
1852
+ // Step (1.A.4).
1853
+ isContributionValid = await zKey.verifyFromInit(firstZkeyTempFilePath, potTempFilePath, lastZkeyTempFilePath, transcriptLogger);
1854
+ // Compute contribution hash.
1855
+ lastZkeyBlake2bHash = await blake512FromPath(lastZkeyTempFilePath);
1856
+ // Free resources by unlinking temporary folders.
1857
+ // Do not free-up verification transcript path here.
1716
1858
  try {
1717
- await stopEC2Instance(ec2, vmInstanceId);
1859
+ fs.unlinkSync(potTempFilePath);
1860
+ fs.unlinkSync(firstZkeyTempFilePath);
1861
+ fs.unlinkSync(lastZkeyTempFilePath);
1718
1862
  }
1719
1863
  catch (error) {
1720
- printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${error}`, LogLevel.WARN);
1864
+ printLog(`Error while unlinking temporary files - Error ${error}`, LogLevel.WARN);
1721
1865
  }
1866
+ await completeVerification();
1722
1867
  }
1723
- // Step (1.A.4.C)
1724
- if (!isFinalizing) {
1725
- // Step (1.A.4.C.1)
1726
- // Compute new average contribution/verification time.
1727
- fullContributionTime = Number(verificationStartedAt) - Number(contributionStartedAt);
1728
- const newAvgContributionComputationTime = avgContributionComputationTime > 0
1729
- ? (avgContributionComputationTime + contributionComputationTime) / 2
1730
- : contributionComputationTime;
1731
- const newAvgFullContributionTime = avgFullContributionTime > 0
1732
- ? (avgFullContributionTime + fullContributionTime) / 2
1733
- : fullContributionTime;
1734
- const newAvgVerifyCloudFunctionTime = avgVerifyCloudFunctionTime > 0
1735
- ? (avgVerifyCloudFunctionTime + verifyCloudFunctionTime) / 2
1736
- : verifyCloudFunctionTime;
1737
- // Prepare tx to update circuit average contribution/verification time.
1738
- const updatedCircuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1739
- const { waitingQueue: updatedWaitingQueue } = updatedCircuitDoc.data();
1740
- /// @dev this must happen only for valid contributions.
1741
- batch.update(circuitDoc.ref, {
1742
- avgTimings: {
1743
- contributionComputation: isContributionValid
1744
- ? newAvgContributionComputationTime
1745
- : avgContributionComputationTime,
1746
- fullContribution: isContributionValid ? newAvgFullContributionTime : avgFullContributionTime,
1747
- verifyCloudFunction: isContributionValid
1748
- ? newAvgVerifyCloudFunctionTime
1749
- : avgVerifyCloudFunctionTime
1750
- },
1751
- waitingQueue: {
1752
- ...updatedWaitingQueue,
1753
- completedContributions: isContributionValid
1754
- ? completedContributions + 1
1755
- : completedContributions,
1756
- failedContributions: isContributionValid ? failedContributions : failedContributions + 1
1757
- },
1758
- lastUpdated: getCurrentServerTimestampInMillis()
1759
- });
1760
- }
1761
- // Step (2).
1762
- await batch.commit();
1763
- printLog(`The contribution #${isFinalizing ? finalContributionIndex : lastZkeyIndex} of circuit ${circuitId} (ceremony ${ceremonyId}) has been verified as ${isContributionValid ? "valid" : "invalid"} for the participant ${participantDoc.id}`, LogLevel.DEBUG);
1764
- };
1765
- // Step (1).
1766
- if (isContributing || isFinalizing) {
1767
- // Prepare timer.
1768
- verificationTaskTimer.start();
1769
- // Step (1.A.3.0).
1770
- if (isUsingVM) {
1771
- printLog(`Starting the VM mechanism`, LogLevel.DEBUG);
1772
- // Prepare for VM execution.
1773
- let isVMRunning = false; // true when the VM is up, otherwise false.
1774
- // Step (1.A.3.1).
1775
- await startEC2Instance(ec2, vmInstanceId);
1776
- await sleep(60000); // nb. wait for VM startup (1 mins + retry).
1777
- // Check if the startup is running.
1778
- isVMRunning = await checkIfVMRunning(ec2, vmInstanceId);
1779
- printLog(`VM running: ${isVMRunning}`, LogLevel.DEBUG);
1780
- // Step (1.A.3.2).
1781
- // Prepare.
1782
- const verificationCommand = vmContributionVerificationCommand(bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename);
1783
- // Run.
1784
- commandId = await runCommandUsingSSM(ssm, vmInstanceId, verificationCommand);
1785
- printLog(`Starting the execution of command ${commandId}`, LogLevel.DEBUG);
1786
- // Step (1.A.3.3).
1787
- return waitForVMCommandExecution(ssm, vmInstanceId, commandId)
1788
- .then(async () => {
1789
- // Command execution successfully completed.
1790
- printLog(`Command ${commandId} execution has been successfully completed`, LogLevel.DEBUG);
1791
- await completeVerification();
1792
- })
1793
- .catch((error) => {
1794
- // Command execution aborted.
1795
- printLog(`Command ${commandId} execution has been aborted - Error ${error}`, LogLevel.DEBUG);
1796
- logAndThrowError(COMMON_ERRORS.CM_INVALID_COMMAND_EXECUTION);
1797
- });
1798
- }
1799
- // CF approach.
1800
- printLog(`CF mechanism`, LogLevel.DEBUG);
1801
- const potStoragePath = getPotStorageFilePath(files.potFilename);
1802
- const firstZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${genesisZkeyIndex}.zkey`);
1803
- // Prepare temporary file paths.
1804
- // (nb. these are needed to download the necessary artifacts for verification from AWS S3).
1805
- verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(verificationTranscriptCompleteFilename);
1806
- const potTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.pot`);
1807
- const firstZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_genesis.zkey`);
1808
- const lastZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_last.zkey`);
1809
- // Create and populate transcript.
1810
- const transcriptLogger = createCustomLoggerForFile(verificationTranscriptTemporaryLocalPath);
1811
- transcriptLogger.info(`${isFinalizing ? `Final verification` : `Verification`} transcript for ${prefix} circuit Phase 2 contribution.\n${isFinalizing ? `Coordinator ` : `Contributor # ${Number(lastZkeyIndex)}`} (${contributorOrCoordinatorIdentifier})\n`);
1812
- // Step (1.A.2).
1813
- await downloadArtifactFromS3Bucket(bucketName, potStoragePath, potTempFilePath);
1814
- await downloadArtifactFromS3Bucket(bucketName, firstZkeyStoragePath, firstZkeyTempFilePath);
1815
- await downloadArtifactFromS3Bucket(bucketName, lastZkeyStoragePath, lastZkeyTempFilePath);
1816
- // Step (1.A.4).
1817
- isContributionValid = await zKey.verifyFromInit(firstZkeyTempFilePath, potTempFilePath, lastZkeyTempFilePath, transcriptLogger);
1818
- // Compute contribution hash.
1819
- lastZkeyBlake2bHash = await blake512FromPath(lastZkeyTempFilePath);
1820
- // Free resources by unlinking temporary folders.
1821
- // Do not free-up verification transcript path here.
1822
- try {
1823
- fs.unlinkSync(potTempFilePath);
1824
- fs.unlinkSync(firstZkeyTempFilePath);
1825
- fs.unlinkSync(lastZkeyTempFilePath);
1826
- }
1827
- catch (error) {
1828
- printLog(`Error while unlinking temporary files - Error ${error}`, LogLevel.WARN);
1829
- }
1830
- await completeVerification();
1868
+ }
1869
+ catch (error) {
1870
+ logAndThrowError(makeError("unknown", error));
1831
1871
  }
1832
1872
  });
1833
1873
  /**
@@ -1838,7 +1878,7 @@ const verifycontribution = functionsV2.https.onCall({ memory: "16GiB", timeoutSe
1838
1878
  const refreshParticipantAfterContributionVerification = functionsV1
1839
1879
  .region("europe-west1")
1840
1880
  .runWith({
1841
- memory: "512MB"
1881
+ memory: "1GB"
1842
1882
  })
1843
1883
  .firestore.document(`/${commonTerms.collections.ceremonies.name}/{ceremony}/${commonTerms.collections.circuits.name}/{circuit}/${commonTerms.collections.contributions.name}/{contributions}`)
1844
1884
  .onCreate(async (createdContribution) => {
@@ -1899,7 +1939,7 @@ const refreshParticipantAfterContributionVerification = functionsV1
1899
1939
  const finalizeCircuit = functionsV1
1900
1940
  .region("europe-west1")
1901
1941
  .runWith({
1902
- memory: "512MB"
1942
+ memory: "1GB"
1903
1943
  })
1904
1944
  .https.onCall(async (data, context) => {
1905
1945
  if (!context.auth || !context.auth.token.coordinator)
@@ -2043,7 +2083,7 @@ const checkIfBucketIsDedicatedToCeremony = async (bucketName) => {
2043
2083
  const createBucket = functions
2044
2084
  .region("europe-west1")
2045
2085
  .runWith({
2046
- memory: "512MB"
2086
+ memory: "1GB"
2047
2087
  })
2048
2088
  .https.onCall(async (data, context) => {
2049
2089
  // Check if the user has the coordinator claim.
@@ -2133,7 +2173,7 @@ const createBucket = functions
2133
2173
  const checkIfObjectExist = functions
2134
2174
  .region("europe-west1")
2135
2175
  .runWith({
2136
- memory: "512MB"
2176
+ memory: "1GB"
2137
2177
  })
2138
2178
  .https.onCall(async (data, context) => {
2139
2179
  // Check if the user has the coordinator claim.
@@ -2179,7 +2219,7 @@ const checkIfObjectExist = functions
2179
2219
  const generateGetObjectPreSignedUrl = functions
2180
2220
  .region("europe-west1")
2181
2221
  .runWith({
2182
- memory: "512MB"
2222
+ memory: "1GB"
2183
2223
  })
2184
2224
  .https.onCall(async (data, context) => {
2185
2225
  if (!context.auth)
@@ -2219,7 +2259,7 @@ const generateGetObjectPreSignedUrl = functions
2219
2259
  const startMultiPartUpload = functions
2220
2260
  .region("europe-west1")
2221
2261
  .runWith({
2222
- memory: "512MB"
2262
+ memory: "2GB"
2223
2263
  })
2224
2264
  .https.onCall(async (data, context) => {
2225
2265
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2274,7 +2314,7 @@ const startMultiPartUpload = functions
2274
2314
  const generatePreSignedUrlsParts = functions
2275
2315
  .region("europe-west1")
2276
2316
  .runWith({
2277
- memory: "512MB",
2317
+ memory: "1GB",
2278
2318
  timeoutSeconds: 300
2279
2319
  })
2280
2320
  .https.onCall(async (data, context) => {
@@ -2335,7 +2375,7 @@ const generatePreSignedUrlsParts = functions
2335
2375
  const completeMultiPartUpload = functions
2336
2376
  .region("europe-west1")
2337
2377
  .runWith({
2338
- memory: "512MB"
2378
+ memory: "2GB"
2339
2379
  })
2340
2380
  .https.onCall(async (data, context) => {
2341
2381
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2384,6 +2424,216 @@ const completeMultiPartUpload = functions
2384
2424
  }
2385
2425
  });
2386
2426
 
2427
/**
 * Hardcoded Groth16 verification key (bn128 curve, 3 public signals) used by
 * `bandadaValidateProof` to verify Bandada group-membership proofs server-side.
 * `IC` holds nPublic + 1 = 4 curve points, as the Groth16 verifier requires.
 * NOTE(review): presumably exported from snarkjs for the membership circuit — confirm
 * it matches the deployed circuit before rotating keys.
 */
const VKEY_DATA = {
    protocol: "groth16",
    curve: "bn128",
    nPublic: 3,
    // G1 point (affine coordinates as decimal strings, trailing "1" = Z coordinate).
    vk_alpha_1: [
        "20491192805390485299153009773594534940189261866228447918068658471970481763042",
        "9383485363053290200918347156157836566562967994039712273449902621266178545958",
        "1"
    ],
    // G2 points: pairs of Fp2 coefficients, with ["1", "0"] as the Z coordinate.
    vk_beta_2: [
        ["6375614351688725206403948262868962793625744043794305715222011528459656738731", "4252822878758300859123897981450591353533073413197771768651442665752259397132"],
        ["10505242626370262277552901082094356697409835680220590971873171140371331206856", "21847035105528745403288232691147584728191162732299865338377159692350059136679"],
        ["1", "0"]
    ],
    vk_gamma_2: [
        ["10857046999023057135944570762232829481370756359578518086990519993285655852781", "11559732032986387107991004021392285783925812861821192530917403151452391805634"],
        ["8495653923123431417604973247489272438418190587263600148770280649306958101930", "4082367875863433681332203403145435568316851327593401208105741076214120093531"],
        ["1", "0"]
    ],
    vk_delta_2: [
        ["3697618915467790705869942236922063775466274665053173890632463796679068973252", "14948341351907992175709156460547989243732741534604949238422596319735704165658"],
        ["3028459181652799888716942141752307629938889957960373621898607910203491239368", "11380736494786911280692284374675752681598754560757720296073023058533044108340"],
        ["1", "0"]
    ],
    // Precomputed pairing e(alpha, beta) as a GT element (2 x 3 x 2 Fp coefficients).
    vk_alphabeta_12: [
        [
            ["2029413683389138792403550203267699914886160938906632433982220835551125967885", "21072700047562757817161031222997517981543347628379360635925549008442030252106"],
            ["5940354580057074848093997050200682056184807770593307860589430076672439820312", "12156638873931618554171829126792193045421052652279363021382169897324752428276"],
            ["7898200236362823042373859371574133993780991612861777490112507062703164551277", "7074218545237549455313236346927434013100842096812539264420499035217050630853"]
        ],
        [
            ["7077479683546002997211712695946002074877511277312570035766170199895071832130", "10093483419865920389913245021038182291233451549023025229112148274109565435465"],
            ["4595479056700221319381530156280926371456704509942304414423590385166031118820", "19831328484489333784475432780421641293929726139240675179672856274388269393268"],
            ["11934129596455521040620786944827826205713621633706285934057045369193958244500", "8037395052364110730298837004334506829870972346962140206007064471173334027475"]
        ]
    ],
    // One G1 point per public signal plus one constant term.
    IC: [
        ["12951059800758687233303204819298121944551181861362200875212570257618182506154", "5751958719396509176593242305268064754837298673622815112953832050159760501392", "1"],
        ["9561588427935871983444704959674198910445823619407211599507208879011862515257", "14576201570478094842467636169770180675293504492823217349086195663150934064643", "1"],
        ["4811967233483727873912563574622036989372099129165459921963463310078093941559", "1874883809855039536107616044787862082553628089593740724610117059083415551067", "1"],
        ["12252730267779308452229639835051322390696643456253768618882001876621526827161", "7899194018737016222260328309937800777948677969409898603827268776967707173231", "1"]
    ]
};
2522
// Load environment configuration before reading the Bandada settings below.
dotenv.config();
// Bandada API endpoint and the identifier of the group whose membership gates sign-in.
const { BANDADA_API_URL, BANDADA_GROUP_ID } = process.env;
// Client for the Bandada group-management API.
// NOTE(review): BANDADA_API_URL may be undefined if the env var is missing — ApiSdk's
// behavior in that case is not visible from here; confirm it falls back sanely.
const bandadaApi = new ApiSdk(BANDADA_API_URL);
2525
/**
 * Validate a zero-knowledge proof of membership in the configured Bandada group and,
 * on success, mint a Firebase custom auth token bound to the prover's identity commitment.
 *
 * Expects `data` to contain `proof` and `publicSignals` (snarkjs Groth16 artifacts).
 * Returns `{ valid, message, token }` where `token` is non-empty only when the proof
 * verifies against `VKEY_DATA` AND the commitment belongs to `BANDADA_GROUP_ID`.
 * Throws if `BANDADA_GROUP_ID` is not configured.
 */
const bandadaValidateProof = functions
    .region("europe-west1")
    .runWith({
        memory: "512MB"
    })
    .https.onCall(async (data) => {
        if (!BANDADA_GROUP_ID)
            throw new Error("BANDADA_GROUP_ID is not defined in .env");
        const { proof, publicSignals } = data;
        // BUGFIX: groth16.verify is asynchronous. Without `await` the returned Promise
        // is always truthy, so `!isCorrect` was never taken and invalid proofs were
        // accepted. Await the actual boolean result.
        const isCorrect = await groth16.verify(VKEY_DATA, publicSignals, proof);
        if (!isCorrect)
            return {
                valid: false,
                message: "Invalid proof",
                token: ""
            };
        // The identity commitment is the second public signal of the circuit.
        const commitment = publicSignals[1];
        const isMember = await bandadaApi.isGroupMember(BANDADA_GROUP_ID, commitment);
        if (!isMember)
            return {
                valid: false,
                message: "Not a member of the group",
                token: ""
            };
        const auth = getAuth();
        try {
            // Use the commitment itself as the Firebase UID.
            await admin.auth().createUser({
                uid: commitment
            });
        }
        catch (error) {
            // A returning member already has an account; anything else is unexpected.
            if (error.code !== "auth/uid-already-exists") {
                throw new Error(error);
            }
        }
        const token = await auth.createCustomToken(commitment);
        return {
            valid: true,
            message: "Valid proof and group member",
            token
        };
    });
2568
+
2569
// Load environment configuration before reading SIWE-related settings.
dotenv.config();
/**
 * Authenticate a Sign-In With Ethereum (SIWE) user who completed the Auth0 device flow:
 * resolve the Ethereum address from the Auth0 `sub` claim, optionally require that the
 * address has sent at least ETH_MINIMUM_NONCE transactions, then mint a Firebase custom
 * auth token for that address.
 *
 * Returns `{ valid: true, token }` on success, or `{ valid: false, message }` on any
 * failure (including unexpected errors, which are reported rather than thrown).
 */
const checkNonceOfSIWEAddress = functions
    .region("europe-west1")
    .runWith({ memory: "1GB" })
    .https.onCall(async (data) => {
        try {
            const { auth0Token } = data;
            // Fetch the Auth0 profile associated with the device-flow access token.
            const response = await fetch(`${process.env.AUTH0_APPLICATION_URL}/userinfo`, {
                method: "GET",
                headers: {
                    "content-type": "application/json",
                    authorization: `Bearer ${auth0Token}`
                }
            });
            const userInfo = await response.json();
            if (!userInfo.sub) {
                return {
                    valid: false,
                    message: "No user detected. Please check device flow token"
                };
            }
            const auth = getAuth();
            // The `sub` claim's third `|`-segment is a CAIP-style id whose third `:`-field
            // is the Ethereum address — TODO confirm against the Auth0 connection config.
            const subSegments = userInfo.sub.split("|");
            const address = decodeURIComponent(subSegments[2]).split(":")[2];
            const minimumNonce = Number(process.env.ETH_MINIMUM_NONCE);
            const nonceBlockHeight = "latest"; // could be sourced from process.env.ETH_NONCE_BLOCK_HEIGHT
            // Only hit the chain when a positive minimum nonce is configured.
            let nonceOk = true;
            if (minimumNonce > 0) {
                const provider = setEthProvider();
                const currentBlock = await provider.getBlockNumber();
                console.log(`got provider - block # ${currentBlock}`);
                const nonce = await provider.getTransactionCount(address, nonceBlockHeight);
                console.log(`nonce ${nonce}`);
                nonceOk = nonce >= minimumNonce;
            }
            console.log(`checking nonce ${nonceOk}`);
            if (!nonceOk) {
                return {
                    valid: false,
                    message: "Eth address does not meet the nonce requirements"
                };
            }
            try {
                // Use the address as both the Firebase UID and display name.
                await admin.auth().createUser({
                    displayName: address,
                    uid: address
                });
            }
            catch (error) {
                // A returning address already has an account; rethrow anything else
                // so the outer catch reports it.
                if (error.code !== "auth/uid-already-exists") {
                    throw new Error(error);
                }
            }
            const token = await auth.createCustomToken(address);
            return {
                valid: true,
                token
            };
        }
        catch (error) {
            return {
                valid: false,
                message: `Something went wrong ${error}`
            };
        }
    });
2636
+
2387
2637
  dotenv.config();
2388
2638
  /**
2389
2639
  * Check and remove the current contributor if it doesn't complete the contribution on the specified amount of time.
@@ -2406,7 +2656,7 @@ dotenv.config();
2406
2656
  const checkAndRemoveBlockingContributor = functions
2407
2657
  .region("europe-west1")
2408
2658
  .runWith({
2409
- memory: "512MB"
2659
+ memory: "1GB"
2410
2660
  })
2411
2661
  .pubsub.schedule("every 1 minutes")
2412
2662
  .onRun(async () => {
@@ -2425,7 +2675,7 @@ const checkAndRemoveBlockingContributor = functions
2425
2675
  // Get ceremony circuits.
2426
2676
  const circuits = await getCeremonyCircuits(ceremony.id);
2427
2677
  // Extract ceremony data.
2428
- const { timeoutMechanismType, penalty } = ceremony.data();
2678
+ const { timeoutType: timeoutMechanismType, penalty } = ceremony.data();
2429
2679
  for (const circuit of circuits) {
2430
2680
  if (!circuit.data())
2431
2681
  // Do not use `logAndThrowError` method to avoid the function to exit before checking every ceremony.
@@ -2475,7 +2725,8 @@ const checkAndRemoveBlockingContributor = functions
2475
2725
  if (timeoutExpirationDateInMsForBlockingContributor < currentServerTimestamp &&
2476
2726
  (contributionStep === "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */ ||
2477
2727
  contributionStep === "COMPUTING" /* ParticipantContributionStep.COMPUTING */ ||
2478
- contributionStep === "UPLOADING" /* ParticipantContributionStep.UPLOADING */))
2728
+ contributionStep === "UPLOADING" /* ParticipantContributionStep.UPLOADING */ ||
2729
+ contributionStep === "COMPLETED" /* ParticipantContributionStep.COMPLETED */))
2479
2730
  timeoutType = "BLOCKING_CONTRIBUTION" /* TimeoutType.BLOCKING_CONTRIBUTION */;
2480
2731
  if (timeoutExpirationDateInMsForVerificationCloudFunction > 0 &&
2481
2732
  timeoutExpirationDateInMsForVerificationCloudFunction < currentServerTimestamp &&
@@ -2552,7 +2803,7 @@ const checkAndRemoveBlockingContributor = functions
2552
2803
  const resumeContributionAfterTimeoutExpiration = functions
2553
2804
  .region("europe-west1")
2554
2805
  .runWith({
2555
- memory: "512MB"
2806
+ memory: "1GB"
2556
2807
  })
2557
2808
  .https.onCall(async (data, context) => {
2558
2809
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2585,4 +2836,4 @@ const resumeContributionAfterTimeoutExpiration = functions
2585
2836
 
2586
2837
// Initialize the default Firebase Admin SDK app for this module (required before
// the admin.auth()/firestore calls made by the exported functions).
admin.initializeApp();
2587
2838
 
2588
- export { checkAndPrepareCoordinatorForFinalization, checkAndRemoveBlockingContributor, checkIfObjectExist, checkParticipantForCeremony, completeMultiPartUpload, coordinateCeremonyParticipant, createBucket, finalizeCeremony, finalizeCircuit, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, initEmptyWaitingQueueForCircuit, permanentlyStoreCurrentContributionTimeAndHash, processSignUpWithCustomClaims, progressToNextCircuitForContribution, progressToNextContributionStep, refreshParticipantAfterContributionVerification, registerAuthUser, resumeContributionAfterTimeoutExpiration, setupCeremony, startCeremony, startMultiPartUpload, stopCeremony, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, verifycontribution };
2839
// Public surface of the backend: authentication (bandadaValidateProof, checkNonceOfSIWEAddress,
// registerAuthUser, ...), ceremony lifecycle, participant coordination, storage, and timeout handling.
export { bandadaValidateProof, checkAndPrepareCoordinatorForFinalization, checkAndRemoveBlockingContributor, checkIfObjectExist, checkNonceOfSIWEAddress, checkParticipantForCeremony, completeMultiPartUpload, coordinateCeremonyParticipant, createBucket, finalizeCeremony, finalizeCircuit, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, initEmptyWaitingQueueForCircuit, permanentlyStoreCurrentContributionTimeAndHash, processSignUpWithCustomClaims, progressToNextCircuitForContribution, progressToNextContributionStep, refreshParticipantAfterContributionVerification, registerAuthUser, resumeContributionAfterTimeoutExpiration, setupCeremony, startCeremony, startMultiPartUpload, stopCeremony, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, verifycontribution };