@devtion/backend 0.0.0-3df1645 → 0.0.0-477457c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. package/README.md +2 -2
  2. package/dist/src/functions/index.js +627 -345
  3. package/dist/src/functions/index.mjs +628 -348
  4. package/dist/src/functions/types/functions/bandada.d.ts +4 -0
  5. package/dist/src/functions/types/functions/bandada.d.ts.map +1 -0
  6. package/dist/{types → src/functions/types}/functions/ceremony.d.ts.map +1 -1
  7. package/dist/{types → src/functions/types}/functions/circuit.d.ts.map +1 -1
  8. package/dist/{types → src/functions/types}/functions/index.d.ts +2 -0
  9. package/dist/{types → src/functions/types}/functions/index.d.ts.map +1 -1
  10. package/dist/src/functions/types/functions/siwe.d.ts +4 -0
  11. package/dist/src/functions/types/functions/siwe.d.ts.map +1 -0
  12. package/dist/{types → src/functions/types}/functions/storage.d.ts.map +1 -1
  13. package/dist/{types → src/functions/types}/functions/timeout.d.ts.map +1 -1
  14. package/dist/{types → src/functions/types}/functions/user.d.ts.map +1 -1
  15. package/dist/{types → src/functions/types}/lib/errors.d.ts +2 -1
  16. package/dist/src/functions/types/lib/errors.d.ts.map +1 -0
  17. package/dist/{types → src/functions/types}/lib/services.d.ts +7 -0
  18. package/dist/src/functions/types/lib/services.d.ts.map +1 -0
  19. package/dist/src/functions/types/lib/utils.d.ts.map +1 -0
  20. package/dist/{types → src/functions/types}/types/index.d.ts +56 -0
  21. package/dist/src/functions/types/types/index.d.ts.map +1 -0
  22. package/package.json +8 -7
  23. package/src/functions/bandada.ts +154 -0
  24. package/src/functions/ceremony.ts +11 -7
  25. package/src/functions/circuit.ts +414 -384
  26. package/src/functions/index.ts +2 -0
  27. package/src/functions/participant.ts +8 -8
  28. package/src/functions/siwe.ts +77 -0
  29. package/src/functions/storage.ts +7 -6
  30. package/src/functions/timeout.ts +14 -13
  31. package/src/functions/user.ts +6 -5
  32. package/src/lib/errors.ts +6 -1
  33. package/src/lib/services.ts +36 -0
  34. package/src/lib/utils.ts +8 -6
  35. package/src/types/declarations.d.ts +1 -0
  36. package/src/types/index.ts +60 -0
  37. package/dist/types/lib/errors.d.ts.map +0 -1
  38. package/dist/types/lib/services.d.ts.map +0 -1
  39. package/dist/types/lib/utils.d.ts.map +0 -1
  40. package/dist/types/types/index.d.ts.map +0 -1
  41. /package/dist/{types → src/functions/types}/functions/ceremony.d.ts +0 -0
  42. /package/dist/{types → src/functions/types}/functions/circuit.d.ts +0 -0
  43. /package/dist/{types → src/functions/types}/functions/participant.d.ts +0 -0
  44. /package/dist/{types → src/functions/types}/functions/participant.d.ts.map +0 -0
  45. /package/dist/{types → src/functions/types}/functions/storage.d.ts +0 -0
  46. /package/dist/{types → src/functions/types}/functions/timeout.d.ts +0 -0
  47. /package/dist/{types → src/functions/types}/functions/user.d.ts +0 -0
  48. /package/dist/{types → src/functions/types}/lib/utils.d.ts +0 -0
  49. /package/dist/{types → src/functions/types}/types/enums.d.ts +0 -0
  50. /package/dist/{types → src/functions/types}/types/enums.d.ts.map +0 -0
@@ -1,6 +1,6 @@
1
1
  /**
2
2
  * @module @p0tion/backend
3
- * @version 1.0.9
3
+ * @version 1.2.8
4
4
  * @file MPC Phase 2 backend for Firebase services management
5
5
  * @copyright Ethereum Foundation 2022
6
6
  * @license MIT
@@ -9,7 +9,7 @@
9
9
  import admin from 'firebase-admin';
10
10
  import * as functions from 'firebase-functions';
11
11
  import dotenv from 'dotenv';
12
- import { getCircuitsCollectionPath, getTimeoutsCollectionPath, commonTerms, finalContributionIndex, getContributionsCollectionPath, githubReputation, getBucketName, vmBootstrapCommand, vmDependenciesAndCacheArtifactsCommand, vmBootstrapScriptFilename, computeDiskSizeForVM, createEC2Instance, getParticipantsCollectionPath, terminateEC2Instance, formatZkeyIndex, getTranscriptStorageFilePath, getZkeyStorageFilePath, startEC2Instance, vmContributionVerificationCommand, runCommandUsingSSM, getPotStorageFilePath, genesisZkeyIndex, createCustomLoggerForFile, blake512FromPath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, computeSHA256ToHex, checkIfRunning, retrieveCommandOutput, stopEC2Instance, verificationKeyAcronym, verifierSmartContractAcronym, retrieveCommandStatus } from '@p0tion/actions';
12
+ import { getCircuitsCollectionPath, getTimeoutsCollectionPath, commonTerms, finalContributionIndex, getContributionsCollectionPath, githubReputation, getBucketName, vmBootstrapCommand, vmDependenciesAndCacheArtifactsCommand, vmBootstrapScriptFilename, computeDiskSizeForVM, createEC2Instance, getParticipantsCollectionPath, terminateEC2Instance, formatZkeyIndex, getTranscriptStorageFilePath, getZkeyStorageFilePath, retrieveCommandOutput, blake512FromPath, stopEC2Instance, startEC2Instance, vmContributionVerificationCommand, runCommandUsingSSM, getPotStorageFilePath, genesisZkeyIndex, createCustomLoggerForFile, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, computeSHA256ToHex, checkIfRunning, verificationKeyAcronym, verifierSmartContractAcronym, retrieveCommandStatus } from '@p0tion/actions';
13
13
  import { encode } from 'html-entities';
14
14
  import { Timestamp, FieldValue } from 'firebase-admin/firestore';
15
15
  import { S3Client, GetObjectCommand, PutObjectCommand, DeleteObjectCommand, HeadBucketCommand, CreateBucketCommand, PutPublicAccessBlockCommand, PutBucketCorsCommand, HeadObjectCommand, CreateMultipartUploadCommand, UploadPartCommand, CompleteMultipartUploadCommand } from '@aws-sdk/client-s3';
@@ -25,10 +25,13 @@ import path from 'path';
25
25
  import os from 'os';
26
26
  import { SSMClient, CommandInvocationStatus } from '@aws-sdk/client-ssm';
27
27
  import { EC2Client } from '@aws-sdk/client-ec2';
28
+ import ethers from 'ethers';
28
29
  import * as functionsV1 from 'firebase-functions/v1';
29
30
  import * as functionsV2 from 'firebase-functions/v2';
30
31
  import { Timer } from 'timer-node';
31
- import { zKey } from 'snarkjs';
32
+ import { zKey, groth16 } from 'snarkjs';
33
+ import { ApiSdk } from '@bandada/api-sdk';
34
+ import { getAuth } from 'firebase-admin/auth';
32
35
 
33
36
  /**
34
37
  * Log levels.
@@ -49,7 +52,7 @@ var LogLevel;
49
52
  * @notice the set of Firebase Functions status codes. The codes are the same at the
50
53
  * ones exposed by {@link https://github.com/grpc/grpc/blob/master/doc/statuscodes.md | gRPC}.
51
54
  * @param errorCode <FunctionsErrorCode> - the set of possible error codes.
52
- * @param message <string> - the error messge.
55
+ * @param message <string> - the error message.
53
56
  * @param [details] <string> - the details of the error (optional).
54
57
  * @returns <HttpsError>
55
58
  */
@@ -122,7 +125,8 @@ const SPECIFIC_ERRORS = {
122
125
  SE_VM_TIMEDOUT_COMMAND_EXECUTION: makeError("deadline-exceeded", "VM command execution took too long and has been timed-out", "Please, contact the coordinator if this error persists."),
123
126
  SE_VM_CANCELLED_COMMAND_EXECUTION: makeError("cancelled", "VM command execution has been cancelled", "Please, contact the coordinator if this error persists."),
124
127
  SE_VM_DELAYED_COMMAND_EXECUTION: makeError("unavailable", "VM command execution has been delayed since there were no available instance at the moment", "Please, contact the coordinator if this error persists."),
125
- SE_VM_UNKNOWN_COMMAND_STATUS: makeError("unavailable", "VM command execution has failed due to an unknown status code", "Please, contact the coordinator if this error persists.")
128
+ SE_VM_UNKNOWN_COMMAND_STATUS: makeError("unavailable", "VM command execution has failed due to an unknown status code", "Please, contact the coordinator if this error persists."),
129
+ WRONG_BUCKET_NAME: makeError("invalid-argument", "The provided bucket name is not valid.", "Bucket names must be between 3 and 63 characters long, can only contain lowercase letters, numbers, and hyphens, and must start and end with a letter or number.")
126
130
  };
127
131
  /**
128
132
  * A set of common errors.
@@ -141,6 +145,8 @@ const COMMON_ERRORS = {
141
145
  CM_INVALID_COMMAND_EXECUTION: makeError("unknown", "There was an error while executing the command on the VM", "Please, contact the coordinator if the error persists.")
142
146
  };
143
147
 
148
+ dotenv.config();
149
+ let provider;
144
150
  /**
145
151
  * Return a configured and connected instance of the AWS S3 client.
146
152
  * @dev this method check and utilize the environment variables to configure the connection
@@ -163,6 +169,36 @@ const getS3Client = async () => {
163
169
  region: process.env.AWS_REGION
164
170
  });
165
171
  };
172
+ /**
173
+ * Returns a Provider, connected via a configured JSON URL or else
174
+ * the ethers.js default provider, using configured API keys.
175
+ * @returns <ethers.providers.Provider> An Eth node provider
176
+ */
177
+ const setEthProvider = () => {
178
+ if (provider)
179
+ return provider;
180
+ console.log(`setting new provider`);
181
+ // Use JSON URL if defined
182
+ // if ((hardhat as any).ethers) {
183
+ // console.log(`using hardhat.ethers provider`)
184
+ // provider = (hardhat as any).ethers.provider
185
+ // } else
186
+ if (process.env.ETH_PROVIDER_JSON_URL) {
187
+ console.log(`JSON URL provider at ${process.env.ETH_PROVIDER_JSON_URL}`);
188
+ provider = new ethers.providers.JsonRpcProvider({
189
+ url: process.env.ETH_PROVIDER_JSON_URL,
190
+ skipFetchSetup: true
191
+ });
192
+ }
193
+ else {
194
+ // Otherwise, connect the default provider with Alchemy, Infura, or both
195
+ provider = ethers.providers.getDefaultProvider("homestead", {
196
+ alchemy: process.env.ETH_PROVIDER_ALCHEMY_API_KEY,
197
+ infura: process.env.ETH_PROVIDER_INFURA_API_KEY
198
+ });
199
+ }
200
+ return provider;
201
+ };
166
202
 
167
203
  dotenv.config();
168
204
  /**
@@ -265,7 +301,7 @@ const queryOpenedCeremonies = async () => {
265
301
  const getCircuitDocumentByPosition = async (ceremonyId, sequencePosition) => {
266
302
  // Query for all ceremony circuits.
267
303
  const circuits = await getCeremonyCircuits(ceremonyId);
268
- // Apply a filter using the sequence postion.
304
+ // Apply a filter using the sequence position.
269
305
  const matchedCircuits = circuits.filter((circuit) => circuit.data().sequencePosition === sequencePosition);
270
306
  if (matchedCircuits.length !== 1)
271
307
  logAndThrowError(COMMON_ERRORS.CM_NO_CIRCUIT_FOR_GIVEN_SEQUENCE_POSITION);
@@ -430,12 +466,14 @@ const htmlEncodeCircuitData = (circuitDocument) => ({
430
466
  const getGitHubVariables = () => {
431
467
  if (!process.env.GITHUB_MINIMUM_FOLLOWERS ||
432
468
  !process.env.GITHUB_MINIMUM_FOLLOWING ||
433
- !process.env.GITHUB_MINIMUM_PUBLIC_REPOS)
469
+ !process.env.GITHUB_MINIMUM_PUBLIC_REPOS ||
470
+ !process.env.GITHUB_MINIMUM_AGE)
434
471
  logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
435
472
  return {
436
473
  minimumFollowers: Number(process.env.GITHUB_MINIMUM_FOLLOWERS),
437
474
  minimumFollowing: Number(process.env.GITHUB_MINIMUM_FOLLOWING),
438
- minimumPublicRepos: Number(process.env.GITHUB_MINIMUM_PUBLIC_REPOS)
475
+ minimumPublicRepos: Number(process.env.GITHUB_MINIMUM_PUBLIC_REPOS),
476
+ minimumAge: Number(process.env.GITHUB_MINIMUM_AGE)
439
477
  };
440
478
  };
441
479
  /**
@@ -445,7 +483,7 @@ const getGitHubVariables = () => {
445
483
  const getAWSVariables = () => {
446
484
  if (!process.env.AWS_ACCESS_KEY_ID ||
447
485
  !process.env.AWS_SECRET_ACCESS_KEY ||
448
- !process.env.AWS_ROLE_ARN ||
486
+ !process.env.AWS_INSTANCE_PROFILE_ARN ||
449
487
  !process.env.AWS_AMI_ID ||
450
488
  !process.env.AWS_SNS_TOPIC_ARN)
451
489
  logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
@@ -453,7 +491,7 @@ const getAWSVariables = () => {
453
491
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
454
492
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
455
493
  region: process.env.AWS_REGION || "eu-central-1",
456
- roleArn: process.env.AWS_ROLE_ARN,
494
+ instanceProfileArn: process.env.AWS_INSTANCE_PROFILE_ARN,
457
495
  amiId: process.env.AWS_AMI_ID,
458
496
  snsTopic: process.env.AWS_SNS_TOPIC_ARN
459
497
  };
@@ -499,7 +537,7 @@ dotenv.config();
499
537
  const registerAuthUser = functions
500
538
  .region("europe-west1")
501
539
  .runWith({
502
- memory: "512MB"
540
+ memory: "1GB"
503
541
  })
504
542
  .auth.user()
505
543
  .onCreate(async (user) => {
@@ -531,11 +569,11 @@ const registerAuthUser = functions
531
569
  email === process.env.CUSTOM_CLAIMS_COORDINATOR_EMAIL_ADDRESS_OR_DOMAIN)) {
532
570
  const auth = admin.auth();
533
571
  // if provider == github.com let's use our functions to check the user's reputation
534
- if (user.providerData[0].providerId === "github.com") {
572
+ if (user.providerData.length > 0 && user.providerData[0].providerId === "github.com") {
535
573
  const vars = getGitHubVariables();
536
574
  // this return true or false
537
575
  try {
538
- const { reputable, avatarUrl: avatarURL } = await githubReputation(user.providerData[0].uid, vars.minimumFollowing, vars.minimumFollowers, vars.minimumPublicRepos);
576
+ const { reputable, avatarUrl: avatarURL } = await githubReputation(user.providerData[0].uid, vars.minimumFollowing, vars.minimumFollowers, vars.minimumPublicRepos, vars.minimumAge);
539
577
  if (!reputable) {
540
578
  // Delete user
541
579
  await auth.deleteUser(user.uid);
@@ -563,7 +601,7 @@ const registerAuthUser = functions
563
601
  encodedDisplayName,
564
602
  // Metadata.
565
603
  creationTime,
566
- lastSignInTime,
604
+ lastSignInTime: lastSignInTime || creationTime,
567
605
  // Optional.
568
606
  email: email || "",
569
607
  emailVerified: emailVerified || false,
@@ -586,7 +624,7 @@ const registerAuthUser = functions
586
624
  const processSignUpWithCustomClaims = functions
587
625
  .region("europe-west1")
588
626
  .runWith({
589
- memory: "512MB"
627
+ memory: "1GB"
590
628
  })
591
629
  .auth.user()
592
630
  .onCreate(async (user) => {
@@ -627,7 +665,7 @@ dotenv.config();
627
665
  const startCeremony = functions
628
666
  .region("europe-west1")
629
667
  .runWith({
630
- memory: "512MB"
668
+ memory: "1GB"
631
669
  })
632
670
  .pubsub.schedule(`every 30 minutes`)
633
671
  .onRun(async () => {
@@ -649,7 +687,7 @@ const startCeremony = functions
649
687
  const stopCeremony = functions
650
688
  .region("europe-west1")
651
689
  .runWith({
652
- memory: "512MB"
690
+ memory: "1GB"
653
691
  })
654
692
  .pubsub.schedule(`every 30 minutes`)
655
693
  .onRun(async () => {
@@ -671,7 +709,7 @@ const stopCeremony = functions
671
709
  const setupCeremony = functions
672
710
  .region("europe-west1")
673
711
  .runWith({
674
- memory: "512MB"
712
+ memory: "1GB"
675
713
  })
676
714
  .https.onCall(async (data, context) => {
677
715
  // Check if the user has the coordinator claim.
@@ -709,7 +747,9 @@ const setupCeremony = functions
709
747
  // The VM unique identifier (if any).
710
748
  let vmInstanceId = "";
711
749
  // Get a new circuit document.
712
- const circuitDoc = await firestore.collection(getCircuitsCollectionPath(ceremonyDoc.ref.id)).doc().get();
750
+ const ccp = getCircuitsCollectionPath(ceremonyDoc.ref.id);
751
+ printLog(`CircuitsCollectionPath = ${ccp}`, LogLevel.DEBUG);
752
+ const circuitDoc = await firestore.collection(ccp).doc().get();
713
753
  // Check if using the VM approach for contribution verification.
714
754
  if (circuit.verification.cfOrVm === "VM" /* CircuitContributionVerificationMechanism.VM */) {
715
755
  // VM command to be run at the startup.
@@ -746,12 +786,14 @@ const setupCeremony = functions
746
786
  }
747
787
  // Encode circuit data.
748
788
  const encodedCircuit = htmlEncodeCircuitData(circuit);
789
+ printLog(`writing circuit data...`, LogLevel.DEBUG);
749
790
  // Prepare tx to write circuit data.
750
791
  batch.create(circuitDoc.ref, {
751
792
  ...encodedCircuit,
752
793
  lastUpdated: getCurrentServerTimestampInMillis()
753
794
  });
754
795
  }
796
+ printLog(`Done handling circuits...`, LogLevel.DEBUG);
755
797
  // Send txs in a batch (to avoid race conditions).
756
798
  await batch.commit();
757
799
  printLog(`Setup completed for ceremony ${ceremonyDoc.id}`, LogLevel.DEBUG);
@@ -764,7 +806,7 @@ const setupCeremony = functions
764
806
  const initEmptyWaitingQueueForCircuit = functions
765
807
  .region("europe-west1")
766
808
  .runWith({
767
- memory: "512MB"
809
+ memory: "1GB"
768
810
  })
769
811
  .firestore.document(`/${commonTerms.collections.ceremonies.name}/{ceremony}/${commonTerms.collections.circuits.name}/{circuit}`)
770
812
  .onCreate(async (doc) => {
@@ -796,7 +838,7 @@ const initEmptyWaitingQueueForCircuit = functions
796
838
  const finalizeCeremony = functions
797
839
  .region("europe-west1")
798
840
  .runWith({
799
- memory: "512MB"
841
+ memory: "1GB"
800
842
  })
801
843
  .https.onCall(async (data, context) => {
802
844
  if (!context.auth || !context.auth.token.coordinator)
@@ -817,7 +859,7 @@ const finalizeCeremony = functions
817
859
  // Get ceremony circuits.
818
860
  const circuits = await getCeremonyCircuits(ceremonyId);
819
861
  // Get final contribution for each circuit.
820
- // nb. the `getFinalContributionDocument` checks the existance of the final contribution document (if not present, throws).
862
+ // nb. the `getFinalContributionDocument` checks the existence of the final contribution document (if not present, throws).
821
863
  // Therefore, we just need to call the method without taking any data to verify the pre-condition of having already computed
822
864
  // the final contributions for each ceremony circuit.
823
865
  for await (const circuit of circuits)
@@ -872,7 +914,7 @@ dotenv.config();
872
914
  const checkParticipantForCeremony = functions
873
915
  .region("europe-west1")
874
916
  .runWith({
875
- memory: "512MB"
917
+ memory: "1GB"
876
918
  })
877
919
  .https.onCall(async (data, context) => {
878
920
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -941,7 +983,7 @@ const checkParticipantForCeremony = functions
941
983
  participantDoc.ref.update({
942
984
  status: "EXHUMED" /* ParticipantStatus.EXHUMED */,
943
985
  contributions,
944
- tempContributionData: tempContributionData ? tempContributionData : FieldValue.delete(),
986
+ tempContributionData: tempContributionData || FieldValue.delete(),
945
987
  contributionStep: "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */,
946
988
  contributionStartedAt: 0,
947
989
  verificationStartedAt: FieldValue.delete(),
@@ -976,7 +1018,7 @@ const checkParticipantForCeremony = functions
976
1018
  const progressToNextCircuitForContribution = functions
977
1019
  .region("europe-west1")
978
1020
  .runWith({
979
- memory: "512MB"
1021
+ memory: "1GB"
980
1022
  })
981
1023
  .https.onCall(async (data, context) => {
982
1024
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1023,7 +1065,7 @@ const progressToNextCircuitForContribution = functions
1023
1065
  const progressToNextContributionStep = functions
1024
1066
  .region("europe-west1")
1025
1067
  .runWith({
1026
- memory: "512MB"
1068
+ memory: "1GB"
1027
1069
  })
1028
1070
  .https.onCall(async (data, context) => {
1029
1071
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1074,7 +1116,7 @@ const progressToNextContributionStep = functions
1074
1116
  const permanentlyStoreCurrentContributionTimeAndHash = functions
1075
1117
  .region("europe-west1")
1076
1118
  .runWith({
1077
- memory: "512MB"
1119
+ memory: "1GB"
1078
1120
  })
1079
1121
  .https.onCall(async (data, context) => {
1080
1122
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1116,7 +1158,7 @@ const permanentlyStoreCurrentContributionTimeAndHash = functions
1116
1158
  const temporaryStoreCurrentContributionMultiPartUploadId = functions
1117
1159
  .region("europe-west1")
1118
1160
  .runWith({
1119
- memory: "512MB"
1161
+ memory: "1GB"
1120
1162
  })
1121
1163
  .https.onCall(async (data, context) => {
1122
1164
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1154,7 +1196,7 @@ const temporaryStoreCurrentContributionMultiPartUploadId = functions
1154
1196
  const temporaryStoreCurrentContributionUploadedChunkData = functions
1155
1197
  .region("europe-west1")
1156
1198
  .runWith({
1157
- memory: "512MB"
1199
+ memory: "1GB"
1158
1200
  })
1159
1201
  .https.onCall(async (data, context) => {
1160
1202
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -1196,7 +1238,7 @@ const temporaryStoreCurrentContributionUploadedChunkData = functions
1196
1238
  const checkAndPrepareCoordinatorForFinalization = functions
1197
1239
  .region("europe-west1")
1198
1240
  .runWith({
1199
- memory: "512MB"
1241
+ memory: "1GB"
1200
1242
  })
1201
1243
  .https.onCall(async (data, context) => {
1202
1244
  if (!context.auth || !context.auth.token.coordinator)
@@ -1273,7 +1315,7 @@ const coordinate = async (participant, circuit, isSingleParticipantCoordination,
1273
1315
  if (isSingleParticipantCoordination) {
1274
1316
  // Scenario (A).
1275
1317
  if (emptyWaitingQueue) {
1276
- printLog(`Coordinate - executing scenario A - emptyWaitingQueue`, LogLevel.DEBUG);
1318
+ printLog(`Coordinate - executing scenario A - emptyWaitingQueue`, LogLevel.INFO);
1277
1319
  // Update.
1278
1320
  newCurrentContributorId = participant.id;
1279
1321
  newParticipantStatus = "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
@@ -1282,14 +1324,14 @@ const coordinate = async (participant, circuit, isSingleParticipantCoordination,
1282
1324
  }
1283
1325
  // Scenario (A).
1284
1326
  else if (participantResumingAfterTimeoutExpiration) {
1285
- printLog(`Coordinate - executing scenario A - single - participantResumingAfterTimeoutExpiration`, LogLevel.DEBUG);
1327
+ printLog(`Coordinate - executing scenario A - single - participantResumingAfterTimeoutExpiration`, LogLevel.INFO);
1286
1328
  newParticipantStatus = "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1287
1329
  newContributionStep = "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */;
1288
1330
  newCurrentContributorId = participant.id;
1289
1331
  }
1290
1332
  // Scenario (B).
1291
1333
  else if (participantIsNotCurrentContributor) {
1292
- printLog(`Coordinate - executing scenario B - single - participantIsNotCurrentContributor`, LogLevel.DEBUG);
1334
+ printLog(`Coordinate - executing scenario B - single - participantIsNotCurrentContributor`, LogLevel.INFO);
1293
1335
  newCurrentContributorId = currentContributor;
1294
1336
  newParticipantStatus = "WAITING" /* ParticipantStatus.WAITING */;
1295
1337
  newContributors.push(participant.id);
@@ -1308,7 +1350,7 @@ const coordinate = async (participant, circuit, isSingleParticipantCoordination,
1308
1350
  });
1309
1351
  }
1310
1352
  else if (participantIsCurrentContributor && participantCompletedOneOrAllContributions && !!ceremonyId) {
1311
- printLog(`Coordinate - executing scenario C - multi - participantIsCurrentContributor && participantCompletedOneOrAllContributions`, LogLevel.DEBUG);
1353
+ printLog(`Coordinate - executing scenario C - multi - participantIsCurrentContributor && participantCompletedOneOrAllContributions`, LogLevel.INFO);
1312
1354
  newParticipantStatus = "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1313
1355
  newContributionStep = "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */;
1314
1356
  // Remove from waiting queue of circuit X.
@@ -1326,7 +1368,7 @@ const coordinate = async (participant, circuit, isSingleParticipantCoordination,
1326
1368
  contributionStartedAt: getCurrentServerTimestampInMillis(),
1327
1369
  lastUpdated: getCurrentServerTimestampInMillis()
1328
1370
  });
1329
- printLog(`Participant ${newCurrentContributorId} is the new current contributor for circuit ${circuit.id}`, LogLevel.DEBUG);
1371
+ printLog(`Participant ${newCurrentContributorId} is the new current contributor for circuit ${circuit.id}`, LogLevel.INFO);
1330
1372
  }
1331
1373
  }
1332
1374
  // Prepare tx - must be done for all Scenarios.
@@ -1403,8 +1445,8 @@ const waitForVMCommandExecution = (ssm, vmInstanceId, commandId) => new Promise(
1403
1445
  try {
1404
1446
  await stopEC2Instance(ec2, vmInstanceId);
1405
1447
  }
1406
- catch (error) {
1407
- printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${error}`, LogLevel.WARN);
1448
+ catch (stopError) {
1449
+ printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${stopError}`, LogLevel.WARN);
1408
1450
  }
1409
1451
  if (!error.toString().includes(commandId))
1410
1452
  logAndThrowError(COMMON_ERRORS.CM_INVALID_COMMAND_EXECUTION);
@@ -1436,7 +1478,7 @@ const waitForVMCommandExecution = (ssm, vmInstanceId, commandId) => new Promise(
1436
1478
  const coordinateCeremonyParticipant = functionsV1
1437
1479
  .region("europe-west1")
1438
1480
  .runWith({
1439
- memory: "512MB"
1481
+ memory: "1GB"
1440
1482
  })
1441
1483
  .firestore.document(`${commonTerms.collections.ceremonies.name}/{ceremonyId}/${commonTerms.collections.participants.name}/{participantId}`)
1442
1484
  .onUpdate(async (participantChanges) => {
@@ -1452,8 +1494,8 @@ const coordinateCeremonyParticipant = functionsV1
1452
1494
  // Extract data.
1453
1495
  const { contributionProgress: prevContributionProgress, status: prevStatus, contributionStep: prevContributionStep } = exParticipant.data();
1454
1496
  const { contributionProgress: changedContributionProgress, status: changedStatus, contributionStep: changedContributionStep } = changedParticipant.data();
1455
- printLog(`Coordinate participant ${exParticipant.id} for ceremony ${ceremonyId}`, LogLevel.DEBUG);
1456
- printLog(`Participant status: ${prevStatus} => ${changedStatus} - Participant contribution step: ${prevContributionStep} => ${changedContributionStep}`, LogLevel.DEBUG);
1497
+ printLog(`Coordinate participant ${exParticipant.id} for ceremony ${ceremonyId}`, LogLevel.INFO);
1498
+ printLog(`Participant status: ${prevStatus} => ${changedStatus} - Participant contribution step: ${prevContributionStep} => ${changedContributionStep}`, LogLevel.INFO);
1457
1499
  // Define pre-conditions.
1458
1500
  const participantReadyToContribute = changedStatus === "READY" /* ParticipantStatus.READY */;
1459
1501
  const participantReadyForFirstContribution = participantReadyToContribute && prevContributionProgress === 0;
@@ -1463,8 +1505,8 @@ const coordinateCeremonyParticipant = functionsV1
1463
1505
  prevContributionProgress !== 0;
1464
1506
  const participantCompletedEveryCircuitContribution = changedStatus === "DONE" /* ParticipantStatus.DONE */ && prevStatus !== "DONE" /* ParticipantStatus.DONE */;
1465
1507
  const participantCompletedContribution = prevContributionProgress === changedContributionProgress &&
1466
- prevStatus === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */ &&
1467
- prevContributionStep === "VERIFYING" /* ParticipantContributionStep.VERIFYING */ &&
1508
+ (prevStatus === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */ ||
1509
+ prevContributionStep === "VERIFYING" /* ParticipantContributionStep.VERIFYING */) &&
1468
1510
  changedStatus === "CONTRIBUTED" /* ParticipantStatus.CONTRIBUTED */ &&
1469
1511
  changedContributionStep === "COMPLETED" /* ParticipantContributionStep.COMPLETED */;
1470
1512
  // Step (2).
@@ -1472,7 +1514,7 @@ const coordinateCeremonyParticipant = functionsV1
1472
1514
  participantResumingContributionAfterTimeout ||
1473
1515
  participantReadyForNextContribution) {
1474
1516
  // Step (2.A).
1475
- printLog(`Participant is ready for first contribution (${participantReadyForFirstContribution}) or for the next contribution (${participantReadyForNextContribution}) or is resuming after a timeout expiration (${participantResumingContributionAfterTimeout})`, LogLevel.DEBUG);
1517
+ printLog(`Participant is ready for first contribution (${participantReadyForFirstContribution}) or for the next contribution (${participantReadyForNextContribution}) or is resuming after a timeout expiration (${participantResumingContributionAfterTimeout})`, LogLevel.INFO);
1476
1518
  // Get the circuit.
1477
1519
  const circuit = await getCircuitDocumentByPosition(ceremonyId, changedContributionProgress);
1478
1520
  // Coordinate.
@@ -1481,7 +1523,7 @@ const coordinateCeremonyParticipant = functionsV1
1481
1523
  }
1482
1524
  else if (participantCompletedContribution || participantCompletedEveryCircuitContribution) {
1483
1525
  // Step (2.B).
1484
- printLog(`Participant completed a contribution (${participantCompletedContribution}) or every contribution for each circuit (${participantCompletedEveryCircuitContribution})`, LogLevel.DEBUG);
1526
+ printLog(`Participant completed a contribution (${participantCompletedContribution}) or every contribution for each circuit (${participantCompletedEveryCircuitContribution})`, LogLevel.INFO);
1485
1527
  // Get the circuit.
1486
1528
  const circuit = await getCircuitDocumentByPosition(ceremonyId, prevContributionProgress);
1487
1529
  // Coordinate.
@@ -1505,11 +1547,9 @@ const checkIfVMRunning = async (ec2, vmInstanceId, attempts = 5) => {
1505
1547
  const isVMRunning = await checkIfRunning(ec2, vmInstanceId);
1506
1548
  if (!isVMRunning) {
1507
1549
  printLog(`VM not running, ${attempts - 1} attempts remaining. Retrying in 1 minute...`, LogLevel.DEBUG);
1508
- return await checkIfVMRunning(ec2, vmInstanceId, attempts - 1);
1509
- }
1510
- else {
1511
- return true;
1550
+ return checkIfVMRunning(ec2, vmInstanceId, attempts - 1);
1512
1551
  }
1552
+ return true;
1513
1553
  };
1514
1554
  /**
1515
1555
  * Verify the contribution of a participant computed while contributing to a specific circuit of a ceremony.
@@ -1537,297 +1577,325 @@ const checkIfVMRunning = async (ec2, vmInstanceId, attempts = 5) => {
1537
1577
  * 1.A.4.C.1) If true, update circuit waiting for queue and average timings accordingly to contribution verification results;
1538
1578
  * 2) Send all updates atomically to the Firestore database.
1539
1579
  */
1540
- const verifycontribution = functionsV2.https.onCall({ memory: "16GiB", timeoutSeconds: 3600, region: "europe-west1" }, async (request) => {
1541
- if (!request.auth || (!request.auth.token.participant && !request.auth.token.coordinator))
1542
- logAndThrowError(SPECIFIC_ERRORS.SE_AUTH_NO_CURRENT_AUTH_USER);
1543
- if (!request.data.ceremonyId ||
1544
- !request.data.circuitId ||
1545
- !request.data.contributorOrCoordinatorIdentifier ||
1546
- !request.data.bucketName)
1547
- logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA);
1548
- if (!process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME ||
1549
- !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION ||
1550
- !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1551
- logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
1552
- // Step (0).
1553
- // Prepare and start timer.
1554
- const verifyContributionTimer = new Timer({ label: commonTerms.cloudFunctionsNames.verifyContribution });
1555
- verifyContributionTimer.start();
1556
- // Get DB.
1557
- const firestore = admin.firestore();
1558
- // Prepare batch of txs.
1559
- const batch = firestore.batch();
1560
- // Extract data.
1561
- const { ceremonyId, circuitId, contributorOrCoordinatorIdentifier, bucketName } = request.data;
1562
- const userId = request.auth?.uid;
1563
- // Look for the ceremony, circuit and participant document.
1564
- const ceremonyDoc = await getDocumentById(commonTerms.collections.ceremonies.name, ceremonyId);
1565
- const circuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1566
- const participantDoc = await getDocumentById(getParticipantsCollectionPath(ceremonyId), userId);
1567
- if (!ceremonyDoc.data() || !circuitDoc.data() || !participantDoc.data())
1568
- logAndThrowError(COMMON_ERRORS.CM_INEXISTENT_DOCUMENT_DATA);
1569
- // Extract documents data.
1570
- const { state } = ceremonyDoc.data();
1571
- const { status, contributions, verificationStartedAt, contributionStartedAt } = participantDoc.data();
1572
- const { waitingQueue, prefix, avgTimings, verification, files } = circuitDoc.data();
1573
- const { completedContributions, failedContributions } = waitingQueue;
1574
- const { contributionComputation: avgContributionComputationTime, fullContribution: avgFullContributionTime, verifyCloudFunction: avgVerifyCloudFunctionTime } = avgTimings;
1575
- const { cfOrVm, vm } = verification;
1576
- // we might not have it if the circuit is not using VM.
1577
- let vmInstanceId = "";
1578
- if (vm)
1579
- vmInstanceId = vm.vmInstanceId;
1580
- // Define pre-conditions.
1581
- const isFinalizing = state === "CLOSED" /* CeremonyState.CLOSED */ && request.auth && request.auth.token.coordinator; // true only when the coordinator verifies the final contributions.
1582
- const isContributing = status === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1583
- const isUsingVM = cfOrVm === "VM" /* CircuitContributionVerificationMechanism.VM */ && !!vmInstanceId;
1584
- // Prepare state.
1585
- let isContributionValid = false;
1586
- let verifyCloudFunctionExecutionTime = 0; // time spent while executing the verify contribution cloud function.
1587
- let verifyCloudFunctionTime = 0; // time spent while executing the core business logic of this cloud function.
1588
- let fullContributionTime = 0; // time spent while doing non-verification contributions tasks (download, compute, upload).
1589
- let contributionComputationTime = 0; // time spent while computing the contribution.
1590
- let lastZkeyBlake2bHash = ""; // the Blake2B hash of the last zKey.
1591
- let verificationTranscriptTemporaryLocalPath = ""; // the local temporary path for the verification transcript.
1592
- let transcriptBlake2bHash = ""; // the Blake2B hash of the verification transcript.
1593
- let commandId = ""; // the unique identifier of the VM command.
1594
- // Derive necessary data.
1595
- const lastZkeyIndex = formatZkeyIndex(completedContributions + 1);
1596
- const verificationTranscriptCompleteFilename = `${prefix}_${isFinalizing
1597
- ? `${contributorOrCoordinatorIdentifier}_${finalContributionIndex}_verification_transcript.log`
1598
- : `${lastZkeyIndex}_${contributorOrCoordinatorIdentifier}_verification_transcript.log`}`;
1599
- const lastZkeyFilename = `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`;
1600
- // Prepare state for VM verification (if needed).
1601
- const ec2 = await createEC2Client();
1602
- const ssm = await createSSMClient();
1603
- // Step (1.A.1).
1604
- // Get storage paths.
1605
- const verificationTranscriptStoragePathAndFilename = getTranscriptStorageFilePath(prefix, verificationTranscriptCompleteFilename);
1606
- // the zKey storage path is required to be sent to the VM api
1607
- const lastZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`);
1608
- const verificationTaskTimer = new Timer({ label: `${ceremonyId}-${circuitId}-${participantDoc.id}` });
1609
- const completeVerification = async () => {
1610
- // Stop verification task timer.
1611
- printLog("Completing verification", LogLevel.DEBUG);
1612
- verificationTaskTimer.stop();
1613
- verifyCloudFunctionExecutionTime = verificationTaskTimer.ms();
1614
- if (isUsingVM) {
1615
- // Create temporary path.
1616
- verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.log`);
1617
- await sleep(1000); // wait 1s for file creation.
1618
- // Download from bucket.
1619
- // nb. the transcript MUST be uploaded from the VM by verification commands.
1620
- await downloadArtifactFromS3Bucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath);
1621
- // Read the verification trascript and validate data by checking for core info ("ZKey Ok!").
1622
- const content = fs.readFileSync(verificationTranscriptTemporaryLocalPath, "utf-8");
1623
- if (content.includes("ZKey Ok!"))
1624
- isContributionValid = true;
1625
- // If the contribution is valid, then format and store the trascript.
1626
- if (isContributionValid) {
1627
- // eslint-disable-next-line no-control-regex
1628
- const updated = content.replace(/\x1b[[0-9;]*m/g, "");
1629
- fs.writeFileSync(verificationTranscriptTemporaryLocalPath, updated);
1580
+ const verifycontribution = functionsV2.https.onCall({ memory: "32GiB", timeoutSeconds: 3600, region: "europe-west1", cpu: 8 }, async (request) => {
1581
+ try {
1582
+ if (!request.auth || (!request.auth.token.participant && !request.auth.token.coordinator))
1583
+ logAndThrowError(SPECIFIC_ERRORS.SE_AUTH_NO_CURRENT_AUTH_USER);
1584
+ if (!request.data.ceremonyId ||
1585
+ !request.data.circuitId ||
1586
+ !request.data.contributorOrCoordinatorIdentifier ||
1587
+ !request.data.bucketName)
1588
+ logAndThrowError(COMMON_ERRORS.CM_MISSING_OR_WRONG_INPUT_DATA);
1589
+ if (!process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME ||
1590
+ !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION ||
1591
+ !process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1592
+ logAndThrowError(COMMON_ERRORS.CM_WRONG_CONFIGURATION);
1593
+ const BUCKET_NAME_REGEX = /^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$/;
1594
+ if (!BUCKET_NAME_REGEX.test(request.data.bucketName))
1595
+ logAndThrowError(SPECIFIC_ERRORS.WRONG_BUCKET_NAME);
1596
+ // Step (0).
1597
+ // Prepare and start timer.
1598
+ const verifyContributionTimer = new Timer({ label: commonTerms.cloudFunctionsNames.verifyContribution });
1599
+ verifyContributionTimer.start();
1600
+ // Get DB.
1601
+ const firestore = admin.firestore();
1602
+ // Prepare batch of txs.
1603
+ const batch = firestore.batch();
1604
+ // Extract data.
1605
+ const { ceremonyId, circuitId, contributorOrCoordinatorIdentifier, bucketName } = request.data;
1606
+ const userId = request.auth?.uid;
1607
+ // Look for the ceremony, circuit and participant document.
1608
+ const ceremonyDoc = await getDocumentById(commonTerms.collections.ceremonies.name, ceremonyId);
1609
+ const circuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1610
+ const participantDoc = await getDocumentById(getParticipantsCollectionPath(ceremonyId), userId);
1611
+ if (!ceremonyDoc.data() || !circuitDoc.data() || !participantDoc.data())
1612
+ logAndThrowError(COMMON_ERRORS.CM_INEXISTENT_DOCUMENT_DATA);
1613
+ // Extract documents data.
1614
+ const { state } = ceremonyDoc.data();
1615
+ const { status, contributions, verificationStartedAt, contributionStartedAt } = participantDoc.data();
1616
+ const { waitingQueue, prefix, avgTimings, verification, files } = circuitDoc.data();
1617
+ const { completedContributions, failedContributions } = waitingQueue;
1618
+ const { contributionComputation: avgContributionComputationTime, fullContribution: avgFullContributionTime, verifyCloudFunction: avgVerifyCloudFunctionTime } = avgTimings;
1619
+ const { cfOrVm, vm } = verification;
1620
+ // we might not have it if the circuit is not using VM.
1621
+ let vmInstanceId = "";
1622
+ if (vm)
1623
+ vmInstanceId = vm.vmInstanceId;
1624
+ // Define pre-conditions.
1625
+ const isFinalizing = state === "CLOSED" /* CeremonyState.CLOSED */ && request.auth && request.auth.token.coordinator; // true only when the coordinator verifies the final contributions.
1626
+ const isContributing = status === "CONTRIBUTING" /* ParticipantStatus.CONTRIBUTING */;
1627
+ const isUsingVM = cfOrVm === "VM" /* CircuitContributionVerificationMechanism.VM */ && !!vmInstanceId;
1628
+ // Prepare state.
1629
+ let isContributionValid = false;
1630
+ let verifyCloudFunctionExecutionTime = 0; // time spent while executing the verify contribution cloud function.
1631
+ let verifyCloudFunctionTime = 0; // time spent while executing the core business logic of this cloud function.
1632
+ let fullContributionTime = 0; // time spent while doing non-verification contributions tasks (download, compute, upload).
1633
+ let contributionComputationTime = 0; // time spent while computing the contribution.
1634
+ let lastZkeyBlake2bHash = ""; // the Blake2B hash of the last zKey.
1635
+ let verificationTranscriptTemporaryLocalPath = ""; // the local temporary path for the verification transcript.
1636
+ let transcriptBlake2bHash = ""; // the Blake2B hash of the verification transcript.
1637
+ let commandId = ""; // the unique identifier of the VM command.
1638
+ // Derive necessary data.
1639
+ const lastZkeyIndex = formatZkeyIndex(completedContributions + 1);
1640
+ const verificationTranscriptCompleteFilename = `${prefix}_${isFinalizing
1641
+ ? `${contributorOrCoordinatorIdentifier}_${finalContributionIndex}_verification_transcript.log`
1642
+ : `${lastZkeyIndex}_${contributorOrCoordinatorIdentifier}_verification_transcript.log`}`;
1643
+ const lastZkeyFilename = `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`;
1644
+ // Prepare state for VM verification (if needed).
1645
+ const ec2 = await createEC2Client();
1646
+ const ssm = await createSSMClient();
1647
+ // Step (1.A.1).
1648
+ // Get storage paths.
1649
+ const verificationTranscriptStoragePathAndFilename = getTranscriptStorageFilePath(prefix, verificationTranscriptCompleteFilename);
1650
+ // the zKey storage path is required to be sent to the VM api
1651
+ const lastZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${isFinalizing ? finalContributionIndex : lastZkeyIndex}.zkey`);
1652
+ const verificationTaskTimer = new Timer({ label: `${ceremonyId}-${circuitId}-${participantDoc.id}` });
1653
+ const dumpLog = async (path) => {
1654
+ printLog(`transcript >>>>>>`, LogLevel.DEBUG);
1655
+ try {
1656
+ const data = await fs.promises.readFile(path, "utf8");
1657
+ printLog(data, LogLevel.DEBUG);
1630
1658
  }
1631
- }
1632
- printLog(`The contribution has been verified - Result ${isContributionValid}`, LogLevel.DEBUG);
1633
- // Create a new contribution document.
1634
- const contributionDoc = await firestore
1635
- .collection(getContributionsCollectionPath(ceremonyId, circuitId))
1636
- .doc()
1637
- .get();
1638
- // Step (1.A.4).
1639
- if (isContributionValid) {
1640
- // Sleep ~3 seconds to wait for verification transcription.
1641
- await sleep(3000);
1642
- // Step (1.A.4.A.1).
1659
+ catch (readError) {
1660
+ printLog(readError, LogLevel.ERROR);
1661
+ }
1662
+ };
1663
+ const completeVerification = async () => {
1664
+ // Stop verification task timer.
1665
+ printLog("Completing verification", LogLevel.DEBUG);
1666
+ verificationTaskTimer.stop();
1667
+ verifyCloudFunctionExecutionTime = verificationTaskTimer.ms();
1643
1668
  if (isUsingVM) {
1644
- // Retrieve the contribution hash from the command output.
1645
- lastZkeyBlake2bHash = await retrieveCommandOutput(ssm, vmInstanceId, commandId);
1646
- const hashRegex = /[a-fA-F0-9]{64}/;
1647
- const match = lastZkeyBlake2bHash.match(hashRegex);
1648
- lastZkeyBlake2bHash = match.at(0);
1649
- // re upload the formatted verification transcript
1650
- await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1669
+ // Create temporary path.
1670
+ verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.log`);
1671
+ await sleep(1000); // wait 1s for file creation.
1672
+ // Download from bucket.
1673
+ // nb. the transcript MUST be uploaded from the VM by verification commands.
1674
+ await downloadArtifactFromS3Bucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath);
1675
+ // Read the verification trascript and validate data by checking for core info ("ZKey Ok!").
1676
+ const content = fs.readFileSync(verificationTranscriptTemporaryLocalPath, "utf-8");
1677
+ if (content.includes("ZKey Ok!"))
1678
+ isContributionValid = true;
1679
+ // If the contribution is valid, then format and store the trascript.
1680
+ if (isContributionValid) {
1681
+ // eslint-disable-next-line no-control-regex
1682
+ const updated = content.replace(/\x1b[[0-9;]*m/g, "");
1683
+ fs.writeFileSync(verificationTranscriptTemporaryLocalPath, updated);
1684
+ }
1685
+ }
1686
+ printLog(`The contribution has been verified - Result ${isContributionValid}`, LogLevel.DEBUG);
1687
+ // Create a new contribution document.
1688
+ const contributionDoc = await firestore
1689
+ .collection(getContributionsCollectionPath(ceremonyId, circuitId))
1690
+ .doc()
1691
+ .get();
1692
+ // Step (1.A.4).
1693
+ if (isContributionValid) {
1694
+ // Sleep ~3 seconds to wait for verification transcription.
1695
+ await sleep(3000);
1696
+ // Step (1.A.4.A.1).
1697
+ if (isUsingVM) {
1698
+ // Retrieve the contribution hash from the command output.
1699
+ lastZkeyBlake2bHash = await retrieveCommandOutput(ssm, vmInstanceId, commandId);
1700
+ const hashRegex = /[a-fA-F0-9]{64}/;
1701
+ const match = lastZkeyBlake2bHash.match(hashRegex);
1702
+ lastZkeyBlake2bHash = match.at(0);
1703
+ // re upload the formatted verification transcript
1704
+ await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1705
+ }
1706
+ else {
1707
+ // Upload verification transcript.
1708
+ /// nb. do not use multi-part upload here due to small file size.
1709
+ await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1710
+ }
1711
+ // Compute verification transcript hash.
1712
+ transcriptBlake2bHash = await blake512FromPath(verificationTranscriptTemporaryLocalPath);
1713
+ // Free resources by unlinking transcript temporary file.
1714
+ fs.unlinkSync(verificationTranscriptTemporaryLocalPath);
1715
+ // Filter participant contributions to find the data related to the one verified.
1716
+ const participantContributions = contributions.filter((contribution) => !!contribution.hash && !!contribution.computationTime && !contribution.doc);
1717
+ /// @dev (there must be only one contribution with an empty 'doc' field).
1718
+ if (participantContributions.length !== 1)
1719
+ logAndThrowError(SPECIFIC_ERRORS.SE_VERIFICATION_NO_PARTICIPANT_CONTRIBUTION_DATA);
1720
+ // Get contribution computation time.
1721
+ contributionComputationTime = contributions.at(0).computationTime;
1722
+ // Step (1.A.4.A.2).
1723
+ batch.create(contributionDoc.ref, {
1724
+ participantId: participantDoc.id,
1725
+ contributionComputationTime,
1726
+ verificationComputationTime: verifyCloudFunctionExecutionTime,
1727
+ zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1728
+ files: {
1729
+ transcriptFilename: verificationTranscriptCompleteFilename,
1730
+ lastZkeyFilename,
1731
+ transcriptStoragePath: verificationTranscriptStoragePathAndFilename,
1732
+ lastZkeyStoragePath,
1733
+ transcriptBlake2bHash,
1734
+ lastZkeyBlake2bHash
1735
+ },
1736
+ verificationSoftware: {
1737
+ name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1738
+ version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1739
+ commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1740
+ },
1741
+ valid: isContributionValid,
1742
+ lastUpdated: getCurrentServerTimestampInMillis()
1743
+ });
1744
+ verifyContributionTimer.stop();
1745
+ verifyCloudFunctionTime = verifyContributionTimer.ms();
1651
1746
  }
1652
1747
  else {
1653
- // Upload verification transcript.
1654
- /// nb. do not use multi-part upload here due to small file size.
1655
- await uploadFileToBucket(bucketName, verificationTranscriptStoragePathAndFilename, verificationTranscriptTemporaryLocalPath, true);
1748
+ // Step (1.A.4.B).
1749
+ // Free-up storage by deleting invalid contribution.
1750
+ await deleteObject(bucketName, lastZkeyStoragePath);
1751
+ // Step (1.A.4.B.1).
1752
+ batch.create(contributionDoc.ref, {
1753
+ participantId: participantDoc.id,
1754
+ verificationComputationTime: verifyCloudFunctionExecutionTime,
1755
+ zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1756
+ verificationSoftware: {
1757
+ name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1758
+ version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1759
+ commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1760
+ },
1761
+ valid: isContributionValid,
1762
+ lastUpdated: getCurrentServerTimestampInMillis()
1763
+ });
1656
1764
  }
1657
- // Compute verification transcript hash.
1658
- transcriptBlake2bHash = await blake512FromPath(verificationTranscriptTemporaryLocalPath);
1659
- // Free resources by unlinking transcript temporary file.
1660
- fs.unlinkSync(verificationTranscriptTemporaryLocalPath);
1661
- // Filter participant contributions to find the data related to the one verified.
1662
- const participantContributions = contributions.filter((contribution) => !!contribution.hash && !!contribution.computationTime && !contribution.doc);
1663
- /// @dev (there must be only one contribution with an empty 'doc' field).
1664
- if (participantContributions.length !== 1)
1665
- logAndThrowError(SPECIFIC_ERRORS.SE_VERIFICATION_NO_PARTICIPANT_CONTRIBUTION_DATA);
1666
- // Get contribution computation time.
1667
- contributionComputationTime = contributions.at(0).computationTime;
1668
- // Step (1.A.4.A.2).
1669
- batch.create(contributionDoc.ref, {
1670
- participantId: participantDoc.id,
1671
- contributionComputationTime,
1672
- verificationComputationTime: verifyCloudFunctionExecutionTime,
1673
- zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1674
- files: {
1675
- transcriptFilename: verificationTranscriptCompleteFilename,
1676
- lastZkeyFilename,
1677
- transcriptStoragePath: verificationTranscriptStoragePathAndFilename,
1678
- lastZkeyStoragePath,
1679
- transcriptBlake2bHash,
1680
- lastZkeyBlake2bHash
1681
- },
1682
- verificationSoftware: {
1683
- name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1684
- version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1685
- commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1686
- },
1687
- valid: isContributionValid,
1688
- lastUpdated: getCurrentServerTimestampInMillis()
1689
- });
1690
- verifyContributionTimer.stop();
1691
- verifyCloudFunctionTime = verifyContributionTimer.ms();
1692
- }
1693
- else {
1694
- // Step (1.A.4.B).
1695
- // Free-up storage by deleting invalid contribution.
1696
- await deleteObject(bucketName, lastZkeyStoragePath);
1697
- // Step (1.A.4.B.1).
1698
- batch.create(contributionDoc.ref, {
1699
- participantId: participantDoc.id,
1700
- verificationComputationTime: verifyCloudFunctionExecutionTime,
1701
- zkeyIndex: isFinalizing ? finalContributionIndex : lastZkeyIndex,
1702
- verificationSoftware: {
1703
- name: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_NAME),
1704
- version: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_VERSION),
1705
- commitHash: String(process.env.CUSTOM_CONTRIBUTION_VERIFICATION_SOFTWARE_COMMIT_HASH)
1706
- },
1707
- valid: isContributionValid,
1708
- lastUpdated: getCurrentServerTimestampInMillis()
1709
- });
1710
- }
1711
- // Stop VM instance
1712
- if (isUsingVM) {
1713
- // using try and catch as the VM stopping function can throw
1714
- // however we want to continue without stopping as the
1715
- // verification was valid, and inform the coordinator
1765
+ // Stop VM instance
1766
+ if (isUsingVM) {
1767
+ // using try and catch as the VM stopping function can throw
1768
+ // however we want to continue without stopping as the
1769
+ // verification was valid, and inform the coordinator
1770
+ try {
1771
+ await stopEC2Instance(ec2, vmInstanceId);
1772
+ }
1773
+ catch (error) {
1774
+ printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${error}`, LogLevel.WARN);
1775
+ }
1776
+ }
1777
+ // Step (1.A.4.C)
1778
+ if (!isFinalizing) {
1779
+ // Step (1.A.4.C.1)
1780
+ // Compute new average contribution/verification time.
1781
+ fullContributionTime = Number(verificationStartedAt) - Number(contributionStartedAt);
1782
+ const newAvgContributionComputationTime = avgContributionComputationTime > 0
1783
+ ? (avgContributionComputationTime + contributionComputationTime) / 2
1784
+ : contributionComputationTime;
1785
+ const newAvgFullContributionTime = avgFullContributionTime > 0
1786
+ ? (avgFullContributionTime + fullContributionTime) / 2
1787
+ : fullContributionTime;
1788
+ const newAvgVerifyCloudFunctionTime = avgVerifyCloudFunctionTime > 0
1789
+ ? (avgVerifyCloudFunctionTime + verifyCloudFunctionTime) / 2
1790
+ : verifyCloudFunctionTime;
1791
+ // Prepare tx to update circuit average contribution/verification time.
1792
+ const updatedCircuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1793
+ const { waitingQueue: updatedWaitingQueue } = updatedCircuitDoc.data();
1794
+ /// @dev this must happen only for valid contributions.
1795
+ batch.update(circuitDoc.ref, {
1796
+ avgTimings: {
1797
+ contributionComputation: isContributionValid
1798
+ ? newAvgContributionComputationTime
1799
+ : avgContributionComputationTime,
1800
+ fullContribution: isContributionValid
1801
+ ? newAvgFullContributionTime
1802
+ : avgFullContributionTime,
1803
+ verifyCloudFunction: isContributionValid
1804
+ ? newAvgVerifyCloudFunctionTime
1805
+ : avgVerifyCloudFunctionTime
1806
+ },
1807
+ waitingQueue: {
1808
+ ...updatedWaitingQueue,
1809
+ completedContributions: isContributionValid
1810
+ ? completedContributions + 1
1811
+ : completedContributions,
1812
+ failedContributions: isContributionValid ? failedContributions : failedContributions + 1
1813
+ },
1814
+ lastUpdated: getCurrentServerTimestampInMillis()
1815
+ });
1816
+ }
1817
+ // Step (2).
1818
+ await batch.commit();
1819
+ printLog(`The contribution #${isFinalizing ? finalContributionIndex : lastZkeyIndex} of circuit ${circuitId} (ceremony ${ceremonyId}) has been verified as ${isContributionValid ? "valid" : "invalid"} for the participant ${participantDoc.id}`, LogLevel.INFO);
1820
+ };
1821
+ // Step (1).
1822
+ if (isContributing || isFinalizing) {
1823
+ // Prepare timer.
1824
+ verificationTaskTimer.start();
1825
+ // Step (1.A.3.0).
1826
+ if (isUsingVM) {
1827
+ printLog(`Starting the VM mechanism`, LogLevel.DEBUG);
1828
+ // Prepare for VM execution.
1829
+ let isVMRunning = false; // true when the VM is up, otherwise false.
1830
+ // Step (1.A.3.1).
1831
+ await startEC2Instance(ec2, vmInstanceId);
1832
+ await sleep(60000); // nb. wait for VM startup (1 mins + retry).
1833
+ // Check if the startup is running.
1834
+ isVMRunning = await checkIfVMRunning(ec2, vmInstanceId);
1835
+ printLog(`VM running: ${isVMRunning}`, LogLevel.DEBUG);
1836
+ // Step (1.A.3.2).
1837
+ // Prepare.
1838
+ const verificationCommand = vmContributionVerificationCommand(bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename);
1839
+ // Run.
1840
+ commandId = await runCommandUsingSSM(ssm, vmInstanceId, verificationCommand);
1841
+ printLog(`Starting the execution of command ${commandId}`, LogLevel.DEBUG);
1842
+ // Step (1.A.3.3).
1843
+ return await waitForVMCommandExecution(ssm, vmInstanceId, commandId)
1844
+ .then(async () => {
1845
+ // Command execution successfully completed.
1846
+ printLog(`Command ${commandId} execution has been successfully completed`, LogLevel.DEBUG);
1847
+ await completeVerification();
1848
+ })
1849
+ .catch((error) => {
1850
+ // Command execution aborted.
1851
+ printLog(`Command ${commandId} execution has been aborted - Error ${error}`, LogLevel.WARN);
1852
+ logAndThrowError(COMMON_ERRORS.CM_INVALID_COMMAND_EXECUTION);
1853
+ });
1854
+ }
1855
+ // CF approach.
1856
+ printLog(`CF mechanism`, LogLevel.DEBUG);
1857
+ const potStoragePath = getPotStorageFilePath(files.potFilename);
1858
+ const firstZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${genesisZkeyIndex}.zkey`);
1859
+ printLog(`pot file: ${potStoragePath}`, LogLevel.DEBUG);
1860
+ printLog(`zkey file: ${firstZkeyStoragePath}`, LogLevel.DEBUG);
1861
+ // Prepare temporary file paths.
1862
+ // (nb. these are needed to download the necessary artifacts for verification from AWS S3).
1863
+ verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(verificationTranscriptCompleteFilename);
1864
+ const potTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.pot`);
1865
+ const firstZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_genesis.zkey`);
1866
+ const lastZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_last.zkey`);
1867
+ printLog(`pot file: ${potTempFilePath}`, LogLevel.DEBUG);
1868
+ printLog(`firstZkey file: ${firstZkeyTempFilePath}`, LogLevel.DEBUG);
1869
+ printLog(`last zkey file: ${lastZkeyTempFilePath}`, LogLevel.DEBUG);
1870
+ // Create and populate transcript.
1871
+ const transcriptLogger = createCustomLoggerForFile(verificationTranscriptTemporaryLocalPath);
1872
+ transcriptLogger.info(`${isFinalizing ? `Final verification` : `Verification`} transcript for ${prefix} circuit Phase 2 contribution.\n${isFinalizing ? `Coordinator ` : `Contributor # ${Number(lastZkeyIndex)}`} (${contributorOrCoordinatorIdentifier})\n`);
1873
+ // Step (1.A.2).
1874
+ await downloadArtifactFromS3Bucket(bucketName, potStoragePath, potTempFilePath);
1875
+ await downloadArtifactFromS3Bucket(bucketName, firstZkeyStoragePath, firstZkeyTempFilePath);
1876
+ await downloadArtifactFromS3Bucket(bucketName, lastZkeyStoragePath, lastZkeyTempFilePath);
1877
+ // Step (1.A.4).
1878
+ isContributionValid = await zKey.verifyFromInit(firstZkeyTempFilePath, potTempFilePath, lastZkeyTempFilePath, transcriptLogger);
1879
+ await dumpLog(verificationTranscriptTemporaryLocalPath);
1880
+ // Compute contribution hash.
1881
+ lastZkeyBlake2bHash = await blake512FromPath(lastZkeyTempFilePath);
1882
+ // Free resources by unlinking temporary folders.
1883
+ // Do not free-up verification transcript path here.
1716
1884
  try {
1717
- await stopEC2Instance(ec2, vmInstanceId);
1885
+ fs.unlinkSync(potTempFilePath);
1886
+ fs.unlinkSync(firstZkeyTempFilePath);
1887
+ fs.unlinkSync(lastZkeyTempFilePath);
1718
1888
  }
1719
1889
  catch (error) {
1720
- printLog(`Error while stopping VM instance ${vmInstanceId} - Error ${error}`, LogLevel.WARN);
1890
+ printLog(`Error while unlinking temporary files - Error ${error}`, LogLevel.WARN);
1721
1891
  }
1892
+ await completeVerification();
1722
1893
  }
1723
- // Step (1.A.4.C)
1724
- if (!isFinalizing) {
1725
- // Step (1.A.4.C.1)
1726
- // Compute new average contribution/verification time.
1727
- fullContributionTime = Number(verificationStartedAt) - Number(contributionStartedAt);
1728
- const newAvgContributionComputationTime = avgContributionComputationTime > 0
1729
- ? (avgContributionComputationTime + contributionComputationTime) / 2
1730
- : contributionComputationTime;
1731
- const newAvgFullContributionTime = avgFullContributionTime > 0
1732
- ? (avgFullContributionTime + fullContributionTime) / 2
1733
- : fullContributionTime;
1734
- const newAvgVerifyCloudFunctionTime = avgVerifyCloudFunctionTime > 0
1735
- ? (avgVerifyCloudFunctionTime + verifyCloudFunctionTime) / 2
1736
- : verifyCloudFunctionTime;
1737
- // Prepare tx to update circuit average contribution/verification time.
1738
- const updatedCircuitDoc = await getDocumentById(getCircuitsCollectionPath(ceremonyId), circuitId);
1739
- const { waitingQueue: updatedWaitingQueue } = updatedCircuitDoc.data();
1740
- /// @dev this must happen only for valid contributions.
1741
- batch.update(circuitDoc.ref, {
1742
- avgTimings: {
1743
- contributionComputation: isContributionValid
1744
- ? newAvgContributionComputationTime
1745
- : avgContributionComputationTime,
1746
- fullContribution: isContributionValid ? newAvgFullContributionTime : avgFullContributionTime,
1747
- verifyCloudFunction: isContributionValid
1748
- ? newAvgVerifyCloudFunctionTime
1749
- : avgVerifyCloudFunctionTime
1750
- },
1751
- waitingQueue: {
1752
- ...updatedWaitingQueue,
1753
- completedContributions: isContributionValid
1754
- ? completedContributions + 1
1755
- : completedContributions,
1756
- failedContributions: isContributionValid ? failedContributions : failedContributions + 1
1757
- },
1758
- lastUpdated: getCurrentServerTimestampInMillis()
1759
- });
1760
- }
1761
- // Step (2).
1762
- await batch.commit();
1763
- printLog(`The contribution #${isFinalizing ? finalContributionIndex : lastZkeyIndex} of circuit ${circuitId} (ceremony ${ceremonyId}) has been verified as ${isContributionValid ? "valid" : "invalid"} for the participant ${participantDoc.id}`, LogLevel.DEBUG);
1764
- };
1765
- // Step (1).
1766
- if (isContributing || isFinalizing) {
1767
- // Prepare timer.
1768
- verificationTaskTimer.start();
1769
- // Step (1.A.3.0).
1770
- if (isUsingVM) {
1771
- printLog(`Starting the VM mechanism`, LogLevel.DEBUG);
1772
- // Prepare for VM execution.
1773
- let isVMRunning = false; // true when the VM is up, otherwise false.
1774
- // Step (1.A.3.1).
1775
- await startEC2Instance(ec2, vmInstanceId);
1776
- await sleep(60000); // nb. wait for VM startup (1 mins + retry).
1777
- // Check if the startup is running.
1778
- isVMRunning = await checkIfVMRunning(ec2, vmInstanceId);
1779
- printLog(`VM running: ${isVMRunning}`, LogLevel.DEBUG);
1780
- // Step (1.A.3.2).
1781
- // Prepare.
1782
- const verificationCommand = vmContributionVerificationCommand(bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename);
1783
- // Run.
1784
- commandId = await runCommandUsingSSM(ssm, vmInstanceId, verificationCommand);
1785
- printLog(`Starting the execution of command ${commandId}`, LogLevel.DEBUG);
1786
- // Step (1.A.3.3).
1787
- return waitForVMCommandExecution(ssm, vmInstanceId, commandId)
1788
- .then(async () => {
1789
- // Command execution successfully completed.
1790
- printLog(`Command ${commandId} execution has been successfully completed`, LogLevel.DEBUG);
1791
- await completeVerification();
1792
- })
1793
- .catch((error) => {
1794
- // Command execution aborted.
1795
- printLog(`Command ${commandId} execution has been aborted - Error ${error}`, LogLevel.DEBUG);
1796
- logAndThrowError(COMMON_ERRORS.CM_INVALID_COMMAND_EXECUTION);
1797
- });
1798
- }
1799
- // CF approach.
1800
- printLog(`CF mechanism`, LogLevel.DEBUG);
1801
- const potStoragePath = getPotStorageFilePath(files.potFilename);
1802
- const firstZkeyStoragePath = getZkeyStorageFilePath(prefix, `${prefix}_${genesisZkeyIndex}.zkey`);
1803
- // Prepare temporary file paths.
1804
- // (nb. these are needed to download the necessary artifacts for verification from AWS S3).
1805
- verificationTranscriptTemporaryLocalPath = createTemporaryLocalPath(verificationTranscriptCompleteFilename);
1806
- const potTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}.pot`);
1807
- const firstZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_genesis.zkey`);
1808
- const lastZkeyTempFilePath = createTemporaryLocalPath(`${circuitId}_${participantDoc.id}_last.zkey`);
1809
- // Create and populate transcript.
1810
- const transcriptLogger = createCustomLoggerForFile(verificationTranscriptTemporaryLocalPath);
1811
- transcriptLogger.info(`${isFinalizing ? `Final verification` : `Verification`} transcript for ${prefix} circuit Phase 2 contribution.\n${isFinalizing ? `Coordinator ` : `Contributor # ${Number(lastZkeyIndex)}`} (${contributorOrCoordinatorIdentifier})\n`);
1812
- // Step (1.A.2).
1813
- await downloadArtifactFromS3Bucket(bucketName, potStoragePath, potTempFilePath);
1814
- await downloadArtifactFromS3Bucket(bucketName, firstZkeyStoragePath, firstZkeyTempFilePath);
1815
- await downloadArtifactFromS3Bucket(bucketName, lastZkeyStoragePath, lastZkeyTempFilePath);
1816
- // Step (1.A.4).
1817
- isContributionValid = await zKey.verifyFromInit(firstZkeyTempFilePath, potTempFilePath, lastZkeyTempFilePath, transcriptLogger);
1818
- // Compute contribution hash.
1819
- lastZkeyBlake2bHash = await blake512FromPath(lastZkeyTempFilePath);
1820
- // Free resources by unlinking temporary folders.
1821
- // Do not free-up verification transcript path here.
1822
- try {
1823
- fs.unlinkSync(potTempFilePath);
1824
- fs.unlinkSync(firstZkeyTempFilePath);
1825
- fs.unlinkSync(lastZkeyTempFilePath);
1826
- }
1827
- catch (error) {
1828
- printLog(`Error while unlinking temporary files - Error ${error}`, LogLevel.WARN);
1829
- }
1830
- await completeVerification();
1894
+ return null;
1895
+ }
1896
+ catch (error) {
1897
+ logAndThrowError(makeError("unknown", error));
1898
+ return null;
1831
1899
  }
1832
1900
  });
1833
1901
  /**
@@ -1838,7 +1906,7 @@ const verifycontribution = functionsV2.https.onCall({ memory: "16GiB", timeoutSe
1838
1906
  const refreshParticipantAfterContributionVerification = functionsV1
1839
1907
  .region("europe-west1")
1840
1908
  .runWith({
1841
- memory: "512MB"
1909
+ memory: "1GB"
1842
1910
  })
1843
1911
  .firestore.document(`/${commonTerms.collections.ceremonies.name}/{ceremony}/${commonTerms.collections.circuits.name}/{circuit}/${commonTerms.collections.contributions.name}/{contributions}`)
1844
1912
  .onCreate(async (createdContribution) => {
@@ -1889,7 +1957,7 @@ const refreshParticipantAfterContributionVerification = functionsV1
1889
1957
  lastUpdated: getCurrentServerTimestampInMillis()
1890
1958
  });
1891
1959
  await batch.commit();
1892
- printLog(`Participant ${participantId} refreshed after contribution ${createdContribution.id} - The participant was finalizing the ceremony ${isFinalizing}`, LogLevel.DEBUG);
1960
+ printLog(`Participant ${participantId} refreshed after contribution ${createdContribution.id} - The participant was finalizing the ceremony? ${isFinalizing}`, LogLevel.INFO);
1893
1961
  });
1894
1962
  /**
1895
1963
  * Finalize the ceremony circuit.
@@ -1899,7 +1967,7 @@ const refreshParticipantAfterContributionVerification = functionsV1
1899
1967
  const finalizeCircuit = functionsV1
1900
1968
  .region("europe-west1")
1901
1969
  .runWith({
1902
- memory: "512MB"
1970
+ memory: "1GB"
1903
1971
  })
1904
1972
  .https.onCall(async (data, context) => {
1905
1973
  if (!context.auth || !context.auth.token.coordinator)
@@ -2043,7 +2111,7 @@ const checkIfBucketIsDedicatedToCeremony = async (bucketName) => {
2043
2111
  const createBucket = functions
2044
2112
  .region("europe-west1")
2045
2113
  .runWith({
2046
- memory: "512MB"
2114
+ memory: "1GB"
2047
2115
  })
2048
2116
  .https.onCall(async (data, context) => {
2049
2117
  // Check if the user has the coordinator claim.
@@ -2054,6 +2122,7 @@ const createBucket = functions
2054
2122
  // Connect to S3 client.
2055
2123
  const S3 = await getS3Client();
2056
2124
  try {
2125
+ printLog(`Creating AWS S3 bucket ${data.bucketName} ...`, LogLevel.LOG);
2057
2126
  // Try to get information about the bucket.
2058
2127
  await S3.send(new HeadBucketCommand({ Bucket: data.bucketName }));
2059
2128
  // If the command succeeded, the bucket exists, throw an error.
@@ -2133,7 +2202,7 @@ const createBucket = functions
2133
2202
  const checkIfObjectExist = functions
2134
2203
  .region("europe-west1")
2135
2204
  .runWith({
2136
- memory: "512MB"
2205
+ memory: "1GB"
2137
2206
  })
2138
2207
  .https.onCall(async (data, context) => {
2139
2208
  // Check if the user has the coordinator claim.
@@ -2179,7 +2248,7 @@ const checkIfObjectExist = functions
2179
2248
  const generateGetObjectPreSignedUrl = functions
2180
2249
  .region("europe-west1")
2181
2250
  .runWith({
2182
- memory: "512MB"
2251
+ memory: "1GB"
2183
2252
  })
2184
2253
  .https.onCall(async (data, context) => {
2185
2254
  if (!context.auth)
@@ -2219,7 +2288,7 @@ const generateGetObjectPreSignedUrl = functions
2219
2288
  const startMultiPartUpload = functions
2220
2289
  .region("europe-west1")
2221
2290
  .runWith({
2222
- memory: "512MB"
2291
+ memory: "2GB"
2223
2292
  })
2224
2293
  .https.onCall(async (data, context) => {
2225
2294
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2274,7 +2343,7 @@ const startMultiPartUpload = functions
2274
2343
  const generatePreSignedUrlsParts = functions
2275
2344
  .region("europe-west1")
2276
2345
  .runWith({
2277
- memory: "512MB",
2346
+ memory: "1GB",
2278
2347
  timeoutSeconds: 300
2279
2348
  })
2280
2349
  .https.onCall(async (data, context) => {
@@ -2335,7 +2404,7 @@ const generatePreSignedUrlsParts = functions
2335
2404
  const completeMultiPartUpload = functions
2336
2405
  .region("europe-west1")
2337
2406
  .runWith({
2338
- memory: "512MB"
2407
+ memory: "2GB"
2339
2408
  })
2340
2409
  .https.onCall(async (data, context) => {
2341
2410
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2384,6 +2453,216 @@ const completeMultiPartUpload = functions
2384
2453
  }
2385
2454
  });
2386
2455
 
2456
+ dotenv.config();
2457
+ const { BANDADA_API_URL, BANDADA_GROUP_ID } = process.env;
2458
+ const bandadaApi = new ApiSdk(BANDADA_API_URL);
2459
+ const bandadaValidateProof = functions
2460
+ .region("europe-west1")
2461
+ .runWith({
2462
+ memory: "1GB"
2463
+ })
2464
+ .https.onCall(async (data) => {
2465
+ const VKEY_DATA = {
2466
+ protocol: "groth16",
2467
+ curve: "bn128",
2468
+ nPublic: 3,
2469
+ vk_alpha_1: [
2470
+ "20491192805390485299153009773594534940189261866228447918068658471970481763042",
2471
+ "9383485363053290200918347156157836566562967994039712273449902621266178545958",
2472
+ "1"
2473
+ ],
2474
+ vk_beta_2: [
2475
+ [
2476
+ "6375614351688725206403948262868962793625744043794305715222011528459656738731",
2477
+ "4252822878758300859123897981450591353533073413197771768651442665752259397132"
2478
+ ],
2479
+ [
2480
+ "10505242626370262277552901082094356697409835680220590971873171140371331206856",
2481
+ "21847035105528745403288232691147584728191162732299865338377159692350059136679"
2482
+ ],
2483
+ ["1", "0"]
2484
+ ],
2485
+ vk_gamma_2: [
2486
+ [
2487
+ "10857046999023057135944570762232829481370756359578518086990519993285655852781",
2488
+ "11559732032986387107991004021392285783925812861821192530917403151452391805634"
2489
+ ],
2490
+ [
2491
+ "8495653923123431417604973247489272438418190587263600148770280649306958101930",
2492
+ "4082367875863433681332203403145435568316851327593401208105741076214120093531"
2493
+ ],
2494
+ ["1", "0"]
2495
+ ],
2496
+ vk_delta_2: [
2497
+ [
2498
+ "3697618915467790705869942236922063775466274665053173890632463796679068973252",
2499
+ "14948341351907992175709156460547989243732741534604949238422596319735704165658"
2500
+ ],
2501
+ [
2502
+ "3028459181652799888716942141752307629938889957960373621898607910203491239368",
2503
+ "11380736494786911280692284374675752681598754560757720296073023058533044108340"
2504
+ ],
2505
+ ["1", "0"]
2506
+ ],
2507
+ vk_alphabeta_12: [
2508
+ [
2509
+ [
2510
+ "2029413683389138792403550203267699914886160938906632433982220835551125967885",
2511
+ "21072700047562757817161031222997517981543347628379360635925549008442030252106"
2512
+ ],
2513
+ [
2514
+ "5940354580057074848093997050200682056184807770593307860589430076672439820312",
2515
+ "12156638873931618554171829126792193045421052652279363021382169897324752428276"
2516
+ ],
2517
+ [
2518
+ "7898200236362823042373859371574133993780991612861777490112507062703164551277",
2519
+ "7074218545237549455313236346927434013100842096812539264420499035217050630853"
2520
+ ]
2521
+ ],
2522
+ [
2523
+ [
2524
+ "7077479683546002997211712695946002074877511277312570035766170199895071832130",
2525
+ "10093483419865920389913245021038182291233451549023025229112148274109565435465"
2526
+ ],
2527
+ [
2528
+ "4595479056700221319381530156280926371456704509942304414423590385166031118820",
2529
+ "19831328484489333784475432780421641293929726139240675179672856274388269393268"
2530
+ ],
2531
+ [
2532
+ "11934129596455521040620786944827826205713621633706285934057045369193958244500",
2533
+ "8037395052364110730298837004334506829870972346962140206007064471173334027475"
2534
+ ]
2535
+ ]
2536
+ ],
2537
+ IC: [
2538
+ [
2539
+ "12951059800758687233303204819298121944551181861362200875212570257618182506154",
2540
+ "5751958719396509176593242305268064754837298673622815112953832050159760501392",
2541
+ "1"
2542
+ ],
2543
+ [
2544
+ "9561588427935871983444704959674198910445823619407211599507208879011862515257",
2545
+ "14576201570478094842467636169770180675293504492823217349086195663150934064643",
2546
+ "1"
2547
+ ],
2548
+ [
2549
+ "4811967233483727873912563574622036989372099129165459921963463310078093941559",
2550
+ "1874883809855039536107616044787862082553628089593740724610117059083415551067",
2551
+ "1"
2552
+ ],
2553
+ [
2554
+ "12252730267779308452229639835051322390696643456253768618882001876621526827161",
2555
+ "7899194018737016222260328309937800777948677569409898603827268776967707173231",
2556
+ "1"
2557
+ ]
2558
+ ]
2559
+ };
2560
+ if (!BANDADA_GROUP_ID)
2561
+ throw new Error("BANDADA_GROUP_ID is not defined in .env");
2562
+ const { proof, publicSignals } = data;
2563
+ const isCorrect = groth16.verify(VKEY_DATA, publicSignals, proof);
2564
+ if (!isCorrect)
2565
+ return {
2566
+ valid: false,
2567
+ message: "Invalid proof",
2568
+ token: ""
2569
+ };
2570
+ const commitment = data.publicSignals[1];
2571
+ const isMember = await bandadaApi.isGroupMember(BANDADA_GROUP_ID, commitment);
2572
+ if (!isMember)
2573
+ return {
2574
+ valid: false,
2575
+ message: "Not a member of the group",
2576
+ token: ""
2577
+ };
2578
+ const auth = getAuth();
2579
+ try {
2580
+ await admin.auth().createUser({
2581
+ uid: commitment
2582
+ });
2583
+ }
2584
+ catch (error) {
2585
+ // if user already exist then just pass
2586
+ if (error.code !== "auth/uid-already-exists") {
2587
+ throw new Error(error);
2588
+ }
2589
+ }
2590
+ const token = await auth.createCustomToken(commitment);
2591
+ return {
2592
+ valid: true,
2593
+ message: "Valid proof and group member",
2594
+ token
2595
+ };
2596
+ });
2597
+
2598
+ dotenv.config();
2599
+ const checkNonceOfSIWEAddress = functions
2600
+ .region("europe-west1")
2601
+ .runWith({ memory: "1GB" })
2602
+ .https.onCall(async (data) => {
2603
+ try {
2604
+ const { auth0Token } = data;
2605
+ const result = (await fetch(`${process.env.AUTH0_APPLICATION_URL}/userinfo`, {
2606
+ method: "GET",
2607
+ headers: {
2608
+ "content-type": "application/json",
2609
+ authorization: `Bearer ${auth0Token}`
2610
+ }
2611
+ }).then((_res) => _res.json()));
2612
+ if (!result.sub) {
2613
+ return {
2614
+ valid: false,
2615
+ message: "No user detected. Please check device flow token"
2616
+ };
2617
+ }
2618
+ const auth = getAuth();
2619
+ // check nonce
2620
+ const parts = result.sub.split("|");
2621
+ const address = decodeURIComponent(parts[2]).split(":")[2];
2622
+ const minimumNonce = Number(process.env.ETH_MINIMUM_NONCE);
2623
+ const nonceBlockHeight = "latest"; // process.env.ETH_NONCE_BLOCK_HEIGHT
2624
+ // look up nonce for address @block
2625
+ let nonceOk = true;
2626
+ if (minimumNonce > 0) {
2627
+ const provider = setEthProvider();
2628
+ console.log(`got provider - block # ${await provider.getBlockNumber()}`);
2629
+ const nonce = await provider.getTransactionCount(address, nonceBlockHeight);
2630
+ console.log(`nonce ${nonce}`);
2631
+ nonceOk = nonce >= minimumNonce;
2632
+ }
2633
+ console.log(`checking nonce ${nonceOk}`);
2634
+ if (!nonceOk) {
2635
+ return {
2636
+ valid: false,
2637
+ message: "Eth address does not meet the nonce requirements"
2638
+ };
2639
+ }
2640
+ try {
2641
+ await admin.auth().createUser({
2642
+ displayName: address,
2643
+ uid: address
2644
+ });
2645
+ }
2646
+ catch (error) {
2647
+ // if user already exist then just pass
2648
+ if (error.code !== "auth/uid-already-exists") {
2649
+ throw new Error(error);
2650
+ }
2651
+ }
2652
+ const token = await auth.createCustomToken(address);
2653
+ return {
2654
+ valid: true,
2655
+ token
2656
+ };
2657
+ }
2658
+ catch (error) {
2659
+ return {
2660
+ valid: false,
2661
+ message: `Something went wrong ${error}`
2662
+ };
2663
+ }
2664
+ });
2665
+
2387
2666
  dotenv.config();
2388
2667
  /**
2389
2668
  * Check and remove the current contributor if it doesn't complete the contribution on the specified amount of time.
@@ -2406,7 +2685,7 @@ dotenv.config();
2406
2685
  const checkAndRemoveBlockingContributor = functions
2407
2686
  .region("europe-west1")
2408
2687
  .runWith({
2409
- memory: "512MB"
2688
+ memory: "1GB"
2410
2689
  })
2411
2690
  .pubsub.schedule("every 1 minutes")
2412
2691
  .onRun(async () => {
@@ -2425,7 +2704,7 @@ const checkAndRemoveBlockingContributor = functions
2425
2704
  // Get ceremony circuits.
2426
2705
  const circuits = await getCeremonyCircuits(ceremony.id);
2427
2706
  // Extract ceremony data.
2428
- const { timeoutMechanismType, penalty } = ceremony.data();
2707
+ const { timeoutType: timeoutMechanismType, penalty } = ceremony.data();
2429
2708
  for (const circuit of circuits) {
2430
2709
  if (!circuit.data())
2431
2710
  // Do not use `logAndThrowError` method to avoid the function to exit before checking every ceremony.
@@ -2438,7 +2717,7 @@ const checkAndRemoveBlockingContributor = functions
2438
2717
  // Case (A).
2439
2718
  if (!currentContributor)
2440
2719
  // Do not use `logAndThrowError` method to avoid the function to exit before checking every ceremony.
2441
- printLog(`No current contributor for circuit ${circuit.id} - ceremony ${ceremony.id}`, LogLevel.WARN);
2720
+ printLog(`No current contributor for circuit ${circuit.id} - ceremony ${ceremony.id}`, LogLevel.DEBUG);
2442
2721
  else if (avgFullContribution === 0 &&
2443
2722
  avgContributionComputation === 0 &&
2444
2723
  avgVerifyCloudFunction === 0 &&
@@ -2464,7 +2743,7 @@ const checkAndRemoveBlockingContributor = functions
2464
2743
  ? Number(contributionStartedAt) +
2465
2744
  Number(avgFullContribution) +
2466
2745
  Number(timeoutDynamicThreshold)
2467
- : Number(contributionStartedAt) + Number(fixedTimeWindow) * 60000; // * 60000 = convert minutes to millis.
2746
+ : (Number(contributionStartedAt) + Number(fixedTimeWindow)) * 60000; // * 60000 = convert minutes to millis.
2468
2747
  // Case (D).
2469
2748
  const timeoutExpirationDateInMsForVerificationCloudFunction = contributionStep === "VERIFYING" /* ParticipantContributionStep.VERIFYING */ &&
2470
2749
  !!verificationStartedAt
@@ -2475,17 +2754,18 @@ const checkAndRemoveBlockingContributor = functions
2475
2754
  if (timeoutExpirationDateInMsForBlockingContributor < currentServerTimestamp &&
2476
2755
  (contributionStep === "DOWNLOADING" /* ParticipantContributionStep.DOWNLOADING */ ||
2477
2756
  contributionStep === "COMPUTING" /* ParticipantContributionStep.COMPUTING */ ||
2478
- contributionStep === "UPLOADING" /* ParticipantContributionStep.UPLOADING */))
2757
+ contributionStep === "UPLOADING" /* ParticipantContributionStep.UPLOADING */ ||
2758
+ contributionStep === "COMPLETED" /* ParticipantContributionStep.COMPLETED */))
2479
2759
  timeoutType = "BLOCKING_CONTRIBUTION" /* TimeoutType.BLOCKING_CONTRIBUTION */;
2480
2760
  if (timeoutExpirationDateInMsForVerificationCloudFunction > 0 &&
2481
2761
  timeoutExpirationDateInMsForVerificationCloudFunction < currentServerTimestamp &&
2482
2762
  contributionStep === "VERIFYING" /* ParticipantContributionStep.VERIFYING */)
2483
2763
  timeoutType = "BLOCKING_CLOUD_FUNCTION" /* TimeoutType.BLOCKING_CLOUD_FUNCTION */;
2484
- printLog(`${timeoutType} detected for circuit ${circuit.id} - ceremony ${ceremony.id}`, LogLevel.DEBUG);
2485
2764
  if (!timeoutType)
2486
2765
  // Do not use `logAndThrowError` method to avoid the function to exit before checking every ceremony.
2487
- printLog(`No timeout for circuit ${circuit.id} - ceremony ${ceremony.id}`, LogLevel.WARN);
2766
+ printLog(`No timeout for circuit ${circuit.id} - ceremony ${ceremony.id}`, LogLevel.DEBUG);
2488
2767
  else {
2768
+ printLog(`${timeoutType} detected for circuit ${circuit.id} - ceremony ${ceremony.id}`, LogLevel.WARN);
2489
2769
  // Case (E).
2490
2770
  let nextCurrentContributorId = "";
2491
2771
  // Prepare Firestore batch of txs.
@@ -2536,7 +2816,7 @@ const checkAndRemoveBlockingContributor = functions
2536
2816
  });
2537
2817
  // Send atomic update for Firestore.
2538
2818
  await batch.commit();
2539
- printLog(`The contributor ${participant.id} has been identified as potential blocking contributor. A timeout of type ${timeoutType} has been triggered w/ a penalty of ${timeoutPenaltyInMs} ms`, LogLevel.DEBUG);
2819
+ printLog(`The contributor ${participant.id} has been identified as potential blocking contributor. A timeout of type ${timeoutType} has been triggered w/ a penalty of ${timeoutPenaltyInMs} ms`, LogLevel.WARN);
2540
2820
  }
2541
2821
  }
2542
2822
  }
@@ -2552,7 +2832,7 @@ const checkAndRemoveBlockingContributor = functions
2552
2832
  const resumeContributionAfterTimeoutExpiration = functions
2553
2833
  .region("europe-west1")
2554
2834
  .runWith({
2555
- memory: "512MB"
2835
+ memory: "1GB"
2556
2836
  })
2557
2837
  .https.onCall(async (data, context) => {
2558
2838
  if (!context.auth || (!context.auth.token.participant && !context.auth.token.coordinator))
@@ -2585,4 +2865,4 @@ const resumeContributionAfterTimeoutExpiration = functions
2585
2865
 
2586
2866
  admin.initializeApp();
2587
2867
 
2588
- export { checkAndPrepareCoordinatorForFinalization, checkAndRemoveBlockingContributor, checkIfObjectExist, checkParticipantForCeremony, completeMultiPartUpload, coordinateCeremonyParticipant, createBucket, finalizeCeremony, finalizeCircuit, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, initEmptyWaitingQueueForCircuit, permanentlyStoreCurrentContributionTimeAndHash, processSignUpWithCustomClaims, progressToNextCircuitForContribution, progressToNextContributionStep, refreshParticipantAfterContributionVerification, registerAuthUser, resumeContributionAfterTimeoutExpiration, setupCeremony, startCeremony, startMultiPartUpload, stopCeremony, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, verifycontribution };
2868
+ export { bandadaValidateProof, checkAndPrepareCoordinatorForFinalization, checkAndRemoveBlockingContributor, checkIfObjectExist, checkNonceOfSIWEAddress, checkParticipantForCeremony, completeMultiPartUpload, coordinateCeremonyParticipant, createBucket, finalizeCeremony, finalizeCircuit, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, initEmptyWaitingQueueForCircuit, permanentlyStoreCurrentContributionTimeAndHash, processSignUpWithCustomClaims, progressToNextCircuitForContribution, progressToNextContributionStep, refreshParticipantAfterContributionVerification, registerAuthUser, resumeContributionAfterTimeoutExpiration, setupCeremony, startCeremony, startMultiPartUpload, stopCeremony, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, verifycontribution };