@devtion/actions 0.0.0-7e983e3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +83 -0
  3. package/dist/index.mjs +2608 -0
  4. package/dist/index.node.js +2714 -0
  5. package/dist/types/hardhat.config.d.ts +6 -0
  6. package/dist/types/hardhat.config.d.ts.map +1 -0
  7. package/dist/types/src/helpers/authentication.d.ts +21 -0
  8. package/dist/types/src/helpers/authentication.d.ts.map +1 -0
  9. package/dist/types/src/helpers/constants.d.ts +194 -0
  10. package/dist/types/src/helpers/constants.d.ts.map +1 -0
  11. package/dist/types/src/helpers/contracts.d.ts +57 -0
  12. package/dist/types/src/helpers/contracts.d.ts.map +1 -0
  13. package/dist/types/src/helpers/crypto.d.ts +27 -0
  14. package/dist/types/src/helpers/crypto.d.ts.map +1 -0
  15. package/dist/types/src/helpers/database.d.ts +105 -0
  16. package/dist/types/src/helpers/database.d.ts.map +1 -0
  17. package/dist/types/src/helpers/functions.d.ts +145 -0
  18. package/dist/types/src/helpers/functions.d.ts.map +1 -0
  19. package/dist/types/src/helpers/security.d.ts +10 -0
  20. package/dist/types/src/helpers/security.d.ts.map +1 -0
  21. package/dist/types/src/helpers/services.d.ts +38 -0
  22. package/dist/types/src/helpers/services.d.ts.map +1 -0
  23. package/dist/types/src/helpers/storage.d.ts +121 -0
  24. package/dist/types/src/helpers/storage.d.ts.map +1 -0
  25. package/dist/types/src/helpers/tasks.d.ts +2 -0
  26. package/dist/types/src/helpers/tasks.d.ts.map +1 -0
  27. package/dist/types/src/helpers/utils.d.ts +139 -0
  28. package/dist/types/src/helpers/utils.d.ts.map +1 -0
  29. package/dist/types/src/helpers/verification.d.ts +95 -0
  30. package/dist/types/src/helpers/verification.d.ts.map +1 -0
  31. package/dist/types/src/helpers/vm.d.ts +112 -0
  32. package/dist/types/src/helpers/vm.d.ts.map +1 -0
  33. package/dist/types/src/index.d.ts +15 -0
  34. package/dist/types/src/index.d.ts.map +1 -0
  35. package/dist/types/src/types/enums.d.ts +133 -0
  36. package/dist/types/src/types/enums.d.ts.map +1 -0
  37. package/dist/types/src/types/index.d.ts +603 -0
  38. package/dist/types/src/types/index.d.ts.map +1 -0
  39. package/package.json +87 -0
  40. package/src/helpers/authentication.ts +37 -0
  41. package/src/helpers/constants.ts +312 -0
  42. package/src/helpers/contracts.ts +268 -0
  43. package/src/helpers/crypto.ts +55 -0
  44. package/src/helpers/database.ts +221 -0
  45. package/src/helpers/functions.ts +438 -0
  46. package/src/helpers/security.ts +86 -0
  47. package/src/helpers/services.ts +83 -0
  48. package/src/helpers/storage.ts +329 -0
  49. package/src/helpers/tasks.ts +56 -0
  50. package/src/helpers/utils.ts +743 -0
  51. package/src/helpers/verification.ts +354 -0
  52. package/src/helpers/vm.ts +392 -0
  53. package/src/index.ts +162 -0
  54. package/src/types/enums.ts +141 -0
  55. package/src/types/index.ts +650 -0
package/dist/index.mjs ADDED
@@ -0,0 +1,2608 @@
1
+ /**
2
+ * @module @p0tion/actions
3
+ * @version 1.0.5
4
+ * @file A set of actions and helpers for CLI commands
5
+ * @copyright Ethereum Foundation 2022
6
+ * @license MIT
7
+ * @see [Github]{@link https://github.com/privacy-scaling-explorations/p0tion}
8
+ */
9
+ import mime from 'mime-types';
10
+ import fs, { createWriteStream } from 'fs';
11
+ import fetch from '@adobe/node-fetch-retry';
12
+ import https from 'https';
13
+ import { httpsCallable, httpsCallableFromURL, getFunctions } from 'firebase/functions';
14
+ import { onSnapshot, query, collection, getDocs, doc, getDoc, where, Timestamp, getFirestore } from 'firebase/firestore';
15
+ import { zKey, groth16 } from 'snarkjs';
16
+ import crypto from 'crypto';
17
+ import blake from 'blakejs';
18
+ import { utils } from 'ffjavascript';
19
+ import winston from 'winston';
20
+ import { S3Client, HeadObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';
21
+ import { pipeline, Readable } from 'stream';
22
+ import { promisify } from 'util';
23
+ import { initializeApp } from 'firebase/app';
24
+ import { signInWithCredential, initializeAuth, getAuth } from 'firebase/auth';
25
+ import { ContractFactory } from 'ethers';
26
+ import solc from 'solc';
27
+ import { EC2Client, RunInstancesCommand, DescribeInstanceStatusCommand, StartInstancesCommand, StopInstancesCommand, TerminateInstancesCommand } from '@aws-sdk/client-ec2';
28
+ import { SSMClient, SendCommandCommand, GetCommandInvocationCommand } from '@aws-sdk/client-ssm';
29
+ import dotenv from 'dotenv';
30
+
31
+ // Main part for the Hermez Phase 1 Trusted Setup URLs to download PoT files.
32
+ const potFileDownloadMainUrl = `https://hermez.s3-eu-west-1.amazonaws.com/`;
33
+ // Main part for the Hermez Phase 1 Trusted Setup PoT files to be downloaded.
34
+ const potFilenameTemplate = `powersOfTau28_hez_final_`;
35
+ // The genesis zKey index.
36
+ const genesisZkeyIndex = `00000`;
37
+ // The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
38
+ const numExpIterations = 10;
39
+ // The Solidity version of the Verifier Smart Contract generated with SnarkJS when finalizing the ceremony.
40
+ const solidityVersion = "0.8.0";
41
+ // The index of the final zKey.
42
+ const finalContributionIndex = "final";
43
+ // The acronym for verification key.
44
+ const verificationKeyAcronym = "vkey";
45
+ // The acronym for Verifier smart contract.
46
+ const verifierSmartContractAcronym = "verifier";
47
+ // The tag for ec2 instances.
48
+ const ec2InstanceTag = "p0tionec2instance";
49
+ // The name of the VM startup script file.
50
+ const vmBootstrapScriptFilename = "bootstrap.sh";
51
+ /**
52
+ * Define the supported VM configuration types.
53
+ * @dev the VM configurations can be retrieved at https://aws.amazon.com/ec2/instance-types/
54
+ * The on-demand prices for the configurations can be retrieved at https://aws.amazon.com/ec2/pricing/on-demand/.
55
+ * @notice the price has to be intended as on-demand hourly billing usage for Linux OS
56
+ * VMs located in the us-east-1 region expressed in USD.
57
+ */
58
+ const vmConfigurationTypes = {
59
+ t3_large: {
60
+ type: "t3.large",
61
+ ram: 8,
62
+ vcpu: 2,
63
+ pricePerHour: 0.08352
64
+ },
65
+ t3_2xlarge: {
66
+ type: "t3.2xlarge",
67
+ ram: 32,
68
+ vcpu: 8,
69
+ pricePerHour: 0.3328
70
+ },
71
+ c5_9xlarge: {
72
+ type: "c5.9xlarge",
73
+ ram: 72,
74
+ vcpu: 36,
75
+ pricePerHour: 1.53
76
+ },
77
+ c5_18xlarge: {
78
+ type: "c5.18xlarge",
79
+ ram: 144,
80
+ vcpu: 72,
81
+ pricePerHour: 3.06
82
+ },
83
+ c5a_8xlarge: {
84
+ type: "c5a.8xlarge",
85
+ ram: 64,
86
+ vcpu: 32,
87
+ pricePerHour: 1.232
88
+ },
89
+ c6id_32xlarge: {
90
+ type: "c6id.32xlarge",
91
+ ram: 256,
92
+ vcpu: 128,
93
+ pricePerHour: 6.4512
94
+ },
95
+ m6a_32xlarge: {
96
+ type: "m6a.32xlarge",
97
+ ram: 512,
98
+ vcpu: 128,
99
+ pricePerHour: 5.5296
100
+ }
101
+ };
102
+ /**
103
+ * Define the PPoT Trusted Setup ceremony output powers of tau files size (in GB).
104
+ * @dev the powers of tau files can be retrieved at https://github.com/weijiekoh/perpetualpowersoftau
105
+ */
106
+ const powersOfTauFiles = [
107
+ {
108
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_01.ptau",
109
+ size: 0.000084
110
+ },
111
+ {
112
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_02.ptau",
113
+ size: 0.000086
114
+ },
115
+ {
116
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_03.ptau",
117
+ size: 0.000091
118
+ },
119
+ {
120
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_04.ptau",
121
+ size: 0.0001
122
+ },
123
+ {
124
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_05.ptau",
125
+ size: 0.000117
126
+ },
127
+ {
128
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_06.ptau",
129
+ size: 0.000153
130
+ },
131
+ {
132
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_07.ptau",
133
+ size: 0.000225
134
+ },
135
+ {
136
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_08.ptau",
137
+ size: 0.0004
138
+ },
139
+ {
140
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_09.ptau",
141
+ size: 0.000658
142
+ },
143
+ {
144
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_10.ptau",
145
+ size: 0.0013
146
+ },
147
+ {
148
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_11.ptau",
149
+ size: 0.0023
150
+ },
151
+ {
152
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_12.ptau",
153
+ size: 0.0046
154
+ },
155
+ {
156
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_13.ptau",
157
+ size: 0.0091
158
+ },
159
+ {
160
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_14.ptau",
161
+ size: 0.0181
162
+ },
163
+ {
164
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_15.ptau",
165
+ size: 0.0361
166
+ },
167
+ {
168
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_16.ptau",
169
+ size: 0.0721
170
+ },
171
+ {
172
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_17.ptau",
173
+ size: 0.144
174
+ },
175
+ {
176
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_18.ptau",
177
+ size: 0.288
178
+ },
179
+ {
180
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_19.ptau",
181
+ size: 0.576
182
+ },
183
+ {
184
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_20.ptau",
185
+ size: 1.1
186
+ },
187
+ {
188
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_21.ptau",
189
+ size: 2.3
190
+ },
191
+ {
192
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_22.ptau",
193
+ size: 4.5
194
+ },
195
+ {
196
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_23.ptau",
197
+ size: 9.0
198
+ },
199
+ {
200
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_24.ptau",
201
+ size: 18.0
202
+ },
203
+ {
204
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_25.ptau",
205
+ size: 36.0
206
+ },
207
+ {
208
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_26.ptau",
209
+ size: 72.0
210
+ },
211
+ {
212
+ ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_27.ptau",
213
+ size: 144.0
214
+ }
215
+ ];
216
+ /**
217
+ * Commonly used terms.
218
+ * @dev useful for creating paths, references to collections and queries, object properties, folder names, and so on.
219
+ */
220
+ const commonTerms = {
221
+ collections: {
222
+ users: {
223
+ name: "users",
224
+ fields: {
225
+ creationTime: "creationTime",
226
+ displayName: "displayName",
227
+ email: "email",
228
+ emailVerified: "emailVerified",
229
+ lastSignInTime: "lastSignInTime",
230
+ lastUpdated: "lastUpdated",
231
+ name: "name",
232
+ photoURL: "photoURL"
233
+ }
234
+ },
235
+ participants: {
236
+ name: "participants",
237
+ fields: {
238
+ contributionProgress: "contributionProgress",
239
+ contributionStartedAt: "contributionStartedAt",
240
+ contributionStep: "contributionStep",
241
+ contributions: "contributions",
242
+ lastUpdated: "lastUpdated",
243
+ status: "status",
244
+ verificationStartedAt: "verificationStartedAt"
245
+ }
246
+ },
247
+ ceremonies: {
248
+ name: "ceremonies",
249
+ fields: {
250
+ coordinatorId: "coordinatorId",
251
+ description: "description",
252
+ endDate: "endDate",
253
+ lastUpdated: "lastUpdated",
254
+ penalty: "penalty",
255
+ prefix: "prefix",
256
+ startDate: "startDate",
257
+ state: "state",
258
+ timeoutType: "timeoutType",
259
+ title: "title",
260
+ type: "type"
261
+ }
262
+ },
263
+ circuits: {
264
+ name: "circuits",
265
+ fields: {
266
+ avgTimings: "avgTimings",
267
+ compiler: "compiler",
268
+ description: "description",
269
+ files: "files",
270
+ lastUpdated: "lastUpdated",
271
+ metadata: "metadata",
272
+ name: "name",
273
+ prefix: "prefix",
274
+ sequencePosition: "sequencePosition",
275
+ template: "template",
276
+ timeoutMaxContributionWaitingTime: "timeoutMaxContributionWaitingTime",
277
+ waitingQueue: "waitingQueue",
278
+ zKeySizeInBytes: "zKeySizeInBytes",
279
+ verification: "verification"
280
+ }
281
+ },
282
+ contributions: {
283
+ name: "contributions",
284
+ fields: {
285
+ contributionComputationTime: "contributionComputationTime",
286
+ files: "files",
287
+ lastUpdated: "lastUpdated",
288
+ participantId: "participantId",
289
+ valid: "valid",
290
+ verificationComputationTime: "verificationComputationTime",
291
+ zkeyIndex: "zKeyIndex"
292
+ }
293
+ },
294
+ timeouts: {
295
+ name: "timeouts",
296
+ fields: {
297
+ type: "type",
298
+ startDate: "startDate",
299
+ endDate: "endDate"
300
+ }
301
+ }
302
+ },
303
+ foldersAndPathsTerms: {
304
+ output: `output`,
305
+ setup: `setup`,
306
+ contribute: `contribute`,
307
+ finalize: `finalize`,
308
+ pot: `pot`,
309
+ zkeys: `zkeys`,
310
+ wasm: `wasm`,
311
+ vkeys: `vkeys`,
312
+ metadata: `metadata`,
313
+ transcripts: `transcripts`,
314
+ attestation: `attestation`,
315
+ verifiers: `verifiers`
316
+ },
317
+ cloudFunctionsNames: {
318
+ setupCeremony: "setupCeremony",
319
+ checkParticipantForCeremony: "checkParticipantForCeremony",
320
+ progressToNextCircuitForContribution: "progressToNextCircuitForContribution",
321
+ resumeContributionAfterTimeoutExpiration: "resumeContributionAfterTimeoutExpiration",
322
+ createBucket: "createBucket",
323
+ generateGetObjectPreSignedUrl: "generateGetObjectPreSignedUrl",
324
+ progressToNextContributionStep: "progressToNextContributionStep",
325
+ permanentlyStoreCurrentContributionTimeAndHash: "permanentlyStoreCurrentContributionTimeAndHash",
326
+ startMultiPartUpload: "startMultiPartUpload",
327
+ temporaryStoreCurrentContributionMultiPartUploadId: "temporaryStoreCurrentContributionMultiPartUploadId",
328
+ temporaryStoreCurrentContributionUploadedChunkData: "temporaryStoreCurrentContributionUploadedChunkData",
329
+ generatePreSignedUrlsParts: "generatePreSignedUrlsParts",
330
+ completeMultiPartUpload: "completeMultiPartUpload",
331
+ checkIfObjectExist: "checkIfObjectExist",
332
+ verifyContribution: "verifycontribution",
333
+ checkAndPrepareCoordinatorForFinalization: "checkAndPrepareCoordinatorForFinalization",
334
+ finalizeCircuit: "finalizeCircuit",
335
+ finalizeCeremony: "finalizeCeremony",
336
+ downloadCircuitArtifacts: "downloadCircuitArtifacts",
337
+ transferObject: "transferObject",
338
+ }
339
+ };
340
+
341
+ /**
342
+ * Setup a new ceremony by calling the related cloud function.
343
+ * @param functions <Functions> - the Firebase cloud functions object instance.
344
+ * @param ceremonyInputData <CeremonyInputData> - the input data of the ceremony.
345
+ * @param ceremonyPrefix <string> - the prefix of the ceremony.
346
+ * @param circuits <Circuit[]> - the circuits data.
347
+ * @returns Promise<string> - the unique identifier of the created ceremony.
348
+ */
349
+ const setupCeremony = async (functions, ceremonyInputData, ceremonyPrefix, circuits) => {
350
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.setupCeremony);
351
+ const { data: ceremonyId } = await cf({
352
+ ceremonyInputData,
353
+ ceremonyPrefix,
354
+ circuits
355
+ });
356
+ return String(ceremonyId);
357
+ };
358
+ /**
359
+ * Check the user's current participant status for the ceremony
360
+ * @param functions <Functions> - the Firebase cloud functions object instance.
361
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
362
+ * @returns <boolean> - true when participant is able to contribute; otherwise false.
363
+ */
364
+ const checkParticipantForCeremony = async (functions, ceremonyId) => {
365
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.checkParticipantForCeremony);
366
+ const { data } = await cf({ ceremonyId });
367
+ return data;
368
+ };
369
+ /**
370
+ * Progress the participant to the next circuit preparing for the next contribution.
371
+ * @param functions <Functions> - the Firebase cloud functions object instance.
372
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
373
+ */
374
+ const progressToNextCircuitForContribution = async (functions, ceremonyId) => {
375
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.progressToNextCircuitForContribution);
376
+ await cf({
377
+ ceremonyId
378
+ });
379
+ };
380
+ /**
381
+ * Resume the contributor circuit contribution from scratch after the timeout expiration.
382
+ * @param functions <Functions> - the Firebase cloud functions object instance.
383
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
384
+ */
385
+ const resumeContributionAfterTimeoutExpiration = async (functions, ceremonyId) => {
386
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.resumeContributionAfterTimeoutExpiration);
387
+ await cf({
388
+ ceremonyId
389
+ });
390
+ };
391
+ /**
392
+ * Make a request to create a new AWS S3 bucket for a ceremony.
393
+ * @param functions <Functions> - the Firebase cloud functions object instance.
394
+ * @param bucketName <string> - the name of the ceremony bucket.
395
+ */
396
+ const createS3Bucket = async (functions, bucketName) => {
397
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.createBucket);
398
+ await cf({ bucketName });
399
+ };
400
+ /**
401
+ * Return a pre-signed url for a given object contained inside the provided AWS S3 bucket in order to perform a GET request.
402
+ * @param functions <Functions> - the Firebase cloud functions object instance.
403
+ * @param bucketName <string> - the name of the ceremony bucket.
404
+ * @param objectKey <string> - the storage path that locates the artifact to be downloaded in the bucket.
405
+ * @returns <Promise<string>> - the pre-signed url w/ GET request permissions for specified object key.
406
+ */
407
+ const generateGetObjectPreSignedUrl = async (functions, bucketName, objectKey) => {
408
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.generateGetObjectPreSignedUrl);
409
+ const { data: getPreSignedUrl } = await cf({
410
+ bucketName,
411
+ objectKey
412
+ });
413
+ return String(getPreSignedUrl);
414
+ };
415
+ /**
416
+ * Progress the participant to the next circuit preparing for the next contribution.
417
+ * @param functions <Functions> - the Firebase cloud functions object instance.
418
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
419
+ */
420
+ const progressToNextContributionStep = async (functions, ceremonyId) => {
421
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.progressToNextContributionStep);
422
+ await cf({
423
+ ceremonyId
424
+ });
425
+ };
426
+ /**
427
+ * Write the information about current contribution hash and computation time for the current contributor.
428
+ * @param functions <Functions> - the Firebase cloud functions object instance.
429
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
430
+ * @param contributionComputationTime <number> - the time when it was computed
431
+ * @param contributingHash <string> - the hash of the contribution
432
+ */
433
+ const permanentlyStoreCurrentContributionTimeAndHash = async (functions, ceremonyId, contributionComputationTime, contributionHash) => {
434
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.permanentlyStoreCurrentContributionTimeAndHash);
435
+ await cf({
436
+ ceremonyId,
437
+ contributionComputationTime,
438
+ contributionHash
439
+ });
440
+ };
441
+ /**
442
+ * Start a new multi-part upload for a specific object in the given AWS S3 bucket.
443
+ * @param functions <Functions> - the Firebase cloud functions object instance.
444
+ * @param bucketName <string> - the name of the ceremony bucket.
445
+ * @param objectKey <string> - the storage path that locates the artifact to be downloaded in the bucket.
446
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
447
+ * @returns Promise<string> - the multi-part upload id.
448
+ */
449
+ const openMultiPartUpload = async (functions, bucketName, objectKey, ceremonyId) => {
450
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.startMultiPartUpload);
451
+ const { data: uploadId } = await cf({
452
+ bucketName,
453
+ objectKey,
454
+ ceremonyId
455
+ });
456
+ return String(uploadId);
457
+ };
458
+ /**
459
+ * Write temporary information about the unique identifier about the opened multi-part upload to eventually resume the contribution.
460
+ * @param functions <Functions> - the Firebase cloud functions object instance.
461
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
462
+ * @param uploadId <string> - the unique identifier of the multi-part upload.
463
+ */
464
+ const temporaryStoreCurrentContributionMultiPartUploadId = async (functions, ceremonyId, uploadId) => {
465
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.temporaryStoreCurrentContributionMultiPartUploadId);
466
+ await cf({
467
+ ceremonyId,
468
+ uploadId
469
+ });
470
+ };
471
+ /**
472
+ * Write temporary information about the etags and part numbers for each uploaded chunk in order to make the upload resumable from last chunk.
473
+ * @param functions <Functions> - the Firebase cloud functions object instance.
474
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
475
+ * @param chunk <ETagWithPartNumber> - the information about the already uploaded chunk.
476
+ */
477
+ const temporaryStoreCurrentContributionUploadedChunkData = async (functions, ceremonyId, chunk) => {
478
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.temporaryStoreCurrentContributionUploadedChunkData);
479
+ await cf({
480
+ ceremonyId,
481
+ chunk
482
+ });
483
+ };
484
+ /**
485
+ * Generate a new pre-signed url for each chunk related to a started multi-part upload.
486
+ * @param functions <Functions> - the Firebase cloud functions object instance.
487
+ * @param bucketName <string> - the name of the ceremony bucket.
488
+ * @param objectKey <string> - the storage path that locates the artifact to be downloaded in the bucket.
489
+ * @param uploadId <string> - the unique identifier of the multi-part upload.
490
+ * @param numberOfChunks <number> - the number of pre-signed urls to be generated.
491
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
492
+ * @returns Promise<Array<string>> - the set of pre-signed urls (one for each chunk).
493
+ */
494
+ const generatePreSignedUrlsParts = async (functions, bucketName, objectKey, uploadId, numberOfParts, ceremonyId) => {
495
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.generatePreSignedUrlsParts);
496
+ const { data: chunksUrls } = await cf({
497
+ bucketName,
498
+ objectKey,
499
+ uploadId,
500
+ numberOfParts,
501
+ ceremonyId
502
+ });
503
+ return chunksUrls;
504
+ };
505
+ /**
506
+ * Complete a multi-part upload for a specific object in the given AWS S3 bucket.
507
+ * @param functions <Functions> - the Firebase cloud functions object instance.
508
+ * @param bucketName <string> - the name of the ceremony bucket.
509
+ * @param objectKey <string> - the storage path that locates the artifact to be downloaded in the bucket.
510
+ * @param uploadId <string> - the unique identifier of the multi-part upload.
511
+ * @param parts Array<ETagWithPartNumber> - the completed .
512
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
513
+ * @returns Promise<string> - the location of the uploaded ceremony artifact.
514
+ */
515
+ const completeMultiPartUpload = async (functions, bucketName, objectKey, uploadId, parts, ceremonyId) => {
516
+ // Call completeMultiPartUpload() Cloud Function.
517
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.completeMultiPartUpload);
518
+ const { data: location } = await cf({
519
+ bucketName,
520
+ objectKey,
521
+ uploadId,
522
+ parts,
523
+ ceremonyId
524
+ });
525
+ return String(location);
526
+ };
527
+ /**
528
+ * Check if a specified object exist in a given AWS S3 bucket.
529
+ * @param functions <Functions> - the Firebase cloud functions object instance.
530
+ * @param bucketName <string> - the name of the ceremony bucket.
531
+ * @param objectKey <string> - the storage path that locates the artifact to be downloaded in the bucket.
532
+ * @returns <Promise<string>> - true if and only if the object exists, otherwise false.
533
+ */
534
+ const checkIfObjectExist = async (functions, bucketName, objectKey) => {
535
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.checkIfObjectExist);
536
+ const { data: doesObjectExist } = await cf({
537
+ bucketName,
538
+ objectKey
539
+ });
540
+ return doesObjectExist;
541
+ };
542
+ /**
543
+ * Request to verify the newest contribution for the circuit.
544
+ * @param functions <Functions> - the Firebase cloud functions object instance.
545
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
546
+ * @param circuit <FirebaseDocumentInfo> - the document info about the circuit.
547
+ * @param bucketName <string> - the name of the ceremony bucket.
548
+ * @param contributorOrCoordinatorIdentifier <string> - the identifier of the contributor or coordinator (only when finalizing).
549
+ * @param verifyContributionCloudFunctionEndpoint <string> - the endpoint (direct url) necessary to call the V2 Cloud Function.
550
+ * @returns <Promise<void>> -
551
+ */
552
+ const verifyContribution = async (functions, ceremonyId, circuit, // any just to avoid breaking the tests.
553
+ bucketName, contributorOrCoordinatorIdentifier, verifyContributionCloudFunctionEndpoint) => {
554
+ const cf = httpsCallableFromURL(functions, verifyContributionCloudFunctionEndpoint, {
555
+ timeout: 3600000 // max timeout 60 minutes.
556
+ });
557
+ /**
558
+ * @dev Force a race condition to fix #57.
559
+ * TL;DR if the cloud function does not return despite having finished its execution, we use
560
+ * a listener on the circuit, we check and retrieve the info about the correct execution and
561
+ * return it manually. In other cases, it will be the function that returns either a timeout in case it
562
+ * remains in execution for too long.
563
+ */
564
+ await Promise.race([
565
+ cf({
566
+ ceremonyId,
567
+ circuitId: circuit.id,
568
+ contributorOrCoordinatorIdentifier,
569
+ bucketName
570
+ }),
571
+ new Promise((resolve) => {
572
+ setTimeout(() => {
573
+ const unsubscribeToCeremonyCircuitListener = onSnapshot(circuit.ref, async (changedCircuit) => {
574
+ // Check data.
575
+ if (!circuit.data || !changedCircuit.data())
576
+ throw Error(`Unable to retrieve circuit data from the ceremony.`);
577
+ // Extract data.
578
+ const { avgTimings: changedAvgTimings, waitingQueue: changedWaitingQueue } = changedCircuit.data();
579
+ const { contributionComputation: changedContributionComputation, fullContribution: changedFullContribution, verifyCloudFunction: changedVerifyCloudFunction } = changedAvgTimings;
580
+ const { failedContributions: changedFailedContributions, completedContributions: changedCompletedContributions } = changedWaitingQueue;
581
+ const { avgTimings: prevAvgTimings, waitingQueue: prevWaitingQueue } = changedCircuit.data();
582
+ const { contributionComputation: prevContributionComputation, fullContribution: prevFullContribution, verifyCloudFunction: prevVerifyCloudFunction } = prevAvgTimings;
583
+ const { failedContributions: prevFailedContributions, completedContributions: prevCompletedContributions } = prevWaitingQueue;
584
+ // Pre-conditions.
585
+ const invalidContribution = prevFailedContributions === changedFailedContributions - 1;
586
+ const validContribution = prevCompletedContributions === changedCompletedContributions - 1;
587
+ const avgTimeUpdates = prevContributionComputation !== changedContributionComputation &&
588
+ prevFullContribution !== changedFullContribution &&
589
+ prevVerifyCloudFunction !== changedVerifyCloudFunction;
590
+ if ((invalidContribution || validContribution) && avgTimeUpdates) {
591
+ resolve({});
592
+ }
593
+ });
594
+ // Unsubscribe from listener.
595
+ unsubscribeToCeremonyCircuitListener();
596
+ }, 3600000 - 1000); // 59:59 throws 1s before max time for CF execution.
597
+ })
598
+ ]);
599
+ };
600
+ /**
601
+ * Prepare the coordinator for the finalization of the ceremony.
602
+ * @param functions <Functions> - the Firebase cloud functions object instance.
603
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
604
+ * @returns <Promise<boolean>> - true when the coordinator is ready for finalization; otherwise false.
605
+ */
606
+ const checkAndPrepareCoordinatorForFinalization = async (functions, ceremonyId) => {
607
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.checkAndPrepareCoordinatorForFinalization);
608
+ const { data: isCoordinatorReadyForCeremonyFinalization } = await cf({
609
+ ceremonyId
610
+ });
611
+ return isCoordinatorReadyForCeremonyFinalization;
612
+ };
613
+ /**
614
+ * Finalize the ceremony circuit.
615
+ * @param functions <Functions> - the Firebase cloud functions object instance.
616
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
617
+ * @param circuitId <string> - the unique identifier of the circuit.
618
+ * @param bucketName <string> - the name of the ceremony bucket.
619
+ * @param beacon <string> - the value used to compute the final contribution while finalizing the ceremony.
620
+ */
621
+ const finalizeCircuit = async (functions, ceremonyId, circuitId, bucketName, beacon) => {
622
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.finalizeCircuit);
623
+ await cf({
624
+ ceremonyId,
625
+ circuitId,
626
+ bucketName,
627
+ beacon
628
+ });
629
+ };
630
+ /**
631
+ * Conclude the finalization of the ceremony.
632
+ * @param functions <Functions> - the Firebase cloud functions object instance.
633
+ * @param ceremonyId <string> - the unique identifier of the ceremony.
634
+ */
635
+ const finalizeCeremony = async (functions, ceremonyId) => {
636
+ const cf = httpsCallable(functions, commonTerms.cloudFunctionsNames.finalizeCeremony);
637
+ await cf({
638
+ ceremonyId
639
+ });
640
+ };
641
+
642
+ /**
643
+ * Return the bucket name based on ceremony prefix.
644
+ * @param ceremonyPrefix <string> - the ceremony prefix.
645
+ * @param ceremonyPostfix <string> - the ceremony postfix.
646
+ * @returns <string>
647
+ */
648
+ const getBucketName = (ceremonyPrefix, ceremonyPostfix) => `${ceremonyPrefix}${ceremonyPostfix}`;
649
/**
 * Get chunks and signed urls related to an object that must be uploaded using a multi-part upload.
 * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
 * @param bucketName <string> - the name of the ceremony artifacts bucket (AWS S3).
 * @param objectKey <string> - the unique key to identify the object inside the given AWS S3 bucket.
 * @param localFilePath <string> - the local path of the artifact to be uploaded.
 * @param uploadId <string> - the unique identifier of the multi-part upload.
 * @param configStreamChunkSize <number> - size of each chunk in MB into which the artifact is split.
 * @param [ceremonyId] <string> - the unique identifier of the ceremony.
 * @returns Promise<Array<ChunkWithUrl>> - the chunks with related pre-signed url.
 */
const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey, localFilePath, uploadId, configStreamChunkSize, ceremonyId) => {
    // Stream the file in fixed-size pieces (highWaterMark is expressed in bytes).
    const readStream = fs.createReadStream(localFilePath, {
        highWaterMark: configStreamChunkSize * 1024 * 1024
    });
    const fileChunks = [];
    for await (const piece of readStream) fileChunks.push(piece);
    // An empty file cannot be multi-part uploaded.
    if (fileChunks.length === 0)
        throw new Error("Unable to split an empty file into chunks.");
    // One pre-signed url per chunk; part numbers are 1-based by S3 convention.
    const urls = await generatePreSignedUrlsParts(cloudFunctions, bucketName, objectKey, uploadId, fileChunks.length, ceremonyId);
    return fileChunks.map((chunk, idx) => ({
        partNumber: idx + 1,
        chunk,
        preSignedUrl: urls[idx]
    }));
};
681
/**
 * Forward the request to upload each single chunk of the related ceremony artifact.
 * @dev resumable: when `alreadyUploadedChunks` is provided, the loop starts after the last
 * already-uploaded part and new part records are appended to that same array (mutated in place).
 * @param chunksWithUrls <Array<ChunkWithUrl>> - the array containing each chunk mapped with the corresponding pre-signed urls.
 * @param contentType <string | false> - the content type of the ceremony artifact.
 * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
 * @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
 */
const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
    // Keep track of uploaded chunks.
    const uploadedChunks = alreadyUploadedChunks || [];
    // Loop through remaining chunks.
    for (let i = alreadyUploadedChunks ? alreadyUploadedChunks.length : 0; i < chunksWithUrls.length; i += 1) {
        // Consume the pre-signed url to upload the chunk.
        // NOTE(review): `retryOptions` and `agent` are not standard WHATWG fetch options —
        // presumably `fetch` here is a retrying polyfill (e.g. @adobe/node-fetch-retry); confirm.
        // @ts-ignore
        const response = await fetch(chunksWithUrls[i].preSignedUrl, {
            retryOptions: {
                retryInitialDelay: 500,
                socketTimeout: 60000,
                retryMaxDuration: 300000 // 5 minutes.
            },
            method: "PUT",
            body: chunksWithUrls[i].chunk,
            headers: {
                // NOTE(review): when the upstream mime lookup fails, `contentType` is `false`
                // and this header becomes the literal string "false" — confirm intended.
                "Content-Type": contentType.toString(),
                "Content-Length": chunksWithUrls[i].chunk.length.toString()
            },
            agent: new https.Agent({ keepAlive: true })
        });
        // Verify the response.
        if (response.status !== 200 || !response.ok)
            throw new Error(`Unable to upload chunk number ${i}. Please, terminate the current session and retry to resume from the latest uploaded chunk.`);
        // Extract uploaded chunk data (S3 identifies each uploaded part by its ETag + part number).
        const chunk = {
            ETag: response.headers.get("etag") || undefined,
            PartNumber: chunksWithUrls[i].partNumber
        };
        uploadedChunks.push(chunk);
        // Temporary store uploaded chunk data to enable later resumable contribution.
        // nb. this must be done only when contributing (not finalizing).
        if (!!ceremonyId && !!cloudFunctions)
            await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, chunk);
    }
    return uploadedChunks;
};
727
/**
 * Upload a ceremony artifact to the corresponding bucket.
 * @notice this method implements the multi-part upload using pre-signed urls, optimal for large files.
 * Steps:
 * 0) Check if current contributor could resume a multi-part upload.
 *  0.A) If yes, continue from last uploaded chunk using the already opened multi-part upload.
 *  0.B) Otherwise, start creating a new multi-part upload.
 * 1) Generate a pre-signed url for each (remaining) chunk of the ceremony artifact.
 * 2) Consume the pre-signed urls to upload chunks.
 * 3) Complete the multi-part upload.
 * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
 * @param bucketName <string> - the name of the ceremony artifacts bucket (AWS S3).
 * @param objectKey <string> - the unique key to identify the object inside the given AWS S3 bucket.
 * @param localFilePath <string> - the local path of the artifact to be uploaded.
 * @param configStreamChunkSize <number> - size of each chunk in MB into which the artifact is split.
 * @param [ceremonyId] <string> - the unique identifier of the ceremony (also signals whether the caller is a contributor rather than the coordinator finalizing).
 * @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
 */
const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
    let uploadIdentifier = "";
    let previouslyUploadedChunks = [];
    // Step (0): resume only when a previous upload id has been persisted.
    if (temporaryDataToResumeMultiPartUpload && !!temporaryDataToResumeMultiPartUpload.uploadId) {
        // Step (0.A): reuse the already opened multi-part upload.
        uploadIdentifier = temporaryDataToResumeMultiPartUpload.uploadId;
        previouslyUploadedChunks = temporaryDataToResumeMultiPartUpload.chunks;
    }
    else {
        // Step (0.B): open a fresh multi-part upload for the ceremony artifact.
        uploadIdentifier = await openMultiPartUpload(cloudFunctions, bucketName, objectKey, ceremonyId);
        // Persist the upload id so a contributor can later resume.
        if (ceremonyId)
            await temporaryStoreCurrentContributionMultiPartUploadId(cloudFunctions, ceremonyId, uploadIdentifier);
    }
    // Step (1): one pre-signed url per chunk.
    const chunksWithUrls = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, uploadIdentifier, configStreamChunkSize, ceremonyId);
    // Step (2): upload remaining chunks (content-type derived from the file extension).
    const eTagsWithPartNumbers = await uploadParts(chunksWithUrls, mime.lookup(localFilePath), cloudFunctions, ceremonyId, previouslyUploadedChunks);
    // Step (3): close the multi-part upload.
    await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, uploadIdentifier, eTagsWithPartNumbers, ceremonyId);
};
773
/**
 * Download an artifact from S3 (only for authorized users).
 * @param cloudFunctions <Functions> Firebase cloud functions instance.
 * @param bucketName <string> Name of the bucket where the artifact is stored.
 * @param storagePath <string> Path to the artifact in the bucket.
 * @param localPath <string> Path to the local file where the artifact will be saved.
 */
const downloadCeremonyArtifact = async (cloudFunctions, bucketName, storagePath, localPath) => {
    // Request pre-signed url to make GET download request.
    const getPreSignedUrl = await generateGetObjectPreSignedUrl(cloudFunctions, bucketName, storagePath);
    // Make fetch to get info about the artifact.
    // @ts-ignore
    const response = await fetch(getPreSignedUrl);
    // nb. use the same `||` failure check as the upload path (was `&&`, which could miss
    // partially-failed responses); also fixes the "erorr" typo in the message.
    if (response.status !== 200 || !response.ok)
        throw new Error(`There was an error while downloading the object ${storagePath} from the bucket ${bucketName}. Please check the function inputs and try again.`);
    const content = response.body;
    // Prepare stream.
    const writeStream = createWriteStream(localPath);
    // Write chunk by chunk.
    for await (const chunk of content) {
        writeStream.write(chunk);
    }
    // Close the stream and wait until all buffered data has been flushed to disk
    // (previously the stream was never ended, so the file could be left truncated).
    await new Promise((resolve, reject) => {
        writeStream.on("error", reject);
        writeStream.end(resolve);
    });
};
797
/**
 * Get R1CS file path tied to a particular circuit of a ceremony in the storage.
 * @notice each R1CS file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeR1csFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeR1csFilename <string> - the complete R1CS filename (name + ext).
 * @returns <string> - the storage path of the R1CS file.
 */
const getR1csStorageFilePath = (circuitPrefix, completeR1csFilename) =>
    [commonTerms.collections.circuits.name, circuitPrefix, completeR1csFilename].join("/");
806
/**
 * Get WASM file path tied to a particular circuit of a ceremony in the storage.
 * @notice each WASM file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeWasmFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeWasmFilename <string> - the complete WASM filename (name + ext).
 * @returns <string> - the storage path of the WASM file.
 */
const getWasmStorageFilePath = (circuitPrefix, completeWasmFilename) =>
    [commonTerms.collections.circuits.name, circuitPrefix, completeWasmFilename].join("/");
815
/**
 * Get PoT file path in the storage.
 * @notice each PoT file in the storage must be stored in the following path: `pot/<completePotFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param completePotFilename <string> - the complete PoT filename (name + ext).
 * @returns <string> - the storage path of the PoT file.
 */
const getPotStorageFilePath = (completePotFilename) =>
    [commonTerms.foldersAndPathsTerms.pot, completePotFilename].join("/");
823
/**
 * Get zKey file path tied to a particular circuit of a ceremony in the storage.
 * @notice each zKey file in the storage must be stored in the following path: `circuits/<circuitPrefix>/contributions/<completeZkeyFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeZkeyFilename <string> - the complete zKey filename (name + ext).
 * @returns <string> - the storage path of the zKey file.
 */
const getZkeyStorageFilePath = (circuitPrefix, completeZkeyFilename) =>
    [
        commonTerms.collections.circuits.name,
        circuitPrefix,
        commonTerms.collections.contributions.name,
        completeZkeyFilename
    ].join("/");
832
/**
 * Get verification key file path tied to a particular circuit of a ceremony in the storage.
 * @notice each verification key file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeVerificationKeyFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeVerificationKeyFilename <string> - the complete verification key filename (name + ext).
 * @returns <string> - the storage path of the verification key file.
 */
const getVerificationKeyStorageFilePath = (circuitPrefix, completeVerificationKeyFilename) =>
    [commonTerms.collections.circuits.name, circuitPrefix, completeVerificationKeyFilename].join("/");
841
/**
 * Get verifier contract file path tied to a particular circuit of a ceremony in the storage.
 * @notice each verifier contract file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeVerifierContractFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeVerifierContractFilename <string> - the complete verifier contract filename (name + ext).
 * @returns <string> - the storage path of the verifier contract file.
 */
const getVerifierContractStorageFilePath = (circuitPrefix, completeVerifierContractFilename) =>
    [commonTerms.collections.circuits.name, circuitPrefix, completeVerifierContractFilename].join("/");
850
/**
 * Get transcript file path tied to a particular circuit of a ceremony in the storage.
 * @notice each transcript file in the storage must be stored in the following path: `circuits/<circuitPrefix>/transcripts/<completeTranscriptFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeTranscriptFilename <string> - the complete transcript filename (name + ext).
 * @returns <string> - the storage path of the transcript file.
 */
const getTranscriptStorageFilePath = (circuitPrefix, completeTranscriptFilename) =>
    [
        commonTerms.collections.circuits.name,
        circuitPrefix,
        commonTerms.foldersAndPathsTerms.transcripts,
        completeTranscriptFilename
    ].join("/");
859
+
860
/**
 * Get participants collection path for database reference.
 * @notice all participants related documents are stored under the `ceremonies/<ceremonyId>/participants` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns <string> - the participants collection path.
 */
const getParticipantsCollectionPath = (ceremonyId) =>
    [commonTerms.collections.ceremonies.name, ceremonyId, commonTerms.collections.participants.name].join("/");
868
/**
 * Get circuits collection path for database reference.
 * @notice all circuits related documents are stored under the `ceremonies/<ceremonyId>/circuits` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns <string> - the circuits collection path.
 */
const getCircuitsCollectionPath = (ceremonyId) =>
    [commonTerms.collections.ceremonies.name, ceremonyId, commonTerms.collections.circuits.name].join("/");
876
/**
 * Get contributions collection path for database reference.
 * @notice all contributions related documents are stored under the `ceremonies/<ceremonyId>/circuits/<circuitId>/contributions` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param circuitId <string> - the unique identifier of the circuit.
 * @returns <string> - the contributions collection path.
 */
const getContributionsCollectionPath = (ceremonyId, circuitId) =>
    [getCircuitsCollectionPath(ceremonyId), circuitId, commonTerms.collections.contributions.name].join("/");
885
/**
 * Get timeouts collection path for database reference.
 * @notice all timeouts related documents are stored under the `ceremonies/<ceremonyId>/participants/<participantId>/timeouts` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param participantId <string> - the unique identifier of the participant.
 * @returns <string> - the timeouts collection path.
 */
const getTimeoutsCollectionPath = (ceremonyId, participantId) =>
    [getParticipantsCollectionPath(ceremonyId), participantId, commonTerms.collections.timeouts.name].join("/");
894
/**
 * Helper for querying a collection based on certain constraints.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param collection$1 <string> - the name of the collection.
 * @param queryConstraints <Array<QueryConstraint>> - a sequence of where conditions.
 * @returns <Promise<QuerySnapshot<DocumentData>>> - the matching documents (if any).
 */
const queryCollection = async (firestoreDatabase, collection$1, queryConstraints) =>
    getDocs(query(collection(firestoreDatabase, collection$1), ...queryConstraints));
908
/**
 * Helper for obtaining uid and data for query document snapshots.
 * @param queryDocSnap <Array<QueryDocumentSnapshot>> - the array of query document snapshots to be converted.
 * @returns Array<FirebaseDocumentInfo>
 */
const fromQueryToFirebaseDocumentInfo = (queryDocSnap) =>
    queryDocSnap.map((docSnap) => {
        // nb. `data()` must be invoked on the snapshot itself (it is a method).
        const data = docSnap.data();
        return { id: docSnap.id, ref: docSnap.ref, data };
    });
918
/**
 * Fetch all documents in a collection.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param collection$1 <string> - the name of the collection.
 * @returns <Promise<Array<QueryDocumentSnapshot<DocumentData>>>> - all documents (if any).
 */
const getAllCollectionDocs = async (firestoreDatabase, collection$1) => {
    const snapshot = await getDocs(collection(firestoreDatabase, collection$1));
    return snapshot.docs;
};
925
/**
 * Get a specific document from database.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param collection <string> - the name of the collection.
 * @param documentId <string> - the unique identifier of the document in the collection.
 * @returns <Promise<DocumentSnapshot<DocumentData>>> - the document from Firestore.
 */
const getDocumentById = async (firestoreDatabase, collection, documentId) =>
    getDoc(doc(firestoreDatabase, collection, documentId));
936
/**
 * Query for opened ceremonies.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @returns <Promise<Array<FirebaseDocumentInfo>>>
 */
const getOpenedCeremonies = async (firestoreDatabase) => {
    // A ceremony is open when its state is OPENED and its end date has not yet passed.
    const constraints = [
        where(commonTerms.collections.ceremonies.fields.state, "==", "OPENED" /* CeremonyState.OPENED */),
        where(commonTerms.collections.ceremonies.fields.endDate, ">=", Date.now())
    ];
    const snapshot = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, constraints);
    return fromQueryToFirebaseDocumentInfo(snapshot.docs);
};
948
/**
 * Query for ceremony circuits.
 * @notice the order by sequence position is fundamental to maintain parallelism among contributions for different circuits.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param ceremonyId <string> - the ceremony unique identifier.
 * @returns Promise<Array<FirebaseDocumentInfo>> - the ceremony's circuit documents ordered by sequence position.
 */
const getCeremonyCircuits = async (firestoreDatabase, ceremonyId) => {
    const docs = await getAllCollectionDocs(firestoreDatabase, getCircuitsCollectionPath(ceremonyId));
    const circuits = fromQueryToFirebaseDocumentInfo(docs);
    // Ascending order by sequence position.
    circuits.sort((left, right) => left.data.sequencePosition - right.data.sequencePosition);
    return circuits;
};
956
/**
 * Query for a specific ceremony circuit contribution from a given contributor (if any).
 * @notice if the caller is a coordinator, there could be more than one contribution (= the one from finalization applies to this criteria).
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param circuitId <string> - the unique identifier of the circuit.
 * @param participantId <string> - the unique identifier of the participant.
 * @returns <Promise<Array<FirebaseDocumentInfo>>> - the document info about the circuit contributions from contributor.
 */
const getCircuitContributionsFromContributor = async (firestoreDatabase, ceremonyId, circuitId, participantId) => {
    const constraints = [where(commonTerms.collections.contributions.fields.participantId, "==", participantId)];
    const snapshot = await queryCollection(firestoreDatabase, getContributionsCollectionPath(ceremonyId, circuitId), constraints);
    return fromQueryToFirebaseDocumentInfo(snapshot.docs);
};
969
/**
 * Query for the active timeout from given participant for a given ceremony (if any).
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param ceremonyId <string> - the identifier of the ceremony.
 * @param participantId <string> - the identifier of the participant.
 * @returns <Promise<Array<FirebaseDocumentInfo>>> - the document info about the current active participant timeout.
 */
const getCurrentActiveParticipantTimeout = async (firestoreDatabase, ceremonyId, participantId) => {
    // A timeout is active while its end date lies in the future.
    const constraints = [where(commonTerms.collections.timeouts.fields.endDate, ">=", Timestamp.now().toMillis())];
    const snapshot = await queryCollection(firestoreDatabase, getTimeoutsCollectionPath(ceremonyId, participantId), constraints);
    return fromQueryToFirebaseDocumentInfo(snapshot.docs);
};
979
/**
 * Query for the closed ceremonies.
 * @notice a ceremony is closed when the period for receiving new contributions has ended.
 * @dev when the ceremony is closed it becomes ready for finalization.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of closed ceremonies.
 */
const getClosedCeremonies = async (firestoreDatabase) => {
    // A ceremony is closed when its state is CLOSED and its end date has passed.
    const constraints = [
        where(commonTerms.collections.ceremonies.fields.state, "==", "CLOSED" /* CeremonyState.CLOSED */),
        where(commonTerms.collections.ceremonies.fields.endDate, "<=", Date.now())
    ];
    const snapshot = await queryCollection(firestoreDatabase, commonTerms.collections.ceremonies.name, constraints);
    return fromQueryToFirebaseDocumentInfo(snapshot.docs);
};
993
+
994
/**
 * Render a single byte as a two-character lowercase hex string.
 * @hidden
 */
const toHexByte = (byte) => {
    const hex = byte.toString(16);
    // Zero-pad single-digit values so every byte renders as exactly two characters.
    return byte < 0x10 ? `0${hex}` : hex;
};
998
/**
 * Converts Uint8Array to hexadecimal string.
 * @param buffer arbitrary length of data
 * @returns hexadecimal string
 */
const toHex = (buffer) => [...buffer].map(toHexByte).join("");
1004
/**
 * Get 512 bit blake hash of the contents of given path.
 * @param path path of the file to hash
 * @returns 64 byte hexadecimal string
 * @throws rejects if the file cannot be read (previously a read error left the
 * promise pending forever because no "error" handler was attached).
 */
const blake512FromPath = async (path) =>
    new Promise((resolve, reject) => {
        // 64-byte (512-bit) blake2b digest accumulated incrementally over the stream.
        const context = blake.blake2bInit(64, undefined);
        fs.createReadStream(path)
            .on("data", (chunk) => {
                blake.blake2bUpdate(context, chunk);
            })
            // Propagate stream failures instead of hanging.
            .on("error", reject)
            .on("end", () => {
                resolve(toHex(blake.blake2bFinal(context)));
            });
    });
1022
/**
 * Return the SHA256 hash (HEX format) of a given value.
 * @param value <string> - the value to be hashed.
 * @returns <string> - the HEX format of the SHA256 hash of the given value.
 */
const computeSHA256ToHex = (value) => {
    const hasher = crypto.createHash("sha256");
    hasher.update(value);
    return hasher.digest("hex");
};
1028
/**
 * Helper function that can be used to compare whether two files' hashes are equal or not.
 * @param path1 <string> Path to the first file.
 * @param path2 <string> Path to the second file.
 * @returns <Promise<boolean>> Whether the files are equal or not.
 */
const compareHashes = async (path1, path2) => {
    // The two hash computations are independent — run them concurrently
    // instead of awaiting them one after the other.
    const [hash1, hash2] = await Promise.all([blake512FromPath(path1), blake512FromPath(path2)]);
    return hash1 === hash2;
};
1039
+
1040
/**
 * Parse and validate that the ceremony configuration is correct.
 * @notice this does not upload any files to storage, but it DOES download each circuit's
 * r1cs from S3 to the current working directory (removed afterwards only when `cleanup` is true).
 * @param path <string> - the path to the configuration file
 * @param cleanup <boolean> - whether to delete the r1cs file after parsing
 * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
 * @throws Error - on missing file, invalid configuration, or unreachable S3 artifacts
 * (every inner error is re-wrapped, so callers only see the combined message).
 */
const parseCeremonyFile = async (path, cleanup = false) => {
    // check that the path exists
    if (!fs.existsSync(path))
        throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
    try {
        // read the data
        const data = JSON.parse(fs.readFileSync(path).toString());
        // verify that the data is correct
        if (data['timeoutMechanismType'] !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ && data['timeoutMechanismType'] !== "FIXED" /* CeremonyTimeoutType.FIXED */)
            throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
        // validate that we have at least 1 circuit input data
        if (!data.circuits || data.circuits.length === 0)
            throw new Error("You need to provide the data for at least 1 circuit.");
        // validate that the end date is in the future
        let endDate;
        let startDate;
        // NOTE(review): `new Date(...)` never throws — an unparseable string yields an
        // Invalid Date, so this catch is effectively dead and NaN-valued dates silently
        // pass the comparisons below (every comparison with NaN is false) — confirm intended.
        try {
            endDate = new Date(data.endDate);
            startDate = new Date(data.startDate);
        }
        catch (error) {
            throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
        }
        if (endDate <= startDate)
            throw new Error("The end date should be greater than the start date.");
        const currentDate = new Date();
        if (endDate <= currentDate || startDate <= currentDate)
            throw new Error("The start and end dates should be in the future.");
        // validate penalty
        if (data.penalty <= 0)
            throw new Error("The penalty should be greater than zero.");
        const circuits = [];
        // nb. the /g flag makes `match` return ALL url occurrences; exactly one is required below.
        const urlPattern = /(https?:\/\/[^\s]+)/g;
        // Full 40-hex-char git commit hash (case-insensitive).
        const commitHashPattern = /^[a-f0-9]{40}$/i;
        const circuitArtifacts = [];
        for (let i = 0; i < data.circuits.length; i++) {
            const circuitData = data.circuits[i];
            const artifacts = circuitData.artifacts;
            circuitArtifacts.push({
                artifacts: artifacts
            });
            const r1csPath = artifacts.r1csStoragePath;
            const wasmPath = artifacts.wasmStoragePath;
            // where we storing the r1cs downloaded
            const localR1csPath = `./${circuitData.name}.r1cs`;
            // check that the artifacts exist in S3
            // we don't need any privileges to download this
            // just the correct region
            const s3 = new S3Client({ region: artifacts.region });
            try {
                await s3.send(new HeadObjectCommand({
                    Bucket: artifacts.bucket,
                    Key: r1csPath
                }));
            }
            catch (error) {
                throw new Error(`The r1cs file (${r1csPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
            }
            try {
                await s3.send(new HeadObjectCommand({
                    Bucket: artifacts.bucket,
                    Key: wasmPath
                }));
            }
            catch (error) {
                throw new Error(`The wasm file (${wasmPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
            }
            // download the r1cs to extract the metadata
            const command = new GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
            const response = await s3.send(command);
            const streamPipeline = promisify(pipeline);
            if (response.$metadata.httpStatusCode !== 200)
                throw new Error("There was an error while trying to download the r1cs file. Please check that the file has the correct permissions (public) set.");
            // NOTE(review): when the body is not a Node Readable, no download happens and the
            // metadata extraction below reads a stale/absent local file — confirm intended.
            if (response.Body instanceof Readable)
                await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
            // extract the metadata from the r1cs
            const metadata = getR1CSInfo(localR1csPath);
            // validate that the circuit hash and template links are valid
            const template = circuitData.template;
            // Exactly one URL must appear in the template source.
            const URLMatch = template.source.match(urlPattern);
            if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
                throw new Error("You should provide the URL to the circuits templates on GitHub.");
            const hashMatch = template.commitHash.match(commitHashPattern);
            if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit templates.");
            // calculate the hash of the r1cs file
            const r1csBlake2bHash = await blake512FromPath(localR1csPath);
            const circuitPrefix = extractPrefix(circuitData.name);
            // filenames
            const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
            const r1csCompleteFilename = `${circuitData.name}.r1cs`;
            const wasmCompleteFilename = `${circuitData.name}.wasm`;
            const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
            const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
            // storage paths
            const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
            const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
            const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
            const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
            const files = {
                potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
                r1csFilename: r1csCompleteFilename,
                wasmFilename: wasmCompleteFilename,
                initialZkeyFilename: firstZkeyCompleteFilename,
                potStoragePath: potStorageFilePath,
                r1csStoragePath: r1csStorageFilePath,
                wasmStoragePath: wasmStorageFilePath,
                initialZkeyStoragePath: zkeyStorageFilePath,
                r1csBlake2bHash: r1csBlake2bHash
            };
            // validate that the compiler hash is a valid hash
            const compiler = circuitData.compiler;
            const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
            if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit compiler.");
            // validate that the verification options are valid
            const verification = circuitData.verification;
            if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
                throw new Error("Please enter a valid verification mechanism: either CF or VM");
            // @todo VM parameters verification
            // if (verification['cfOrVM'] === "VM") {}
            // check that the timeout is provided for the correct configuration
            let dynamicThreshold;
            let fixedTimeWindow;
            let circuit = {};
            if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
                if (circuitData.dynamicThreshold <= 0)
                    throw new Error("The dynamic threshold should be > 0.");
                dynamicThreshold = circuitData.dynamicThreshold;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata: metadata,
                    files: files,
                    template: template,
                    compiler: compiler,
                    verification: verification,
                    dynamicThreshold: dynamicThreshold,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    },
                };
            }
            if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
                if (circuitData.fixedTimeWindow <= 0)
                    throw new Error("The fixed time window threshold should be > 0.");
                fixedTimeWindow = circuitData.fixedTimeWindow;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata: metadata,
                    files: files,
                    template: template,
                    compiler: compiler,
                    verification: verification,
                    fixedTimeWindow: fixedTimeWindow,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    },
                };
            }
            circuits.push(circuit);
            // remove the local r1cs download (if used for verifying the config only vs setup)
            if (cleanup)
                fs.unlinkSync(localR1csPath);
        }
        const setupData = {
            ceremonyInputData: {
                title: data.title,
                description: data.description,
                startDate: startDate.valueOf(),
                endDate: endDate.valueOf(),
                timeoutMechanismType: data.timeoutMechanismType,
                penalty: data.penalty
            },
            ceremonyPrefix: extractPrefix(data.title),
            circuits: circuits,
            circuitArtifacts: circuitArtifacts
        };
        return setupData;
    }
    catch (error) {
        // NOTE(review): rewrapping keeps only the message — the original stack/cause is lost.
        throw new Error(`Error while parsing up the ceremony setup file. ${error.message}`);
    }
};
1242
/**
 * Extract data from a R1CS metadata file generated with a custom file-based logger.
 * @notice useful for extracting metadata of circuits contained in the file produced by
 * attaching a file logger to the `r1cs.info()` method of snarkjs.
 * @param fullFilePath <string> - the full path of the logger output file.
 * @param keyRgx <RegExp> - the regular expression matching the key whose value should be extracted.
 * @returns <string> - the stringified extracted value.
 */
const extractR1CSInfoValueForGivenKey = (fullFilePath, keyRgx) => {
    // Load the whole logger file in memory.
    const loggedInfo = fs.readFileSync(fullFilePath, "utf-8");
    // Look for the line matching the requested key.
    const matches = loggedInfo.match(keyRgx);
    if (!matches)
        throw new Error(`Unable to retrieve circuit metadata. Possible causes may involve an error while using the logger. Please, check whether the corresponding \`.log\` file is present in your local \`output/setup/metadata\` folder. In any case, we kindly ask you to terminate the current session and repeat the process.`);
    // Manual extraction rule tailored to the `r1cs.info()` output format:
    // take what follows the colon, drop the first space, cut any trailing '#' part
    // and strip the newline.
    const [firstMatch] = matches;
    return firstMatch?.split(":")[1].replace(" ", "").split("#")[0].replace("\n", "");
};
1262
/**
 * Calculate the smallest amount of Powers of Tau needed for a circuit with a given constraint size.
 * @param constraints <number> - the number of circuit constraints (extracted from metadata).
 * @param outputs <number> - the number of circuit outputs (extracted from metadata).
 * @returns <number> - the smallest power such that 2^power covers constraints + outputs (minimum 2).
 */
const computeSmallestPowersOfTauForCircuit = (constraints, outputs) => {
    const total = constraints + outputs;
    // Grow the exponent until 2^power is large enough; start from the minimum of 2.
    let power = 2;
    while (2 ** power < total) power += 1;
    return power;
};
1277
/**
 * Transform a number in a zKey index format.
 * @dev this method is aligned with the number of characters of the genesis zKey index (which is a constant).
 * @param progress <number> - the progression in zKey index.
 * @returns <string> - the progression in a zKey index format (`XYZAB`).
 */
const formatZkeyIndex = (progress) => {
    // Left-pad with zeros up to the genesis index width (e.g., 1 -> "00001").
    // padStart replaces the original manual while-loop padding; it is a no-op
    // when the stringified progress is already at least as wide.
    return progress.toString().padStart(genesisZkeyIndex.length, "0");
};
1291
/**
 * Extract the amount of powers from a Powers of Tau file name.
 * @dev the PoT files must follow this convention (i_am_a_pot_file_09.ptau) where the digits
 * between the last underscore and '.ptau' are the powers.
 * @param potCompleteFilename <string> - the complete filename of the Powers of Tau file.
 * @returns <number> - the amount of powers.
 */
const extractPoTFromFilename = (potCompleteFilename) => {
    // Take the chunk after the last underscore, then drop the extension.
    const lastChunk = potCompleteFilename.split("_").pop();
    return Number(lastChunk?.split(".")[0]);
};
1298
/**
 * Extract a prefix from a string with arbitrary characters.
 * @dev replaces all special symbols and whitespace with a dash char ('-') and converts all
 * uppercase chars to lowercase (underscores are left untouched).
 * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier-2-2-4-zkey'.
 * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts,
 * download/upload from/to storage, collections paths.
 * @param str <string> - the arbitrary string from which to extract the prefix.
 * @returns <string> - the resulting prefix.
 */
const extractPrefix = (str) => {
    // eslint-disable-next-line no-useless-escape
    const dashed = str.replace(/[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "-");
    return dashed.toLowerCase();
};
1309
+ /**
1310
+ * Automate the generation of an entropy for a contribution.
1311
+ * @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
1312
+ * @todo we need to improve the entropy generation (too naive).
1313
+ * @returns <string> - the auto-generated entropy.
1314
+ */
1315
+ const autoGenerateEntropy = () => new Uint8Array(256).map(() => Math.random() * 256).toString();
1316
/**
 * Check and return the circuit document based on its sequence position among a set of circuits (if any).
 * @dev there should be exactly one circuit with a given sequence position; this method throws
 * when zero or multiple circuits match.
 * @param circuits <Array<FirebaseDocumentInfo>> - the set of ceremony circuits documents.
 * @param sequencePosition <number> - the sequence position (index) of the circuit to be found and returned.
 * @returns <FirebaseDocumentInfo> - the circuit document holding the provided sequence position.
 */
const getCircuitBySequencePosition = (circuits, sequencePosition) => {
    // Keep only circuits at the requested position.
    const matches = circuits.filter(({ data }) => data.sequencePosition === sequencePosition);
    // Exactly one match is expected — anything else signals inconsistent ceremony data.
    if (matches.length !== 1)
        throw new Error(`Unable to find the circuit having position ${sequencePosition}. Run the command again and, if this error persists please contact the coordinator.`);
    return matches[0];
};
1331
/**
 * Convert bytes or kilobytes into gigabytes.
 * @param bytesOrKb <number> - the amount of bytes or kilobytes to be converted.
 * @param isBytes <boolean> - true when the amount to be converted is in bytes; otherwise false (= kilobytes).
 * @returns <number> - the converted amount in GBs.
 */
const convertBytesOrKbToGb = (bytesOrKb, isBytes) => {
    // Bytes need three 1024 divisions to reach GB; kilobytes need two.
    const divisor = isBytes ? 1024 ** 3 : 1024 ** 2;
    return bytesOrKb / divisor;
};
1338
/**
 * Get the validity of a contributor's contributions for each circuit of the given ceremony (if any).
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param circuits <Array<FirebaseDocumentInfo>> - the array of ceremony circuits documents.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param participantId <string> - the unique identifier of the contributor.
 * @param isFinalizing <boolean> - flag to discriminate between ceremony finalization (true) and contribution (false).
 * @returns <Promise<Array<ContributionValidity>>> - the contributor's contributions together with their
 * validity (based on coordinator verification).
 */
const getContributionsValidityForContributor = async (firestoreDatabase, circuits, ceremonyId, participantId, isFinalizing) => {
    const contributionsValidity = [];
    for (const circuit of circuits) {
        // Fetch the contributor's contributions for this circuit.
        const contributorContributions = await getCircuitContributionsFromContributor(firestoreDatabase, ceremonyId, circuit.id, participantId);
        // When finalizing there can be more than one contribution: pick the final one;
        // otherwise take the first (and only) document.
        const contribution = isFinalizing
            ? contributorContributions.find((contributionDocument) => contributionDocument.data.zkeyIndex === finalContributionIndex)
            : contributorContributions.at(0);
        if (!contribution)
            throw new Error("Unable to retrieve contributions for the participant. There may have occurred a database-side error. Please, we kindly ask you to terminate the current session and repeat the process");
        contributionsValidity.push({
            contributionId: contribution.id,
            circuitId: circuit.id,
            valid: contribution.data.valid
        });
    }
    return contributionsValidity;
};
1368
/**
 * Return the public attestation preamble for a given contributor.
 * @param contributorIdentifier <string> - the identifier of the contributor (handle, name, uid).
 * @param ceremonyName <string> - the name of the ceremony.
 * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
 * @returns <string> - the public attestation preamble.
 */
const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => {
    const action = isFinalizing ? "finalized" : "contributed to";
    return `Hey, I'm ${contributorIdentifier} and I have ${action} the ${ceremonyName} MPC Phase2 Trusted Setup ceremony.\nThe following are my contribution signatures:`;
};
1376
/**
 * Check and prepare the public attestation for a contributor, built only from its contributions.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param circuits <Array<FirebaseDocumentInfo>> - the array of ceremony circuits documents.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param participantId <string> - the unique identifier of the contributor.
 * @param participantContributions <Array> - the contribution references stored on the participant document
 * (each entry carries a `doc` id and a `hash` — presumably the contribution hash; TODO confirm schema).
 * @param contributorIdentifier <string> - the identifier of the contributor (handle, name, uid).
 * @param ceremonyName <string> - the name of the ceremony.
 * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
 * @returns <Promise<string>> - the public attestation for the contributor.
 */
const generateValidContributionsAttestation = async (firestoreDatabase, circuits, ceremonyId, participantId, participantContributions, contributorIdentifier, ceremonyName, isFinalizing) => {
    // Generate the attestation preamble for the contributor.
    let publicAttestation = getPublicAttestationPreambleForContributor(contributorIdentifier, ceremonyName, isFinalizing);
    // Get contributors' contributions validity (one entry per circuit).
    const contributionsWithValidity = await getContributionsValidityForContributor(firestoreDatabase, circuits, ceremonyId, participantId, isFinalizing);
    // NB. `contributionsWithValidity` is a plain array; `for await` iterates it like `for...of`.
    for await (const contributionWithValidity of contributionsWithValidity) {
        // Filter for the related contribution document info stored on the participant.
        const matchedContributions = participantContributions.filter((contribution) => contribution.doc === contributionWithValidity.contributionId);
        // Exactly one reference is expected; zero or duplicates indicate a database write error.
        if (matchedContributions.length === 0)
            throw new Error(`Unable to retrieve given circuit contribution information. This could happen due to some errors while writing the information on the database.`);
        if (matchedContributions.length > 1)
            throw new Error(`Duplicated circuit contribution information. Please, contact the coordinator.`);
        const participantContribution = matchedContributions.at(0);
        // Get circuit document (the one for which the contribution was calculated).
        const circuitDocument = await getDocumentById(firestoreDatabase, getCircuitsCollectionPath(ceremonyId), contributionWithValidity.circuitId);
        const contributionDocument = await getDocumentById(firestoreDatabase, getContributionsCollectionPath(ceremonyId, contributionWithValidity.circuitId), participantContribution.doc);
        if (!contributionDocument.data() || !circuitDocument.data())
            throw new Error(`Something went wrong when retrieving the data from the database`);
        // Extract data.
        const { sequencePosition, prefix } = circuitDocument.data();
        const { zkeyIndex } = contributionDocument.data();
        // Update public attestation.
        // NOTE(review): `zkeyIndex > 0` coerces the (string) index — numeric indexes like "00001"
        // are printed without leading zeros via Number(), while a non-numeric index (presumably
        // the final-contribution marker) is kept verbatim — confirm against the index format.
        publicAttestation = `${publicAttestation}\n\nCircuit # ${sequencePosition} (${prefix})\nContributor # ${zkeyIndex > 0 ? Number(zkeyIndex) : zkeyIndex}\n${participantContribution.hash}`;
    }
    return publicAttestation;
};
1414
/**
 * Create a custom winston logger that writes logs to a local file.
 * @param filename <string> - the name of the output file (where the logs are going to be written).
 * @param level <winston.LoggerOptions["level"]> - the logger level (e.g., info, error); defaults to "info".
 * @returns <Logger> - a customized winston logger for files.
 */
const createCustomLoggerForFile = (filename, level = "info") => {
    // Single file transport that emits the raw message (no timestamps/levels).
    const fileTransport = new winston.transports.File({
        filename,
        format: winston.format.printf((log) => log.message),
        level
    });
    return winston.createLogger({ level, transports: fileTransport });
};
1428
/**
 * Return an amount of bytes read from a file as a Buffer.
 * @param localFilePath <string> - the local path of the file to read from.
 * @param offset <number> - the offset inside the destination buffer where writing starts
 * (per `fs.readSync` semantics; callers in this file pass 0).
 * @param length <number> - the number of bytes to read.
 * @param position <ReadPosition> - the byte position inside the file to read from.
 * @returns <Buffer> - the buffer w/ the read bytes.
 */
const readBytesFromFile = (localFilePath, offset, length, position) => {
    // Open the file (read mode).
    const fileDescriptor = fs.openSync(localFilePath, "r");
    try {
        // Prepare buffer and read bytes into it.
        const buffer = Buffer.alloc(length);
        fs.readSync(fileDescriptor, buffer, offset, length, position);
        return buffer;
    }
    finally {
        // Always release the descriptor — the original implementation leaked one fd per call,
        // which can exhaust the process fd limit when parsing large R1CS files.
        fs.closeSync(fileDescriptor);
    }
};
1446
/**
 * Return the info about the R1CS file.
 * @dev this method was built taking inspiration from
 * https://github.com/weijiekoh/circom-helper/blob/master/ts/read_num_inputs.ts#L5.
 * You can find the specs of the R1CS binary format here
 * https://github.com/iden3/r1csfile/blob/master/doc/r1cs_bin_format.md
 * @param localR1CSFilePath <string> - the local path to the R1CS file.
 * @returns <CircuitMetadata> - the info about the R1CS file.
 */
const getR1CSInfo = (localR1CSFilePath) => {
    /**
     * R1CS binary layout (per the iden3 spec):
     *   magic "r1cs" (4 bytes) | version (4 bytes) | number of sections (4 bytes)
     * followed by a list of sections, each encoded as:
     *   sectionType (4 bytes) | sectionSize (8 bytes, LE) | section content (sectionSize bytes)
     */
    // Prepare state.
    let pointer = 0; // selector to particular file data position in order to read data.
    let wires = 0;
    let publicOutputs = 0;
    let publicInputs = 0;
    let privateInputs = 0;
    let labels = 0;
    let constraints = 0;
    try {
        // Get 'number of sections' (skip the 4-byte magic "r1cs" and 4-byte version, position 8).
        const numberOfSections = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
        // Jump to first section.
        pointer = 12;
        // For each section
        for (let i = 0; i < numberOfSections; i++) {
            // Read section type.
            const sectionType = utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
            // Jump to section size.
            pointer += 4;
            // Read section size
            const sectionSize = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
            // If at header section (0x00000001 : Header Section).
            if (sectionType === BigInt(1)) {
                /**
                 * Header section content:
                 *   fieldSize fs (4 bytes) | prime (fs bytes) | nWires (4) | nPubOut (4)
                 *   | nPubIn (4) | nPrvIn (4) | nLabels (8) | mConstraints (4)
                 * The jump below lands 20 bytes before the end of the section, i.e. at nWires
                 * (the last 28 bytes of the section are nWires..mConstraints; pointer currently
                 * sits at the size field, 8 bytes before the content).
                 */
                pointer += sectionSize - 20;
                // Read R1CS info.
                wires = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                publicOutputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                publicInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                privateInputs = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                labels = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
                pointer += 8;
                constraints = Number(utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
            }
            // NOTE(review): after the header branch above, `pointer` has already been advanced
            // into the section content, so this jump appears to overshoot the next section
            // boundary. Reads past EOF yield zero-filled buffers (sectionType/size become 0),
            // so the loop still terminates with the header values intact — confirm this is the
            // intended skip logic before changing it.
            pointer += 8 + Number(sectionSize);
        }
        return {
            curve: "bn-128",
            wires,
            constraints,
            privateInputs,
            publicInputs,
            labels,
            outputs: publicOutputs,
            pot: computeSmallestPowersOfTauForCircuit(constraints, publicOutputs)
        };
    }
    catch (err) {
        throw new Error(`The R1CS file you provided would not appear to be correct. Please, check that you have provided a valid R1CS file and repeat the process.`);
    }
};
1577
/**
 * Return a string with two digits when the provided input has one digit only.
 * @param amount <number> - the input number to be converted (expected non-negative).
 * @returns <string> - the two-digit stringified number derived from the conversion.
 */
const convertToDoubleDigits = (amount) => (amount < 10 ? `0${amount}` : `${amount}`);
1583
+
1584
/**
 * Verify that a zKey is valid.
 * @param r1csLocalFilePath <string> path to the r1cs file
 * @param zkeyLocalPath <string> path to the zKey file
 * @param potLocalFilePath <string> path to the PoT file
 * @param logger <any> logger instance
 * @returns <boolean> true if the zKey is valid, false otherwise
 */
const verifyZKey = async (r1csLocalFilePath, zkeyLocalPath, potLocalFilePath, logger) => {
    // Guard: every input artifact must exist locally before invoking snarkjs.
    const requiredFiles = [
        { filePath: r1csLocalFilePath, label: "R1CS" },
        { filePath: zkeyLocalPath, label: "zKey" },
        { filePath: potLocalFilePath, label: "PoT" }
    ];
    for (const { filePath, label } of requiredFiles)
        if (!fs.existsSync(filePath))
            throw new Error(`${label} file not found at ${filePath}`);
    return zKey.verifyFromR1cs(r1csLocalFilePath, potLocalFilePath, zkeyLocalPath, logger);
};
1602
/**
 * Generates a GROTH16 proof
 * @param circuitInput <object> Input to the circuit
 * @param zkeyFilePath <string> Path to the proving key
 * @param wasmFilePath <string> Path to the compiled circuit
 * @param logger <any> Optional logger
 * @returns <Promise<object>> The proof and its public signals
 * @throws Error wrapping the underlying snarkjs failure (available via `error.cause`).
 */
const generateGROTH16Proof = async (circuitInput, zkeyFilePath, wasmFilePath, logger) => {
    try {
        const { proof, publicSignals } = await groth16.fullProve(circuitInput, wasmFilePath, zkeyFilePath, logger);
        return {
            proof,
            publicSignals
        };
    }
    catch (error) {
        // Preserve the original failure for debugging instead of silently discarding it.
        throw new Error("There was an error while generating a proof. Please check that the input is correct, as well as the required system paths; and please try again.", { cause: error });
    }
};
1622
/**
 * Verifies a GROTH16 proof
 * @param verificationKeyPath <string> Path to the verification key (JSON file)
 * @param publicSignals <object> Public signals
 * @param proof <object> Proof
 * @returns <Promise<boolean>> Whether the proof is valid or not
 */
const verifyGROTH16Proof = async (verificationKeyPath, publicSignals, proof) => {
    // Load and parse the verification key from disk.
    const verificationKey = JSON.parse(fs.readFileSync(verificationKeyPath, "utf-8"));
    return groth16.verify(verificationKey, publicSignals, proof);
};
1634
/**
 * Helper method to extract the Solidity verifier
 * from a final zKey file.
 * @param finalZkeyPath <string> The path to the zKey file.
 * @param templatePath <string> The path to the groth16 Solidity template file.
 * @return <any> The Solidity verifier code.
 */
const exportVerifierContract = async (finalZkeyPath, templatePath) => {
    // Load the groth16 verifier template and render it from the final zKey.
    const groth16Template = fs.readFileSync(templatePath).toString();
    const rawVerifier = await zKey.exportSolidityVerifier(finalZkeyPath, { groth16: groth16Template }, console);
    // Pin the pragma to the project's Solidity version.
    return rawVerifier.replace(/pragma solidity \^\d+\.\d+\.\d+/, `pragma solidity ^${solidityVersion}`);
};
1649
/**
 * Helper method to extract the vKey from a final zKey file.
 * @param finalZkeyPath <string> The path to the zKey file.
 * @return <any> The vKey.
 */
const exportVkey = async (finalZkeyPath) => zKey.exportVerificationKey(finalZkeyPath);
1658
/**
 * Helper method to extract the Solidity verifier and the verification key
 * from a final zKey file and save them to local files.
 * @param finalZkeyPath <string> The path to the zKey file.
 * @param verifierLocalPath <string> The path to the local file where the verifier will be saved.
 * @param vKeyLocalPath <string> The path to the local file where the vKey will be saved.
 * @param templatePath <string> The path to the template file.
 */
const exportVerifierAndVKey = async (finalZkeyPath, verifierLocalPath, vKeyLocalPath, templatePath) => {
    // Render and persist the Solidity verifier first, then the verification key as JSON.
    fs.writeFileSync(verifierLocalPath, await exportVerifierContract(finalZkeyPath, templatePath));
    fs.writeFileSync(vKeyLocalPath, JSON.stringify(await exportVkey(finalZkeyPath)));
};
1672
/**
 * Generate a zKey from scratch (useful to compute either the genesis or final zKey).
 * @param isFinalizing <boolean> Whether the ceremony is finalizing or not
 * @param r1csLocalPath <string> The path to the local r1cs file
 * @param potLocalPath <string> The path to the local pot file
 * @param zkeyLocalPath <string> The path to save the generated zKey
 * @param logger <any> The logger instance
 * @param finalContributionZKeyLocalPath <string> The path to the local zkey file of the final contribution (only for final zKey)
 * @param coordinatorIdentifier <string> The identifier of the coordinator (only for final zKey)
 * @param beacon <string> The beacon value for the last contribution (only for final zKey)
 */
const generateZkeyFromScratch = async (isFinalizing, r1csLocalPath, potLocalPath, zkeyLocalPath, logger, finalContributionZKeyLocalPath, coordinatorIdentifier, beacon) => {
    // Both the r1cs and the PoT must exist locally.
    const inputsExist = fs.existsSync(r1csLocalPath) && fs.existsSync(potLocalPath);
    if (!inputsExist)
        throw new Error("There was an error while opening the local files. Please make sure that you provided the right paths and try again.");
    if (!isFinalizing) {
        // Genesis zKey: derive it directly from the r1cs and the PoT.
        await zKey.newZKey(r1csLocalPath, potLocalPath, zkeyLocalPath, logger);
        return;
    }
    // Final zKey: apply the beacon on top of the last contributed zKey.
    if (!fs.existsSync(finalContributionZKeyLocalPath))
        throw new Error("There was an error while opening the last zKey generated by a contributor. Please make sure that you provided the right path and try again.");
    await zKey.beacon(finalContributionZKeyLocalPath, zkeyLocalPath, coordinatorIdentifier, beacon, numExpIterations, logger);
};
1694
/**
 * Helper function used to compare two ceremony artifacts by hash.
 * @param firebaseFunctions <Functions> Firebase functions object
 * @param localPath1 <string> Local path to store the first artifact
 * @param localPath2 <string> Local path to store the second artifact
 * @param storagePath1 <string> Storage path to the first artifact
 * @param storagePath2 <string> Storage path to the second artifact
 * @param bucketName1 <string> Bucket name of the first artifact
 * @param bucketName2 <string> Bucket name of the second artifact
 * @param cleanup <boolean> Whether to delete the downloaded files or not
 * @returns <Promise<boolean>> true if the hashes match, false otherwise
 */
const compareCeremonyArtifacts = async (firebaseFunctions, localPath1, localPath2, storagePath1, storagePath2, bucketName1, bucketName2, cleanup) => {
    // Fetch both artifacts locally (sequentially, first then second).
    await downloadCeremonyArtifact(firebaseFunctions, bucketName1, storagePath1, localPath1);
    await downloadCeremonyArtifact(firebaseFunctions, bucketName2, storagePath2, localPath2);
    // Compare their hashes.
    const hashesMatch = await compareHashes(localPath1, localPath2);
    // Optionally remove the downloaded copies.
    if (cleanup) {
        fs.unlinkSync(localPath1);
        fs.unlinkSync(localPath2);
    }
    return hashesMatch;
};
1720
/**
 * Given a ceremony prefix, download all the ceremony artifacts for every circuit.
 * @param functions <Functions> firebase functions instance
 * @param firestore <Firestore> firebase firestore instance
 * @param ceremonyPrefix <string> ceremony prefix
 * @param outputDirectory <string> output directory where the artifacts are saved
 * @returns <Promise<CeremonyArtifacts[]>> array of ceremony artifacts (one entry per circuit)
 * @throws Error when the env postfix is missing, the ceremony cannot be found,
 * or the ceremony has no circuits.
 */
const downloadAllCeremonyArtifacts = async (functions, firestore, ceremonyPrefix, outputDirectory) => {
    // mkdir if not exists
    if (!fs.existsSync(outputDirectory)) {
        fs.mkdirSync(outputDirectory);
    }
    // The bucket name cannot be reconstructed without the configured postfix.
    if (!process.env.CONFIG_CEREMONY_BUCKET_POSTFIX)
        throw new Error("CONFIG_CEREMONY_BUCKET_POSTFIX not set. Please review your env file and try again.");
    const ceremonyArtifacts = [];
    // find the ceremony given the prefix
    const ceremonyQuery = await queryCollection(firestore, commonTerms.collections.ceremonies.name, [
        where(commonTerms.collections.ceremonies.fields.prefix, "==", ceremonyPrefix)
    ]);
    // get the data
    const ceremonyData = fromQueryToFirebaseDocumentInfo(ceremonyQuery.docs);
    if (ceremonyData.length === 0)
        throw new Error("Ceremony not found. Please review your ceremony prefix and try again.");
    // Only the first matching ceremony is used (prefixes are expected to be unique).
    const ceremony = ceremonyData.at(0);
    // reconstruct the bucket name
    const bucketName = getBucketName(ceremonyPrefix, process.env.CONFIG_CEREMONY_BUCKET_POSTFIX);
    const circuits = await getCeremonyCircuits(firestore, ceremony.id);
    if (circuits.length === 0)
        throw new Error("No circuits found for this ceremony. Please review your ceremony prefix and try again.");
    // for each circuit we have to download artifacts
    for (const circuit of circuits) {
        // make a directory for storing the circuit artifacts
        const circuitDir = `${outputDirectory}/${ceremony.data.prefix}/${circuit.data.prefix}`;
        fs.mkdirSync(circuitDir, { recursive: true });
        // get all required file names in storage and for local storage
        const { potStoragePath } = circuit.data.files;
        const potLocalPath = `${circuitDir}/${circuit.data.files.potFilename}`;
        const { r1csStoragePath } = circuit.data.files;
        const r1csLocalPath = `${circuitDir}/${circuit.data.files.r1csFilename}`;
        // The last contributed zKey index is derived from the completed-contributions counter.
        const contributions = circuit.data.waitingQueue.completedContributions;
        const zkeyIndex = formatZkeyIndex(contributions);
        const lastZKeyStoragePath = getZkeyStorageFilePath(circuit.data.prefix, `${circuit.data.prefix}_${zkeyIndex}.zkey`);
        const lastZKeyLocalPath = `${circuitDir}/${circuit.data.prefix}_${zkeyIndex}.zkey`;
        const finalZKeyName = `${circuit.data.prefix}_${finalContributionIndex}.zkey`;
        const finalZkeyStoragePath = getZkeyStorageFilePath(circuit.data.prefix, finalZKeyName);
        const finalZKeyLocalPath = `${circuitDir}/${finalZKeyName}`;
        const verifierStoragePath = getVerifierContractStorageFilePath(circuit.data.prefix, `${verifierSmartContractAcronym}.sol`);
        const verifierLocalPath = `${circuitDir}/${circuit.data.prefix}_${verifierSmartContractAcronym}.sol`;
        const vKeyStoragePath = getVerificationKeyStorageFilePath(circuit.data.prefix, `${verificationKeyAcronym}.json`);
        const vKeyLocalPath = `${circuitDir}/${circuit.data.prefix}_${verificationKeyAcronym}.json`;
        const wasmStoragePath = getWasmStorageFilePath(circuit.data.prefix, `${circuit.data.prefix}.wasm`);
        const wasmLocalPath = `${circuitDir}/${circuit.data.prefix}.wasm`;
        // download everything
        // NOTE(review): downloads are performed serially; they look independent and could be
        // parallelized with Promise.all — confirm downloadCeremonyArtifact has no shared state
        // (e.g., progress reporting) before changing this.
        await downloadCeremonyArtifact(functions, bucketName, potStoragePath, potLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, r1csStoragePath, r1csLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, lastZKeyStoragePath, lastZKeyLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, finalZkeyStoragePath, finalZKeyLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, verifierStoragePath, verifierLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, vKeyStoragePath, vKeyLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, wasmStoragePath, wasmLocalPath);
        ceremonyArtifacts.push({
            circuitPrefix: circuit.data.prefix,
            circuitId: circuit.id,
            directoryRoot: circuitDir,
            potLocalFilePath: potLocalPath,
            r1csLocalFilePath: r1csLocalPath,
            finalZkeyLocalFilePath: finalZKeyLocalPath,
            lastZkeyLocalFilePath: lastZKeyLocalPath,
            verifierLocalFilePath: verifierLocalPath,
            verificationKeyLocalFilePath: vKeyLocalPath,
            wasmLocalFilePath: wasmLocalPath
        });
    }
    return ceremonyArtifacts;
};
1796
/**
 * Fetch the final contribution beacon from Firestore
 * @param firestore <Firestore> firebase firestore instance
 * @param ceremonyId <string> ceremony id
 * @param circuitId <string> circuit id
 * @param participantId <string> participant id
 * @returns <Promise<string>> final contribution beacon
 * @throws Error when the participant has no contribution marked with the final zkey index.
 */
const getFinalContributionBeacon = async (firestore, ceremonyId, circuitId, participantId) => {
    const contributions = await getCircuitContributionsFromContributor(firestore, ceremonyId, circuitId, participantId);
    // the final contribution is the first (and only expected) document whose zkey index carries the "final" marker
    const finalContribution = contributions.find((contributionDocument) => contributionDocument.data.zkeyIndex === finalContributionIndex);
    if (!finalContribution)
        throw new Error("Final contribution not found. Please check that you provided the correct input data and try again.");
    return finalContribution.data.beacon.value;
};
1813
+
1814
/**
 * This method initialize a Firebase app if no other app has already been initialized.
 * @param options <FirebaseOptions> - an object w/ every necessary Firebase option to init app.
 * @returns <FirebaseApp> - the initialized Firebase app object.
 */
const initializeFirebaseApp = (options) => {
    // thin wrapper around the Firebase SDK factory
    return initializeApp(options);
};
1820
/**
 * This method returns the Firestore database instance associated to the given Firebase application.
 * @param app <FirebaseApp> - the Firebase application.
 * @returns <Firestore> - the Firebase Firestore associated to the application.
 */
const getFirestoreDatabase = (app) => {
    // thin wrapper around the Firestore SDK accessor
    return getFirestore(app);
};
1826
/**
 * This method returns the Cloud Functions instance associated to the given Firebase application.
 * @param app <FirebaseApp> - the Firebase application.
 * @param region <string> - the region the Cloud Functions are deployed to (defaults to 'europe-west1',
 * preserving the previously hard-coded value for existing callers).
 * @returns <Functions> - the Cloud Functions associated to the application.
 */
const getFirebaseFunctions = (app, region = 'europe-west1') => getFunctions(app, region);
1832
/**
 * Retrieve the configuration variables for the AWS services (S3, EC2).
 * @dev all five environment variables are required: the function throws when any one is
 * missing, so no fallback values apply afterwards (the former `|| "us-east-1"` fallback
 * on the region was unreachable dead code and has been removed).
 * @returns <AWSVariables> - the values of the AWS services configuration variables.
 * @throws Error when any of the required AWS environment variables is not set.
 */
const getAWSVariables = () => {
    if (!process.env.AWS_ACCESS_KEY_ID ||
        !process.env.AWS_SECRET_ACCESS_KEY ||
        !process.env.AWS_REGION ||
        !process.env.AWS_ROLE_ARN ||
        !process.env.AWS_AMI_ID)
        throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
    return {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        // guaranteed non-empty by the guard above
        region: process.env.AWS_REGION,
        roleArn: process.env.AWS_ROLE_ARN,
        amiId: process.env.AWS_AMI_ID
    };
};
1851
/**
 * Return the core Firebase services instances (App, Database, Functions).
 * @param apiKey <string> - the API key specified in the application config.
 * @param authDomain <string> - the authDomain string specified in the application config.
 * @param projectId <string> - the projectId specified in the application config.
 * @param messagingSenderId <string> - the messagingSenderId specified in the application config.
 * @param appId <string> - the appId specified in the application config.
 * @returns <Promise<FirebaseServices>>
 */
const initializeFirebaseCoreServices = async (apiKey, authDomain, projectId, messagingSenderId, appId) => {
    // boot the app first, then derive the database and functions handles from it
    const firebaseApp = initializeFirebaseApp({ apiKey, authDomain, projectId, messagingSenderId, appId });
    const firestoreDatabase = getFirestoreDatabase(firebaseApp);
    const firebaseFunctions = getFirebaseFunctions(firebaseApp);
    return { firebaseApp, firestoreDatabase, firebaseFunctions };
};
1876
+
1877
/**
 * Sign in w/ OAuth 2.0 token.
 * @param firebaseApp <FirebaseApp> - the configured instance of the Firebase App in use.
 * @param credentials <OAuthCredential> - the OAuth credential generated from token exchange.
 */
const signInToFirebaseWithCredentials = async (firebaseApp, credentials) => {
    // initialize the Auth instance for this app, then perform the credential sign-in
    const auth = initializeAuth(firebaseApp);
    return signInWithCredential(auth, credentials);
};
1883
/**
 * Return the current authenticated user in the given Firebase Application.
 * @param firebaseApp <FirebaseApp> - the configured instance of the Firebase App in use.
 * @returns <User> - the object containing the data about the current authenticated user in the given Firebase application.
 * @throws Error when no user is currently authenticated.
 */
const getCurrentFirebaseAuthUser = (firebaseApp) => {
    const { currentUser } = getAuth(firebaseApp);
    if (!currentUser)
        throw new Error(`Unable to find the user currently authenticated with Firebase. Verify that the Firebase application is properly configured and repeat user authentication before trying again.`);
    return currentUser;
};
1894
/**
 * Check if the user can claim to be a coordinator.
 * @param user <User> - the user to be checked.
 * @returns Promise<boolean> - true if the user's ID token carries a truthy `coordinator` custom claim, false otherwise.
 */
const isCoordinator = async (user) => {
    const { claims } = await user.getIdTokenResult();
    return Boolean(claims.coordinator);
};
1903
+
1904
/**
 * Formats part of a GROTH16 SNARK proof
 * @link adapted from SNARKJS p256 function
 * @dev accepts any value whose `toString(16)` yields a hex string (e.g. BigInt or number).
 * @param proofPart <any> a part of a proof to be formatted
 * @returns <string> the formatted proof part: a 0x-prefixed, zero-padded 64-hex-digit string
 */
const p256 = (proofPart) => `0x${proofPart.toString(16).padStart(64, "0")}`;
1917
/**
 * This function formats the calldata for Solidity
 * @link adapted from SNARKJS formatSolidityCalldata function
 * @dev this function is supposed to be called with
 * @dev the output of generateGROTH16Proof
 * @param circuitInput <string[]> Inputs to the circuit
 * @param _proof <object> Proof
 * @returns <SolidityCalldata> The calldata formatted for Solidity
 * @throws Error (with the underlying failure attached as `cause`) when the proof object
 * does not have the expected GROTH16 shape.
 */
const formatSolidityCalldata = (circuitInput, _proof) => {
    try {
        const proof = utils.unstringifyBigInts(_proof);
        // format the public inputs to the circuit
        const formattedCircuitInput = circuitInput.map((cInput) => p256(utils.unstringifyBigInts(cInput)));
        // construct calldata; the inner coordinates of pi_b are emitted in reversed
        // order, mirroring the snarkjs calldata format this was adapted from
        return {
            arg1: [p256(proof.pi_a[0]), p256(proof.pi_a[1])],
            arg2: [
                [p256(proof.pi_b[0][1]), p256(proof.pi_b[0][0])],
                [p256(proof.pi_b[1][1]), p256(proof.pi_b[1][0])]
            ],
            arg3: [p256(proof.pi_c[0]), p256(proof.pi_c[1])],
            arg4: formattedCircuitInput
        };
    }
    catch (error) {
        // preserve the original failure instead of silently discarding it
        throw new Error("There was an error while formatting the calldata. Please make sure that you are calling this function with the output of the generateGROTH16Proof function, and then please try again.", { cause: error });
    }
};
1950
/**
 * Verify a GROTH16 SNARK proof on chain
 * @param contract <Contract> The contract instance
 * @param proof <SolidityCalldata> The calldata formatted for Solidity
 * @returns <Promise<boolean>> Whether the proof is valid or not
 */
const verifyGROTH16ProofOnChain = async (contract, proof) => {
    // unpack the four calldata arguments and forward them to the on-chain verifier
    const { arg1, arg2, arg3, arg4 } = proof;
    return contract.verifyProof(arg1, arg2, arg3, arg4);
};
1960
/**
 * Compiles a contract given a path
 * @param contractPath <string> path to the verifier contract
 * @returns <Promise<any>> the compiled contract (the `Verifier` artifact: abi, evm bytecode, etc.)
 * @throws Error when the path does not exist, or (with `cause` attached) when compilation fails.
 */
const compileContract = async (contractPath) => {
    if (!fs.existsSync(contractPath))
        throw new Error("The contract path does not exist. Please make sure that you are passing a valid path to the contract and try again.");
    const data = fs.readFileSync(contractPath).toString();
    // standard solc JSON input requesting every output artifact for every contract
    const input = {
        language: "Solidity",
        sources: {
            Verifier: { content: data }
        },
        settings: {
            outputSelection: {
                "*": {
                    "*": ["*"]
                }
            }
        }
    };
    try {
        // solc-js expects the import callback to be a function; resolve any import to empty
        // content since the verifier contract is expected to be self-contained
        // (the previous `{ import: { contents: "" } }` object form is not a valid callback).
        const compiled = JSON.parse(solc.compile(JSON.stringify(input), { import: () => ({ contents: "" }) }));
        return compiled.contracts.Verifier.Verifier;
    }
    catch (error) {
        // keep the underlying compiler/parsing failure attached for debugging
        throw new Error("There was an error while compiling the smart contract. Please check that the file is not corrupted and try again.", { cause: error });
    }
};
1990
/**
 * Deploy the verifier contract
 * @param contractPath <string> path to the verifier contract source file
 * @param signer <Signer> the signer used to deploy the contract
 * @returns <Promise<Contract>> The contract instance
 */
const deployVerifierContract = async (contractPath, signer) => {
    // compile the verifier source, then deploy it through an ethers ContractFactory
    const { abi, evm } = await compileContract(contractPath);
    const factory = new ContractFactory(abi, evm.bytecode.object, signer);
    const contract = await factory.deploy();
    // wait until the deployment transaction has been mined
    await contract.deployed();
    return contract;
};
2003
/**
 * Verify a ceremony validity
 * 1. Download all artifacts
 * 2. Verify that the zkeys are valid
 * 3. Extract the verifier and the vKey
 * 4. Generate a proof and verify it locally
 * 5. Deploy Verifier contract and verify the proof on-chain
 * @param functions <Functions> firebase functions instance
 * @param firestore <Firestore> firebase firestore instance
 * @param ceremonyPrefix <string> ceremony prefix
 * @param outputDirectory <string> output directory where to store the ceremony artifacts
 * @param circuitInputsPath <string> path to the circuit inputs file
 * @param verifierTemplatePath <string> path to the verifier template file
 * @param signer <Signer> signer for contract interaction
 * @param logger <any> logger for printing snarkjs output
 * @throws Error when any artifact is missing or any verification step fails.
 */
const verifyCeremony = async (functions, firestore, ceremonyPrefix, outputDirectory, circuitInputsPath, verifierTemplatePath, signer, logger) => {
    // 1. download all ceremony artifacts
    const ceremonyArtifacts = await downloadAllCeremonyArtifacts(functions, firestore, ceremonyPrefix, outputDirectory);
    // if there are no ceremony artifacts, we throw an error
    if (ceremonyArtifacts.length === 0)
        throw new Error("There was an error while downloading all ceremony artifacts. Please review your ceremony prefix and try again.");
    // extract the circuit inputs
    if (!fs.existsSync(circuitInputsPath))
        throw new Error("The circuit inputs file does not exist. Please check the path and try again.");
    // assumes one inputs object per circuit, ordered like the downloaded artifacts — TODO confirm with callers
    const circuitsInputs = JSON.parse(fs.readFileSync(circuitInputsPath).toString());
    // find the ceremony given the prefix
    const ceremonyQuery = await queryCollection(firestore, commonTerms.collections.ceremonies.name, [
        where(commonTerms.collections.ceremonies.fields.prefix, "==", ceremonyPrefix)
    ]);
    // get the ceremony data - no need to do an existence check as
    // we already checked that the ceremony exists in downloadAllCeremonyArtifacts
    const ceremonyData = fromQueryToFirebaseDocumentInfo(ceremonyQuery.docs);
    const ceremony = ceremonyData.at(0);
    // this is required to re-generate the final zKey
    const { coordinatorId } = ceremony.data;
    const ceremonyId = ceremony.id;
    // we verify each circuit separately; entries() yields the artifact index directly,
    // avoiding the O(n^2) per-iteration indexOf lookup of the previous implementation
    for (const [inputIndex, ceremonyArtifact] of ceremonyArtifacts.entries()) {
        // 2. verify the final zKey
        const isValid = await verifyZKey(ceremonyArtifact.r1csLocalFilePath, ceremonyArtifact.finalZkeyLocalFilePath, ceremonyArtifact.potLocalFilePath, logger);
        if (!isValid)
            throw new Error(`The zkey for Circuit ${ceremonyArtifact.circuitPrefix} is not valid. Please check that the artifact is correct. If not, you might have to re run the final contribution to compute a valid final zKey.`);
        // 3. get the final contribution beacon
        const contributionBeacon = await getFinalContributionBeacon(firestore, ceremonyId, ceremonyArtifact.circuitId, coordinatorId);
        const generatedFinalZkeyPath = `${ceremonyArtifact.directoryRoot}/${ceremonyArtifact.circuitPrefix}_${finalContributionIndex}_verification.zkey`;
        // 4. re generate the zkey using the beacon and check hashes
        await generateZkeyFromScratch(true, ceremonyArtifact.r1csLocalFilePath, ceremonyArtifact.potLocalFilePath, generatedFinalZkeyPath, logger, ceremonyArtifact.lastZkeyLocalFilePath, coordinatorId, contributionBeacon);
        const zKeysMatching = await compareHashes(generatedFinalZkeyPath, ceremonyArtifact.finalZkeyLocalFilePath);
        if (!zKeysMatching)
            throw new Error(`The final zkey for the Circuit ${ceremonyArtifact.circuitPrefix} does not match the one generated from the beacon. Please confirm manually by downloading from the S3 bucket.`);
        // 5. extract the verifier and the vKey
        const verifierLocalPath = `${ceremonyArtifact.directoryRoot}/${ceremonyArtifact.circuitPrefix}_${verifierSmartContractAcronym}_verification.sol`;
        const vKeyLocalPath = `${ceremonyArtifact.directoryRoot}/${ceremonyArtifact.circuitPrefix}_${verificationKeyAcronym}_verification.json`;
        await exportVerifierAndVKey(ceremonyArtifact.finalZkeyLocalFilePath, verifierLocalPath, vKeyLocalPath, verifierTemplatePath);
        // 6. verify that the generated verifier and vkey match the ones downloaded from S3
        const verifierMatching = await compareHashes(verifierLocalPath, ceremonyArtifact.verifierLocalFilePath);
        if (!verifierMatching)
            throw new Error(`The verifier contract for the Contract ${ceremonyArtifact.circuitPrefix} does not match the one downloaded from S3. Please confirm manually by downloading from the S3 bucket.`);
        const vKeyMatching = await compareHashes(vKeyLocalPath, ceremonyArtifact.verificationKeyLocalFilePath);
        if (!vKeyMatching)
            throw new Error(`The verification key for the Contract ${ceremonyArtifact.circuitPrefix} does not match the one downloaded from S3. Please confirm manually by downloading from the S3 bucket.`);
        // 7. generate a proof and verify it locally (use either of the downloaded or generated as the hashes will have matched at this point)
        const { proof, publicSignals } = await generateGROTH16Proof(circuitsInputs[inputIndex], ceremonyArtifact.finalZkeyLocalFilePath, ceremonyArtifact.wasmLocalFilePath, logger);
        const isProofValid = await verifyGROTH16Proof(vKeyLocalPath, publicSignals, proof);
        if (!isProofValid)
            throw new Error(`Could not verify the proof for Circuit ${ceremonyArtifact.circuitPrefix}. Please check that the artifacts are correct as well as the inputs to the circuit, and try again.`);
        // 8. deploy Verifier contract and verify the proof on-chain
        const verifierContract = await deployVerifierContract(verifierLocalPath, signer);
        // formatSolidityCalldata is synchronous, so no await is needed here
        const formattedProof = formatSolidityCalldata(publicSignals, proof);
        const isProofValidOnChain = await verifyGROTH16ProofOnChain(verifierContract, formattedProof);
        if (!isProofValidOnChain)
            throw new Error(`Could not verify the proof on-chain for Circuit ${ceremonyArtifact.circuitPrefix}. Please check that the artifacts are correct as well as the inputs to the circuit, and try again.`);
    }
};
2080
+
2081
/**
 * This function will return the number of public repos of a user
 * @dev counts the entries of the API response, so only the first page of results is
 * inspected (capped by the API default page size) — TODO confirm this is acceptable.
 * NOTE(review): GitHub's documented public endpoint is /users/{username}/repos; the
 * /user/{id}/... form resolves numeric account ids — confirm which identifier callers pass.
 * @param user <string> The username of the user
 * @returns <number> The number of public repos
 */
const getNumberOfPublicReposGitHub = async (user) => {
    const response = await fetch(`https://api.github.com/user/${user}/repos`, {
        method: "GET",
        headers: { Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}` }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the number of public repositories. Please try again.");
    const repositories = await response.json();
    return repositories.length;
};
2098
/**
 * This function will return the number of followers of a user
 * @dev counts the entries of the API response, so only the first page of results is
 * inspected (capped by the API default page size) — TODO confirm this is acceptable.
 * NOTE(review): GitHub's documented public endpoint is /users/{username}/followers; the
 * /user/{id}/... form resolves numeric account ids — confirm which identifier callers pass.
 * @param user <string> The username of the user
 * @returns <number> The number of followers
 */
const getNumberOfFollowersGitHub = async (user) => {
    const response = await fetch(`https://api.github.com/user/${user}/followers`, {
        method: "GET",
        headers: { Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}` }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the number of followers. Please try again.");
    const followers = await response.json();
    return followers.length;
};
2115
/**
 * This function will return the number of following of a user
 * @dev counts the entries of the API response, so only the first page of results is
 * inspected (capped by the API default page size) — TODO confirm this is acceptable.
 * NOTE(review): GitHub's documented public endpoint is /users/{username}/following; the
 * /user/{id}/... form resolves numeric account ids — confirm which identifier callers pass.
 * @param user <string> The username of the user
 * @returns <number> The number of following users
 */
const getNumberOfFollowingGitHub = async (user) => {
    const response = await fetch(`https://api.github.com/user/${user}/following`, {
        method: "GET",
        headers: { Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}` }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the number of following. Please try again.");
    const following = await response.json();
    return following.length;
};
2132
/**
 * This function will check if the user is reputable enough to be able to use the app
 * @param userLogin <string> The username of the user
 * @param minimumAmountOfFollowing <number> The minimum amount of following the user should have
 * @param minimumAmountOfFollowers <number> The minimum amount of followers the user should have
 * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
 * @returns <boolean> True if the user is reputable enough, false otherwise
 * @throws Error when the GITHUB_ACCESS_TOKEN environment variable is not set.
 */
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos) => {
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registration, and then try again.");
    // the three lookups are independent, so run them in parallel instead of serially
    const [following, repos, followers] = await Promise.all([
        getNumberOfFollowingGitHub(userLogin),
        getNumberOfPublicReposGitHub(userLogin),
        getNumberOfFollowersGitHub(userLogin)
    ]);
    return (following >= minimumAmountOfFollowing &&
        repos >= minimumAmountOfPublicRepos &&
        followers >= minimumAmountOfFollowers);
};
2152
+
2153
/**
 * Define different states of a ceremony.
 * @enum {string}
 * - SCHEDULED: ceremony setup completed but the contribution period has not started yet.
 * - OPENED: the contribution period is running.
 * - PAUSED: the coordinator manually paused the ceremony (NB. currently not possible because the relevant functionality has not yet been implemented).
 * - CLOSED: the contribution period has finished.
 * - FINALIZED: the ceremony finalization has been properly completed.
 */
var CeremonyState;
CeremonyState = CeremonyState || {};
CeremonyState.SCHEDULED = "SCHEDULED";
CeremonyState.OPENED = "OPENED";
CeremonyState.PAUSED = "PAUSED";
CeremonyState.CLOSED = "CLOSED";
CeremonyState.FINALIZED = "FINALIZED";
2170
/**
 * Define the type of Trusted Setup ceremony (Phase 1 or Phase 2).
 * @enum {string}
 * - PHASE1: a Phase 1 Trusted Setup ceremony.
 * - PHASE2: a Phase 2 Trusted Setup ceremony.
 */
var CeremonyType;
CeremonyType = CeremonyType || {};
CeremonyType.PHASE1 = "PHASE1";
CeremonyType.PHASE2 = "PHASE2";
2181
/**
 * Define different status of a participant.
 * @enum {string}
 * - CREATED: the participant document has been created in the database.
 * - WAITING: the participant is waiting for a contribution (queued, or waiting for a status check after a timeout expiration).
 * - READY: the participant is ready for a contribution.
 * - CONTRIBUTING: the participant is the current contributor (no longer queued).
 * - CONTRIBUTED: the participant completed the contribution for all circuits; it may still be waiting for the latest contribution verification.
 * - DONE: the participant completed contributions and verifications from coordinator.
 * - FINALIZING: the coordinator is currently finalizing the ceremony.
 * - FINALIZED: the coordinator has successfully finalized the ceremony.
 * - TIMEDOUT: the participant timed out while contributing (network/memory issues, crash, or over-long contribution).
 * - EXHUMED: the participant is ready to resume the contribution after a timeout expiration.
 */
var ParticipantStatus;
ParticipantStatus = ParticipantStatus || {};
ParticipantStatus.CREATED = "CREATED";
ParticipantStatus.WAITING = "WAITING";
ParticipantStatus.READY = "READY";
ParticipantStatus.CONTRIBUTING = "CONTRIBUTING";
ParticipantStatus.CONTRIBUTED = "CONTRIBUTED";
ParticipantStatus.DONE = "DONE";
ParticipantStatus.FINALIZING = "FINALIZING";
ParticipantStatus.FINALIZED = "FINALIZED";
ParticipantStatus.TIMEDOUT = "TIMEDOUT";
ParticipantStatus.EXHUMED = "EXHUMED";
2208
/**
 * Define different steps during which the participant may be during the contribution.
 * @enum {string}
 * - DOWNLOADING: downloading the last contribution (from the previous participant).
 * - COMPUTING: actively computing the contribution.
 * - UPLOADING: uploading the computed contribution.
 * - VERIFYING: waiting for verification results from the coordinator.
 * - COMPLETED: verification results received and contribution steps completed.
 */
var ParticipantContributionStep;
ParticipantContributionStep = ParticipantContributionStep || {};
ParticipantContributionStep.DOWNLOADING = "DOWNLOADING";
ParticipantContributionStep.COMPUTING = "COMPUTING";
ParticipantContributionStep.UPLOADING = "UPLOADING";
ParticipantContributionStep.VERIFYING = "VERIFYING";
ParticipantContributionStep.COMPLETED = "COMPLETED";
2225
/**
 * Define what type of timeout was performed.
 * @enum {string}
 * - BLOCKING_CONTRIBUTION: the current contributor was blocking the waiting queue.
 * - BLOCKING_CLOUD_FUNCTION: the contribution verification went beyond the time limit.
 */
var TimeoutType;
TimeoutType = TimeoutType || {};
TimeoutType.BLOCKING_CONTRIBUTION = "BLOCKING_CONTRIBUTION";
TimeoutType.BLOCKING_CLOUD_FUNCTION = "BLOCKING_CLOUD_FUNCTION";
2236
/**
 * Define what type of timeout mechanism is currently adopted for a ceremony.
 * @enum {string}
 * - DYNAMIC: self-update approach based on latest contribution time.
 * - FIXED: approach based on a fixed amount of time.
 */
var CeremonyTimeoutType;
CeremonyTimeoutType = CeremonyTimeoutType || {};
CeremonyTimeoutType.DYNAMIC = "DYNAMIC";
CeremonyTimeoutType.FIXED = "FIXED";
2247
/**
 * Define request type for pre-signed urls.
 * @enum {string}
 */
var RequestType;
RequestType = RequestType || {};
RequestType.PUT = "PUT";
RequestType.GET = "GET";
2255
/**
 * Define the environment in use when testing.
 * @enum {string}
 * - DEVELOPMENT: tests are performed on the local Firebase emulator instance.
 * - PRODUCTION: tests are performed on the remote (deployed) Firebase application.
 */
var TestingEnvironment;
TestingEnvironment = TestingEnvironment || {};
TestingEnvironment.DEVELOPMENT = "DEVELOPMENT";
TestingEnvironment.PRODUCTION = "PRODUCTION";
2266
/**
 * Define what type of contribution verification mechanism is currently adopted for a circuit.
 * @enum {string}
 * - CF: Cloud Functions.
 * - VM: Virtual Machine.
 */
var CircuitContributionVerificationMechanism;
CircuitContributionVerificationMechanism = CircuitContributionVerificationMechanism || {};
CircuitContributionVerificationMechanism.CF = "CF";
CircuitContributionVerificationMechanism.VM = "VM";
2277
/**
 * Define the supported VM volume types.
 * @dev the VM volume types can be retrieved at https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-volume-types.html
 * @enum {string}
 * - GP2: General Purpose SSD version 2.
 * - GP3: General Purpose SSD version 3.
 * - IO1: Provisioned IOPS SSD volumes version 1.
 * - ST1: Throughput Optimized HDD volumes.
 * - SC1: Cold HDD volumes.
 */
var DiskTypeForVM;
DiskTypeForVM = DiskTypeForVM || {};
DiskTypeForVM.GP2 = "gp2";
DiskTypeForVM.GP3 = "gp3";
DiskTypeForVM.IO1 = "io1";
DiskTypeForVM.ST1 = "st1";
DiskTypeForVM.SC1 = "sc1";
2295
+
2296
dotenv.config();
/**
 * Create a new AWS EC2 client.
 * @returns <Promise<EC2Client>> - the EC2 client instance.
 * @throws Error (from getAWSVariables) when the AWS environment variables are missing.
 */
const createEC2Client = async () => {
    // read the credentials and region from the environment, then build the client
    const { accessKeyId, secretAccessKey, region } = getAWSVariables();
    const credentials = { accessKeyId, secretAccessKey };
    return new EC2Client({ credentials, region });
};
2313
/**
 * Create a new AWS SSM client.
 * @returns <Promise<SSMClient>> - the SSM client instance.
 * @throws Error (from getAWSVariables) when the AWS environment variables are missing.
 */
const createSSMClient = async () => {
    // read the credentials and region from the environment, then build the client
    const { accessKeyId, secretAccessKey, region } = getAWSVariables();
    const credentials = { accessKeyId, secretAccessKey };
    return new SSMClient({ credentials, region });
};
2329
/**
 * Return the list of bootstrap commands to be executed.
 * @dev the startup commands must be suitable for a shell script.
 * @param bucketName <string> - the name of the AWS S3 bucket.
 * @returns <Array<string>> - the list of startup commands to be executed.
 */
const vmBootstrapCommand = (bucketName) => {
    // fetch the bootstrap script from the bucket, then make it executable and run it
    const download = `aws s3 cp s3://${bucketName}/${vmBootstrapScriptFilename} ${vmBootstrapScriptFilename}`;
    const grantAndRun = `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}`;
    return ["#!/bin/bash", download, grantAndRun];
};
2340
/**
 * Return the list of Node environment (and packages) installation plus artifact caching for contribution verification.
 * @param zKeyPath <string> - the path to zKey artifact inside AWS S3 bucket.
 * @param potPath <string> - the path to ptau artifact inside AWS S3 bucket.
 * @param snsTopic <string> - the SNS topic ARN.
 * @param region <string> - the AWS region.
 * @returns <Array<string>> - the array of commands to be run by the EC2 instance.
 */
const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, region) => {
    // run-once guard: everything below only executes when the marker file is absent
    /* eslint-disable no-template-curly-in-string */
    const runOnceGuard = [
        "#!/bin/bash",
        'MARKER_FILE="/var/run/my_script_ran"',
        "if [ -e ${MARKER_FILE} ]; then",
        "exit 0",
        "else",
        "touch ${MARKER_FILE}"
    ];
    /* eslint-enable no-template-curly-in-string */
    // Node.js toolchain and snarkjs installation
    const toolchainSetup = [
        "sudo yum update -y",
        "curl -sL https://rpm.nodesource.com/setup_16.x | sudo bash - ",
        "sudo yum install -y nodejs",
        "npm install -g snarkjs"
    ];
    // cache the ceremony artifacts and the blake3 hashing binary locally
    const artifactCaching = [
        `aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
        `aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
        "wget https://github.com/BLAKE3-team/BLAKE3/releases/download/1.4.0/b3sum_linux_x64_bin -O /var/tmp/blake3.bin",
        "chmod +x /var/tmp/blake3.bin"
    ];
    // announce readiness by publishing the instance id to the SNS topic, then close the guard
    const readinessNotification = [
        "INSTANCE_ID=$(ec2-metadata -i | awk '{print $2}')",
        `aws sns publish --topic-arn ${snsTopic} --message "$INSTANCE_ID" --region ${region}`,
        "fi"
    ];
    return [...runOnceGuard, ...toolchainSetup, ...artifactCaching, ...readinessNotification];
};
2369
/**
 * Return the list of commands for contribution verification.
 * @dev this method generates the verification transcript as well.
 * @param bucketName <string> - the name of the AWS S3 bucket.
 * @param lastZkeyStoragePath <string> - the last zKey storage path.
 * @param verificationTranscriptStoragePathAndFilename <string> - the verification transcript storage path.
 * @returns Array<string> - the list of commands for contribution verification.
 */
const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => {
    // download the latest zKey, verify it against the cached genesis zKey and ptau file,
    // upload the transcript, print its blake3 hash, then clean up the temporary files
    const downloadLastZkey = `aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} /var/tmp/lastZKey.zkey > /var/tmp/log.txt`;
    const verifyContribution = `snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau /var/tmp/lastZKey.zkey > /var/tmp/verification_transcript.log`;
    const uploadTranscript = `aws s3 cp /var/tmp/verification_transcript.log s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`;
    const hashTranscript = `/var/tmp/blake3.bin /var/tmp/verification_transcript.log | awk '{print $1}'`;
    const cleanup = `rm /var/tmp/lastZKey.zkey /var/tmp/verification_transcript.log /var/tmp/log.txt &>/dev/null`;
    return [downloadLastZkey, verifyContribution, uploadTranscript, hashTranscript, cleanup];
};
2384
/**
 * Compute the VM disk size.
 * @dev the disk size is computed using the zKey size in bytes taking into consideration
 * the verification task (2 * zKeySize) + ptauSize + OS/VM (~8GB).
 * @param zKeySizeInBytes <number> the size of the zKey in bytes.
 * @param pot <number> the amount of powers needed for the circuit (index of the PPoT file).
 * @return <number> the configuration of the VM disk size in GB.
 */
const computeDiskSizeForVM = (zKeySizeInBytes, pot) => {
    // Verification needs room for roughly two copies of the zKey.
    const zKeySizeGb = convertBytesOrKbToGb(zKeySizeInBytes, true);
    // Size (GB) of the matching powers-of-tau file (1-indexed by `pot`).
    const ptauSizeGb = powersOfTauFiles[pot - 1].size;
    // Extra headroom for the OS / VM image.
    const osOverheadGb = 8;
    return Math.ceil(2 * zKeySizeGb + ptauSizeGb) + osOverheadGb;
};
2393
/**
 * Creates a new EC2 instance.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param commands <Array<string>> - the list of commands to be run on the EC2 instance at startup.
 * @param instanceType <string> - the type of the EC2 VM instance.
 * @param volumeSize <number> - the size (GB) of the disk (volume) of the VM.
 * @param diskType <DiskTypeForVM> - the type of the disk (volume) of the VM.
 * @returns <Promise<P0tionEC2Instance>> the instance that was created.
 */
const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
    // Get the AWS variables (AMI identifier and IAM role ARN).
    const { amiId, roleArn } = getAWSVariables();
    // Parametrize the VM EC2 instance.
    const params = {
        ImageId: amiId,
        InstanceType: instanceType,
        MaxCount: 1,
        MinCount: 1,
        // nb. to find this: iam -> roles -> role_name.
        IamInstanceProfile: {
            Arn: roleArn
        },
        // nb. for running commands at the startup (EC2 user data must be base64-encoded).
        UserData: Buffer.from(commands.join("\n")).toString("base64"),
        BlockDeviceMappings: [
            {
                DeviceName: "/dev/xvda",
                Ebs: {
                    DeleteOnTermination: true,
                    VolumeSize: volumeSize,
                    VolumeType: diskType
                }
            }
        ],
        // Tag the resource so it can be identified (and cleaned up) later.
        TagSpecifications: [
            {
                ResourceType: "instance",
                Tags: [
                    {
                        Key: "Name",
                        Value: ec2InstanceTag
                    },
                    {
                        Key: "Initialized",
                        Value: "false"
                    }
                ]
            }
        ]
    };
    try {
        // Create a new command instance.
        const command = new RunInstancesCommand(params);
        // Send the command for execution.
        const response = await ec2.send(command);
        if (response.$metadata.httpStatusCode !== 200)
            // nb. interpolating the raw response object would print "[object Object]"; serialize the metadata instead.
            throw new Error(`Something went wrong when creating the EC2 instance. More details ${JSON.stringify(response.$metadata)}`);
        // Create a new EC2 VM instance descriptor from the response.
        return {
            instanceId: response.Instances[0].InstanceId,
            imageId: response.Instances[0].ImageId,
            instanceType: response.Instances[0].InstanceType,
            keyName: response.Instances[0].KeyName,
            launchTime: response.Instances[0].LaunchTime.toISOString()
        };
    }
    catch (error) {
        throw new Error(`Something went wrong when creating the EC2 instance. More details ${error}`);
    }
};
2464
/**
 * Check if the current VM EC2 instance is running by querying the status.
 * @param ec2Client <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @returns <Promise<boolean>> - true if the current status of the EC2 VM instance is 'running'; otherwise false.
 */
const checkIfRunning = async (ec2Client, instanceId) => {
    // Generate a new describe status command.
    const command = new DescribeInstanceStatusCommand({
        InstanceIds: [instanceId]
    });
    // Run the command.
    const response = await ec2Client.send(command);
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when retrieving the EC2 instance (${instanceId}) status. More details ${JSON.stringify(response.$metadata)}`);
    // nb. DescribeInstanceStatus only reports running instances by default, so the
    // statuses array may be empty (e.g. instance pending/stopped); treat that as
    // "not running" instead of crashing on an undefined element.
    return response.InstanceStatuses?.[0]?.InstanceState?.Name === "running";
};
2481
/**
 * Start an EC2 VM instance.
 * @dev the instance must have been created previously.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 */
const startEC2Instance = async (ec2, instanceId) => {
    // Ask AWS to start the (already created) VM and check the HTTP outcome.
    const response = await ec2.send(new StartInstancesCommand({ InstanceIds: [instanceId], DryRun: false }));
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when starting the EC2 instance (${instanceId}). More details ${response}`);
};
2498
/**
 * Stop an EC2 VM instance.
 * @dev the instance must have been in a running status.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 */
const stopEC2Instance = async (ec2, instanceId) => {
    // Request the shutdown of the running VM and check the HTTP outcome.
    const response = await ec2.send(new StopInstancesCommand({ InstanceIds: [instanceId], DryRun: false }));
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when stopping the EC2 instance (${instanceId}). More details ${response}`);
};
2515
/**
 * Terminate an EC2 VM instance.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 */
const terminateEC2Instance = async (ec2, instanceId) => {
    // Request the permanent termination of the VM and check the HTTP outcome.
    const response = await ec2.send(new TerminateInstancesCommand({ InstanceIds: [instanceId], DryRun: false }));
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when terminating the EC2 instance (${instanceId}). More details ${response}`);
};
2531
/**
 * Run a command on an EC2 VM instance by using SSM.
 * @dev this method returns the command identifier for checking the status and retrieve
 * the output of the command execution later on.
 * @param ssm <SSMClient> - the instance of the SSM client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @param commands <Array<string>> - the list of commands.
 * @return <Promise<string>> - the unique identifier of the command.
 */
const runCommandUsingSSM = async (ssm, instanceId, commands) => {
    try {
        // Dispatch the shell commands through the AWS-managed run-shell document,
        // giving the execution up to 20 minutes before SSM times it out.
        const { Command } = await ssm.send(new SendCommandCommand({
            DocumentName: "AWS-RunShellScript",
            InstanceIds: [instanceId],
            Parameters: {
                commands
            },
            TimeoutSeconds: 1200
        }));
        return Command.CommandId;
    }
    catch (error) {
        throw new Error(`Something went wrong when trying to run a command on the EC2 instance. More details ${error}`);
    }
};
2563
/**
 * Get the output of an SSM command executed on an EC2 VM instance.
 * @param ssm <SSMClient> - the instance of the SSM client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @param commandId <string> - the unique identifier of the command.
 * @return <Promise<string>> - the command output.
 */
const retrieveCommandOutput = async (ssm, instanceId, commandId) => {
    try {
        // Query the invocation record for this (command, instance) pair and
        // return its captured standard output.
        const invocation = await ssm.send(new GetCommandInvocationCommand({
            CommandId: commandId,
            InstanceId: instanceId
        }));
        return invocation.StandardOutputContent;
    }
    catch (error) {
        throw new Error(`Something went wrong when trying to retrieve the command ${commandId} output on the EC2 instance (${instanceId}). More details ${error}`);
    }
};
2585
/**
 * Get the status of an SSM command executed on an EC2 VM instance.
 * @param ssm <SSMClient> - the instance of the SSM client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @param commandId <string> - the unique identifier of the command.
 * @return <Promise<string>> - the command status.
 */
const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
    try {
        // Query the invocation record for this (command, instance) pair and
        // return its execution status.
        const invocation = await ssm.send(new GetCommandInvocationCommand({
            CommandId: commandId,
            InstanceId: instanceId
        }));
        return invocation.Status;
    }
    catch (error) {
        throw new Error(`Something went wrong when trying to retrieve the command ${commandId} status on the EC2 instance (${instanceId}). More details ${error}`);
    }
};
2607
+
2608
+ export { CeremonyState, CeremonyTimeoutType, CeremonyType, CircuitContributionVerificationMechanism, DiskTypeForVM, ParticipantContributionStep, ParticipantStatus, RequestType, TestingEnvironment, TimeoutType, autoGenerateEntropy, blake512FromPath, checkAndPrepareCoordinatorForFinalization, checkIfObjectExist, checkIfRunning, checkParticipantForCeremony, commonTerms, compareCeremonyArtifacts, compareHashes, compileContract, completeMultiPartUpload, computeDiskSizeForVM, computeSHA256ToHex, computeSmallestPowersOfTauForCircuit, convertBytesOrKbToGb, convertToDoubleDigits, createCustomLoggerForFile, createEC2Client, createEC2Instance, createS3Bucket, createSSMClient, downloadAllCeremonyArtifacts, downloadCeremonyArtifact, ec2InstanceTag, exportVerifierAndVKey, exportVerifierContract, exportVkey, extractPoTFromFilename, extractPrefix, extractR1CSInfoValueForGivenKey, finalContributionIndex, finalizeCeremony, finalizeCircuit, formatSolidityCalldata, formatZkeyIndex, fromQueryToFirebaseDocumentInfo, generateGROTH16Proof, generateGetObjectPreSignedUrl, generatePreSignedUrlsParts, generateValidContributionsAttestation, generateZkeyFromScratch, genesisZkeyIndex, getAllCollectionDocs, getBucketName, getCeremonyCircuits, getCircuitBySequencePosition, getCircuitContributionsFromContributor, getCircuitsCollectionPath, getClosedCeremonies, getContributionsCollectionPath, getContributionsValidityForContributor, getCurrentActiveParticipantTimeout, getCurrentFirebaseAuthUser, getDocumentById, getOpenedCeremonies, getParticipantsCollectionPath, getPotStorageFilePath, getPublicAttestationPreambleForContributor, getR1CSInfo, getR1csStorageFilePath, getTimeoutsCollectionPath, getTranscriptStorageFilePath, getVerificationKeyStorageFilePath, getVerifierContractStorageFilePath, getWasmStorageFilePath, getZkeyStorageFilePath, githubReputation, initializeFirebaseCoreServices, isCoordinator, multiPartUpload, numExpIterations, p256, parseCeremonyFile, 
permanentlyStoreCurrentContributionTimeAndHash, potFileDownloadMainUrl, potFilenameTemplate, powersOfTauFiles, progressToNextCircuitForContribution, progressToNextContributionStep, queryCollection, resumeContributionAfterTimeoutExpiration, retrieveCommandOutput, retrieveCommandStatus, runCommandUsingSSM, setupCeremony, signInToFirebaseWithCredentials, solidityVersion, startEC2Instance, stopEC2Instance, temporaryStoreCurrentContributionMultiPartUploadId, temporaryStoreCurrentContributionUploadedChunkData, terminateEC2Instance, toHex, verificationKeyAcronym, verifierSmartContractAcronym, verifyCeremony, verifyContribution, verifyGROTH16Proof, verifyGROTH16ProofOnChain, verifyZKey, vmBootstrapCommand, vmBootstrapScriptFilename, vmConfigurationTypes, vmContributionVerificationCommand, vmDependenciesAndCacheArtifactsCommand };