@devtion/actions 0.0.0-7e983e3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/LICENSE +21 -0
  2. package/README.md +83 -0
  3. package/dist/index.mjs +2608 -0
  4. package/dist/index.node.js +2714 -0
  5. package/dist/types/hardhat.config.d.ts +6 -0
  6. package/dist/types/hardhat.config.d.ts.map +1 -0
  7. package/dist/types/src/helpers/authentication.d.ts +21 -0
  8. package/dist/types/src/helpers/authentication.d.ts.map +1 -0
  9. package/dist/types/src/helpers/constants.d.ts +194 -0
  10. package/dist/types/src/helpers/constants.d.ts.map +1 -0
  11. package/dist/types/src/helpers/contracts.d.ts +57 -0
  12. package/dist/types/src/helpers/contracts.d.ts.map +1 -0
  13. package/dist/types/src/helpers/crypto.d.ts +27 -0
  14. package/dist/types/src/helpers/crypto.d.ts.map +1 -0
  15. package/dist/types/src/helpers/database.d.ts +105 -0
  16. package/dist/types/src/helpers/database.d.ts.map +1 -0
  17. package/dist/types/src/helpers/functions.d.ts +145 -0
  18. package/dist/types/src/helpers/functions.d.ts.map +1 -0
  19. package/dist/types/src/helpers/security.d.ts +10 -0
  20. package/dist/types/src/helpers/security.d.ts.map +1 -0
  21. package/dist/types/src/helpers/services.d.ts +38 -0
  22. package/dist/types/src/helpers/services.d.ts.map +1 -0
  23. package/dist/types/src/helpers/storage.d.ts +121 -0
  24. package/dist/types/src/helpers/storage.d.ts.map +1 -0
  25. package/dist/types/src/helpers/tasks.d.ts +2 -0
  26. package/dist/types/src/helpers/tasks.d.ts.map +1 -0
  27. package/dist/types/src/helpers/utils.d.ts +139 -0
  28. package/dist/types/src/helpers/utils.d.ts.map +1 -0
  29. package/dist/types/src/helpers/verification.d.ts +95 -0
  30. package/dist/types/src/helpers/verification.d.ts.map +1 -0
  31. package/dist/types/src/helpers/vm.d.ts +112 -0
  32. package/dist/types/src/helpers/vm.d.ts.map +1 -0
  33. package/dist/types/src/index.d.ts +15 -0
  34. package/dist/types/src/index.d.ts.map +1 -0
  35. package/dist/types/src/types/enums.d.ts +133 -0
  36. package/dist/types/src/types/enums.d.ts.map +1 -0
  37. package/dist/types/src/types/index.d.ts +603 -0
  38. package/dist/types/src/types/index.d.ts.map +1 -0
  39. package/package.json +87 -0
  40. package/src/helpers/authentication.ts +37 -0
  41. package/src/helpers/constants.ts +312 -0
  42. package/src/helpers/contracts.ts +268 -0
  43. package/src/helpers/crypto.ts +55 -0
  44. package/src/helpers/database.ts +221 -0
  45. package/src/helpers/functions.ts +438 -0
  46. package/src/helpers/security.ts +86 -0
  47. package/src/helpers/services.ts +83 -0
  48. package/src/helpers/storage.ts +329 -0
  49. package/src/helpers/tasks.ts +56 -0
  50. package/src/helpers/utils.ts +743 -0
  51. package/src/helpers/verification.ts +354 -0
  52. package/src/helpers/vm.ts +392 -0
  53. package/src/index.ts +162 -0
  54. package/src/types/enums.ts +141 -0
  55. package/src/types/index.ts +650 -0
@@ -0,0 +1,2714 @@
1
+ /**
2
+ * @module @p0tion/actions
3
+ * @version 1.0.5
4
+ * @file A set of actions and helpers for CLI commands
5
+ * @copyright Ethereum Foundation 2022
6
+ * @license MIT
7
+ * @see [Github]{@link https://github.com/privacy-scaling-explorations/p0tion}
8
+ */
9
+ 'use strict';
10
+
11
+ var mime = require('mime-types');
12
+ var fs = require('fs');
13
+ var fetch = require('@adobe/node-fetch-retry');
14
+ var https = require('https');
15
+ var functions = require('firebase/functions');
16
+ var firestore = require('firebase/firestore');
17
+ var snarkjs = require('snarkjs');
18
+ var crypto = require('crypto');
19
+ var blake = require('blakejs');
20
+ var ffjavascript = require('ffjavascript');
21
+ var winston = require('winston');
22
+ var clientS3 = require('@aws-sdk/client-s3');
23
+ var stream = require('stream');
24
+ var util = require('util');
25
+ var app = require('firebase/app');
26
+ var auth = require('firebase/auth');
27
+ var ethers = require('ethers');
28
+ var solc = require('solc');
29
+ var clientEc2 = require('@aws-sdk/client-ec2');
30
+ var clientSsm = require('@aws-sdk/client-ssm');
31
+ var dotenv = require('dotenv');
32
+
33
// Base URL for downloading the Hermez Phase 1 Trusted Setup PoT files.
const potFileDownloadMainUrl = `https://hermez.s3-eu-west-1.amazonaws.com/`;
// Shared filename prefix of the Hermez Phase 1 Trusted Setup PoT files to be downloaded.
const potFilenameTemplate = `powersOfTau28_hez_final_`;
// The genesis zKey index (first contribution slot).
const genesisZkeyIndex = `00000`;
// The number of exponential iterations to be executed by SnarkJS when finalizing the ceremony.
const numExpIterations = 10;
// The Solidity version of the Verifier Smart Contract generated with SnarkJS when finalizing the ceremony.
const solidityVersion = "0.8.0";
// The index of the final zKey.
const finalContributionIndex = "final";
// The acronym for verification key.
const verificationKeyAcronym = "vkey";
// The acronym for Verifier smart contract.
const verifierSmartContractAcronym = "verifier";
// The tag applied to the ceremony EC2 instances.
const ec2InstanceTag = "p0tionec2instance";
// The name of the VM startup (bootstrap) script file.
const vmBootstrapScriptFilename = "bootstrap.sh";
53
/**
 * Define the supported VM configuration types.
 * @dev the VM configurations can be retrieved at https://aws.amazon.com/ec2/instance-types/
 * The on-demand prices for the configurations can be retrieved at https://aws.amazon.com/ec2/pricing/on-demand/.
 * @notice the price has to be intended as on-demand hourly billing usage for Linux OS
 * VMs located in the us-east-1 region expressed in USD.
 * Each entry maps a configuration key to { type, ram (GB), vcpu (count), pricePerHour (USD) }.
 */
const vmConfigurationTypes = {
    // 2 vCPU / 8 GB.
    t3_large: { type: "t3.large", ram: 8, vcpu: 2, pricePerHour: 0.08352 },
    // 8 vCPU / 32 GB.
    t3_2xlarge: { type: "t3.2xlarge", ram: 32, vcpu: 8, pricePerHour: 0.3328 },
    // 36 vCPU / 72 GB.
    c5_9xlarge: { type: "c5.9xlarge", ram: 72, vcpu: 36, pricePerHour: 1.53 },
    // 72 vCPU / 144 GB.
    c5_18xlarge: { type: "c5.18xlarge", ram: 144, vcpu: 72, pricePerHour: 3.06 },
    // 32 vCPU / 64 GB.
    c5a_8xlarge: { type: "c5a.8xlarge", ram: 64, vcpu: 32, pricePerHour: 1.232 },
    // 128 vCPU / 256 GB.
    c6id_32xlarge: { type: "c6id.32xlarge", ram: 256, vcpu: 128, pricePerHour: 6.4512 },
    // 128 vCPU / 512 GB.
    m6a_32xlarge: { type: "m6a.32xlarge", ram: 512, vcpu: 128, pricePerHour: 5.5296 }
};
104
/**
 * Define the PPoT Trusted Setup ceremony output powers of tau files size (in GB).
 * @dev the powers of tau files can be retrieved at https://github.com/weijiekoh/perpetualpowersoftau
 * Entries are ordered by power (01..27); `ref` is the download URL and `size` the file size in GB.
 */
const powersOfTauFiles = [
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_01.ptau", size: 0.000084 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_02.ptau", size: 0.000086 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_03.ptau", size: 0.000091 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_04.ptau", size: 0.0001 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_05.ptau", size: 0.000117 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_06.ptau", size: 0.000153 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_07.ptau", size: 0.000225 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_08.ptau", size: 0.0004 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_09.ptau", size: 0.000658 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_10.ptau", size: 0.0013 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_11.ptau", size: 0.0023 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_12.ptau", size: 0.0046 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_13.ptau", size: 0.0091 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_14.ptau", size: 0.0181 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_15.ptau", size: 0.0361 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_16.ptau", size: 0.0721 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_17.ptau", size: 0.144 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_18.ptau", size: 0.288 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_19.ptau", size: 0.576 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_20.ptau", size: 1.1 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_21.ptau", size: 2.3 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_22.ptau", size: 4.5 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_23.ptau", size: 9.0 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_24.ptau", size: 18.0 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_25.ptau", size: 36.0 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_26.ptau", size: 72.0 },
    { ref: "https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_27.ptau", size: 144.0 }
];
218
/**
 * Commonly used terms.
 * @dev useful for creating paths, references to collections and queries, object properties, folder names, and so on.
 */
const commonTerms = {
    // Firestore collection names and the field names of their documents.
    collections: {
        users: {
            name: "users",
            fields: {
                creationTime: "creationTime",
                displayName: "displayName",
                email: "email",
                emailVerified: "emailVerified",
                lastSignInTime: "lastSignInTime",
                lastUpdated: "lastUpdated",
                name: "name",
                photoURL: "photoURL"
            }
        },
        participants: {
            name: "participants",
            fields: {
                contributionProgress: "contributionProgress",
                contributionStartedAt: "contributionStartedAt",
                contributionStep: "contributionStep",
                contributions: "contributions",
                lastUpdated: "lastUpdated",
                status: "status",
                verificationStartedAt: "verificationStartedAt"
            }
        },
        ceremonies: {
            name: "ceremonies",
            fields: {
                coordinatorId: "coordinatorId",
                description: "description",
                endDate: "endDate",
                lastUpdated: "lastUpdated",
                penalty: "penalty",
                prefix: "prefix",
                startDate: "startDate",
                state: "state",
                timeoutType: "timeoutType",
                title: "title",
                type: "type"
            }
        },
        circuits: {
            name: "circuits",
            fields: {
                avgTimings: "avgTimings",
                compiler: "compiler",
                description: "description",
                files: "files",
                lastUpdated: "lastUpdated",
                metadata: "metadata",
                name: "name",
                prefix: "prefix",
                sequencePosition: "sequencePosition",
                template: "template",
                timeoutMaxContributionWaitingTime: "timeoutMaxContributionWaitingTime",
                waitingQueue: "waitingQueue",
                zKeySizeInBytes: "zKeySizeInBytes",
                verification: "verification"
            }
        },
        contributions: {
            name: "contributions",
            fields: {
                contributionComputationTime: "contributionComputationTime",
                files: "files",
                lastUpdated: "lastUpdated",
                participantId: "participantId",
                valid: "valid",
                verificationComputationTime: "verificationComputationTime",
                // NOTE(review): property key and stored field name differ in case
                // ("zkeyIndex" vs "zKeyIndex") — presumably intentional; confirm before normalizing.
                zkeyIndex: "zKeyIndex"
            }
        },
        timeouts: {
            name: "timeouts",
            fields: {
                type: "type",
                startDate: "startDate",
                endDate: "endDate"
            }
        }
    },
    // Folder / storage-path segment names used when composing artifact paths.
    foldersAndPathsTerms: {
        output: `output`,
        setup: `setup`,
        contribute: `contribute`,
        finalize: `finalize`,
        pot: `pot`,
        zkeys: `zkeys`,
        wasm: `wasm`,
        vkeys: `vkeys`,
        metadata: `metadata`,
        transcripts: `transcripts`,
        attestation: `attestation`,
        verifiers: `verifiers`
    },
    // Deployed Firebase Cloud Function names (used with httpsCallable).
    cloudFunctionsNames: {
        setupCeremony: "setupCeremony",
        checkParticipantForCeremony: "checkParticipantForCeremony",
        progressToNextCircuitForContribution: "progressToNextCircuitForContribution",
        resumeContributionAfterTimeoutExpiration: "resumeContributionAfterTimeoutExpiration",
        createBucket: "createBucket",
        generateGetObjectPreSignedUrl: "generateGetObjectPreSignedUrl",
        progressToNextContributionStep: "progressToNextContributionStep",
        permanentlyStoreCurrentContributionTimeAndHash: "permanentlyStoreCurrentContributionTimeAndHash",
        startMultiPartUpload: "startMultiPartUpload",
        temporaryStoreCurrentContributionMultiPartUploadId: "temporaryStoreCurrentContributionMultiPartUploadId",
        temporaryStoreCurrentContributionUploadedChunkData: "temporaryStoreCurrentContributionUploadedChunkData",
        generatePreSignedUrlsParts: "generatePreSignedUrlsParts",
        completeMultiPartUpload: "completeMultiPartUpload",
        checkIfObjectExist: "checkIfObjectExist",
        // NOTE(review): deployed name is all-lowercase, unlike every other entry —
        // presumably matches the actual deployment; verify before "fixing" the case.
        verifyContribution: "verifycontribution",
        checkAndPrepareCoordinatorForFinalization: "checkAndPrepareCoordinatorForFinalization",
        finalizeCircuit: "finalizeCircuit",
        finalizeCeremony: "finalizeCeremony",
        downloadCircuitArtifacts: "downloadCircuitArtifacts",
        transferObject: "transferObject",
    }
};
342
+
343
/**
 * Setup a new ceremony by calling the related cloud function.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyInputData <CeremonyInputData> - the input data of the ceremony.
 * @param ceremonyPrefix <string> - the prefix of the ceremony.
 * @param circuits <Circuit[]> - the circuits data.
 * @returns Promise<string> - the unique identifier of the created ceremony.
 */
const setupCeremony = async (functions$1, ceremonyInputData, ceremonyPrefix, circuits) => {
    // Invoke the `setupCeremony` callable and unwrap the ceremony identifier it returns.
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.setupCeremony);
    const response = await callable({ ceremonyInputData, ceremonyPrefix, circuits });
    return String(response.data);
};
360
/**
 * Check the user's current participant status for the ceremony.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns <boolean> - true when participant is able to contribute; otherwise false.
 */
const checkParticipantForCeremony = async (functions$1, ceremonyId) => {
    // Delegate the status check to the `checkParticipantForCeremony` callable.
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.checkParticipantForCeremony);
    const response = await callable({ ceremonyId });
    return response.data;
};
371
/**
 * Progress the participant to the next circuit preparing for the next contribution.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 */
const progressToNextCircuitForContribution = async (functions$1, ceremonyId) => {
    // Fire-and-await the callable; it has no meaningful return value.
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.progressToNextCircuitForContribution)({
        ceremonyId
    });
};
382
/**
 * Resume the contributor circuit contribution from scratch after the timeout expiration.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 */
const resumeContributionAfterTimeoutExpiration = async (functions$1, ceremonyId) => {
    // Fire-and-await the callable; it has no meaningful return value.
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.resumeContributionAfterTimeoutExpiration)({
        ceremonyId
    });
};
393
/**
 * Make a request to create a new AWS S3 bucket for a ceremony.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param bucketName <string> - the name of the ceremony bucket.
 */
const createS3Bucket = async (functions$1, bucketName) => {
    // Bucket creation happens server-side via the `createBucket` callable.
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.createBucket);
    await callable({ bucketName });
};
402
/**
 * Return a pre-signed url for a given object contained inside the provided AWS S3 bucket in order to perform a GET request.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param bucketName <string> - the name of the ceremony bucket.
 * @param objectKey <string> - the storage path that locates the artifact to be downloaded in the bucket.
 * @returns <Promise<string>> - the pre-signed url w/ GET request permissions for the specified object key.
 */
const generateGetObjectPreSignedUrl = async (functions$1, bucketName, objectKey) => {
    // The url is generated server-side so that AWS credentials never reach the client.
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.generateGetObjectPreSignedUrl);
    const response = await callable({ bucketName, objectKey });
    return String(response.data);
};
417
/**
 * Progress the participant to the next contribution step for the current circuit.
 * (Fixed JSDoc: the previous description was copy-pasted from the circuit-progress helper.)
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 */
const progressToNextContributionStep = async (functions$1, ceremonyId) => {
    // Fire-and-await the callable; it has no meaningful return value.
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.progressToNextContributionStep)({
        ceremonyId
    });
};
428
/**
 * Write the information about current contribution hash and computation time for the current contributor.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param contributionComputationTime <number> - the time spent computing the contribution.
 * @param contributionHash <string> - the hash of the contribution.
 */
const permanentlyStoreCurrentContributionTimeAndHash = async (functions$1, ceremonyId, contributionComputationTime, contributionHash) => {
    const payload = { ceremonyId, contributionComputationTime, contributionHash };
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.permanentlyStoreCurrentContributionTimeAndHash)(payload);
};
443
/**
 * Start a new multi-part upload for a specific object in the given AWS S3 bucket.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param bucketName <string> - the name of the ceremony bucket.
 * @param objectKey <string> - the storage path that locates the artifact inside the bucket.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns Promise<string> - the multi-part upload id.
 */
const openMultiPartUpload = async (functions$1, bucketName, objectKey, ceremonyId) => {
    // Open the upload server-side via the `startMultiPartUpload` callable and unwrap the upload id.
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.startMultiPartUpload);
    const response = await callable({ bucketName, objectKey, ceremonyId });
    return String(response.data);
};
460
/**
 * Write temporary information about the unique identifier about the opened multi-part upload to eventually resume the contribution.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param uploadId <string> - the unique identifier of the multi-part upload.
 */
const temporaryStoreCurrentContributionMultiPartUploadId = async (functions$1, ceremonyId, uploadId) => {
    const payload = { ceremonyId, uploadId };
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.temporaryStoreCurrentContributionMultiPartUploadId)(payload);
};
473
/**
 * Write temporary information about the etags and part numbers for each uploaded chunk in order to make the upload resumable from last chunk.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param chunk <ETagWithPartNumber> - the information about the already uploaded chunk.
 */
const temporaryStoreCurrentContributionUploadedChunkData = async (functions$1, ceremonyId, chunk) => {
    const payload = { ceremonyId, chunk };
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.temporaryStoreCurrentContributionUploadedChunkData)(payload);
};
486
/**
 * Generate a new pre-signed url for each chunk related to a started multi-part upload.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param bucketName <string> - the name of the ceremony bucket.
 * @param objectKey <string> - the storage path that locates the artifact inside the bucket.
 * @param uploadId <string> - the unique identifier of the multi-part upload.
 * @param numberOfParts <number> - the number of pre-signed urls to be generated (one per chunk).
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns Promise<Array<string>> - the set of pre-signed urls (one for each chunk).
 */
const generatePreSignedUrlsParts = async (functions$1, bucketName, objectKey, uploadId, numberOfParts, ceremonyId) => {
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.generatePreSignedUrlsParts);
    const response = await callable({ bucketName, objectKey, uploadId, numberOfParts, ceremonyId });
    return response.data;
};
507
/**
 * Complete a multi-part upload for a specific object in the given AWS S3 bucket.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param bucketName <string> - the name of the ceremony bucket.
 * @param objectKey <string> - the storage path that locates the artifact inside the bucket.
 * @param uploadId <string> - the unique identifier of the multi-part upload.
 * @param parts Array<ETagWithPartNumber> - the uploaded parts (etag + part number for each chunk).
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns Promise<string> - the location of the uploaded ceremony artifact.
 */
const completeMultiPartUpload = async (functions$1, bucketName, objectKey, uploadId, parts, ceremonyId) => {
    // Call the completeMultiPartUpload() Cloud Function and unwrap the artifact location.
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.completeMultiPartUpload);
    const response = await callable({ bucketName, objectKey, uploadId, parts, ceremonyId });
    return String(response.data);
};
529
/**
 * Check if a specified object exist in a given AWS S3 bucket.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param bucketName <string> - the name of the ceremony bucket.
 * @param objectKey <string> - the storage path that locates the artifact inside the bucket.
 * @returns <Promise<boolean>> - true if and only if the object exists, otherwise false.
 */
const checkIfObjectExist = async (functions$1, bucketName, objectKey) => {
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.checkIfObjectExist);
    const response = await callable({ bucketName, objectKey });
    return response.data;
};
544
/**
 * Request to verify the newest contribution for the circuit.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param circuit <FirebaseDocumentInfo> - the document info about the circuit.
 * @param bucketName <string> - the name of the ceremony bucket.
 * @param contributorOrCoordinatorIdentifier <string> - the identifier of the contributor or coordinator (only when finalizing).
 * @param verifyContributionCloudFunctionEndpoint <string> - the endpoint (direct url) necessary to call the V2 Cloud Function.
 * @returns <Promise<void>>
 */
const verifyContribution = async (functions$1, ceremonyId, circuit, // any just to avoid breaking the tests.
bucketName, contributorOrCoordinatorIdentifier, verifyContributionCloudFunctionEndpoint) => {
    const cf = functions.httpsCallableFromURL(functions$1, verifyContributionCloudFunctionEndpoint, {
        timeout: 3600000 // max timeout 60 minutes.
    });
    /**
     * @dev Force a race condition to fix #57.
     * TL;DR if the cloud function does not return despite having finished its execution, we use
     * a listener on the circuit, we check and retrieve the info about the correct execution and
     * return it manually. In other cases, it will be the function that returns either a timeout in case it
     * remains in execution for too long.
     */
    await Promise.race([
        cf({
            ceremonyId,
            circuitId: circuit.id,
            contributorOrCoordinatorIdentifier,
            bucketName
        }),
        new Promise((resolve) => {
            setTimeout(() => {
                const unsubscribeToCeremonyCircuitListener = firestore.onSnapshot(circuit.ref, async (changedCircuit) => {
                    // Check data.
                    if (!circuit.data || !changedCircuit.data())
                        throw Error(`Unable to retrieve circuit data from the ceremony.`);
                    // Extract the post-verification data from the fresh snapshot.
                    const { avgTimings: changedAvgTimings, waitingQueue: changedWaitingQueue } = changedCircuit.data();
                    const { contributionComputation: changedContributionComputation, fullContribution: changedFullContribution, verifyCloudFunction: changedVerifyCloudFunction } = changedAvgTimings;
                    const { failedContributions: changedFailedContributions, completedContributions: changedCompletedContributions } = changedWaitingQueue;
                    // FIX: the previous values must come from the locally cached `circuit.data`,
                    // not from the fresh snapshot. The original code read both from
                    // `changedCircuit.data()`, so prev === changed and the pre-conditions
                    // below (`prev === changed - 1`) could never hold — this promise never resolved.
                    const { avgTimings: prevAvgTimings, waitingQueue: prevWaitingQueue } = circuit.data;
                    const { contributionComputation: prevContributionComputation, fullContribution: prevFullContribution, verifyCloudFunction: prevVerifyCloudFunction } = prevAvgTimings;
                    const { failedContributions: prevFailedContributions, completedContributions: prevCompletedContributions } = prevWaitingQueue;
                    // Pre-conditions: exactly one contribution (valid or invalid) was recorded
                    // and the average timings were refreshed by the verification.
                    const invalidContribution = prevFailedContributions === changedFailedContributions - 1;
                    const validContribution = prevCompletedContributions === changedCompletedContributions - 1;
                    const avgTimeUpdates = prevContributionComputation !== changedContributionComputation &&
                        prevFullContribution !== changedFullContribution &&
                        prevVerifyCloudFunction !== changedVerifyCloudFunction;
                    if ((invalidContribution || validContribution) && avgTimeUpdates) {
                        resolve({});
                    }
                });
                // Unsubscribe from listener.
                // NOTE(review): detaching immediately after registration relies on the
                // initial snapshot delivery — confirm this matches the intended behavior.
                unsubscribeToCeremonyCircuitListener();
            }, 3600000 - 1000); // 59:59 throws 1s before max time for CF execution.
        })
    ]);
};
602
/**
 * Prepare the coordinator for the finalization of the ceremony.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns <Promise<boolean>> - true when the coordinator is ready for finalization; otherwise false.
 */
const checkAndPrepareCoordinatorForFinalization = async (functions$1, ceremonyId) => {
    const callable = functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.checkAndPrepareCoordinatorForFinalization);
    const response = await callable({ ceremonyId });
    return response.data;
};
615
/**
 * Finalize the ceremony circuit.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param circuitId <string> - the unique identifier of the circuit.
 * @param bucketName <string> - the name of the ceremony bucket.
 * @param beacon <string> - the value used to compute the final contribution while finalizing the ceremony.
 */
const finalizeCircuit = async (functions$1, ceremonyId, circuitId, bucketName, beacon) => {
    const payload = { ceremonyId, circuitId, bucketName, beacon };
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.finalizeCircuit)(payload);
};
632
/**
 * Conclude the finalization of the ceremony.
 * @param functions$1 <Functions> - the Firebase cloud functions object instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 */
const finalizeCeremony = async (functions$1, ceremonyId) => {
    // Fire-and-await the callable; it has no meaningful return value.
    await functions.httpsCallable(functions$1, commonTerms.cloudFunctionsNames.finalizeCeremony)({
        ceremonyId
    });
};
643
+
644
/**
 * Return the bucket name based on ceremony prefix.
 * @param ceremonyPrefix <string> - the ceremony prefix.
 * @param ceremonyPostfix <string> - the ceremony postfix.
 * @returns <string> - the concatenation of prefix and postfix.
 */
const getBucketName = (ceremonyPrefix, ceremonyPostfix) => {
    return `${ceremonyPrefix}${ceremonyPostfix}`;
};
651
/**
 * Get chunks and signed urls related to an object that must be uploaded using a multi-part upload.
 * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
 * @param bucketName <string> - the name of the ceremony artifacts bucket (AWS S3).
 * @param objectKey <string> - the unique key to identify the object inside the given AWS S3 bucket.
 * @param localFilePath <string> - the local path of the artifact to be uploaded.
 * @param uploadId <string> - the unique identifier of the multi-part upload.
 * @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (expressed in MB).
 * @param [ceremonyId] <string> - the unique identifier of the ceremony.
 * @returns Promise<Array<ChunkWithUrl>> - the chunks with related pre-signed url.
 * @throws Error - when the file at `localFilePath` is empty (no chunks to upload).
 */
const getChunksAndPreSignedUrls = async (cloudFunctions, bucketName, objectKey, localFilePath, uploadId, configStreamChunkSize, ceremonyId) => {
    // Prepare a new stream to read the file.
    // FIX: renamed the local from `stream` to `readStream` so it no longer shadows
    // the module-level `stream` require.
    const readStream = fs.createReadStream(localFilePath, {
        highWaterMark: configStreamChunkSize * 1024 * 1024 // convert MB to bytes.
    });
    // Split in chunks.
    const chunks = [];
    for await (const chunk of readStream)
        chunks.push(chunk);
    // Check if the file is not empty.
    if (!chunks.length)
        throw new Error("Unable to split an empty file into chunks.");
    // Request pre-signed url generation for each chunk.
    const preSignedUrls = await generatePreSignedUrlsParts(cloudFunctions, bucketName, objectKey, uploadId, chunks.length, ceremonyId);
    // Map pre-signed urls with corresponding chunks (part numbers are 1-based).
    return chunks.map((chunk, index) => ({
        partNumber: index + 1,
        chunk,
        preSignedUrl: preSignedUrls[index]
    }));
};
683
/**
 * Forward the request to upload each single chunk of the related ceremony artifact.
 * @param chunksWithUrls <Array<ChunkWithUrl>> - the array containing each chunk mapped with the corresponding pre-signed urls.
 * @param contentType <string | false> - the content type of the ceremony artifact.
 * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param alreadyUploadedChunks Array<ETagWithPartNumber> - the temporary information about the already uploaded chunks.
 * @returns <Promise<Array<ETagWithPartNumber>>> - the completed (uploaded) chunks information.
 */
const uploadParts = async (chunksWithUrls, contentType, cloudFunctions, ceremonyId, alreadyUploadedChunks) => {
    // Chunks uploaded so far (resumes from the previous session data when provided).
    const uploadedChunks = alreadyUploadedChunks || [];
    // Upload the remaining chunks, strictly in order (required for resumability).
    for (let partIndex = uploadedChunks.length; partIndex < chunksWithUrls.length; partIndex += 1) {
        const { chunk: partBody, preSignedUrl, partNumber } = chunksWithUrls[partIndex];
        // Consume the pre-signed url to upload the chunk.
        // @ts-ignore
        const response = await fetch(preSignedUrl, {
            retryOptions: {
                retryInitialDelay: 500,
                socketTimeout: 60000,
                retryMaxDuration: 300000 // 5 minutes.
            },
            method: "PUT",
            body: partBody,
            headers: {
                "Content-Type": contentType.toString(),
                "Content-Length": partBody.length.toString()
            },
            agent: new https.Agent({ keepAlive: true })
        });
        // Any non-200 answer aborts the whole upload.
        if (response.status !== 200 || !response.ok)
            throw new Error(`Unable to upload chunk number ${partIndex}. Please, terminate the current session and retry to resume from the latest uploaded chunk.`);
        // Record the ETag/part-number pair of the uploaded chunk.
        const uploadedChunk = {
            ETag: response.headers.get("etag") || undefined,
            PartNumber: partNumber
        };
        uploadedChunks.push(uploadedChunk);
        // Persist progress to enable a later resumable contribution.
        // nb. this must be done only when contributing (not finalizing).
        if (!!ceremonyId && !!cloudFunctions)
            await temporaryStoreCurrentContributionUploadedChunkData(cloudFunctions, ceremonyId, uploadedChunk);
    }
    return uploadedChunks;
};
729
/**
 * Upload a ceremony artifact to the corresponding bucket.
 * @notice this method implements the multi-part upload using pre-signed urls, optimal for large files.
 * Steps:
 * 0) Check if current contributor could resume a multi-part upload.
 *  0.A) If yes, continue from last uploaded chunk using the already opened multi-part upload.
 *  0.B) Otherwise, start creating a new multi-part upload.
 * 1) Generate a pre-signed url for each (remaining) chunk of the ceremony artifact.
 * 2) Consume the pre-signed urls to upload chunks.
 * 3) Complete the multi-part upload.
 * @param cloudFunctions <Functions> - the Firebase Cloud Functions service instance.
 * @param bucketName <string> - the name of the ceremony artifacts bucket (AWS S3).
 * @param objectKey <string> - the unique key to identify the object inside the given AWS S3 bucket.
 * @param localFilePath <string> - the local path where the artifact will be downloaded.
 * @param configStreamChunkSize <number> - size of each chunk into which the artifact is going to be splitted (nb. will be converted in MB).
 * @param [ceremonyId] <string> - the unique identifier of the ceremony (used as a double-edge sword - as identifier and as a check if current contributor is the coordinator finalizing the ceremony).
 * @param [temporaryDataToResumeMultiPartUpload] <TemporaryParticipantContributionData> - the temporary information necessary to resume an already started multi-part upload.
 */
const multiPartUpload = async (cloudFunctions, bucketName, objectKey, localFilePath, configStreamChunkSize, ceremonyId, temporaryDataToResumeMultiPartUpload) => {
    // The identifier of the multi-part upload and the chunks already uploaded (if resuming).
    let multiPartUploadId = "";
    let alreadyUploadedChunks = [];
    // Step (0): resume if temporary data carries an open upload id.
    const canResume = temporaryDataToResumeMultiPartUpload && !!temporaryDataToResumeMultiPartUpload.uploadId;
    if (canResume) {
        // Step (0.A).
        multiPartUploadId = temporaryDataToResumeMultiPartUpload.uploadId;
        alreadyUploadedChunks = temporaryDataToResumeMultiPartUpload.chunks;
    }
    else {
        // Step (0.B): open a brand new multi-part upload for the ceremony artifact.
        multiPartUploadId = await openMultiPartUpload(cloudFunctions, bucketName, objectKey, ceremonyId);
        // Store the generated upload id on the participant document (contribution only).
        if (ceremonyId)
            await temporaryStoreCurrentContributionMultiPartUploadId(cloudFunctions, ceremonyId, multiPartUploadId);
    }
    // Step (1): split the artifact and obtain one pre-signed url per chunk.
    const chunksWithUrlsZkey = await getChunksAndPreSignedUrls(cloudFunctions, bucketName, objectKey, localFilePath, multiPartUploadId, configStreamChunkSize, ceremonyId);
    // Step (2): upload the (remaining) chunks.
    const partNumbersAndETagsZkey = await uploadParts(chunksWithUrlsZkey, mime.lookup(localFilePath), // content-type.
    cloudFunctions, ceremonyId, alreadyUploadedChunks);
    // Step (3): close the multi-part upload.
    await completeMultiPartUpload(cloudFunctions, bucketName, objectKey, multiPartUploadId, partNumbersAndETagsZkey, ceremonyId);
};
775
/**
 * Download an artifact from S3 (only for authorized users)
 * @param cloudFunctions <Functions> Firebase cloud functions instance.
 * @param bucketName <string> Name of the bucket where the artifact is stored.
 * @param storagePath <string> Path to the artifact in the bucket.
 * @param localPath <string> Path to the local file where the artifact will be saved.
 */
const downloadCeremonyArtifact = async (cloudFunctions, bucketName, storagePath, localPath) => {
    // Request pre-signed url to make GET download request.
    const getPreSignedUrl = await generateGetObjectPreSignedUrl(cloudFunctions, bucketName, storagePath);
    // Make fetch to get info about the artifact.
    // @ts-ignore
    const response = await fetch(getPreSignedUrl);
    // nb. fixed: the original used `&&`, which only rejected when BOTH checks failed,
    // letting some failed responses through. `||` matches the check used by uploadParts.
    if (response.status !== 200 || !response.ok)
        throw new Error(`There was an error while downloading the object ${storagePath} from the bucket ${bucketName}. Please check the function inputs and try again.`);
    const content = response.body;
    // Prepare stream.
    const writeStream = fs.createWriteStream(localPath);
    // Write chunk by chunk.
    for await (const chunk of content) {
        writeStream.write(chunk);
    }
    // nb. fixed: the stream was never ended, so buffered data could be lost and the
    // file descriptor leaked. Close it and wait for all data to be flushed to disk.
    await new Promise((resolve, reject) => {
        writeStream.on("error", reject);
        writeStream.end(resolve);
    });
};
799
/**
 * Get R1CS file path tied to a particular circuit of a ceremony in the storage.
 * @notice each R1CS file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeR1csFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeR1csFilename <string> - the complete R1CS filename (name + ext).
 * @returns <string> - the storage path of the R1CS file.
 */
const getR1csStorageFilePath = (circuitPrefix, completeR1csFilename) => {
    const { circuits } = commonTerms.collections;
    return `${circuits.name}/${circuitPrefix}/${completeR1csFilename}`;
};
808
/**
 * Get WASM file path tied to a particular circuit of a ceremony in the storage.
 * @notice each WASM file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeWasmFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeWasmFilename <string> - the complete WASM filename (name + ext).
 * @returns <string> - the storage path of the WASM file.
 */
const getWasmStorageFilePath = (circuitPrefix, completeWasmFilename) => {
    const { circuits } = commonTerms.collections;
    return `${circuits.name}/${circuitPrefix}/${completeWasmFilename}`;
};
817
/**
 * Get PoT file path in the storage.
 * @notice each PoT file in the storage must be stored in the following path: `pot/<completePotFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param completePotFilename <string> - the complete PoT filename (name + ext).
 * @returns <string> - the storage path of the PoT file.
 */
const getPotStorageFilePath = (completePotFilename) => {
    const { pot } = commonTerms.foldersAndPathsTerms;
    return `${pot}/${completePotFilename}`;
};
825
/**
 * Get zKey file path tied to a particular circuit of a ceremony in the storage.
 * @notice each zKey file in the storage must be stored in the following path: `circuits/<circuitPrefix>/contributions/<completeZkeyFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeZkeyFilename <string> - the complete zKey filename (name + ext).
 * @returns <string> - the storage path of the zKey file.
 */
const getZkeyStorageFilePath = (circuitPrefix, completeZkeyFilename) => {
    const { circuits, contributions } = commonTerms.collections;
    return `${circuits.name}/${circuitPrefix}/${contributions.name}/${completeZkeyFilename}`;
};
834
/**
 * Get verification key file path tied to a particular circuit of a ceremony in the storage.
 * @notice each verification key file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeVerificationKeyFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeVerificationKeyFilename <string> - the complete verification key filename (name + ext).
 * @returns <string> - the storage path of the verification key file.
 */
const getVerificationKeyStorageFilePath = (circuitPrefix, completeVerificationKeyFilename) => {
    const { circuits } = commonTerms.collections;
    return `${circuits.name}/${circuitPrefix}/${completeVerificationKeyFilename}`;
};
843
/**
 * Get verifier contract file path tied to a particular circuit of a ceremony in the storage.
 * @notice each verifier contract file in the storage must be stored in the following path: `circuits/<circuitPrefix>/<completeVerifierContractFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeVerifierContractFilename <string> - the complete verifier contract filename (name + ext).
 * @returns <string> - the storage path of the verifier contract file.
 */
const getVerifierContractStorageFilePath = (circuitPrefix, completeVerifierContractFilename) => {
    const { circuits } = commonTerms.collections;
    return `${circuits.name}/${circuitPrefix}/${completeVerifierContractFilename}`;
};
852
/**
 * Get transcript file path tied to a particular circuit of a ceremony in the storage.
 * @notice each transcript file in the storage must be stored in the following path: `circuits/<circuitPrefix>/transcripts/<completeTranscriptFilename>`.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param circuitPrefix <string> - the prefix of the circuit.
 * @param completeTranscriptFilename <string> - the complete transcript filename (name + ext).
 * @returns <string> - the storage path of the transcript file.
 */
const getTranscriptStorageFilePath = (circuitPrefix, completeTranscriptFilename) => {
    const { circuits } = commonTerms.collections;
    const { transcripts } = commonTerms.foldersAndPathsTerms;
    return `${circuits.name}/${circuitPrefix}/${transcripts}/${completeTranscriptFilename}`;
};
861
+
862
/**
 * Get participants collection path for database reference.
 * @notice all participants related documents are store under `ceremonies/<ceremonyId>/participants` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns <string> - the participants collection path.
 */
const getParticipantsCollectionPath = (ceremonyId) => {
    const { ceremonies, participants } = commonTerms.collections;
    return `${ceremonies.name}/${ceremonyId}/${participants.name}`;
};
870
/**
 * Get circuits collection path for database reference.
 * @notice all circuits related documents are store under `ceremonies/<ceremonyId>/circuits` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @returns <string> - the circuits collection path.
 */
const getCircuitsCollectionPath = (ceremonyId) => {
    const { ceremonies, circuits } = commonTerms.collections;
    return `${ceremonies.name}/${ceremonyId}/${circuits.name}`;
};
878
/**
 * Get contributions collection path for database reference.
 * @notice all contributions related documents are store under `ceremonies/<ceremonyId>/circuits/<circuitId>/contributions` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param circuitId <string> - the unique identifier of the circuit.
 * @returns <string> - the contributions collection path.
 */
const getContributionsCollectionPath = (ceremonyId, circuitId) => {
    const { contributions } = commonTerms.collections;
    return `${getCircuitsCollectionPath(ceremonyId)}/${circuitId}/${contributions.name}`;
};
887
/**
 * Get timeouts collection path for database reference.
 * @notice all timeouts related documents are store under `ceremonies/<ceremonyId>/participants/<participantId>/timeouts` collection path.
 * nb. This is a rule that must be satisfied. This is NOT an optional convention.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param participantId <string> - the unique identifier of the participant.
 * @returns <string> - the timeouts collection path.
 */
const getTimeoutsCollectionPath = (ceremonyId, participantId) => {
    const { timeouts } = commonTerms.collections;
    return `${getParticipantsCollectionPath(ceremonyId)}/${participantId}/${timeouts.name}`;
};
896
/**
 * Helper for query a collection based on certain constraints.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param collection <string> - the name of the collection.
 * @param queryConstraints <Array<QueryConstraint>> - a sequence of where conditions.
 * @returns <Promise<QuerySnapshot<DocumentData>>> - return the matching documents (if any).
 */
const queryCollection = async (firestoreDatabase, collection, queryConstraints) => {
    // Build the constrained query over the target collection and fetch its documents.
    const collectionRef = firestore.collection(firestoreDatabase, collection);
    const constrainedQuery = firestore.query(collectionRef, ...queryConstraints);
    return firestore.getDocs(constrainedQuery);
};
910
/**
 * Helper for obtaining uid and data for query document snapshots.
 * @param queryDocSnap <Array<QueryDocumentSnapshot>> - the array of query document snapshot to be converted.
 * @returns Array<FirebaseDocumentInfo>
 */
const fromQueryToFirebaseDocumentInfo = (queryDocSnap) => {
    return queryDocSnap.map((docSnap) => {
        const { id, ref } = docSnap;
        return { id, ref, data: docSnap.data() };
    });
};
920
/**
 * Fetch for all documents in a collection.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param collection <string> - the name of the collection.
 * @returns <Promise<Array<QueryDocumentSnapshot<DocumentData>>>> - return all documents (if any).
 */
const getAllCollectionDocs = async (firestoreDatabase, collection) => {
    const snapshot = await firestore.getDocs(firestore.collection(firestoreDatabase, collection));
    return snapshot.docs;
};
927
/**
 * Get a specific document from database.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param collection <string> - the name of the collection.
 * @param documentId <string> - the unique identifier of the document in the collection.
 * @returns <Promise<DocumentSnapshot<DocumentData>>> - return the document from Firestore.
 */
const getDocumentById = async (firestoreDatabase, collection, documentId) =>
    firestore.getDoc(firestore.doc(firestoreDatabase, collection, documentId));
938
/**
 * Query for opened ceremonies.
 * @notice an opened ceremony has the OPENED state and an end date in the future.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @returns <Promise<Array<FirebaseDocumentInfo>>>
 */
const getOpenedCeremonies = async (firestoreDatabase) => {
    const { ceremonies } = commonTerms.collections;
    const constraints = [
        firestore.where(ceremonies.fields.state, "==", "OPENED" /* CeremonyState.OPENED */),
        firestore.where(ceremonies.fields.endDate, ">=", Date.now())
    ];
    const querySnap = await queryCollection(firestoreDatabase, ceremonies.name, constraints);
    return fromQueryToFirebaseDocumentInfo(querySnap.docs);
};
950
/**
 * Query for ceremony circuits.
 * @notice the order by sequence position is fundamental to maintain parallelism among contributions for different circuits.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param ceremonyId <string> - the ceremony unique identifier.
 * @returns Promise<Array<FirebaseDocumentInfo>> - the ceremony' circuits documents ordered by sequence position.
 */
const getCeremonyCircuits = async (firestoreDatabase, ceremonyId) => {
    const circuitDocs = await getAllCollectionDocs(firestoreDatabase, getCircuitsCollectionPath(ceremonyId));
    const circuits = fromQueryToFirebaseDocumentInfo(circuitDocs);
    // Ascending order by sequence position.
    circuits.sort((left, right) => left.data.sequencePosition - right.data.sequencePosition);
    return circuits;
};
958
/**
 * Query for a specific ceremony' circuit contribution from a given contributor (if any).
 * @notice if the caller is a coordinator, there could be more than one contribution (= the one from finalization applies to this criteria).
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param circuitId <string> - the unique identifier of the circuit.
 * @param participantId <string> - the unique identifier of the participant.
 * @returns <Promise<Array<FirebaseDocumentInfo>>> - the document info about the circuit contributions from contributor.
 */
const getCircuitContributionsFromContributor = async (firestoreDatabase, ceremonyId, circuitId, participantId) => {
    const contributionsPath = getContributionsCollectionPath(ceremonyId, circuitId);
    const byParticipant = firestore.where(commonTerms.collections.contributions.fields.participantId, "==", participantId);
    const querySnap = await queryCollection(firestoreDatabase, contributionsPath, [byParticipant]);
    return fromQueryToFirebaseDocumentInfo(querySnap.docs);
};
971
/**
 * Query for the active timeout from given participant for a given ceremony (if any).
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param ceremonyId <string> - the identifier of the ceremony.
 * @param participantId <string> - the identifier of the participant.
 * @returns <Promise<Array<FirebaseDocumentInfo>>> - the document info about the current active participant timeout.
 */
const getCurrentActiveParticipantTimeout = async (firestoreDatabase, ceremonyId, participantId) => {
    const timeoutsPath = getTimeoutsCollectionPath(ceremonyId, participantId);
    // A timeout is active while its end date has not elapsed yet.
    const stillActive = firestore.where(commonTerms.collections.timeouts.fields.endDate, ">=", firestore.Timestamp.now().toMillis());
    const querySnap = await queryCollection(firestoreDatabase, timeoutsPath, [stillActive]);
    return fromQueryToFirebaseDocumentInfo(querySnap.docs);
};
981
/**
 * Query for the closed ceremonies.
 * @notice a ceremony is closed when the period for receiving new contributions has ended.
 * @dev when the ceremony is closed it becomes ready for finalization.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @returns <Promise<Array<FirebaseDocumentInfo>>> - the list of closed ceremonies.
 */
const getClosedCeremonies = async (firestoreDatabase) => {
    const { ceremonies } = commonTerms.collections;
    const constraints = [
        firestore.where(ceremonies.fields.state, "==", "CLOSED" /* CeremonyState.CLOSED */),
        firestore.where(ceremonies.fields.endDate, "<=", Date.now())
    ];
    const querySnap = await queryCollection(firestoreDatabase, ceremonies.name, constraints);
    return fromQueryToFirebaseDocumentInfo(querySnap.docs);
};
995
+
996
/**
 * Convert a single byte to its two-character hexadecimal representation.
 * @hidden
 */
const toHexByte = (byte) => {
    const hex = byte.toString(16);
    return byte < 0x10 ? `0${hex}` : hex;
};
1000
/**
 * Converts Uint8Array to hexadecimal string.
 * @param buffer arbritrary length of data
 * @returns hexadecimal string
 */
const toHex = (buffer) =>
    Array.from(buffer, (byte) => (byte < 0x10 ? `0${byte.toString(16)}` : byte.toString(16))).join("");
1006
/**
 * Get 512 bit blake hash of the contents of given path.
 * @param path buffer or hexadecimal string
 * @returns 64 byte hexadecimal string
 */
const blake512FromPath = async (path) => {
    // Incremental blake2b context (64-byte digest, no key).
    const context = blake.blake2bInit(64, undefined);
    // nb. fixed: the original promise had no reject path, so a read error (missing
    // file, permission denied) left the promise pending forever instead of failing.
    return new Promise((resolve, reject) => {
        fs.createReadStream(path)
            .on("error", reject)
            .on("data", (chunk) => {
                blake.blake2bUpdate(context, chunk);
            })
            .on("end", () => {
                resolve(toHex(blake.blake2bFinal(context)));
            });
    });
};
1024
/**
 * Return the SHA256 hash (HEX format) of a given value
 * @param value <string> - the value to be hashed.
 * @returns <string> - the HEX format of the SHA256 hash of the given value
 */
const computeSHA256ToHex = (value) => {
    const hasher = crypto.createHash("sha256");
    hasher.update(value);
    return hasher.digest("hex");
};
1030
/**
 * Helper function that can be used to compare whether two files' hashes are equal or not.
 * @param path1 <string> Path to the first file.
 * @param path2 <string> Path to the second file.
 * @returns <Promise<boolean>> Whether the files are equal or not.
 */
const compareHashes = async (path1, path2) => {
    // Hash both files concurrently — the two computations are independent,
    // so there is no reason to await them serially.
    const [hash1, hash2] = await Promise.all([blake512FromPath(path1), blake512FromPath(path2)]);
    return hash1 === hash2;
};
1041
+
1042
/**
 * Parse and validate that the ceremony configuration is correct
 * @notice this does not upload any files to storage
 * @param path <string> - the path to the configuration file
 * @param cleanup <boolean> - whether to delete the r1cs file after parsing
 * @returns any - the data to pass to the cloud function for setup and the circuit artifacts
 */
const parseCeremonyFile = async (path, cleanup = false) => {
    // check that the path exists
    if (!fs.existsSync(path))
        throw new Error("The provided path to the configuration file does not exist. Please provide an absolute path and try again.");
    try {
        // read the data
        const data = JSON.parse(fs.readFileSync(path).toString());
        // verify that the data is correct
        if (data['timeoutMechanismType'] !== "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */ && data['timeoutMechanismType'] !== "FIXED" /* CeremonyTimeoutType.FIXED */)
            throw new Error("Invalid timeout type. Please choose between DYNAMIC and FIXED.");
        // validate that we have at least 1 circuit input data
        if (!data.circuits || data.circuits.length === 0)
            throw new Error("You need to provide the data for at least 1 circuit.");
        // validate the dates.
        // nb. fixed: `new Date` never throws on malformed input — it yields an Invalid
        // Date whose comparisons are all false, which silently skipped every date check.
        // Validate explicitly via NaN instead.
        const endDate = new Date(data.endDate);
        const startDate = new Date(data.startDate);
        if (Number.isNaN(endDate.valueOf()) || Number.isNaN(startDate.valueOf()))
            throw new Error("The dates should follow this format: 2023-07-04T00:00:00.");
        if (endDate <= startDate)
            throw new Error("The end date should be greater than the start date.");
        const currentDate = new Date();
        if (endDate <= currentDate || startDate <= currentDate)
            throw new Error("The start and end dates should be in the future.");
        // validate penalty
        if (data.penalty <= 0)
            throw new Error("The penalty should be greater than zero.");
        const circuits = [];
        const urlPattern = /(https?:\/\/[^\s]+)/g;
        const commitHashPattern = /^[a-f0-9]{40}$/i;
        const circuitArtifacts = [];
        for (let i = 0; i < data.circuits.length; i++) {
            const circuitData = data.circuits[i];
            const artifacts = circuitData.artifacts;
            circuitArtifacts.push({
                artifacts: artifacts
            });
            const r1csPath = artifacts.r1csStoragePath;
            const wasmPath = artifacts.wasmStoragePath;
            // where we storing the r1cs downloaded
            const localR1csPath = `./${circuitData.name}.r1cs`;
            // check that the artifacts exist in S3
            // we don't need any privileges to download this
            // just the correct region
            const s3 = new clientS3.S3Client({ region: artifacts.region });
            try {
                await s3.send(new clientS3.HeadObjectCommand({
                    Bucket: artifacts.bucket,
                    Key: r1csPath
                }));
            }
            catch (error) {
                throw new Error(`The r1cs file (${r1csPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
            }
            try {
                await s3.send(new clientS3.HeadObjectCommand({
                    Bucket: artifacts.bucket,
                    Key: wasmPath
                }));
            }
            catch (error) {
                throw new Error(`The wasm file (${wasmPath}) seems to not exist. Please ensure this is correct and that the object is publicly available.`);
            }
            // download the r1cs to extract the metadata
            const command = new clientS3.GetObjectCommand({ Bucket: artifacts.bucket, Key: artifacts.r1csStoragePath });
            const response = await s3.send(command);
            const streamPipeline = util.promisify(stream.pipeline);
            if (response.$metadata.httpStatusCode !== 200)
                throw new Error("There was an error while trying to download the r1cs file. Please check that the file has the correct permissions (public) set.");
            if (response.Body instanceof stream.Readable)
                await streamPipeline(response.Body, fs.createWriteStream(localR1csPath));
            // extract the metadata from the r1cs
            const metadata = getR1CSInfo(localR1csPath);
            // validate that the circuit hash and template links are valid
            const template = circuitData.template;
            const URLMatch = template.source.match(urlPattern);
            if (!URLMatch || URLMatch.length === 0 || URLMatch.length > 1)
                throw new Error("You should provide the URL to the circuits templates on GitHub.");
            const hashMatch = template.commitHash.match(commitHashPattern);
            if (!hashMatch || hashMatch.length === 0 || hashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit templates.");
            // calculate the hash of the r1cs file
            const r1csBlake2bHash = await blake512FromPath(localR1csPath);
            const circuitPrefix = extractPrefix(circuitData.name);
            // filenames
            const doubleDigitsPowers = convertToDoubleDigits(metadata.pot);
            const r1csCompleteFilename = `${circuitData.name}.r1cs`;
            const wasmCompleteFilename = `${circuitData.name}.wasm`;
            const smallestPowersOfTauCompleteFilenameForCircuit = `${potFilenameTemplate}${doubleDigitsPowers}.ptau`;
            const firstZkeyCompleteFilename = `${circuitPrefix}_${genesisZkeyIndex}.zkey`;
            // storage paths
            const r1csStorageFilePath = getR1csStorageFilePath(circuitPrefix, r1csCompleteFilename);
            const wasmStorageFilePath = getWasmStorageFilePath(circuitPrefix, wasmCompleteFilename);
            const potStorageFilePath = getPotStorageFilePath(smallestPowersOfTauCompleteFilenameForCircuit);
            const zkeyStorageFilePath = getZkeyStorageFilePath(circuitPrefix, firstZkeyCompleteFilename);
            const files = {
                potFilename: smallestPowersOfTauCompleteFilenameForCircuit,
                r1csFilename: r1csCompleteFilename,
                wasmFilename: wasmCompleteFilename,
                initialZkeyFilename: firstZkeyCompleteFilename,
                potStoragePath: potStorageFilePath,
                r1csStoragePath: r1csStorageFilePath,
                wasmStoragePath: wasmStorageFilePath,
                initialZkeyStoragePath: zkeyStorageFilePath,
                r1csBlake2bHash: r1csBlake2bHash
            };
            // validate that the compiler hash is a valid hash
            const compiler = circuitData.compiler;
            const compilerHashMatch = compiler.commitHash.match(commitHashPattern);
            if (!compilerHashMatch || compilerHashMatch.length === 0 || compilerHashMatch.length > 1)
                throw new Error("You should provide a valid commit hash of the circuit compiler.");
            // validate that the verification options are valid
            const verification = circuitData.verification;
            if (verification.cfOrVm !== "CF" && verification.cfOrVm !== "VM")
                throw new Error("Please enter a valid verification mechanism: either CF or VM");
            // @todo VM parameters verification
            // if (verification['cfOrVM'] === "VM") {}
            // check that the timeout is provided for the correct configuration
            let dynamicThreshold;
            let fixedTimeWindow;
            let circuit = {};
            if (data.timeoutMechanismType === "DYNAMIC" /* CeremonyTimeoutType.DYNAMIC */) {
                if (circuitData.dynamicThreshold <= 0)
                    throw new Error("The dynamic threshold should be > 0.");
                dynamicThreshold = circuitData.dynamicThreshold;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata: metadata,
                    files: files,
                    template: template,
                    compiler: compiler,
                    verification: verification,
                    dynamicThreshold: dynamicThreshold,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    },
                };
            }
            if (data.timeoutMechanismType === "FIXED" /* CeremonyTimeoutType.FIXED */) {
                if (circuitData.fixedTimeWindow <= 0)
                    throw new Error("The fixed time window threshold should be > 0.");
                fixedTimeWindow = circuitData.fixedTimeWindow;
                // the Circuit data for the ceremony setup
                circuit = {
                    name: circuitData.name,
                    description: circuitData.description,
                    prefix: circuitPrefix,
                    sequencePosition: i + 1,
                    metadata: metadata,
                    files: files,
                    template: template,
                    compiler: compiler,
                    verification: verification,
                    fixedTimeWindow: fixedTimeWindow,
                    avgTimings: {
                        contributionComputation: 0,
                        fullContribution: 0,
                        verifyCloudFunction: 0
                    },
                };
            }
            circuits.push(circuit);
            // remove the local r1cs download (if used for verifying the config only vs setup)
            if (cleanup)
                fs.unlinkSync(localR1csPath);
        }
        const setupData = {
            ceremonyInputData: {
                title: data.title,
                description: data.description,
                startDate: startDate.valueOf(),
                endDate: endDate.valueOf(),
                timeoutMechanismType: data.timeoutMechanismType,
                penalty: data.penalty
            },
            ceremonyPrefix: extractPrefix(data.title),
            circuits: circuits,
            circuitArtifacts: circuitArtifacts
        };
        return setupData;
    }
    catch (error) {
        // nb. fixed typo in the surfaced message ("parsing up" -> "parsing").
        throw new Error(`Error while parsing the ceremony setup file. ${error.message}`);
    }
};
1244
/**
 * Extract data from a R1CS metadata file generated with a custom file-based logger.
 * @notice useful for extracting metadata of circuits contained in the file produced by running
 * a logger on the `r1cs.info()` method of snarkjs.
 * @param fullFilePath <string> - the full path of the file.
 * @param keyRgx <RegExp> - the regular expression linked to the key from which you want to extract the value.
 * @returns <string> - the stringified extracted value.
 */
const extractR1CSInfoValueForGivenKey = (fullFilePath, keyRgx) => {
    // Load the metadata log produced by the custom r1cs.info() logger.
    const loggedContents = fs.readFileSync(fullFilePath, "utf-8");
    // Look for the requested key in the log.
    const match = loggedContents.match(keyRgx);
    if (!match)
        throw new Error(`Unable to retrieve circuit metadata. Possible causes may involve an error while using the logger. Please, check whether the corresponding \`.log\` file is present in your local \`output/setup/metadata\` folder. In any case, we kindly ask you to terminate the current session and repeat the process.`);
    // Lines look like "key: value # comment"; strip the key, the first space,
    // any trailing comment and the newline to isolate the value.
    const matchedLine = match.at(0);
    const afterKey = matchedLine?.split(":")[1];
    return afterKey.replace(" ", "").split("#")[0].replace("\n", "");
};
1264
/**
 * Calculate the smallest amount of Powers of Tau needed for a circuit with a constraint size.
 * @param constraints <number> - the number of circuit constraints (extracted from metadata).
 * @param outputs <number> - the number of circuit outputs (extracted from metadata).
 * @returns <number> - the smallest amount of Powers of Tau for the given constraint size.
 */
const computeSmallestPowersOfTauForCircuit = (constraints, outputs) => {
    // Find the smallest power such that 2^power covers constraints + outputs.
    const required = constraints + outputs;
    let power = 2;
    while (required > 2 ** power)
        power += 1;
    return power;
};
1279
/**
 * Transform a number in a zKey index format.
 * @dev this method is aligned with the number of characters of the genesis zKey index (which is a constant).
 * @param progress <number> - the progression in zKey index.
 * @returns <string> - the progression in a zKey index format (`XYZAB`).
 */
const formatZkeyIndex = (progress) =>
    // Left-pad with zeros up to the width of the genesis zKey index.
    progress.toString().padStart(genesisZkeyIndex.length, "0");
1293
/**
 * Extract the amount of powers from a Powers of Tau file name.
 * @dev the PoT files must follow this convention (i_am_a_pot_file_09.ptau) where the numbers before '.ptau' are the powers.
 * @param potCompleteFilename <string> - the complete filename of the Powers of Tau file.
 * @returns <number> - the amount of powers.
 */
const extractPoTFromFilename = (potCompleteFilename) => {
    // The powers are encoded after the last underscore, before the extension.
    const lastToken = potCompleteFilename.split("_").pop();
    return Number(lastToken?.split(".").at(0));
};
1300
/**
 * Extract a prefix consisting of alphanumeric and dash characters from a string with arbitrary characters.
 * @dev replaces all special symbols and whitespaces with a dash char ('-'). Converts all uppercase chars to lowercase.
 * @notice example: str = 'Multiplier-2!2.4.zkey'; output prefix = 'multiplier-2-2-4-zkey'.
 * NB. Prefix extraction is a key process that conditions the name of the ceremony artifacts, download/upload from/to storage, collections paths.
 * @param str <string> - the arbitrary string from which to extract the prefix.
 * @returns <string> - the resulting prefix.
 */
const extractPrefix = (str) => {
    // eslint-disable-next-line no-useless-escape
    const specialCharacters = /[`\s~!@#$%^&*()|+\-=?;:'",.<>\{\}\[\]\\\/]/gi;
    return str.replace(specialCharacters, "-").toLowerCase();
};
1311
/**
 * Automate the generation of an entropy for a contribution.
 * @dev Took inspiration from here https://github.com/glamperd/setup-mpc-ui/blob/master/client/src/state/Compute.tsx#L112.
 * Uses the Web Crypto CSPRNG (`globalThis.crypto.getRandomValues`) when the runtime
 * provides it; falls back to the original `Math.random()`-based generation otherwise,
 * since `Math.random()` is not cryptographically secure.
 * @returns <string> - the auto-generated entropy (256 comma-separated byte values).
 */
const autoGenerateEntropy = () => {
    const randomBytes = new Uint8Array(256);
    if (globalThis.crypto?.getRandomValues) {
        // Cryptographically secure source (Node >= 19 and all modern browsers).
        globalThis.crypto.getRandomValues(randomBytes);
        return randomBytes.toString();
    }
    // Legacy fallback: original naive generation (see @dev note above).
    return randomBytes.map(() => Math.random() * 256).toString();
};
1318
/**
 * Check and return the circuit document based on its sequence position among a set of circuits (if any).
 * @dev there should be exactly one circuit with a provided sequence position; this method throws otherwise.
 * @param circuits <Array<FirebaseDocumentInfo>> - the set of ceremony circuits documents.
 * @param sequencePosition <number> - the sequence position (index) of the circuit to be found and returned.
 * @returns <FirebaseDocumentInfo> - the document of the circuit in the set of circuits that has the provided sequence position.
 */
const getCircuitBySequencePosition = (circuits, sequencePosition) => {
    // Keep only the circuits at the requested position.
    const found = circuits.filter(({ data }) => data.sequencePosition === sequencePosition);
    // Exactly one match is expected; anything else signals a corrupted circuit set.
    if (found.length !== 1)
        throw new Error(`Unable to find the circuit having position ${sequencePosition}. Run the command again and, if this error persists please contact the coordinator.`);
    return found.at(0);
};
1333
/**
 * Convert bytes or kilobytes into gigabytes.
 * @param bytesOrKb <number> - the amount of bytes or kilobytes to be converted.
 * @param isBytes <boolean> - true when the amount to be converted is in bytes; otherwise false (= kilobytes).
 * @returns <number> - the converted amount in GBs.
 */
const convertBytesOrKbToGb = (bytesOrKb, isBytes) => {
    // Bytes need three divisions by 1024, kilobytes only two.
    const exponent = isBytes ? 3 : 2;
    return Number(bytesOrKb / 1024 ** exponent);
};
1340
/**
 * Get the validity of contributors' contributions for each circuit of the given ceremony (if any).
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param circuits <Array<FirebaseDocumentInfo>> - the array of ceremony circuits documents.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param participantId <string> - the unique identifier of the contributor.
 * @param isFinalizing <boolean> - flag to discriminate between ceremony finalization (true) and contribution (false).
 * @returns <Promise<Array<ContributionValidity>>> - a list of contributor contributions together with contribution validity (based on coordinator verification).
 */
const getContributionsValidityForContributor = async (firestoreDatabase, circuits, ceremonyId, participantId, isFinalizing) => {
    const contributionsValidity = [];
    for (const circuit of circuits) {
        // Fetch this contributor's contributions for the circuit.
        const contributorContributions = await getCircuitContributionsFromContributor(firestoreDatabase, ceremonyId, circuit.id, participantId);
        // When finalizing there could be more than one contribution; pick the final one.
        let contribution;
        if (isFinalizing) {
            contribution = contributorContributions
                .filter((contributionDocument) => contributionDocument.data.zkeyIndex === finalContributionIndex)
                .at(0);
        }
        else {
            contribution = contributorContributions.at(0);
        }
        if (!contribution)
            throw new Error("Unable to retrieve contributions for the participant. There may have occurred a database-side error. Please, we kindly ask you to terminate the current session and repeat the process");
        contributionsValidity.push({
            contributionId: contribution?.id,
            circuitId: circuit.id,
            valid: contribution?.data.valid
        });
    }
    return contributionsValidity;
};
1370
/**
 * Return the public attestation preamble for a given contributor.
 * @param contributorIdentifier <string> - the identifier of the contributor (handle, name, uid).
 * @param ceremonyName <string> - the name of the ceremony.
 * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
 * @returns <string> - the public attestation preamble.
 */
const getPublicAttestationPreambleForContributor = (contributorIdentifier, ceremonyName, isFinalizing) => {
    // The verb depends on whether this is a finalization or a regular contribution.
    const action = isFinalizing ? "finalized" : "contributed to";
    return `Hey, I'm ${contributorIdentifier} and I have ${action} the ${ceremonyName} MPC Phase2 Trusted Setup ceremony.\nThe following are my contribution signatures:`;
};
1378
/**
 * Check and prepare the public attestation for the contributor, made only of its valid contributions.
 * @param firestoreDatabase <Firestore> - the Firestore service instance associated to the current Firebase application.
 * @param circuits <Array<FirebaseDocumentInfo>> - the array of ceremony circuits documents.
 * @param ceremonyId <string> - the unique identifier of the ceremony.
 * @param participantId <string> - the unique identifier of the contributor.
 * @param participantContributions <Array<Contribution>> - the contributions of the participant (each holding a `doc` reference id and a `hash`).
 * @param contributorIdentifier <string> - the identifier of the contributor (handle, name, uid).
 * @param ceremonyName <string> - the name of the ceremony.
 * @param isFinalizing <boolean> - true when the coordinator is finalizing the ceremony, otherwise false.
 * @returns <Promise<string>> - the public attestation for the contributor.
 */
const generateValidContributionsAttestation = async (firestoreDatabase, circuits, ceremonyId, participantId, participantContributions, contributorIdentifier, ceremonyName, isFinalizing) => {
    // Generate the attestation preamble for the contributor.
    let publicAttestation = getPublicAttestationPreambleForContributor(contributorIdentifier, ceremonyName, isFinalizing);
    // Get contributors' contributions validity (one entry per circuit).
    const contributionsWithValidity = await getContributionsValidityForContributor(firestoreDatabase, circuits, ceremonyId, participantId, isFinalizing);
    for await (const contributionWithValidity of contributionsWithValidity) {
        // Filter for the related contribution document info (match by document reference id).
        const matchedContributions = participantContributions.filter((contribution) => contribution.doc === contributionWithValidity.contributionId);
        // Exactly one match is expected; zero or many indicate a database write inconsistency.
        if (matchedContributions.length === 0)
            throw new Error(`Unable to retrieve given circuit contribution information. This could happen due to some errors while writing the information on the database.`);
        if (matchedContributions.length > 1)
            throw new Error(`Duplicated circuit contribution information. Please, contact the coordinator.`);
        const participantContribution = matchedContributions.at(0);
        // Get circuit document (the one for which the contribution was calculated).
        const circuitDocument = await getDocumentById(firestoreDatabase, getCircuitsCollectionPath(ceremonyId), contributionWithValidity.circuitId);
        const contributionDocument = await getDocumentById(firestoreDatabase, getContributionsCollectionPath(ceremonyId, contributionWithValidity.circuitId), participantContribution.doc);
        if (!contributionDocument.data() || !circuitDocument.data())
            throw new Error(`Something went wrong when retrieving the data from the database`);
        // Extract data.
        const { sequencePosition, prefix } = circuitDocument.data();
        const { zkeyIndex } = contributionDocument.data();
        // Update public attestation.
        // NOTE(review): `zkeyIndex > 0` relies on string-to-number coercion (zkeyIndex appears to be a
        // zero-padded string elsewhere in this file, e.g. "00001") — confirm intended for the final index.
        publicAttestation = `${publicAttestation}\n\nCircuit # ${sequencePosition} (${prefix})\nContributor # ${zkeyIndex > 0 ? Number(zkeyIndex) : zkeyIndex}\n${participantContribution.hash}`;
    }
    return publicAttestation;
};
1416
/**
 * Create a custom logger to write logs on a local file.
 * @param filename <string> - the name of the output file (where the logs are going to be written).
 * @param level <winston.LoggerOptions["level"]> - the option for the logger level (e.g., info, error).
 * @returns <Logger> - a customized winston logger for files.
 */
const createCustomLoggerForFile = (filename, level = "info") => {
    // Single file transport that writes only the raw message text per line.
    const fileTransport = new winston.transports.File({
        filename,
        format: winston.format.printf((log) => log.message),
        level
    });
    return winston.createLogger({
        level,
        transports: fileTransport
    });
};
1430
/**
 * Return an amount of bytes read from a file to a particular location in the form of a buffer.
 * @param localFilePath <string> - the local path of the file to read from.
 * @param offset <number> - the offset inside the destination buffer at which to start writing (0 = start of buffer).
 * @param length <number> - the number of bytes to read.
 * @param position <ReadPosition> - the position inside the file from which to start reading.
 * @returns <Buffer> - the buffer w/ the read bytes.
 */
const readBytesFromFile = (localFilePath, offset, length, position) => {
    // Open the file (read mode).
    const fileDescriptor = fs.openSync(localFilePath, "r");
    try {
        // Prepare buffer.
        const buffer = Buffer.alloc(length);
        // Read bytes.
        fs.readSync(fileDescriptor, buffer, offset, length, position);
        // Return the read bytes.
        return buffer;
    }
    finally {
        // Always release the descriptor (the previous implementation leaked it,
        // which can exhaust file descriptors when parsing many sections).
        fs.closeSync(fileDescriptor);
    }
};
1448
/**
 * Return the info about the R1CS file.
 * @dev this method was built taking inspiration from
 * https://github.com/weijiekoh/circom-helper/blob/master/ts/read_num_inputs.ts#L5.
 * You can find the specs of R1CS file here
 * https://github.com/iden3/r1csfile/blob/master/doc/r1cs_bin_format.md
 * @param localR1CSFilePath <string> - the local path to the R1CS file.
 * @returns <CircuitMetadata> - the info about the R1CS file.
 */
const getR1CSInfo = (localR1CSFilePath) => {
    /**
     * R1CS binary layout (little-endian):
     *   magic "r1cs" (4 bytes) | version (4 bytes) | number of sections (4 bytes)
     * followed by one or more sections, each made of:
     *   sectionType (4 bytes) | sectionSize (8 bytes) | section content (sectionSize bytes)
     */
    // Prepare state.
    let pointer = 0; // selector to particular file data position in order to read data.
    let wires = 0;
    let publicOutputs = 0;
    let publicInputs = 0;
    let privateInputs = 0;
    let labels = 0;
    let constraints = 0;
    try {
        // Get 'number of section' (jump magic r1cs and version1 data).
        const numberOfSections = ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, 8));
        // Jump to first section.
        pointer = 12;
        // For each section
        for (let i = 0; i < numberOfSections; i++) {
            // Read section type.
            const sectionType = ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer));
            // Jump to section size.
            pointer += 4;
            // Read section size
            const sectionSize = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
            // If at header section (0x00000001 : Header Section).
            if (sectionType === BigInt(1)) {
                /**
                 * Header section content layout:
                 *   field size fs (4 bytes) | prime (fs bytes) | nWires (4) | nPubOut (4)
                 *   | nPubIn (4) | nPrvIn (4) | nLabels (8) | mConstraints (4)
                 * The six counters occupy the last 28 bytes of the content, so this jump
                 * lands the pointer at the start of nWires.
                 */
                pointer += sectionSize - 20;
                // Read R1CS info.
                wires = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                publicOutputs = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                publicInputs = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                privateInputs = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
                pointer += 4;
                labels = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 8, pointer)));
                pointer += 8;
                constraints = Number(ffjavascript.utils.leBuff2int(readBytesFromFile(localR1CSFilePath, 0, 4, pointer)));
            }
            // NOTE(review): after the header branch the pointer has already been advanced
            // into the section content, so this jump appears to overshoot the next section
            // boundary for the header case; parsing still only consumes the header data —
            // confirm against the r1cs binary spec before relying on non-header sections.
            pointer += 8 + Number(sectionSize);
        }
        return {
            curve: "bn-128",
            wires,
            constraints,
            privateInputs,
            publicInputs,
            labels,
            outputs: publicOutputs,
            pot: computeSmallestPowersOfTauForCircuit(constraints, publicOutputs)
        };
    }
    catch (err) {
        throw new Error(`The R1CS file you provided would not appear to be correct. Please, check that you have provided a valid R1CS file and repeat the process.`);
    }
};
1579
/**
 * Return a string with double digits if the provided input is one digit only.
 * @param amount <number> - the input number to be converted.
 * @returns <string> - the two digits stringified number derived from the conversion.
 */
const convertToDoubleDigits = (amount) => {
    // Prepend a zero to single-digit values.
    if (amount < 10)
        return `0${amount}`;
    return amount.toString();
};
1585
+
1586
/**
 * Verify that a zKey is valid.
 * @param r1csLocalFilePath <string> path to the r1cs file.
 * @param zkeyLocalPath <string> path to the zKey file.
 * @param potLocalFilePath <string> path to the PoT file.
 * @param logger <any> logger instance.
 * @returns <boolean> true if the zKey is valid, false otherwise.
 */
const verifyZKey = async (r1csLocalFilePath, zkeyLocalPath, potLocalFilePath, logger) => {
    // Fail fast with a precise message when any required artifact is missing locally.
    const ensureFileExists = (filePath, label) => {
        if (!fs.existsSync(filePath))
            throw new Error(`${label} file not found at ${filePath}`);
    };
    ensureFileExists(r1csLocalFilePath, "R1CS");
    ensureFileExists(zkeyLocalPath, "zKey");
    ensureFileExists(potLocalFilePath, "PoT");
    // Delegate the actual verification to snarkjs.
    return snarkjs.zKey.verifyFromR1cs(r1csLocalFilePath, potLocalFilePath, zkeyLocalPath, logger);
};
1604
/**
 * Generates a GROTH16 proof.
 * @param circuitInput <object> Input to the circuit.
 * @param zkeyFilePath <string> Path to the proving key.
 * @param wasmFilePath <string> Path to the compiled circuit.
 * @param logger <any> Optional logger.
 * @returns <Promise<object>> The proof and the public signals.
 */
const generateGROTH16Proof = async (circuitInput, zkeyFilePath, wasmFilePath, logger) => {
    try {
        const { proof, publicSignals } = await snarkjs.groth16.fullProve(circuitInput, wasmFilePath, zkeyFilePath, logger);
        return {
            proof,
            publicSignals
        };
    }
    catch (error) {
        // Keep the user-facing message but preserve the underlying snarkjs failure
        // (previously discarded, which made debugging proof failures impossible).
        throw new Error("There was an error while generating a proof. Please check that the input is correct, as well as the required system paths; and please try again.", { cause: error });
    }
};
1624
/**
 * Verifies a GROTH16 proof.
 * @param verificationKeyPath <string> Path to the verification key.
 * @param publicSignals <object> Public signals.
 * @param proof <object> Proof.
 * @returns <Promise<boolean>> Whether the proof is valid or not.
 */
const verifyGROTH16Proof = async (verificationKeyPath, publicSignals, proof) => {
    // Load and parse the verification key from disk.
    const verificationKeyContents = fs.readFileSync(verificationKeyPath).toString();
    const verificationKey = JSON.parse(verificationKeyContents);
    // Delegate the pairing check to snarkjs.
    return snarkjs.groth16.verify(verificationKey, publicSignals, proof);
};
1636
/**
 * Helper method to extract the Solidity verifier
 * from a final zKey file and save it to a local file.
 * @param finalZkeyPath <string> The path to the zKey file.
 * @param templatePath <string> The path to the Solidity verifier template file.
 * @return <any> The Solidity verifier code.
 */
const exportVerifierContract = async (finalZkeyPath, templatePath) => {
    // Load the groth16 Solidity template.
    const groth16Template = fs.readFileSync(templatePath).toString();
    // Extract verifier.
    const rawVerifierCode = await snarkjs.zKey.exportSolidityVerifier(finalZkeyPath, { groth16: groth16Template }, console);
    // Pin the pragma to the project's solidity version.
    return rawVerifierCode.replace(/pragma solidity \^\d+\.\d+\.\d+/, `pragma solidity ^${solidityVersion}`);
};
1651
/**
 * Helper method to extract the vKey from a final zKey file.
 * @param finalZkeyPath <string> The path to the zKey file.
 * @return <any> The vKey.
 */
const exportVkey = async (finalZkeyPath) =>
    // Delegate the extraction entirely to snarkjs.
    snarkjs.zKey.exportVerificationKey(finalZkeyPath);
1660
/**
 * Helper method to extract the Solidity verifier and the Verification key
 * from a final zKey file and save them to local files.
 * @param finalZkeyPath <string> The path to the zKey file.
 * @param verifierLocalPath <string> The path to the local file where the verifier will be saved.
 * @param vKeyLocalPath <string> The path to the local file where the vKey will be saved.
 * @param templatePath <string> The path to the template file.
 */
const exportVerifierAndVKey = async (finalZkeyPath, verifierLocalPath, vKeyLocalPath, templatePath) => {
    // Extract and persist the Solidity verifier.
    fs.writeFileSync(verifierLocalPath, await exportVerifierContract(finalZkeyPath, templatePath));
    // Extract and persist the verification key as JSON.
    fs.writeFileSync(vKeyLocalPath, JSON.stringify(await exportVkey(finalZkeyPath)));
};
1674
/**
 * Generate a zKey from scratch (useful to compute either the genesis or final zKey).
 * @param isFinalizing <boolean> Whether the ceremony is finalizing or not.
 * @param r1csLocalPath <string> The path to the local r1cs file.
 * @param potLocalPath <string> The path to the local pot file.
 * @param zkeyLocalPath <string> The path to save the generated zKey.
 * @param logger <any> The logger instance.
 * @param finalContributionZKeyLocalPath <string> The path to the local zkey file of the final contribution (only for final zKey).
 * @param coordinatorIdentifier <string> The identifier of the coordinator (only for final zKey).
 * @param beacon <string> The beacon value for the last contribution (only for final zKey).
 */
const generateZkeyFromScratch = async (isFinalizing, r1csLocalPath, potLocalPath, zkeyLocalPath, logger, finalContributionZKeyLocalPath, coordinatorIdentifier, beacon) => {
    // Both the r1cs and the pot files are required in every mode.
    if (!fs.existsSync(r1csLocalPath) || !fs.existsSync(potLocalPath))
        throw new Error("There was an error while opening the local files. Please make sure that you provided the right paths and try again.");
    // Genesis zKey: derive it directly from the r1cs and pot files.
    if (!isFinalizing) {
        await snarkjs.zKey.newZKey(r1csLocalPath, potLocalPath, zkeyLocalPath, logger);
        return;
    }
    // Final zKey: apply the beacon on top of the last contributor's zKey.
    if (!fs.existsSync(finalContributionZKeyLocalPath))
        throw new Error("There was an error while opening the last zKey generated by a contributor. Please make sure that you provided the right path and try again.");
    await snarkjs.zKey.beacon(finalContributionZKeyLocalPath, zkeyLocalPath, coordinatorIdentifier, beacon, numExpIterations, logger);
};
1696
/**
 * Helper function used to compare two ceremony artifacts.
 * @param firebaseFunctions <Functions> Firebase functions object.
 * @param localPath1 <string> Local path to store the first artifact.
 * @param localPath2 <string> Local path to store the second artifact.
 * @param storagePath1 <string> Storage path to the first artifact.
 * @param storagePath2 <string> Storage path to the second artifact.
 * @param bucketName1 <string> Bucket name of the first artifact.
 * @param bucketName2 <string> Bucket name of the second artifact.
 * @param cleanup <boolean> Whether to delete the downloaded files or not.
 * @returns <Promise<boolean>> true if the hashes match, false otherwise.
 */
const compareCeremonyArtifacts = async (firebaseFunctions, localPath1, localPath2, storagePath1, storagePath2, bucketName1, bucketName2, cleanup) => {
    // 1. download both files (independent of each other -> fetch in parallel).
    await Promise.all([
        downloadCeremonyArtifact(firebaseFunctions, bucketName1, storagePath1, localPath1),
        downloadCeremonyArtifact(firebaseFunctions, bucketName2, storagePath2, localPath2)
    ]);
    // 2. compare hashes.
    const hashesMatch = await compareHashes(localPath1, localPath2);
    // 3. cleanup the local copies when requested.
    if (cleanup) {
        fs.unlinkSync(localPath1);
        fs.unlinkSync(localPath2);
    }
    // 4. return result.
    return hashesMatch;
};
1722
/**
 * Given a ceremony prefix, download all the ceremony artifacts.
 * @param functions <Functions> firebase functions instance.
 * @param firestore <Firestore> firebase firestore instance.
 * @param ceremonyPrefix <string> ceremony prefix.
 * @param outputDirectory <string> output directory where the artifacts will be stored
 * (one subdirectory per ceremony/circuit prefix is created inside it).
 * @returns <Promise<CeremonyArtifacts[]>> array of ceremony artifacts (one entry per circuit).
 */
const downloadAllCeremonyArtifacts = async (functions, firestore$1, ceremonyPrefix, outputDirectory) => {
    // mkdir if not exists
    if (!fs.existsSync(outputDirectory)) {
        fs.mkdirSync(outputDirectory);
    }
    // The bucket postfix is required to reconstruct the ceremony bucket name below.
    if (!process.env.CONFIG_CEREMONY_BUCKET_POSTFIX)
        throw new Error("CONFIG_CEREMONY_BUCKET_POSTFIX not set. Please review your env file and try again.");
    const ceremonyArtifacts = [];
    // find the ceremony given the prefix
    const ceremonyQuery = await queryCollection(firestore$1, commonTerms.collections.ceremonies.name, [
        firestore.where(commonTerms.collections.ceremonies.fields.prefix, "==", ceremonyPrefix)
    ]);
    // get the data
    const ceremonyData = fromQueryToFirebaseDocumentInfo(ceremonyQuery.docs);
    if (ceremonyData.length === 0)
        throw new Error("Ceremony not found. Please review your ceremony prefix and try again.");
    const ceremony = ceremonyData.at(0);
    // reconstruct the bucket name
    const bucketName = getBucketName(ceremonyPrefix, process.env.CONFIG_CEREMONY_BUCKET_POSTFIX);
    const circuits = await getCeremonyCircuits(firestore$1, ceremony.id);
    if (circuits.length === 0)
        throw new Error("No circuits found for this ceremony. Please review your ceremony prefix and try again.");
    // for each circuit we have to download artifacts
    for (const circuit of circuits) {
        // make a directory for storing the circuit artifacts
        const circuitDir = `${outputDirectory}/${ceremony.data.prefix}/${circuit.data.prefix}`;
        fs.mkdirSync(circuitDir, { recursive: true });
        // get all required file names in storage and for local storage
        const { potStoragePath } = circuit.data.files;
        const potLocalPath = `${circuitDir}/${circuit.data.files.potFilename}`;
        const { r1csStoragePath } = circuit.data.files;
        const r1csLocalPath = `${circuitDir}/${circuit.data.files.r1csFilename}`;
        // The last contributed zKey index is derived from the completed-contributions counter.
        const contributions = circuit.data.waitingQueue.completedContributions;
        const zkeyIndex = formatZkeyIndex(contributions);
        const lastZKeyStoragePath = getZkeyStorageFilePath(circuit.data.prefix, `${circuit.data.prefix}_${zkeyIndex}.zkey`);
        const lastZKeyLocalPath = `${circuitDir}/${circuit.data.prefix}_${zkeyIndex}.zkey`;
        const finalZKeyName = `${circuit.data.prefix}_${finalContributionIndex}.zkey`;
        const finalZkeyStoragePath = getZkeyStorageFilePath(circuit.data.prefix, finalZKeyName);
        const finalZKeyLocalPath = `${circuitDir}/${finalZKeyName}`;
        const verifierStoragePath = getVerifierContractStorageFilePath(circuit.data.prefix, `${verifierSmartContractAcronym}.sol`);
        const verifierLocalPath = `${circuitDir}/${circuit.data.prefix}_${verifierSmartContractAcronym}.sol`;
        const vKeyStoragePath = getVerificationKeyStorageFilePath(circuit.data.prefix, `${verificationKeyAcronym}.json`);
        const vKeyLocalPath = `${circuitDir}/${circuit.data.prefix}_${verificationKeyAcronym}.json`;
        const wasmStoragePath = getWasmStorageFilePath(circuit.data.prefix, `${circuit.data.prefix}.wasm`);
        const wasmLocalPath = `${circuitDir}/${circuit.data.prefix}.wasm`;
        // download everything (sequentially; each call writes the artifact to its local path)
        await downloadCeremonyArtifact(functions, bucketName, potStoragePath, potLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, r1csStoragePath, r1csLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, lastZKeyStoragePath, lastZKeyLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, finalZkeyStoragePath, finalZKeyLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, verifierStoragePath, verifierLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, vKeyStoragePath, vKeyLocalPath);
        await downloadCeremonyArtifact(functions, bucketName, wasmStoragePath, wasmLocalPath);
        // Collect the local file locations for this circuit.
        ceremonyArtifacts.push({
            circuitPrefix: circuit.data.prefix,
            circuitId: circuit.id,
            directoryRoot: circuitDir,
            potLocalFilePath: potLocalPath,
            r1csLocalFilePath: r1csLocalPath,
            finalZkeyLocalFilePath: finalZKeyLocalPath,
            lastZkeyLocalFilePath: lastZKeyLocalPath,
            verifierLocalFilePath: verifierLocalPath,
            verificationKeyLocalFilePath: vKeyLocalPath,
            wasmLocalFilePath: wasmLocalPath
        });
    }
    return ceremonyArtifacts;
};
1798
/**
 * Fetch the final contribution beacon from Firestore.
 * @param firestore <Firestore> firebase firestore instance
 * @param ceremonyId <string> ceremony id
 * @param circuitId <string> circuit id
 * @param participantId <string> participant id
 * @returns <Promise<string>> final contribution beacon
 */
const getFinalContributionBeacon = async (firestore, ceremonyId, circuitId, participantId) => {
    // All contributions made by this participant for the given circuit.
    const contributions = await getCircuitContributionsFromContributor(firestore, ceremonyId, circuitId, participantId);
    // The final contribution is the one flagged with the final zkey index.
    const finalContribution = contributions.find((contributionDocument) => contributionDocument.data.zkeyIndex === finalContributionIndex);
    if (!finalContribution)
        throw new Error("Final contribution not found. Please check that you provided the correct input data and try again.");
    return finalContribution.data.beacon.value;
};
1815
+
1816
/**
 * Initialize a Firebase app from the given configuration options.
 * @param options <FirebaseOptions> - an object w/ every necessary Firebase option to init app.
 * @returns <FirebaseApp> - the initialized Firebase app object.
 */
const initializeFirebaseApp = (options) => {
    return app.initializeApp(options);
};
1822
/**
 * Return the Firestore database instance associated to the given Firebase application.
 * @param app <FirebaseApp> - the Firebase application.
 * @returns <Firestore> - the Firebase Firestore associated to the application.
 */
const getFirestoreDatabase = (app) => {
    return firestore.getFirestore(app);
};
1828
/**
 * Return the Cloud Functions instance associated to the given Firebase application.
 * @dev the functions instance is pinned to the `europe-west1` region.
 * @param app <FirebaseApp> - the Firebase application.
 * @returns <Functions> - the Cloud Functions associated to the application.
 */
const getFirebaseFunctions = (app) => {
    const region = 'europe-west1';
    return functions.getFunctions(app, region);
};
1834
/**
 * Retrieve the configuration variables for the AWS services (S3, EC2).
 * @dev all five variables are validated up-front; an empty string also fails
 * the check, so every returned field is a non-empty string.
 * @throws Error when any of the required AWS environment variables is missing.
 * @returns <AWSVariables> - the values of the AWS services configuration variables.
 */
const getAWSVariables = () => {
    if (!process.env.AWS_ACCESS_KEY_ID ||
        !process.env.AWS_SECRET_ACCESS_KEY ||
        !process.env.AWS_REGION ||
        !process.env.AWS_ROLE_ARN ||
        !process.env.AWS_AMI_ID)
        throw new Error("Could not retrieve the AWS environment variables. Please, verify your environment configuration and retry");
    return {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        // No fallback needed: the guard above already threw if AWS_REGION was unset
        // (the previous `|| "us-east-1"` default was dead code and contradicted the validation).
        region: process.env.AWS_REGION,
        roleArn: process.env.AWS_ROLE_ARN,
        amiId: process.env.AWS_AMI_ID
    };
};
1853
/**
 * Return the core Firebase services instances (App, Database, Functions).
 * @param apiKey <string> - the API key specified in the application config.
 * @param authDomain <string> - the authDomain string specified in the application config.
 * @param projectId <string> - the projectId specified in the application config.
 * @param messagingSenderId <string> - the messagingSenderId specified in the application config.
 * @param appId <string> - the appId specified in the application config.
 * @returns <Promise<FirebaseServices>>
 */
const initializeFirebaseCoreServices = async (apiKey, authDomain, projectId, messagingSenderId, appId) => {
    const options = { apiKey, authDomain, projectId, messagingSenderId, appId };
    const firebaseApp = initializeFirebaseApp(options);
    return {
        firebaseApp,
        firestoreDatabase: getFirestoreDatabase(firebaseApp),
        firebaseFunctions: getFirebaseFunctions(firebaseApp)
    };
};
1878
+
1879
/**
 * Sign in w/ OAuth 2.0 token.
 * @param firebaseApp <FirebaseApp> - the configured instance of the Firebase App in use.
 * @param credentials <OAuthCredential> - the OAuth credential generated from token exchange.
 */
const signInToFirebaseWithCredentials = async (firebaseApp, credentials) => {
    const authInstance = auth.initializeAuth(firebaseApp);
    return auth.signInWithCredential(authInstance, credentials);
};
1885
/**
 * Return the current authenticated user in the given Firebase Application.
 * @param firebaseApp <FirebaseApp> - the configured instance of the Firebase App in use.
 * @returns <User> - the object containing the data about the current authenticated user in the given Firebase application.
 * @throws Error when no user is currently authenticated.
 */
const getCurrentFirebaseAuthUser = (firebaseApp) => {
    const { currentUser } = auth.getAuth(firebaseApp);
    if (!currentUser)
        throw new Error(`Unable to find the user currently authenticated with Firebase. Verify that the Firebase application is properly configured and repeat user authentication before trying again.`);
    return currentUser;
};
1896
/**
 * Check if the user can claim to be a coordinator.
 * @dev reads the `coordinator` custom claim from the user's ID token.
 * @param user <User> - the user to be checked.
 * @returns Promise<boolean> - true if the user is a coordinator, false otherwise.
 */
const isCoordinator = async (user) => {
    const { claims } = await user.getIdTokenResult();
    return Boolean(claims.coordinator);
};
1905
+
1906
/**
 * Formats part of a GROTH16 SNARK proof as a 0x-prefixed, 64-hex-digit string.
 * @link adapted from SNARKJS p256 function
 * @param proofPart <any> a part of a proof to be formatted (number or BigInt).
 * @returns <string> the formatted proof part, e.g. "0x00...ff".
 */
const p256 = (proofPart) =>
    // padStart replaces the original manual while-loop zero-padding; values already
    // longer than 64 hex digits are left untouched, exactly as before.
    `0x${proofPart.toString(16).padStart(64, "0")}`;
1919
/**
 * This function formats the calldata for Solidity.
 * @link adapted from SNARKJS formatSolidityCalldata function
 * @dev this function is supposed to be called with
 * @dev the output of generateGROTH16Proof
 * @param circuitInput <string[]> Inputs to the circuit
 * @param _proof <object> Proof
 * @returns <SolidityCalldata> The calldata formatted for Solidity
 */
const formatSolidityCalldata = (circuitInput, _proof) => {
    try {
        const proof = ffjavascript.utils.unstringifyBigInts(_proof);
        // Format each public input of the circuit as a 0x-prefixed 256-bit hex value.
        const formattedCircuitInput = circuitInput.map((cInput) => p256(ffjavascript.utils.unstringifyBigInts(cInput)));
        // Assemble the calldata; note pi_b's inner coordinates are swapped (G2 encoding).
        return {
            arg1: [p256(proof.pi_a[0]), p256(proof.pi_a[1])],
            arg2: [
                [p256(proof.pi_b[0][1]), p256(proof.pi_b[0][0])],
                [p256(proof.pi_b[1][1]), p256(proof.pi_b[1][0])]
            ],
            arg3: [p256(proof.pi_c[0]), p256(proof.pi_c[1])],
            arg4: formattedCircuitInput
        };
    }
    catch (error) {
        throw new Error("There was an error while formatting the calldata. Please make sure that you are calling this function with the output of the generateGROTH16Proof function, and then please try again.");
    }
};
1952
/**
 * Verify a GROTH16 SNARK proof on chain.
 * @param contract <Contract> The contract instance
 * @param proof <SolidityCalldata> The calldata formatted for Solidity
 * @returns <Promise<boolean>> Whether the proof is valid or not
 */
const verifyGROTH16ProofOnChain = async (contract, proof) =>
    contract.verifyProof(proof.arg1, proof.arg2, proof.arg3, proof.arg4);
1962
/**
 * Compiles a contract given a path.
 * @param contractPath <string> path to the verifier contract
 * @returns <Promise<any>> the compiled contract
 * @throws Error when the path does not exist or compilation fails.
 */
const compileContract = async (contractPath) => {
    if (!fs.existsSync(contractPath))
        throw new Error("The contract path does not exist. Please make sure that you are passing a valid path to the contract and try again.");
    const sourceCode = fs.readFileSync(contractPath).toString();
    // Standard-JSON input for solc requesting every output artifact for every contract.
    const solcInput = {
        language: "Solidity",
        sources: {
            Verifier: { content: sourceCode }
        },
        settings: {
            outputSelection: {
                "*": {
                    "*": ["*"]
                }
            }
        }
    };
    try {
        const output = JSON.parse(solc.compile(JSON.stringify(solcInput), { import: { contents: "" } }));
        // The verifier source is expected to declare a single contract named `Verifier`.
        return output.contracts.Verifier.Verifier;
    }
    catch (error) {
        throw new Error("There was an error while compiling the smart contract. Please check that the file is not corrupted and try again.");
    }
};
1992
/**
 * Deploy the verifier contract.
 * @param contractPath <string> path to the verifier contract source.
 * @param signer <Signer> the signer used to deploy (e.g. connected to a local hardhat node).
 * @returns <Promise<Contract>> The deployed contract instance.
 */
const deployVerifierContract = async (contractPath, signer) => {
    const { abi, evm } = await compileContract(contractPath);
    const factory = new ethers.ContractFactory(abi, evm.bytecode.object, signer);
    const verifier = await factory.deploy();
    // Wait until the deployment transaction is mined.
    await verifier.deployed();
    return verifier;
};
2005
/**
 * Verify a ceremony validity end-to-end.
 * 1. Download all artifacts
 * 2. Verify that the zkeys are valid
 * 3. Extract the verifier and the vKey
 * 4. Generate a proof and verify it locally
 * 5. Deploy Verifier contract and verify the proof on-chain
 * @dev every failed check throws; reaching the end of the loop without throwing
 * means all circuits of the ceremony passed every verification step.
 * @param functions <Functions> firebase functions instance
 * @param firestore <Firestore> firebase firestore instance
 * @param ceremonyPrefix <string> ceremony prefix
 * @param outputDirectory <string> output directory where to store the ceremony artifacts
 * @param circuitInputsPath <string> path to the circuit inputs file (JSON array, one input object per circuit, in circuit order)
 * @param verifierTemplatePath <string> path to the verifier template file
 * @param signer <Signer> signer for contract interaction
 * @param logger <any> logger for printing snarkjs output
 */
const verifyCeremony = async (functions, firestore$1, ceremonyPrefix, outputDirectory, circuitInputsPath, verifierTemplatePath, signer, logger) => {
    // 1. download all ceremony artifacts (pot, r1cs, zkeys, verifier, vkey, wasm per circuit).
    const ceremonyArtifacts = await downloadAllCeremonyArtifacts(functions, firestore$1, ceremonyPrefix, outputDirectory);
    // if there are no ceremony artifacts, we throw an error
    if (ceremonyArtifacts.length === 0)
        throw new Error("There was an error while downloading all ceremony artifacts. Please review your ceremony prefix and try again.");
    // extract the circuit inputs
    if (!fs.existsSync(circuitInputsPath))
        throw new Error("The circuit inputs file does not exist. Please check the path and try again.");
    const circuitsInputs = JSON.parse(fs.readFileSync(circuitInputsPath).toString());
    // find the ceremony given the prefix
    const ceremonyQuery = await queryCollection(firestore$1, commonTerms.collections.ceremonies.name, [
        firestore.where(commonTerms.collections.ceremonies.fields.prefix, "==", ceremonyPrefix)
    ]);
    // get the ceremony data - no need to do an existence check as
    // we already checked that the ceremony exists in downloadAllCeremonyArtifacts
    const ceremonyData = fromQueryToFirebaseDocumentInfo(ceremonyQuery.docs);
    const ceremony = ceremonyData.at(0);
    // the coordinator id is required to re-generate the final zKey from the beacon
    const { coordinatorId } = ceremony.data;
    const ceremonyId = ceremony.id;
    // we verify each circuit separately
    for (const ceremonyArtifact of ceremonyArtifacts) {
        // get the index of the circuit in the list of circuits
        // (used to pick the matching entry in the circuit inputs file)
        const inputIndex = ceremonyArtifacts.indexOf(ceremonyArtifact);
        // 2. verify the final zKey against the r1cs and the powers of tau file
        const isValid = await verifyZKey(ceremonyArtifact.r1csLocalFilePath, ceremonyArtifact.finalZkeyLocalFilePath, ceremonyArtifact.potLocalFilePath, logger);
        if (!isValid)
            throw new Error(`The zkey for Circuit ${ceremonyArtifact.circuitPrefix} is not valid. Please check that the artifact is correct. If not, you might have to re run the final contribution to compute a valid final zKey.`);
        // 3. get the final contribution beacon (stored by the coordinator's final contribution)
        const contributionBeacon = await getFinalContributionBeacon(firestore$1, ceremonyId, ceremonyArtifact.circuitId, coordinatorId);
        const generatedFinalZkeyPath = `${ceremonyArtifact.directoryRoot}/${ceremonyArtifact.circuitPrefix}_${finalContributionIndex}_verification.zkey`;
        // 4. re generate the zkey using the beacon and check that its hash matches the downloaded one
        await generateZkeyFromScratch(true, ceremonyArtifact.r1csLocalFilePath, ceremonyArtifact.potLocalFilePath, generatedFinalZkeyPath, logger, ceremonyArtifact.lastZkeyLocalFilePath, coordinatorId, contributionBeacon);
        const zKeysMatching = await compareHashes(generatedFinalZkeyPath, ceremonyArtifact.finalZkeyLocalFilePath);
        if (!zKeysMatching)
            throw new Error(`The final zkey for the Circuit ${ceremonyArtifact.circuitPrefix} does not match the one generated from the beacon. Please confirm manually by downloading from the S3 bucket.`);
        // 5. extract the verifier and the vKey from the (now trusted) final zkey
        const verifierLocalPath = `${ceremonyArtifact.directoryRoot}/${ceremonyArtifact.circuitPrefix}_${verifierSmartContractAcronym}_verification.sol`;
        const vKeyLocalPath = `${ceremonyArtifact.directoryRoot}/${ceremonyArtifact.circuitPrefix}_${verificationKeyAcronym}_verification.json`;
        await exportVerifierAndVKey(ceremonyArtifact.finalZkeyLocalFilePath, verifierLocalPath, vKeyLocalPath, verifierTemplatePath);
        // 6. verify that the generated verifier and vkey match the ones downloaded from S3
        const verifierMatching = await compareHashes(verifierLocalPath, ceremonyArtifact.verifierLocalFilePath);
        if (!verifierMatching)
            throw new Error(`The verifier contract for the Contract ${ceremonyArtifact.circuitPrefix} does not match the one downloaded from S3. Please confirm manually by downloading from the S3 bucket.`);
        const vKeyMatching = await compareHashes(vKeyLocalPath, ceremonyArtifact.verificationKeyLocalFilePath);
        if (!vKeyMatching)
            throw new Error(`The verification key for the Contract ${ceremonyArtifact.circuitPrefix} does not match the one downloaded from S3. Please confirm manually by downloading from the S3 bucket.`);
        // 7. generate a proof and verify it locally (use either of the downloaded or generated as the hashes will have matched at this point)
        const { proof, publicSignals } = await generateGROTH16Proof(circuitsInputs[inputIndex], ceremonyArtifact.finalZkeyLocalFilePath, ceremonyArtifact.wasmLocalFilePath, logger);
        const isProofValid = await verifyGROTH16Proof(vKeyLocalPath, publicSignals, proof);
        if (!isProofValid)
            throw new Error(`Could not verify the proof for Circuit ${ceremonyArtifact.circuitPrefix}. Please check that the artifacts are correct as well as the inputs to the circuit, and try again.`);
        // 8. deploy Verifier contract and verify the proof on-chain
        const verifierContract = await deployVerifierContract(verifierLocalPath, signer);
        const formattedProof = await formatSolidityCalldata(publicSignals, proof);
        const isProofValidOnChain = await verifyGROTH16ProofOnChain(verifierContract, formattedProof);
        if (!isProofValidOnChain)
            throw new Error(`Could not verify the proof on-chain for Circuit ${ceremonyArtifact.circuitPrefix}. Please check that the artifacts are correct as well as the inputs to the circuit, and try again.`);
    }
};
2082
+
2083
/**
 * This function will return the number of public repos of a user.
 * @param user <string> The username of the user
 * @returns <number> The number of public repos
 * @throws Error when the GitHub API does not answer with HTTP 200.
 */
const getNumberOfPublicReposGitHub = async (user) => {
    // NOTE(review): GitHub's documented endpoint for another user's repositories is
    // `/users/{username}/repos` (plural). `/user/{value}/...` resolves only for
    // numeric account ids — confirm which value callers actually pass here.
    const response = await fetch(`https://api.github.com/user/${user}/repos`, {
        method: "GET",
        headers: {
            // Token raises the authenticated rate limit; requests still work without it but are throttled.
            Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}`
        }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the number of public repositories. Please try again.");
    const jsonData = await response.json();
    // NOTE(review): counts only the first page (default per_page=30), so the result
    // is capped at 30 — thresholds above 30 can never be met; consider `?per_page=100`
    // or the `public_repos` field of `/users/{username}` instead.
    return jsonData.length;
};
2100
/**
 * This function will return the number of followers of a user.
 * @param user <string> The username of the user
 * @returns <number> The number of followers
 * @throws Error when the GitHub API does not answer with HTTP 200.
 */
const getNumberOfFollowersGitHub = async (user) => {
    // NOTE(review): GitHub's documented endpoint is `/users/{username}/followers`
    // (plural); `/user/{value}/...` resolves only for numeric account ids — confirm
    // which value callers actually pass here.
    const response = await fetch(`https://api.github.com/user/${user}/followers`, {
        method: "GET",
        headers: {
            // Token raises the authenticated rate limit.
            Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}`
        }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the number of followers. Please try again.");
    const jsonData = await response.json();
    // NOTE(review): counts only the first page (default per_page=30), so the result
    // is capped at 30; the `followers` field of `/users/{username}` would be exact.
    return jsonData.length;
};
2117
/**
 * This function will return the number of following of a user.
 * @param user <string> The username of the user
 * @returns <number> The number of following users
 * @throws Error when the GitHub API does not answer with HTTP 200.
 */
const getNumberOfFollowingGitHub = async (user) => {
    // NOTE(review): GitHub's documented endpoint is `/users/{username}/following`
    // (plural); `/user/{value}/...` resolves only for numeric account ids — confirm
    // which value callers actually pass here.
    const response = await fetch(`https://api.github.com/user/${user}/following`, {
        method: "GET",
        headers: {
            // Token raises the authenticated rate limit.
            Authorization: `token ${process.env.GITHUB_ACCESS_TOKEN}`
        }
    });
    if (response.status !== 200)
        throw new Error("It was not possible to retrieve the number of following. Please try again.");
    const jsonData = await response.json();
    // NOTE(review): counts only the first page (default per_page=30), so the result
    // is capped at 30; the `following` field of `/users/{username}` would be exact.
    return jsonData.length;
};
2134
/**
 * This function will check if the user is reputable enough to be able to use the app.
 * @param userLogin <string> The username of the user
 * @param minimumAmountOfFollowing <number> The minimum amount of following the user should have
 * @param minimumAmountOfFollowers <number> The minimum amount of followers the user should have
 * @param minimumAmountOfPublicRepos <number> The minimum amount of public repos the user should have
 * @returns <boolean> True if the user is reputable enough, false otherwise
 * @throws Error when the GITHUB_ACCESS_TOKEN environment variable is not set.
 */
const githubReputation = async (userLogin, minimumAmountOfFollowing, minimumAmountOfFollowers, minimumAmountOfPublicRepos) => {
    if (!process.env.GITHUB_ACCESS_TOKEN)
        throw new Error("The GitHub access token is missing. Please insert a valid token to be used for anti-sybil checks on user registation, and then try again.");
    // The three lookups are independent, so run them in parallel instead of serially.
    const [following, repos, followers] = await Promise.all([
        getNumberOfFollowingGitHub(userLogin),
        getNumberOfPublicReposGitHub(userLogin),
        getNumberOfFollowersGitHub(userLogin)
    ]);
    // Reputable only when every metric meets its minimum threshold.
    if (following < minimumAmountOfFollowing ||
        repos < minimumAmountOfPublicRepos ||
        followers < minimumAmountOfFollowers)
        return false;
    return true;
};
2154
+
2155
// NOTE: the blocks below appear to be transpiled TypeScript string enums — each
// assigns `exports.<Name>` then populates it via an IIFE with string key/value pairs.
/**
 * Define different states of a ceremony.
 * @enum {string}
 * - SCHEDULED: when the ceremony setup has been properly completed but the contribution period has not yet started.
 * - OPENED: when the contribution period has started.
 * - PAUSED: When the coordinator has manually paused the ceremony (NB. currently not possible because the relevant functionality has not yet been implemented).
 * - CLOSED: when the contribution period has finished.
 * - FINALIZED: when the ceremony finalization has been properly completed.
 */
exports.CeremonyState = void 0;
(function (CeremonyState) {
    CeremonyState["SCHEDULED"] = "SCHEDULED";
    CeremonyState["OPENED"] = "OPENED";
    CeremonyState["PAUSED"] = "PAUSED";
    CeremonyState["CLOSED"] = "CLOSED";
    CeremonyState["FINALIZED"] = "FINALIZED";
})(exports.CeremonyState || (exports.CeremonyState = {}));
/**
 * Define the type of Trusted Setup ceremony (Phase 1 or Phase 2).
 * @enum {string}
 * - PHASE1: when the ceremony is a Phase 1 Trusted Setup ceremony.
 * - PHASE2: when the ceremony is a Phase 2 Trusted Setup ceremony.
 */
exports.CeremonyType = void 0;
(function (CeremonyType) {
    CeremonyType["PHASE1"] = "PHASE1";
    CeremonyType["PHASE2"] = "PHASE2";
})(exports.CeremonyType || (exports.CeremonyType = {}));
/**
 * Define different status of a participant.
 * @enum {string}
 * - CREATED: when the participant document has been created in the database.
 * - WAITING: when the participant is waiting for a contribution (i.e., is currently queued or is waiting for its status to be checked after a timeout expiration).
 * - READY: when the participant is ready for a contribution.
 * - CONTRIBUTING: when the participant is currently contributing (i.e., not queued anymore, but the current contributor at this time).
 * - CONTRIBUTED: when the participant has completed successfully the contribution for all circuits in a ceremony. The participant may need to wait for the latest contribution verification while having this status.
 * - DONE: when the participant has completed contributions and verifications from coordinator.
 * - FINALIZING: when the coordinator is currently finalizing the ceremony.
 * - FINALIZED: when the coordinator has successfully finalized the ceremony.
 * - TIMEDOUT: when the participant has been timedout while contributing. This may happen due to network or memory issues, un/intentional crash, or contributions lasting for too long.
 * - EXHUMED: when the participant is ready to resume the contribution after a timeout expiration.
 */
exports.ParticipantStatus = void 0;
(function (ParticipantStatus) {
    ParticipantStatus["CREATED"] = "CREATED";
    ParticipantStatus["WAITING"] = "WAITING";
    ParticipantStatus["READY"] = "READY";
    ParticipantStatus["CONTRIBUTING"] = "CONTRIBUTING";
    ParticipantStatus["CONTRIBUTED"] = "CONTRIBUTED";
    ParticipantStatus["DONE"] = "DONE";
    ParticipantStatus["FINALIZING"] = "FINALIZING";
    ParticipantStatus["FINALIZED"] = "FINALIZED";
    ParticipantStatus["TIMEDOUT"] = "TIMEDOUT";
    ParticipantStatus["EXHUMED"] = "EXHUMED";
})(exports.ParticipantStatus || (exports.ParticipantStatus = {}));
/**
 * Define different steps during which the participant may be during the contribution.
 * @enum {string}
 * - DOWNLOADING: when the participant is doing the download of the last contribution (from previous participant).
 * - COMPUTING: when the participant is actively computing the contribution.
 * - UPLOADING: when the participant is uploading the computed contribution.
 * - VERIFYING: when the participant is waiting from verification results from the coordinator.
 * - COMPLETED: when the participant has received the verification results from the coordinator and completed the contribution steps.
 */
exports.ParticipantContributionStep = void 0;
(function (ParticipantContributionStep) {
    ParticipantContributionStep["DOWNLOADING"] = "DOWNLOADING";
    ParticipantContributionStep["COMPUTING"] = "COMPUTING";
    ParticipantContributionStep["UPLOADING"] = "UPLOADING";
    ParticipantContributionStep["VERIFYING"] = "VERIFYING";
    ParticipantContributionStep["COMPLETED"] = "COMPLETED";
})(exports.ParticipantContributionStep || (exports.ParticipantContributionStep = {}));
/**
 * Define what type of timeout was performed.
 * @enum {string}
 * - BLOCKING_CONTRIBUTION: when the current contributor was blocking the waiting queue.
 * - BLOCKING_CLOUD_FUNCTION: when the contribution verification has gone beyond the time limit.
 */
exports.TimeoutType = void 0;
(function (TimeoutType) {
    TimeoutType["BLOCKING_CONTRIBUTION"] = "BLOCKING_CONTRIBUTION";
    TimeoutType["BLOCKING_CLOUD_FUNCTION"] = "BLOCKING_CLOUD_FUNCTION";
})(exports.TimeoutType || (exports.TimeoutType = {}));
/**
 * Define what type of timeout mechanism is currently adopted for a ceremony.
 * @enum {string}
 * - DYNAMIC: self-update approach based on latest contribution time.
 * - FIXED: approach based on a fixed amount of time.
 */
exports.CeremonyTimeoutType = void 0;
(function (CeremonyTimeoutType) {
    CeremonyTimeoutType["DYNAMIC"] = "DYNAMIC";
    CeremonyTimeoutType["FIXED"] = "FIXED";
})(exports.CeremonyTimeoutType || (exports.CeremonyTimeoutType = {}));
/**
 * Define request type for pre-signed urls.
 * @enum {string}
 * - PUT: for upload pre-signed urls.
 * - GET: for download pre-signed urls.
 */
exports.RequestType = void 0;
(function (RequestType) {
    RequestType["PUT"] = "PUT";
    RequestType["GET"] = "GET";
})(exports.RequestType || (exports.RequestType = {}));
/**
 * Define the environment in use when testing.
 * @enum {string}
 * - DEVELOPMENT: tests are performed on the local Firebase emulator instance.
 * - PRODUCTION: tests are performed on the remote (deployed) Firebase application.
 */
exports.TestingEnvironment = void 0;
(function (TestingEnvironment) {
    TestingEnvironment["DEVELOPMENT"] = "DEVELOPMENT";
    TestingEnvironment["PRODUCTION"] = "PRODUCTION";
})(exports.TestingEnvironment || (exports.TestingEnvironment = {}));
/**
 * Define what type of contribution verification mechanism is currently adopted for a circuit.
 * @enum {string}
 * - CF: Cloud Functions.
 * - VM: Virtual Machine.
 */
exports.CircuitContributionVerificationMechanism = void 0;
(function (CircuitContributionVerificationMechanism) {
    CircuitContributionVerificationMechanism["CF"] = "CF";
    CircuitContributionVerificationMechanism["VM"] = "VM";
})(exports.CircuitContributionVerificationMechanism || (exports.CircuitContributionVerificationMechanism = {}));
/**
 * Define the supported VM volume types.
 * @dev the VM volume types can be retrieved at https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-volume-types.html
 * @enum {string}
 * - GP2: General Purpose SSD version 2.
 * - GP3: General Purpose SSD version 3.
 * - IO1: Provisioned IOPS SSD volumes version 1.
 * - ST1: Throughput Optimized HDD volumes.
 * - SC1: Cold HDD volumes.
 */
exports.DiskTypeForVM = void 0;
(function (DiskTypeForVM) {
    DiskTypeForVM["GP2"] = "gp2";
    DiskTypeForVM["GP3"] = "gp3";
    DiskTypeForVM["IO1"] = "io1";
    DiskTypeForVM["ST1"] = "st1";
    DiskTypeForVM["SC1"] = "sc1";
})(exports.DiskTypeForVM || (exports.DiskTypeForVM = {}));
2297
+
2298
+ dotenv.config();
2299
/**
 * Create a new AWS EC2 client.
 * @returns <Promise<EC2Client>> - the EC2 client instance.
 */
const createEC2Client = async () => {
    // Pull credentials and region from the validated AWS environment variables.
    const { accessKeyId, secretAccessKey, region } = getAWSVariables();
    const credentials = { accessKeyId, secretAccessKey };
    return new clientEc2.EC2Client({ credentials, region });
};
2315
/**
 * Create a new AWS SSM client.
 * @returns <Promise<SSMClient>> - the SSM client instance.
 */
const createSSMClient = async () => {
    // Pull credentials and region from the validated AWS environment variables.
    const { accessKeyId, secretAccessKey, region } = getAWSVariables();
    const credentials = { accessKeyId, secretAccessKey };
    return new clientSsm.SSMClient({ credentials, region });
};
2331
/**
 * Return the list of bootstrap commands to be executed.
 * @dev the startup commands must be suitable for a shell script.
 * @param bucketName <string> - the name of the AWS S3 bucket.
 * @returns <Array<string>> - the list of startup commands to be executed.
 */
const vmBootstrapCommand = (bucketName) => {
    const scriptOnS3 = `s3://${bucketName}/${vmBootstrapScriptFilename}`;
    return [
        "#!/bin/bash",
        // Fetch the bootstrap script from the bucket, then make it executable and run it.
        `aws s3 cp ${scriptOnS3} ${vmBootstrapScriptFilename}`,
        `chmod +x ${vmBootstrapScriptFilename} && bash ${vmBootstrapScriptFilename}`
    ];
};
2342
/**
 * Return the list of Node environment (and packages) installation plus artifact caching for contribution verification.
 * @param zKeyPath <string> - the path to zKey artifact inside AWS S3 bucket.
 * @param potPath <string> - the path to ptau artifact inside AWS S3 bucket.
 * @param snsTopic <string> - the SNS topic ARN.
 * @param region <string> - the AWS region.
 * @returns <Array<string>> - the array of commands to be run by the EC2 instance.
 */
const vmDependenciesAndCacheArtifactsCommand = (zKeyPath, potPath, snsTopic, region) => {
    // Marker-file guard: skip the whole setup if this script already ran on the instance.
    // The ${MARKER_FILE} occurrences are shell variables, not JS interpolation.
    /* eslint-disable no-template-curly-in-string */
    const guard = [
        "#!/bin/bash",
        'MARKER_FILE="/var/run/my_script_ran"',
        "if [ -e ${MARKER_FILE} ]; then",
        "exit 0",
        "else",
        "touch ${MARKER_FILE}"
    ];
    /* eslint-enable no-template-curly-in-string */
    // Install Node 16 and snarkjs.
    const installToolchain = [
        "sudo yum update -y",
        "curl -sL https://rpm.nodesource.com/setup_16.x | sudo bash - ",
        "sudo yum install -y nodejs",
        "npm install -g snarkjs"
    ];
    // Cache the genesis zkey, the ptau file and the blake3 hashing binary.
    const cacheArtifacts = [
        `aws s3 cp s3://${zKeyPath} /var/tmp/genesisZkey.zkey`,
        `aws s3 cp s3://${potPath} /var/tmp/pot.ptau`,
        "wget https://github.com/BLAKE3-team/BLAKE3/releases/download/1.4.0/b3sum_linux_x64_bin -O /var/tmp/blake3.bin",
        "chmod +x /var/tmp/blake3.bin"
    ];
    // Announce readiness by publishing the instance id on the SNS topic.
    const notifyReady = [
        "INSTANCE_ID=$(ec2-metadata -i | awk '{print $2}')",
        `aws sns publish --topic-arn ${snsTopic} --message "$INSTANCE_ID" --region ${region}`
    ];
    return [...guard, ...installToolchain, ...cacheArtifacts, ...notifyReady, "fi"];
};
2371
/**
 * Return the list of commands for contribution verification.
 * @dev this method generates the verification transcript as well.
 * @param bucketName <string> - the name of the AWS S3 bucket.
 * @param lastZkeyStoragePath <string> - the last zKey storage path.
 * @param verificationTranscriptStoragePathAndFilename <string> - the verification transcript storage path.
 * @returns Array<string> - the list of commands for contribution verification.
 */
const vmContributionVerificationCommand = (bucketName, lastZkeyStoragePath, verificationTranscriptStoragePathAndFilename) => {
    const lastZkey = "/var/tmp/lastZKey.zkey";
    const transcript = "/var/tmp/verification_transcript.log";
    return [
        // Download the latest contribution from the bucket.
        `aws s3 cp s3://${bucketName}/${lastZkeyStoragePath} ${lastZkey} > /var/tmp/log.txt`,
        // Verify it against the cached genesis zkey and ptau, producing a transcript.
        `snarkjs zkvi /var/tmp/genesisZkey.zkey /var/tmp/pot.ptau ${lastZkey} > ${transcript}`,
        // Upload the transcript back to the bucket.
        `aws s3 cp ${transcript} s3://${bucketName}/${verificationTranscriptStoragePathAndFilename} &>/dev/null`,
        // Emit the blake3 hash of the transcript (the command's visible output).
        `/var/tmp/blake3.bin ${transcript} | awk '{print $1}'`,
        // Clean up temporary files.
        `rm ${lastZkey} ${transcript} /var/tmp/log.txt &>/dev/null`
    ];
};
2386
/**
 * Compute the VM disk size.
 * @dev the disk size is computed using the zKey size in bytes taking into consideration
 * the verification task (2 * zKeySize) + ptauSize + OS/VM (~8GB).
 * @param zKeySizeInBytes <number> the size of the zKey in bytes.
 * @param pot <number> the amount of powers needed for the circuit (index of the PPoT file).
 * @return <number> the configuration of the VM disk size in GB.
 */
const computeDiskSizeForVM = (zKeySizeInBytes, pot) => {
    // Two copies of the zKey are needed during verification.
    const zKeyGb = convertBytesOrKbToGb(zKeySizeInBytes, true);
    // Size (GB) of the powers-of-tau file matching the circuit's power.
    const potGb = powersOfTauFiles[pot - 1].size;
    // Round up and add ~8GB of headroom for the OS/VM itself.
    return Math.ceil(2 * zKeyGb + potGb) + 8;
};
2395
/**
 * Creates a new EC2 instance.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param commands <Array<string>> - the list of commands to be run on the EC2 instance at startup (user data).
 * @param instanceType <string> - the type of the EC2 VM instance.
 * @param volumeSize <number> - the size (GB) of the disk (volume) of the VM.
 * @param diskType <DiskTypeForVM> - the type of the disk (volume) of the VM.
 * @returns <Promise<P0tionEC2Instance>> the instance that was created.
 */
const createEC2Instance = async (ec2, commands, instanceType, volumeSize, diskType) => {
    // Get the AWS variables.
    const { amiId, roleArn } = getAWSVariables();
    // Parametrize the VM EC2 instance.
    const params = {
        ImageId: amiId,
        InstanceType: instanceType,
        MaxCount: 1,
        MinCount: 1,
        // nb. to find this: iam -> roles -> role_name.
        IamInstanceProfile: {
            Arn: roleArn
        },
        // nb. for running commands at the startup (user data must be base64-encoded).
        UserData: Buffer.from(commands.join("\n")).toString("base64"),
        BlockDeviceMappings: [
            {
                DeviceName: "/dev/xvda",
                Ebs: {
                    DeleteOnTermination: true,
                    VolumeSize: volumeSize,
                    VolumeType: diskType
                }
            }
        ],
        // tag the resource
        TagSpecifications: [
            {
                ResourceType: "instance",
                Tags: [
                    {
                        Key: "Name",
                        Value: ec2InstanceTag
                    },
                    {
                        Key: "Initialized",
                        Value: "false"
                    }
                ]
            }
        ]
    };
    try {
        // Create a new command instance.
        const command = new clientEc2.RunInstancesCommand(params);
        // Send the command for execution.
        const response = await ec2.send(command);
        // nb. serialize the response — interpolating the raw object would print "[object Object]".
        if (response.$metadata.httpStatusCode !== 200)
            throw new Error(`Something went wrong when creating the EC2 instance. More details ${JSON.stringify(response)}`);
        // Map the launched instance into the P0tionEC2Instance shape.
        const [instance] = response.Instances;
        return {
            instanceId: instance.InstanceId,
            imageId: instance.ImageId,
            instanceType: instance.InstanceType,
            keyName: instance.KeyName,
            launchTime: instance.LaunchTime.toISOString()
        };
    }
    catch (error) {
        throw new Error(`Something went wrong when creating the EC2 instance. More details ${error}`);
    }
};
2466
/**
 * Check if the current VM EC2 instance is running by querying the status.
 * @param ec2Client <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @returns <Promise<boolean>> - true if the current status of the EC2 VM instance is 'running'; otherwise false.
 */
const checkIfRunning = async (ec2Client, instanceId) => {
    // Generate a new describe status command.
    const command = new clientEc2.DescribeInstanceStatusCommand({
        InstanceIds: [instanceId]
    });
    // Run the command.
    const response = await ec2Client.send(command);
    // nb. serialize the response — interpolating the raw object would print "[object Object]".
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when retrieving the EC2 instance (${instanceId}) status. More details ${JSON.stringify(response)}`);
    // nb. DescribeInstanceStatus omits non-running instances by default (IncludeAllInstances
    // defaults to false), so InstanceStatuses may be empty; an empty result means "not running"
    // rather than an error — guard instead of crashing on InstanceStatuses[0].
    return response.InstanceStatuses?.[0]?.InstanceState?.Name === "running";
};
2483
/**
 * Start an EC2 VM instance.
 * @dev the instance must have been created previously.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 */
const startEC2Instance = async (ec2, instanceId) => {
    // Generate a new start instance command.
    const command = new clientEc2.StartInstancesCommand({
        InstanceIds: [instanceId],
        DryRun: false
    });
    // Run the command.
    const response = await ec2.send(command);
    // nb. serialize the response — interpolating the raw object would print "[object Object]".
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when starting the EC2 instance (${instanceId}). More details ${JSON.stringify(response)}`);
};
2500
/**
 * Stop an EC2 VM instance.
 * @dev the instance must have been in a running status.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 */
const stopEC2Instance = async (ec2, instanceId) => {
    // Generate a new stop instance command.
    const command = new clientEc2.StopInstancesCommand({
        InstanceIds: [instanceId],
        DryRun: false
    });
    // Run the command.
    const response = await ec2.send(command);
    // nb. serialize the response — interpolating the raw object would print "[object Object]".
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when stopping the EC2 instance (${instanceId}). More details ${JSON.stringify(response)}`);
};
2517
/**
 * Terminate an EC2 VM instance.
 * @param ec2 <EC2Client> - the instance of the EC2 client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 */
const terminateEC2Instance = async (ec2, instanceId) => {
    // Generate a new terminate instance command.
    const command = new clientEc2.TerminateInstancesCommand({
        InstanceIds: [instanceId],
        DryRun: false
    });
    // Run the command.
    const response = await ec2.send(command);
    // nb. serialize the response — interpolating the raw object would print "[object Object]".
    if (response.$metadata.httpStatusCode !== 200)
        throw new Error(`Something went wrong when terminating the EC2 instance (${instanceId}). More details ${JSON.stringify(response)}`);
};
2533
/**
 * Run a command on an EC2 VM instance by using SSM.
 * @dev this method returns the command identifier for checking the status and retrieving
 * the output of the command execution later on.
 * @param ssm <SSMClient> - the instance of the SSM client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @param commands <Array<string>> - the list of commands.
 * @return <Promise<string>> - the unique identifier of the command.
 */
const runCommandUsingSSM = async (ssm, instanceId, commands) => {
    // Parametrize the SSM shell-script execution.
    const params = {
        DocumentName: "AWS-RunShellScript",
        InstanceIds: [instanceId],
        Parameters: {
            commands
        },
        // Fail the invocation if it has not completed within 20 minutes.
        TimeoutSeconds: 1200
    };
    try {
        // Run the command.
        const response = await ssm.send(new clientSsm.SendCommandCommand(params));
        return response.Command.CommandId;
    }
    catch (error) {
        throw new Error(`Something went wrong when trying to run a command on the EC2 instance. More details ${error}`);
    }
};
2565
/**
 * Get the output of an SSM command executed on an EC2 VM instance.
 * @param ssm <SSMClient> - the instance of the SSM client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @param commandId <string> - the unique identifier of the command.
 * @return <Promise<string>> - the command output.
 */
const retrieveCommandOutput = async (ssm, instanceId, commandId) => {
    // Look up the invocation of the given command on the given instance.
    const invocationQuery = new clientSsm.GetCommandInvocationCommand({
        CommandId: commandId,
        InstanceId: instanceId
    });
    try {
        // Execute the query and surface the captured stdout.
        const invocation = await ssm.send(invocationQuery);
        return invocation.StandardOutputContent;
    }
    catch (error) {
        throw new Error(`Something went wrong when trying to retrieve the command ${commandId} output on the EC2 instance (${instanceId}). More details ${error}`);
    }
};
2587
/**
 * Get the status of an SSM command executed on an EC2 VM instance.
 * @param ssm <SSMClient> - the instance of the SSM client.
 * @param instanceId <string> - the unique identifier of the EC2 VM instance.
 * @param commandId <string> - the unique identifier of the command.
 * @return <Promise<string>> - the command status.
 */
const retrieveCommandStatus = async (ssm, instanceId, commandId) => {
    // Look up the invocation of the given command on the given instance.
    const invocationQuery = new clientSsm.GetCommandInvocationCommand({
        CommandId: commandId,
        InstanceId: instanceId
    });
    try {
        // Execute the query and surface the invocation status.
        const invocation = await ssm.send(invocationQuery);
        return invocation.Status;
    }
    catch (error) {
        throw new Error(`Something went wrong when trying to retrieve the command ${commandId} status on the EC2 instance (${instanceId}). More details ${error}`);
    }
};
2609
+
2610
// Public API surface of the bundled package (CommonJS re-exports), generated by the
// bundler: Firebase/ceremony helpers, S3 storage helpers, snarkjs wrappers, and the
// AWS EC2/SSM VM-orchestration functions defined above.
exports.autoGenerateEntropy = autoGenerateEntropy;
exports.blake512FromPath = blake512FromPath;
exports.checkAndPrepareCoordinatorForFinalization = checkAndPrepareCoordinatorForFinalization;
exports.checkIfObjectExist = checkIfObjectExist;
exports.checkIfRunning = checkIfRunning;
exports.checkParticipantForCeremony = checkParticipantForCeremony;
exports.commonTerms = commonTerms;
exports.compareCeremonyArtifacts = compareCeremonyArtifacts;
exports.compareHashes = compareHashes;
exports.compileContract = compileContract;
exports.completeMultiPartUpload = completeMultiPartUpload;
exports.computeDiskSizeForVM = computeDiskSizeForVM;
exports.computeSHA256ToHex = computeSHA256ToHex;
exports.computeSmallestPowersOfTauForCircuit = computeSmallestPowersOfTauForCircuit;
exports.convertBytesOrKbToGb = convertBytesOrKbToGb;
exports.convertToDoubleDigits = convertToDoubleDigits;
exports.createCustomLoggerForFile = createCustomLoggerForFile;
exports.createEC2Client = createEC2Client;
exports.createEC2Instance = createEC2Instance;
exports.createS3Bucket = createS3Bucket;
exports.createSSMClient = createSSMClient;
exports.downloadAllCeremonyArtifacts = downloadAllCeremonyArtifacts;
exports.downloadCeremonyArtifact = downloadCeremonyArtifact;
exports.ec2InstanceTag = ec2InstanceTag;
exports.exportVerifierAndVKey = exportVerifierAndVKey;
exports.exportVerifierContract = exportVerifierContract;
exports.exportVkey = exportVkey;
exports.extractPoTFromFilename = extractPoTFromFilename;
exports.extractPrefix = extractPrefix;
exports.extractR1CSInfoValueForGivenKey = extractR1CSInfoValueForGivenKey;
exports.finalContributionIndex = finalContributionIndex;
exports.finalizeCeremony = finalizeCeremony;
exports.finalizeCircuit = finalizeCircuit;
exports.formatSolidityCalldata = formatSolidityCalldata;
exports.formatZkeyIndex = formatZkeyIndex;
exports.fromQueryToFirebaseDocumentInfo = fromQueryToFirebaseDocumentInfo;
exports.generateGROTH16Proof = generateGROTH16Proof;
exports.generateGetObjectPreSignedUrl = generateGetObjectPreSignedUrl;
exports.generatePreSignedUrlsParts = generatePreSignedUrlsParts;
exports.generateValidContributionsAttestation = generateValidContributionsAttestation;
exports.generateZkeyFromScratch = generateZkeyFromScratch;
exports.genesisZkeyIndex = genesisZkeyIndex;
exports.getAllCollectionDocs = getAllCollectionDocs;
exports.getBucketName = getBucketName;
exports.getCeremonyCircuits = getCeremonyCircuits;
exports.getCircuitBySequencePosition = getCircuitBySequencePosition;
exports.getCircuitContributionsFromContributor = getCircuitContributionsFromContributor;
exports.getCircuitsCollectionPath = getCircuitsCollectionPath;
exports.getClosedCeremonies = getClosedCeremonies;
exports.getContributionsCollectionPath = getContributionsCollectionPath;
exports.getContributionsValidityForContributor = getContributionsValidityForContributor;
exports.getCurrentActiveParticipantTimeout = getCurrentActiveParticipantTimeout;
exports.getCurrentFirebaseAuthUser = getCurrentFirebaseAuthUser;
exports.getDocumentById = getDocumentById;
exports.getOpenedCeremonies = getOpenedCeremonies;
exports.getParticipantsCollectionPath = getParticipantsCollectionPath;
exports.getPotStorageFilePath = getPotStorageFilePath;
exports.getPublicAttestationPreambleForContributor = getPublicAttestationPreambleForContributor;
exports.getR1CSInfo = getR1CSInfo;
exports.getR1csStorageFilePath = getR1csStorageFilePath;
exports.getTimeoutsCollectionPath = getTimeoutsCollectionPath;
exports.getTranscriptStorageFilePath = getTranscriptStorageFilePath;
exports.getVerificationKeyStorageFilePath = getVerificationKeyStorageFilePath;
exports.getVerifierContractStorageFilePath = getVerifierContractStorageFilePath;
exports.getWasmStorageFilePath = getWasmStorageFilePath;
exports.getZkeyStorageFilePath = getZkeyStorageFilePath;
exports.githubReputation = githubReputation;
exports.initializeFirebaseCoreServices = initializeFirebaseCoreServices;
exports.isCoordinator = isCoordinator;
exports.multiPartUpload = multiPartUpload;
exports.numExpIterations = numExpIterations;
exports.p256 = p256;
exports.parseCeremonyFile = parseCeremonyFile;
exports.permanentlyStoreCurrentContributionTimeAndHash = permanentlyStoreCurrentContributionTimeAndHash;
exports.potFileDownloadMainUrl = potFileDownloadMainUrl;
exports.potFilenameTemplate = potFilenameTemplate;
exports.powersOfTauFiles = powersOfTauFiles;
exports.progressToNextCircuitForContribution = progressToNextCircuitForContribution;
exports.progressToNextContributionStep = progressToNextContributionStep;
exports.queryCollection = queryCollection;
exports.resumeContributionAfterTimeoutExpiration = resumeContributionAfterTimeoutExpiration;
exports.retrieveCommandOutput = retrieveCommandOutput;
exports.retrieveCommandStatus = retrieveCommandStatus;
exports.runCommandUsingSSM = runCommandUsingSSM;
exports.setupCeremony = setupCeremony;
exports.signInToFirebaseWithCredentials = signInToFirebaseWithCredentials;
exports.solidityVersion = solidityVersion;
exports.startEC2Instance = startEC2Instance;
exports.stopEC2Instance = stopEC2Instance;
exports.temporaryStoreCurrentContributionMultiPartUploadId = temporaryStoreCurrentContributionMultiPartUploadId;
exports.temporaryStoreCurrentContributionUploadedChunkData = temporaryStoreCurrentContributionUploadedChunkData;
exports.terminateEC2Instance = terminateEC2Instance;
exports.toHex = toHex;
exports.verificationKeyAcronym = verificationKeyAcronym;
exports.verifierSmartContractAcronym = verifierSmartContractAcronym;
exports.verifyCeremony = verifyCeremony;
exports.verifyContribution = verifyContribution;
exports.verifyGROTH16Proof = verifyGROTH16Proof;
exports.verifyGROTH16ProofOnChain = verifyGROTH16ProofOnChain;
exports.verifyZKey = verifyZKey;
exports.vmBootstrapCommand = vmBootstrapCommand;
exports.vmBootstrapScriptFilename = vmBootstrapScriptFilename;
exports.vmConfigurationTypes = vmConfigurationTypes;
exports.vmContributionVerificationCommand = vmContributionVerificationCommand;
exports.vmDependenciesAndCacheArtifactsCommand = vmDependenciesAndCacheArtifactsCommand;