@aztec/bb.js 0.11.1 → 0.13.0
This diff reflects the changes between two publicly released versions of the package, as published to their public registry. The information is provided for informational purposes only.
- package/README.md +14 -9
- package/dest/browser/barretenberg_api/index.d.ts +2 -17
- package/dest/browser/barretenberg_api/index.d.ts.map +1 -1
- package/dest/browser/index.js +1 -1
- package/dest/node/barretenberg_api/index.d.ts +2 -17
- package/dest/node/barretenberg_api/index.d.ts.map +1 -1
- package/dest/node/barretenberg_api/index.js +4 -64
- package/dest/node/barretenberg_api/pedersen.test.js +5 -54
- package/dest/node/barretenberg_api/schnorr.test.js +1 -2
- package/dest/node/barretenberg_wasm/barretenberg-threads.wasm +0 -0
- package/dest/node/benchmark/index.d.ts +3 -0
- package/dest/node/benchmark/index.d.ts.map +1 -0
- package/dest/node/benchmark/index.js +25 -0
- package/dest/node/benchmark/timer.d.ts +33 -0
- package/dest/node/benchmark/timer.d.ts.map +1 -0
- package/dest/node/benchmark/timer.js +38 -0
- package/dest/node/main.d.ts.map +1 -1
- package/dest/node/main.js +17 -4
- package/dest/node-cjs/barretenberg_api/index.d.ts +2 -17
- package/dest/node-cjs/barretenberg_api/index.d.ts.map +1 -1
- package/dest/node-cjs/barretenberg_api/index.js +4 -64
- package/dest/node-cjs/barretenberg_api/pedersen.test.js +4 -53
- package/dest/node-cjs/barretenberg_api/schnorr.test.js +1 -2
- package/dest/node-cjs/barretenberg_wasm/barretenberg-threads.wasm +0 -0
- package/dest/node-cjs/benchmark/index.d.ts +3 -0
- package/dest/node-cjs/benchmark/index.d.ts.map +1 -0
- package/dest/node-cjs/benchmark/index.js +30 -0
- package/dest/node-cjs/benchmark/timer.d.ts +33 -0
- package/dest/node-cjs/benchmark/timer.d.ts.map +1 -0
- package/dest/node-cjs/benchmark/timer.js +42 -0
- package/dest/node-cjs/main.d.ts.map +1 -1
- package/dest/node-cjs/main.js +17 -4
- package/package.json +1 -1
- package/src/barretenberg_api/index.ts +4 -91
- package/src/barretenberg_api/pedersen.test.ts +8 -64
- package/src/barretenberg_api/schnorr.test.ts +0 -1
- package/src/benchmark/index.ts +26 -0
- package/src/benchmark/timer.ts +41 -0
- package/src/main.ts +19 -3

package/src/barretenberg_api/index.ts
CHANGED

@@ -17,90 +17,13 @@ export class BarretenbergApi
     await this.binder.wasm.destroy();
   }

-  async …
-    const result = await this.binder.callWasmExport('…
-    return;
-  }
-
-  async pedersenCompressFields(left: Fr, right: Fr): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen___compress_fields', [left, right], [Fr]);
-    return result[0];
-  }
-
-  async pedersenPlookupCompressFields(left: Fr, right: Fr): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen___compress_fields', [left, right], [Fr]);
-    return result[0];
-  }
-
-  async pedersenCompress(inputsBuffer: Fr[]): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen___compress', [inputsBuffer], [Fr]);
-    return result[0];
-  }
-
-  async pedersenPlookupCompress(inputsBuffer: Fr[]): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen___compress', [inputsBuffer], [Fr]);
-    return result[0];
-  }
-
-  async pedersenCompressWithHashIndex(inputsBuffer: Fr[], hashIndex: number): Promise<Fr> {
-    const result = await this.binder.callWasmExport(
-      'pedersen___compress_with_hash_index',
-      [inputsBuffer, hashIndex],
-      [Fr],
-    );
-    return result[0];
-  }
-
-  async pedersenCommit(inputsBuffer: Fr[]): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen___commit', [inputsBuffer], [Fr]);
+  async pedersenCommit(inputsBuffer: Fr[]): Promise<Point> {
+    const result = await this.binder.callWasmExport('pedersen___commit', [inputsBuffer], [Point]);
     return result[0];
   }

-  async …
-    const result = await this.binder.callWasmExport('…
-    return result[0];
-  }
-
-  async pedersenPlookupCommitWithHashIndex(inputsBuffer: Fr[], hashIndex: number): Promise<Fr> {
-    const result = await this.binder.callWasmExport(
-      'pedersen___plookup_commit_with_hash_index',
-      [inputsBuffer, hashIndex],
-      [Fr],
-    );
-    return result[0];
-  }
-
-  async pedersenBufferToField(data: Uint8Array): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen___buffer_to_field', [data], [Fr]);
-    return result[0];
-  }
-
-  async pedersenHashInit(): Promise<void> {
-    const result = await this.binder.callWasmExport('pedersen_hash_init', [], []);
-    return;
-  }
-
-  async pedersenHashPair(left: Fr, right: Fr): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen_hash_pair', [left, right], [Fr]);
-    return result[0];
-  }
-
-  async pedersenHashMultiple(inputsBuffer: Fr[]): Promise<Fr> {
-    const result = await this.binder.callWasmExport('pedersen_hash_multiple', [inputsBuffer], [Fr]);
-    return result[0];
-  }
-
-  async pedersenHashMultipleWithHashIndex(inputsBuffer: Fr[], hashIndex: number): Promise<Fr> {
-    const result = await this.binder.callWasmExport(
-      'pedersen_hash_multiple_with_hash_index',
-      [inputsBuffer, hashIndex],
-      [Fr],
-    );
-    return result[0];
-  }
-
-  async pedersenHashToTree(data: Fr[]): Promise<Fr[]> {
-    const result = await this.binder.callWasmExport('pedersen_hash_to_tree', [data], [VectorDeserializer(Fr)]);
+  async pedersenHashWithHashIndex(inputsBuffer: Fr[], hashIndex: number): Promise<Fr> {
+    const result = await this.binder.callWasmExport('pedersen_hash_with_hash_index', [inputsBuffer, hashIndex], [Fr]);
     return result[0];
   }


@@ -217,16 +140,6 @@ export class BarretenbergApi
     return result[0];
   }

-  async testThreadAbort(): Promise<void> {
-    const result = await this.binder.callWasmExport('test_thread_abort', [], []);
-    return;
-  }
-
-  async testAbort(): Promise<void> {
-    const result = await this.binder.callWasmExport('test_abort', [], []);
-    return;
-  }
-
   async commonInitSlabAllocator(circuitSize: number): Promise<void> {
     const result = await this.binder.callWasmExport('common_init_slab_allocator', [circuitSize], []);
     return;
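
The net effect of the two hunks above is a much smaller Pedersen surface: the compress/plookup/buffer-to-field/hash-to-tree variants and the pedersenHashInit/testAbort helpers are removed, pedersenCommit now deserializes its result as a Point rather than an Fr, and pedersenHashWithHashIndex is the hash entry point added in their place. A minimal usage sketch, assuming Barretenberg, Fr and Point are re-exported from the package root (in this diff they live under src/barretenberg/ and src/types/):

```ts
import { Barretenberg, Fr, Point } from '@aztec/bb.js'; // import path is an assumption

async function pedersenDemo() {
  const api = await Barretenberg.new(1); // single-threaded instance, as in the updated tests

  // pedersenCommit now returns a curve point (x, y) instead of a single field element.
  const commitment: Point = await api.pedersenCommit([new Fr(4n), new Fr(8n), new Fr(12n)]);

  // pedersenHashWithHashIndex replaces the removed pedersen*Compress* / pedersenHashMultiple* family.
  const hash: Fr = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 7);

  console.log({ commitment, hash });
  await api.destroy();
}

pedersenDemo().catch(console.error);
```
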

package/src/barretenberg_api/pedersen.test.ts
CHANGED

@@ -1,85 +1,29 @@
 import { Barretenberg } from '../barretenberg/index.js';
-import { Fr } from '../types/index.js';
+import { Fr, Point } from '../types/index.js';

 describe('pedersen', () => {
   let api: Barretenberg;

   beforeAll(async () => {
     api = await Barretenberg.new(1);
-    await api.pedersenHashInit();
   }, 30000);

   afterAll(async () => {
     await api.destroy();
   });

-  it('…
-    const result = await api.…
-    expect(result).toEqual(new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n));
-  });
-
-  it('pedersenPlookupCompressFields', async () => {
-    const result = await api.pedersenPlookupCompressFields(new Fr(4n), new Fr(8n));
-    expect(result).toEqual(new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n));
-  });
-
-  it('pedersenCompress', async () => {
-    const result = await api.pedersenCompress([new Fr(4n), new Fr(8n), new Fr(12n)]);
-    expect(result).toEqual(new Fr(16354408412011670665169322571938780771784319449166930406648760506154417354381n));
-  });
-
-  it('pedersenPlookupCompress', async () => {
-    const result = await api.pedersenPlookupCompress([new Fr(4n), new Fr(8n), new Fr(12n)]);
-    expect(result).toEqual(new Fr(16354408412011670665169322571938780771784319449166930406648760506154417354381n));
-  });
-
-  it('pedersenCompressWithHashIndex', async () => {
-    const result = await api.pedersenCompressWithHashIndex([new Fr(4n), new Fr(8n)], 7);
+  it('pedersenHashWithHashIndex', async () => {
+    const result = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 7);
     expect(result).toEqual(new Fr(2152386650411553803409271316104075950536496387580531018130718456431861859990n));
   });

   it('pedersenCommit', async () => {
     const result = await api.pedersenCommit([new Fr(4n), new Fr(8n), new Fr(12n)]);
-    expect(result).toEqual(
-      …
-      …
-      …
-      …
-    expect(result).toEqual(new Fr(7336965135159957330095956915667769834743631571088528744280187985812103412470n));
-  });
-
-  it('pedersenBufferToField', async () => {
-    const result = await api.pedersenBufferToField(
-      Buffer.from('Hello world! I am a buffer to be converted to a field!'),
+    expect(result).toEqual(
+      new Point(
+        new Fr(18374309251862457296563484909553154519357910650678202211610516068880120638872n),
+        new Fr(2572141322478528249692953821523229170092797347760799983831061874108357705739n),
+      ),
     );
-    expect(result).toEqual(new Fr(5836632387256708040349959803326023895450290698906238002955147410646852307074n));
-  });
-
-  it('pedersenHashPair', async () => {
-    const result = await api.pedersenHashPair(new Fr(4n), new Fr(8n));
-    expect(result).toEqual(new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n));
-  });
-
-  it('pedersenHashMultiple', async () => {
-    const result = await api.pedersenHashMultiple([new Fr(4n), new Fr(8n), new Fr(12n)]);
-    expect(result).toEqual(new Fr(16354408412011670665169322571938780771784319449166930406648760506154417354381n));
-  });
-
-  it('pedersenHashMultipleWithHashIndex', async () => {
-    const result = await api.pedersenHashMultipleWithHashIndex([new Fr(4n), new Fr(8n)], 7);
-    expect(result).toEqual(new Fr(2152386650411553803409271316104075950536496387580531018130718456431861859990n));
-  });
-
-  it('pedersenHashToTree', async () => {
-    const result = await api.pedersenHashToTree([new Fr(4n), new Fr(8n), new Fr(12n), new Fr(16n)]);
-    expect(result).toEqual([
-      new Fr(4n),
-      new Fr(8n),
-      new Fr(12n),
-      new Fr(16n),
-      new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n),
-      new Fr(18350527319045519333962768191016242826584323959670139897255818770108115223653n),
-      new Fr(5972535902427608430534212385621973704186819235181735133037695406667218179357n),
-    ]);
   });
 });

package/src/benchmark/index.ts
ADDED

@@ -0,0 +1,26 @@
+import * as fs from 'fs';
+export * from './timer.js';
+
+const bfd = (() => {
+  const bfdStr = process.env.BENCHMARK_FD;
+  const bfd = bfdStr ? parseInt(bfdStr) : -1;
+  if (bfd >= 0 && !fs.fstatSync(bfd)) {
+    throw new Error('fd is not open. Did you redirect in your shell?');
+  }
+  return bfd;
+})();
+
+export function writeBenchmark<T>(name: string, value: T, labels: Record<string, any> = {}) {
+  if (bfd === -1) {
+    return;
+  }
+  const data = {
+    timestamp: new Date().toISOString(),
+    name,
+    type: typeof value,
+    value,
+    ...labels,
+  };
+  const jsonl = JSON.stringify(data) + '\n';
+  fs.writeSync(bfd, jsonl);
+}
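
writeBenchmark is a fire-and-forget JSONL emitter: it resolves BENCHMARK_FD once at module load, checks the descriptor with fstatSync, and then appends one JSON object per call (timestamp, name, type, value, plus any caller-supplied labels); with the variable unset it does nothing. A small in-package usage sketch in the style of main.ts; the record name, labels and output file below are made up for illustration:

```ts
import { Timer, writeBenchmark } from './benchmark/index.js';

// Run with an fd opened by the shell, e.g.:
//   BENCHMARK_FD=3 node dest/example.js 3>benchmarks.jsonl
// With BENCHMARK_FD unset, writeBenchmark is a silent no-op (bfd === -1).
async function main() {
  const timer = new Timer();
  await new Promise(resolve => setTimeout(resolve, 100)); // stand-in for real work
  writeBenchmark('example_time', timer.ms(), { acir_test: 'example', threads: 4 });
  // Appends roughly:
  // {"timestamp":"...","name":"example_time","type":"number","value":100,"acir_test":"example","threads":4}
}

main().catch(console.error);
```
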

package/src/benchmark/timer.ts
ADDED

@@ -0,0 +1,41 @@
+/**
+ * Timer class to measure time intervals in milliseconds and seconds.
+ * Upon instantiation, it stores the current timestamp as the starting point.
+ * The 'ms()' method returns the elapsed time in milliseconds,
+ * while the 's()' method returns the elapsed time in seconds.
+ *
+ * @example
+ * const timer = new Timer();
+ * setTimeout(() =\> \{
+ *   console.log(`Elapsed time: ${timer.ms()} ms`);
+ * \}, 1000);
+ */
+export class Timer {
+  private start: number;
+
+  constructor() {
+    this.start = new Date().getTime();
+  }
+
+  /**
+   * Returns the elapsed time in milliseconds since the Timer instance was created.
+   * Provides a simple and convenient way to measure the time duration between two events
+   * or monitor performance of specific code sections.
+   *
+   * @returns The elapsed time in milliseconds.
+   */
+  public ms() {
+    return new Date().getTime() - this.start;
+  }
+
+  /**
+   * Returns the time elapsed since the Timer instance was created, in seconds.
+   * The value is calculated by subtracting the initial start time from the current time
+   * and dividing the result by 1000 to convert milliseconds to seconds.
+   *
+   * @returns The elapsed time in seconds.
+   */
+  public s() {
+    return (new Date().getTime() - this.start) / 1000;
+  }
+}
package/src/main.ts
CHANGED

@@ -5,6 +5,8 @@ import { readFileSync, writeFileSync } from 'fs';
 import { gunzipSync } from 'zlib';
 import { Command } from 'commander';
 import { acvmInfoJson } from './info.js';
+import { Timer, writeBenchmark } from './benchmark/index.js';
+import path from 'path';
 createDebug.log = console.error.bind(console);
 const debug = createDebug('bb.js');

@@ -15,6 +17,7 @@ const debug = createDebug('bb.js');
 // aware of this discrepancy, when creating proofs in bb versus
 // creating the same proofs in the node CLI.
 const MAX_CIRCUIT_SIZE = 2 ** 19;
+const threads = +process.env.HARDWARE_CONCURRENCY! || undefined;

 function getBytecode(bytecodePath: string) {
   const encodedCircuit = readFileSync(bytecodePath);
@@ -41,7 +44,7 @@ async function computeCircuitSize(bytecodePath: string, api: Barretenberg) {
 }

 async function init(bytecodePath: string, crsPath: string) {
-  const api = await Barretenberg.new();
+  const api = await Barretenberg.new(threads);

   const circuitSize = await getGates(bytecodePath, api);
   const subgroupSize = Math.pow(2, Math.ceil(Math.log2(circuitSize)));
@@ -63,7 +66,7 @@ async function init(bytecodePath: string, crsPath: string) {
   await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data()));

   const acirComposer = await api.acirNewAcirComposer(subgroupSize);
-  return { api, acirComposer, circuitSize…
+  return { api, acirComposer, circuitSize, subgroupSize };
 }

 async function initLite() {
@@ -80,12 +83,24 @@ async function initLite() {
 }

 export async function proveAndVerify(bytecodePath: string, witnessPath: string, crsPath: string, isRecursive: boolean) {
-  …
+  /* eslint-disable camelcase */
+  const acir_test = path.basename(process.cwd());
+
+  const { api, acirComposer, circuitSize, subgroupSize } = await init(bytecodePath, crsPath);
   try {
     debug(`creating proof...`);
     const bytecode = getBytecode(bytecodePath);
     const witness = getWitness(witnessPath);
+
+    const pkTimer = new Timer();
+    await api.acirInitProvingKey(acirComposer, bytecode);
+    writeBenchmark('pk_construction_time', pkTimer.ms(), { acir_test, threads });
+    writeBenchmark('gate_count', circuitSize, { acir_test, threads });
+    writeBenchmark('subgroup_size', subgroupSize, { acir_test, threads });
+
+    const proofTimer = new Timer();
     const proof = await api.acirCreateProof(acirComposer, bytecode, witness, isRecursive);
+    writeBenchmark('proof_construction_time', proofTimer.ms(), { acir_test, threads });

     debug(`verifying...`);
     const verified = await api.acirVerifyProof(acirComposer, proof, isRecursive);
@@ -94,6 +109,7 @@ export async function proveAndVerify(bytecodePath: string, witnessPath: string,
   } finally {
     await api.destroy();
   }
+  /* eslint-enable camelcase */
 }

 export async function prove(