@aztec/blob-lib 0.0.1-commit.b655e406 → 0.0.1-commit.fce3e4f
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/batched_blob.d.ts +25 -0
- package/dest/batched_blob.d.ts.map +1 -0
- package/dest/batched_blob.js +20 -0
- package/dest/blob.d.ts +4 -10
- package/dest/blob.d.ts.map +1 -1
- package/dest/blob_batching.d.ts +33 -83
- package/dest/blob_batching.d.ts.map +1 -1
- package/dest/blob_batching.js +68 -105
- package/dest/blob_utils.d.ts +19 -10
- package/dest/blob_utils.d.ts.map +1 -1
- package/dest/blob_utils.js +28 -19
- package/dest/circuit_types/blob_accumulator.d.ts +2 -1
- package/dest/circuit_types/blob_accumulator.d.ts.map +1 -1
- package/dest/circuit_types/blob_accumulator.js +3 -0
- package/dest/circuit_types/final_blob_accumulator.d.ts +1 -1
- package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -1
- package/dest/circuit_types/final_blob_batching_challenges.d.ts +1 -1
- package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -1
- package/dest/circuit_types/index.d.ts +1 -1
- package/dest/encoding/block_blob_data.d.ts +22 -0
- package/dest/encoding/block_blob_data.d.ts.map +1 -0
- package/dest/encoding/block_blob_data.js +65 -0
- package/dest/encoding/block_end_marker.d.ts +10 -0
- package/dest/encoding/block_end_marker.d.ts.map +1 -0
- package/dest/encoding/block_end_marker.js +40 -0
- package/dest/encoding/block_end_state_field.d.ts +12 -0
- package/dest/encoding/block_end_state_field.d.ts.map +1 -0
- package/dest/encoding/block_end_state_field.js +39 -0
- package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
- package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
- package/dest/encoding/checkpoint_blob_data.js +67 -0
- package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
- package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
- package/dest/encoding/checkpoint_end_marker.js +28 -0
- package/dest/encoding/fixtures.d.ts +41 -0
- package/dest/encoding/fixtures.d.ts.map +1 -0
- package/dest/encoding/fixtures.js +139 -0
- package/dest/encoding/index.d.ts +10 -0
- package/dest/encoding/index.d.ts.map +1 -0
- package/dest/encoding/index.js +9 -0
- package/dest/encoding/tx_blob_data.d.ts +19 -0
- package/dest/encoding/tx_blob_data.d.ts.map +1 -0
- package/dest/encoding/tx_blob_data.js +79 -0
- package/dest/encoding/tx_start_marker.d.ts +16 -0
- package/dest/encoding/tx_start_marker.d.ts.map +1 -0
- package/dest/{encoding.js → encoding/tx_start_marker.js} +12 -58
- package/dest/errors.d.ts +1 -1
- package/dest/errors.d.ts.map +1 -1
- package/dest/hash.d.ts +11 -4
- package/dest/hash.d.ts.map +1 -1
- package/dest/hash.js +14 -4
- package/dest/index.d.ts +3 -4
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +2 -3
- package/dest/interface.d.ts +1 -1
- package/dest/kzg_context.d.ts +1 -1
- package/dest/sponge_blob.d.ts +8 -14
- package/dest/sponge_blob.d.ts.map +1 -1
- package/dest/sponge_blob.js +19 -34
- package/dest/testing.d.ts +8 -16
- package/dest/testing.d.ts.map +1 -1
- package/dest/testing.js +34 -64
- package/dest/types.d.ts +2 -1
- package/dest/types.d.ts.map +1 -1
- package/dest/types.js +1 -0
- package/package.json +8 -7
- package/src/batched_blob.ts +25 -0
- package/src/blob_batching.ts +81 -123
- package/src/blob_utils.ts +31 -21
- package/src/circuit_types/blob_accumulator.ts +11 -0
- package/src/encoding/block_blob_data.ts +102 -0
- package/src/encoding/block_end_marker.ts +54 -0
- package/src/encoding/block_end_state_field.ts +59 -0
- package/src/encoding/checkpoint_blob_data.ts +95 -0
- package/src/encoding/checkpoint_end_marker.ts +40 -0
- package/src/encoding/fixtures.ts +209 -0
- package/src/encoding/index.ts +9 -0
- package/src/encoding/tx_blob_data.ts +116 -0
- package/src/{encoding.ts → encoding/tx_start_marker.ts} +18 -75
- package/src/hash.ts +14 -4
- package/src/index.ts +2 -3
- package/src/sponge_blob.ts +21 -34
- package/src/testing.ts +46 -73
- package/src/types.ts +1 -0
- package/dest/deserialize.d.ts +0 -14
- package/dest/deserialize.d.ts.map +0 -1
- package/dest/deserialize.js +0 -33
- package/dest/encoding.d.ts +0 -26
- package/dest/encoding.d.ts.map +0 -1
- package/src/deserialize.ts +0 -38
package/dest/encoding/tx_blob_data.d.ts
ADDED
@@ -0,0 +1,19 @@
+import { Fr } from '@aztec/foundation/fields';
+import { FieldReader } from '@aztec/foundation/serialize';
+import { type TxStartMarker } from './tx_start_marker.js';
+export interface TxBlobData {
+    txStartMarker: TxStartMarker;
+    txHash: Fr;
+    transactionFee: Fr;
+    noteHashes: Fr[];
+    nullifiers: Fr[];
+    l2ToL1Msgs: Fr[];
+    publicDataWrites: [Fr, Fr][];
+    privateLogs: Fr[][];
+    publicLogs: Fr[];
+    contractClassLog: Fr[];
+}
+export declare function encodeTxBlobData(txBlobData: TxBlobData): Fr[];
+export declare function decodeTxBlobData(fields: Fr[] | FieldReader): TxBlobData;
+export declare function getNumTxBlobFields(txStartMarker: Omit<TxStartMarker, 'revertCode' | 'numBlobFields'>): number;
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHhfYmxvYl9kYXRhLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvZW5jb2RpbmcvdHhfYmxvYl9kYXRhLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSwwQkFBMEIsQ0FBQztBQUM5QyxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sNkJBQTZCLENBQUM7QUFHMUQsT0FBTyxFQUFFLEtBQUssYUFBYSxFQUE0QyxNQUFNLHNCQUFzQixDQUFDO0FBSXBHLE1BQU0sV0FBVyxVQUFVO0lBQ3pCLGFBQWEsRUFBRSxhQUFhLENBQUM7SUFDN0IsTUFBTSxFQUFFLEVBQUUsQ0FBQztJQUNYLGNBQWMsRUFBRSxFQUFFLENBQUM7SUFDbkIsVUFBVSxFQUFFLEVBQUUsRUFBRSxDQUFDO0lBQ2pCLFVBQVUsRUFBRSxFQUFFLEVBQUUsQ0FBQztJQUNqQixVQUFVLEVBQUUsRUFBRSxFQUFFLENBQUM7SUFDakIsZ0JBQWdCLEVBQUUsQ0FBQyxFQUFFLEVBQUUsRUFBRSxDQUFDLEVBQUUsQ0FBQztJQUM3QixXQUFXLEVBQUUsRUFBRSxFQUFFLEVBQUUsQ0FBQztJQUNwQixVQUFVLEVBQUUsRUFBRSxFQUFFLENBQUM7SUFDakIsZ0JBQWdCLEVBQUUsRUFBRSxFQUFFLENBQUM7Q0FDeEI7QUFFRCx3QkFBZ0IsZ0JBQWdCLENBQUMsVUFBVSxFQUFFLFVBQVUsR0FBRyxFQUFFLEVBQUUsQ0FhN0Q7QUFFRCx3QkFBZ0IsZ0JBQWdCLENBQUMsTUFBTSxFQUFFLEVBQUUsRUFBRSxHQUFHLFdBQVcsR0FBRyxVQUFVLENBNkR2RTtBQUVELHdCQUFnQixrQkFBa0IsQ0FBQyxhQUFhLEVBQUUsSUFBSSxDQUFDLGFBQWEsRUFBRSxZQUFZLEdBQUcsZUFBZSxDQUFDLFVBZXBHIn0=

package/dest/encoding/tx_blob_data.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"tx_blob_data.d.ts","sourceRoot":"","sources":["../../src/encoding/tx_blob_data.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAG1D,OAAO,EAAE,KAAK,aAAa,EAA4C,MAAM,sBAAsB,CAAC;AAIpG,MAAM,WAAW,UAAU;IACzB,aAAa,EAAE,aAAa,CAAC;IAC7B,MAAM,EAAE,EAAE,CAAC;IACX,cAAc,EAAE,EAAE,CAAC;IACnB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,gBAAgB,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;IAC7B,WAAW,EAAE,EAAE,EAAE,EAAE,CAAC;IACpB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,gBAAgB,EAAE,EAFtest,RUUsQ0FBQzs=
package/dest/encoding/tx_blob_data.js
ADDED
@@ -0,0 +1,79 @@
+import { chunk } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/fields';
+import { FieldReader } from '@aztec/foundation/serialize';
+import { BlobDeserializationError } from '../errors.js';
+import { decodeTxStartMarker, encodeTxStartMarker } from './tx_start_marker.js';
+export function encodeTxBlobData(txBlobData) {
+    return [
+        encodeTxStartMarker(txBlobData.txStartMarker),
+        txBlobData.txHash,
+        txBlobData.transactionFee,
+        ...txBlobData.noteHashes,
+        ...txBlobData.nullifiers,
+        ...txBlobData.l2ToL1Msgs,
+        ...txBlobData.publicDataWrites.flat(),
+        ...txBlobData.privateLogs.map((log)=>[
+            new Fr(log.length),
+            ...log
+        ]).flat(),
+        ...txBlobData.publicLogs,
+        ...txBlobData.contractClassLog
+    ];
+}
+export function decodeTxBlobData(fields) {
+    const reader = FieldReader.asReader(fields);
+    if (reader.isFinished()) {
+        throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for tx blob data.`);
+    }
+    const txStartMarker = decodeTxStartMarker(reader.readField());
+    const checkRemainingFields = (requiredFields, type)=>{
+        if (requiredFields > reader.remainingFields()) {
+            throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for ${type}. Expected ${requiredFields} fields, only ${reader.remainingFields()} remaining.`);
+        }
+    };
+    const numTxEffectFields = txStartMarker.numBlobFields - 1; // -1 because we already read the tx start marker.
+    checkRemainingFields(numTxEffectFields, 'tx effect');
+    const txHash = reader.readField();
+    const transactionFee = reader.readField();
+    checkRemainingFields(txStartMarker.numNoteHashes, 'note hashes');
+    const noteHashes = reader.readFieldArray(txStartMarker.numNoteHashes);
+    checkRemainingFields(txStartMarker.numNullifiers, 'nullifiers');
+    const nullifiers = reader.readFieldArray(txStartMarker.numNullifiers);
+    checkRemainingFields(txStartMarker.numL2ToL1Msgs, 'l2-to-l1 messages');
+    const l2ToL1Msgs = reader.readFieldArray(txStartMarker.numL2ToL1Msgs);
+    checkRemainingFields(txStartMarker.numPublicDataWrites * 2, 'public data writes'); // *2 for leaf slot and value
+    const publicDataWrites = chunk(reader.readFieldArray(txStartMarker.numPublicDataWrites * 2), 2);
+    const privateLogs = Array.from({
+        length: txStartMarker.numPrivateLogs
+    }, ()=>{
+        const length = reader.readU32();
+        checkRemainingFields(length, 'private log');
+        return reader.readFieldArray(length);
+    });
+    checkRemainingFields(txStartMarker.publicLogsLength, 'public logs');
+    const publicLogs = reader.readFieldArray(txStartMarker.publicLogsLength);
+    const contractClassLogBlobDataLength = txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0; // If the log exists, +1 for the contract address
+    checkRemainingFields(contractClassLogBlobDataLength, 'contract class logs');
+    const contractClassLog = reader.readFieldArray(contractClassLogBlobDataLength);
+    return {
+        txStartMarker,
+        txHash,
+        transactionFee,
+        noteHashes,
+        nullifiers,
+        l2ToL1Msgs,
+        publicDataWrites,
+        privateLogs,
+        publicLogs,
+        contractClassLog
+    };
+}
+export function getNumTxBlobFields(txStartMarker) {
+    return 1 + // tx start marker
+    1 + // tx hash
+    1 + // transaction fee
+    txStartMarker.numNoteHashes + txStartMarker.numNullifiers + txStartMarker.numL2ToL1Msgs + txStartMarker.numPublicDataWrites * 2 + // *2 for leaf slot and value per public data write
+    txStartMarker.numPrivateLogs + // +1 length field for each private log
+    txStartMarker.privateLogsLength + txStartMarker.publicLogsLength + txStartMarker.contractClassLogLength + (txStartMarker.contractClassLogLength > 0 ? 1 : 0 // +1 for contract address of the contract class log
+    );
+}

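For orientation, here is a minimal usage sketch of the new per-tx encoding added above. It is illustrative only: it assumes the encoding helpers and the `TxBlobData` type are re-exported from the package's root entry point (as the updated `index.js` later in this diff suggests), and all field values are invented.

```ts
// Illustrative sketch: round-tripping one tx's effects through the new encoding.
import { Fr } from '@aztec/foundation/fields';
import { decodeTxBlobData, encodeTxBlobData, getNumTxBlobFields, type TxBlobData } from '@aztec/blob-lib';

// One note hash and one nullifier; no messages, writes or logs.
const counts = {
  numNoteHashes: 1,
  numNullifiers: 1,
  numL2ToL1Msgs: 0,
  numPublicDataWrites: 0,
  numPrivateLogs: 0,
  privateLogsLength: 0,
  publicLogsLength: 0,
  contractClassLogLength: 0,
};

const txBlobData: TxBlobData = {
  // The marker carries the counts plus the total number of blob fields for this tx.
  txStartMarker: { ...counts, revertCode: 0, numBlobFields: getNumTxBlobFields(counts) },
  txHash: new Fr(1n),
  transactionFee: new Fr(2n),
  noteHashes: [new Fr(3n)],
  nullifiers: [new Fr(4n)],
  l2ToL1Msgs: [],
  publicDataWrites: [],
  privateLogs: [],
  publicLogs: [],
  contractClassLog: [],
};

const fields = encodeTxBlobData(txBlobData); // [marker, txHash, fee, noteHash, nullifier]
const decoded = decodeTxBlobData(fields);    // recovers the same structure, or throws BlobDeserializationError
```
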
package/dest/encoding/tx_start_marker.d.ts
ADDED
@@ -0,0 +1,16 @@
+import { Fr } from '@aztec/foundation/fields';
+export interface TxStartMarker {
+    numBlobFields: number;
+    revertCode: number;
+    numNoteHashes: number;
+    numNullifiers: number;
+    numL2ToL1Msgs: number;
+    numPublicDataWrites: number;
+    numPrivateLogs: number;
+    privateLogsLength: number;
+    publicLogsLength: number;
+    contractClassLogLength: number;
+}
+export declare function encodeTxStartMarker(txStartMarker: TxStartMarker): Fr;
+export declare function decodeTxStartMarker(field: Fr): TxStartMarker;
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHhfc3RhcnRfbWFya2VyLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvZW5jb2RpbmcvdHhfc3RhcnRfbWFya2VyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSwwQkFBMEIsQ0FBQztBQWlCOUMsTUFBTSxXQUFXLGFBQWE7SUFDNUIsYUFhYSxFQUFFLE1BQU0sQ0FBQztJQUN0QixVQUFVLEVBQUUsTUFBTSxDQUFDO0lBQ25CLGFBYWEsRUFBRSxNQUFNLENBQUM7SUFDdEIsYUFhYSxFQUFFLE1BQU0sQ0FBQztJQUN0QixhQWFhLEVBQUUsTUFBTSxDQUFDO0lBQ3RCLG1CQUFtQixFQUFFLE1BQU0sQ0FBQztJQUM1QixjQWNjLEVBQUUsTUFBTSxDQUFDO0lBQ3ZCLGlCQUFpQixFQUFFLE1BQU0sQ0FBQztJQUMxQixnQkFBZ0IsRUFBRSxNQUFNLENBQUM7SUFDekIsc0JBQXNCLEVBQUUsTUFBTSxDQUFDO0NBQ2hDO0FBRUQsd0JBQWdCLG1CQUFtQixDQUFDLGFBQWEsRUFBRSxhQUFhLEdBQUcsRUFBRSxDQXVCcEU7QUFFRCx3QkFBZ0IsbUJBQW1CLENBQUMsS0FBSyxFQUFFLEVBQUUsR0FBRyxhQUFhLENBd0M1RCJ9

package/dest/encoding/tx_start_marker.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"tx_start_marker.d.ts","sourceRoot":"","sources":["../../src/encoding/tx_start_marker.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAiB9C,MAAM,WAAW,aAAa;IAC5B,aAAa,EAAE,MAAM,CAAC;IACtB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;IAC5B,cAAc,EAAE,MAAM,CAAC;IACvB,iBAAiB,EAAE,MAAM,CAAC;IAC1B,gBAAgB,EAAE,MAAM,CAAC;IACzB,sCQUMsQ0ZBQzs=
package/dest/{encoding.js → encoding/tx_start_marker.js}
RENAMED
@@ -1,6 +1,7 @@
-import {
+import { TX_START_PREFIX } from '@aztec/constants';
 import { Fr } from '@aztec/foundation/fields';
-import {
+import { BlobDeserializationError } from '../errors.js';
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr`.
 const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
 const REVERT_CODE_BIT_SIZE = 8n;
 const NUM_NOTE_HASH_BIT_SIZE = 16n;
@@ -8,9 +9,9 @@ const NUM_NULLIFIER_BIT_SIZE = 16n;
 const NUM_L2_TO_L1_MSG_BIT_SIZE = 16n;
 const NUM_PUBLIC_DATA_WRITE_BIT_SIZE = 16n;
 const NUM_PRIVATE_LOG_BIT_SIZE = 16n;
+const PRIVATE_LOGS_LENGTH_BIT_SIZE = 16n;
 const PUBLIC_LOGS_LENGTH_BIT_SIZE = 32n;
 const CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE = 16n;
-// Must match the implementation in `noir-protocol-circuits/crates/rollup-lib/src/tx_base/components/tx_blob_data.nr`.
 export function encodeTxStartMarker(txStartMarker) {
     let value = TX_START_PREFIX;
     value <<= NUM_NOTE_HASH_BIT_SIZE;
@@ -23,6 +24,8 @@ export function encodeTxStartMarker(txStartMarker) {
     value += BigInt(txStartMarker.numPublicDataWrites);
     value <<= NUM_PRIVATE_LOG_BIT_SIZE;
     value += BigInt(txStartMarker.numPrivateLogs);
+    value <<= PRIVATE_LOGS_LENGTH_BIT_SIZE;
+    value += BigInt(txStartMarker.privateLogsLength);
     value <<= PUBLIC_LOGS_LENGTH_BIT_SIZE;
     value += BigInt(txStartMarker.publicLogsLength);
     value <<= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
@@ -43,6 +46,8 @@ export function decodeTxStartMarker(field) {
     value >>= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
     const publicLogsLength = Number(value & 2n ** PUBLIC_LOGS_LENGTH_BIT_SIZE - 1n);
     value >>= PUBLIC_LOGS_LENGTH_BIT_SIZE;
+    const privateLogsLength = Number(value & 2n ** PRIVATE_LOGS_LENGTH_BIT_SIZE - 1n);
+    value >>= PRIVATE_LOGS_LENGTH_BIT_SIZE;
     const numPrivateLogs = Number(value & 2n ** NUM_PRIVATE_LOG_BIT_SIZE - 1n);
     value >>= NUM_PRIVATE_LOG_BIT_SIZE;
     const numPublicDataWrites = Number(value & 2n ** NUM_PUBLIC_DATA_WRITE_BIT_SIZE - 1n);
@@ -53,11 +58,11 @@ export function decodeTxStartMarker(field) {
     value >>= NUM_NULLIFIER_BIT_SIZE;
     const numNoteHashes = Number(value & 2n ** NUM_NOTE_HASH_BIT_SIZE - 1n);
     value >>= NUM_NOTE_HASH_BIT_SIZE;
-    // Do not throw if the prefix doesn't match.
-    // The caller function can check it by calling `isValidTxStartMarker`, and decide what to do if it's incorrect.
     const prefix = value;
+    if (prefix !== TX_START_PREFIX) {
+        throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid tx start marker.`);
+    }
     return {
-        prefix,
         numBlobFields,
         revertCode,
         numNoteHashes,
@@ -65,59 +70,8 @@ export function decodeTxStartMarker(field) {
         numL2ToL1Msgs,
         numPublicDataWrites,
         numPrivateLogs,
+        privateLogsLength,
         publicLogsLength,
         contractClassLogLength
     };
 }
-export function getNumBlobFieldsFromTxStartMarker(field) {
-    return Number(field.toBigInt() & 2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n);
-}
-export function isValidTxStartMarker(txStartMarker) {
-    return txStartMarker.prefix === TX_START_PREFIX;
-}
-export function createBlockEndMarker(numTxs) {
-    // Must match the implementation in `block_rollup_public_inputs_composer.nr > create_block_end_marker`.
-    return new Fr(BLOCK_END_PREFIX * 256n * 256n + BigInt(numTxs));
-}
-export function getNumTxsFromBlockEndMarker(field) {
-    return Number(field.toBigInt() & 0xffffn);
-}
-export function isBlockEndMarker(field) {
-    const value = field.toBigInt();
-    const numTxs = value & 0xffffn;
-    return value - numTxs === BLOCK_END_PREFIX * 256n * 256n;
-}
-/**
- * Check that the fields are emitted from the circuits and conform to the encoding.
- * @param blobFields - The concatenated fields from all blobs of an L1 block.
- */ export function checkBlobFieldsEncoding(blobFields) {
-    const reader = FieldReader.asReader(blobFields);
-    const checkpointPrefix = reader.readField();
-    if (checkpointPrefix.toBigInt() !== BigInt(blobFields.length)) {
-        return false;
-    }
-    const numFieldsInCheckpoint = checkpointPrefix.toNumber();
-    let seenNumTxs = 0;
-    while(reader.cursor < numFieldsInCheckpoint){
-        const currentField = reader.readField();
-        if (isBlockEndMarker(currentField)) {
-            // Found a block end marker. Confirm that the number of txs in this block is correct.
-            const numTxs = getNumTxsFromBlockEndMarker(currentField);
-            if (numTxs !== seenNumTxs) {
-                return false;
-            }
-            seenNumTxs = 0;
-            continue;
-        }
-        // If the field is not a block end marker, it must be a tx start marker.
-        const txStartMarker = decodeTxStartMarker(currentField);
-        if (!isValidTxStartMarker(txStartMarker)) {
-            return false;
-        }
-        seenNumTxs += 1;
-        // Skip the remaining fields in this tx. -1 because we already read the tx start marker.
-        reader.skip(txStartMarker.numBlobFields - 1);
-        // TODO: Check the encoding of the tx if we want to be more strict.
-    }
-    return true;
-}

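The marker is a single field element built by left-shifting fixed-width windows (the `*_BIT_SIZE` constants above) onto `TX_START_PREFIX` and adding each count; decoding peels the windows off in reverse and now rejects a field whose prefix window does not match. A hedged round-trip sketch with invented counts, assuming both helpers are re-exported from the package root:

```ts
// Illustrative only: encode a tx start marker and decode it back.
import { decodeTxStartMarker, encodeTxStartMarker } from '@aztec/blob-lib';

const marker = encodeTxStartMarker({
  numBlobFields: 5,      // marker + txHash + fee + 1 note hash + 1 nullifier
  revertCode: 0,
  numNoteHashes: 1,
  numNullifiers: 1,
  numL2ToL1Msgs: 0,
  numPublicDataWrites: 0,
  numPrivateLogs: 0,
  privateLogsLength: 0,  // new window added in this version
  publicLogsLength: 0,
  contractClassLogLength: 0,
});

// Returns the same counts; with this change decoding throws a
// BlobDeserializationError on a bad prefix instead of relying on the
// removed `isValidTxStartMarker` check.
const decoded = decodeTxStartMarker(marker);
```
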
package/dest/errors.d.ts
CHANGED
@@ -1,4 +1,4 @@
 export declare class BlobDeserializationError extends Error {
     constructor(message: string);
 }
-//# sourceMappingURL=
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZXJyb3JzLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvZXJyb3JzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLHFCQUFhLHdCQUF5QixTQUFRLEtBQUs7SUFDakQsWUFBWSxPQUFPLEVBQUUsTUFBTSxFQUcxQjtDQUNGIn0=

package/dest/errors.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,qBAAa,wBAAyB,SAAQ,KAAK;
+{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,qBAAa,wBAAyB,SAAQ,KAAK;IACjD,YAAY,OAAO,EAAE,MAAM,EAG1B;CACF"}

package/dest/hash.d.ts
CHANGED
@@ -5,9 +5,16 @@ import { BLS12Fr, Fr } from '@aztec/foundation/fields';
 export declare function computeEthVersionedBlobHash(commitment: Buffer): Buffer;
 export declare function computeBlobsHash(evmVersionedBlobHashes: Buffer[]): Fr;
 /**
- *
- *
- * This
+ * Computes a non-standard Poseidon2 hash over the provided fields.
+ *
+ * This function is used to compute:
+ * - `blobFieldsHash` of a checkpoint:
+ *   Verified in the circuit against all fields absorbed into the blob sponge over the entire checkpoint.
+ *   The exact number of fields is encoded in the checkpoint end marker (the last field).
+ *   This hash is used when generating the challenge `z` for all blobs in the checkpoint.
+ * - `spongeBlobHash` of a block:
+ *   Computed from the block's tx effects, its end-state, and the blob fields of all prior blocks in the same checkpoint.
+ *   This hash is included in the block header.
  */
 export declare function computeBlobFieldsHash(fields: Fr[]): Promise<Fr>;
 export declare function computeBlobCommitment(data: Uint8Array): Buffer;
@@ -32,4 +39,4 @@ export declare function computeChallengeZ(blobFieldsHash: Fr, commitment: Buffer
  * rollup circuits.
  */
 export declare function hashNoirBigNumLimbs(field: BLS12Fr): Promise<Fr>;
-//# sourceMappingURL=
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaGFzaC5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2hhc2gudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQ0EsT0FBTyxFQUFFLE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSwwQkFBMEIsQ0FBQztBQU92RDs7R0FFRztBQUNILHdCQUFnQiwyQkFBMkIsQ0FBQyxVQUFVLEVBQUUsTUFBTSxHQUFHLE1BQU0sQ0FJdEU7QUFPRCx3QkFBZ0IsZ0JBQWdCLENBQUMsc0JBQXNCLEVBQUUsTUFBTSxFQUFFLEdBQUcsRUFBRSxDQUVyRTtBQUVEOzs7Ozs7Ozs7OztHQVdHO0FBQ0gsd0JBQXNCLHFCQUFxQixDQUFDLE1BQU0sRUFBRSxFQUFFLEVBQUUsR0FBRyxPQUFPLENBQUMsRUFBRSxDQUFDLENBSXJFO0FBRUQsd0JBQWdCLHFCQUFxQixDQUFDLElBQUksRUFBRSxVQUFVLEdBQUcsTUFBTSxDQU05RDtBQUVEOzs7Ozs7Ozs7Ozs7O0dBYUc7QUFDSCx3QkFBZ0Isa0JBQWtCLENBQUMsVUFBVSxFQUFFLE1BQU0sR0FBRyxDQUFDLEVBQUUsRUFBRSxFQUFFLENBQUMsQ0FNL0Q7QUFFRCx3QkFBc0IsaUJBQWlCLENBQUMsY0FBYyxFQUFFLEVBQUUsRUFBRSxVQUFVLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxFQUFFLENBQUMsQ0FHM0Y7QUFFRDs7O0dBR0c7QUFDSCx3QkFBc0IsbUJBQW1CLENBQUMsS0FBSyxFQUFFLE9BQU8sR0FBRyxPQUFPLENBQUMsRUFBRSxDQUFDLENBR3JFIn0=

package/dest/hash.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../src/hash.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;
+{"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../src/hash.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAOvD;;GAEG;AACH,wBAAgB,2BAA2B,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAItE;AAOD,wBAAgB,gBAAgB,CAAC,sBAAsB,EAAE,MAAM,EAAE,GAAG,EAAE,CAErE;AAED;;;;;;;;;;;GAWG;AACH,wBAAsB,qBAAqB,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAIrE;AAED,wBAAgB,qBAAqB,CAAC,IAAI,EAAE,UAAU,GAAG,MAAM,CAM9D;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,kBAAkB,CAAC,UAAU,EAAE,MAAM,GAAG,CAAC,EAAE,EAAE,EAAE,CAAC,CAM/D;AAED,wBAAsB,iBAAiB,CAAC,cAAc,EAAE,EAAE,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC,CAG3F;AAED;;;GAGG;AACH,wBAAsB,mBAAmB,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,EAAE,CAADLENBR3JFIn0="}

package/dest/hash.js
CHANGED
@@ -1,6 +1,7 @@
 import { poseidon2Hash, sha256, sha256ToField } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
 import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
+import { SpongeBlob } from './sponge_blob.js';
 const VERSIONED_HASH_VERSION_KZG = 0x01;
 /**
  * Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
@@ -18,11 +19,20 @@ export function computeBlobsHash(evmVersionedBlobHashes) {
     return sha256ToField(evmVersionedBlobHashes);
 }
 /**
- *
- *
- * This
+ * Computes a non-standard Poseidon2 hash over the provided fields.
+ *
+ * This function is used to compute:
+ * - `blobFieldsHash` of a checkpoint:
+ *   Verified in the circuit against all fields absorbed into the blob sponge over the entire checkpoint.
+ *   The exact number of fields is encoded in the checkpoint end marker (the last field).
+ *   This hash is used when generating the challenge `z` for all blobs in the checkpoint.
+ * - `spongeBlobHash` of a block:
+ *   Computed from the block's tx effects, its end-state, and the blob fields of all prior blocks in the same checkpoint.
+ *   This hash is included in the block header.
  */ export async function computeBlobFieldsHash(fields) {
-
+    const sponge = SpongeBlob.init();
+    await sponge.absorb(fields);
+    return sponge.squeeze();
 }
 export function computeBlobCommitment(data) {
     if (data.length !== BYTES_PER_BLOB) {

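As the rewritten body shows, `computeBlobFieldsHash` is now just the checkpoint blob sponge initialised, fed all fields, and squeezed. A small illustrative call follows; it assumes the function is re-exported from the package root and uses invented values (run in an ESM context with top-level await):

```ts
// Illustrative: hash the concatenated blob fields of a checkpoint.
import { Fr } from '@aztec/foundation/fields';
import { computeBlobFieldsHash } from '@aztec/blob-lib';

const checkpointFields = [new Fr(1n), new Fr(2n), new Fr(3n)];
// Equivalent to SpongeBlob.init() -> absorb(checkpointFields) -> squeeze().
const blobFieldsHash = await computeBlobFieldsHash(checkpointFields);
```
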
package/dest/index.d.ts
CHANGED
@@ -1,11 +1,10 @@
+export * from './batched_blob.js';
 export * from './blob.js';
 export * from './blob_batching.js';
 export * from './blob_utils.js';
 export * from './circuit_types/index.js';
-export * from './
-export * from './encoding.js';
-export * from './errors.js';
+export * from './encoding/index.js';
 export * from './hash.js';
 export * from './interface.js';
 export * from './sponge_blob.js';
-//# sourceMappingURL=
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLG1CQUFtQixDQUFDO0FBQ2xDLGNBQWMsV0FBVyxDQUFDO0FBQzFCLGNBQWMsb0JBQW9CLENBQUM7QUFDbkMsY0FBYyxpQkFBaUIsQ0FBQztBQUNoQyxjQUFjLDBCQUEwQixDQUFDO0FBQ3pDLGNBQWMscUJBQXFCLENBQUM7QUFDcEMsY0FBYyxXQUFXLENBQUM7QUFDMUIsY0FBYyxnQkFBZ0IsQ0FBQztBQUMvQixjQUFjLGtCQUFrQixDQUFDIn0=

package/dest/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,WAAW,CAAC;AAC1B,cAAc,oBAAoB,CAAC;AACnC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,mBAAmB,CAAC;AAClC,cAAc,WAAW,CAAC;AAC1B,cAAc,oBAAoB,CAAC;AACnC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,kBAArGVzQ0FBQyJ9"}

package/dest/index.js
CHANGED
@@ -1,10 +1,9 @@
+export * from './batched_blob.js';
 export * from './blob.js';
 export * from './blob_batching.js';
 export * from './blob_utils.js';
 export * from './circuit_types/index.js';
-export * from './
-export * from './encoding.js';
-export * from './errors.js';
+export * from './encoding/index.js';
 export * from './hash.js';
 export * from './interface.js';
 export * from './sponge_blob.js';

package/dest/interface.d.ts
CHANGED
@@ -6,4 +6,4 @@ export interface BlobJson {
     index: string;
     kzg_commitment: string;
 }
-//# sourceMappingURL=
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW50ZXJmYWNlLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvaW50ZXJmYWNlLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOztHQUVHO0FBQ0gsTUFBTSxXQUFXLFFBQVE7SUFDdkIsSUFBSSxFQUFFLE1BQU0sQ0FBQztJQUNiLEtBQUssRUFBRSxNQUFNLENBQUM7SUFDZCxjQUFjLEVBQUUsTUFBTSxDQUFDO0NBQ3hCIn0=

package/dest/kzg_context.d.ts
CHANGED
@@ -1,4 +1,4 @@
 import { DasContextJs } from '@crate-crypto/node-eth-kzg';
 export * from '@crate-crypto/node-eth-kzg';
 export declare const kzg: DasContextJs;
-//# sourceMappingURL=
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoia3pnX2NvbnRleHQuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9remdfY29udGV4dC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sNEJBQTRCLENBQUM7QUFFMUQsY0FBYyw0QkFBNEIsQ0FBQztBQUUzQyxlQUFPLE1BQU0sR0FBRyxjQUE0QyxDQUFDIn0=

package/dest/sponge_blob.d.ts
CHANGED
@@ -6,19 +6,18 @@ import { BufferReader, FieldReader, type Tuple } from '@aztec/foundation/seriali
  * See noir-projects/noir-protocol-circuits/crates/types/src/abis/sponge_blob.nr.
  */
 export declare class SpongeBlob {
-    /** Sponge with absorbed fields that will go into one or more blobs. */
     readonly sponge: Poseidon2Sponge;
-    /** Number of effects absorbed so far. */
     numAbsorbedFields: number;
-
-    readonly numExpectedFields: number;
+    static MAX_FIELDS: number;
     constructor(
     /** Sponge with absorbed fields that will go into one or more blobs. */
     sponge: Poseidon2Sponge,
    /** Number of effects absorbed so far. */
-    numAbsorbedFields: number
-    /**
-
+    numAbsorbedFields: number);
+    /**
+     * Initialize the sponge blob to absorb data for a checkpoint.
+     */
+    static init(): SpongeBlob;
     static fromBuffer(buffer: Buffer | BufferReader): SpongeBlob;
     toBuffer(): Buffer<ArrayBufferLike>;
     static getFields(fields: FieldsOf<SpongeBlob>): (number | Poseidon2Sponge)[];
@@ -28,11 +27,6 @@ export declare class SpongeBlob {
     absorb(fields: Fr[]): Promise<void>;
     squeeze(): Promise<Fr>;
     static empty(): SpongeBlob;
-    /**
-     * Initialize the sponge blob with the number of expected fields in the checkpoint and absorb it as the first field.
-     * Note: `numExpectedFields` includes the first field absorbed in this method.
-     */
-    static init(numExpectedFields: number): Promise<SpongeBlob>;
 }
 export declare class Poseidon2Sponge {
     cache: Tuple<Fr, 3>;
@@ -46,9 +40,9 @@ export declare class Poseidon2Sponge {
     toFields(): Fr[];
     static fromFields(fields: Fr[] | FieldReader): Poseidon2Sponge;
     static empty(): Poseidon2Sponge;
-    static init(
+    static init(iv: Fr): Poseidon2Sponge;
     performDuplex(): Promise<void>;
     absorb(fields: Fr[]): Promise<void>;
     squeeze(): Promise<Fr>;
 }
-//# sourceMappingURL=
+//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3BvbmdlX2Jsb2IuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9zcG9uZ2VfYmxvYi50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFDQSxPQUFPLEVBQUUsS0FBSyxRQUFRLEVBQWEsTUFBTSx5QkFBeUIsQ0FBQztBQUVuRSxPQUFPLEVBQUUsRUFBRSxFQUFFLE1BQU0sMEJBQTBCLENBQUM7QUFDOUMsT0FBTyxFQUNMLFlBQVksRUFDWixXQUFXLEVBQ1gsS0FBSyxLQUFLLEVBR1gsTUFBTSw2QkFBNkIsQ0FBQztBQUVyQzs7O0dBR0c7QUFDSCxxQkFBYSxVQUFVO2FBS0gsTUFBTSxFQUFFLGVBQWU7SUFFaEMsaUJBQWlCLEVBQUUsTUFBTTtJQU5sQyxNQUFNLENBQUMsVUFBVSxTQUEwQztJQUUzRDtJQUNFLHVFQUF1RTtJQUN2RCxNQUFNLEVBQUUsZUFBZTtJQUN2Qyx5Q0FBeUM7SUFDbEMsaUJBQWlCLEVBQUUsTUFBTSxFQUM5QjtJQUVKOztPQUVHO0lBQ0gsTUFBTSxDQUFDLElBQUksSUFBSSxVQUFVLENBS3hCO0lBRUQsTUFBTSxDQUFDLFVBQVUsQ0FBQyxNQUFNLEVBQUUsTUFBTSxHQUFHLFlBQVksR0FBRyxVQUFVLENBRzNEO0lBRUQsUUFBUSw0QkFFUDtJQUVELE1BQU0sQ0FBQyxTQUFTLENBQUMsTUFBTSxFQUFFLFFBQVEsQ0FBQyxVQUFVLENBQUMsZ0NBRTVDO0lBRUQsUUFBUSxJQUFJLEVBQUUsRUFBRSxDQUVmO0lBRUQsTUFBTSxDQUFDLFVBQVUsQ0FBQyxNQUFNLEVBQUUsRUFBRSxFQUFFLEdBQUcsV0FBVyxHQUFHLFVBQVUsQ0FHeEQ7SUFFRCxLQUFLLGVBRUo7SUFFSyxNQUFNLENBQUMsTUFBTSxFQUFFLEVBQUUsRUFBRSxpQkFReEI7SUFFSyxPQUFPLElBQUksT0FBTyxDQUFDLEVBQUUsQ0FBQyxDQUUzQjtJQUVELE1BQU0sQ0FBQyxLQUFLLElBQUksVUFBVSxDQUV6QjtDQUNGO0FBR0QscUJBQWEsZUFBZTtJQUVqQixLQUFLLEVBQUUsS0FBSyxDQUFDLEVBQUUsRUFBRSxDQUFDLENBQUM7SUFDbkIsS0FBSyxFQUFFLEtBQUssQ0FBQyxFQUFFLEVBQUUsQ0FBQyxDQUFDO0lBQ25CLFNBQVMsRUFBRSxNQUFNO0lBQ2pCLFdBQVcsRUFBRSxPQUFPO0lBSjdCLFlBQ1MsS0FBSyxFQUFFLEtBQUssQ0FBQyxFQUFFLEVBQUUsQ0FBQyxDQUFDLEVBQ25CLEtBQUssRUFBRSxLQUFLLENBQUMsRUFBRSxFQUFFLENBQUMsQ0FBQyxFQUNuQixTQUFTLEVBQUUsTUFBTSxFQUNqQixXQUFXLEVBQUUsT0FBTyxFQUN6QjtJQUVKLE1BQU0sQ0FBQyxVQUFVLENBQUMsTUFBTSxFQUFFLE1BQU0sR0FBRyxZQUFZLEdBQUcsZUFBZSxDQVFoRTtJQUVELFFBQVEsNEJBRVA7SUFFRCxNQUFNLENBQUMsU0FBUyxDQUFDLE1BQU0sRUFBRSxRQUFRLENBQUMsZUFBZSxDQUFDLDBEQUVqRDtJQUVELFFBQVEsSUFBSSxFQUFFLEVBQUUsQ0FFZjtJQUVELE1BQU0sQ0FBQyxVQUFVLENBQUMsTUFBTSxFQUFFLEVBQUUsRUFBRSxHQUFHLFdBQVcsR0FBRyxlQUFlLENBUTdEO0lBRUQsTUFBTSxDQUFDLEtBQUssSUFBSSxlQUFlLENBTzlCO0lBRUQsTUFBTSxDQUFDLElBQUksQ0FBQyxFQUFFLEVBQUUsRUFBRSxHQUFHLGVBQWUsQ0FJbkM7SUFLSyxhQUFhLGtCQVNsQjtJQUVLLE1BQU0sQ0FBQyxNQUFNLEVBQUUsRUFBRSxFQUFFLGlCQWF4QjtJQUVLLE9BQU8sSUFBSSxPQUFPLENBQUMsRUFBRSxDQUFDLENBTzNCO0NBQ0YifQ==

package/dest/sponge_blob.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"sponge_blob.d.ts","sourceRoot":"","sources":["../src/sponge_blob.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,QAAQ,EAAa,MAAM,yBAAyB,CAAC;AAEnE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EACL,YAAY,EACZ,WAAW,EACX,KAAK,KAAK,EAGX,MAAM,6BAA6B,CAAC;AAErC;;;GAGG;AACH,qBAAa,UAAU;
+{"version":3,"file":"sponge_blob.d.ts","sourceRoot":"","sources":["../src/sponge_blob.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,QAAQ,EAAa,MAAM,yBAAyB,CAAC;AAEnE,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAC9C,OAAO,EACL,YAAY,EACZ,WAAW,EACX,KAAK,KAAK,EAGX,MAAM,6BAA6B,CAAC;AAErC;;;GAGG;AACH,qBAAa,VQUFVO2FBS0gsTUFBTSxFQUFFLGVBQWU7...}

package/dest/sponge_blob.js
CHANGED
@@ -1,4 +1,4 @@
-import { TWO_POW_64 } from '@aztec/constants';
+import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, TWO_POW_64 } from '@aztec/constants';
 import { makeTuple } from '@aztec/foundation/array';
 import { poseidon2Permutation } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
@@ -9,15 +9,22 @@ import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from
  */ export class SpongeBlob {
     sponge;
     numAbsorbedFields;
-
-    constructor(/** Sponge with absorbed fields that will go into one or more blobs. */ sponge, /** Number of effects absorbed so far. */ numAbsorbedFields
+    static MAX_FIELDS = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB;
+    constructor(/** Sponge with absorbed fields that will go into one or more blobs. */ sponge, /** Number of effects absorbed so far. */ numAbsorbedFields){
         this.sponge = sponge;
         this.numAbsorbedFields = numAbsorbedFields;
-
+    }
+    /**
+     * Initialize the sponge blob to absorb data for a checkpoint.
+     */ static init() {
+        // This must match the implementation in noir-projects/noir-protocol-circuits/types/src/abis/sponge_blob.nr
+        const iv = new Fr(BigInt(SpongeBlob.MAX_FIELDS) * TWO_POW_64);
+        const sponge = Poseidon2Sponge.init(iv);
+        return new SpongeBlob(sponge, 0);
     }
     static fromBuffer(buffer) {
         const reader = BufferReader.asReader(buffer);
-        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber()
+        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber());
     }
     toBuffer() {
         return serializeToBuffer(...SpongeBlob.getFields(this));
@@ -25,8 +32,7 @@ import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from
     static getFields(fields) {
         return [
             fields.sponge,
-            fields.numAbsorbedFields
-            fields.numExpectedFields
+            fields.numAbsorbedFields
         ];
     }
     toFields() {
@@ -34,43 +40,23 @@ import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from
     }
     static fromFields(fields) {
         const reader = FieldReader.asReader(fields);
-        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readField().toNumber()
+        return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readField().toNumber());
     }
     clone() {
         return SpongeBlob.fromBuffer(this.toBuffer());
     }
     async absorb(fields) {
-        if (this.numAbsorbedFields + fields.length >
-            throw new Error(`Attempted to fill spongeBlob with ${this.numAbsorbedFields + fields.length}, but it has a max of ${
+        if (this.numAbsorbedFields + fields.length > SpongeBlob.MAX_FIELDS) {
+            throw new Error(`Attempted to fill spongeBlob with ${this.numAbsorbedFields + fields.length}, but it has a max of ${SpongeBlob.MAX_FIELDS}`);
         }
         await this.sponge.absorb(fields);
         this.numAbsorbedFields += fields.length;
     }
     async squeeze() {
-
-        // NB: There is currently no use case in which we don't 'fill' a blob sponge, but adding for completeness
-        if (this.numAbsorbedFields != this.numExpectedFields) {
-            await this.sponge.absorb([
-                Fr.ONE
-            ]);
-        }
-        return this.sponge.squeeze();
+        return await this.sponge.squeeze();
     }
     static empty() {
-        return new SpongeBlob(Poseidon2Sponge.empty(), 0
-    }
-    /**
-     * Initialize the sponge blob with the number of expected fields in the checkpoint and absorb it as the first field.
-     * Note: `numExpectedFields` includes the first field absorbed in this method.
-     */ static async init(numExpectedFields) {
-        // This must match what the checkpoint root rollup circuit expects.
-        // See noir-projects/noir-protocol-circuits/types/src/abis/sponge_blob.nr -> init_for_checkpoint.
-        const sponge = Poseidon2Sponge.init(numExpectedFields);
-        await sponge.absorb([
-            new Fr(numExpectedFields)
-        ]);
-        const numAbsorbedFields = 1;
-        return new SpongeBlob(sponge, numAbsorbedFields, numExpectedFields);
+        return new SpongeBlob(Poseidon2Sponge.empty(), 0);
     }
 }
 // This is just noir's stdlib version of the poseidon2 sponge. We use it for a blob-specific implmentation of the hasher.
@@ -110,8 +96,7 @@ export class Poseidon2Sponge {
     static empty() {
         return new Poseidon2Sponge(makeTuple(3, ()=>Fr.ZERO), makeTuple(4, ()=>Fr.ZERO), 0, false);
     }
-    static init(
-        const iv = new Fr(numExpectedFields).mul(new Fr(TWO_POW_64));
+    static init(iv) {
         const sponge = Poseidon2Sponge.empty();
         sponge.state[3] = iv;
         return sponge;

CHANGED
|
@@ -1,7 +1,9 @@
|
|
|
1
|
-
import { Fr } from '@aztec/foundation/fields';
|
|
2
1
|
import { Blob } from './blob.js';
|
|
3
|
-
import {
|
|
2
|
+
import { BlobAccumulator } from './circuit_types/blob_accumulator.js';
|
|
3
|
+
import { FinalBlobAccumulator } from './circuit_types/final_blob_accumulator.js';
|
|
4
|
+
import { FinalBlobBatchingChallenges } from './circuit_types/final_blob_batching_challenges.js';
|
|
4
5
|
import { SpongeBlob } from './sponge_blob.js';
|
|
6
|
+
export * from './encoding/fixtures.js';
|
|
5
7
|
/**
|
|
6
8
|
* Makes arbitrary poseidon sponge for blob inputs.
|
|
7
9
|
* Note: will not verify inside the circuit.
|
|
@@ -15,19 +17,9 @@ export declare function makeSpongeBlob(seed?: number): SpongeBlob;
|
|
|
15
17
|
* @param seed - The seed to use for generating the blob accumulator.
|
|
16
18
|
* @returns A blob accumulator instance.
|
|
17
19
|
*/
|
|
18
|
-
export declare function
|
|
19
|
-
export declare function
|
|
20
|
-
export declare function
|
|
21
|
-
export declare function makeEncodedBlobFields(length: number): Fr[];
|
|
22
|
-
/**
|
|
23
|
-
* Make an encoded blob with the given length
|
|
24
|
-
*
|
|
25
|
-
* This will deserialise correctly in the archiver
|
|
26
|
-
* @param length
|
|
27
|
-
* @returns
|
|
28
|
-
*/
|
|
29
|
-
export declare function makeEncodedBlob(length: number): Blob;
|
|
30
|
-
export declare function makeEncodedBlobs(length: number): Blob[];
|
|
20
|
+
export declare function makeBlobAccumulator(seed?: number): BlobAccumulator;
|
|
21
|
+
export declare function makeFinalBlobAccumulator(seed?: number): FinalBlobAccumulator;
|
|
22
|
+
export declare function makeFinalBlobBatchingChallenges(seed?: number): FinalBlobBatchingChallenges;
|
|
31
23
|
/**
|
|
32
24
|
* Make a blob with random fields.
|
|
33
25
|
*
|
|
@@ -36,4 +28,4 @@ export declare function makeEncodedBlobs(length: number): Blob[];
|
|
|
36
28
|
* @returns
|
|
37
29
|
*/
|
|
38
30
|
export declare function makeRandomBlob(length: number): Blob;
|
|
39
|
-
//# sourceMappingURL=
|
|
31
|
+
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidGVzdGluZy5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL3Rlc3RpbmcudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBR0EsT0FBTyxFQUFFLElBQUksRUFBRSxNQUFNLFdBQVcsQ0FBQztBQUNqQyxPQUFPLEVBQUUsZUFBZSxFQUFFLE1BQU0scUNBQXFDLENBQUM7QUFDdEUsT0FBTyxFQUFFLG9CQUFvQixFQUFFLE1BQU0sMkNBQTJDLENBQUM7QUFDakYsT0FBTyxFQUFFLDJCQUEyQixFQUFFLE1BQU0sbURBQW1ELENBQUM7QUFDaEcsT0FBTyxFQUFtQixVQUFVLEVBQUUsTUFBTSxrQkFBa0IsQ0FBQztBQUUvRCxjQUFjLHdCQUF3QixDQUFDO0FBRXZDOzs7OztHQUtHO0FBQ0gsd0JBQWdCLGNBQWMsQ0FBQyxJQUFJLFNBQUksR0FBRyxVQUFVLENBVW5EO0FBMEJEOzs7OztHQUtHO0FBQ0gsd0JBQWdCLG1CQUFtQixDQUFDLElBQUksU0FBSSxHQUFHLGVBQWUsQ0FTN0Q7QUFFRCx3QkFBZ0Isd0JBQXdCLENBQUMsSUFBSSxTQUFJLHdCQU9oRDtBQUVELHdCQUFnQiwrQkFBK0IsQ0FBQyxJQUFJLFNBQUksK0JBRXZEO0FBRUQ7Ozs7OztHQU1HO0FBQ0gsd0JBQWdCLGNBQWMsQ0FBQyxNQUFNLEVBQUUsTUFBTSxHQUFHLElBQUksQ0FFbkQifQ==
|
package/dest/testing.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"testing.d.ts","sourceRoot":"","sources":["../src/testing.ts"],"names":[],"mappings":"AAGA,OAAO,
+{"version":3,"file":"testing.d.ts","sourceRoot":"","sources":["../src/testing.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,eAAe,EAAE,MAAM,qCAAqC,CAAC;AACtE,OAAO,EAAE,oBAAoB,EAAE,MAAM,2CAA2C,CAAC;AACjF,OAAO,EAAE,2BAA2B,EAAE,MAAM,mDAAmD,CAAC;AAChG,OAAO,EAAmB,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAE/D,cAAc,wBAAwB,CAAC;AAEvC;;;;;GAKG;AACH,wBAAgB,cAAc,CAAC,IAAI,SAAI,GAAG,VQVUsQ0FVbkQ7AAA0BkQ7Ozs7O0dBS0c7QUFDSCx3QkFBZ0IsbUJBQW1CLENBQUMsSUFBSSxTQUFJLEdBQUcsZUFBZSxDQVM3RDtBQUVELHdCQUFnQix3QkFBd0IsQ0FBQyxJQUFJLFNBQUksd0JBT2hEO0FBRUQsd0JBQWdCLCtCQUErQixDQUFDLElBQUksU0FBSSwrQkFFdkQ7QUFFRDs7Ozs7O0dNUc7QUFDSCx3QkFBZ0IsY0FBYyxDQUFDLE1BQU0sRUFBRSxNQUFNLEdBQUcsSUFBSSxDQUVuRCJ9"}
