@aztec/blob-lib 3.0.0-devnet.2 → 3.0.0-devnet.20251212

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/dest/batched_blob.d.ts +26 -0
  2. package/dest/batched_blob.d.ts.map +1 -0
  3. package/dest/batched_blob.js +20 -0
  4. package/dest/blob.d.ts +6 -11
  5. package/dest/blob.d.ts.map +1 -1
  6. package/dest/blob.js +5 -2
  7. package/dest/blob_batching.d.ts +35 -84
  8. package/dest/blob_batching.d.ts.map +1 -1
  9. package/dest/blob_batching.js +73 -108
  10. package/dest/blob_utils.d.ts +21 -11
  11. package/dest/blob_utils.d.ts.map +1 -1
  12. package/dest/blob_utils.js +29 -20
  13. package/dest/circuit_types/blob_accumulator.d.ts +4 -2
  14. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -1
  15. package/dest/circuit_types/blob_accumulator.js +5 -1
  16. package/dest/circuit_types/final_blob_accumulator.d.ts +3 -2
  17. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -1
  18. package/dest/circuit_types/final_blob_accumulator.js +2 -1
  19. package/dest/circuit_types/final_blob_batching_challenges.d.ts +3 -2
  20. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -1
  21. package/dest/circuit_types/final_blob_batching_challenges.js +2 -1
  22. package/dest/circuit_types/index.d.ts +1 -1
  23. package/dest/encoding/block_blob_data.d.ts +22 -0
  24. package/dest/encoding/block_blob_data.d.ts.map +1 -0
  25. package/dest/encoding/block_blob_data.js +65 -0
  26. package/dest/encoding/block_end_marker.d.ts +11 -0
  27. package/dest/encoding/block_end_marker.d.ts.map +1 -0
  28. package/dest/encoding/block_end_marker.js +41 -0
  29. package/dest/encoding/block_end_state_field.d.ts +12 -0
  30. package/dest/encoding/block_end_state_field.d.ts.map +1 -0
  31. package/dest/encoding/block_end_state_field.js +39 -0
  32. package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
  33. package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
  34. package/dest/encoding/checkpoint_blob_data.js +67 -0
  35. package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
  36. package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
  37. package/dest/encoding/checkpoint_end_marker.js +28 -0
  38. package/dest/encoding/fixtures.d.ts +41 -0
  39. package/dest/encoding/fixtures.d.ts.map +1 -0
  40. package/dest/encoding/fixtures.js +140 -0
  41. package/dest/encoding/index.d.ts +10 -0
  42. package/dest/encoding/index.d.ts.map +1 -0
  43. package/dest/encoding/index.js +9 -0
  44. package/dest/encoding/tx_blob_data.d.ts +19 -0
  45. package/dest/encoding/tx_blob_data.d.ts.map +1 -0
  46. package/dest/encoding/tx_blob_data.js +79 -0
  47. package/dest/encoding/tx_start_marker.d.ts +16 -0
  48. package/dest/encoding/tx_start_marker.d.ts.map +1 -0
  49. package/dest/{encoding.js → encoding/tx_start_marker.js} +13 -59
  50. package/dest/errors.d.ts +1 -1
  51. package/dest/errors.d.ts.map +1 -1
  52. package/dest/hash.d.ts +13 -5
  53. package/dest/hash.d.ts.map +1 -1
  54. package/dest/hash.js +19 -8
  55. package/dest/index.d.ts +4 -4
  56. package/dest/index.d.ts.map +1 -1
  57. package/dest/index.js +3 -3
  58. package/dest/interface.d.ts +1 -1
  59. package/dest/kzg_context.d.ts +6 -2
  60. package/dest/kzg_context.d.ts.map +1 -1
  61. package/dest/kzg_context.js +12 -3
  62. package/dest/sponge_blob.d.ts +9 -15
  63. package/dest/sponge_blob.d.ts.map +1 -1
  64. package/dest/sponge_blob.js +21 -36
  65. package/dest/testing.d.ts +8 -16
  66. package/dest/testing.d.ts.map +1 -1
  67. package/dest/testing.js +35 -64
  68. package/dest/types.d.ts +2 -1
  69. package/dest/types.d.ts.map +1 -1
  70. package/dest/types.js +1 -0
  71. package/package.json +8 -7
  72. package/src/batched_blob.ts +26 -0
  73. package/src/blob.ts +5 -2
  74. package/src/blob_batching.ts +91 -126
  75. package/src/blob_utils.ts +33 -22
  76. package/src/circuit_types/blob_accumulator.ts +13 -1
  77. package/src/circuit_types/final_blob_accumulator.ts +2 -1
  78. package/src/circuit_types/final_blob_batching_challenges.ts +2 -1
  79. package/src/encoding/block_blob_data.ts +102 -0
  80. package/src/encoding/block_end_marker.ts +55 -0
  81. package/src/encoding/block_end_state_field.ts +59 -0
  82. package/src/encoding/checkpoint_blob_data.ts +95 -0
  83. package/src/encoding/checkpoint_end_marker.ts +40 -0
  84. package/src/encoding/fixtures.ts +210 -0
  85. package/src/encoding/index.ts +9 -0
  86. package/src/encoding/tx_blob_data.ts +116 -0
  87. package/src/{encoding.ts → encoding/tx_start_marker.ts} +19 -76
  88. package/src/hash.ts +20 -8
  89. package/src/index.ts +3 -3
  90. package/src/kzg_context.ts +12 -1
  91. package/src/sponge_blob.ts +23 -36
  92. package/src/testing.ts +47 -73
  93. package/src/types.ts +1 -0
  94. package/dest/deserialize.d.ts +0 -14
  95. package/dest/deserialize.d.ts.map +0 -1
  96. package/dest/deserialize.js +0 -33
  97. package/dest/encoding.d.ts +0 -26
  98. package/dest/encoding.d.ts.map +0 -1
  99. package/src/deserialize.ts +0 -38
package/src/encoding/checkpoint_blob_data.ts ADDED
@@ -0,0 +1,95 @@
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
+
+import { BlobDeserializationError } from '../errors.js';
+import { type BlockBlobData, decodeBlockBlobData, encodeBlockBlobData } from './block_blob_data.js';
+import {
+  type CheckpointEndMarker,
+  decodeCheckpointEndMarker,
+  encodeCheckpointEndMarker,
+  isCheckpointEndMarker,
+} from './checkpoint_end_marker.js';
+import type { TxStartMarker } from './tx_start_marker.js';
+
+export interface CheckpointBlobData {
+  checkpointEndMarker: CheckpointEndMarker;
+  blocks: BlockBlobData[];
+}
+
+export function encodeCheckpointBlobData(checkpointBlobData: CheckpointBlobData): Fr[] {
+  return [
+    ...checkpointBlobData.blocks.map(block => encodeBlockBlobData(block)).flat(),
+    encodeCheckpointEndMarker(checkpointBlobData.checkpointEndMarker),
+  ];
+}
+
+export function encodeCheckpointBlobDataFromBlocks(blocks: BlockBlobData[]): Fr[] {
+  const blocksBlobFields = blocks.map(block => encodeBlockBlobData(block)).flat();
+  const numBlobFields = blocksBlobFields.length + 1; // +1 for the checkpoint end marker.
+  return blocksBlobFields.concat(encodeCheckpointEndMarker({ numBlobFields }));
+}
+
+export function decodeCheckpointBlobData(fields: Fr[] | FieldReader): CheckpointBlobData {
+  const reader = FieldReader.asReader(fields);
+
+  if (reader.isFinished()) {
+    throw new BlobDeserializationError(`Cannot decode empty blob data.`);
+  }
+
+  const blocks = [];
+  let checkpointEndMarker: CheckpointEndMarker | undefined;
+  while (!reader.isFinished() && !checkpointEndMarker) {
+    blocks.push(decodeBlockBlobData(reader, blocks.length === 0 /* isFirstBlock */));
+
+    // After reading a block, the next item must be either a checkpoint end marker or another block.
+    // The first field of a block is always a tx start marker. So if the provided fields are valid, it's not possible to
+    // misinterpret a tx start marker as checkpoint end marker, or vice versa.
+    const nextField = reader.peekField();
+    if (isCheckpointEndMarker(nextField)) {
+      checkpointEndMarker = decodeCheckpointEndMarker(reader.readField());
+      const numFieldsRead = reader.cursor;
+      if (numFieldsRead !== checkpointEndMarker.numBlobFields) {
+        throw new BlobDeserializationError(
+          `Incorrect encoding of blob fields: mismatch number of blob fields. Expected ${checkpointEndMarker.numBlobFields} fields, got ${numFieldsRead}.`,
+        );
+      }
+    }
+  }
+
+  if (!checkpointEndMarker) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: checkpoint end marker does not exist.`);
+  }
+
+  const remainingFields = reader.readFieldArray(reader.remainingFields());
+  if (!remainingFields.every(f => f.isZero())) {
+    throw new BlobDeserializationError(
+      `Incorrect encoding of blob fields: unexpected non-zero field after checkpoint end marker.`,
+    );
+  }
+
+  return {
+    checkpointEndMarker,
+    blocks,
+  };
+}
+
+export function decodeCheckpointBlobDataFromBuffer(buf: Buffer): CheckpointBlobData {
+  const reader = BufferReader.asReader(buf);
+  const totalFieldsInBuffer = Math.floor(buf.length / Fr.SIZE_IN_BYTES);
+  const blobFields = reader.readArray(totalFieldsInBuffer, Fr);
+  return decodeCheckpointBlobData(blobFields);
+}
+
+export function getTotalNumBlobFieldsFromTxs(txsPerBlock: TxStartMarker[][]): number {
+  const numBlocks = txsPerBlock.length;
+  if (!numBlocks) {
+    return 0;
+  }
+
+  return (
+    (numBlocks ? 1 : 0) + // l1ToL2Messages root in the first block
+    numBlocks * 6 + // 6 fields for each block end blob data.
+    txsPerBlock.reduce((total, txs) => total + txs.reduce((total, tx) => total + tx.numBlobFields, 0), 0) +
+    1 // checkpoint end marker
+  );
+}
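As a reading aid (not part of the published diff), the field budget computed by getTotalNumBlobFieldsFromTxs can be checked against getNumTxBlobFields for a one-block, one-tx checkpoint. The sketch below assumes the package's top-level re-exports and the makeTxStartMarker fixture added in fixtures.ts further down:

import { getNumTxBlobFields, getTotalNumBlobFieldsFromTxs, makeTxStartMarker } from '@aztec/blob-lib';

// One block containing one tx, built with the fixture defaults (one of each effect).
const tx = makeTxStartMarker();

// Per-tx budget: marker + tx hash + fee + effects + one length field per private log.
const perTx = getNumTxBlobFields(tx); // equals tx.numBlobFields when not overridden

// Checkpoint budget: l1-to-l2 root (first block only) + 6 block-end fields per block
// + all tx fields + the checkpoint end marker.
const expected = 1 + 1 * 6 + perTx + 1;
console.log(getTotalNumBlobFieldsFromTxs([[tx]]) === expected); // true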
package/src/encoding/checkpoint_end_marker.ts ADDED
@@ -0,0 +1,40 @@
+import { CHECKPOINT_END_PREFIX } from '@aztec/constants';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/checkpoint_blob_data.nr`.
+
+const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
+
+export interface CheckpointEndMarker {
+  numBlobFields: number;
+}
+
+export function encodeCheckpointEndMarker(checkpointEndMarker: CheckpointEndMarker) {
+  let value = CHECKPOINT_END_PREFIX;
+  value <<= NUM_BLOB_FIELDS_BIT_SIZE;
+  value += BigInt(checkpointEndMarker.numBlobFields);
+  return new Fr(value);
+}
+
+export function decodeCheckpointEndMarker(field: Fr): CheckpointEndMarker {
+  let value = field.toBigInt();
+  const numBlobFields = Number(value & (2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n));
+  value >>= NUM_BLOB_FIELDS_BIT_SIZE;
+
+  const prefix = value;
+  if (prefix !== CHECKPOINT_END_PREFIX) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid checkpoint end marker.`);
+  }
+
+  return {
+    numBlobFields,
+  };
+}
+
+// Check if a field is a checkpoint end marker. Used to check if it has reached the end of the blob fields.
+export function isCheckpointEndMarker(field: Fr): boolean {
+  const prefix = field.toBigInt() >> NUM_BLOB_FIELDS_BIT_SIZE;
+  return prefix === CHECKPOINT_END_PREFIX;
+}
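A minimal round-trip sketch (not from the package itself) of the marker packing above: the prefix sits above a 32-bit field count. Import paths assume the re-exports added in encoding/index.ts:

import { CHECKPOINT_END_PREFIX } from '@aztec/constants';
import { decodeCheckpointEndMarker, encodeCheckpointEndMarker, isCheckpointEndMarker } from '@aztec/blob-lib';

// Pack: value = (CHECKPOINT_END_PREFIX << 32) + numBlobFields.
const marker = encodeCheckpointEndMarker({ numBlobFields: 1234 });
console.log(marker.toBigInt() === (CHECKPOINT_END_PREFIX << 32n) + 1234n); // true

// The prefix check is what lets decodeCheckpointBlobData spot the end of a checkpoint.
console.log(isCheckpointEndMarker(marker)); // true
console.log(decodeCheckpointEndMarker(marker).numBlobFields); // 1234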
package/src/encoding/fixtures.ts ADDED
@@ -0,0 +1,210 @@
+import {
+  FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH,
+  MAX_CONTRACT_CLASS_LOGS_PER_TX,
+  MAX_L2_TO_L1_MSGS_PER_TX,
+  MAX_NOTE_HASHES_PER_TX,
+  MAX_NULLIFIERS_PER_TX,
+  MAX_PRIVATE_LOGS_PER_TX,
+  MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+  PRIVATE_LOG_SIZE_IN_FIELDS,
+} from '@aztec/constants';
+import { makeTuple } from '@aztec/foundation/array';
+import { BlockNumber } from '@aztec/foundation/branded-types';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import type { BlockBlobData, BlockEndBlobData } from './block_blob_data.js';
+import type { BlockEndMarker } from './block_end_marker.js';
+import type { BlockEndStateField } from './block_end_state_field.js';
+import { type CheckpointBlobData, getTotalNumBlobFieldsFromTxs } from './checkpoint_blob_data.js';
+import { type TxBlobData, getNumTxBlobFields } from './tx_blob_data.js';
+import type { TxStartMarker } from './tx_start_marker.js';
+
+const fr = (seed: number) => new Fr(BigInt(seed));
+
+export function makeTxStartMarker({
+  isFullTx = false,
+  ...overrides
+}: { isFullTx?: boolean } & Partial<TxStartMarker> = {}): TxStartMarker {
+  const partialTxStartMarker = {
+    revertCode: 0,
+    numNoteHashes: isFullTx ? MAX_NOTE_HASHES_PER_TX : 1,
+    numNullifiers: isFullTx ? MAX_NULLIFIERS_PER_TX : 1,
+    numL2ToL1Msgs: isFullTx ? MAX_L2_TO_L1_MSGS_PER_TX : 1,
+    numPublicDataWrites: isFullTx ? MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX : 1,
+    numPrivateLogs: isFullTx ? MAX_PRIVATE_LOGS_PER_TX : 1,
+    privateLogsLength: isFullTx ? PRIVATE_LOG_SIZE_IN_FIELDS * MAX_PRIVATE_LOGS_PER_TX : 1,
+    publicLogsLength: isFullTx ? FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH : 1,
+    contractClassLogLength: isFullTx ? MAX_CONTRACT_CLASS_LOGS_PER_TX : 1,
+    ...overrides,
+  };
+
+  const numBlobFields = overrides.numBlobFields ?? getNumTxBlobFields(partialTxStartMarker);
+  return {
+    ...partialTxStartMarker,
+    numBlobFields,
+  };
+}
+
+export function makeTxBlobData({
+  isFullTx = false,
+  seed = 1,
+  ...overrides
+}: { isFullTx?: boolean; seed?: number } & Partial<
+  Omit<TxBlobData, 'txStartMarker'> & { txStartMarker?: Partial<TxStartMarker> }
+> = {}): TxBlobData {
+  const { txStartMarker: txStartMarkerOverrides, ...txBlobDataOverrides } = overrides;
+  const txStartMarker = makeTxStartMarker({ isFullTx, ...txStartMarkerOverrides });
+
+  const noteHashes = makeTuple(txStartMarker.numNoteHashes, fr, seed);
+  const nullifiers = makeTuple(txStartMarker.numNullifiers, fr, seed + 0x100);
+  const l2ToL1Msgs = makeTuple(txStartMarker.numL2ToL1Msgs, fr, seed + 0x200);
+  const publicDataWrites = makeTuple(
+    txStartMarker.numPublicDataWrites,
+    i => [fr(seed + i * 2), fr(seed + i * 2 + 1)] satisfies [Fr, Fr],
+    seed + 0x300,
+  );
+
+  const privateLogs = [];
+  if (txStartMarker.privateLogsLength > txStartMarker.numPrivateLogs * PRIVATE_LOG_SIZE_IN_FIELDS) {
+    throw new Error('Private logs length is too large');
+  }
+  if (txStartMarker.privateLogsLength < txStartMarker.numPrivateLogs) {
+    throw new Error('Private logs length is too small');
+  }
+  let remainingNumPrivateLogs = txStartMarker.numPrivateLogs;
+  let remainingPrivateLogsLength = txStartMarker.privateLogsLength;
+  for (let i = 0; i < txStartMarker.numPrivateLogs; i++) {
+    const minLength = Math.max(
+      1,
+      remainingPrivateLogsLength - (remainingNumPrivateLogs - 1) * PRIVATE_LOG_SIZE_IN_FIELDS,
+    );
+    const length = Math.max(minLength, Math.floor(remainingPrivateLogsLength / remainingNumPrivateLogs));
+    privateLogs.push(makeTuple(length, fr, seed + 0x400 + i * PRIVATE_LOG_SIZE_IN_FIELDS));
+    remainingNumPrivateLogs -= 1;
+    remainingPrivateLogsLength -= length;
+  }
+
+  const publicLogs = makeTuple(txStartMarker.publicLogsLength, fr, seed + 0x500);
+  const contractClassLogBlobDataLength =
+    txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0; // If the log exists, +1 for the contract address
+  const contractClassLog = makeTuple(contractClassLogBlobDataLength, fr, seed + 0x600);
+
+  return {
+    txStartMarker,
+    txHash: fr(seed + 0x700),
+    transactionFee: fr(seed + 0x800),
+    noteHashes,
+    nullifiers,
+    l2ToL1Msgs,
+    publicDataWrites,
+    privateLogs,
+    publicLogs,
+    contractClassLog,
+    ...txBlobDataOverrides,
+  };
+}
+
+export function makeBlockEndMarker({
+  seed = 1,
+  ...overrides
+}: { seed?: number } & Partial<BlockEndMarker> = {}): BlockEndMarker {
+  return {
+    numTxs: seed,
+    blockNumber: BlockNumber(seed + 1),
+    timestamp: BigInt(seed + 2),
+    ...overrides,
+  };
+}
+
+export function makeBlockEndStateField({
+  seed = 1,
+  ...overrides
+}: { seed?: number } & Partial<BlockEndStateField> = {}): BlockEndStateField {
+  return {
+    l1ToL2MessageNextAvailableLeafIndex: seed,
+    noteHashNextAvailableLeafIndex: seed + 0x10,
+    nullifierNextAvailableLeafIndex: seed + 0x20,
+    publicDataNextAvailableLeafIndex: seed + 0x30,
+    totalManaUsed: BigInt(seed + 0x40),
+    ...overrides,
+  };
+}
+
+export function makeBlockEndBlobData({
+  isFirstBlock = true,
+  seed = 1,
+  ...overrides
+}: { seed?: number; isFirstBlock?: boolean } & Partial<
+  Omit<BlockEndBlobData, 'blockEndMarker' | 'blockEndStateField'>
+> & {
+    blockEndMarker?: Partial<BlockEndMarker>;
+    blockEndStateField?: Partial<BlockEndStateField>;
+  } = {}): BlockEndBlobData {
+  const {
+    blockEndMarker: blockEndMarkerOverrides,
+    blockEndStateField: blockEndStateFieldOverrides,
+    ...blockEndBlobDataOverrides
+  } = overrides;
+  return {
+    blockEndMarker: makeBlockEndMarker({ seed, ...blockEndMarkerOverrides }),
+    blockEndStateField: makeBlockEndStateField({ seed: seed + 0x100, ...blockEndStateFieldOverrides }),
+    lastArchiveRoot: fr(seed + 0x200),
+    noteHashRoot: fr(seed + 0x300),
+    nullifierRoot: fr(seed + 0x400),
+    publicDataRoot: fr(seed + 0x500),
+    l1ToL2MessageRoot: isFirstBlock ? fr(seed + 0x600) : undefined,
+    ...blockEndBlobDataOverrides,
+  };
+}
+
+export function makeBlockBlobData({
+  numTxs = 1,
+  isFirstBlock = true,
+  isFullTx = false,
+  seed = 1,
+  ...overrides
+}: { numTxs?: number; isFirstBlock?: boolean; isFullTx?: boolean; seed?: number } & Partial<
+  Parameters<typeof makeBlockEndBlobData>[0]
+> = {}): BlockBlobData {
+  return {
+    txs: makeTuple(numTxs, i => makeTxBlobData({ isFullTx, seed: seed + i * 0x100 }), seed),
+    ...makeBlockEndBlobData({
+      seed: seed + 0x1000 * numTxs,
+      blockEndMarker: {
+        numTxs,
+      },
+      isFirstBlock,
+      ...overrides,
+    }),
+  };
+}
+
+export function makeCheckpointBlobData({
+  numBlocks = 1,
+  numTxsPerBlock = 1,
+  isFullTx = false,
+  seed = 1,
+  ...overrides
+}: {
+  numBlocks?: number;
+  numTxsPerBlock?: number;
+  isFullTx?: boolean;
+  seed?: number;
+} & Partial<CheckpointBlobData> = {}): CheckpointBlobData {
+  const blocks =
+    overrides.blocks ??
+    makeTuple(
+      numBlocks,
+      i => makeBlockBlobData({ numTxs: numTxsPerBlock, isFirstBlock: i === seed, isFullTx, seed: seed + i * 0x1000 }),
+      seed,
+    );
+
+  const numBlobFields =
+    overrides.checkpointEndMarker?.numBlobFields ??
+    getTotalNumBlobFieldsFromTxs(blocks.map(block => block.txs.map(tx => tx.txStartMarker)));
+
+  return {
+    blocks,
+    checkpointEndMarker: { numBlobFields },
+  };
+}
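A hedged sketch (not part of the diff) of how these fixtures are meant to be driven: defaults build a minimal tx, isFullTx fills every effect array to its protocol maximum, and nested overrides keep numBlobFields consistent. The import path assumes the top-level re-exports:

import { makeTxBlobData } from '@aztec/blob-lib';

// Defaults produce a minimal tx (one of each effect); isFullTx maxes out every array.
const smallTx = makeTxBlobData({ seed: 7 });
const fullTx = makeTxBlobData({ isFullTx: true, seed: 7 });
console.log(fullTx.noteHashes.length > smallTx.noteHashes.length); // true

// Nested overrides reach into the tx start marker, and numBlobFields is recomputed to match.
const customTx = makeTxBlobData({ txStartMarker: { numNoteHashes: 4, numNullifiers: 2 } });
console.log(customTx.noteHashes.length); // 4
console.log(customTx.txStartMarker.numBlobFields > smallTx.txStartMarker.numBlobFields); // true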
package/src/encoding/index.ts ADDED
@@ -0,0 +1,9 @@
+export * from './block_blob_data.js';
+export * from './block_end_marker.js';
+export * from './block_end_state_field.js';
+export * from './checkpoint_blob_data.js';
+export * from './checkpoint_end_marker.js';
+export * from './fixtures.js';
+export * from './tx_blob_data.js';
+export * from './tx_start_marker.js';
+export { BlobDeserializationError } from '../errors.js';
package/src/encoding/tx_blob_data.ts ADDED
@@ -0,0 +1,116 @@
+import { chunk } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
+import { FieldReader } from '@aztec/foundation/serialize';
+
+import { BlobDeserializationError } from '../errors.js';
+import { type TxStartMarker, decodeTxStartMarker, encodeTxStartMarker } from './tx_start_marker.js';
+
+// Must match the implementation in noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr.
+
+export interface TxBlobData {
+  txStartMarker: TxStartMarker;
+  txHash: Fr;
+  transactionFee: Fr;
+  noteHashes: Fr[];
+  nullifiers: Fr[];
+  l2ToL1Msgs: Fr[];
+  publicDataWrites: [Fr, Fr][];
+  privateLogs: Fr[][];
+  publicLogs: Fr[];
+  contractClassLog: Fr[];
+}
+
+export function encodeTxBlobData(txBlobData: TxBlobData): Fr[] {
+  return [
+    encodeTxStartMarker(txBlobData.txStartMarker),
+    txBlobData.txHash,
+    txBlobData.transactionFee,
+    ...txBlobData.noteHashes,
+    ...txBlobData.nullifiers,
+    ...txBlobData.l2ToL1Msgs,
+    ...txBlobData.publicDataWrites.flat(),
+    ...txBlobData.privateLogs.map(log => [new Fr(log.length), ...log]).flat(),
+    ...txBlobData.publicLogs,
+    ...txBlobData.contractClassLog,
+  ];
+}
+
+export function decodeTxBlobData(fields: Fr[] | FieldReader): TxBlobData {
+  const reader = FieldReader.asReader(fields);
+
+  if (reader.isFinished()) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for tx blob data.`);
+  }
+
+  const txStartMarker = decodeTxStartMarker(reader.readField());
+
+  const checkRemainingFields = (requiredFields: number, type: string) => {
+    if (requiredFields > reader.remainingFields()) {
+      throw new BlobDeserializationError(
+        `Incorrect encoding of blob fields: not enough fields for ${type}. Expected ${requiredFields} fields, only ${reader.remainingFields()} remaining.`,
+      );
+    }
+  };
+
+  const numTxEffectFields = txStartMarker.numBlobFields - 1; // -1 because we already read the tx start marker.
+  checkRemainingFields(numTxEffectFields, 'tx effect');
+
+  const txHash = reader.readField();
+  const transactionFee = reader.readField();
+
+  checkRemainingFields(txStartMarker.numNoteHashes, 'note hashes');
+  const noteHashes = reader.readFieldArray(txStartMarker.numNoteHashes);
+
+  checkRemainingFields(txStartMarker.numNullifiers, 'nullifiers');
+  const nullifiers = reader.readFieldArray(txStartMarker.numNullifiers);
+
+  checkRemainingFields(txStartMarker.numL2ToL1Msgs, 'l2-to-l1 messages');
+  const l2ToL1Msgs = reader.readFieldArray(txStartMarker.numL2ToL1Msgs);
+
+  checkRemainingFields(txStartMarker.numPublicDataWrites * 2, 'public data writes'); // *2 for leaf slot and value
+  const publicDataWrites = chunk(reader.readFieldArray(txStartMarker.numPublicDataWrites * 2), 2) as [Fr, Fr][];
+
+  const privateLogs = Array.from({ length: txStartMarker.numPrivateLogs }, () => {
+    const length = reader.readU32();
+    checkRemainingFields(length, 'private log');
+    return reader.readFieldArray(length);
+  });
+
+  checkRemainingFields(txStartMarker.publicLogsLength, 'public logs');
+  const publicLogs = reader.readFieldArray(txStartMarker.publicLogsLength);
+
+  const contractClassLogBlobDataLength =
+    txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0; // If the log exists, +1 for the contract address
+  checkRemainingFields(contractClassLogBlobDataLength, 'contract class logs');
+  const contractClassLog = reader.readFieldArray(contractClassLogBlobDataLength);
+
+  return {
+    txStartMarker,
+    txHash,
+    transactionFee,
+    noteHashes,
+    nullifiers,
+    l2ToL1Msgs,
+    publicDataWrites,
+    privateLogs,
+    publicLogs,
+    contractClassLog,
+  };
+}
+
+export function getNumTxBlobFields(txStartMarker: Omit<TxStartMarker, 'revertCode' | 'numBlobFields'>) {
+  return (
+    1 + // tx start marker
+    1 + // tx hash
+    1 + // transaction fee
+    txStartMarker.numNoteHashes +
+    txStartMarker.numNullifiers +
+    txStartMarker.numL2ToL1Msgs +
+    txStartMarker.numPublicDataWrites * 2 + // *2 for leaf slot and value per public data write
+    txStartMarker.numPrivateLogs + // +1 length field for each private log
+    txStartMarker.privateLogsLength +
+    txStartMarker.publicLogsLength +
+    txStartMarker.contractClassLogLength +
+    (txStartMarker.contractClassLogLength > 0 ? 1 : 0) // +1 for contract address of the contract class log
+  );
+}
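A test-style sketch (not part of the diff) of the tx-level round trip, using the makeTxBlobData fixture shown earlier; the key invariant is that the encoded length equals the field count packed into the tx start marker:

import { decodeTxBlobData, encodeTxBlobData, makeTxBlobData } from '@aztec/blob-lib';

// A deterministic tx fixture; all effect counts come from its tx start marker.
const tx = makeTxBlobData({ seed: 3 });

// Encoding flattens the tx into fields; the first field is the tx start marker.
const fields = encodeTxBlobData(tx);
console.log(fields.length === tx.txStartMarker.numBlobFields); // true

// Decoding consumes exactly those fields and restores the same effect counts.
const decoded = decodeTxBlobData(fields);
console.log(decoded.noteHashes.length === tx.noteHashes.length); // true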
package/src/{encoding.ts → encoding/tx_start_marker.ts} RENAMED
@@ -1,6 +1,9 @@
-import { BLOCK_END_PREFIX, TX_START_PREFIX } from '@aztec/constants';
-import { Fr } from '@aztec/foundation/fields';
-import { FieldReader } from '@aztec/foundation/serialize';
+import { TX_START_PREFIX } from '@aztec/constants';
+import { Fr } from '@aztec/foundation/curves/bn254';
+
+import { BlobDeserializationError } from '../errors.js';
+
+// Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr`.
 
 const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
 const REVERT_CODE_BIT_SIZE = 8n;
@@ -9,11 +12,11 @@ const NUM_NULLIFIER_BIT_SIZE = 16n;
 const NUM_L2_TO_L1_MSG_BIT_SIZE = 16n;
 const NUM_PUBLIC_DATA_WRITE_BIT_SIZE = 16n;
 const NUM_PRIVATE_LOG_BIT_SIZE = 16n;
+const PRIVATE_LOGS_LENGTH_BIT_SIZE = 16n;
 const PUBLIC_LOGS_LENGTH_BIT_SIZE = 32n;
 const CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE = 16n;
 
 export interface TxStartMarker {
-  prefix: bigint;
   numBlobFields: number;
   revertCode: number;
   numNoteHashes: number;
@@ -21,12 +24,12 @@ export interface TxStartMarker {
   numL2ToL1Msgs: number;
   numPublicDataWrites: number;
   numPrivateLogs: number;
+  privateLogsLength: number;
   publicLogsLength: number;
   contractClassLogLength: number;
 }
 
-// Must match the implementation in `noir-protocol-circuits/crates/rollup-lib/src/tx_base/components/tx_blob_data.nr`.
-export function encodeTxStartMarker(txStartMarker: Omit<TxStartMarker, 'prefix'>) {
+export function encodeTxStartMarker(txStartMarker: TxStartMarker): Fr {
   let value = TX_START_PREFIX;
   value <<= NUM_NOTE_HASH_BIT_SIZE;
   value += BigInt(txStartMarker.numNoteHashes);
@@ -38,6 +41,8 @@ export function encodeTxStartMarker(txStartMarker: Omit<TxStartMarker, 'prefix'>
   value += BigInt(txStartMarker.numPublicDataWrites);
   value <<= NUM_PRIVATE_LOG_BIT_SIZE;
   value += BigInt(txStartMarker.numPrivateLogs);
+  value <<= PRIVATE_LOGS_LENGTH_BIT_SIZE;
+  value += BigInt(txStartMarker.privateLogsLength);
   value <<= PUBLIC_LOGS_LENGTH_BIT_SIZE;
   value += BigInt(txStartMarker.publicLogsLength);
   value <<= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
@@ -59,6 +64,8 @@ export function decodeTxStartMarker(field: Fr): TxStartMarker {
   value >>= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
   const publicLogsLength = Number(value & (2n ** PUBLIC_LOGS_LENGTH_BIT_SIZE - 1n));
   value >>= PUBLIC_LOGS_LENGTH_BIT_SIZE;
+  const privateLogsLength = Number(value & (2n ** PRIVATE_LOGS_LENGTH_BIT_SIZE - 1n));
+  value >>= PRIVATE_LOGS_LENGTH_BIT_SIZE;
   const numPrivateLogs = Number(value & (2n ** NUM_PRIVATE_LOG_BIT_SIZE - 1n));
   value >>= NUM_PRIVATE_LOG_BIT_SIZE;
   const numPublicDataWrites = Number(value & (2n ** NUM_PUBLIC_DATA_WRITE_BIT_SIZE - 1n));
@@ -69,11 +76,13 @@ export function decodeTxStartMarker(field: Fr): TxStartMarker {
   value >>= NUM_NULLIFIER_BIT_SIZE;
   const numNoteHashes = Number(value & (2n ** NUM_NOTE_HASH_BIT_SIZE - 1n));
   value >>= NUM_NOTE_HASH_BIT_SIZE;
-  // Do not throw if the prefix doesn't match.
-  // The caller function can check it by calling `isValidTxStartMarker`, and decide what to do if it's incorrect.
+
   const prefix = value;
+  if (prefix !== TX_START_PREFIX) {
+    throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid tx start marker.`);
+  }
+
   return {
-    prefix,
     numBlobFields,
     revertCode,
     numNoteHashes,
@@ -81,74 +90,8 @@ export function decodeTxStartMarker(field: Fr): TxStartMarker {
     numL2ToL1Msgs,
     numPublicDataWrites,
     numPrivateLogs,
+    privateLogsLength,
     publicLogsLength,
     contractClassLogLength,
   };
 }
-
-export function getNumBlobFieldsFromTxStartMarker(field: Fr) {
-  return Number(field.toBigInt() & (2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n));
-}
-
-export function isValidTxStartMarker(txStartMarker: TxStartMarker) {
-  return txStartMarker.prefix === TX_START_PREFIX;
-}
-
-export function createBlockEndMarker(numTxs: number) {
-  // Must match the implementation in `block_rollup_public_inputs_composer.nr > create_block_end_marker`.
-  return new Fr(BLOCK_END_PREFIX * 256n * 256n + BigInt(numTxs));
-}
-
-export function getNumTxsFromBlockEndMarker(field: Fr) {
-  return Number(field.toBigInt() & 0xffffn);
-}
-
-export function isBlockEndMarker(field: Fr) {
-  const value = field.toBigInt();
-  const numTxs = value & 0xffffn;
-  return value - numTxs === BLOCK_END_PREFIX * 256n * 256n;
-}
-
-/**
- * Check that the fields are emitted from the circuits and conform to the encoding.
- * @param blobFields - The concatenated fields from all blobs of an L1 block.
- */
-export function checkBlobFieldsEncoding(blobFields: Fr[]) {
-  const reader = FieldReader.asReader(blobFields);
-
-  const checkpointPrefix = reader.readField();
-  if (checkpointPrefix.toBigInt() !== BigInt(blobFields.length)) {
-    return false;
-  }
-
-  const numFieldsInCheckpoint = checkpointPrefix.toNumber();
-  let seenNumTxs = 0;
-  while (reader.cursor < numFieldsInCheckpoint) {
-    const currentField = reader.readField();
-
-    if (isBlockEndMarker(currentField)) {
-      // Found a block end marker. Confirm that the number of txs in this block is correct.
-      const numTxs = getNumTxsFromBlockEndMarker(currentField);
-      if (numTxs !== seenNumTxs) {
-        return false;
-      }
-      seenNumTxs = 0;
-      // Continue the loop to process the next field.
-      continue;
-    }
-
-    // If the field is not a block end marker, it must be a tx start marker.
-    const txStartMarker = decodeTxStartMarker(currentField);
-    if (!isValidTxStartMarker(txStartMarker)) {
-      return false;
-    }
-
-    seenNumTxs += 1;
-
-    // Skip the remaining fields in this tx. -1 because we already read the tx start marker.
-    reader.skip(txStartMarker.numBlobFields - 1);
-    // TODO: Check the encoding of the tx if we want to be more strict.
-  }
-
-  return true;
-}
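A short sketch (not from the package) of the revised marker behaviour: the new privateLogsLength field survives the bit-packing, and decoding a field with the wrong prefix now throws instead of relying on the removed isValidTxStartMarker check. TX_START_PREFIX is assumed to be non-zero, and imports assume the top-level re-exports:

import { Fr } from '@aztec/foundation/curves/bn254';
import { BlobDeserializationError, decodeTxStartMarker, encodeTxStartMarker, makeTxStartMarker } from '@aztec/blob-lib';

// Round trip: privateLogsLength is packed into its own 16-bit slot.
const marker = makeTxStartMarker({ numPrivateLogs: 2, privateLogsLength: 9 });
const decoded = decodeTxStartMarker(encodeTxStartMarker(marker));
console.log(decoded.privateLogsLength); // 9

// A field without the TX_START_PREFIX in its upper bits is rejected outright.
try {
  decodeTxStartMarker(new Fr(123n));
} catch (err) {
  console.log(err instanceof BlobDeserializationError); // true
}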
package/src/hash.ts CHANGED
@@ -1,7 +1,10 @@
-import { poseidon2Hash, sha256, sha256ToField } from '@aztec/foundation/crypto';
-import { BLS12Fr, Fr } from '@aztec/foundation/fields';
+import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
+import { sha256, sha256ToField } from '@aztec/foundation/crypto/sha256';
+import { BLS12Fr } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 
-import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
+import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, getKzg } from './kzg_context.js';
+import { SpongeBlob } from './sponge_blob.js';
 
 const VERSIONED_HASH_VERSION_KZG = 0x01;
 
@@ -24,12 +27,21 @@ export function computeBlobsHash(evmVersionedBlobHashes: Buffer[]): Fr {
 }
 
 /**
- * The hash of the fields added throughout the checkpoint. The exact number of fields is specified by the checkpoint
- * prefix (the first field). It's verified in the circuit against the fields absorbed into the sponge blob.
- * This hash is used in generating the challenge z for all blobs in the same checkpoint.
+ * Computes a non-standard Poseidon2 hash over the provided fields.
+ *
+ * This function is used to compute:
+ * - `blobFieldsHash` of a checkpoint:
+ *   Verified in the circuit against all fields absorbed into the blob sponge over the entire checkpoint.
+ *   The exact number of fields is encoded in the checkpoint end marker (the last field).
+ *   This hash is used when generating the challenge `z` for all blobs in the checkpoint.
+ * - `spongeBlobHash` of a block:
+ *   Computed from the block's tx effects, its end-state, and the blob fields of all prior blocks in the same checkpoint.
+ *   This hash is included in the block header.
  */
 export async function computeBlobFieldsHash(fields: Fr[]): Promise<Fr> {
-  return await poseidon2Hash(fields);
+  const sponge = SpongeBlob.init();
+  await sponge.absorb(fields);
+  return sponge.squeeze();
 }
 
 export function computeBlobCommitment(data: Uint8Array): Buffer {
@@ -37,7 +49,7 @@ export function computeBlobCommitment(data: Uint8Array): Buffer {
     throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. Got ${data.length}.`);
   }
 
-  return Buffer.from(kzg.blobToKzgCommitment(data));
+  return Buffer.from(getKzg().blobToKzgCommitment(data));
 }
 
 /**
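Since computeBlobFieldsHash is now defined as an absorb-then-squeeze over a fresh SpongeBlob, hashing via the helper and via a sponge directly should agree. A small sketch (not part of the diff), assuming the package's top-level re-exports and an ES module context for top-level await:

import { Fr } from '@aztec/foundation/curves/bn254';
import { SpongeBlob, computeBlobFieldsHash } from '@aztec/blob-lib';

const fields = [new Fr(1n), new Fr(2n), new Fr(3n)];

// Helper path: init/absorb/squeeze happens inside computeBlobFieldsHash.
const viaHelper = await computeBlobFieldsHash(fields);

// Manual path: the same sponge operations, spelled out.
const sponge = SpongeBlob.init();
await sponge.absorb(fields);
const viaSponge = await sponge.squeeze();

console.log(viaHelper.toBigInt() === viaSponge.toBigInt()); // true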
package/src/index.ts CHANGED
@@ -1,10 +1,10 @@
+export * from './batched_blob.js';
 export * from './blob.js';
 export * from './blob_batching.js';
 export * from './blob_utils.js';
 export * from './circuit_types/index.js';
-export * from './deserialize.js';
-export * from './encoding.js';
-export * from './errors.js';
+export * from './encoding/index.js';
 export * from './hash.js';
 export * from './interface.js';
 export * from './sponge_blob.js';
+export * from './kzg_context.js';
+ export * from './kzg_context.js';