@aztec/blob-lib 3.0.0-nightly.20251114 → 3.0.0-nightly.20251115

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dest/blob_utils.d.ts +5 -8
  2. package/dest/blob_utils.d.ts.map +1 -1
  3. package/dest/blob_utils.js +7 -10
  4. package/dest/encoding/block_blob_data.d.ts +22 -0
  5. package/dest/encoding/block_blob_data.d.ts.map +1 -0
  6. package/dest/encoding/block_blob_data.js +65 -0
  7. package/dest/encoding/block_end_marker.d.ts +10 -0
  8. package/dest/encoding/block_end_marker.d.ts.map +1 -0
  9. package/dest/encoding/block_end_marker.js +40 -0
  10. package/dest/encoding/block_end_state_field.d.ts +12 -0
  11. package/dest/encoding/block_end_state_field.d.ts.map +1 -0
  12. package/dest/encoding/block_end_state_field.js +39 -0
  13. package/dest/encoding/checkpoint_blob_data.d.ts +13 -0
  14. package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
  15. package/dest/encoding/checkpoint_blob_data.js +54 -0
  16. package/dest/encoding/fixtures.d.ts +41 -0
  17. package/dest/encoding/fixtures.d.ts.map +1 -0
  18. package/dest/encoding/fixtures.js +137 -0
  19. package/dest/encoding/index.d.ts +8 -0
  20. package/dest/encoding/index.d.ts.map +1 -0
  21. package/dest/encoding/index.js +7 -0
  22. package/dest/encoding/tx_blob_data.d.ts +19 -0
  23. package/dest/encoding/tx_blob_data.d.ts.map +1 -0
  24. package/dest/encoding/tx_blob_data.js +79 -0
  25. package/dest/encoding/tx_start_marker.d.ts +16 -0
  26. package/dest/encoding/tx_start_marker.d.ts.map +1 -0
  27. package/dest/{encoding.js → encoding/tx_start_marker.js} +12 -58
  28. package/dest/index.d.ts +1 -2
  29. package/dest/index.d.ts.map +1 -1
  30. package/dest/index.js +1 -2
  31. package/dest/testing.d.ts +1 -13
  32. package/dest/testing.d.ts.map +1 -1
  33. package/dest/testing.js +1 -60
  34. package/package.json +4 -4
  35. package/src/blob_utils.ts +7 -10
  36. package/src/encoding/block_blob_data.ts +102 -0
  37. package/src/encoding/block_end_marker.ts +54 -0
  38. package/src/encoding/block_end_state_field.ts +59 -0
  39. package/src/encoding/checkpoint_blob_data.ts +75 -0
  40. package/src/encoding/fixtures.ts +209 -0
  41. package/src/encoding/index.ts +7 -0
  42. package/src/encoding/tx_blob_data.ts +116 -0
  43. package/src/{encoding.ts → encoding/tx_start_marker.ts} +18 -75
  44. package/src/index.ts +1 -2
  45. package/src/testing.ts +2 -64
  46. package/dest/deserialize.d.ts +0 -14
  47. package/dest/deserialize.d.ts.map +0 -1
  48. package/dest/deserialize.js +0 -33
  49. package/dest/encoding.d.ts +0 -26
  50. package/dest/encoding.d.ts.map +0 -1
  51. package/src/deserialize.ts +0 -38
package/src/encoding/checkpoint_blob_data.ts ADDED
@@ -0,0 +1,75 @@
+ import { Fr } from '@aztec/foundation/fields';
+ import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
+
+ import { BlobDeserializationError } from '../errors.js';
+ import { type BlockBlobData, decodeBlockBlobData, encodeBlockBlobData } from './block_blob_data.js';
+ import type { TxStartMarker } from './tx_start_marker.js';
+
+ export interface CheckpointBlobData {
+   totalNumBlobFields: number;
+   blocks: BlockBlobData[];
+ }
+
+ export function encodeCheckpointBlobData(checkpointBlobData: CheckpointBlobData): Fr[] {
+   return [
+     new Fr(checkpointBlobData.totalNumBlobFields),
+     ...checkpointBlobData.blocks.map(block => encodeBlockBlobData(block)).flat(),
+   ];
+ }
+
+ export function decodeCheckpointBlobData(fields: Fr[] | FieldReader): CheckpointBlobData {
+   const reader = FieldReader.asReader(fields);
+
+   if (reader.isFinished()) {
+     throw new BlobDeserializationError(`Cannot decode empty blob data.`);
+   }
+
+   const firstField = reader.readField();
+   // Use toBigInt instead of toNumber so that we can catch it and throw a more descriptive error if the first field is
+   // larger than a javascript integer.
+   const totalNumBlobFields = firstField.toBigInt();
+   if (totalNumBlobFields > BigInt(reader.remainingFields() + 1)) {
+     // +1 because we already read the first field.
+     throw new BlobDeserializationError(
+       `Incorrect encoding of blob fields: not enough fields for checkpoint blob data. Expected ${totalNumBlobFields} fields, got ${reader.remainingFields() + 1}.`,
+     );
+   }
+
+   const blocks = [];
+   while (reader.cursor < totalNumBlobFields) {
+     blocks.push(decodeBlockBlobData(reader, blocks.length === 0 /* isFirstBlock */));
+   }
+   return {
+     totalNumBlobFields: Number(totalNumBlobFields),
+     blocks,
+   };
+ }
+
+ export function decodeCheckpointBlobDataFromBuffer(buf: Buffer): CheckpointBlobData {
+   const reader = BufferReader.asReader(buf);
+   const firstField = reader.readObject(Fr);
+
+   // Use toBigInt instead of toNumber so that we can catch it and throw a more descriptive error if the first field is
+   // larger than a javascript integer.
+   const numFields = firstField.toBigInt();
+   const totalFieldsInBuffer = BigInt(buf.length / Fr.SIZE_IN_BYTES);
+   if (numFields > totalFieldsInBuffer) {
+     throw new BlobDeserializationError(
+       `Failed to deserialize blob buffer: not enough fields for checkpoint blob data. Expected ${numFields} fields, got ${totalFieldsInBuffer}.`,
+     );
+   }
+
+   const numFieldsWithoutPrefix = Number(numFields) - 1;
+   const blobFields = [firstField].concat(reader.readArray(numFieldsWithoutPrefix, Fr));
+
+   return decodeCheckpointBlobData(blobFields);
+ }
+
+ export function getTotalNumBlobFieldsFromTxs(txs: TxStartMarker[][]): number {
+   return (
+     1 + // totalNumBlobFields
+     (txs.length ? 1 : 0) + // l1ToL2Messages root in the first block
+     txs.length * 6 + // 6 fields for each block end blob data.
+     txs.reduce((total, txs) => total + txs.reduce((total, tx) => total + tx.numBlobFields, 0), 0)
+   );
+ }
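The field budget computed by `getTotalNumBlobFieldsFromTxs` is plain arithmetic over the per-tx `numBlobFields` counters. A minimal sketch of that arithmetic for a hypothetical two-block checkpoint (the per-tx counts below are made up for illustration):

```ts
// Mirrors the formula in getTotalNumBlobFieldsFromTxs above; the per-tx counts are illustrative.
const txFieldCounts: number[][] = [[12], [20]]; // numBlobFields for each tx, grouped by block

const total =
  1 + // the totalNumBlobFields prefix field itself
  1 + // l1ToL2Messages root, emitted only for the first block
  txFieldCounts.length * 6 + // 6 block-end blob data fields per block
  txFieldCounts.flat().reduce((sum, n) => sum + n, 0); // all tx fields

console.log(total); // 1 + 1 + 12 + 32 = 46
```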
package/src/encoding/fixtures.ts ADDED
@@ -0,0 +1,209 @@
+ import {
+   FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH,
+   MAX_CONTRACT_CLASS_LOGS_PER_TX,
+   MAX_L2_TO_L1_MSGS_PER_TX,
+   MAX_NOTE_HASHES_PER_TX,
+   MAX_NULLIFIERS_PER_TX,
+   MAX_PRIVATE_LOGS_PER_TX,
+   MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+   PRIVATE_LOG_SIZE_IN_FIELDS,
+ } from '@aztec/constants';
+ import { makeTuple } from '@aztec/foundation/array';
+ import { Fr } from '@aztec/foundation/fields';
+
+ import type { BlockBlobData, BlockEndBlobData } from './block_blob_data.js';
+ import type { BlockEndMarker } from './block_end_marker.js';
+ import type { BlockEndStateField } from './block_end_state_field.js';
+ import { type CheckpointBlobData, getTotalNumBlobFieldsFromTxs } from './checkpoint_blob_data.js';
+ import { type TxBlobData, getNumTxBlobFields } from './tx_blob_data.js';
+ import type { TxStartMarker } from './tx_start_marker.js';
+
+ const fr = (seed: number) => new Fr(BigInt(seed));
+
+ export function makeTxStartMarker({
+   isFullTx = false,
+   ...overrides
+ }: { isFullTx?: boolean } & Partial<TxStartMarker> = {}): TxStartMarker {
+   const partialTxStartMarker = {
+     revertCode: 0,
+     numNoteHashes: isFullTx ? MAX_NOTE_HASHES_PER_TX : 1,
+     numNullifiers: isFullTx ? MAX_NULLIFIERS_PER_TX : 1,
+     numL2ToL1Msgs: isFullTx ? MAX_L2_TO_L1_MSGS_PER_TX : 1,
+     numPublicDataWrites: isFullTx ? MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX : 1,
+     numPrivateLogs: isFullTx ? MAX_PRIVATE_LOGS_PER_TX : 1,
+     privateLogsLength: isFullTx ? PRIVATE_LOG_SIZE_IN_FIELDS * MAX_PRIVATE_LOGS_PER_TX : 1,
+     publicLogsLength: isFullTx ? FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH : 1,
+     contractClassLogLength: isFullTx ? MAX_CONTRACT_CLASS_LOGS_PER_TX : 1,
+     ...overrides,
+   };
+
+   const numBlobFields = overrides.numBlobFields ?? getNumTxBlobFields(partialTxStartMarker);
+   return {
+     ...partialTxStartMarker,
+     numBlobFields,
+   };
+ }
+
+ export function makeTxBlobData({
+   isFullTx = false,
+   seed = 1,
+   ...overrides
+ }: { isFullTx?: boolean; seed?: number } & Partial<
+   Omit<TxBlobData, 'txStartMarker'> & { txStartMarker?: Partial<TxStartMarker> }
+ > = {}): TxBlobData {
+   const { txStartMarker: txStartMarkerOverrides, ...txBlobDataOverrides } = overrides;
+   const txStartMarker = makeTxStartMarker({ isFullTx, ...txStartMarkerOverrides });
+
+   const noteHashes = makeTuple(txStartMarker.numNoteHashes, fr, seed);
+   const nullifiers = makeTuple(txStartMarker.numNullifiers, fr, seed + 0x100);
+   const l2ToL1Msgs = makeTuple(txStartMarker.numL2ToL1Msgs, fr, seed + 0x200);
+   const publicDataWrites = makeTuple(
+     txStartMarker.numPublicDataWrites,
+     i => [fr(seed + i * 2), fr(seed + i * 2 + 1)] satisfies [Fr, Fr],
+     seed + 0x300,
+   );
+
+   const privateLogs = [];
+   if (txStartMarker.privateLogsLength > txStartMarker.numPrivateLogs * PRIVATE_LOG_SIZE_IN_FIELDS) {
+     throw new Error('Private logs length is too large');
+   }
+   if (txStartMarker.privateLogsLength < txStartMarker.numPrivateLogs) {
+     throw new Error('Private logs length is too small');
+   }
+   let remainingNumPrivateLogs = txStartMarker.numPrivateLogs;
+   let remainingPrivateLogsLength = txStartMarker.privateLogsLength;
+   for (let i = 0; i < txStartMarker.numPrivateLogs; i++) {
+     const minLength = Math.max(
+       1,
+       remainingPrivateLogsLength - (remainingNumPrivateLogs - 1) * PRIVATE_LOG_SIZE_IN_FIELDS,
+     );
+     const length = Math.max(minLength, Math.floor(remainingPrivateLogsLength / remainingNumPrivateLogs));
+     privateLogs.push(makeTuple(length, fr, seed + 0x400 + i * PRIVATE_LOG_SIZE_IN_FIELDS));
+     remainingNumPrivateLogs -= 1;
+     remainingPrivateLogsLength -= length;
+   }
+
+   const publicLogs = makeTuple(txStartMarker.publicLogsLength, fr, seed + 0x500);
+   const contractClassLogBlobDataLength =
+     txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0; // If the log exists, +1 for the contract address
+   const contractClassLog = makeTuple(contractClassLogBlobDataLength, fr, seed + 0x600);
+
+   return {
+     txStartMarker,
+     txHash: fr(seed + 0x700),
+     transactionFee: fr(seed + 0x800),
+     noteHashes,
+     nullifiers,
+     l2ToL1Msgs,
+     publicDataWrites,
+     privateLogs,
+     publicLogs,
+     contractClassLog,
+     ...txBlobDataOverrides,
+   };
+ }
+
+ export function makeBlockEndMarker({
+   seed = 1,
+   ...overrides
+ }: { seed?: number } & Partial<BlockEndMarker> = {}): BlockEndMarker {
+   return {
+     numTxs: seed,
+     blockNumber: seed + 1,
+     timestamp: BigInt(seed + 2),
+     ...overrides,
+   };
+ }
+
+ export function makeBlockEndStateField({
+   seed = 1,
+   ...overrides
+ }: { seed?: number } & Partial<BlockEndStateField> = {}): BlockEndStateField {
+   return {
+     l1ToL2MessageNextAvailableLeafIndex: seed,
+     noteHashNextAvailableLeafIndex: seed + 0x10,
+     nullifierNextAvailableLeafIndex: seed + 0x20,
+     publicDataNextAvailableLeafIndex: seed + 0x30,
+     totalManaUsed: BigInt(seed + 0x40),
+     ...overrides,
+   };
+ }
+
+ export function makeBlockEndBlobData({
+   isFirstBlock = true,
+   seed = 1,
+   ...overrides
+ }: { seed?: number; isFirstBlock?: boolean } & Partial<
+   Omit<BlockEndBlobData, 'blockEndMarker' | 'blockEndStateField'>
+ > & {
+   blockEndMarker?: Partial<BlockEndMarker>;
+   blockEndStateField?: Partial<BlockEndStateField>;
+ } = {}): BlockEndBlobData {
+   const {
+     blockEndMarker: blockEndMarkerOverrides,
+     blockEndStateField: blockEndStateFieldOverrides,
+     ...blockEndBlobDataOverrides
+   } = overrides;
+   return {
+     blockEndMarker: makeBlockEndMarker({ seed, ...blockEndMarkerOverrides }),
+     blockEndStateField: makeBlockEndStateField({ seed: seed + 0x100, ...blockEndStateFieldOverrides }),
+     lastArchiveRoot: fr(seed + 0x200),
+     noteHashRoot: fr(seed + 0x300),
+     nullifierRoot: fr(seed + 0x400),
+     publicDataRoot: fr(seed + 0x500),
+     l1ToL2MessageRoot: isFirstBlock ? fr(seed + 0x600) : undefined,
+     ...blockEndBlobDataOverrides,
+   };
+ }
+
+ export function makeBlockBlobData({
+   numTxs = 1,
+   isFirstBlock = true,
+   isFullTx = false,
+   seed = 1,
+   ...overrides
+ }: { numTxs?: number; isFirstBlock?: boolean; isFullTx?: boolean; seed?: number } & Partial<
+   Parameters<typeof makeBlockEndBlobData>[0]
+ > = {}): BlockBlobData {
+   return {
+     txs: makeTuple(numTxs, i => makeTxBlobData({ isFullTx, seed: seed + i * 0x100 }), seed),
+     ...makeBlockEndBlobData({
+       seed: seed + 0x1000 * numTxs,
+       blockEndMarker: {
+         numTxs,
+       },
+       isFirstBlock,
+       ...overrides,
+     }),
+   };
+ }
+
+ export function makeCheckpointBlobData({
+   numBlocks = 1,
+   numTxsPerBlock = 1,
+   isFullTx = false,
+   seed = 1,
+   ...overrides
+ }: {
+   numBlocks?: number;
+   numTxsPerBlock?: number;
+   isFullTx?: boolean;
+   seed?: number;
+ } & Partial<CheckpointBlobData> = {}): CheckpointBlobData {
+   const blocks =
+     overrides.blocks ??
+     makeTuple(
+       numBlocks,
+       i => makeBlockBlobData({ numTxs: numTxsPerBlock, isFirstBlock: i === seed, isFullTx, seed: seed + i * 0x1000 }),
+       seed,
+     );
+
+   const totalNumBlobFields =
+     overrides.totalNumBlobFields ??
+     getTotalNumBlobFieldsFromTxs(blocks.map(block => block.txs.map(tx => tx.txStartMarker)));
+
+   return {
+     totalNumBlobFields,
+     blocks,
+   };
+ }
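A usage sketch for the new fixtures (not from the package docs): the import path assumes the root re-export of `./encoding/index.js` introduced further down in this diff, and the override behaviour follows the code above.

```ts
import { makeCheckpointBlobData, makeTxBlobData } from '@aztec/blob-lib'; // assumed root re-export

// A single-block checkpoint with two deterministic, seed-derived txs.
const checkpoint = makeCheckpointBlobData({ numBlocks: 1, numTxsPerBlock: 2, seed: 42 });
console.log(checkpoint.blocks[0].txs.length); // 2
console.log(checkpoint.totalNumBlobFields); // derived via getTotalNumBlobFieldsFromTxs

// Fixtures accept partial overrides; numBlobFields is recomputed from the resulting shape
// unless it is overridden explicitly.
const tx = makeTxBlobData({ txStartMarker: { numNoteHashes: 3, numNullifiers: 2 } });
console.log(tx.noteHashes.length, tx.nullifiers.length); // 3 2
console.log(tx.txStartMarker.numBlobFields); // derived via getNumTxBlobFields
```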
package/src/encoding/index.ts ADDED
@@ -0,0 +1,7 @@
+ export * from './block_blob_data.js';
+ export * from './block_end_marker.js';
+ export * from './block_end_state_field.js';
+ export * from './checkpoint_blob_data.js';
+ export * from './fixtures.js';
+ export * from './tx_blob_data.js';
+ export * from './tx_start_marker.js';
package/src/encoding/tx_blob_data.ts ADDED
@@ -0,0 +1,116 @@
+ import { chunk } from '@aztec/foundation/collection';
+ import { Fr } from '@aztec/foundation/fields';
+ import { FieldReader } from '@aztec/foundation/serialize';
+
+ import { BlobDeserializationError } from '../errors.js';
+ import { type TxStartMarker, decodeTxStartMarker, encodeTxStartMarker } from './tx_start_marker.js';
+
+ // Must match the implementation in noir-protocol-circuits/crates/rollup-lib/src/tx_base/components/tx_blob_data.nr.
+
+ export interface TxBlobData {
+   txStartMarker: TxStartMarker;
+   txHash: Fr;
+   transactionFee: Fr;
+   noteHashes: Fr[];
+   nullifiers: Fr[];
+   l2ToL1Msgs: Fr[];
+   publicDataWrites: [Fr, Fr][];
+   privateLogs: Fr[][];
+   publicLogs: Fr[];
+   contractClassLog: Fr[];
+ }
+
+ export function encodeTxBlobData(txBlobData: TxBlobData): Fr[] {
+   return [
+     encodeTxStartMarker(txBlobData.txStartMarker),
+     txBlobData.txHash,
+     txBlobData.transactionFee,
+     ...txBlobData.noteHashes,
+     ...txBlobData.nullifiers,
+     ...txBlobData.l2ToL1Msgs,
+     ...txBlobData.publicDataWrites.flat(),
+     ...txBlobData.privateLogs.map(log => [new Fr(log.length), ...log]).flat(),
+     ...txBlobData.publicLogs,
+     ...txBlobData.contractClassLog,
+   ];
+ }
+
+ export function decodeTxBlobData(fields: Fr[] | FieldReader): TxBlobData {
+   const reader = FieldReader.asReader(fields);
+
+   if (reader.isFinished()) {
+     throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for tx blob data.`);
+   }
+
+   const txStartMarker = decodeTxStartMarker(reader.readField());
+
+   const checkRemainingFields = (requiredFields: number, type: string) => {
+     if (requiredFields > reader.remainingFields()) {
+       throw new BlobDeserializationError(
+         `Incorrect encoding of blob fields: not enough fields for ${type}. Expected ${requiredFields} fields, only ${reader.remainingFields()} remaining.`,
+       );
+     }
+   };
+
+   const numTxEffectFields = txStartMarker.numBlobFields - 1; // -1 because we already read the tx start marker.
+   checkRemainingFields(numTxEffectFields, 'tx effect');
+
+   const txHash = reader.readField();
+   const transactionFee = reader.readField();
+
+   checkRemainingFields(txStartMarker.numNoteHashes, 'note hashes');
+   const noteHashes = reader.readFieldArray(txStartMarker.numNoteHashes);
+
+   checkRemainingFields(txStartMarker.numNullifiers, 'nullifiers');
+   const nullifiers = reader.readFieldArray(txStartMarker.numNullifiers);
+
+   checkRemainingFields(txStartMarker.numL2ToL1Msgs, 'l2-to-l1 messages');
+   const l2ToL1Msgs = reader.readFieldArray(txStartMarker.numL2ToL1Msgs);
+
+   checkRemainingFields(txStartMarker.numPublicDataWrites * 2, 'public data writes'); // *2 for leaf slot and value
+   const publicDataWrites = chunk(reader.readFieldArray(txStartMarker.numPublicDataWrites * 2), 2) as [Fr, Fr][];
+
+   const privateLogs = Array.from({ length: txStartMarker.numPrivateLogs }, () => {
+     const length = reader.readU32();
+     checkRemainingFields(length, 'private log');
+     return reader.readFieldArray(length);
+   });
+
+   checkRemainingFields(txStartMarker.publicLogsLength, 'public logs');
+   const publicLogs = reader.readFieldArray(txStartMarker.publicLogsLength);
+
+   const contractClassLogBlobDataLength =
+     txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0; // If the log exists, +1 for the contract address
+   checkRemainingFields(contractClassLogBlobDataLength, 'contract class logs');
+   const contractClassLog = reader.readFieldArray(contractClassLogBlobDataLength);
+
+   return {
+     txStartMarker,
+     txHash,
+     transactionFee,
+     noteHashes,
+     nullifiers,
+     l2ToL1Msgs,
+     publicDataWrites,
+     privateLogs,
+     publicLogs,
+     contractClassLog,
+   };
+ }
+
+ export function getNumTxBlobFields(txStartMarker: Omit<TxStartMarker, 'revertCode' | 'numBlobFields'>) {
+   return (
+     1 + // tx start marker
+     1 + // tx hash
+     1 + // transaction fee
+     txStartMarker.numNoteHashes +
+     txStartMarker.numNullifiers +
+     txStartMarker.numL2ToL1Msgs +
+     txStartMarker.numPublicDataWrites * 2 + // *2 for leaf slot and value per public data write
+     txStartMarker.numPrivateLogs + // +1 length field for each private log
+     txStartMarker.privateLogsLength +
+     txStartMarker.publicLogsLength +
+     txStartMarker.contractClassLogLength +
+     (txStartMarker.contractClassLogLength > 0 ? 1 : 0) // +1 for contract address of the contract class log
+   );
+ }
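For orientation, a round-trip sketch of the per-tx encoding built on the fixture shown earlier; the root import path is an assumption based on the index re-exports later in this diff.

```ts
import { decodeTxBlobData, encodeTxBlobData, makeTxBlobData } from '@aztec/blob-lib'; // assumed re-exports

const tx = makeTxBlobData({ seed: 7 });
const fields = encodeTxBlobData(tx);

// The tx start marker carries the total encoded length, so the two should agree.
console.log(fields.length === tx.txStartMarker.numBlobFields); // true

// Decoding consumes exactly numBlobFields fields and restores the same shape.
const decoded = decodeTxBlobData(fields);
console.log(decoded.noteHashes.length, decoded.privateLogs.length); // 1 1
```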
package/src/{encoding.ts → encoding/tx_start_marker.ts} RENAMED
@@ -1,6 +1,9 @@
- import { BLOCK_END_PREFIX, TX_START_PREFIX } from '@aztec/constants';
+ import { TX_START_PREFIX } from '@aztec/constants';
  import { Fr } from '@aztec/foundation/fields';
- import { FieldReader } from '@aztec/foundation/serialize';
+
+ import { BlobDeserializationError } from '../errors.js';
+
+ // Must match the implementation in `noir-protocol-circuits/crates/rollup-lib/src/tx_base/components/tx_blob_data.nr`.

  const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
  const REVERT_CODE_BIT_SIZE = 8n;
@@ -9,11 +12,11 @@ const NUM_NULLIFIER_BIT_SIZE = 16n;
  const NUM_L2_TO_L1_MSG_BIT_SIZE = 16n;
  const NUM_PUBLIC_DATA_WRITE_BIT_SIZE = 16n;
  const NUM_PRIVATE_LOG_BIT_SIZE = 16n;
+ const PRIVATE_LOGS_LENGTH_BIT_SIZE = 16n;
  const PUBLIC_LOGS_LENGTH_BIT_SIZE = 32n;
  const CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE = 16n;

  export interface TxStartMarker {
-   prefix: bigint;
    numBlobFields: number;
    revertCode: number;
    numNoteHashes: number;
@@ -21,12 +24,12 @@ export interface TxStartMarker {
    numL2ToL1Msgs: number;
    numPublicDataWrites: number;
    numPrivateLogs: number;
+   privateLogsLength: number;
    publicLogsLength: number;
    contractClassLogLength: number;
  }

- // Must match the implementation in `noir-protocol-circuits/crates/rollup-lib/src/tx_base/components/tx_blob_data.nr`.
- export function encodeTxStartMarker(txStartMarker: Omit<TxStartMarker, 'prefix'>) {
+ export function encodeTxStartMarker(txStartMarker: TxStartMarker): Fr {
    let value = TX_START_PREFIX;
    value <<= NUM_NOTE_HASH_BIT_SIZE;
    value += BigInt(txStartMarker.numNoteHashes);
@@ -38,6 +41,8 @@ export function encodeTxStartMarker(txStartMarker: Omit<TxStartMarker, 'prefix'>
    value += BigInt(txStartMarker.numPublicDataWrites);
    value <<= NUM_PRIVATE_LOG_BIT_SIZE;
    value += BigInt(txStartMarker.numPrivateLogs);
+   value <<= PRIVATE_LOGS_LENGTH_BIT_SIZE;
+   value += BigInt(txStartMarker.privateLogsLength);
    value <<= PUBLIC_LOGS_LENGTH_BIT_SIZE;
    value += BigInt(txStartMarker.publicLogsLength);
    value <<= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
@@ -59,6 +64,8 @@ export function decodeTxStartMarker(field: Fr): TxStartMarker {
    value >>= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
    const publicLogsLength = Number(value & (2n ** PUBLIC_LOGS_LENGTH_BIT_SIZE - 1n));
    value >>= PUBLIC_LOGS_LENGTH_BIT_SIZE;
+   const privateLogsLength = Number(value & (2n ** PRIVATE_LOGS_LENGTH_BIT_SIZE - 1n));
+   value >>= PRIVATE_LOGS_LENGTH_BIT_SIZE;
    const numPrivateLogs = Number(value & (2n ** NUM_PRIVATE_LOG_BIT_SIZE - 1n));
    value >>= NUM_PRIVATE_LOG_BIT_SIZE;
    const numPublicDataWrites = Number(value & (2n ** NUM_PUBLIC_DATA_WRITE_BIT_SIZE - 1n));
@@ -69,11 +76,13 @@ export function decodeTxStartMarker(field: Fr): TxStartMarker {
    value >>= NUM_NULLIFIER_BIT_SIZE;
    const numNoteHashes = Number(value & (2n ** NUM_NOTE_HASH_BIT_SIZE - 1n));
    value >>= NUM_NOTE_HASH_BIT_SIZE;
-   // Do not throw if the prefix doesn't match.
-   // The caller function can check it by calling `isValidTxStartMarker`, and decide what to do if it's incorrect.
+
    const prefix = value;
+   if (prefix !== TX_START_PREFIX) {
+     throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid tx start marker.`);
+   }
+
    return {
-     prefix,
      numBlobFields,
      revertCode,
      numNoteHashes,
@@ -81,74 +90,8 @@ export function decodeTxStartMarker(field: Fr): TxStartMarker {
      numL2ToL1Msgs,
      numPublicDataWrites,
      numPrivateLogs,
+     privateLogsLength,
      publicLogsLength,
      contractClassLogLength,
    };
  }
-
- export function getNumBlobFieldsFromTxStartMarker(field: Fr) {
-   return Number(field.toBigInt() & (2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n));
- }
-
- export function isValidTxStartMarker(txStartMarker: TxStartMarker) {
-   return txStartMarker.prefix === TX_START_PREFIX;
- }
-
- export function createBlockEndMarker(numTxs: number) {
-   // Must match the implementation in `block_rollup_public_inputs_composer.nr > create_block_end_marker`.
-   return new Fr(BLOCK_END_PREFIX * 256n * 256n + BigInt(numTxs));
- }
-
- export function getNumTxsFromBlockEndMarker(field: Fr) {
-   return Number(field.toBigInt() & 0xffffn);
- }
-
- export function isBlockEndMarker(field: Fr) {
-   const value = field.toBigInt();
-   const numTxs = value & 0xffffn;
-   return value - numTxs === BLOCK_END_PREFIX * 256n * 256n;
- }
-
- /**
-  * Check that the fields are emitted from the circuits and conform to the encoding.
-  * @param blobFields - The concatenated fields from all blobs of an L1 block.
-  */
- export function checkBlobFieldsEncoding(blobFields: Fr[]) {
-   const reader = FieldReader.asReader(blobFields);
-
-   const checkpointPrefix = reader.readField();
-   if (checkpointPrefix.toBigInt() !== BigInt(blobFields.length)) {
-     return false;
-   }
-
-   const numFieldsInCheckpoint = checkpointPrefix.toNumber();
-   let seenNumTxs = 0;
-   while (reader.cursor < numFieldsInCheckpoint) {
-     const currentField = reader.readField();
-
-     if (isBlockEndMarker(currentField)) {
-       // Found a block end marker. Confirm that the number of txs in this block is correct.
-       const numTxs = getNumTxsFromBlockEndMarker(currentField);
-       if (numTxs !== seenNumTxs) {
-         return false;
-       }
-       seenNumTxs = 0;
-       // Continue the loop to process the next field.
-       continue;
-     }
-
-     // If the field is not a block end marker, it must be a tx start marker.
-     const txStartMarker = decodeTxStartMarker(currentField);
-     if (!isValidTxStartMarker(txStartMarker)) {
-       return false;
-     }
-
-     seenNumTxs += 1;
-
-     // Skip the remaining fields in this tx. -1 because we already read the tx start marker.
-     reader.skip(txStartMarker.numBlobFields - 1);
-     // TODO: Check the encoding of the tx if we want to be more strict.
-   }
-
-   return true;
- }
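A round-trip sketch of the reworked marker (not part of the package): `privateLogsLength` is now packed between `numPrivateLogs` and `publicLogsLength`, and an invalid prefix throws during decoding instead of being surfaced through the removed `prefix`/`isValidTxStartMarker` pair. The root import path is an assumption based on the index re-exports below.

```ts
import { decodeTxStartMarker, encodeTxStartMarker } from '@aztec/blob-lib'; // assumed root re-export

const marker = {
  numBlobFields: 13,
  revertCode: 0,
  numNoteHashes: 1,
  numNullifiers: 1,
  numL2ToL1Msgs: 1,
  numPublicDataWrites: 1,
  numPrivateLogs: 1,
  privateLogsLength: 1, // new in this version
  publicLogsLength: 1,
  contractClassLogLength: 1,
};

// A single Fr packing all counters behind TX_START_PREFIX.
const field = encodeTxStartMarker(marker);
const decoded = decodeTxStartMarker(field);
console.log(decoded.privateLogsLength); // 1

// Decoding a field without the prefix now throws BlobDeserializationError rather than
// returning a marker whose prefix the caller had to validate separately.
```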
package/src/index.ts CHANGED
@@ -2,8 +2,7 @@ export * from './blob.js';
  export * from './blob_batching.js';
  export * from './blob_utils.js';
  export * from './circuit_types/index.js';
- export * from './deserialize.js';
- export * from './encoding.js';
+ export * from './encoding/index.js';
  export * from './errors.js';
  export * from './hash.js';
  export * from './interface.js';
package/src/testing.ts CHANGED
@@ -1,15 +1,13 @@
- import { FIELDS_PER_BLOB } from '@aztec/constants';
  import { makeTuple } from '@aztec/foundation/array';
- import { randomInt } from '@aztec/foundation/crypto';
  import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';

  import { Blob } from './blob.js';
  import { BatchedBlobAccumulator } from './blob_batching.js';
- import { getBlobsPerL1Block } from './blob_utils.js';
  import { FinalBlobBatchingChallenges } from './circuit_types/index.js';
- import { createBlockEndMarker, encodeTxStartMarker } from './encoding.js';
  import { Poseidon2Sponge, SpongeBlob } from './sponge_blob.js';

+ export * from './encoding/fixtures.js';
+
  /**
   * Makes arbitrary poseidon sponge for blob inputs.
   * Note: will not verify inside the circuit.
@@ -48,66 +46,6 @@ export function makeBatchedBlobAccumulator(seed = 1): BatchedBlobAccumulator {
    );
  }

- export function makeEncodedTxBlobFields(length: number): Fr[] {
-   const txStartMarker = {
-     numBlobFields: length,
-     // The rest of the values don't matter. The test components using it do not try to deserialize everything.
-     // Only `checkBlobFieldsEncoding` is used and it only looks at `numBlobFields`. This might change in the future
-     // when we add more thorough checks to `checkBlobFieldsEncoding`.
-     revertCode: 0,
-     numNoteHashes: 0,
-     numNullifiers: 0,
-     numL2ToL1Msgs: 0,
-     numPublicDataWrites: 0,
-     numPrivateLogs: 0,
-     publicLogsLength: 0,
-     contractClassLogLength: 0,
-   };
-
-   return [
-     encodeTxStartMarker(txStartMarker),
-     ...Array.from({ length: length - 1 }, () => new Fr(randomInt(Number.MAX_SAFE_INTEGER))), // -1 to account for the tx start marker.
-   ];
- }
-
- export function makeEncodedBlockBlobFields(...lengths: number[]): Fr[] {
-   return [
-     ...(lengths.length > 0 ? makeEncodedTxBlobFields(lengths[0] - 1) : []), // -1 to account for the block end marker.
-     ...lengths.slice(1).flatMap(length => makeEncodedTxBlobFields(length)),
-     createBlockEndMarker(lengths.length),
-   ];
- }
-
- // Create blob fields for a checkpoint with a single block.
- export function makeEncodedBlobFields(length: number): Fr[] {
-   if (length <= 2) {
-     throw new Error('Encoded blob fields length must be greater than 2');
-   }
-
-   const checkpointPrefix = new Fr(length);
-   return [checkpointPrefix, ...makeEncodedBlockBlobFields(length - 1)]; // -1 to account for the checkpoint prefix.
- }
-
- /**
-  * Make an encoded blob with the given length
-  *
-  * This will deserialise correctly in the archiver
-  * @param length
-  * @returns
-  */
- export function makeEncodedBlob(length: number): Blob {
-   if (length > FIELDS_PER_BLOB) {
-     throw new Error(`A single encoded blob must be less than ${FIELDS_PER_BLOB} fields`);
-   }
-
-   return Blob.fromFields(makeEncodedBlobFields(length));
- }
-
- export function makeEncodedBlobs(length: number): Blob[] {
-   const fields = makeEncodedBlobFields(length);
-   return getBlobsPerL1Block(fields);
- }
-
  /**
   * Make a blob with random fields.
   *
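The removed `makeEncodedBlob*` helpers have no direct replacement in this file; a migration sketch, assuming the new fixtures and the checkpoint encoder are reachable from the package root per the index changes above:

```ts
import { Blob, encodeCheckpointBlobData, makeCheckpointBlobData } from '@aztec/blob-lib'; // assumed re-exports

// Old: const blob = makeEncodedBlob(20); // mostly random fields behind hand-built markers
// New: build structured checkpoint data, encode it, and wrap the fields in a Blob.
const fields = encodeCheckpointBlobData(makeCheckpointBlobData({ numTxsPerBlock: 2 }));
const blob = Blob.fromFields(fields);
console.log(fields.length, blob instanceof Blob); // field count, true
```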
package/dest/deserialize.d.ts DELETED
@@ -1,14 +0,0 @@
- import { Fr } from '@aztec/foundation/fields';
- /**
-  * Deserializes a buffer into an array of field elements.
-  *
-  * This function returns the fields that were actually added in a checkpoint. The number of fields is specified by the
-  * first field.
-  *
-  * @param buf - The buffer to deserialize.
-  * @param checkEncoding - Whether to check if the encoding is correct. If false, it will still check the checkpoint
-  * prefix and throw if there's not enough fields.
-  * @returns An array of field elements.
-  */
- export declare function deserializeEncodedBlobToFields(buf: Uint8Array, checkEncoding?: boolean): Fr[];
- //# sourceMappingURL=deserialize.d.ts.map
package/dest/deserialize.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"deserialize.d.ts","sourceRoot":"","sources":["../src/deserialize.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,MAAM,0BAA0B,CAAC;AAM9C;;;;;;;;;;GAUG;AACH,wBAAgB,8BAA8B,CAAC,GAAG,EAAE,UAAU,EAAE,aAAa,UAAQ,GAAG,EAAE,EAAE,CAoB3F"}