@aztec/blob-lib 4.0.0-nightly.20250907 → 4.0.0-nightly.20260107

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106) hide show
  1. package/dest/batched_blob.d.ts +26 -0
  2. package/dest/batched_blob.d.ts.map +1 -0
  3. package/dest/batched_blob.js +20 -0
  4. package/dest/blob.d.ts +50 -99
  5. package/dest/blob.d.ts.map +1 -1
  6. package/dest/blob.js +78 -169
  7. package/dest/blob_batching.d.ts +41 -123
  8. package/dest/blob_batching.d.ts.map +1 -1
  9. package/dest/blob_batching.js +129 -203
  10. package/dest/blob_utils.d.ts +40 -0
  11. package/dest/blob_utils.d.ts.map +1 -0
  12. package/dest/blob_utils.js +69 -0
  13. package/dest/circuit_types/blob_accumulator.d.ts +23 -0
  14. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/blob_accumulator.js +62 -0
  16. package/dest/circuit_types/final_blob_accumulator.d.ts +23 -0
  17. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_accumulator.js +66 -0
  19. package/dest/circuit_types/final_blob_batching_challenges.d.ts +16 -0
  20. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  21. package/dest/circuit_types/final_blob_batching_challenges.js +26 -0
  22. package/dest/circuit_types/index.d.ts +4 -0
  23. package/dest/circuit_types/index.d.ts.map +1 -0
  24. package/dest/circuit_types/index.js +4 -0
  25. package/dest/encoding/block_blob_data.d.ts +22 -0
  26. package/dest/encoding/block_blob_data.d.ts.map +1 -0
  27. package/dest/encoding/block_blob_data.js +65 -0
  28. package/dest/encoding/block_end_marker.d.ts +11 -0
  29. package/dest/encoding/block_end_marker.d.ts.map +1 -0
  30. package/dest/encoding/block_end_marker.js +41 -0
  31. package/dest/encoding/block_end_state_field.d.ts +12 -0
  32. package/dest/encoding/block_end_state_field.d.ts.map +1 -0
  33. package/dest/encoding/block_end_state_field.js +39 -0
  34. package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
  35. package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
  36. package/dest/encoding/checkpoint_blob_data.js +67 -0
  37. package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
  38. package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
  39. package/dest/encoding/checkpoint_end_marker.js +28 -0
  40. package/dest/encoding/fixtures.d.ts +41 -0
  41. package/dest/encoding/fixtures.d.ts.map +1 -0
  42. package/dest/encoding/fixtures.js +140 -0
  43. package/dest/encoding/index.d.ts +10 -0
  44. package/dest/encoding/index.d.ts.map +1 -0
  45. package/dest/encoding/index.js +9 -0
  46. package/dest/encoding/tx_blob_data.d.ts +19 -0
  47. package/dest/encoding/tx_blob_data.d.ts.map +1 -0
  48. package/dest/encoding/tx_blob_data.js +79 -0
  49. package/dest/encoding/tx_start_marker.d.ts +16 -0
  50. package/dest/encoding/tx_start_marker.d.ts.map +1 -0
  51. package/dest/encoding/tx_start_marker.js +77 -0
  52. package/dest/errors.d.ts +1 -1
  53. package/dest/errors.d.ts.map +1 -1
  54. package/dest/hash.d.ts +43 -0
  55. package/dest/hash.d.ts.map +1 -0
  56. package/dest/hash.js +80 -0
  57. package/dest/index.d.ts +7 -4
  58. package/dest/index.d.ts.map +1 -1
  59. package/dest/index.js +6 -16
  60. package/dest/interface.d.ts +1 -2
  61. package/dest/interface.d.ts.map +1 -1
  62. package/dest/kzg_context.d.ts +8 -0
  63. package/dest/kzg_context.d.ts.map +1 -0
  64. package/dest/kzg_context.js +14 -0
  65. package/dest/sponge_blob.d.ts +12 -14
  66. package/dest/sponge_blob.d.ts.map +1 -1
  67. package/dest/sponge_blob.js +26 -30
  68. package/dest/testing.d.ts +10 -23
  69. package/dest/testing.d.ts.map +1 -1
  70. package/dest/testing.js +37 -53
  71. package/dest/types.d.ts +17 -0
  72. package/dest/types.d.ts.map +1 -0
  73. package/dest/types.js +4 -0
  74. package/package.json +10 -7
  75. package/src/batched_blob.ts +26 -0
  76. package/src/blob.ts +81 -195
  77. package/src/blob_batching.ts +168 -231
  78. package/src/blob_utils.ts +82 -0
  79. package/src/circuit_types/blob_accumulator.ts +96 -0
  80. package/src/circuit_types/final_blob_accumulator.ts +76 -0
  81. package/src/circuit_types/final_blob_batching_challenges.ts +30 -0
  82. package/src/circuit_types/index.ts +4 -0
  83. package/src/encoding/block_blob_data.ts +102 -0
  84. package/src/encoding/block_end_marker.ts +55 -0
  85. package/src/encoding/block_end_state_field.ts +59 -0
  86. package/src/encoding/checkpoint_blob_data.ts +95 -0
  87. package/src/encoding/checkpoint_end_marker.ts +40 -0
  88. package/src/encoding/fixtures.ts +210 -0
  89. package/src/encoding/index.ts +9 -0
  90. package/src/encoding/tx_blob_data.ts +116 -0
  91. package/src/encoding/tx_start_marker.ts +97 -0
  92. package/src/hash.ts +89 -0
  93. package/src/index.ts +6 -19
  94. package/src/interface.ts +0 -1
  95. package/src/kzg_context.ts +16 -0
  96. package/src/sponge_blob.ts +28 -31
  97. package/src/testing.ts +48 -59
  98. package/src/types.ts +17 -0
  99. package/dest/blob_batching_public_inputs.d.ts +0 -71
  100. package/dest/blob_batching_public_inputs.d.ts.map +0 -1
  101. package/dest/blob_batching_public_inputs.js +0 -168
  102. package/dest/encoding.d.ts +0 -66
  103. package/dest/encoding.d.ts.map +0 -1
  104. package/dest/encoding.js +0 -113
  105. package/src/blob_batching_public_inputs.ts +0 -252
  106. package/src/encoding.ts +0 -138
@@ -0,0 +1,116 @@
1
+ import { chunk } from '@aztec/foundation/collection';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
+ import { FieldReader } from '@aztec/foundation/serialize';
4
+
5
+ import { BlobDeserializationError } from '../errors.js';
6
+ import { type TxStartMarker, decodeTxStartMarker, encodeTxStartMarker } from './tx_start_marker.js';
7
+
8
+ // Must match the implementation in noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr.
9
+
10
/**
 * Decoded representation of a single transaction's effects as laid out in blob fields.
 * The property order mirrors the field order produced by `encodeTxBlobData`; the sizes
 * of all variable-length sections are carried in `txStartMarker`.
 */
export interface TxBlobData {
  /** Packed marker carrying the counts/lengths of every section below. */
  txStartMarker: TxStartMarker;
  /** The transaction hash. */
  txHash: Fr;
  /** The transaction fee, as a single field. */
  transactionFee: Fr;
  /** Note hashes emitted by the tx (`numNoteHashes` entries). */
  noteHashes: Fr[];
  /** Nullifiers emitted by the tx (`numNullifiers` entries). */
  nullifiers: Fr[];
  /** L2-to-L1 messages (`numL2ToL1Msgs` entries). */
  l2ToL1Msgs: Fr[];
  /** Public data writes as [leaf slot, value] pairs (`numPublicDataWrites` entries). */
  publicDataWrites: [Fr, Fr][];
  /** Private logs; each log is encoded with a length-prefix field (see `encodeTxBlobData`). */
  privateLogs: Fr[][];
  /** Public log fields (`publicLogsLength` entries in total). */
  publicLogs: Fr[];
  /** Contract class log fields; includes one extra field for the contract address when non-empty. */
  contractClassLog: Fr[];
}
22
+
23
+ export function encodeTxBlobData(txBlobData: TxBlobData): Fr[] {
24
+ return [
25
+ encodeTxStartMarker(txBlobData.txStartMarker),
26
+ txBlobData.txHash,
27
+ txBlobData.transactionFee,
28
+ ...txBlobData.noteHashes,
29
+ ...txBlobData.nullifiers,
30
+ ...txBlobData.l2ToL1Msgs,
31
+ ...txBlobData.publicDataWrites.flat(),
32
+ ...txBlobData.privateLogs.map(log => [new Fr(log.length), ...log]).flat(),
33
+ ...txBlobData.publicLogs,
34
+ ...txBlobData.contractClassLog,
35
+ ];
36
+ }
37
+
38
+ export function decodeTxBlobData(fields: Fr[] | FieldReader): TxBlobData {
39
+ const reader = FieldReader.asReader(fields);
40
+
41
+ if (reader.isFinished()) {
42
+ throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for tx blob data.`);
43
+ }
44
+
45
+ const txStartMarker = decodeTxStartMarker(reader.readField());
46
+
47
+ const checkRemainingFields = (requiredFields: number, type: string) => {
48
+ if (requiredFields > reader.remainingFields()) {
49
+ throw new BlobDeserializationError(
50
+ `Incorrect encoding of blob fields: not enough fields for ${type}. Expected ${requiredFields} fields, only ${reader.remainingFields()} remaining.`,
51
+ );
52
+ }
53
+ };
54
+
55
+ const numTxEffectFields = txStartMarker.numBlobFields - 1; // -1 because we already read the tx start marker.
56
+ checkRemainingFields(numTxEffectFields, 'tx effect');
57
+
58
+ const txHash = reader.readField();
59
+ const transactionFee = reader.readField();
60
+
61
+ checkRemainingFields(txStartMarker.numNoteHashes, 'note hashes');
62
+ const noteHashes = reader.readFieldArray(txStartMarker.numNoteHashes);
63
+
64
+ checkRemainingFields(txStartMarker.numNullifiers, 'nullifiers');
65
+ const nullifiers = reader.readFieldArray(txStartMarker.numNullifiers);
66
+
67
+ checkRemainingFields(txStartMarker.numL2ToL1Msgs, 'l2-to-l1 messages');
68
+ const l2ToL1Msgs = reader.readFieldArray(txStartMarker.numL2ToL1Msgs);
69
+
70
+ checkRemainingFields(txStartMarker.numPublicDataWrites * 2, 'public data writes'); // *2 for leaf slot and value
71
+ const publicDataWrites = chunk(reader.readFieldArray(txStartMarker.numPublicDataWrites * 2), 2) as [Fr, Fr][];
72
+
73
+ const privateLogs = Array.from({ length: txStartMarker.numPrivateLogs }, () => {
74
+ const length = reader.readU32();
75
+ checkRemainingFields(length, 'private log');
76
+ return reader.readFieldArray(length);
77
+ });
78
+
79
+ checkRemainingFields(txStartMarker.publicLogsLength, 'public logs');
80
+ const publicLogs = reader.readFieldArray(txStartMarker.publicLogsLength);
81
+
82
+ const contractClassLogBlobDataLength =
83
+ txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0; // If the log exists, +1 for the contract address
84
+ checkRemainingFields(contractClassLogBlobDataLength, 'contract class logs');
85
+ const contractClassLog = reader.readFieldArray(contractClassLogBlobDataLength);
86
+
87
+ return {
88
+ txStartMarker,
89
+ txHash,
90
+ transactionFee,
91
+ noteHashes,
92
+ nullifiers,
93
+ l2ToL1Msgs,
94
+ publicDataWrites,
95
+ privateLogs,
96
+ publicLogs,
97
+ contractClassLog,
98
+ };
99
+ }
100
+
101
+ export function getNumTxBlobFields(txStartMarker: Omit<TxStartMarker, 'revertCode' | 'numBlobFields'>) {
102
+ return (
103
+ 1 + // tx start marker
104
+ 1 + // tx hash
105
+ 1 + // transaction fee
106
+ txStartMarker.numNoteHashes +
107
+ txStartMarker.numNullifiers +
108
+ txStartMarker.numL2ToL1Msgs +
109
+ txStartMarker.numPublicDataWrites * 2 + // *2 for leaf slot and value per public data write
110
+ txStartMarker.numPrivateLogs + // +1 length field for each private log
111
+ txStartMarker.privateLogsLength +
112
+ txStartMarker.publicLogsLength +
113
+ txStartMarker.contractClassLogLength +
114
+ (txStartMarker.contractClassLogLength > 0 ? 1 : 0) // +1 for contract address of the contract class log
115
+ );
116
+ }
@@ -0,0 +1,97 @@
1
+ import { TX_START_PREFIX } from '@aztec/constants';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
+
4
+ import { BlobDeserializationError } from '../errors.js';
5
+
6
+ // Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr`.
7
+
8
// Bit widths of the sections packed into the single tx-start-marker field.
// encodeTxStartMarker packs them most-significant first in the order of that function's
// shifts (prefix, counts, lengths, revert code, then numBlobFields in the lowest bits);
// decodeTxStartMarker unpacks in the reverse order.
const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
const REVERT_CODE_BIT_SIZE = 8n;
const NUM_NOTE_HASH_BIT_SIZE = 16n;
const NUM_NULLIFIER_BIT_SIZE = 16n;
const NUM_L2_TO_L1_MSG_BIT_SIZE = 16n;
const NUM_PUBLIC_DATA_WRITE_BIT_SIZE = 16n;
const NUM_PRIVATE_LOG_BIT_SIZE = 16n;
const PRIVATE_LOGS_LENGTH_BIT_SIZE = 16n;
const PUBLIC_LOGS_LENGTH_BIT_SIZE = 32n;
const CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE = 16n;
18
+
19
/**
 * Unpacked contents of the tx start marker field. All counts/lengths are bounded by
 * the corresponding *_BIT_SIZE constants above.
 */
export interface TxStartMarker {
  /** Total number of blob fields occupied by the tx, including the marker itself. */
  numBlobFields: number;
  /** The tx's revert code. */
  revertCode: number;
  /** Number of note hashes. */
  numNoteHashes: number;
  /** Number of nullifiers. */
  numNullifiers: number;
  /** Number of l2-to-l1 messages. */
  numL2ToL1Msgs: number;
  /** Number of public data writes (each occupies 2 fields: leaf slot and value). */
  numPublicDataWrites: number;
  /** Number of private logs (each is length-prefixed in the blob). */
  numPrivateLogs: number;
  /** Total number of fields across all private logs, excluding their length prefixes. */
  privateLogsLength: number;
  /** Total number of public log fields. */
  publicLogsLength: number;
  /** Number of contract class log fields (excluding the extra contract address field). */
  contractClassLogLength: number;
}
31
+
32
+ export function encodeTxStartMarker(txStartMarker: TxStartMarker): Fr {
33
+ let value = TX_START_PREFIX;
34
+ value <<= NUM_NOTE_HASH_BIT_SIZE;
35
+ value += BigInt(txStartMarker.numNoteHashes);
36
+ value <<= NUM_NULLIFIER_BIT_SIZE;
37
+ value += BigInt(txStartMarker.numNullifiers);
38
+ value <<= NUM_L2_TO_L1_MSG_BIT_SIZE;
39
+ value += BigInt(txStartMarker.numL2ToL1Msgs);
40
+ value <<= NUM_PUBLIC_DATA_WRITE_BIT_SIZE;
41
+ value += BigInt(txStartMarker.numPublicDataWrites);
42
+ value <<= NUM_PRIVATE_LOG_BIT_SIZE;
43
+ value += BigInt(txStartMarker.numPrivateLogs);
44
+ value <<= PRIVATE_LOGS_LENGTH_BIT_SIZE;
45
+ value += BigInt(txStartMarker.privateLogsLength);
46
+ value <<= PUBLIC_LOGS_LENGTH_BIT_SIZE;
47
+ value += BigInt(txStartMarker.publicLogsLength);
48
+ value <<= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
49
+ value += BigInt(txStartMarker.contractClassLogLength);
50
+ value <<= REVERT_CODE_BIT_SIZE;
51
+ value += BigInt(txStartMarker.revertCode);
52
+ value <<= NUM_BLOB_FIELDS_BIT_SIZE;
53
+ value += BigInt(txStartMarker.numBlobFields);
54
+ return new Fr(value);
55
+ }
56
+
57
+ export function decodeTxStartMarker(field: Fr): TxStartMarker {
58
+ let value = field.toBigInt();
59
+ const numBlobFields = Number(value & (2n ** NUM_BLOB_FIELDS_BIT_SIZE - 1n));
60
+ value >>= NUM_BLOB_FIELDS_BIT_SIZE;
61
+ const revertCode = Number(value & (2n ** REVERT_CODE_BIT_SIZE - 1n));
62
+ value >>= REVERT_CODE_BIT_SIZE;
63
+ const contractClassLogLength = Number(value & (2n ** CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE - 1n));
64
+ value >>= CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE;
65
+ const publicLogsLength = Number(value & (2n ** PUBLIC_LOGS_LENGTH_BIT_SIZE - 1n));
66
+ value >>= PUBLIC_LOGS_LENGTH_BIT_SIZE;
67
+ const privateLogsLength = Number(value & (2n ** PRIVATE_LOGS_LENGTH_BIT_SIZE - 1n));
68
+ value >>= PRIVATE_LOGS_LENGTH_BIT_SIZE;
69
+ const numPrivateLogs = Number(value & (2n ** NUM_PRIVATE_LOG_BIT_SIZE - 1n));
70
+ value >>= NUM_PRIVATE_LOG_BIT_SIZE;
71
+ const numPublicDataWrites = Number(value & (2n ** NUM_PUBLIC_DATA_WRITE_BIT_SIZE - 1n));
72
+ value >>= NUM_PUBLIC_DATA_WRITE_BIT_SIZE;
73
+ const numL2ToL1Msgs = Number(value & (2n ** NUM_L2_TO_L1_MSG_BIT_SIZE - 1n));
74
+ value >>= NUM_L2_TO_L1_MSG_BIT_SIZE;
75
+ const numNullifiers = Number(value & (2n ** NUM_NULLIFIER_BIT_SIZE - 1n));
76
+ value >>= NUM_NULLIFIER_BIT_SIZE;
77
+ const numNoteHashes = Number(value & (2n ** NUM_NOTE_HASH_BIT_SIZE - 1n));
78
+ value >>= NUM_NOTE_HASH_BIT_SIZE;
79
+
80
+ const prefix = value;
81
+ if (prefix !== TX_START_PREFIX) {
82
+ throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid tx start marker.`);
83
+ }
84
+
85
+ return {
86
+ numBlobFields,
87
+ revertCode,
88
+ numNoteHashes,
89
+ numNullifiers,
90
+ numL2ToL1Msgs,
91
+ numPublicDataWrites,
92
+ numPrivateLogs,
93
+ privateLogsLength,
94
+ publicLogsLength,
95
+ contractClassLogLength,
96
+ };
97
+ }
package/src/hash.ts ADDED
@@ -0,0 +1,89 @@
1
+ import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
2
+ import { sha256, sha256ToField } from '@aztec/foundation/crypto/sha256';
3
+ import { BLS12Fr } from '@aztec/foundation/curves/bls12';
4
+ import { Fr } from '@aztec/foundation/curves/bn254';
5
+
6
+ import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, getKzg } from './kzg_context.js';
7
+ import { SpongeBlob } from './sponge_blob.js';
8
+
9
+ const VERSIONED_HASH_VERSION_KZG = 0x01;
10
+
11
+ /**
12
+ * Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
13
+ */
14
+ export function computeEthVersionedBlobHash(commitment: Buffer): Buffer {
15
+ const hash = sha256(commitment);
16
+ hash[0] = VERSIONED_HASH_VERSION_KZG;
17
+ return hash;
18
+ }
19
+
20
// TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, calculated from below blobCommitments:
// - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
// - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) = iteratively calculated hash of all blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()),
// exists so we can validate injected commitments to the rollup circuits correspond to the correct real blobs.
// We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
/**
 * Hashes a block's EVM versioned blob hashes into a single field element via sha256.
 *
 * @param evmVersionedBlobHashes - The versioned blob hashes (see computeEthVersionedBlobHash).
 * @returns The sha256 of the concatenated hashes, reduced to a field.
 */
export function computeBlobsHash(evmVersionedBlobHashes: Buffer[]): Fr {
  return sha256ToField(evmVersionedBlobHashes);
}
28
+
29
+ /**
30
+ * Computes a non-standard Poseidon2 hash over the provided fields.
31
+ *
32
+ * This function is used to compute:
33
+ * - `blobFieldsHash` of a checkpoint:
34
+ * Verified in the circuit against all fields absorbed into the blob sponge over the entire checkpoint.
35
+ * The exact number of fields is encoded in the checkpoint end marker (the last field).
36
+ * This hash is used when generating the challenge `z` for all blobs in the checkpoint.
37
+ * - `spongeBlobHash` of a block:
38
+ * Computed from the block's tx effects, its end-state, and the blob fields of all prior blocks in the same checkpoint.
39
+ * This hash is included in the block header.
40
+ */
41
+ export async function computeBlobFieldsHash(fields: Fr[]): Promise<Fr> {
42
+ const sponge = SpongeBlob.init();
43
+ await sponge.absorb(fields);
44
+ return sponge.squeeze();
45
+ }
46
+
47
+ export function computeBlobCommitment(data: Uint8Array): Buffer {
48
+ if (data.length !== BYTES_PER_BLOB) {
49
+ throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. Got ${data.length}.`);
50
+ }
51
+
52
+ return Buffer.from(getKzg().blobToKzgCommitment(data));
53
+ }
54
+
55
+ /**
56
+ * Get the commitment fields of the blob, to compute the challenge z.
57
+ *
58
+ * The 48-byte commitment is encoded into two field elements:
59
+ * +-------------------+------------------------+
60
+ * | 31 bytes | 17 bytes |
61
+ * +-------------------+------------------------+
62
+ * | Field Element 1 | Field Element 2 |
63
+ * | [0][bytes 0-30] | [0...0][bytes 31-47] |
64
+ * +-------------------+------------------------+
65
+ *
66
+ * @param commitment - The commitment to convert to fields. Computed from `computeBlobCommitment`.
67
+ * @returns The fields representing the commitment buffer.
68
+ */
69
+ export function commitmentToFields(commitment: Buffer): [Fr, Fr] {
70
+ if (commitment.length !== BYTES_PER_COMMITMENT) {
71
+ throw new Error(`Expected ${BYTES_PER_COMMITMENT} bytes for blob commitment. Got ${commitment.length}.`);
72
+ }
73
+
74
+ return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, BYTES_PER_COMMITMENT))];
75
+ }
76
+
77
+ export async function computeChallengeZ(blobFieldsHash: Fr, commitment: Buffer): Promise<Fr> {
78
+ const commitmentFields = commitmentToFields(commitment);
79
+ return await poseidon2Hash([blobFieldsHash, commitmentFields[0], commitmentFields[1]]);
80
+ }
81
+
82
+ /**
83
+ * Hash each u128 limb of the noir bignum struct representing the BLS field, to mimic the hash accumulation in the
84
+ * rollup circuits.
85
+ */
86
+ export async function hashNoirBigNumLimbs(field: BLS12Fr): Promise<Fr> {
87
+ const num = field.toNoirBigNum();
88
+ return await poseidon2Hash(num.limbs.map(Fr.fromHexString));
89
+ }
package/src/index.ts CHANGED
@@ -1,23 +1,10 @@
1
- import cKzg from 'c-kzg';
2
-
3
- const { loadTrustedSetup } = cKzg;
4
-
1
+ export * from './batched_blob.js';
5
2
  export * from './blob.js';
6
3
  export * from './blob_batching.js';
7
- export * from './encoding.js';
4
+ export * from './blob_utils.js';
5
+ export * from './circuit_types/index.js';
6
+ export * from './encoding/index.js';
7
+ export * from './hash.js';
8
8
  export * from './interface.js';
9
- export * from './errors.js';
10
- export * from './blob_batching_public_inputs.js';
11
9
  export * from './sponge_blob.js';
12
-
13
- try {
14
- loadTrustedSetup();
15
- } catch (error: any) {
16
- if (error.message.includes('trusted setup is already loaded')) {
17
- // NB: The c-kzg lib has no way of checking whether the setup is loaded or not,
18
- // and it throws an error if it's already loaded, even though nothing is wrong.
19
- // This is a rudimentary way of ensuring we load the trusted setup if we need it.
20
- } else {
21
- throw new Error(error);
22
- }
23
- }
10
+ export * from './kzg_context.js';
package/src/interface.ts CHANGED
@@ -3,6 +3,5 @@
3
3
  */
4
4
  export interface BlobJson {
5
5
  blob: string;
6
- index: string;
7
6
  kzg_commitment: string;
8
7
  }
@@ -0,0 +1,16 @@
1
+ import { DasContextJs } from '@crate-crypto/node-eth-kzg';
2
+
3
+ export * from '@crate-crypto/node-eth-kzg';
4
+
5
+ let kzgInstance: DasContextJs | undefined;
6
+
7
+ /**
8
+ * Returns the lazily-initialized KZG context.
9
+ * The first call takes ~3 seconds to initialize the precomputation tables.
10
+ */
11
+ export function getKzg(): DasContextJs {
12
+ if (!kzgInstance) {
13
+ kzgInstance = DasContextJs.create({ usePrecomp: true });
14
+ }
15
+ return kzgInstance;
16
+ }
@@ -1,6 +1,7 @@
1
+ import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, TWO_POW_64 } from '@aztec/constants';
1
2
  import { type FieldsOf, makeTuple } from '@aztec/foundation/array';
2
- import { poseidon2Permutation } from '@aztec/foundation/crypto';
3
- import { Fr } from '@aztec/foundation/fields';
3
+ import { poseidon2Permutation } from '@aztec/foundation/crypto/poseidon';
4
+ import { Fr } from '@aztec/foundation/curves/bn254';
4
5
  import {
5
6
  BufferReader,
6
7
  FieldReader,
@@ -10,30 +11,40 @@ import {
10
11
  } from '@aztec/foundation/serialize';
11
12
 
12
13
  /**
13
- * A Poseidon2 sponge used to accumulate data that will be added to a blob.
14
+ * A Poseidon2 sponge used to accumulate data that will be added to blobs.
14
15
  * See noir-projects/noir-protocol-circuits/crates/types/src/abis/sponge_blob.nr.
15
16
  */
16
17
  export class SpongeBlob {
18
+ static MAX_FIELDS = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB;
19
+
17
20
  constructor(
18
- /** Sponge with absorbed tx effects that will go into a blob. */
21
+ /** Sponge with absorbed fields that will go into one or more blobs. */
19
22
  public readonly sponge: Poseidon2Sponge,
20
23
  /** Number of effects absorbed so far. */
21
- public fields: number,
22
- /** Number of effects that will be absorbed. */
23
- public readonly expectedFields: number,
24
+ public numAbsorbedFields: number,
24
25
  ) {}
25
26
 
27
+ /**
28
+ * Initialize the sponge blob to absorb data for a checkpoint.
29
+ */
30
+ static init(): SpongeBlob {
31
+ // This must match the implementation in noir-projects/noir-protocol-circuits/types/src/abis/sponge_blob.nr
32
+ const iv = new Fr(BigInt(SpongeBlob.MAX_FIELDS) * TWO_POW_64);
33
+ const sponge = Poseidon2Sponge.init(iv);
34
+ return new SpongeBlob(sponge, 0);
35
+ }
36
+
26
37
  static fromBuffer(buffer: Buffer | BufferReader): SpongeBlob {
27
38
  const reader = BufferReader.asReader(buffer);
28
- return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber(), reader.readNumber());
39
+ return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readNumber());
29
40
  }
30
41
 
31
42
  toBuffer() {
32
- return serializeToBuffer(this.sponge, this.fields, this.expectedFields);
43
+ return serializeToBuffer(...SpongeBlob.getFields(this));
33
44
  }
34
45
 
35
46
  static getFields(fields: FieldsOf<SpongeBlob>) {
36
- return [fields.sponge, fields.fields, fields.expectedFields];
47
+ return [fields.sponge, fields.numAbsorbedFields];
37
48
  }
38
49
 
39
50
  toFields(): Fr[] {
@@ -42,11 +53,7 @@ export class SpongeBlob {
42
53
 
43
54
  static fromFields(fields: Fr[] | FieldReader): SpongeBlob {
44
55
  const reader = FieldReader.asReader(fields);
45
- return new SpongeBlob(
46
- reader.readObject(Poseidon2Sponge),
47
- reader.readField().toNumber(),
48
- reader.readField().toNumber(),
49
- );
56
+ return new SpongeBlob(reader.readObject(Poseidon2Sponge), reader.readField().toNumber());
50
57
  }
51
58
 
52
59
  clone() {
@@ -54,30 +61,21 @@ export class SpongeBlob {
54
61
  }
55
62
 
56
63
  async absorb(fields: Fr[]) {
57
- if (this.fields + fields.length > this.expectedFields) {
64
+ if (this.numAbsorbedFields + fields.length > SpongeBlob.MAX_FIELDS) {
58
65
  throw new Error(
59
- `Attempted to fill spongeblob with ${this.fields + fields.length}, but it has a max of ${this.expectedFields}`,
66
+ `Attempted to fill spongeBlob with ${this.numAbsorbedFields + fields.length}, but it has a max of ${SpongeBlob.MAX_FIELDS}`,
60
67
  );
61
68
  }
62
69
  await this.sponge.absorb(fields);
63
- this.fields += fields.length;
70
+ this.numAbsorbedFields += fields.length;
64
71
  }
65
72
 
66
73
  async squeeze(): Promise<Fr> {
67
- // If the blob sponge is not 'full', we append 1 to match Poseidon2::hash_internal()
68
- // NB: There is currently no use case in which we don't 'fill' a blob sponge, but adding for completeness
69
- if (this.fields != this.expectedFields) {
70
- await this.sponge.absorb([Fr.ONE]);
71
- }
72
- return this.sponge.squeeze();
74
+ return await this.sponge.squeeze();
73
75
  }
74
76
 
75
77
  static empty(): SpongeBlob {
76
- return new SpongeBlob(Poseidon2Sponge.empty(), 0, 0);
77
- }
78
-
79
- static init(expectedFields: number): SpongeBlob {
80
- return new SpongeBlob(Poseidon2Sponge.init(expectedFields), 0, expectedFields);
78
+ return new SpongeBlob(Poseidon2Sponge.empty(), 0);
81
79
  }
82
80
  }
83
81
 
@@ -131,8 +129,7 @@ export class Poseidon2Sponge {
131
129
  );
132
130
  }
133
131
 
134
- static init(expectedFields: number): Poseidon2Sponge {
135
- const iv = new Fr(expectedFields).mul(new Fr(BigInt('18446744073709551616')));
132
+ static init(iv: Fr): Poseidon2Sponge {
136
133
  const sponge = Poseidon2Sponge.empty();
137
134
  sponge.state[3] = iv;
138
135
  return sponge;
package/src/testing.ts CHANGED
@@ -1,13 +1,15 @@
1
1
  import { makeTuple } from '@aztec/foundation/array';
2
- import { toBufferBE } from '@aztec/foundation/bigint-buffer';
3
- import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
2
+ import { BLS12Fq, BLS12Fr, BLS12Point, BLSPointNotOnCurveError } from '@aztec/foundation/curves/bls12';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
4
4
 
5
5
  import { Blob } from './blob.js';
6
- import { BatchedBlobAccumulator, FinalBlobBatchingChallenges } from './blob_batching.js';
7
- import { BlobAccumulatorPublicInputs, BlockBlobPublicInputs } from './blob_batching_public_inputs.js';
8
- import { TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from './encoding.js';
6
+ import { BlobAccumulator } from './circuit_types/blob_accumulator.js';
7
+ import { FinalBlobAccumulator } from './circuit_types/final_blob_accumulator.js';
8
+ import { FinalBlobBatchingChallenges } from './circuit_types/final_blob_batching_challenges.js';
9
9
  import { Poseidon2Sponge, SpongeBlob } from './sponge_blob.js';
10
10
 
11
+ export * from './encoding/fixtures.js';
12
+
11
13
  /**
12
14
  * Makes arbitrary poseidon sponge for blob inputs.
13
15
  * Note: will not verify inside the circuit.
@@ -23,83 +25,70 @@ export function makeSpongeBlob(seed = 1): SpongeBlob {
23
25
  false,
24
26
  ),
25
27
  seed,
26
- seed + 1,
27
28
  );
28
29
  }
29
30
 
30
31
  /**
31
- * Makes arbitrary blob public accumulator.
32
- * Note: will not verify inside the circuit.
33
- * @param seed - The seed to use for generating the blob accumulator.
34
- * @returns A blob accumulator instance.
32
+ * Makes an arbitrary but valid BLS12 point. The value is deterministic for a given seed.
33
+ * @param seed - The seed to use for generating the point.
34
+ * @returns A BLS12 point instance.
35
35
  */
36
- export function makeBatchedBlobAccumulator(seed = 1): BatchedBlobAccumulator {
37
- return new BatchedBlobAccumulator(
38
- new Fr(seed),
39
- new Fr(seed + 1),
40
- new BLS12Fr(seed + 2),
41
- BLS12Point.random(),
42
- BLS12Point.random(),
43
- new Fr(seed + 3),
44
- new BLS12Fr(seed + 4),
45
- new FinalBlobBatchingChallenges(new Fr(seed + 5), new BLS12Fr(seed + 6)),
46
- );
36
+ function makeBLS12Point(seed = 1): BLS12Point {
37
+ let accum = 0;
38
+ while (true) {
39
+ try {
40
+ const x = new BLS12Fq(seed + accum);
41
+ const y = BLS12Point.YFromX(x);
42
+ if (y) {
43
+ return new BLS12Point(x, y, false);
44
+ }
45
+ accum++;
46
+ } catch (e: any) {
47
+ if (!(e instanceof BLSPointNotOnCurveError)) {
48
+ throw e;
49
+ }
50
+ // The point is not on the curve - try again
51
+ }
52
+ }
47
53
  }
48
54
 
49
55
  /**
50
- * Makes arbitrary block blob public inputs.
56
+ * Makes arbitrary blob public accumulator.
51
57
  * Note: will not verify inside the circuit.
52
- * @param seed - The seed to use for generating the blob inputs.
53
- * @returns A block blob public inputs instance.
58
+ * @param seed - The seed to use for generating the blob accumulator.
59
+ * @returns A blob accumulator instance.
54
60
  */
55
- export function makeBlockBlobPublicInputs(seed = 1): BlockBlobPublicInputs {
56
- const startBlobAccumulator = makeBatchedBlobAccumulator(seed);
57
- return new BlockBlobPublicInputs(
58
- BlobAccumulatorPublicInputs.fromBatchedBlobAccumulator(startBlobAccumulator),
59
- BlobAccumulatorPublicInputs.fromBatchedBlobAccumulator(makeBatchedBlobAccumulator(seed + 1)),
60
- startBlobAccumulator.finalBlobChallenges,
61
+ export function makeBlobAccumulator(seed = 1): BlobAccumulator {
62
+ return new BlobAccumulator(
63
+ new Fr(seed),
64
+ new Fr(seed + 0x10),
65
+ new BLS12Fr(seed + 0x20),
66
+ makeBLS12Point(seed + 0x30),
67
+ new Fr(seed + 0x50),
68
+ new BLS12Fr(seed + 0x60),
61
69
  );
62
70
  }
63
71
 
64
- // TODO: copied form stdlib tx effect
65
- function encodeFirstField(length: number): Fr {
66
- const lengthBuf = Buffer.alloc(2);
67
- lengthBuf.writeUInt16BE(length, 0);
68
- return new Fr(
69
- Buffer.concat([
70
- toBufferBE(TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH),
71
- Buffer.alloc(1),
72
- lengthBuf,
73
- Buffer.alloc(1),
74
- Buffer.from([1]),
75
- Buffer.alloc(1),
76
- Buffer.alloc(1),
77
- ]),
72
+ export function makeFinalBlobAccumulator(seed = 1) {
73
+ return new FinalBlobAccumulator(
74
+ new Fr(seed),
75
+ new Fr(seed + 0x10),
76
+ new BLS12Fr(seed + 0x20),
77
+ makeBLS12Point(seed + 0x30),
78
78
  );
79
79
  }
80
80
 
81
- /**
82
- * Make an encoded blob with the given length
83
- *
84
- * This will deserialise correctly in the archiver
85
- * @param length
86
- * @returns
87
- */
88
- export function makeEncodedBlob(length: number): Promise<Blob> {
89
- return Blob.fromFields([encodeFirstField(length + 1), ...Array.from({ length: length }, () => Fr.random())]);
81
+ export function makeFinalBlobBatchingChallenges(seed = 1) {
82
+ return new FinalBlobBatchingChallenges(new Fr(seed), new BLS12Fr(seed + 0x10));
90
83
  }
91
84
 
92
85
  /**
93
- * Make an unencoded blob with the given length
86
+ * Make a blob with random fields.
94
87
  *
95
88
  * This will fail deserialisation in the archiver
96
89
  * @param length
97
90
  * @returns
98
91
  */
99
- export function makeUnencodedBlob(length: number): Promise<Blob> {
92
+ export function makeRandomBlob(length: number): Blob {
100
93
  return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]);
101
94
  }
102
-
103
- export function makeEncodedBlobFields(fields: Fr[]): Promise<Blob> {
104
- return Blob.fromFields([encodeFirstField(fields.length + 1), ...fields]);
105
- }
package/src/types.ts ADDED
@@ -0,0 +1,17 @@
1
+ export * from './batched_blob.js';
2
+ export * from './circuit_types/index.js';
3
+ export * from './interface.js';
4
+ export * from './sponge_blob.js';
5
+
6
/**
 * Type definition for the KZG instance returned by Blob.getViemKzgInstance().
 * Contains the cryptographic functions needed for blob commitment and proof generation.
 */
export interface BlobKzgInstance {
  /** Computes the KZG commitment for the given blob data. */
  blobToKzgCommitment(blob: Uint8Array): Uint8Array;
  /** Computes the KZG proof for the given blob data and commitment. */
  computeBlobKzgProof(blob: Uint8Array, commitment: Uint8Array): Uint8Array;
  /** Computes both the blob data cells and their corresponding KZG proofs (EIP-7594). */
  computeCellsAndKzgProofs(blob: Uint8Array): [Uint8Array[], Uint8Array[]];
}