@aztec/blob-lib 0.0.1-commit.fce3e4f → 0.0.1-commit.ff7989d6c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/dest/batched_blob.d.ts +8 -2
  2. package/dest/batched_blob.d.ts.map +1 -1
  3. package/dest/blob.d.ts +16 -10
  4. package/dest/blob.d.ts.map +1 -1
  5. package/dest/blob.js +15 -14
  6. package/dest/blob_batching.d.ts +15 -2
  7. package/dest/blob_batching.d.ts.map +1 -1
  8. package/dest/blob_batching.js +13 -11
  9. package/dest/blob_utils.d.ts +4 -3
  10. package/dest/blob_utils.d.ts.map +1 -1
  11. package/dest/blob_utils.js +4 -4
  12. package/dest/circuit_types/blob_accumulator.d.ts +3 -2
  13. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -1
  14. package/dest/circuit_types/blob_accumulator.js +2 -1
  15. package/dest/circuit_types/final_blob_accumulator.d.ts +3 -2
  16. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -1
  17. package/dest/circuit_types/final_blob_accumulator.js +5 -2
  18. package/dest/circuit_types/final_blob_batching_challenges.d.ts +3 -2
  19. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -1
  20. package/dest/circuit_types/final_blob_batching_challenges.js +2 -1
  21. package/dest/encoding/block_blob_data.d.ts +10 -2
  22. package/dest/encoding/block_blob_data.d.ts.map +1 -1
  23. package/dest/encoding/block_blob_data.js +11 -1
  24. package/dest/encoding/block_end_marker.d.ts +4 -3
  25. package/dest/encoding/block_end_marker.d.ts.map +1 -1
  26. package/dest/encoding/block_end_marker.js +6 -5
  27. package/dest/encoding/block_end_state_field.d.ts +2 -2
  28. package/dest/encoding/block_end_state_field.d.ts.map +1 -1
  29. package/dest/encoding/block_end_state_field.js +1 -1
  30. package/dest/encoding/checkpoint_blob_data.d.ts +2 -2
  31. package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -1
  32. package/dest/encoding/checkpoint_blob_data.js +6 -6
  33. package/dest/encoding/checkpoint_end_marker.d.ts +2 -2
  34. package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -1
  35. package/dest/encoding/checkpoint_end_marker.js +4 -4
  36. package/dest/encoding/fixtures.d.ts +1 -1
  37. package/dest/encoding/fixtures.d.ts.map +1 -1
  38. package/dest/encoding/fixtures.js +3 -2
  39. package/dest/encoding/tx_blob_data.d.ts +2 -2
  40. package/dest/encoding/tx_blob_data.d.ts.map +1 -1
  41. package/dest/encoding/tx_blob_data.js +1 -1
  42. package/dest/encoding/tx_start_marker.d.ts +2 -2
  43. package/dest/encoding/tx_start_marker.d.ts.map +1 -1
  44. package/dest/encoding/tx_start_marker.js +3 -3
  45. package/dest/hash.d.ts +4 -3
  46. package/dest/hash.d.ts.map +1 -1
  47. package/dest/hash.js +6 -5
  48. package/dest/index.d.ts +2 -1
  49. package/dest/index.d.ts.map +1 -1
  50. package/dest/index.js +1 -0
  51. package/dest/interface.d.ts +1 -2
  52. package/dest/interface.d.ts.map +1 -1
  53. package/dest/kzg_context.d.ts +6 -2
  54. package/dest/kzg_context.d.ts.map +1 -1
  55. package/dest/kzg_context.js +12 -3
  56. package/dest/sponge_blob.d.ts +4 -2
  57. package/dest/sponge_blob.d.ts.map +1 -1
  58. package/dest/sponge_blob.js +2 -2
  59. package/dest/testing.d.ts +2 -2
  60. package/dest/testing.d.ts.map +1 -1
  61. package/dest/testing.js +2 -1
  62. package/package.json +7 -7
  63. package/src/batched_blob.ts +2 -1
  64. package/src/blob.ts +15 -14
  65. package/src/blob_batching.ts +18 -11
  66. package/src/blob_utils.ts +7 -4
  67. package/src/circuit_types/blob_accumulator.ts +2 -1
  68. package/src/circuit_types/final_blob_accumulator.ts +2 -1
  69. package/src/circuit_types/final_blob_batching_challenges.ts +2 -1
  70. package/src/encoding/block_blob_data.ts +14 -2
  71. package/src/encoding/block_end_marker.ts +7 -6
  72. package/src/encoding/block_end_state_field.ts +1 -1
  73. package/src/encoding/checkpoint_blob_data.ts +13 -6
  74. package/src/encoding/checkpoint_end_marker.ts +4 -4
  75. package/src/encoding/fixtures.ts +3 -2
  76. package/src/encoding/tx_blob_data.ts +1 -1
  77. package/src/encoding/tx_start_marker.ts +3 -3
  78. package/src/hash.ts +7 -5
  79. package/src/index.ts +1 -0
  80. package/src/interface.ts +0 -1
  81. package/src/kzg_context.ts +12 -1
  82. package/src/sponge_blob.ts +2 -2
  83. package/src/testing.ts +3 -2
package/src/blob.ts CHANGED
@@ -1,10 +1,11 @@
1
1
  import { FIELDS_PER_BLOB } from '@aztec/constants';
2
- import { BLS12Fr, Fr } from '@aztec/foundation/fields';
2
+ import { BLS12Fr } from '@aztec/foundation/curves/bls12';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
4
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
4
5
 
5
6
  import { computeBlobCommitment, computeChallengeZ, computeEthVersionedBlobHash } from './hash.js';
6
7
  import type { BlobJson } from './interface.js';
7
- import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
8
+ import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, getKzg } from './kzg_context.js';
8
9
 
9
10
  export { FIELDS_PER_BLOB };
10
11
 
@@ -41,8 +42,8 @@ export class Blob {
41
42
  *
42
43
  * @throws If data does not match the expected length (BYTES_PER_BLOB).
43
44
  */
44
- static fromBlobBuffer(data: Uint8Array): Blob {
45
- const commitment = computeBlobCommitment(data);
45
+ static async fromBlobBuffer(data: Uint8Array): Promise<Blob> {
46
+ const commitment = await computeBlobCommitment(data);
46
47
  return new Blob(data, commitment);
47
48
  }
48
49
 
@@ -54,13 +55,13 @@ export class Blob {
54
55
  * @param fields - The array of fields to create the Blob from.
55
56
  * @returns A Blob created from the array of fields.
56
57
  */
57
- static fromFields(fields: Fr[]): Blob {
58
+ static async fromFields(fields: Fr[]): Promise<Blob> {
58
59
  if (fields.length > FIELDS_PER_BLOB) {
59
60
  throw new Error(`Attempted to overfill blob with ${fields.length} fields. The maximum is ${FIELDS_PER_BLOB}.`);
60
61
  }
61
62
 
62
63
  const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
63
- const commitment = computeBlobCommitment(data);
64
+ const commitment = await computeBlobCommitment(data);
64
65
  return new Blob(data, commitment);
65
66
  }
66
67
 
@@ -80,16 +81,16 @@ export class Blob {
80
81
  /**
81
82
  * Create a Blob from a JSON object.
82
83
  *
83
- * Blobs will be in this form when requested from the blob sink, or from
84
+ * Blobs will be in this form when requested from the blob client, or from
84
85
  * the beacon chain via `getBlobSidecars`
85
86
  * https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars
86
87
  *
87
88
  * @param json - The JSON object to create the Blob from.
88
89
  * @returns A Blob created from the JSON object.
89
90
  */
90
- static fromJson(json: BlobJson): Blob {
91
+ static async fromJson(json: BlobJson): Promise<Blob> {
91
92
  const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');
92
- const blob = Blob.fromBlobBuffer(blobBuffer);
93
+ const blob = await Blob.fromBlobBuffer(blobBuffer);
93
94
 
94
95
  if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
95
96
  throw new Error('KZG commitment does not match');
@@ -101,13 +102,11 @@ export class Blob {
101
102
  /**
102
103
  * Get the JSON representation of the blob.
103
104
  *
104
- * @param index - optional - The index of the blob in the block.
105
105
  * @returns The JSON representation of the blob.
106
106
  */
107
- toJson(index: number): BlobJson {
107
+ toJSON(): BlobJson {
108
108
  return {
109
109
  blob: `0x${Buffer.from(this.data).toString('hex')}`,
110
- index: index.toString(),
111
110
  // eslint-disable-next-line camelcase
112
111
  kzg_commitment: `0x${this.commitment.toString('hex')}`,
113
112
  };
@@ -135,8 +134,9 @@ export class Blob {
135
134
  * y: BLS12Fr - Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts.
136
135
  * proof: Buffer - KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes).
137
136
  */
138
- evaluate(challengeZ: Fr, verifyProof = false) {
139
- const res = kzg.computeKzgProof(this.data, challengeZ.toBuffer());
137
+ async evaluate(challengeZ: Fr, verifyProof = false) {
138
+ const kzg = getKzg();
139
+ const res = await kzg.asyncComputeKzgProof(this.data, challengeZ.toBuffer());
140
140
  if (verifyProof && !kzg.verifyKzgProof(this.commitment, challengeZ.toBuffer(), res[1], res[0])) {
141
141
  throw new Error(`KZG proof did not verify.`);
142
142
  }
@@ -178,6 +178,7 @@ export class Blob {
178
178
  }
179
179
 
180
180
  static getViemKzgInstance() {
181
+ const kzg = getKzg();
181
182
  return {
182
183
  blobToKzgCommitment: kzg.blobToKzgCommitment.bind(kzg),
183
184
  computeBlobKzgProof: kzg.computeBlobKzgProof.bind(kzg),
@@ -1,13 +1,15 @@
1
- import { AZTEC_MAX_EPOCH_DURATION, BLOBS_PER_CHECKPOINT } from '@aztec/constants';
2
- import { poseidon2Hash, sha256ToField } from '@aztec/foundation/crypto';
3
- import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
1
+ import { BLOBS_PER_CHECKPOINT, MAX_CHECKPOINTS_PER_EPOCH } from '@aztec/constants';
2
+ import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
3
+ import { sha256ToField } from '@aztec/foundation/crypto/sha256';
4
+ import { BLS12Fr, BLS12Point } from '@aztec/foundation/curves/bls12';
5
+ import { Fr } from '@aztec/foundation/curves/bn254';
4
6
 
5
7
  import { BatchedBlob } from './batched_blob.js';
6
8
  import { Blob } from './blob.js';
7
9
  import { getBlobsPerL1Block } from './blob_utils.js';
8
10
  import { BlobAccumulator, FinalBlobAccumulator, FinalBlobBatchingChallenges } from './circuit_types/index.js';
9
11
  import { computeBlobFieldsHash, hashNoirBigNumLimbs } from './hash.js';
10
- import { kzg } from './kzg_context.js';
12
+ import { getKzg } from './kzg_context.js';
11
13
 
12
14
  /**
13
15
  * A class to create, manage, and prove batched EVM blobs.
@@ -73,9 +75,9 @@ export class BatchedBlobAccumulator {
73
75
  */
74
76
  static async batch(blobFieldsPerCheckpoint: Fr[][], verifyProof = false): Promise<BatchedBlob> {
75
77
  const numCheckpoints = blobFieldsPerCheckpoint.length;
76
- if (numCheckpoints > AZTEC_MAX_EPOCH_DURATION) {
78
+ if (numCheckpoints > MAX_CHECKPOINTS_PER_EPOCH) {
77
79
  throw new Error(
78
- `Too many checkpoints sent to batch(). The maximum is ${AZTEC_MAX_EPOCH_DURATION}. Got ${numCheckpoints}.`,
80
+ `Too many checkpoints sent to batch(). The maximum is ${MAX_CHECKPOINTS_PER_EPOCH}. Got ${numCheckpoints}.`,
79
81
  );
80
82
  }
81
83
 
@@ -107,7 +109,7 @@ export class BatchedBlobAccumulator {
107
109
  for (const blobFields of blobFieldsPerCheckpoint) {
108
110
  // Compute the hash of all the fields in the block.
109
111
  const blobFieldsHash = await computeBlobFieldsHash(blobFields);
110
- const blobs = getBlobsPerL1Block(blobFields);
112
+ const blobs = await getBlobsPerL1Block(blobFields);
111
113
  for (const blob of blobs) {
112
114
  // Compute the challenge z for each blob and accumulate it.
113
115
  const challengeZ = await blob.computeChallengeZ(blobFieldsHash);
@@ -124,7 +126,7 @@ export class BatchedBlobAccumulator {
124
126
  }
125
127
 
126
128
  // Now we have a shared challenge for all blobs, evaluate them...
127
- const proofObjects = allBlobs.map(b => b.evaluate(z));
129
+ const proofObjects = await Promise.all(allBlobs.map(b => b.evaluate(z)));
128
130
  const evaluations = await Promise.all(proofObjects.map(({ y }) => hashNoirBigNumLimbs(y)));
129
131
  // ...and find the challenge for the linear combination of blobs.
130
132
  let gamma = evaluations[0];
@@ -143,7 +145,7 @@ export class BatchedBlobAccumulator {
143
145
  * @returns An updated blob accumulator.
144
146
  */
145
147
  async accumulateBlob(blob: Blob, blobFieldsHash: Fr) {
146
- const { proof, y: thisY } = blob.evaluate(this.finalBlobChallenges.z);
148
+ const { proof, y: thisY } = await blob.evaluate(this.finalBlobChallenges.z);
147
149
  const thisC = BLS12Point.decompress(blob.commitment);
148
150
  const thisQ = BLS12Point.decompress(proof);
149
151
  const blobChallengeZ = await blob.computeChallengeZ(blobFieldsHash);
@@ -190,7 +192,7 @@ export class BatchedBlobAccumulator {
190
192
  * @returns An updated blob accumulator.
191
193
  */
192
194
  async accumulateFields(blobFields: Fr[]) {
193
- const blobs = getBlobsPerL1Block(blobFields);
195
+ const blobs = await getBlobsPerL1Block(blobFields);
194
196
 
195
197
  if (blobs.length > BLOBS_PER_CHECKPOINT) {
196
198
  throw new Error(
@@ -248,7 +250,12 @@ export class BatchedBlobAccumulator {
248
250
  }
249
251
 
250
252
  verify() {
251
- return kzg.verifyKzgProof(this.cAcc.compress(), this.zAcc.toBuffer(), this.yAcc.toBuffer(), this.qAcc.compress());
253
+ return getKzg().verifyKzgProof(
254
+ this.cAcc.compress(),
255
+ this.zAcc.toBuffer(),
256
+ this.yAcc.toBuffer(),
257
+ this.qAcc.compress(),
258
+ );
252
259
  }
253
260
 
254
261
  isEmptyState() {
package/src/blob_utils.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  import { FIELDS_PER_BLOB } from '@aztec/constants';
2
- import { BLS12Point, Fr } from '@aztec/foundation/fields';
2
+ import { BLS12Point } from '@aztec/foundation/curves/bls12';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
4
 
4
5
  import type { BatchedBlob } from './batched_blob.js';
5
6
  import { Blob } from './blob.js';
@@ -29,14 +30,16 @@ export function getPrefixedEthBlobCommitments(blobs: Blob[]): `0x${string}` {
29
30
  *
30
31
  * @throws If the number of fields does not match what's indicated by the checkpoint prefix.
31
32
  */
32
- export function getBlobsPerL1Block(fields: Fr[]): Blob[] {
33
+ export async function getBlobsPerL1Block(fields: Fr[]): Promise<Blob[]> {
33
34
  if (!fields.length) {
34
35
  throw new Error('Cannot create blobs from empty fields.');
35
36
  }
36
37
 
37
38
  const numBlobs = Math.ceil(fields.length / FIELDS_PER_BLOB);
38
- return Array.from({ length: numBlobs }, (_, i) =>
39
- Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)),
39
+ return await Promise.all(
40
+ Array.from({ length: numBlobs }, (_, i) =>
41
+ Blob.fromFields(fields.slice(i * FIELDS_PER_BLOB, (i + 1) * FIELDS_PER_BLOB)),
42
+ ),
40
43
  );
41
44
  }
42
45
 
@@ -1,5 +1,6 @@
1
1
  import { BLS12_FQ_LIMBS, BLS12_FR_LIMBS } from '@aztec/constants';
2
- import { BLS12Fq, BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
2
+ import { BLS12Fq, BLS12Fr, BLS12Point } from '@aztec/foundation/curves/bls12';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
4
  import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize';
4
5
 
5
6
  /**
@@ -1,4 +1,5 @@
1
- import { BLS12Fr, BLS12Point, Fr } from '@aztec/foundation/fields';
1
+ import { BLS12Fr, BLS12Point } from '@aztec/foundation/curves/bls12';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
3
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
3
4
 
4
5
  import { inspect } from 'util';
@@ -1,4 +1,5 @@
1
- import { BLS12Fr, Fr } from '@aztec/foundation/fields';
1
+ import { BLS12Fr } from '@aztec/foundation/curves/bls12';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
3
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
3
4
 
4
5
  /**
@@ -1,4 +1,4 @@
1
- import { Fr } from '@aztec/foundation/fields';
1
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
2
  import { FieldReader } from '@aztec/foundation/serialize';
3
3
 
4
4
  import { BlobDeserializationError } from '../errors.js';
@@ -17,6 +17,18 @@ import { type TxBlobData, decodeTxBlobData, encodeTxBlobData } from './tx_blob_d
17
17
 
18
18
  // Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/block_blob_data.nr`.
19
19
 
20
+ export const NUM_BLOCK_END_BLOB_FIELDS = 6;
21
+ export const NUM_FIRST_BLOCK_END_BLOB_FIELDS = 7;
22
+ export const NUM_CHECKPOINT_END_MARKER_FIELDS = 1;
23
+
24
+ /**
25
+ * Returns the number of blob fields used for block end data.
26
+ * @param isFirstBlockInCheckpoint - Whether this is the first block in a checkpoint.
27
+ */
28
+ export function getNumBlockEndBlobFields(isFirstBlockInCheckpoint: boolean): number {
29
+ return isFirstBlockInCheckpoint ? NUM_FIRST_BLOCK_END_BLOB_FIELDS : NUM_BLOCK_END_BLOB_FIELDS;
30
+ }
31
+
20
32
  export interface BlockEndBlobData {
21
33
  blockEndMarker: BlockEndMarker;
22
34
  blockEndStateField: BlockEndStateField;
@@ -46,7 +58,7 @@ export function encodeBlockEndBlobData(blockEndBlobData: BlockEndBlobData): Fr[]
46
58
  export function decodeBlockEndBlobData(fields: Fr[] | FieldReader, isFirstBlock: boolean): BlockEndBlobData {
47
59
  const reader = FieldReader.asReader(fields);
48
60
 
49
- const numBlockEndData = isFirstBlock ? 7 : 6;
61
+ const numBlockEndData = getNumBlockEndBlobFields(isFirstBlock);
50
62
  if (numBlockEndData > reader.remainingFields()) {
51
63
  throw new BlobDeserializationError(
52
64
  `Incorrect encoding of blob fields: not enough fields for block end data. Expected ${numBlockEndData} fields, only ${reader.remainingFields()} remaining.`,
@@ -1,5 +1,6 @@
1
1
  import { BLOCK_END_PREFIX } from '@aztec/constants';
2
- import { Fr } from '@aztec/foundation/fields';
2
+ import { BlockNumber } from '@aztec/foundation/branded-types';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
4
 
4
5
  import { BlobDeserializationError } from '../errors.js';
5
6
 
@@ -11,12 +12,12 @@ const NUM_TXS_BIT_SIZE = 16n;
11
12
 
12
13
  export interface BlockEndMarker {
13
14
  timestamp: bigint;
14
- blockNumber: number;
15
+ blockNumber: BlockNumber;
15
16
  numTxs: number;
16
17
  }
17
18
 
18
19
  export function encodeBlockEndMarker(blockEndMarker: BlockEndMarker) {
19
- let value = BLOCK_END_PREFIX;
20
+ let value = BigInt(BLOCK_END_PREFIX);
20
21
  value <<= TIMESTAMP_BIT_SIZE;
21
22
  value += blockEndMarker.timestamp;
22
23
  value <<= BLOCK_NUMBER_BIT_SIZE;
@@ -30,13 +31,13 @@ export function decodeBlockEndMarker(field: Fr): BlockEndMarker {
30
31
  let value = field.toBigInt();
31
32
  const numTxs = Number(value & (2n ** NUM_TXS_BIT_SIZE - 1n));
32
33
  value >>= NUM_TXS_BIT_SIZE;
33
- const blockNumber = Number(value & (2n ** BLOCK_NUMBER_BIT_SIZE - 1n));
34
+ const blockNumber = BlockNumber(Number(value & (2n ** BLOCK_NUMBER_BIT_SIZE - 1n)));
34
35
  value >>= BLOCK_NUMBER_BIT_SIZE;
35
36
  const timestamp = value & (2n ** TIMESTAMP_BIT_SIZE - 1n);
36
37
  value >>= TIMESTAMP_BIT_SIZE;
37
38
 
38
39
  const prefix = value;
39
- if (prefix !== BLOCK_END_PREFIX) {
40
+ if (prefix !== BigInt(BLOCK_END_PREFIX)) {
40
41
  throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid block end marker.`);
41
42
  }
42
43
 
@@ -50,5 +51,5 @@ export function decodeBlockEndMarker(field: Fr): BlockEndMarker {
50
51
  // Check if a field is a block end marker. Used before decoding to check if it has reached the end of the block.
51
52
  export function isBlockEndMarker(field: Fr): boolean {
52
53
  const prefix = field.toBigInt() >> (NUM_TXS_BIT_SIZE + BLOCK_NUMBER_BIT_SIZE + TIMESTAMP_BIT_SIZE);
53
- return prefix === BLOCK_END_PREFIX;
54
+ return prefix === BigInt(BLOCK_END_PREFIX);
54
55
  }
@@ -4,7 +4,7 @@ import {
4
4
  NULLIFIER_TREE_HEIGHT,
5
5
  PUBLIC_DATA_TREE_HEIGHT,
6
6
  } from '@aztec/constants';
7
- import { Fr } from '@aztec/foundation/fields';
7
+ import { Fr } from '@aztec/foundation/curves/bn254';
8
8
 
9
9
  import { BlobDeserializationError } from '../errors.js';
10
10
 
@@ -1,8 +1,15 @@
1
- import { Fr } from '@aztec/foundation/fields';
1
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
2
  import { BufferReader, FieldReader } from '@aztec/foundation/serialize';
3
3
 
4
4
  import { BlobDeserializationError } from '../errors.js';
5
- import { type BlockBlobData, decodeBlockBlobData, encodeBlockBlobData } from './block_blob_data.js';
5
+ import {
6
+ type BlockBlobData,
7
+ NUM_BLOCK_END_BLOB_FIELDS,
8
+ NUM_CHECKPOINT_END_MARKER_FIELDS,
9
+ NUM_FIRST_BLOCK_END_BLOB_FIELDS,
10
+ decodeBlockBlobData,
11
+ encodeBlockBlobData,
12
+ } from './block_blob_data.js';
6
13
  import {
7
14
  type CheckpointEndMarker,
8
15
  decodeCheckpointEndMarker,
@@ -25,7 +32,7 @@ export function encodeCheckpointBlobData(checkpointBlobData: CheckpointBlobData)
25
32
 
26
33
  export function encodeCheckpointBlobDataFromBlocks(blocks: BlockBlobData[]): Fr[] {
27
34
  const blocksBlobFields = blocks.map(block => encodeBlockBlobData(block)).flat();
28
- const numBlobFields = blocksBlobFields.length + 1; // +1 for the checkpoint end marker.
35
+ const numBlobFields = blocksBlobFields.length + NUM_CHECKPOINT_END_MARKER_FIELDS;
29
36
  return blocksBlobFields.concat(encodeCheckpointEndMarker({ numBlobFields }));
30
37
  }
31
38
 
@@ -87,9 +94,9 @@ export function getTotalNumBlobFieldsFromTxs(txsPerBlock: TxStartMarker[][]): nu
87
94
  }
88
95
 
89
96
  return (
90
- (numBlocks ? 1 : 0) + // l1ToL2Messages root in the first block
91
- numBlocks * 6 + // 6 fields for each block end blob data.
97
+ (numBlocks ? NUM_FIRST_BLOCK_END_BLOB_FIELDS - NUM_BLOCK_END_BLOB_FIELDS : 0) + // l1ToL2Messages root in the first block
98
+ numBlocks * NUM_BLOCK_END_BLOB_FIELDS + // 6 fields for each block end blob data.
92
99
  txsPerBlock.reduce((total, txs) => total + txs.reduce((total, tx) => total + tx.numBlobFields, 0), 0) +
93
- 1 // checkpoint end marker
100
+ NUM_CHECKPOINT_END_MARKER_FIELDS // checkpoint end marker
94
101
  );
95
102
  }
@@ -1,5 +1,5 @@
1
1
  import { CHECKPOINT_END_PREFIX } from '@aztec/constants';
2
- import { Fr } from '@aztec/foundation/fields';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
3
 
4
4
  import { BlobDeserializationError } from '../errors.js';
5
5
 
@@ -12,7 +12,7 @@ export interface CheckpointEndMarker {
12
12
  }
13
13
 
14
14
  export function encodeCheckpointEndMarker(checkpointEndMarker: CheckpointEndMarker) {
15
- let value = CHECKPOINT_END_PREFIX;
15
+ let value = BigInt(CHECKPOINT_END_PREFIX);
16
16
  value <<= NUM_BLOB_FIELDS_BIT_SIZE;
17
17
  value += BigInt(checkpointEndMarker.numBlobFields);
18
18
  return new Fr(value);
@@ -24,7 +24,7 @@ export function decodeCheckpointEndMarker(field: Fr): CheckpointEndMarker {
24
24
  value >>= NUM_BLOB_FIELDS_BIT_SIZE;
25
25
 
26
26
  const prefix = value;
27
- if (prefix !== CHECKPOINT_END_PREFIX) {
27
+ if (prefix !== BigInt(CHECKPOINT_END_PREFIX)) {
28
28
  throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid checkpoint end marker.`);
29
29
  }
30
30
 
@@ -36,5 +36,5 @@ export function decodeCheckpointEndMarker(field: Fr): CheckpointEndMarker {
36
36
  // Check if a field is a checkpoint end marker. Used to check if it has reached the end of the blob fields.
37
37
  export function isCheckpointEndMarker(field: Fr): boolean {
38
38
  const prefix = field.toBigInt() >> NUM_BLOB_FIELDS_BIT_SIZE;
39
- return prefix === CHECKPOINT_END_PREFIX;
39
+ return prefix === BigInt(CHECKPOINT_END_PREFIX);
40
40
  }
@@ -9,7 +9,8 @@ import {
9
9
  PRIVATE_LOG_SIZE_IN_FIELDS,
10
10
  } from '@aztec/constants';
11
11
  import { makeTuple } from '@aztec/foundation/array';
12
- import { Fr } from '@aztec/foundation/fields';
12
+ import { BlockNumber } from '@aztec/foundation/branded-types';
13
+ import { Fr } from '@aztec/foundation/curves/bn254';
13
14
 
14
15
  import type { BlockBlobData, BlockEndBlobData } from './block_blob_data.js';
15
16
  import type { BlockEndMarker } from './block_end_marker.js';
@@ -109,7 +110,7 @@ export function makeBlockEndMarker({
109
110
  }: { seed?: number } & Partial<BlockEndMarker> = {}): BlockEndMarker {
110
111
  return {
111
112
  numTxs: seed,
112
- blockNumber: seed + 1,
113
+ blockNumber: BlockNumber(seed + 1),
113
114
  timestamp: BigInt(seed + 2),
114
115
  ...overrides,
115
116
  };
@@ -1,5 +1,5 @@
1
1
  import { chunk } from '@aztec/foundation/collection';
2
- import { Fr } from '@aztec/foundation/fields';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
3
  import { FieldReader } from '@aztec/foundation/serialize';
4
4
 
5
5
  import { BlobDeserializationError } from '../errors.js';
@@ -1,5 +1,5 @@
1
1
  import { TX_START_PREFIX } from '@aztec/constants';
2
- import { Fr } from '@aztec/foundation/fields';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
3
 
4
4
  import { BlobDeserializationError } from '../errors.js';
5
5
 
@@ -30,7 +30,7 @@ export interface TxStartMarker {
30
30
  }
31
31
 
32
32
  export function encodeTxStartMarker(txStartMarker: TxStartMarker): Fr {
33
- let value = TX_START_PREFIX;
33
+ let value = BigInt(TX_START_PREFIX);
34
34
  value <<= NUM_NOTE_HASH_BIT_SIZE;
35
35
  value += BigInt(txStartMarker.numNoteHashes);
36
36
  value <<= NUM_NULLIFIER_BIT_SIZE;
@@ -78,7 +78,7 @@ export function decodeTxStartMarker(field: Fr): TxStartMarker {
78
78
  value >>= NUM_NOTE_HASH_BIT_SIZE;
79
79
 
80
80
  const prefix = value;
81
- if (prefix !== TX_START_PREFIX) {
81
+ if (prefix !== BigInt(TX_START_PREFIX)) {
82
82
  throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid tx start marker.`);
83
83
  }
84
84
 
package/src/hash.ts CHANGED
@@ -1,7 +1,9 @@
1
- import { poseidon2Hash, sha256, sha256ToField } from '@aztec/foundation/crypto';
2
- import { BLS12Fr, Fr } from '@aztec/foundation/fields';
1
+ import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
2
+ import { sha256, sha256ToField } from '@aztec/foundation/crypto/sha256';
3
+ import { BLS12Fr } from '@aztec/foundation/curves/bls12';
4
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
5
 
4
- import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, kzg } from './kzg_context.js';
6
+ import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, getKzg } from './kzg_context.js';
5
7
  import { SpongeBlob } from './sponge_blob.js';
6
8
 
7
9
  const VERSIONED_HASH_VERSION_KZG = 0x01;
@@ -42,12 +44,12 @@ export async function computeBlobFieldsHash(fields: Fr[]): Promise<Fr> {
42
44
  return sponge.squeeze();
43
45
  }
44
46
 
45
- export function computeBlobCommitment(data: Uint8Array): Buffer {
47
+ export async function computeBlobCommitment(data: Uint8Array): Promise<Buffer> {
46
48
  if (data.length !== BYTES_PER_BLOB) {
47
49
  throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. Got ${data.length}.`);
48
50
  }
49
51
 
50
- return Buffer.from(kzg.blobToKzgCommitment(data));
52
+ return Buffer.from(await getKzg().asyncBlobToKzgCommitment(data));
51
53
  }
52
54
 
53
55
  /**
package/src/index.ts CHANGED
@@ -7,3 +7,4 @@ export * from './encoding/index.js';
7
7
  export * from './hash.js';
8
8
  export * from './interface.js';
9
9
  export * from './sponge_blob.js';
10
+ export * from './kzg_context.js';
package/src/interface.ts CHANGED
@@ -3,6 +3,5 @@
3
3
  */
4
4
  export interface BlobJson {
5
5
  blob: string;
6
- index: string;
7
6
  kzg_commitment: string;
8
7
  }
@@ -2,4 +2,15 @@ import { DasContextJs } from '@crate-crypto/node-eth-kzg';
2
2
 
3
3
  export * from '@crate-crypto/node-eth-kzg';
4
4
 
5
- export const kzg = DasContextJs.create({ usePrecomp: true });
5
+ let kzgInstance: DasContextJs | undefined;
6
+
7
+ /**
8
+ * Returns the lazily-initialized KZG context.
9
+ * The first call takes ~3 seconds to initialize the precomputation tables.
10
+ */
11
+ export function getKzg(): DasContextJs {
12
+ if (!kzgInstance) {
13
+ kzgInstance = DasContextJs.create({ usePrecomp: true });
14
+ }
15
+ return kzgInstance;
16
+ }
@@ -1,7 +1,7 @@
1
1
  import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, TWO_POW_64 } from '@aztec/constants';
2
2
  import { type FieldsOf, makeTuple } from '@aztec/foundation/array';
3
- import { poseidon2Permutation } from '@aztec/foundation/crypto';
4
- import { Fr } from '@aztec/foundation/fields';
3
+ import { poseidon2Permutation } from '@aztec/foundation/crypto/poseidon';
4
+ import { Fr } from '@aztec/foundation/curves/bn254';
5
5
  import {
6
6
  BufferReader,
7
7
  FieldReader,
package/src/testing.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  import { makeTuple } from '@aztec/foundation/array';
2
- import { BLS12Fq, BLS12Fr, BLS12Point, BLSPointNotOnCurveError, Fr } from '@aztec/foundation/fields';
2
+ import { BLS12Fq, BLS12Fr, BLS12Point, BLSPointNotOnCurveError } from '@aztec/foundation/curves/bls12';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
4
 
4
5
  import { Blob } from './blob.js';
5
6
  import { BlobAccumulator } from './circuit_types/blob_accumulator.js';
@@ -88,6 +89,6 @@ export function makeFinalBlobBatchingChallenges(seed = 1) {
88
89
  * @param length
89
90
  * @returns
90
91
  */
91
- export function makeRandomBlob(length: number): Blob {
92
+ export function makeRandomBlob(length: number): Promise<Blob> {
92
93
  return Blob.fromFields([...Array.from({ length: length }, () => Fr.random())]);
93
94
  }