@aztec/blob-lib 0.0.0-test.1 → 0.0.1-commit.0b941701

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dest/batched_blob.d.ts +31 -0
  2. package/dest/batched_blob.d.ts.map +1 -0
  3. package/dest/batched_blob.js +20 -0
  4. package/dest/blob.d.ts +61 -102
  5. package/dest/blob.d.ts.map +1 -1
  6. package/dest/blob.js +89 -188
  7. package/dest/blob_batching.d.ts +118 -0
  8. package/dest/blob_batching.d.ts.map +1 -0
  9. package/dest/blob_batching.js +225 -0
  10. package/dest/blob_utils.d.ts +40 -0
  11. package/dest/blob_utils.d.ts.map +1 -0
  12. package/dest/blob_utils.js +69 -0
  13. package/dest/circuit_types/blob_accumulator.d.ts +23 -0
  14. package/dest/circuit_types/blob_accumulator.d.ts.map +1 -0
  15. package/dest/circuit_types/blob_accumulator.js +62 -0
  16. package/dest/circuit_types/final_blob_accumulator.d.ts +23 -0
  17. package/dest/circuit_types/final_blob_accumulator.d.ts.map +1 -0
  18. package/dest/circuit_types/final_blob_accumulator.js +66 -0
  19. package/dest/circuit_types/final_blob_batching_challenges.d.ts +16 -0
  20. package/dest/circuit_types/final_blob_batching_challenges.d.ts.map +1 -0
  21. package/dest/circuit_types/final_blob_batching_challenges.js +26 -0
  22. package/dest/circuit_types/index.d.ts +4 -0
  23. package/dest/circuit_types/index.d.ts.map +1 -0
  24. package/dest/circuit_types/index.js +4 -0
  25. package/dest/encoding/block_blob_data.d.ts +30 -0
  26. package/dest/encoding/block_blob_data.d.ts.map +1 -0
  27. package/dest/encoding/block_blob_data.js +75 -0
  28. package/dest/encoding/block_end_marker.d.ts +11 -0
  29. package/dest/encoding/block_end_marker.d.ts.map +1 -0
  30. package/dest/encoding/block_end_marker.js +41 -0
  31. package/dest/encoding/block_end_state_field.d.ts +12 -0
  32. package/dest/encoding/block_end_state_field.d.ts.map +1 -0
  33. package/dest/encoding/block_end_state_field.js +39 -0
  34. package/dest/encoding/checkpoint_blob_data.d.ts +15 -0
  35. package/dest/encoding/checkpoint_blob_data.d.ts.map +1 -0
  36. package/dest/encoding/checkpoint_blob_data.js +67 -0
  37. package/dest/encoding/checkpoint_end_marker.d.ts +8 -0
  38. package/dest/encoding/checkpoint_end_marker.d.ts.map +1 -0
  39. package/dest/encoding/checkpoint_end_marker.js +28 -0
  40. package/dest/encoding/fixtures.d.ts +41 -0
  41. package/dest/encoding/fixtures.d.ts.map +1 -0
  42. package/dest/encoding/fixtures.js +140 -0
  43. package/dest/encoding/index.d.ts +10 -0
  44. package/dest/encoding/index.d.ts.map +1 -0
  45. package/dest/encoding/index.js +9 -0
  46. package/dest/encoding/tx_blob_data.d.ts +19 -0
  47. package/dest/encoding/tx_blob_data.d.ts.map +1 -0
  48. package/dest/encoding/tx_blob_data.js +79 -0
  49. package/dest/encoding/tx_start_marker.d.ts +16 -0
  50. package/dest/encoding/tx_start_marker.d.ts.map +1 -0
  51. package/dest/encoding/tx_start_marker.js +77 -0
  52. package/dest/errors.d.ts +1 -1
  53. package/dest/errors.d.ts.map +1 -1
  54. package/dest/hash.d.ts +43 -0
  55. package/dest/hash.d.ts.map +1 -0
  56. package/dest/hash.js +80 -0
  57. package/dest/index.d.ts +8 -4
  58. package/dest/index.d.ts.map +1 -1
  59. package/dest/index.js +7 -16
  60. package/dest/interface.d.ts +1 -3
  61. package/dest/interface.d.ts.map +1 -1
  62. package/dest/kzg_context.d.ts +8 -0
  63. package/dest/kzg_context.d.ts.map +1 -0
  64. package/dest/kzg_context.js +14 -0
  65. package/dest/sponge_blob.d.ts +15 -17
  66. package/dest/sponge_blob.d.ts.map +1 -1
  67. package/dest/sponge_blob.js +26 -30
  68. package/dest/testing.d.ts +13 -25
  69. package/dest/testing.d.ts.map +1 -1
  70. package/dest/testing.js +38 -53
  71. package/dest/types.d.ts +17 -0
  72. package/dest/types.d.ts.map +1 -0
  73. package/dest/types.js +4 -0
  74. package/package.json +20 -15
  75. package/src/batched_blob.ts +26 -0
  76. package/src/blob.ts +85 -223
  77. package/src/blob_batching.ts +300 -0
  78. package/src/blob_utils.ts +82 -0
  79. package/src/circuit_types/blob_accumulator.ts +96 -0
  80. package/src/circuit_types/final_blob_accumulator.ts +76 -0
  81. package/src/circuit_types/final_blob_batching_challenges.ts +30 -0
  82. package/src/circuit_types/index.ts +4 -0
  83. package/src/encoding/block_blob_data.ts +114 -0
  84. package/src/encoding/block_end_marker.ts +55 -0
  85. package/src/encoding/block_end_state_field.ts +59 -0
  86. package/src/encoding/checkpoint_blob_data.ts +102 -0
  87. package/src/encoding/checkpoint_end_marker.ts +40 -0
  88. package/src/encoding/fixtures.ts +210 -0
  89. package/src/encoding/index.ts +9 -0
  90. package/src/encoding/tx_blob_data.ts +116 -0
  91. package/src/encoding/tx_start_marker.ts +97 -0
  92. package/src/hash.ts +89 -0
  93. package/src/index.ts +7 -20
  94. package/src/interface.ts +0 -4
  95. package/src/kzg_context.ts +16 -0
  96. package/src/sponge_blob.ts +28 -31
  97. package/src/testing.ts +49 -50
  98. package/src/trusted_setup_bit_reversed.json +4100 -0
  99. package/src/types.ts +17 -0
  100. package/dest/blob_public_inputs.d.ts +0 -50
  101. package/dest/blob_public_inputs.d.ts.map +0 -1
  102. package/dest/blob_public_inputs.js +0 -146
  103. package/dest/encoding.d.ts +0 -66
  104. package/dest/encoding.d.ts.map +0 -1
  105. package/dest/encoding.js +0 -113
  106. package/src/blob_public_inputs.ts +0 -157
  107. package/src/encoding.ts +0 -138
@@ -0,0 +1,140 @@
1
+ import { FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH, MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_L2_TO_L1_MSGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_PRIVATE_LOGS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PRIVATE_LOG_SIZE_IN_FIELDS } from '@aztec/constants';
2
+ import { makeTuple } from '@aztec/foundation/array';
3
+ import { BlockNumber } from '@aztec/foundation/branded-types';
4
+ import { Fr } from '@aztec/foundation/curves/bn254';
5
+ import { getTotalNumBlobFieldsFromTxs } from './checkpoint_blob_data.js';
6
+ import { getNumTxBlobFields } from './tx_blob_data.js';
7
+ // Deterministic Fr field element built from a numeric seed.
+ const fr = (seed)=>new Fr(BigInt(seed));
8
/**
 * Creates a TxStartMarker fixture. With `isFullTx` set, every count/length
 * takes its protocol maximum; otherwise each defaults to 1. Any field may be
 * pinned via `overrides`; `numBlobFields` is derived from the (overridden)
 * counts via `getNumTxBlobFields` unless explicitly supplied.
 */
export function makeTxStartMarker({ isFullTx = false, ...overrides } = {}) {
    const maxOrOne = (max) => (isFullTx ? max : 1);
    const marker = {
        revertCode: 0,
        numNoteHashes: maxOrOne(MAX_NOTE_HASHES_PER_TX),
        numNullifiers: maxOrOne(MAX_NULLIFIERS_PER_TX),
        numL2ToL1Msgs: maxOrOne(MAX_L2_TO_L1_MSGS_PER_TX),
        numPublicDataWrites: maxOrOne(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX),
        numPrivateLogs: maxOrOne(MAX_PRIVATE_LOGS_PER_TX),
        privateLogsLength: maxOrOne(PRIVATE_LOG_SIZE_IN_FIELDS * MAX_PRIVATE_LOGS_PER_TX),
        publicLogsLength: maxOrOne(FLAT_PUBLIC_LOGS_PAYLOAD_LENGTH),
        contractClassLogLength: maxOrOne(MAX_CONTRACT_CLASS_LOGS_PER_TX),
        ...overrides,
    };
    // Respect an explicit numBlobFields override; otherwise derive it from the marker.
    return {
        ...marker,
        numBlobFields: overrides.numBlobFields ?? getNumTxBlobFields(marker),
    };
}
27
/**
 * Creates a TxBlobData fixture whose effect arrays are sized according to its
 * tx start marker. Individual properties (including the nested txStartMarker)
 * can be pinned via `overrides`; everything else is derived deterministically
 * from `seed`.
 * @throws Error when the requested privateLogsLength cannot be split into
 *   numPrivateLogs non-empty logs of at most PRIVATE_LOG_SIZE_IN_FIELDS fields.
 */
export function makeTxBlobData({ isFullTx = false, seed = 1, ...overrides } = {}) {
    const { txStartMarker: txStartMarkerOverrides, ...txBlobDataOverrides } = overrides;
    const txStartMarker = makeTxStartMarker({
        isFullTx,
        ...txStartMarkerOverrides,
    });
    const {
        numNoteHashes,
        numNullifiers,
        numL2ToL1Msgs,
        numPublicDataWrites,
        numPrivateLogs,
        privateLogsLength,
        publicLogsLength,
        contractClassLogLength,
    } = txStartMarker;
    // Sanity-check that the total private-log length is achievable with
    // `numPrivateLogs` logs of 1..PRIVATE_LOG_SIZE_IN_FIELDS fields each.
    if (privateLogsLength > numPrivateLogs * PRIVATE_LOG_SIZE_IN_FIELDS) {
        throw new Error('Private logs length is too large');
    }
    if (privateLogsLength < numPrivateLogs) {
        throw new Error('Private logs length is too small');
    }
    // Distribute `privateLogsLength` fields across the logs as evenly as
    // possible while keeping every remaining log representable.
    const privateLogs = [];
    let logsLeft = numPrivateLogs;
    let fieldsLeft = privateLogsLength;
    for (let i = 0; i < numPrivateLogs; i++) {
        const minLength = Math.max(1, fieldsLeft - (logsLeft - 1) * PRIVATE_LOG_SIZE_IN_FIELDS);
        const length = Math.max(minLength, Math.floor(fieldsLeft / logsLeft));
        privateLogs.push(makeTuple(length, fr, seed + 0x400 + i * PRIVATE_LOG_SIZE_IN_FIELDS));
        logsLeft -= 1;
        fieldsLeft -= length;
    }
    // A present contract class log carries one extra field for the contract address.
    const contractClassLogNumFields = contractClassLogLength > 0 ? contractClassLogLength + 1 : 0;
    return {
        txStartMarker,
        txHash: fr(seed + 0x700),
        transactionFee: fr(seed + 0x800),
        noteHashes: makeTuple(numNoteHashes, fr, seed),
        nullifiers: makeTuple(numNullifiers, fr, seed + 0x100),
        l2ToL1Msgs: makeTuple(numL2ToL1Msgs, fr, seed + 0x200),
        publicDataWrites: makeTuple(numPublicDataWrites, (i) => [fr(seed + i * 2), fr(seed + i * 2 + 1)], seed + 0x300),
        privateLogs,
        publicLogs: makeTuple(publicLogsLength, fr, seed + 0x500),
        contractClassLog: makeTuple(contractClassLogNumFields, fr, seed + 0x600),
        ...txBlobDataOverrides,
    };
}
73
/** Creates a BlockEndMarker fixture derived deterministically from `seed`. */
export function makeBlockEndMarker({ seed = 1, ...overrides } = {}) {
    const defaults = {
        numTxs: seed,
        blockNumber: BlockNumber(seed + 1),
        timestamp: BigInt(seed + 2),
    };
    return { ...defaults, ...overrides };
}
81
/** Creates a BlockEndStateField fixture; each value is a fixed offset from `seed`. */
export function makeBlockEndStateField({ seed = 1, ...overrides } = {}) {
    const defaults = {
        l1ToL2MessageNextAvailableLeafIndex: seed,
        noteHashNextAvailableLeafIndex: seed + 0x10,
        nullifierNextAvailableLeafIndex: seed + 0x20,
        publicDataNextAvailableLeafIndex: seed + 0x30,
        totalManaUsed: BigInt(seed + 0x40),
    };
    return { ...defaults, ...overrides };
}
91
/**
 * Creates a BlockEndBlobData fixture. `l1ToL2MessageRoot` is only populated
 * when `isFirstBlock` is true; later blocks leave it undefined. Nested marker
 * and state-field overrides are forwarded to their respective factories.
 */
export function makeBlockEndBlobData({ isFirstBlock = true, seed = 1, ...overrides } = {}) {
    const {
        blockEndMarker: blockEndMarkerOverrides,
        blockEndStateField: blockEndStateFieldOverrides,
        ...blockEndBlobDataOverrides
    } = overrides;
    const blockEndMarker = makeBlockEndMarker({ seed, ...blockEndMarkerOverrides });
    const blockEndStateField = makeBlockEndStateField({
        seed: seed + 0x100,
        ...blockEndStateFieldOverrides,
    });
    return {
        blockEndMarker,
        blockEndStateField,
        lastArchiveRoot: fr(seed + 0x200),
        noteHashRoot: fr(seed + 0x300),
        nullifierRoot: fr(seed + 0x400),
        publicDataRoot: fr(seed + 0x500),
        l1ToL2MessageRoot: isFirstBlock ? fr(seed + 0x600) : undefined,
        ...blockEndBlobDataOverrides,
    };
}
110
/**
 * Creates a BlockBlobData fixture: `numTxs` deterministic txs followed by the
 * block-end data. The end marker's `numTxs` is kept consistent with the
 * generated tx count unless overridden.
 */
export function makeBlockBlobData({ numTxs = 1, isFirstBlock = true, isFullTx = false, seed = 1, ...overrides } = {}) {
    const txs = makeTuple(numTxs, (i) => makeTxBlobData({ isFullTx, seed: seed + i * 0x100 }), seed);
    const blockEnd = makeBlockEndBlobData({
        seed: seed + 0x1000 * numTxs,
        blockEndMarker: {
            numTxs,
        },
        isFirstBlock,
        ...overrides,
    });
    return { txs, ...blockEnd };
}
126
/**
 * Creates a CheckpointBlobData fixture. Blocks can be supplied via
 * `overrides.blocks`; otherwise `numBlocks` deterministic blocks are built.
 * The end marker's `numBlobFields` is derived from every tx start marker
 * unless pinned through `overrides.checkpointEndMarker`.
 *
 * NOTE(review): `isFirstBlock: i === seed` presumes `makeTuple` offsets the
 * callback index by its third argument (the seed), so the first generated
 * block sees `i === seed` — confirm against @aztec/foundation/array.
 */
export function makeCheckpointBlobData({ numBlocks = 1, numTxsPerBlock = 1, isFullTx = false, seed = 1, ...overrides } = {}) {
    const blocks =
        overrides.blocks ??
        makeTuple(
            numBlocks,
            (i) =>
                makeBlockBlobData({
                    numTxs: numTxsPerBlock,
                    isFirstBlock: i === seed,
                    isFullTx,
                    seed: seed + i * 0x1000,
                }),
            seed,
        );
    const txStartMarkers = blocks.map((block) => block.txs.map((tx) => tx.txStartMarker));
    const numBlobFields = overrides.checkpointEndMarker?.numBlobFields ?? getTotalNumBlobFieldsFromTxs(txStartMarkers);
    return {
        blocks,
        checkpointEndMarker: {
            numBlobFields,
        },
    };
}
@@ -0,0 +1,10 @@
1
+ export * from './block_blob_data.js';
2
+ export * from './block_end_marker.js';
3
+ export * from './block_end_state_field.js';
4
+ export * from './checkpoint_blob_data.js';
5
+ export * from './checkpoint_end_marker.js';
6
+ export * from './fixtures.js';
7
+ export * from './tx_blob_data.js';
8
+ export * from './tx_start_marker.js';
9
+ export { BlobDeserializationError } from '../errors.js';
10
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9lbmNvZGluZy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLHNCQUFzQixDQUFDO0FBQ3JDLGNBQWMsdUJBQXVCLENBQUM7QUFDdEMsY0FBYyw0QkFBNEIsQ0FBQztBQUMzQyxjQUFjLDJCQUEyQixDQUFDO0FBQzFDLGNBQWMsNEJBQTRCLENBQUM7QUFDM0MsY0FBYyxlQUFlLENBQUM7QUFDOUIsY0FBYyxtQkFBbUIsQ0FBQztBQUNsQyxjQUFjLHNCQUFzQixDQUFDO0FBQ3JDLE9BQU8sRUFBRSx3QkFBd0IsRUFBRSxNQUFNLGNBQWMsQ0FBQyJ9
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/encoding/index.ts"],"names":[],"mappings":"AAAA,cAAc,sBAAsB,CAAC;AACrC,cAAc,uBAAuB,CAAC;AACtC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,2BAA2B,CAAC;AAC1C,cAAc,4BAA4B,CAAC;AAC3C,cAAc,eAAe,CAAC;AAC9B,cAAc,mBAAmB,CAAC;AAClC,cAAc,sBAAsB,CAAC;AACrC,OAAO,EAAE,wBAAwB,EAAE,MAAM,cAAc,CAAC"}
@@ -0,0 +1,9 @@
1
+ // Barrel module: re-exports the public API of every encoding submodule,
+ // plus the BlobDeserializationError thrown by the decoders.
+ export * from './block_blob_data.js';
2
+ export * from './block_end_marker.js';
3
+ export * from './block_end_state_field.js';
4
+ export * from './checkpoint_blob_data.js';
5
+ export * from './checkpoint_end_marker.js';
6
+ export * from './fixtures.js';
7
+ export * from './tx_blob_data.js';
8
+ export * from './tx_start_marker.js';
9
+ export { BlobDeserializationError } from '../errors.js';
@@ -0,0 +1,19 @@
1
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
+ import { FieldReader } from '@aztec/foundation/serialize';
3
+ import { type TxStartMarker } from './tx_start_marker.js';
4
+ export interface TxBlobData {
5
+ txStartMarker: TxStartMarker;
6
+ txHash: Fr;
7
+ transactionFee: Fr;
8
+ noteHashes: Fr[];
9
+ nullifiers: Fr[];
10
+ l2ToL1Msgs: Fr[];
11
+ publicDataWrites: [Fr, Fr][];
12
+ privateLogs: Fr[][];
13
+ publicLogs: Fr[];
14
+ contractClassLog: Fr[];
15
+ }
16
+ export declare function encodeTxBlobData(txBlobData: TxBlobData): Fr[];
17
+ export declare function decodeTxBlobData(fields: Fr[] | FieldReader): TxBlobData;
18
+ export declare function getNumTxBlobFields(txStartMarker: Omit<TxStartMarker, 'revertCode' | 'numBlobFields'>): number;
19
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHhfYmxvYl9kYXRhLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvZW5jb2RpbmcvdHhfYmxvYl9kYXRhLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSxnQ0FBZ0MsQ0FBQztBQUNwRCxPQUFPLEVBQUUsV0FBVyxFQUFFLE1BQU0sNkJBQTZCLENBQUM7QUFHMUQsT0FBTyxFQUFFLEtBQUssYUFBYSxFQUE0QyxNQUFNLHNCQUFzQixDQUFDO0FBSXBHLE1BQU0sV0FBVyxVQUFVO0lBQ3pCLGFBQWEsRUFBRSxhQUFhLENBQUM7SUFDN0IsTUFBTSxFQUFFLEVBQUUsQ0FBQztJQUNYLGNBQWMsRUFBRSxFQUFFLENBQUM7SUFDbkIsVUFBVSxFQUFFLEVBQUUsRUFBRSxDQUFDO0lBQ2pCLFVBQVUsRUFBRSxFQUFFLEVBQUUsQ0FBQztJQUNqQixVQUFVLEVBQUUsRUFBRSxFQUFFLENBQUM7SUFDakIsZ0JBQWdCLEVBQUUsQ0FBQyxFQUFFLEVBQUUsRUFBRSxDQUFDLEVBQUUsQ0FBQztJQUM3QixXQUFXLEVBQUUsRUFBRSxFQUFFLEVBQUUsQ0FBQztJQUNwQixVQUFVLEVBQUUsRUFBRSxFQUFFLENBQUM7SUFDakIsZ0JBQWdCLEVBQUUsRUFBRSxFQUFFLENBQUM7Q0FDeEI7QUFFRCx3QkFBZ0IsZ0JBQWdCLENBQUMsVUFBVSxFQUFFLFVBQVUsR0FBRyxFQUFFLEVBQUUsQ0FhN0Q7QUFFRCx3QkFBZ0IsZ0JBQWdCLENBQUMsTUFBTSxFQUFFLEVBQUUsRUFBRSxHQUFHLFdBQVcsR0FBRyxVQUFVLENBNkR2RTtBQUVELHdCQUFnQixrQkFBa0IsQ0FBQyxhQUFhLEVBQUUsSUFBSSxDQUFDLGFBQWEsRUFBRSxZQUFZLEdBQUcsZUFBZSxDQUFDLFVBZXBHIn0=
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tx_blob_data.d.ts","sourceRoot":"","sources":["../../src/encoding/tx_blob_data.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAG1D,OAAO,EAAE,KAAK,aAAa,EAA4C,MAAM,sBAAsB,CAAC;AAIpG,MAAM,WAAW,UAAU;IACzB,aAAa,EAAE,aAAa,CAAC;IAC7B,MAAM,EAAE,EAAE,CAAC;IACX,cAAc,EAAE,EAAE,CAAC;IACnB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,gBAAgB,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;IAC7B,WAAW,EAAE,EAAE,EAAE,EAAE,CAAC;IACpB,UAAU,EAAE,EAAE,EAAE,CAAC;IACjB,gBAAgB,EAAE,EAAE,EAAE,CAAC;CACxB;AAED,wBAAgB,gBAAgB,CAAC,UAAU,EAAE,UAAU,GAAG,EAAE,EAAE,CAa7D;AAED,wBAAgB,gBAAgB,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,WAAW,GAAG,UAAU,CA6DvE;AAED,wBAAgB,kBAAkB,CAAC,aAAa,EAAE,IAAI,CAAC,aAAa,EAAE,YAAY,GAAG,eAAe,CAAC,UAepG"}
@@ -0,0 +1,79 @@
1
+ import { chunk } from '@aztec/foundation/collection';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
+ import { FieldReader } from '@aztec/foundation/serialize';
4
+ import { BlobDeserializationError } from '../errors.js';
5
+ import { decodeTxStartMarker, encodeTxStartMarker } from './tx_start_marker.js';
6
/**
 * Flattens a TxBlobData into its blob-field representation: the start marker,
 * tx hash, transaction fee, then every effect array in canonical order. Each
 * private log is prefixed with a field holding its length.
 */
export function encodeTxBlobData(txBlobData) {
    // Prefix every private log with its length so the decoder knows how many fields to read.
    const privateLogFields = txBlobData.privateLogs.flatMap((log) => [new Fr(log.length), ...log]);
    return [
        encodeTxStartMarker(txBlobData.txStartMarker),
        txBlobData.txHash,
        txBlobData.transactionFee,
        ...txBlobData.noteHashes,
        ...txBlobData.nullifiers,
        ...txBlobData.l2ToL1Msgs,
        ...txBlobData.publicDataWrites.flat(),
        ...privateLogFields,
        ...txBlobData.publicLogs,
        ...txBlobData.contractClassLog,
    ];
}
23
/**
 * Decodes a single tx's blob data starting at the reader's current position.
 * The first field must be a valid tx start marker; the counts it encodes
 * determine how many fields each effect array consumes.
 * @throws BlobDeserializationError when fields run out before an array is complete.
 */
export function decodeTxBlobData(fields) {
    const reader = FieldReader.asReader(fields);
    if (reader.isFinished()) {
        throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for tx blob data.`);
    }
    const txStartMarker = decodeTxStartMarker(reader.readField());
    const ensureRemaining = (requiredFields, type) => {
        if (requiredFields > reader.remainingFields()) {
            throw new BlobDeserializationError(`Incorrect encoding of blob fields: not enough fields for ${type}. Expected ${requiredFields} fields, only ${reader.remainingFields()} remaining.`);
        }
    };
    // The marker's total field count includes the marker itself, already consumed above.
    ensureRemaining(txStartMarker.numBlobFields - 1, 'tx effect');
    const txHash = reader.readField();
    const transactionFee = reader.readField();
    ensureRemaining(txStartMarker.numNoteHashes, 'note hashes');
    const noteHashes = reader.readFieldArray(txStartMarker.numNoteHashes);
    ensureRemaining(txStartMarker.numNullifiers, 'nullifiers');
    const nullifiers = reader.readFieldArray(txStartMarker.numNullifiers);
    ensureRemaining(txStartMarker.numL2ToL1Msgs, 'l2-to-l1 messages');
    const l2ToL1Msgs = reader.readFieldArray(txStartMarker.numL2ToL1Msgs);
    // Each public data write occupies two fields: leaf slot and value.
    ensureRemaining(txStartMarker.numPublicDataWrites * 2, 'public data writes');
    const publicDataWrites = chunk(reader.readFieldArray(txStartMarker.numPublicDataWrites * 2), 2);
    // Each private log is prefixed by a u32 length field.
    const privateLogs = [];
    for (let i = 0; i < txStartMarker.numPrivateLogs; i++) {
        const length = reader.readU32();
        ensureRemaining(length, 'private log');
        privateLogs.push(reader.readFieldArray(length));
    }
    ensureRemaining(txStartMarker.publicLogsLength, 'public logs');
    const publicLogs = reader.readFieldArray(txStartMarker.publicLogsLength);
    // A non-empty contract class log carries one extra field for the contract address.
    const contractClassLogNumFields = txStartMarker.contractClassLogLength > 0 ? txStartMarker.contractClassLogLength + 1 : 0;
    ensureRemaining(contractClassLogNumFields, 'contract class logs');
    const contractClassLog = reader.readFieldArray(contractClassLogNumFields);
    return {
        txStartMarker,
        txHash,
        transactionFee,
        noteHashes,
        nullifiers,
        l2ToL1Msgs,
        publicDataWrites,
        privateLogs,
        publicLogs,
        contractClassLog,
    };
}
71
/**
 * Returns the total number of blob fields a tx occupies, matching the layout
 * produced by `encodeTxBlobData`.
 */
export function getNumTxBlobFields(txStartMarker) {
    const headerFields = 3; // tx start marker + tx hash + transaction fee
    const effectFields =
        txStartMarker.numNoteHashes +
        txStartMarker.numNullifiers +
        txStartMarker.numL2ToL1Msgs +
        txStartMarker.numPublicDataWrites * 2; // leaf slot and value per write
    const logFields =
        txStartMarker.numPrivateLogs + // one length-prefix field per private log
        txStartMarker.privateLogsLength +
        txStartMarker.publicLogsLength +
        txStartMarker.contractClassLogLength +
        (txStartMarker.contractClassLogLength > 0 ? 1 : 0); // contract address field
    return headerFields + effectFields + logFields;
}
@@ -0,0 +1,16 @@
1
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
+ export interface TxStartMarker {
3
+ numBlobFields: number;
4
+ revertCode: number;
5
+ numNoteHashes: number;
6
+ numNullifiers: number;
7
+ numL2ToL1Msgs: number;
8
+ numPublicDataWrites: number;
9
+ numPrivateLogs: number;
10
+ privateLogsLength: number;
11
+ publicLogsLength: number;
12
+ contractClassLogLength: number;
13
+ }
14
+ export declare function encodeTxStartMarker(txStartMarker: TxStartMarker): Fr;
15
+ export declare function decodeTxStartMarker(field: Fr): TxStartMarker;
16
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidHhfc3RhcnRfbWFya2VyLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvZW5jb2RpbmcvdHhfc3RhcnRfbWFya2VyLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUNBLE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSxnQ0FBZ0MsQ0FBQztBQWlCcEQsTUFBTSxXQUFXLGFBQWE7SUFDNUIsYUFBYSxFQUFFLE1BQU0sQ0FBQztJQUN0QixVQUFVLEVBQUUsTUFBTSxDQUFDO0lBQ25CLGFBQWEsRUFBRSxNQUFNLENBQUM7SUFDdEIsYUFBYSxFQUFFLE1BQU0sQ0FBQztJQUN0QixhQUFhLEVBQUUsTUFBTSxDQUFDO0lBQ3RCLG1CQUFtQixFQUFFLE1BQU0sQ0FBQztJQUM1QixjQUFjLEVBQUUsTUFBTSxDQUFDO0lBQ3ZCLGlCQUFpQixFQUFFLE1BQU0sQ0FBQztJQUMxQixnQkFBZ0IsRUFBRSxNQUFNLENBQUM7SUFDekIsc0JBQXNCLEVBQUUsTUFBTSxDQUFDO0NBQ2hDO0FBRUQsd0JBQWdCLG1CQUFtQixDQUFDLGFBQWEsRUFBRSxhQUFhLEdBQUcsRUFBRSxDQXVCcEU7QUFFRCx3QkFBZ0IsbUJBQW1CLENBQUMsS0FBSyxFQUFFLEVBQUUsR0FBRyxhQUFhLENBd0M1RCJ9
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tx_start_marker.d.ts","sourceRoot":"","sources":["../../src/encoding/tx_start_marker.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAiBpD,MAAM,WAAW,aAAa;IAC5B,aAAa,EAAE,MAAM,CAAC;IACtB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;IAC5B,cAAc,EAAE,MAAM,CAAC;IACvB,iBAAiB,EAAE,MAAM,CAAC;IAC1B,gBAAgB,EAAE,MAAM,CAAC;IACzB,sBAAsB,EAAE,MAAM,CAAC;CAChC;AAED,wBAAgB,mBAAmB,CAAC,aAAa,EAAE,aAAa,GAAG,EAAE,CAuBpE;AAED,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,EAAE,GAAG,aAAa,CAwC5D"}
@@ -0,0 +1,77 @@
1
+ import { TX_START_PREFIX } from '@aztec/constants';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
+ import { BlobDeserializationError } from '../errors.js';
4
+ // Must match the implementation in `noir-protocol-circuits/crates/types/src/blob_data/tx_blob_data.nr`.
+ // Bit widths used to pack/unpack each tx start marker field within a single Fr
+ // (least-significant field is numBlobFields; the TX_START_PREFIX sits above all of them).
5
+ const NUM_BLOB_FIELDS_BIT_SIZE = 32n;
6
+ const REVERT_CODE_BIT_SIZE = 8n;
7
+ const NUM_NOTE_HASH_BIT_SIZE = 16n;
8
+ const NUM_NULLIFIER_BIT_SIZE = 16n;
9
+ const NUM_L2_TO_L1_MSG_BIT_SIZE = 16n;
10
+ const NUM_PUBLIC_DATA_WRITE_BIT_SIZE = 16n;
11
+ const NUM_PRIVATE_LOG_BIT_SIZE = 16n;
12
+ const PRIVATE_LOGS_LENGTH_BIT_SIZE = 16n;
13
+ const PUBLIC_LOGS_LENGTH_BIT_SIZE = 32n;
14
+ const CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE = 16n;
15
/**
 * Packs a TxStartMarker into a single field element. The TX_START_PREFIX is
 * the most-significant component, followed by each count/length in the fixed
 * order below; `decodeTxStartMarker` unpacks in the reverse order.
 */
export function encodeTxStartMarker(txStartMarker) {
    // [value, bit width] pairs, packed most-significant-first.
    const packedFields = [
        [BigInt(txStartMarker.numNoteHashes), NUM_NOTE_HASH_BIT_SIZE],
        [BigInt(txStartMarker.numNullifiers), NUM_NULLIFIER_BIT_SIZE],
        [BigInt(txStartMarker.numL2ToL1Msgs), NUM_L2_TO_L1_MSG_BIT_SIZE],
        [BigInt(txStartMarker.numPublicDataWrites), NUM_PUBLIC_DATA_WRITE_BIT_SIZE],
        [BigInt(txStartMarker.numPrivateLogs), NUM_PRIVATE_LOG_BIT_SIZE],
        [BigInt(txStartMarker.privateLogsLength), PRIVATE_LOGS_LENGTH_BIT_SIZE],
        [BigInt(txStartMarker.publicLogsLength), PUBLIC_LOGS_LENGTH_BIT_SIZE],
        [BigInt(txStartMarker.contractClassLogLength), CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE],
        [BigInt(txStartMarker.revertCode), REVERT_CODE_BIT_SIZE],
        [BigInt(txStartMarker.numBlobFields), NUM_BLOB_FIELDS_BIT_SIZE],
    ];
    let packed = TX_START_PREFIX;
    for (const [fieldValue, bitSize] of packedFields) {
        packed = (packed << bitSize) + fieldValue;
    }
    return new Fr(packed);
}
39
/**
 * Unpacks a TxStartMarker from a single field element, reading components
 * least-significant-first (the reverse of `encodeTxStartMarker`).
 * @throws BlobDeserializationError when the leftover high bits are not TX_START_PREFIX.
 */
export function decodeTxStartMarker(field) {
    let remaining = field.toBigInt();
    // Extracts the lowest `bitSize` bits as a number and shifts them out.
    const takeBits = (bitSize) => {
        const extracted = Number(remaining & ((1n << bitSize) - 1n));
        remaining >>= bitSize;
        return extracted;
    };
    const numBlobFields = takeBits(NUM_BLOB_FIELDS_BIT_SIZE);
    const revertCode = takeBits(REVERT_CODE_BIT_SIZE);
    const contractClassLogLength = takeBits(CONTRACT_CLASS_LOG_LENGTH_BIT_SIZE);
    const publicLogsLength = takeBits(PUBLIC_LOGS_LENGTH_BIT_SIZE);
    const privateLogsLength = takeBits(PRIVATE_LOGS_LENGTH_BIT_SIZE);
    const numPrivateLogs = takeBits(NUM_PRIVATE_LOG_BIT_SIZE);
    const numPublicDataWrites = takeBits(NUM_PUBLIC_DATA_WRITE_BIT_SIZE);
    const numL2ToL1Msgs = takeBits(NUM_L2_TO_L1_MSG_BIT_SIZE);
    const numNullifiers = takeBits(NUM_NULLIFIER_BIT_SIZE);
    const numNoteHashes = takeBits(NUM_NOTE_HASH_BIT_SIZE);
    // Whatever is left must be exactly the prefix, or the field was not a marker.
    if (remaining !== TX_START_PREFIX) {
        throw new BlobDeserializationError(`Incorrect encoding of blob fields: invalid tx start marker.`);
    }
    return {
        numBlobFields,
        revertCode,
        numNoteHashes,
        numNullifiers,
        numL2ToL1Msgs,
        numPublicDataWrites,
        numPrivateLogs,
        privateLogsLength,
        publicLogsLength,
        contractClassLogLength,
    };
}
package/dest/errors.d.ts CHANGED
@@ -1,4 +1,4 @@
1
1
  export declare class BlobDeserializationError extends Error {
2
2
  constructor(message: string);
3
3
  }
4
- //# sourceMappingURL=errors.d.ts.map
4
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZXJyb3JzLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvZXJyb3JzLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLHFCQUFhLHdCQUF5QixTQUFRLEtBQUs7SUFDakQsWUFBWSxPQUFPLEVBQUUsTUFBTSxFQUcxQjtDQUNGIn0=
@@ -1 +1 @@
1
- {"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,qBAAa,wBAAyB,SAAQ,KAAK;gBACrC,OAAO,EAAE,MAAM;CAI5B"}
1
+ {"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,qBAAa,wBAAyB,SAAQ,KAAK;IACjD,YAAY,OAAO,EAAE,MAAM,EAG1B;CACF"}
package/dest/hash.d.ts ADDED
@@ -0,0 +1,43 @@
1
+ import { BLS12Fr } from '@aztec/foundation/curves/bls12';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
+ /**
4
+ * Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
5
+ */
6
+ export declare function computeEthVersionedBlobHash(commitment: Buffer): Buffer;
7
+ export declare function computeBlobsHash(evmVersionedBlobHashes: Buffer[]): Fr;
8
+ /**
9
+ * Computes a non-standard Poseidon2 hash over the provided fields.
10
+ *
11
+ * This function is used to compute:
12
+ * - `blobFieldsHash` of a checkpoint:
13
+ * Verified in the circuit against all fields absorbed into the blob sponge over the entire checkpoint.
14
+ * The exact number of fields is encoded in the checkpoint end marker (the last field).
15
+ * This hash is used when generating the challenge `z` for all blobs in the checkpoint.
16
+ * - `spongeBlobHash` of a block:
17
+ * Computed from the block's tx effects, its end-state, and the blob fields of all prior blocks in the same checkpoint.
18
+ * This hash is included in the block header.
19
+ */
20
+ export declare function computeBlobFieldsHash(fields: Fr[]): Promise<Fr>;
21
+ export declare function computeBlobCommitment(data: Uint8Array): Buffer;
22
+ /**
23
+ * Get the commitment fields of the blob, to compute the challenge z.
24
+ *
25
+ * The 48-byte commitment is encoded into two field elements:
26
+ * +-------------------+------------------------+
27
+ * | 31 bytes | 17 bytes |
28
+ * +-------------------+------------------------+
29
+ * | Field Element 1 | Field Element 2 |
30
+ * | [0][bytes 0-30] | [0...0][bytes 31-47] |
31
+ * +-------------------+------------------------+
32
+ *
33
+ * @param commitment - The commitment to convert to fields. Computed from `computeBlobCommitment`.
34
+ * @returns The fields representing the commitment buffer.
35
+ */
36
+ export declare function commitmentToFields(commitment: Buffer): [Fr, Fr];
37
+ export declare function computeChallengeZ(blobFieldsHash: Fr, commitment: Buffer): Promise<Fr>;
38
+ /**
39
+ * Hash each u128 limb of the noir bignum struct representing the BLS field, to mimic the hash accumulation in the
40
+ * rollup circuits.
41
+ */
42
+ export declare function hashNoirBigNumLimbs(field: BLS12Fr): Promise<Fr>;
43
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaGFzaC5kLnRzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2hhc2gudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBRUEsT0FBTyxFQUFFLE9BQU8sRUFBRSxNQUFNLGdDQUFnQyxDQUFDO0FBQ3pELE9BQU8sRUFBRSxFQUFFLEVBQUUsTUFBTSxnQ0FBZ0MsQ0FBQztBQU9wRDs7R0FFRztBQUNILHdCQUFnQiwyQkFBMkIsQ0FBQyxVQUFVLEVBQUUsTUFBTSxHQUFHLE1BQU0sQ0FJdEU7QUFPRCx3QkFBZ0IsZ0JBQWdCLENBQUMsc0JBQXNCLEVBQUUsTUFBTSxFQUFFLEdBQUcsRUFBRSxDQUVyRTtBQUVEOzs7Ozs7Ozs7OztHQVdHO0FBQ0gsd0JBQXNCLHFCQUFxQixDQUFDLE1BQU0sRUFBRSxFQUFFLEVBQUUsR0FBRyxPQUFPLENBQUMsRUFBRSxDQUFDLENBSXJFO0FBRUQsd0JBQWdCLHFCQUFxQixDQUFDLElBQUksRUFBRSxVQUFVLEdBQUcsTUFBTSxDQU05RDtBQUVEOzs7Ozs7Ozs7Ozs7O0dBYUc7QUFDSCx3QkFBZ0Isa0JBQWtCLENBQUMsVUFBVSxFQUFFLE1BQU0sR0FBRyxDQUFDLEVBQUUsRUFBRSxFQUFFLENBQUMsQ0FNL0Q7QUFFRCx3QkFBc0IsaUJBQWlCLENBQUMsY0FBYyxFQUFFLEVBQUUsRUFBRSxVQUFVLEVBQUUsTUFBTSxHQUFHLE9BQU8sQ0FBQyxFQUFFLENBQUMsQ0FHM0Y7QUFFRDs7O0dBR0c7QUFDSCx3QkFBc0IsbUJBQW1CLENBQUMsS0FBSyxFQUFFLE9BQU8sR0FBRyxPQUFPLENBQUMsRUFBRSxDQUFDLENBR3JFIn0=
@@ -0,0 +1 @@
1
+ {"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../src/hash.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,OAAO,EAAE,MAAM,gCAAgC,CAAC;AACzD,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAOpD;;GAEG;AACH,wBAAgB,2BAA2B,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAItE;AAOD,wBAAgB,gBAAgB,CAAC,sBAAsB,EAAE,MAAM,EAAE,GAAG,EAAE,CAErE;AAED;;;;;;;;;;;GAWG;AACH,wBAAsB,qBAAqB,CAAC,MAAM,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAIrE;AAED,wBAAgB,qBAAqB,CAAC,IAAI,EAAE,UAAU,GAAG,MAAM,CAM9D;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,kBAAkB,CAAC,UAAU,EAAE,MAAM,GAAG,CAAC,EAAE,EAAE,EAAE,CAAC,CAM/D;AAED,wBAAsB,iBAAiB,CAAC,cAAc,EAAE,EAAE,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,EAAE,CAAC,CAG3F;AAED;;;GAGG;AACH,wBAAsB,mBAAmB,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,EAAE,CAAC,CAGrE"}
package/dest/hash.js ADDED
@@ -0,0 +1,80 @@
1
+ import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
2
+ import { sha256, sha256ToField } from '@aztec/foundation/crypto/sha256';
3
+ import { Fr } from '@aztec/foundation/curves/bn254';
4
+ import { BYTES_PER_BLOB, BYTES_PER_COMMITMENT, getKzg } from './kzg_context.js';
5
+ import { SpongeBlob } from './sponge_blob.js';
6
+ const VERSIONED_HASH_VERSION_KZG = 0x01;
7
+ /**
8
+ * Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
9
+ */ export function computeEthVersionedBlobHash(commitment) {
10
+ const hash = sha256(commitment);
11
+ hash[0] = VERSIONED_HASH_VERSION_KZG;
12
+ return hash;
13
+ }
14
+ // TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, calculated from below blobCommitments:
15
+ // - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
16
+ // - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) = iteratively calculated hash of all blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()),
17
+ // exists so we can validate injected commitments to the rollup circuits correspond to the correct real blobs.
18
+ // We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
19
+ export function computeBlobsHash(evmVersionedBlobHashes) {
20
+ return sha256ToField(evmVersionedBlobHashes);
21
+ }
22
+ /**
23
+ * Computes a non-standard Poseidon2 hash over the provided fields.
24
+ *
25
+ * This function is used to compute:
26
+ * - `blobFieldsHash` of a checkpoint:
27
+ * Verified in the circuit against all fields absorbed into the blob sponge over the entire checkpoint.
28
+ * The exact number of fields is encoded in the checkpoint end marker (the last field).
29
+ * This hash is used when generating the challenge `z` for all blobs in the checkpoint.
30
+ * - `spongeBlobHash` of a block:
31
+ * Computed from the block's tx effects, its end-state, and the blob fields of all prior blocks in the same checkpoint.
32
+ * This hash is included in the block header.
33
+ */ export async function computeBlobFieldsHash(fields) {
34
+ const sponge = SpongeBlob.init();
35
+ await sponge.absorb(fields);
36
+ return sponge.squeeze();
37
+ }
38
+ export function computeBlobCommitment(data) {
39
+ if (data.length !== BYTES_PER_BLOB) {
40
+ throw new Error(`Expected ${BYTES_PER_BLOB} bytes per blob. Got ${data.length}.`);
41
+ }
42
+ return Buffer.from(getKzg().blobToKzgCommitment(data));
43
+ }
44
+ /**
45
+ * Get the commitment fields of the blob, to compute the challenge z.
46
+ *
47
+ * The 48-byte commitment is encoded into two field elements:
48
+ * +-------------------+------------------------+
49
+ * | 31 bytes | 17 bytes |
50
+ * +-------------------+------------------------+
51
+ * | Field Element 1 | Field Element 2 |
52
+ * | [0][bytes 0-30] | [0...0][bytes 31-47] |
53
+ * +-------------------+------------------------+
54
+ *
55
+ * @param commitment - The commitment to convert to fields. Computed from `computeBlobCommitment`.
56
+ * @returns The fields representing the commitment buffer.
57
+ */ export function commitmentToFields(commitment) {
58
+ if (commitment.length !== BYTES_PER_COMMITMENT) {
59
+ throw new Error(`Expected ${BYTES_PER_COMMITMENT} bytes for blob commitment. Got ${commitment.length}.`);
60
+ }
61
+ return [
62
+ new Fr(commitment.subarray(0, 31)),
63
+ new Fr(commitment.subarray(31, BYTES_PER_COMMITMENT))
64
+ ];
65
+ }
66
+ export async function computeChallengeZ(blobFieldsHash, commitment) {
67
+ const commitmentFields = commitmentToFields(commitment);
68
+ return await poseidon2Hash([
69
+ blobFieldsHash,
70
+ commitmentFields[0],
71
+ commitmentFields[1]
72
+ ]);
73
+ }
74
+ /**
75
+ * Hash each u128 limb of the noir bignum struct representing the BLS field, to mimic the hash accumulation in the
76
+ * rollup circuits.
77
+ */ export async function hashNoirBigNumLimbs(field) {
78
+ const num = field.toNoirBigNum();
79
+ return await poseidon2Hash(num.limbs.map(Fr.fromHexString));
80
+ }
package/dest/index.d.ts CHANGED
@@ -1,7 +1,11 @@
1
+ export * from './batched_blob.js';
1
2
  export * from './blob.js';
2
- export * from './encoding.js';
3
+ export * from './blob_batching.js';
4
+ export * from './blob_utils.js';
5
+ export * from './circuit_types/index.js';
6
+ export * from './encoding/index.js';
7
+ export * from './hash.js';
3
8
  export * from './interface.js';
4
- export * from './errors.js';
5
- export * from './blob_public_inputs.js';
6
9
  export * from './sponge_blob.js';
7
- //# sourceMappingURL=index.d.ts.map
10
+ export * from './kzg_context.js';
11
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxjQUFjLG1CQUFtQixDQUFDO0FBQ2xDLGNBQWMsV0FBVyxDQUFDO0FBQzFCLGNBQWMsb0JBQW9CLENBQUM7QUFDbkMsY0FBYyxpQkFBaUIsQ0FBQztBQUNoQyxjQUFjLDBCQUEwQixDQUFDO0FBQ3pDLGNBQWMscUJBQXFCLENBQUM7QUFDcEMsY0FBYyxXQUFXLENBQUM7QUFDMUIsY0FBYyxnQkFBZ0IsQ0FBQztBQUMvQixjQUFjLGtCQUFrQixDQUFDO0FBQ2pDLGNBQWMsa0JBQWtCLENBQUMifQ==
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAKA,cAAc,WAAW,CAAC;AAC1B,cAAc,eAAe,CAAC;AAC9B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,aAAa,CAAC;AAC5B,cAAc,yBAAyB,CAAC;AACxC,cAAc,kBAAkB,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,mBAAmB,CAAC;AAClC,cAAc,WAAW,CAAC;AAC1B,cAAc,oBAAoB,CAAC;AACnC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,kBAAkB,CAAC;AACjC,cAAc,kBAAkB,CAAC"}
package/dest/index.js CHANGED
@@ -1,19 +1,10 @@
1
- import cKzg from 'c-kzg';
2
- /* eslint-disable import/no-named-as-default-member */ const { loadTrustedSetup } = cKzg;
1
+ export * from './batched_blob.js';
3
2
  export * from './blob.js';
4
- export * from './encoding.js';
3
+ export * from './blob_batching.js';
4
+ export * from './blob_utils.js';
5
+ export * from './circuit_types/index.js';
6
+ export * from './encoding/index.js';
7
+ export * from './hash.js';
5
8
  export * from './interface.js';
6
- export * from './errors.js';
7
- export * from './blob_public_inputs.js';
8
9
  export * from './sponge_blob.js';
9
- try {
10
- loadTrustedSetup();
11
- } catch (error) {
12
- if (error.message.includes('trusted setup is already loaded')) {
13
- // NB: The c-kzg lib has no way of checking whether the setup is loaded or not,
14
- // and it throws an error if it's already loaded, even though nothing is wrong.
15
- // This is a rudimentary way of ensuring we load the trusted setup if we need it.
16
- } else {
17
- throw new Error(error);
18
- }
19
- }
10
+ export * from './kzg_context.js';
@@ -3,8 +3,6 @@
3
3
  */
4
4
  export interface BlobJson {
5
5
  blob: string;
6
- index?: number;
7
6
  kzg_commitment: string;
8
- kzg_proof: string;
9
7
  }
10
- //# sourceMappingURL=interface.d.ts.map
8
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW50ZXJmYWNlLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvaW50ZXJmYWNlLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOztHQUVHO0FBQ0gsTUFBTSxXQUFXLFFBQVE7SUFDdkIsSUFBSSxFQUFFLE1BQU0sQ0FBQztJQUNiLGNBQWMsRUFBRSxNQUFNLENBQUM7Q0FDeEIifQ==
@@ -1 +1 @@
1
- {"version":3,"file":"interface.d.ts","sourceRoot":"","sources":["../src/interface.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf,cAAc,EAAE,MAAM,CAAC;IAEvB,SAAS,EAAE,MAAM,CAAC;CACnB"}
1
+ {"version":3,"file":"interface.d.ts","sourceRoot":"","sources":["../src/interface.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,EAAE,MAAM,CAAC;CACxB"}
@@ -0,0 +1,8 @@
1
+ import { DasContextJs } from '@crate-crypto/node-eth-kzg';
2
+ export * from '@crate-crypto/node-eth-kzg';
3
+ /**
4
+ * Returns the lazily-initialized KZG context.
5
+ * The first call takes ~3 seconds to initialize the precomputation tables.
6
+ */
7
+ export declare function getKzg(): DasContextJs;
8
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoia3pnX2NvbnRleHQuZC50cyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9remdfY29udGV4dC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsWUFBWSxFQUFFLE1BQU0sNEJBQTRCLENBQUM7QUFFMUQsY0FBYyw0QkFBNEIsQ0FBQztBQUkzQzs7O0dBR0c7QUFDSCx3QkFBZ0IsTUFBTSxJQUFJLFlBQVksQ0FLckMifQ==
@@ -0,0 +1 @@
1
+ {"version":3,"file":"kzg_context.d.ts","sourceRoot":"","sources":["../src/kzg_context.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,cAAc,4BAA4B,CAAC;AAI3C;;;GAGG;AACH,wBAAgB,MAAM,IAAI,YAAY,CAKrC"}
@@ -0,0 +1,14 @@
1
+ import { DasContextJs } from '@crate-crypto/node-eth-kzg';
2
+ export * from '@crate-crypto/node-eth-kzg';
3
+ let kzgInstance;
4
+ /**
5
+ * Returns the lazily-initialized KZG context.
6
+ * The first call takes ~3 seconds to initialize the precomputation tables.
7
+ */ export function getKzg() {
8
+ if (!kzgInstance) {
9
+ kzgInstance = DasContextJs.create({
10
+ usePrecomp: true
11
+ });
12
+ }
13
+ return kzgInstance;
14
+ }