@haneullabs/walrus-wasm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +55 -0
- package/Cargo.lock +2010 -0
- package/Cargo.toml +39 -0
- package/README.md +1 -0
- package/index.js +7 -0
- package/index.mjs +5 -0
- package/nodejs/walrus_wasm.d.ts +48 -0
- package/nodejs/walrus_wasm.js +560 -0
- package/nodejs/walrus_wasm_bg.wasm +0 -0
- package/nodejs/walrus_wasm_bg.wasm.d.ts +23 -0
- package/package.json +38 -0
- package/rust-toolchain.toml +3 -0
- package/src/bls12381.rs +47 -0
- package/src/encoder.rs +178 -0
- package/src/lib.rs +9 -0
- package/test/encoder.test.ts +260 -0
- package/vitest.config.mts +17 -0
- package/web/walrus_wasm.d.ts +95 -0
- package/web/walrus_wasm.js +626 -0
- package/web/walrus_wasm_bg.js +590 -0
- package/web/walrus_wasm_bg.wasm +0 -0
- package/web/walrus_wasm_bg.wasm.d.ts +23 -0
package/package.json
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@haneullabs/walrus-wasm",
|
|
3
|
+
"collaborators": [
|
|
4
|
+
"jonaslindstrom"
|
|
5
|
+
],
|
|
6
|
+
"version": "0.1.0",
|
|
7
|
+
"private": false,
|
|
8
|
+
"main": "index.js",
|
|
9
|
+
"types": "./web/walrus_wasm.d.ts",
|
|
10
|
+
"repository": {
|
|
11
|
+
"type": "git",
|
|
12
|
+
"url": "git+https://github.com/GeunhwaJeong/haneul-ts-sdks.git"
|
|
13
|
+
},
|
|
14
|
+
"exports": {
|
|
15
|
+
".": {
|
|
16
|
+
"types": "./web/walrus_wasm.d.ts",
|
|
17
|
+
"browser": "./web/walrus_wasm.js",
|
|
18
|
+
"import": "./index.mjs",
|
|
19
|
+
"require": "./index.js"
|
|
20
|
+
},
|
|
21
|
+
"./web/walrus_wasm_bg.wasm": {
|
|
22
|
+
"browser": "./web/walrus_wasm_bg.wasm",
|
|
23
|
+
"import": "./web/walrus_wasm_bg.wasm",
|
|
24
|
+
"require": "./web/walrus_wasm_bg.wasm"
|
|
25
|
+
}
|
|
26
|
+
},
|
|
27
|
+
"devDependencies": {
|
|
28
|
+
"vite": "^7.2.6",
|
|
29
|
+
"vitest": "^4.0.15",
|
|
30
|
+
"@haneullabs/bcs": "0.1.0"
|
|
31
|
+
},
|
|
32
|
+
"scripts": {
|
|
33
|
+
"build:wasm": "pnpm build:nodejs && pnpm build:web",
|
|
34
|
+
"build:nodejs": "wasm-pack build --target nodejs --release --out-dir nodejs --no-pack && rm nodejs/.gitignore",
|
|
35
|
+
"build:web": "wasm-pack build --target web --release --out-dir web --no-pack && rm web/.gitignore",
|
|
36
|
+
"test": "vitest run"
|
|
37
|
+
}
|
|
38
|
+
}
|
package/src/bls12381.rs
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
use fastcrypto::bls12381::min_pk::BLS12381AggregateSignature;
|
|
2
|
+
use fastcrypto::bls12381::min_pk::{BLS12381PublicKey, BLS12381Signature};
|
|
3
|
+
use fastcrypto::traits::AggregateAuthenticator;
|
|
4
|
+
use fastcrypto::traits::ToFromBytes;
|
|
5
|
+
use fastcrypto::traits::VerifyingKey;
|
|
6
|
+
use wasm_bindgen::prelude::wasm_bindgen;
|
|
7
|
+
use wasm_bindgen::{JsError, JsValue};
|
|
8
|
+
|
|
9
|
+
#[wasm_bindgen]
|
|
10
|
+
pub fn bls12381_min_pk_verify(
|
|
11
|
+
signature: &[u8],
|
|
12
|
+
public_key: &[u8],
|
|
13
|
+
msg: &[u8],
|
|
14
|
+
) -> Result<bool, JsError> {
|
|
15
|
+
let signature = BLS12381Signature::from_bytes(signature)?;
|
|
16
|
+
let public_key = BLS12381PublicKey::from_bytes(public_key)?;
|
|
17
|
+
Ok(public_key.verify(msg, &signature).is_ok())
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/// Aggregate a list of signatures.
|
|
21
|
+
/// The signatures must be of the type Vec<Vec<u8>> with each signature being a 96 bytes long serialized signature.
|
|
22
|
+
#[wasm_bindgen]
|
|
23
|
+
pub fn bls12381_min_pk_aggregate(signatures: JsValue) -> Result<Vec<u8>, JsError> {
|
|
24
|
+
let signatures = serde_wasm_bindgen::from_value::<Vec<Vec<u8>>>(signatures)?;
|
|
25
|
+
let signatures = signatures
|
|
26
|
+
.iter()
|
|
27
|
+
.map(|sig| BLS12381Signature::from_bytes(sig))
|
|
28
|
+
.collect::<Result<Vec<_>, _>>()?;
|
|
29
|
+
let aggregate_signatures = BLS12381AggregateSignature::aggregate(&signatures)?;
|
|
30
|
+
Ok(aggregate_signatures.as_bytes().to_vec())
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/// Verify an aggregate signature.
|
|
34
|
+
#[wasm_bindgen]
|
|
35
|
+
pub fn bls12381_min_pk_verify_aggregate(
|
|
36
|
+
public_keys: JsValue, // Vec<Vec<u8>>
|
|
37
|
+
msg: &[u8],
|
|
38
|
+
signature: &[u8],
|
|
39
|
+
) -> Result<bool, JsError> {
|
|
40
|
+
let public_keys = serde_wasm_bindgen::from_value::<Vec<Vec<u8>>>(public_keys)?;
|
|
41
|
+
let public_keys = public_keys
|
|
42
|
+
.iter()
|
|
43
|
+
.map(|pk| BLS12381PublicKey::from_bytes(pk))
|
|
44
|
+
.collect::<Result<Vec<_>, _>>()?;
|
|
45
|
+
let signature = BLS12381AggregateSignature::from_bytes(signature)?;
|
|
46
|
+
Ok(signature.verify(&public_keys, msg).is_ok())
|
|
47
|
+
}
|
package/src/encoder.rs
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
use core::num::NonZeroU16;
|
|
2
|
+
use js_sys::{Array, Uint8Array};
|
|
3
|
+
use walrus_core::encoding::{
|
|
4
|
+
EncodingConfig, EncodingConfigEnum, EncodingFactory, Primary, SliverData,
|
|
5
|
+
};
|
|
6
|
+
use walrus_core::metadata::{BlobMetadata, BlobMetadataApi};
|
|
7
|
+
use walrus_core::{BlobId, EncodingType};
|
|
8
|
+
use wasm_bindgen::prelude::wasm_bindgen;
|
|
9
|
+
use wasm_bindgen::{JsCast, JsError, JsValue};
|
|
10
|
+
|
|
11
|
+
/// WASM-exposed wrapper around a Walrus encoding configuration, fixed to the
/// RS2 encoding type at construction time (see `BlobEncoder::new`).
#[wasm_bindgen]
pub struct BlobEncoder {
    // Encoding configuration selected for the constructor's shard count;
    // drives all encode/decode/metadata operations on this type.
    encoder: EncodingConfigEnum,
}
|
|
15
|
+
|
|
16
|
+
#[wasm_bindgen]
|
|
17
|
+
impl BlobEncoder {
|
|
18
|
+
#[wasm_bindgen(constructor)]
|
|
19
|
+
pub fn new(n_shards: u16) -> Result<Self, JsError> {
|
|
20
|
+
let config = EncodingConfig::new(
|
|
21
|
+
NonZeroU16::new(n_shards).ok_or(JsError::new("n_shards must be greater than 0"))?,
|
|
22
|
+
);
|
|
23
|
+
let encoder = config.get_for_type(EncodingType::RS2);
|
|
24
|
+
Ok(Self { encoder })
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
/// Encode data and write BCS-encoded SliverData directly into pre-allocated buffers.
|
|
28
|
+
///
|
|
29
|
+
/// Arguments:
|
|
30
|
+
/// - data: Input data to encode
|
|
31
|
+
/// - primary_buffers: Array of Uint8Array buffers (one per shard) for primary slivers
|
|
32
|
+
/// - secondary_buffers: Array of Uint8Array buffers (one per shard) for secondary slivers
|
|
33
|
+
///
|
|
34
|
+
/// Each buffer will be written with BCS-encoded SliverData.
|
|
35
|
+
///
|
|
36
|
+
/// Returns: JsValue with (metadata, root_hash)
|
|
37
|
+
#[wasm_bindgen]
|
|
38
|
+
pub fn encode(
|
|
39
|
+
&self,
|
|
40
|
+
data: &Uint8Array,
|
|
41
|
+
primary_buffers: &Array,
|
|
42
|
+
secondary_buffers: &Array,
|
|
43
|
+
) -> Result<JsValue, JsError> {
|
|
44
|
+
let data_vec = data.to_vec();
|
|
45
|
+
let (sliver_pairs, metadata) = self.encoder.encode_with_metadata(data_vec)?;
|
|
46
|
+
|
|
47
|
+
// Validate buffer counts
|
|
48
|
+
if primary_buffers.length() != sliver_pairs.len() as u32 {
|
|
49
|
+
return Err(JsError::new(&format!(
|
|
50
|
+
"Expected {} primary buffers, got {}",
|
|
51
|
+
sliver_pairs.len(),
|
|
52
|
+
primary_buffers.length()
|
|
53
|
+
)));
|
|
54
|
+
}
|
|
55
|
+
if secondary_buffers.length() != sliver_pairs.len() as u32 {
|
|
56
|
+
return Err(JsError::new(&format!(
|
|
57
|
+
"Expected {} secondary buffers, got {}",
|
|
58
|
+
sliver_pairs.len(),
|
|
59
|
+
secondary_buffers.length()
|
|
60
|
+
)));
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// Write BCS-encoded slivers to buffers
|
|
64
|
+
for (i, sliver_pair) in sliver_pairs.iter().enumerate() {
|
|
65
|
+
let primary_buffer = primary_buffers
|
|
66
|
+
.get(i as u32)
|
|
67
|
+
.dyn_into::<Uint8Array>()
|
|
68
|
+
.map_err(|_| JsError::new(&format!("Primary buffer {} is not a Uint8Array", i)))?;
|
|
69
|
+
Self::write_sliver_data_bcs(&sliver_pair.primary, &primary_buffer)?;
|
|
70
|
+
|
|
71
|
+
let secondary_buffer = secondary_buffers
|
|
72
|
+
.get(i as u32)
|
|
73
|
+
.dyn_into::<Uint8Array>()
|
|
74
|
+
.map_err(|_| {
|
|
75
|
+
JsError::new(&format!("Secondary buffer {} is not a Uint8Array", i))
|
|
76
|
+
})?;
|
|
77
|
+
Self::write_sliver_data_bcs(&sliver_pair.secondary, &secondary_buffer)?;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
let root_hash = match metadata.metadata() {
|
|
81
|
+
BlobMetadata::V1(inner) => inner.compute_root_hash(),
|
|
82
|
+
};
|
|
83
|
+
|
|
84
|
+
Ok(serde_wasm_bindgen::to_value(&(metadata, root_hash))?)
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
/// Compute metadata for data without encoding it.
|
|
88
|
+
/// Returns only the essential fields needed for blob registration:
|
|
89
|
+
/// (blob_id, root_hash, unencoded_length, encoding_type)
|
|
90
|
+
///
|
|
91
|
+
/// This avoids serializing all 2k sliver hashes across the JS/WASM boundary.
|
|
92
|
+
#[wasm_bindgen]
|
|
93
|
+
pub fn compute_metadata(&self, data: &Uint8Array) -> Result<JsValue, JsError> {
|
|
94
|
+
let data_vec = data.to_vec();
|
|
95
|
+
let metadata = self.encoder.compute_metadata(&data_vec)?;
|
|
96
|
+
let blob_id = metadata.blob_id();
|
|
97
|
+
let (root_hash, unencoded_length, encoding_type) = match metadata.metadata() {
|
|
98
|
+
BlobMetadata::V1(inner) => (
|
|
99
|
+
inner.compute_root_hash(),
|
|
100
|
+
inner.unencoded_length,
|
|
101
|
+
inner.encoding_type,
|
|
102
|
+
),
|
|
103
|
+
};
|
|
104
|
+
Ok(serde_wasm_bindgen::to_value(&(
|
|
105
|
+
blob_id,
|
|
106
|
+
root_hash,
|
|
107
|
+
unencoded_length,
|
|
108
|
+
encoding_type,
|
|
109
|
+
))?)
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
/// Decode blob from BCS-encoded SliverData buffers.
|
|
113
|
+
///
|
|
114
|
+
/// Arguments:
|
|
115
|
+
/// - blob_id: The blob identifier
|
|
116
|
+
/// - blob_size: The original unencoded blob size in bytes
|
|
117
|
+
/// - bcs_buffers: Vec<Uint8Array>, each containing BCS-encoded SliverData<Primary>
|
|
118
|
+
/// - output_buffer: Uint8Array to write decoded data into (must be exactly blob_size bytes)
|
|
119
|
+
#[wasm_bindgen]
|
|
120
|
+
pub fn decode(
|
|
121
|
+
&self,
|
|
122
|
+
blob_id: JsValue,
|
|
123
|
+
blob_size: u64,
|
|
124
|
+
bcs_buffers: Vec<Uint8Array>,
|
|
125
|
+
output_buffer: &Uint8Array,
|
|
126
|
+
) -> Result<(), JsError> {
|
|
127
|
+
let _blob_id = serde_wasm_bindgen::from_value::<BlobId>(blob_id)?;
|
|
128
|
+
|
|
129
|
+
if output_buffer.length() as u64 != blob_size {
|
|
130
|
+
return Err(JsError::new(&format!(
|
|
131
|
+
"Output buffer size mismatch: expected {}, got {}",
|
|
132
|
+
blob_size,
|
|
133
|
+
output_buffer.length()
|
|
134
|
+
)));
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
let mut sliver_data: Vec<SliverData<Primary>> = Vec::with_capacity(bcs_buffers.len());
|
|
138
|
+
|
|
139
|
+
for (i, buffer) in bcs_buffers.iter().enumerate() {
|
|
140
|
+
let bytes = buffer.to_vec();
|
|
141
|
+
let sliver: SliverData<Primary> = bcs::from_bytes(&bytes).map_err(|e| {
|
|
142
|
+
JsError::new(&format!(
|
|
143
|
+
"BCS deserialization failed at index {} (buffer size {}): {}",
|
|
144
|
+
i,
|
|
145
|
+
bytes.len(),
|
|
146
|
+
e
|
|
147
|
+
))
|
|
148
|
+
})?;
|
|
149
|
+
sliver_data.push(sliver);
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
let decoded = self.encoder.decode(blob_size, sliver_data)?;
|
|
153
|
+
output_buffer.copy_from(&decoded[..]);
|
|
154
|
+
|
|
155
|
+
Ok(())
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
fn write_sliver_data_bcs<T: walrus_core::encoding::EncodingAxis>(
|
|
159
|
+
sliver: &SliverData<T>,
|
|
160
|
+
buffer: &Uint8Array,
|
|
161
|
+
) -> Result<(), JsError> {
|
|
162
|
+
let serialized = bcs::to_bytes(sliver)
|
|
163
|
+
.map_err(|e| JsError::new(&format!("BCS serialization failed: {}", e)))?;
|
|
164
|
+
|
|
165
|
+
// Verify buffer size
|
|
166
|
+
if buffer.length() as usize != serialized.len() {
|
|
167
|
+
return Err(JsError::new(&format!(
|
|
168
|
+
"Buffer size mismatch: expected {}, got {}",
|
|
169
|
+
serialized.len(),
|
|
170
|
+
buffer.length()
|
|
171
|
+
)));
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
buffer.copy_from(&serialized[..]);
|
|
175
|
+
|
|
176
|
+
Ok(())
|
|
177
|
+
}
|
|
178
|
+
}
|
package/test/encoder.test.ts
ADDED
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
// Copyright (c) Mysten Labs, Inc.
|
|
2
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
import { describe, expect, it } from 'vitest';
|
|
5
|
+
import { BlobEncoder } from '../nodejs/walrus_wasm.js';
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Helper functions for computing BCS buffer sizes (copied from walrus SDK)
|
|
9
|
+
*/
|
|
10
|
+
/** Per-shard byte sizes of the BCS-encoded primary/secondary sliver buffers. */
interface BcsBufferSizes {
	// BCS-serialized size of the shard's primary sliver, in bytes.
	primary: number;
	// BCS-serialized size of the shard's secondary sliver, in bytes.
	secondary: number;
}
|
|
14
|
+
|
|
15
|
+
function getSourceSymbols(nShards: number) {
|
|
16
|
+
const maxFaulty = Math.floor((nShards - 1) / 3);
|
|
17
|
+
const minCorrect = nShards - maxFaulty;
|
|
18
|
+
return {
|
|
19
|
+
primarySymbols: minCorrect - maxFaulty,
|
|
20
|
+
secondarySymbols: minCorrect,
|
|
21
|
+
};
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
function uleb128Size(value: number): number {
|
|
25
|
+
let size = 1;
|
|
26
|
+
value >>= 7;
|
|
27
|
+
while (value !== 0) {
|
|
28
|
+
size++;
|
|
29
|
+
value >>= 7;
|
|
30
|
+
}
|
|
31
|
+
return size;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
function computeBcsBufferSize(dataLength: number): number {
|
|
35
|
+
const ulebSize = uleb128Size(dataLength);
|
|
36
|
+
return ulebSize + dataLength + 2 + 2; // ULEB128 + data + symbol_size + index
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
function computeBcsBufferSizes(blobSize: number, nShards: number): BcsBufferSizes[] {
|
|
40
|
+
const { primarySymbols, secondarySymbols } = getSourceSymbols(nShards);
|
|
41
|
+
|
|
42
|
+
let symbolSize = Math.floor((Math.max(blobSize, 1) - 1) / (primarySymbols * secondarySymbols)) + 1;
|
|
43
|
+
if (symbolSize % 2 === 1) {
|
|
44
|
+
symbolSize = symbolSize + 1;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
const primarySliverSize = secondarySymbols * symbolSize;
|
|
48
|
+
const secondarySliverSize = primarySymbols * symbolSize;
|
|
49
|
+
|
|
50
|
+
const primaryBcsSize = computeBcsBufferSize(primarySliverSize);
|
|
51
|
+
const secondaryBcsSize = computeBcsBufferSize(secondarySliverSize);
|
|
52
|
+
|
|
53
|
+
const sizes: BcsBufferSizes[] = [];
|
|
54
|
+
for (let i = 0; i < nShards; i++) {
|
|
55
|
+
sizes.push({
|
|
56
|
+
primary: primaryBcsSize,
|
|
57
|
+
secondary: secondaryBcsSize,
|
|
58
|
+
});
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
return sizes;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
async function sha256(data: Uint8Array<ArrayBuffer>): Promise<string> {
|
|
65
|
+
const hashBuffer = await crypto.subtle.digest('SHA-256', data);
|
|
66
|
+
return Array.from(new Uint8Array(hashBuffer))
|
|
67
|
+
.map((b) => b.toString(16).padStart(2, '0'))
|
|
68
|
+
.join('');
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
// End-to-end tests for the wasm BlobEncoder: encode → (metadata, root hash)
// plus BCS sliver buffers, then decode back from primary slivers only.
describe('BlobEncoder', () => {
	// Shard count fixed at 1000 — the static blob-ID expectations below are
	// only valid for this value.
	const nShards = 1000;

	it('should encode and decode a 5MB blob correctly', async () => {
		const blobSize = 5 * 1024 * 1024; // 5MB
		const inputData = new Uint8Array(blobSize);

		// Fill with pseudo-random data for better testing
		for (let i = 0; i < blobSize; i++) {
			inputData[i] = (i * 7 + 13) % 256;
		}

		// Compute hash of original data
		const originalHash = await sha256(inputData);

		// Create encoder
		const encoder = new BlobEncoder(nShards);

		// Pre-allocate BCS buffers (encode() requires exact per-shard sizes)
		const bufferSizes = computeBcsBufferSizes(blobSize, nShards);
		const primaryBuffers = bufferSizes.map((size) => new Uint8Array(size.primary));
		const secondaryBuffers = bufferSizes.map((size) => new Uint8Array(size.secondary));

		// Encode
		const [metadata, rootHash] = encoder.encode(inputData, primaryBuffers, secondaryBuffers);

		// Verify we got metadata and root hash
		expect(metadata).toBeDefined();
		expect(metadata.blob_id).toBeDefined();
		expect(rootHash).toBeDefined();
		// NOTE(review): assumes the root hash serializes as { Digest: bytes } —
		// tied to the Rust-side enum representation.
		expect(rootHash.Digest).toBeDefined();

		// Convert to Uint8Array if needed (wasm-bindgen may return Array)
		const rootHashBytes = rootHash.Digest instanceof Uint8Array ? rootHash.Digest : new Uint8Array(rootHash.Digest);
		expect(rootHashBytes.length).toBe(32); // 32-byte digest

		// Verify buffers were written
		expect(primaryBuffers.length).toBe(nShards);
		expect(secondaryBuffers.length).toBe(nShards);

		// Verify buffers contain data (not all zeros)
		const primaryHasData = primaryBuffers.some((buf) => buf.some((byte) => byte !== 0));
		expect(primaryHasData).toBe(true);

		// Decode using only primary slivers (minimum required for RS2)
		const outputBuffer = new Uint8Array(blobSize);
		encoder.decode(metadata.blob_id, BigInt(blobSize), primaryBuffers, outputBuffer);

		// Verify decoded data matches original
		const decodedHash = await sha256(outputBuffer);
		expect(decodedHash).toBe(originalHash);

		// Byte-by-byte comparison for first 100 bytes
		for (let i = 0; i < 100; i++) {
			expect(outputBuffer[i]).toBe(inputData[i]);
		}
	});

	it('should encode and decode a small 1KB blob correctly', async () => {
		const blobSize = 1024; // 1KB
		const inputData = new Uint8Array(blobSize);

		// Fill with pattern
		for (let i = 0; i < blobSize; i++) {
			inputData[i] = i % 256;
		}

		const originalHash = await sha256(inputData);

		const encoder = new BlobEncoder(nShards);

		const bufferSizes = computeBcsBufferSizes(blobSize, nShards);
		const primaryBuffers = bufferSizes.map((size) => new Uint8Array(size.primary));
		const secondaryBuffers = bufferSizes.map((size) => new Uint8Array(size.secondary));

		const [metadata] = encoder.encode(inputData, primaryBuffers, secondaryBuffers);

		const outputBuffer = new Uint8Array(blobSize);
		encoder.decode(metadata.blob_id, BigInt(blobSize), primaryBuffers, outputBuffer);

		const decodedHash = await sha256(outputBuffer);
		expect(decodedHash).toBe(originalHash);
	});

	it('should compute metadata without encoding', async () => {
		const blobSize = 1024 * 1024; // 1MB
		const inputData = new Uint8Array(blobSize);

		// Fill with data
		for (let i = 0; i < blobSize; i++) {
			inputData[i] = (i * 3) % 256;
		}

		const encoder = new BlobEncoder(nShards);

		// Compute metadata only - returns (blob_id, root_hash, unencoded_length, encoding_type)
		const [blobId, rootHash, unencodedLength, encodingType] = encoder.compute_metadata(inputData);

		// Verify blob_id
		expect(blobId).toBeDefined();
		const blobIdBytes = blobId instanceof Uint8Array ? blobId : new Uint8Array(blobId);
		expect(blobIdBytes.length).toBe(32); // u256 = 32 bytes

		// Verify root hash
		expect(rootHash).toBeDefined();
		const rootHashBytes = rootHash.Digest instanceof Uint8Array ? rootHash.Digest : new Uint8Array(rootHash.Digest);
		expect(rootHashBytes.length).toBe(32);

		// Verify unencoded_length
		expect(Number(unencodedLength)).toBe(blobSize);

		// Verify encoding_type (RS2)
		expect(encodingType).toBe('RS2');
	});

	it('should produce deterministic blob IDs for known inputs', async () => {
		const encoder = new BlobEncoder(nShards);

		// Test with empty blob
		const emptyData = new Uint8Array(0);
		const [emptyBlobId] = encoder.compute_metadata(emptyData);
		const emptyBlobIdHex = Buffer.from(emptyBlobId).toString('hex');

		// Empty blob should always produce the same blob ID
		const [emptyBlobId2] = encoder.compute_metadata(emptyData);
		const emptyBlobIdHex2 = Buffer.from(emptyBlobId2).toString('hex');
		expect(emptyBlobIdHex).toBe(emptyBlobIdHex2);

		// Verify against known static value (for nShards=1000)
		expect(emptyBlobIdHex).toBe('dc63d02f71d936716137f17b97901af97d553ad00ac08b20f73b9693c47cd6fe');

		// Test with simple known data
		const testData = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
		const [testBlobId] = encoder.compute_metadata(testData);
		const testBlobIdHex = Buffer.from(testBlobId).toString('hex');

		// Same input should always produce the same blob ID
		const [testBlobId2] = encoder.compute_metadata(testData);
		const testBlobIdHex2 = Buffer.from(testBlobId2).toString('hex');
		expect(testBlobIdHex).toBe(testBlobIdHex2);

		// Verify against known static value (for nShards=1000)
		expect(testBlobIdHex).toBe('865ca48479104a9bdc136f7d6730b7f3920012eccb4e99ba8540b9363766e093');
	});

	it('should produce consistent metadata for the same input', async () => {
		const blobSize = 512 * 1024; // 512KB
		const inputData = new Uint8Array(blobSize);

		// Fill with deterministic data
		for (let i = 0; i < blobSize; i++) {
			inputData[i] = (i * 11 + 7) % 256;
		}

		const encoder = new BlobEncoder(nShards);

		// Compute metadata twice
		const [blobId1, rootHash1, unencodedLength1] = encoder.compute_metadata(inputData);
		const [blobId2, rootHash2, unencodedLength2] = encoder.compute_metadata(inputData);

		// Blob IDs should match
		expect(blobId1).toEqual(blobId2);

		// Root hashes should match
		expect(rootHash1.Digest).toEqual(rootHash2.Digest);

		// Unencoded lengths should match
		expect(unencodedLength1).toBe(unencodedLength2);
	});

	it('should fail decode with wrong buffer size', () => {
		const blobSize = 1024;
		const inputData = new Uint8Array(blobSize);

		const encoder = new BlobEncoder(nShards);

		const bufferSizes = computeBcsBufferSizes(blobSize, nShards);
		const primaryBuffers = bufferSizes.map((size) => new Uint8Array(size.primary));
		const secondaryBuffers = bufferSizes.map((size) => new Uint8Array(size.secondary));

		const [metadata] = encoder.encode(inputData, primaryBuffers, secondaryBuffers);

		// Try to decode with wrong output buffer size
		const wrongSizeBuffer = new Uint8Array(blobSize - 1);

		// Error message must match the Rust-side decode() length check.
		expect(() => {
			encoder.decode(metadata.blob_id, BigInt(blobSize), primaryBuffers, wrongSizeBuffer);
		}).toThrow(/Output buffer size mismatch/);
	});
});
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
// Copyright (c) Mysten Labs, Inc.
|
|
2
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
import { defineConfig } from 'vitest/config';
|
|
5
|
+
|
|
6
|
+
// Vitest configuration for the wasm encoder test suite.
export default defineConfig({
	test: {
		// Bounded worker pool — presumably to limit memory while tests encode
		// multi-MB blobs; confirm before raising maxWorkers.
		minWorkers: 1,
		maxWorkers: 4,
		// 60s timeouts: encoding a 5MB blob across 1000 shards is slow.
		hookTimeout: 60000,
		testTimeout: 60000,
		env: {
			NODE_ENV: 'test',
		},
		// Thread pool (not process forks) for test workers.
		pool: 'threads',
	},
});
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
/* tslint:disable */
|
|
2
|
+
/* eslint-disable */
|
|
3
|
+
/**
 * Aggregate a list of signatures.
 * The signatures must be of the type Vec<Vec<u8>> with each signature being a 96 bytes long serialized signature.
 */
export function bls12381_min_pk_aggregate(signatures: any): Uint8Array;
/**
 * Verify an aggregate signature.
 */
export function bls12381_min_pk_verify_aggregate(public_keys: any, msg: Uint8Array, signature: Uint8Array): boolean;
/** Verify a single min-pk signature; true when valid, throws on malformed input. */
export function bls12381_min_pk_verify(signature: Uint8Array, public_key: Uint8Array, msg: Uint8Array): boolean;
|
|
13
|
+
// NOTE(review): wasm-bindgen-style generated declarations — prefer
// regenerating (wasm-pack) over hand-editing this class.
export class BlobEncoder {
  free(): void;
  [Symbol.dispose](): void;
  /**
   * Compute metadata for data without encoding it.
   * Returns only the essential fields needed for blob registration:
   * (blob_id, root_hash, unencoded_length, encoding_type)
   *
   * This avoids serializing all 2k sliver hashes across the JS/WASM boundary.
   */
  compute_metadata(data: Uint8Array): any;
  constructor(n_shards: number);
  /**
   * Decode blob from BCS-encoded SliverData buffers.
   *
   * Arguments:
   * - blob_id: The blob identifier
   * - blob_size: The original unencoded blob size in bytes
   * - bcs_buffers: Vec<Uint8Array>, each containing BCS-encoded SliverData<Primary>
   * - output_buffer: Uint8Array to write decoded data into (must be exactly blob_size bytes)
   */
  decode(blob_id: any, blob_size: bigint, bcs_buffers: Uint8Array[], output_buffer: Uint8Array): void;
  /**
   * Encode data and write BCS-encoded SliverData directly into pre-allocated buffers.
   *
   * Arguments:
   * - data: Input data to encode
   * - primary_buffers: Array of Uint8Array buffers (one per shard) for primary slivers
   * - secondary_buffers: Array of Uint8Array buffers (one per shard) for secondary slivers
   *
   * Each buffer will be written with BCS-encoded SliverData.
   *
   * Returns: JsValue with (metadata, root_hash)
   */
  encode(data: Uint8Array, primary_buffers: Array<any>, secondary_buffers: Array<any>): any;
}
|
|
49
|
+
|
|
50
|
+
// wasm-bindgen-style generated initialization surface for the web target.
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;

// Raw wasm exports; numbers are wasm pointers/lengths, not JS values.
export interface InitOutput {
  readonly memory: WebAssembly.Memory;
  readonly __wbg_blobencoder_free: (a: number, b: number) => void;
  readonly blobencoder_compute_metadata: (a: number, b: any) => [number, number, number];
  readonly blobencoder_decode: (a: number, b: any, c: bigint, d: number, e: number, f: any) => [number, number];
  readonly blobencoder_encode: (a: number, b: any, c: any, d: any) => [number, number, number];
  readonly blobencoder_new: (a: number) => [number, number, number];
  readonly bls12381_min_pk_aggregate: (a: any) => [number, number, number, number];
  readonly bls12381_min_pk_verify: (a: number, b: number, c: number, d: number, e: number, f: number) => [number, number, number];
  readonly bls12381_min_pk_verify_aggregate: (a: any, b: number, c: number, d: number, e: number) => [number, number, number];
  readonly rustsecp256k1_v0_8_1_context_create: (a: number) => number;
  readonly rustsecp256k1_v0_8_1_context_destroy: (a: number) => void;
  readonly rustsecp256k1_v0_8_1_default_error_callback_fn: (a: number, b: number) => void;
  readonly rustsecp256k1_v0_8_1_default_illegal_callback_fn: (a: number, b: number) => void;
  readonly __wbindgen_malloc: (a: number, b: number) => number;
  readonly __wbindgen_realloc: (a: number, b: number, c: number, d: number) => number;
  readonly __wbindgen_exn_store: (a: number) => void;
  readonly __externref_table_alloc: () => number;
  readonly __wbindgen_externrefs: WebAssembly.Table;
  readonly __externref_table_dealloc: (a: number) => void;
  readonly __wbindgen_free: (a: number, b: number, c: number) => void;
  readonly __wbindgen_start: () => void;
}

export type SyncInitInput = BufferSource | WebAssembly.Module;
/**
 * Instantiates the given `module`, which can either be bytes or
 * a precompiled `WebAssembly.Module`.
 *
 * @param {{ module: SyncInitInput }} module - Passing `SyncInitInput` directly is deprecated.
 *
 * @returns {InitOutput}
 */
export function initSync(module: { module: SyncInitInput } | SyncInitInput): InitOutput;

/**
 * If `module_or_path` is {RequestInfo} or {URL}, makes a request and
 * for everything else, calls `WebAssembly.instantiate` directly.
 *
 * @param {{ module_or_path: InitInput | Promise<InitInput> }} module_or_path - Passing `InitInput` directly is deprecated.
 *
 * @returns {Promise<InitOutput>}
 */
export default function __wbg_init (module_or_path?: { module_or_path: InitInput | Promise<InitInput> } | InitInput | Promise<InitInput>): Promise<InitOutput>;
|