@waku/core 0.0.33-45523ca.0 → 0.0.33-a89e69f.0
- package/bundle/base_protocol-DtQR2CNC.js +265 -0
- package/bundle/{index-tdQNdKHx.js → index-BVysxsMu.js} +6 -444
- package/bundle/index.js +66 -9
- package/bundle/lib/base_protocol.js +2 -2
- package/bundle/lib/message/version_0.js +2 -2
- package/bundle/{version_0-BrbNEwD-.js → version_0-C5ObpJ_0.js} +445 -2
- package/dist/.tsbuildinfo +1 -1
- package/dist/lib/base_protocol.d.ts +5 -7
- package/dist/lib/base_protocol.js +14 -15
- package/dist/lib/base_protocol.js.map +1 -1
- package/dist/lib/filter/index.d.ts +2 -1
- package/dist/lib/filter/index.js +6 -3
- package/dist/lib/filter/index.js.map +1 -1
- package/dist/lib/metadata/index.js +1 -1
- package/dist/lib/metadata/index.js.map +1 -1
- package/dist/lib/stream_manager/stream_manager.d.ts +10 -9
- package/dist/lib/stream_manager/stream_manager.js +72 -55
- package/dist/lib/stream_manager/stream_manager.js.map +1 -1
- package/dist/lib/stream_manager/utils.d.ts +1 -1
- package/dist/lib/stream_manager/utils.js +5 -17
- package/dist/lib/stream_manager/utils.js.map +1 -1
- package/package.json +1 -1
- package/src/lib/base_protocol.ts +19 -30
- package/src/lib/filter/index.ts +13 -2
- package/src/lib/metadata/index.ts +1 -1
- package/src/lib/stream_manager/stream_manager.ts +105 -66
- package/src/lib/stream_manager/utils.ts +5 -17
- package/bundle/base_protocol-BS9mxaB7.js +0 -336
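Most of the growth in `version_0-C5ObpJ_0.js` comes from inlining a SHA-256 implementation and the static/autosharding pubsub-topic helpers (see the diff below). For orientation, here is a minimal TypeScript sketch of the topic shape those helpers produce and parse; it mirrors `pubsubTopicToSingleShardInfo` from the added bundle code, and `parsePubsubTopic` is an illustrative name, not an export of `@waku/core`:

```typescript
// Illustrative sketch only: mirrors pubsubTopicToSingleShardInfo from the diff below.
// Waku sharded pubsub topics have the form "/waku/2/rs/<clusterId>/<shard>".
function parsePubsubTopic(topic: string): { clusterId: number; shard: number } {
  const parts = topic.split("/");
  // A leading "/" yields an empty first element, so a valid topic splits into 6 parts.
  if (parts.length !== 6 || parts[1] !== "waku" || parts[2] !== "2" || parts[3] !== "rs") {
    throw new Error("Invalid pubsub topic");
  }
  const clusterId = Number.parseInt(parts[4], 10);
  const shard = Number.parseInt(parts[5], 10);
  if (Number.isNaN(clusterId) || Number.isNaN(shard)) {
    throw new Error("Invalid clusterId or shard");
  }
  return { clusterId, shard };
}

// parsePubsubTopic("/waku/2/rs/1/4") -> { clusterId: 1, shard: 4 }
```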
@@ -1,4 +1,4 @@
-import { a as allocUnsafe, f as fromString, b as alloc$1,
+import { a as allocUnsafe, f as fromString, b as alloc$1, e as concat, u as utf8ToBytes$1, L as Logger } from './index-BVysxsMu.js';

 /* eslint-disable no-fallthrough */
 const N1 = Math.pow(2, 7);
@@ -3750,6 +3750,449 @@ var WakuMetadataResponse;
 };
 })(WakuMetadataResponse || (WakuMetadataResponse = {}));

+// copied from utils
+function isBytes(a) {
+    return (a instanceof Uint8Array ||
+        (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array'));
+}
+function bytes(b, ...lengths) {
+    if (!isBytes(b))
+        throw new Error('Uint8Array expected');
+    if (lengths.length > 0 && !lengths.includes(b.length))
+        throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`);
+}
+function exists(instance, checkFinished = true) {
+    if (instance.destroyed)
+        throw new Error('Hash instance has been destroyed');
+    if (checkFinished && instance.finished)
+        throw new Error('Hash#digest() has already been called');
+}
+function output(out, instance) {
+    bytes(out);
+    const min = instance.outputLen;
+    if (out.length < min) {
+        throw new Error(`digestInto() expects output buffer of length at least ${min}`);
+    }
+}
+
+/*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */
+// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+.
+// node.js versions earlier than v19 don't declare it in global scope.
+// For node.js, package.json#exports field mapping rewrites import
+// from `crypto` to `cryptoNode`, which imports native module.
+// Makes the utils un-importable in browsers without a bundler.
+// Once node.js 18 is deprecated (2025-04-30), we can just drop the import.
+// Cast array to view
+const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
+// The rotate right (circular right shift) operation for uint32
+const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift);
+new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44;
+/**
+ * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99])
+ */
+function utf8ToBytes(str) {
+    if (typeof str !== 'string')
+        throw new Error(`utf8ToBytes expected string, got ${typeof str}`);
+    return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809
+}
+/**
+ * Normalizes (non-hex) string or Uint8Array to Uint8Array.
+ * Warning: when Uint8Array is passed, it would NOT get copied.
+ * Keep in mind for future mutable operations.
+ */
+function toBytes(data) {
+    if (typeof data === 'string')
+        data = utf8ToBytes(data);
+    bytes(data);
+    return data;
+}
+// For runtime check if class implements interface
+class Hash {
+    // Safe version that clones internal state
+    clone() {
+        return this._cloneInto();
+    }
+}
+function wrapConstructor(hashCons) {
+    const hashC = (msg) => hashCons().update(toBytes(msg)).digest();
+    const tmp = hashCons();
+    hashC.outputLen = tmp.outputLen;
+    hashC.blockLen = tmp.blockLen;
+    hashC.create = () => hashCons();
+    return hashC;
+}
+
+// Polyfill for Safari 14
+function setBigUint64(view, byteOffset, value, isLE) {
+    if (typeof view.setBigUint64 === 'function')
+        return view.setBigUint64(byteOffset, value, isLE);
+    const _32n = BigInt(32);
+    const _u32_max = BigInt(0xffffffff);
+    const wh = Number((value >> _32n) & _u32_max);
+    const wl = Number(value & _u32_max);
+    const h = isLE ? 4 : 0;
+    const l = isLE ? 0 : 4;
+    view.setUint32(byteOffset + h, wh, isLE);
+    view.setUint32(byteOffset + l, wl, isLE);
+}
+// Choice: a ? b : c
+const Chi = (a, b, c) => (a & b) ^ (~a & c);
+// Majority function, true if any two inpust is true
+const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c);
+/**
+ * Merkle-Damgard hash construction base class.
+ * Could be used to create MD5, RIPEMD, SHA1, SHA2.
+ */
+class HashMD extends Hash {
+    constructor(blockLen, outputLen, padOffset, isLE) {
+        super();
+        this.blockLen = blockLen;
+        this.outputLen = outputLen;
+        this.padOffset = padOffset;
+        this.isLE = isLE;
+        this.finished = false;
+        this.length = 0;
+        this.pos = 0;
+        this.destroyed = false;
+        this.buffer = new Uint8Array(blockLen);
+        this.view = createView(this.buffer);
+    }
+    update(data) {
+        exists(this);
+        const { view, buffer, blockLen } = this;
+        data = toBytes(data);
+        const len = data.length;
+        for (let pos = 0; pos < len;) {
+            const take = Math.min(blockLen - this.pos, len - pos);
+            // Fast path: we have at least one block in input, cast it to view and process
+            if (take === blockLen) {
+                const dataView = createView(data);
+                for (; blockLen <= len - pos; pos += blockLen)
+                    this.process(dataView, pos);
+                continue;
+            }
+            buffer.set(data.subarray(pos, pos + take), this.pos);
+            this.pos += take;
+            pos += take;
+            if (this.pos === blockLen) {
+                this.process(view, 0);
+                this.pos = 0;
+            }
+        }
+        this.length += data.length;
+        this.roundClean();
+        return this;
+    }
+    digestInto(out) {
+        exists(this);
+        output(out, this);
+        this.finished = true;
+        // Padding
+        // We can avoid allocation of buffer for padding completely if it
+        // was previously not allocated here. But it won't change performance.
+        const { buffer, view, blockLen, isLE } = this;
+        let { pos } = this;
+        // append the bit '1' to the message
+        buffer[pos++] = 0b10000000;
+        this.buffer.subarray(pos).fill(0);
+        // we have less than padOffset left in buffer, so we cannot put length in
+        // current block, need process it and pad again
+        if (this.padOffset > blockLen - pos) {
+            this.process(view, 0);
+            pos = 0;
+        }
+        // Pad until full block byte with zeros
+        for (let i = pos; i < blockLen; i++)
+            buffer[i] = 0;
+        // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that
+        // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen.
+        // So we just write lowest 64 bits of that value.
+        setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE);
+        this.process(view, 0);
+        const oview = createView(out);
+        const len = this.outputLen;
+        // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT
+        if (len % 4)
+            throw new Error('_sha2: outputLen should be aligned to 32bit');
+        const outLen = len / 4;
+        const state = this.get();
+        if (outLen > state.length)
+            throw new Error('_sha2: outputLen bigger than state');
+        for (let i = 0; i < outLen; i++)
+            oview.setUint32(4 * i, state[i], isLE);
+    }
+    digest() {
+        const { buffer, outputLen } = this;
+        this.digestInto(buffer);
+        const res = buffer.slice(0, outputLen);
+        this.destroy();
+        return res;
+    }
+    _cloneInto(to) {
+        to || (to = new this.constructor());
+        to.set(...this.get());
+        const { blockLen, buffer, length, finished, destroyed, pos } = this;
+        to.length = length;
+        to.pos = pos;
+        to.finished = finished;
+        to.destroyed = destroyed;
+        if (length % blockLen)
+            to.buffer.set(buffer);
+        return to;
+    }
+}
+
+// SHA2-256 need to try 2^128 hashes to execute birthday attack.
+// BTC network is doing 2^67 hashes/sec as per early 2023.
+// Round constants:
+// first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311)
+// prettier-ignore
+const SHA256_K = /* @__PURE__ */ new Uint32Array([
+    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
+    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
+    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
+    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
+    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
+    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
+    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
+    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
+]);
+// Initial state:
+// first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19
+// prettier-ignore
+const SHA256_IV = /* @__PURE__ */ new Uint32Array([
+    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
+]);
+// Temporary buffer, not used to store anything between runs
+// Named this way because it matches specification.
+const SHA256_W = /* @__PURE__ */ new Uint32Array(64);
+class SHA256 extends HashMD {
+    constructor() {
+        super(64, 32, 8, false);
+        // We cannot use array here since array allows indexing by variable
+        // which means optimizer/compiler cannot use registers.
+        this.A = SHA256_IV[0] | 0;
+        this.B = SHA256_IV[1] | 0;
+        this.C = SHA256_IV[2] | 0;
+        this.D = SHA256_IV[3] | 0;
+        this.E = SHA256_IV[4] | 0;
+        this.F = SHA256_IV[5] | 0;
+        this.G = SHA256_IV[6] | 0;
+        this.H = SHA256_IV[7] | 0;
+    }
+    get() {
+        const { A, B, C, D, E, F, G, H } = this;
+        return [A, B, C, D, E, F, G, H];
+    }
+    // prettier-ignore
+    set(A, B, C, D, E, F, G, H) {
+        this.A = A | 0;
+        this.B = B | 0;
+        this.C = C | 0;
+        this.D = D | 0;
+        this.E = E | 0;
+        this.F = F | 0;
+        this.G = G | 0;
+        this.H = H | 0;
+    }
+    process(view, offset) {
+        // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array
+        for (let i = 0; i < 16; i++, offset += 4)
+            SHA256_W[i] = view.getUint32(offset, false);
+        for (let i = 16; i < 64; i++) {
+            const W15 = SHA256_W[i - 15];
+            const W2 = SHA256_W[i - 2];
+            const s0 = rotr(W15, 7) ^ rotr(W15, 18) ^ (W15 >>> 3);
+            const s1 = rotr(W2, 17) ^ rotr(W2, 19) ^ (W2 >>> 10);
+            SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0;
+        }
+        // Compression function main loop, 64 rounds
+        let { A, B, C, D, E, F, G, H } = this;
+        for (let i = 0; i < 64; i++) {
+            const sigma1 = rotr(E, 6) ^ rotr(E, 11) ^ rotr(E, 25);
+            const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0;
+            const sigma0 = rotr(A, 2) ^ rotr(A, 13) ^ rotr(A, 22);
+            const T2 = (sigma0 + Maj(A, B, C)) | 0;
+            H = G;
+            G = F;
+            F = E;
+            E = (D + T1) | 0;
+            D = C;
+            C = B;
+            B = A;
+            A = (T1 + T2) | 0;
+        }
+        // Add the compressed chunk to the current hash value
+        A = (A + this.A) | 0;
+        B = (B + this.B) | 0;
+        C = (C + this.C) | 0;
+        D = (D + this.D) | 0;
+        E = (E + this.E) | 0;
+        F = (F + this.F) | 0;
+        G = (G + this.G) | 0;
+        H = (H + this.H) | 0;
+        this.set(A, B, C, D, E, F, G, H);
+    }
+    roundClean() {
+        SHA256_W.fill(0);
+    }
+    destroy() {
+        this.set(0, 0, 0, 0, 0, 0, 0, 0);
+        this.buffer.fill(0);
+    }
+}
+/**
+ * SHA2-256 hash function
+ * @param message - data that would be hashed
+ */
+const sha256 = /* @__PURE__ */ wrapConstructor(() => new SHA256());
+
+/**
+ * The default cluster ID for The Waku Network
+ */
+const DEFAULT_CLUSTER_ID = 1;
+
+const singleShardInfoToPubsubTopic = (shardInfo) => {
+    if (shardInfo.shard === undefined)
+        throw new Error("Invalid shard");
+    return `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${shardInfo.shard}`;
+};
+const shardInfoToPubsubTopics = (shardInfo) => {
+    if ("contentTopics" in shardInfo && shardInfo.contentTopics) {
+        // Autosharding: explicitly defined content topics
+        return Array.from(new Set(shardInfo.contentTopics.map((contentTopic) => contentTopicToPubsubTopic(contentTopic, shardInfo.clusterId))));
+    }
+    else if ("shards" in shardInfo) {
+        // Static sharding
+        if (shardInfo.shards === undefined)
+            throw new Error("Invalid shard");
+        return Array.from(new Set(shardInfo.shards.map((index) => `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${index}`)));
+    }
+    else if ("application" in shardInfo && "version" in shardInfo) {
+        // Autosharding: single shard from application and version
+        return [
+            contentTopicToPubsubTopic(`/${shardInfo.application}/${shardInfo.version}/default/default`, shardInfo.clusterId)
+        ];
+    }
+    else {
+        throw new Error("Missing required configuration in shard parameters");
+    }
+};
+const pubsubTopicToSingleShardInfo = (pubsubTopics) => {
+    const parts = pubsubTopics.split("/");
+    if (parts.length != 6 ||
+        parts[1] !== "waku" ||
+        parts[2] !== "2" ||
+        parts[3] !== "rs")
+        throw new Error("Invalid pubsub topic");
+    const clusterId = parseInt(parts[4]);
+    const shard = parseInt(parts[5]);
+    if (isNaN(clusterId) || isNaN(shard))
+        throw new Error("Invalid clusterId or shard");
+    return {
+        clusterId,
+        shard
+    };
+};
+const pubsubTopicsToShardInfo = (pubsubTopics) => {
+    const shardInfoSet = new Set();
+    const clusterIds = new Set();
+    for (const topic of pubsubTopics) {
+        const { clusterId, shard } = pubsubTopicToSingleShardInfo(topic);
+        shardInfoSet.add(`${clusterId}:${shard}`);
+        clusterIds.add(clusterId);
+    }
+    if (shardInfoSet.size === 0) {
+        throw new Error("No valid pubsub topics provided");
+    }
+    if (clusterIds.size > 1) {
+        throw new Error("Pubsub topics from multiple cluster IDs are not supported");
+    }
+    const clusterId = clusterIds.values().next().value;
+    const shards = Array.from(shardInfoSet).map((info) => parseInt(info.split(":")[1]));
+    return {
+        clusterId,
+        shards
+    };
+};
+/**
+ * Given a string, will throw an error if it is not formatted as a valid content topic for autosharding based on https://rfc.vac.dev/spec/51/
+ * @param contentTopic String to validate
+ * @returns Object with each content topic field as an attribute
+ */
+function ensureValidContentTopic(contentTopic) {
+    const parts = contentTopic.split("/");
+    if (parts.length < 5 || parts.length > 6) {
+        throw Error("Content topic format is invalid");
+    }
+    // Validate generation field if present
+    let generation = 0;
+    if (parts.length == 6) {
+        generation = parseInt(parts[1]);
+        if (isNaN(generation)) {
+            throw new Error("Invalid generation field in content topic");
+        }
+        if (generation > 0) {
+            throw new Error("Generation greater than 0 is not supported");
+        }
+    }
+    // Validate remaining fields
+    const fields = parts.splice(-4);
+    // Validate application field
+    if (fields[0].length == 0) {
+        throw new Error("Application field cannot be empty");
+    }
+    // Validate version field
+    if (fields[1].length == 0) {
+        throw new Error("Version field cannot be empty");
+    }
+    // Validate topic name field
+    if (fields[2].length == 0) {
+        throw new Error("Topic name field cannot be empty");
+    }
+    // Validate encoding field
+    if (fields[3].length == 0) {
+        throw new Error("Encoding field cannot be empty");
+    }
+    return {
+        generation,
+        application: fields[0],
+        version: fields[1],
+        topicName: fields[2],
+        encoding: fields[3]
+    };
+}
+/**
+ * Given a string, determines which autoshard index to use for its pubsub topic.
+ * Based on the algorithm described in the RFC: https://rfc.vac.dev/spec/51//#algorithm
+ */
+function contentTopicToShardIndex(contentTopic, networkShards = 8) {
+    const { application, version } = ensureValidContentTopic(contentTopic);
+    const digest = sha256(concat([utf8ToBytes$1(application), utf8ToBytes$1(version)]));
+    const dataview = new DataView(digest.buffer.slice(-8));
+    return Number(dataview.getBigUint64(0, false) % BigInt(networkShards));
+}
+function contentTopicToPubsubTopic(contentTopic, clusterId = DEFAULT_CLUSTER_ID, networkShards = 8) {
+    if (!contentTopic) {
+        throw Error("Content topic must be specified");
+    }
+    const shardIndex = contentTopicToShardIndex(contentTopic, networkShards);
+    return `/waku/2/rs/${clusterId}/${shardIndex}`;
+}
+/**
+ * Used when creating encoders/decoders to determine which pubsub topic to use
+ */
+function determinePubsubTopic(contentTopic,
+// TODO: make it accept ShardInfo https://github.com/waku-org/js-waku/issues/2086
+pubsubTopicShardInfo) {
+    if (typeof pubsubTopicShardInfo == "string") {
+        return pubsubTopicShardInfo;
+    }
+    return pubsubTopicShardInfo?.shard !== undefined
+        ? singleShardInfoToPubsubTopic(pubsubTopicShardInfo)
+        : contentTopicToPubsubTopic(contentTopic, pubsubTopicShardInfo?.clusterId ?? DEFAULT_CLUSTER_ID);
+}
+
 const log = new Logger("message:version-0");
 const OneMillion = BigInt(1_000_000);
 const Version = 0;
@@ -3902,4 +4345,4 @@ var version_0 = /*#__PURE__*/Object.freeze({
     proto: message
 });

-export { DecodedMessage as D, Encoder as E, FilterSubscribeRequest as F, MessagePush as M, PushRpc$1 as P, StoreQueryRequest$1 as S, Version as V, WakuMetadataRequest as W, encode as a, FilterSubscribeResponse$1 as b, PushResponse as c, decode as d, encodingLength as e, StoreQueryResponse$1 as f, createEncoder as g,
+export { DecodedMessage as D, Encoder as E, FilterSubscribeRequest as F, MessagePush as M, PushRpc$1 as P, StoreQueryRequest$1 as S, Version as V, WakuMetadataRequest as W, encode as a, FilterSubscribeResponse$1 as b, PushResponse as c, decode as d, encodingLength as e, StoreQueryResponse$1 as f, createEncoder as g, pubsubTopicsToShardInfo as h, WakuMetadataResponse as i, createDecoder as j, Decoder as k, message as m, pubsubTopicToSingleShardInfo as p, shardInfoToPubsubTopics as s, version_0 as v };
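For reference, the autosharding rule implemented by the inlined `contentTopicToShardIndex`/`contentTopicToPubsubTopic` above can be reproduced standalone. A minimal sketch, assuming a Node.js environment and using `node:crypto` in place of the bundled noble-hashes SHA-256; `shardIndexFor` and `autoshardPubsubTopic` are illustrative names, not `@waku/core` exports:

```typescript
import { createHash } from "node:crypto";

// Sketch of the autoshard rule used above: shard = (big-endian uint64 of the
// last 8 bytes of sha256(application || version)) mod networkShards.
function shardIndexFor(contentTopic: string, networkShards = 8): number {
  // Content topic shape: /<application>/<version>/<topic-name>/<encoding>
  // (with an optional leading generation field), so the last four fields
  // start with application and version, mirroring ensureValidContentTopic.
  const [application, version] = contentTopic.split("/").slice(-4);
  const digest = createHash("sha256")
    .update(Buffer.from(application, "utf8"))
    .update(Buffer.from(version, "utf8"))
    .digest();
  // Interpret the last 8 bytes of the 32-byte digest as a big-endian uint64.
  const view = new DataView(digest.buffer, digest.byteOffset + digest.length - 8, 8);
  return Number(view.getBigUint64(0, false) % BigInt(networkShards));
}

function autoshardPubsubTopic(contentTopic: string, clusterId = 1): string {
  return `/waku/2/rs/${clusterId}/${shardIndexFor(contentTopic)}`;
}

// e.g. autoshardPubsubTopic("/my-app/1/chat/proto") -> "/waku/2/rs/1/<shard 0..7>"
```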