@waku/core 0.0.32 → 0.0.33-09028e7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundle/{base_protocol-C47QkJ2o.js → base_protocol-CS0EDeEY.js} +16 -91
- package/bundle/{index-tdQNdKHx.js → index-BVysxsMu.js} +6 -444
- package/bundle/index.js +66 -9
- package/bundle/lib/base_protocol.js +2 -2
- package/bundle/lib/message/version_0.js +2 -2
- package/bundle/{version_0-BrbNEwD-.js → version_0-C5ObpJ_0.js} +445 -2
- package/dist/.tsbuildinfo +1 -1
- package/dist/lib/base_protocol.d.ts +5 -7
- package/dist/lib/base_protocol.js +15 -15
- package/dist/lib/base_protocol.js.map +1 -1
- package/dist/lib/filter/index.d.ts +2 -1
- package/dist/lib/filter/index.js +6 -3
- package/dist/lib/filter/index.js.map +1 -1
- package/dist/lib/metadata/index.js +1 -1
- package/dist/lib/metadata/index.js.map +1 -1
- package/package.json +1 -126
- package/src/lib/base_protocol.ts +21 -29
- package/src/lib/filter/index.ts +13 -2
- package/src/lib/metadata/index.ts +1 -1
@@ -1,58 +1,4 @@
-import {
-
-const decodeRelayShard = (bytes) => {
-    // explicitly converting to Uint8Array to avoid Buffer
-    // https://github.com/libp2p/js-libp2p/issues/2146
-    bytes = new Uint8Array(bytes);
-    if (bytes.length < 3)
-        throw new Error("Insufficient data");
-    const view = new DataView(bytes.buffer);
-    const clusterId = view.getUint16(0);
-    const shards = [];
-    if (bytes.length === 130) {
-        // rsv format (Bit Vector)
-        for (let i = 0; i < 1024; i++) {
-            const byteIndex = Math.floor(i / 8) + 2; // Adjusted for the 2-byte cluster field
-            const bitIndex = 7 - (i % 8);
-            if (view.getUint8(byteIndex) & (1 << bitIndex)) {
-                shards.push(i);
-            }
-        }
-    }
-    else {
-        // rs format (Index List)
-        const numIndices = view.getUint8(2);
-        for (let i = 0, offset = 3; i < numIndices; i++, offset += 2) {
-            if (offset + 1 >= bytes.length)
-                throw new Error("Unexpected end of data");
-            shards.push(view.getUint16(offset));
-        }
-    }
-    return { clusterId, shards };
-};
-const encodeRelayShard = (shardInfo) => {
-    const { clusterId, shards } = shardInfo;
-    const totalLength = shards.length >= 64 ? 130 : 3 + 2 * shards.length;
-    const buffer = new ArrayBuffer(totalLength);
-    const view = new DataView(buffer);
-    view.setUint16(0, clusterId);
-    if (shards.length >= 64) {
-        // rsv format (Bit Vector)
-        for (const index of shards) {
-            const byteIndex = Math.floor(index / 8) + 2; // Adjusted for the 2-byte cluster field
-            const bitIndex = 7 - (index % 8);
-            view.setUint8(byteIndex, view.getUint8(byteIndex) | (1 << bitIndex));
-        }
-    }
-    else {
-        // rs format (Index List)
-        view.setUint8(2, shards.length);
-        for (let i = 0, offset = 3; i < shards.length; i++, offset += 2) {
-            view.setUint16(offset, shards[i]);
-        }
-    }
-    return new Uint8Array(buffer);
-};
+import { g as bytesToUtf8, T as Tags, L as Logger } from './index-BVysxsMu.js';
 
 /**
  * Function to sort peers by latency from lowest to highest
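The removed pair above implements the two wire formats for relay shard metadata: `rs` (index list, used when there are fewer than 64 shards) and `rsv` (1024-bit bit vector, 130 bytes). A minimal round-trip sketch, assuming the two functions above are still in scope (they now live in a shared chunk rather than this bundle):

```js
// rs (index list) path: 3 shards < 64, so the payload is 3 + 2 * 3 = 9 bytes.
const encoded = encodeRelayShard({ clusterId: 1, shards: [0, 2, 5] });
console.log(encoded.length); // 9

// Decoding reads the 2-byte cluster ID, then the 1-byte count and the 2-byte indices.
const decoded = decodeRelayShard(encoded);
console.log(decoded); // { clusterId: 1, shards: [0, 2, 5] }
```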
@@ -96,28 +42,6 @@ async function getPeersForProtocol(peerStore, protocols) {
     });
     return peers;
 }
-async function getConnectedPeersForProtocolAndShard(connections, peerStore, protocols, shardInfo) {
-    const openConnections = connections.filter((connection) => connection.status === "open");
-    const peerPromises = openConnections.map(async (connection) => {
-        const peer = await peerStore.get(connection.remotePeer);
-        const supportsProtocol = protocols.some((protocol) => peer.protocols.includes(protocol));
-        if (supportsProtocol) {
-            if (shardInfo) {
-                const encodedPeerShardInfo = peer.metadata.get("shardInfo");
-                const peerShardInfo = encodedPeerShardInfo && decodeRelayShard(encodedPeerShardInfo);
-                if (peerShardInfo && shardInfo.clusterId === peerShardInfo.clusterId) {
-                    return peer;
-                }
-            }
-            else {
-                return peer;
-            }
-        }
-        return null;
-    });
-    const peersWithNulls = await Promise.all(peerPromises);
-    return peersWithNulls.filter((peer) => peer !== null);
-}
 
 /**
  * Retrieves a list of peers based on the specified criteria:
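For reference, the removed helper took the node's current connections plus an optional shard descriptor and kept only peers that have an open connection, advertise one of the given protocols, and, when a shard is supplied, carry `shardInfo` metadata with a matching cluster ID. A hypothetical call site (the `libp2p` handle and the protocol string are illustrative, not taken from this diff):

```js
// Illustrative only: narrow currently connected peers to those usable for a protocol/shard.
const shardInfo = { clusterId: 1, shards: [0, 1] };
const candidatePeers = await getConnectedPeersForProtocolAndShard(
    libp2p.getConnections(),                    // all current connections
    libp2p.peerStore,                           // address book holding peer metadata
    ["/vac/waku/filter-subscribe/2.0.0-beta1"], // hypothetical protocol id
    shardInfo                                   // optional; omit to skip the cluster-ID check
);
```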
@@ -287,21 +211,23 @@ class BaseProtocol {
     async getStream(peer) {
         return this.streamManager.getStream(peer);
     }
-
-        return this.components.peerStore;
-    }
+    //TODO: move to SDK
     /**
      * Returns known peers from the address book (`libp2p.peerStore`) that support
      * the class protocol. Waku may or may not be currently connected to these
      * peers.
      */
     async allPeers() {
-        return getPeersForProtocol(this.peerStore, [this.multicodec]);
+        return getPeersForProtocol(this.components.peerStore, [this.multicodec]);
     }
-    async connectedPeers() {
+    async connectedPeers(withOpenStreams = false) {
         const peers = await this.allPeers();
         return peers.filter((peer) => {
-
+            const connections = this.components.connectionManager.getConnections(peer.id);
+            if (withOpenStreams) {
+                return connections.some((c) => c.streams.some((s) => s.protocol === this.multicodec));
+            }
+            return connections.length > 0;
         });
     }
     /**
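`connectedPeers()` previously returned every known peer with at least one open connection; the new optional `withOpenStreams` flag additionally requires a stream already negotiated on this protocol's multicodec. A usage sketch, assuming `protocol` is an instance of a `BaseProtocol` subclass (the variable name is illustrative):

```js
// Peers with any open connection that advertise this protocol.
const connected = await protocol.connectedPeers();

// Stricter: only peers that also have an open stream on protocol.multicodec.
const withStreams = await protocol.connectedPeers(true);

console.log(`${withStreams.length} of ${connected.length} connected peers have an open stream`);
```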
@@ -309,19 +235,18 @@ class BaseProtocol {
      *
      * @param numPeers - The total number of peers to retrieve. If 0, all peers are returned.
      * @param maxBootstrapPeers - The maximum number of bootstrap peers to retrieve.
-
-
-     */
+     * @returns A list of peers that support the protocol sorted by latency. By default, returns all peers available, including bootstrap.
+     */
     async getPeers({ numPeers, maxBootstrapPeers } = {
-        maxBootstrapPeers:
+        maxBootstrapPeers: 0,
         numPeers: 0
     }) {
         // Retrieve all connected peers that support the protocol & shard (if configured)
-        const
+        const allAvailableConnectedPeers = await this.connectedPeers();
         // Filter the peers based on discovery & number of peers requested
-        const filteredPeers = filterPeersByDiscovery(
+        const filteredPeers = filterPeersByDiscovery(allAvailableConnectedPeers, numPeers, maxBootstrapPeers);
         // Sort the peers by latency
-        const sortedFilteredPeers = await sortPeersByLatency(this.peerStore, filteredPeers);
+        const sortedFilteredPeers = await sortPeersByLatency(this.components.peerStore, filteredPeers);
         if (sortedFilteredPeers.length === 0) {
             this.log.warn("No peers found. Ensure you have a connection to the network.");
         }
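`getPeers()` now defaults `maxBootstrapPeers` to 0 as well and, per the new doc comment, returns all available connected peers, bootstrap included, sorted by latency. A sketch using the same hypothetical `protocol` instance as above:

```js
// Default: every connected peer supporting the protocol, sorted by latency.
const allPeers = await protocol.getPeers();

// Explicit caps: at most 3 peers overall, at most 1 of them discovered via bootstrap.
const fewPeers = await protocol.getPeers({ numPeers: 3, maxBootstrapPeers: 1 });
```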
@@ -332,4 +257,4 @@ class BaseProtocol {
     }
 }
 
-export { BaseProtocol as B, StreamManager as S
+export { BaseProtocol as B, StreamManager as S };
@@ -699,303 +699,6 @@ function fromString(string, encoding = 'utf8') {
     return base.decoder.decode(`${base.prefix}${string}`); // eslint-disable-line @typescript-eslint/restrict-template-expressions
 }
 
-// copied from utils
-function isBytes(a) {
-    return (a instanceof Uint8Array ||
-        (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array'));
-}
-function bytes(b, ...lengths) {
-    if (!isBytes(b))
-        throw new Error('Uint8Array expected');
-    if (lengths.length > 0 && !lengths.includes(b.length))
-        throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`);
-}
-function exists(instance, checkFinished = true) {
-    if (instance.destroyed)
-        throw new Error('Hash instance has been destroyed');
-    if (checkFinished && instance.finished)
-        throw new Error('Hash#digest() has already been called');
-}
-function output(out, instance) {
-    bytes(out);
-    const min = instance.outputLen;
-    if (out.length < min) {
-        throw new Error(`digestInto() expects output buffer of length at least ${min}`);
-    }
-}
-
-/*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */
-// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+.
-// node.js versions earlier than v19 don't declare it in global scope.
-// For node.js, package.json#exports field mapping rewrites import
-// from `crypto` to `cryptoNode`, which imports native module.
-// Makes the utils un-importable in browsers without a bundler.
-// Once node.js 18 is deprecated (2025-04-30), we can just drop the import.
-// Cast array to view
-const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
-// The rotate right (circular right shift) operation for uint32
-const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift);
-new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44;
-/**
- * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99])
- */
-function utf8ToBytes$1(str) {
-    if (typeof str !== 'string')
-        throw new Error(`utf8ToBytes expected string, got ${typeof str}`);
-    return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809
-}
-/**
- * Normalizes (non-hex) string or Uint8Array to Uint8Array.
- * Warning: when Uint8Array is passed, it would NOT get copied.
- * Keep in mind for future mutable operations.
- */
-function toBytes(data) {
-    if (typeof data === 'string')
-        data = utf8ToBytes$1(data);
-    bytes(data);
-    return data;
-}
-// For runtime check if class implements interface
-class Hash {
-    // Safe version that clones internal state
-    clone() {
-        return this._cloneInto();
-    }
-}
-function wrapConstructor(hashCons) {
-    const hashC = (msg) => hashCons().update(toBytes(msg)).digest();
-    const tmp = hashCons();
-    hashC.outputLen = tmp.outputLen;
-    hashC.blockLen = tmp.blockLen;
-    hashC.create = () => hashCons();
-    return hashC;
-}
-
-// Polyfill for Safari 14
-function setBigUint64(view, byteOffset, value, isLE) {
-    if (typeof view.setBigUint64 === 'function')
-        return view.setBigUint64(byteOffset, value, isLE);
-    const _32n = BigInt(32);
-    const _u32_max = BigInt(0xffffffff);
-    const wh = Number((value >> _32n) & _u32_max);
-    const wl = Number(value & _u32_max);
-    const h = isLE ? 4 : 0;
-    const l = isLE ? 0 : 4;
-    view.setUint32(byteOffset + h, wh, isLE);
-    view.setUint32(byteOffset + l, wl, isLE);
-}
-// Choice: a ? b : c
-const Chi = (a, b, c) => (a & b) ^ (~a & c);
-// Majority function, true if any two inpust is true
-const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c);
-/**
- * Merkle-Damgard hash construction base class.
- * Could be used to create MD5, RIPEMD, SHA1, SHA2.
- */
-class HashMD extends Hash {
-    constructor(blockLen, outputLen, padOffset, isLE) {
-        super();
-        this.blockLen = blockLen;
-        this.outputLen = outputLen;
-        this.padOffset = padOffset;
-        this.isLE = isLE;
-        this.finished = false;
-        this.length = 0;
-        this.pos = 0;
-        this.destroyed = false;
-        this.buffer = new Uint8Array(blockLen);
-        this.view = createView(this.buffer);
-    }
-    update(data) {
-        exists(this);
-        const { view, buffer, blockLen } = this;
-        data = toBytes(data);
-        const len = data.length;
-        for (let pos = 0; pos < len;) {
-            const take = Math.min(blockLen - this.pos, len - pos);
-            // Fast path: we have at least one block in input, cast it to view and process
-            if (take === blockLen) {
-                const dataView = createView(data);
-                for (; blockLen <= len - pos; pos += blockLen)
-                    this.process(dataView, pos);
-                continue;
-            }
-            buffer.set(data.subarray(pos, pos + take), this.pos);
-            this.pos += take;
-            pos += take;
-            if (this.pos === blockLen) {
-                this.process(view, 0);
-                this.pos = 0;
-            }
-        }
-        this.length += data.length;
-        this.roundClean();
-        return this;
-    }
-    digestInto(out) {
-        exists(this);
-        output(out, this);
-        this.finished = true;
-        // Padding
-        // We can avoid allocation of buffer for padding completely if it
-        // was previously not allocated here. But it won't change performance.
-        const { buffer, view, blockLen, isLE } = this;
-        let { pos } = this;
-        // append the bit '1' to the message
-        buffer[pos++] = 0b10000000;
-        this.buffer.subarray(pos).fill(0);
-        // we have less than padOffset left in buffer, so we cannot put length in
-        // current block, need process it and pad again
-        if (this.padOffset > blockLen - pos) {
-            this.process(view, 0);
-            pos = 0;
-        }
-        // Pad until full block byte with zeros
-        for (let i = pos; i < blockLen; i++)
-            buffer[i] = 0;
-        // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that
-        // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen.
-        // So we just write lowest 64 bits of that value.
-        setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE);
-        this.process(view, 0);
-        const oview = createView(out);
-        const len = this.outputLen;
-        // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT
-        if (len % 4)
-            throw new Error('_sha2: outputLen should be aligned to 32bit');
-        const outLen = len / 4;
-        const state = this.get();
-        if (outLen > state.length)
-            throw new Error('_sha2: outputLen bigger than state');
-        for (let i = 0; i < outLen; i++)
-            oview.setUint32(4 * i, state[i], isLE);
-    }
-    digest() {
-        const { buffer, outputLen } = this;
-        this.digestInto(buffer);
-        const res = buffer.slice(0, outputLen);
-        this.destroy();
-        return res;
-    }
-    _cloneInto(to) {
-        to || (to = new this.constructor());
-        to.set(...this.get());
-        const { blockLen, buffer, length, finished, destroyed, pos } = this;
-        to.length = length;
-        to.pos = pos;
-        to.finished = finished;
-        to.destroyed = destroyed;
-        if (length % blockLen)
-            to.buffer.set(buffer);
-        return to;
-    }
-}
-
-// SHA2-256 need to try 2^128 hashes to execute birthday attack.
-// BTC network is doing 2^67 hashes/sec as per early 2023.
-// Round constants:
-// first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311)
-// prettier-ignore
-const SHA256_K = /* @__PURE__ */ new Uint32Array([
-    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
-    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
-    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
-    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
-    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
-    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
-    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
-    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
-]);
-// Initial state:
-// first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19
-// prettier-ignore
-const SHA256_IV = /* @__PURE__ */ new Uint32Array([
-    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
-]);
-// Temporary buffer, not used to store anything between runs
-// Named this way because it matches specification.
-const SHA256_W = /* @__PURE__ */ new Uint32Array(64);
-class SHA256 extends HashMD {
-    constructor() {
-        super(64, 32, 8, false);
-        // We cannot use array here since array allows indexing by variable
-        // which means optimizer/compiler cannot use registers.
-        this.A = SHA256_IV[0] | 0;
-        this.B = SHA256_IV[1] | 0;
-        this.C = SHA256_IV[2] | 0;
-        this.D = SHA256_IV[3] | 0;
-        this.E = SHA256_IV[4] | 0;
-        this.F = SHA256_IV[5] | 0;
-        this.G = SHA256_IV[6] | 0;
-        this.H = SHA256_IV[7] | 0;
-    }
-    get() {
-        const { A, B, C, D, E, F, G, H } = this;
-        return [A, B, C, D, E, F, G, H];
-    }
-    // prettier-ignore
-    set(A, B, C, D, E, F, G, H) {
-        this.A = A | 0;
-        this.B = B | 0;
-        this.C = C | 0;
-        this.D = D | 0;
-        this.E = E | 0;
-        this.F = F | 0;
-        this.G = G | 0;
-        this.H = H | 0;
-    }
-    process(view, offset) {
-        // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array
-        for (let i = 0; i < 16; i++, offset += 4)
-            SHA256_W[i] = view.getUint32(offset, false);
-        for (let i = 16; i < 64; i++) {
-            const W15 = SHA256_W[i - 15];
-            const W2 = SHA256_W[i - 2];
-            const s0 = rotr(W15, 7) ^ rotr(W15, 18) ^ (W15 >>> 3);
-            const s1 = rotr(W2, 17) ^ rotr(W2, 19) ^ (W2 >>> 10);
-            SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0;
-        }
-        // Compression function main loop, 64 rounds
-        let { A, B, C, D, E, F, G, H } = this;
-        for (let i = 0; i < 64; i++) {
-            const sigma1 = rotr(E, 6) ^ rotr(E, 11) ^ rotr(E, 25);
-            const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0;
-            const sigma0 = rotr(A, 2) ^ rotr(A, 13) ^ rotr(A, 22);
-            const T2 = (sigma0 + Maj(A, B, C)) | 0;
-            H = G;
-            G = F;
-            F = E;
-            E = (D + T1) | 0;
-            D = C;
-            C = B;
-            B = A;
-            A = (T1 + T2) | 0;
-        }
-        // Add the compressed chunk to the current hash value
-        A = (A + this.A) | 0;
-        B = (B + this.B) | 0;
-        C = (C + this.C) | 0;
-        D = (D + this.D) | 0;
-        E = (E + this.E) | 0;
-        F = (F + this.F) | 0;
-        G = (G + this.G) | 0;
-        H = (H + this.H) | 0;
-        this.set(A, B, C, D, E, F, G, H);
-    }
-    roundClean() {
-        SHA256_W.fill(0);
-    }
-    destroy() {
-        this.set(0, 0, 0, 0, 0, 0, 0, 0);
-        this.buffer.fill(0);
-    }
-}
-/**
- * SHA2-256 hash function
- * @param message - data that would be hashed
- */
-const sha256 = /* @__PURE__ */ wrapConstructor(() => new SHA256());
-
 var Protocols;
 (function (Protocols) {
     Protocols["Relay"] = "relay";
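The block removed above is a vendored copy of noble-hashes' SHA-256; `wrapConstructor` turns the `SHA256` class into a callable that also exposes `create()` for incremental hashing. Under those (now removed) definitions, usage looked like:

```js
// One-shot: accepts a string or Uint8Array (via toBytes) and returns a 32-byte Uint8Array digest.
const digest = sha256("waku");
console.log(digest.length); // 32

// Incremental: create() returns a fresh SHA256 instance; update() chains, digest() finalizes.
const same = sha256.create().update("wa").update("ku").digest();
```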
@@ -1110,11 +813,6 @@ var EConnectionStateEvents;
     EConnectionStateEvents["CONNECTION_STATUS"] = "waku:connection";
 })(EConnectionStateEvents || (EConnectionStateEvents = {}));
 
-/**
- * The default cluster ID for The Waku Network
- */
-const DEFAULT_CLUSTER_ID = 1;
-
 var HealthStatus;
 (function (HealthStatus) {
     HealthStatus["Unhealthy"] = "Unhealthy";
@@ -1160,147 +858,6 @@ function concat(byteArrays, totalLength) {
     return res;
 }
 
-const singleShardInfoToPubsubTopic = (shardInfo) => {
-    if (shardInfo.shard === undefined)
-        throw new Error("Invalid shard");
-    return `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${shardInfo.shard}`;
-};
-const shardInfoToPubsubTopics = (shardInfo) => {
-    if ("contentTopics" in shardInfo && shardInfo.contentTopics) {
-        // Autosharding: explicitly defined content topics
-        return Array.from(new Set(shardInfo.contentTopics.map((contentTopic) => contentTopicToPubsubTopic(contentTopic, shardInfo.clusterId))));
-    }
-    else if ("shards" in shardInfo) {
-        // Static sharding
-        if (shardInfo.shards === undefined)
-            throw new Error("Invalid shard");
-        return Array.from(new Set(shardInfo.shards.map((index) => `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${index}`)));
-    }
-    else if ("application" in shardInfo && "version" in shardInfo) {
-        // Autosharding: single shard from application and version
-        return [
-            contentTopicToPubsubTopic(`/${shardInfo.application}/${shardInfo.version}/default/default`, shardInfo.clusterId)
-        ];
-    }
-    else {
-        throw new Error("Missing required configuration in shard parameters");
-    }
-};
-const pubsubTopicToSingleShardInfo = (pubsubTopics) => {
-    const parts = pubsubTopics.split("/");
-    if (parts.length != 6 ||
-        parts[1] !== "waku" ||
-        parts[2] !== "2" ||
-        parts[3] !== "rs")
-        throw new Error("Invalid pubsub topic");
-    const clusterId = parseInt(parts[4]);
-    const shard = parseInt(parts[5]);
-    if (isNaN(clusterId) || isNaN(shard))
-        throw new Error("Invalid clusterId or shard");
-    return {
-        clusterId,
-        shard
-    };
-};
-const pubsubTopicsToShardInfo = (pubsubTopics) => {
-    const shardInfoSet = new Set();
-    const clusterIds = new Set();
-    for (const topic of pubsubTopics) {
-        const { clusterId, shard } = pubsubTopicToSingleShardInfo(topic);
-        shardInfoSet.add(`${clusterId}:${shard}`);
-        clusterIds.add(clusterId);
-    }
-    if (shardInfoSet.size === 0) {
-        throw new Error("No valid pubsub topics provided");
-    }
-    if (clusterIds.size > 1) {
-        throw new Error("Pubsub topics from multiple cluster IDs are not supported");
-    }
-    const clusterId = clusterIds.values().next().value;
-    const shards = Array.from(shardInfoSet).map((info) => parseInt(info.split(":")[1]));
-    return {
-        clusterId,
-        shards
-    };
-};
-/**
- * Given a string, will throw an error if it is not formatted as a valid content topic for autosharding based on https://rfc.vac.dev/spec/51/
- * @param contentTopic String to validate
- * @returns Object with each content topic field as an attribute
- */
-function ensureValidContentTopic(contentTopic) {
-    const parts = contentTopic.split("/");
-    if (parts.length < 5 || parts.length > 6) {
-        throw Error("Content topic format is invalid");
-    }
-    // Validate generation field if present
-    let generation = 0;
-    if (parts.length == 6) {
-        generation = parseInt(parts[1]);
-        if (isNaN(generation)) {
-            throw new Error("Invalid generation field in content topic");
-        }
-        if (generation > 0) {
-            throw new Error("Generation greater than 0 is not supported");
-        }
-    }
-    // Validate remaining fields
-    const fields = parts.splice(-4);
-    // Validate application field
-    if (fields[0].length == 0) {
-        throw new Error("Application field cannot be empty");
-    }
-    // Validate version field
-    if (fields[1].length == 0) {
-        throw new Error("Version field cannot be empty");
-    }
-    // Validate topic name field
-    if (fields[2].length == 0) {
-        throw new Error("Topic name field cannot be empty");
-    }
-    // Validate encoding field
-    if (fields[3].length == 0) {
-        throw new Error("Encoding field cannot be empty");
-    }
-    return {
-        generation,
-        application: fields[0],
-        version: fields[1],
-        topicName: fields[2],
-        encoding: fields[3]
-    };
-}
-/**
- * Given a string, determines which autoshard index to use for its pubsub topic.
- * Based on the algorithm described in the RFC: https://rfc.vac.dev/spec/51//#algorithm
- */
-function contentTopicToShardIndex(contentTopic, networkShards = 8) {
-    const { application, version } = ensureValidContentTopic(contentTopic);
-    const digest = sha256(concat([utf8ToBytes(application), utf8ToBytes(version)]));
-    const dataview = new DataView(digest.buffer.slice(-8));
-    return Number(dataview.getBigUint64(0, false) % BigInt(networkShards));
-}
-function contentTopicToPubsubTopic(contentTopic, clusterId = DEFAULT_CLUSTER_ID, networkShards = 8) {
-    if (!contentTopic) {
-        throw Error("Content topic must be specified");
-    }
-    const shardIndex = contentTopicToShardIndex(contentTopic, networkShards);
-    return `/waku/2/rs/${clusterId}/${shardIndex}`;
-}
-/**
- * Used when creating encoders/decoders to determine which pubsub topic to use
- */
-function determinePubsubTopic(contentTopic, 
-// TODO: make it accept ShardInfo https://github.com/waku-org/js-waku/issues/2086
-pubsubTopicShardInfo) {
-    if (typeof pubsubTopicShardInfo == "string") {
-        return pubsubTopicShardInfo;
-    }
-    return pubsubTopicShardInfo?.shard !== undefined
-        ? singleShardInfoToPubsubTopic(pubsubTopicShardInfo)
-        : contentTopicToPubsubTopic(contentTopic, pubsubTopicShardInfo?.clusterId ?? DEFAULT_CLUSTER_ID);
-}
-
 function getDefaultExportFromCjs (x) {
     return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
 }
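The removed helpers implement RFC 51 sharding: for autosharding, the shard index is derived from the last 8 bytes of `sha256(application || version)` modulo the shard count (8 by default), and pubsub topics take the form `/waku/2/rs/<clusterId>/<shard>` with the (also removed) `DEFAULT_CLUSTER_ID = 1`. A sketch using those functions as defined above; the content topic is illustrative:

```js
// Autosharding: map a content topic to its shard index and pubsub topic.
const contentTopic = "/my-app/1/chat/proto";
const shard = contentTopicToShardIndex(contentTopic);        // integer in 0..7 for 8 network shards
const pubsubTopic = contentTopicToPubsubTopic(contentTopic); // "/waku/2/rs/1/<shard>"

// Static sharding: explicit shard indices expand to one pubsub topic each.
shardInfoToPubsubTopics({ clusterId: 1, shards: [0, 4] });   // ["/waku/2/rs/1/0", "/waku/2/rs/1/4"]

// Parsing goes the other way.
pubsubTopicToSingleShardInfo("/waku/2/rs/1/4");              // { clusterId: 1, shard: 4 }
```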
@@ -2031,6 +1588,7 @@ var debug = /*@__PURE__*/getDefaultExportFromCjs(browserExports);
 
 const APP_NAME = "waku";
 class Logger {
+    _debug;
     _info;
     _warn;
     _error;
@@ -2038,10 +1596,14 @@ class Logger {
         return prefix ? `${APP_NAME}:${level}:${prefix}` : `${APP_NAME}:${level}`;
     }
     constructor(prefix) {
+        this._debug = debug(Logger.createDebugNamespace("debug", prefix));
         this._info = debug(Logger.createDebugNamespace("info", prefix));
         this._warn = debug(Logger.createDebugNamespace("warn", prefix));
         this._error = debug(Logger.createDebugNamespace("error", prefix));
     }
+    get debug() {
+        return this._debug;
+    }
     get info() {
         return this._info;
     }
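`Logger` gains a `debug` level next to `info`, `warn`, and `error`; each level maps to its own `debug`-package namespace of the form `waku:<level>:<prefix>`. A usage sketch (the prefix is illustrative):

```js
// Enable output with e.g. DEBUG=waku:* (Node) or localStorage.debug = "waku:*" (browser).
const log = new Logger("filter");
log.debug("verbose detail, new in this release"); // namespace waku:debug:filter
log.info("informational message");                // namespace waku:info:filter
log.warn("something looks off");                  // namespace waku:warn:filter
log.error("operation failed");                    // namespace waku:error:filter
```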
@@ -2057,4 +1619,4 @@ class Logger {
     }
 }
 
-export { EPeersByDiscoveryEvents as E, HealthStatus as H, Logger as L, ProtocolError as P, Tags as T, allocUnsafe as a, alloc as b, Protocols as c, EConnectionStateEvents as d,
+export { EPeersByDiscoveryEvents as E, HealthStatus as H, Logger as L, ProtocolError as P, Tags as T, allocUnsafe as a, alloc as b, Protocols as c, EConnectionStateEvents as d, concat as e, fromString as f, bytesToUtf8 as g, utf8ToBytes as u };