@waku/core 0.0.31-ce62600.0 → 0.0.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +29 -0
- package/bundle/{base_protocol-C6HnrRx8.js → base_protocol-C47QkJ2o.js} +22 -25
- package/bundle/{index-DnW8ifxc.js → index-tdQNdKHx.js} +62 -57
- package/bundle/index.js +381 -315
- package/bundle/lib/base_protocol.js +2 -2
- package/bundle/lib/message/version_0.js +2 -2
- package/bundle/{version_0-DQ9xsSLk.js → version_0-BrbNEwD-.js} +154 -308
- package/dist/.tsbuildinfo +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/lib/base_protocol.d.ts +3 -4
- package/dist/lib/base_protocol.js +4 -8
- package/dist/lib/base_protocol.js.map +1 -1
- package/dist/lib/connection_manager.d.ts +9 -6
- package/dist/lib/connection_manager.js +70 -45
- package/dist/lib/connection_manager.js.map +1 -1
- package/dist/lib/filter/filter_rpc.js.map +1 -1
- package/dist/lib/filter/index.d.ts +4 -3
- package/dist/lib/filter/index.js +37 -35
- package/dist/lib/filter/index.js.map +1 -1
- package/dist/lib/health_manager.d.ts +14 -0
- package/dist/lib/health_manager.js +70 -0
- package/dist/lib/health_manager.js.map +1 -0
- package/dist/lib/keep_alive_manager.d.ts +13 -7
- package/dist/lib/keep_alive_manager.js +9 -9
- package/dist/lib/keep_alive_manager.js.map +1 -1
- package/dist/lib/light_push/index.d.ts +3 -2
- package/dist/lib/light_push/index.js +17 -3
- package/dist/lib/light_push/index.js.map +1 -1
- package/dist/lib/light_push/push_rpc.js.map +1 -1
- package/dist/lib/light_push/utils.d.ts +3 -0
- package/dist/lib/light_push/utils.js +26 -0
- package/dist/lib/light_push/utils.js.map +1 -0
- package/dist/lib/message/version_0.js.map +1 -1
- package/dist/lib/metadata/index.d.ts +2 -2
- package/dist/lib/metadata/index.js +26 -26
- package/dist/lib/metadata/index.js.map +1 -1
- package/dist/lib/store/index.d.ts +5 -44
- package/dist/lib/store/index.js +39 -45
- package/dist/lib/store/index.js.map +1 -1
- package/dist/lib/store/rpc.d.ts +22 -0
- package/dist/lib/store/rpc.js +74 -0
- package/dist/lib/store/rpc.js.map +1 -0
- package/dist/lib/stream_manager/index.d.ts +1 -0
- package/dist/lib/stream_manager/index.js +2 -0
- package/dist/lib/stream_manager/index.js.map +1 -0
- package/dist/lib/{stream_manager.js → stream_manager/stream_manager.js} +1 -1
- package/dist/lib/stream_manager/stream_manager.js.map +1 -0
- package/dist/lib/stream_manager/utils.d.ts +2 -0
- package/dist/lib/stream_manager/utils.js +19 -0
- package/dist/lib/stream_manager/utils.js.map +1 -0
- package/package.json +130 -1
- package/src/index.ts +3 -3
- package/src/lib/base_protocol.ts +7 -10
- package/src/lib/connection_manager.ts +100 -57
- package/src/lib/filter/filter_rpc.ts +21 -19
- package/src/lib/filter/index.ts +54 -56
- package/src/lib/health_manager.ts +90 -0
- package/src/lib/keep_alive_manager.ts +27 -18
- package/src/lib/light_push/index.ts +21 -11
- package/src/lib/light_push/push_rpc.ts +5 -5
- package/src/lib/light_push/utils.ts +31 -0
- package/src/lib/message/version_0.ts +17 -15
- package/src/lib/metadata/index.ts +43 -44
- package/src/lib/store/index.ts +54 -94
- package/src/lib/store/rpc.ts +92 -0
- package/src/lib/stream_manager/index.ts +1 -0
- package/src/lib/{stream_manager.ts → stream_manager/stream_manager.ts} +3 -2
- package/src/lib/stream_manager/utils.ts +22 -0
- package/dist/lib/store/history_rpc.d.ts +0 -27
- package/dist/lib/store/history_rpc.js +0 -72
- package/dist/lib/store/history_rpc.js.map +0 -1
- package/dist/lib/stream_manager.js.map +0 -1
- package/src/lib/store/history_rpc.ts +0 -93
- /package/dist/lib/{stream_manager.d.ts → stream_manager/stream_manager.d.ts} +0 -0
package/bundle/index.js
CHANGED
@@ -1,8 +1,8 @@
-import { v as version_0, e as encodingLength, a as encode$1, d as decode$1, M as MessagePush, F as FilterSubscribeRequest, b as FilterSubscribeResponse$1, P as PushRpc$1, c as PushResponse,
-export {
-import { a as allocUnsafe, b as alloc,
-import { B as BaseProtocol, d as decodeRelayShard, e as encodeRelayShard } from './base_protocol-
-export { S as StreamManager } from './base_protocol-
+import { v as version_0, e as encodingLength, a as encode$1, d as decode$1, M as MessagePush, F as FilterSubscribeRequest, b as FilterSubscribeResponse$1, P as PushRpc$1, c as PushResponse, S as StoreQueryRequest$1, f as StoreQueryResponse$1, g as createEncoder, W as WakuMetadataRequest, h as WakuMetadataResponse } from './version_0-BrbNEwD-.js';
+export { i as createDecoder } from './version_0-BrbNEwD-.js';
+import { a as allocUnsafe, b as alloc, L as Logger, P as ProtocolError, c as Protocols, u as utf8ToBytes, p as pubsubTopicToSingleShardInfo, T as Tags, E as EPeersByDiscoveryEvents, s as shardInfoToPubsubTopics, d as EConnectionStateEvents, H as HealthStatus, e as pubsubTopicsToShardInfo } from './index-tdQNdKHx.js';
+import { B as BaseProtocol, d as decodeRelayShard, e as encodeRelayShard } from './base_protocol-C47QkJ2o.js';
+export { S as StreamManager } from './base_protocol-C47QkJ2o.js';
 
 const MB = 1024 ** 2;
 const SIZE_CAP_IN_MB = 1;
@@ -743,74 +743,35 @@ encode.single = (chunk, options) => {
 };
 
 /**
- *
- * @typedef {Error} Err
- * @property {string} message
+ * The reported length of the next data message was not a positive integer
  */
-
+class InvalidMessageLengthError extends Error {
+    name = 'InvalidMessageLengthError';
+    code = 'ERR_INVALID_MSG_LENGTH';
+}
 /**
- *
- *
- * @param {Extensions} props
- * @returns {Error & Extensions}
+ * The reported length of the next data message was larger than the configured
+ * max allowable value
  */
-
-
-
-            value: props[key],
-            enumerable: true,
-            configurable: true,
-        });
-    }
-
-    return obj;
+class InvalidDataLengthError extends Error {
+    name = 'InvalidDataLengthError';
+    code = 'ERR_MSG_DATA_TOO_LONG';
 }
-
 /**
- *
- *
- * @param {string|Extensions} code - A string code or props to set on the error
- * @param {Extensions} [props] - Props to set on the error
- * @returns {Error & Extensions}
+ * The varint used to specify the length of the next data message contained more
+ * bytes than the configured max allowable value
  */
-
-
-
-
-
-
-
-
-
-
-        props = code;
-        code = '';
-    }
-
-    if (code) {
-        props.code = code;
-    }
-
-    try {
-        return assign(err, props);
-    } catch (_) {
-        props.message = err.message;
-        props.stack = err.stack;
-
-        const ErrClass = function () {};
-
-        ErrClass.prototype = Object.create(Object.getPrototypeOf(err));
-
-        // @ts-ignore
-        const output = assign(new ErrClass(), props);
-
-        return output;
-    }
+class InvalidDataLengthLengthError extends Error {
+    name = 'InvalidDataLengthLengthError';
+    code = 'ERR_MSG_LENGTH_TOO_LONG';
+}
+/**
+ * The incoming stream ended before the expected number of bytes were read
+ */
+class UnexpectedEOFError extends Error {
+    name = 'UnexpectedEOFError';
+    code = 'ERR_UNEXPECTED_EOF';
 }
-
-var errCode = createError;
-
-var errCode$1 = /*@__PURE__*/getDefaultExportFromCjs(errCode);
 
 /* eslint max-depth: ["error", 6] */
 // Maximum length of the length section of the message
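The classes above replace the previous `err-code` helper in the bundled length-prefixed decoder: each failure mode is now a plain `Error` subclass carrying a stable `name` and `code`. A minimal sketch of how a caller might branch on those properties; `drainSafely` and its argument are hypothetical, not part of the bundle:

```js
// Hypothetical consumer of the length-prefixed decode() shown in this diff.
// Each error exposes both `name` and `code`, so either can be matched.
async function drainSafely(decodedSource) {
  try {
    for await (const chunk of decodedSource) {
      // process chunk...
    }
  } catch (err) {
    if (err.code === 'ERR_MSG_DATA_TOO_LONG') {
      // message exceeded maxDataLength
      return null;
    }
    if (err.name === 'UnexpectedEOFError') {
      // stream ended before the announced number of bytes arrived
      return null;
    }
    throw err;
  }
}
```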
@@ -842,10 +803,10 @@ function decode(source, options) {
         try {
             dataLength = lengthDecoder(buffer);
             if (dataLength < 0) {
-                throw
+                throw new InvalidMessageLengthError('Invalid message length');
             }
             if (dataLength > maxDataLength) {
-                throw
+                throw new InvalidDataLengthError('Message length too long');
             }
             const dataLengthLength = lengthDecoder.bytes;
             buffer.consume(dataLengthLength);
@@ -857,7 +818,7 @@ function decode(source, options) {
         catch (err) {
             if (err instanceof RangeError) {
                 if (buffer.byteLength > maxLengthLength) {
-                    throw
+                    throw new InvalidDataLengthLengthError('Message length length too long');
                 }
                 break;
             }
@@ -886,7 +847,7 @@ function decode(source, options) {
                 yield* maybeYield();
             }
             if (buffer.byteLength > 0) {
-                throw
+                throw new UnexpectedEOFError('Unexpected end of input');
             }
         })();
     }
@@ -896,7 +857,7 @@ function decode(source, options) {
                 yield* maybeYield();
             }
             if (buffer.byteLength > 0) {
-                throw
+                throw new UnexpectedEOFError('Unexpected end of input');
             }
         })();
     }
@@ -1484,16 +1445,6 @@ function v4(options, buf, offset) {
     rnds[6] = rnds[6] & 0x0f | 0x40;
     rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
 
-    if (buf) {
-        offset = offset || 0;
-
-        for (let i = 0; i < 16; ++i) {
-            buf[offset + i] = rnds[i];
-        }
-
-        return buf;
-    }
-
     return unsafeStringify(rnds);
 }
 
@@ -1611,9 +1562,11 @@ const FilterCodecs = {
 };
 class FilterCore extends BaseProtocol {
     handleIncomingMessage;
-
-
+    pubsubTopics;
+    constructor(handleIncomingMessage, pubsubTopics, libp2p) {
+        super(FilterCodecs.SUBSCRIBE, libp2p.components, log$6, pubsubTopics);
         this.handleIncomingMessage = handleIncomingMessage;
+        this.pubsubTopics = pubsubTopics;
         libp2p
             .handle(FilterCodecs.PUSH, this.onRequest.bind(this), {
             maxInboundStreams: 100
@@ -1622,35 +1575,6 @@ class FilterCore extends BaseProtocol {
             log$6.error("Failed to register ", FilterCodecs.PUSH, e);
         });
     }
-    onRequest(streamData) {
-        const { connection, stream } = streamData;
-        const { remotePeer } = connection;
-        log$6.info(`Received message from ${remotePeer.toString()}`);
-        try {
-            pipe(stream, decode, async (source) => {
-                for await (const bytes of source) {
-                    const response = FilterPushRpc.decode(bytes.slice());
-                    const { pubsubTopic, wakuMessage } = response;
-                    if (!wakuMessage) {
-                        log$6.error("Received empty message");
-                        return;
-                    }
-                    if (!pubsubTopic) {
-                        log$6.error("Pubsub topic missing from push message");
-                        return;
-                    }
-                    await this.handleIncomingMessage(pubsubTopic, wakuMessage);
-                }
-            }).then(() => {
-                log$6.info("Receiving pipe closed.");
-            }, (e) => {
-                log$6.error("Error with receiving pipe", e);
-            });
-        }
-        catch (e) {
-            log$6.error("Error decoding message", e);
-        }
-    }
     async subscribe(pubsubTopic, peer, contentTopics) {
         const stream = await this.getStream(peer);
         const request = FilterSubscribeRpc.createSubscribeRequest(pubsubTopic, contentTopics);
@@ -1694,7 +1618,7 @@ class FilterCore extends BaseProtocol {
             return {
                 success: null,
                 failure: {
-                    error: ProtocolError.
+                    error: ProtocolError.NO_STREAM_AVAILABLE,
                     peerId: peer.id
                 }
             };
@@ -1725,7 +1649,7 @@ class FilterCore extends BaseProtocol {
         if (!res || !res.length) {
             return {
                 failure: {
-                    error: ProtocolError.
+                    error: ProtocolError.NO_RESPONSE,
                     peerId: peer.id
                 },
                 success: null
@@ -1757,7 +1681,7 @@ class FilterCore extends BaseProtocol {
             return {
                 success: null,
                 failure: {
-                    error: ProtocolError.
+                    error: ProtocolError.NO_STREAM_AVAILABLE,
                     peerId: peer.id
                 }
             };
@@ -1781,7 +1705,7 @@ class FilterCore extends BaseProtocol {
             return {
                 success: null,
                 failure: {
-                    error: ProtocolError.
+                    error: ProtocolError.NO_RESPONSE,
                     peerId: peer.id
                 }
             };
@@ -1802,6 +1726,35 @@ class FilterCore extends BaseProtocol {
             failure: null
         };
     }
+    onRequest(streamData) {
+        const { connection, stream } = streamData;
+        const { remotePeer } = connection;
+        log$6.info(`Received message from ${remotePeer.toString()}`);
+        try {
+            pipe(stream, decode, async (source) => {
+                for await (const bytes of source) {
+                    const response = FilterPushRpc.decode(bytes.slice());
+                    const { pubsubTopic, wakuMessage } = response;
+                    if (!wakuMessage) {
+                        log$6.error("Received empty message");
+                        return;
+                    }
+                    if (!pubsubTopic) {
+                        log$6.error("Pubsub topic missing from push message");
+                        return;
+                    }
+                    await this.handleIncomingMessage(pubsubTopic, wakuMessage, connection.remotePeer.toString());
+                }
+            }).then(() => {
+                log$6.info("Receiving pipe closed.");
+            }, (e) => {
+                log$6.error("Error with receiving pipe", e);
+            });
+        }
+        catch (e) {
+            log$6.error("Error decoding message", e);
+        }
+    }
 }
 
 var index$2 = /*#__PURE__*/Object.freeze({
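`FilterCore` now takes the configured pubsub topics in its constructor and forwards the sender's peer id as a third argument to the push callback. A sketch of constructing it with the new callback shape; `libp2p` and `pubsubTopics` are assumed to come from the embedding node setup:

```js
import { FilterCore } from "@waku/core";

// Sketch only: the third callback argument (the remote peer id as a string)
// is the new piece added in this release.
const filter = new FilterCore(
  async (pubsubTopic, wakuMessage, peerIdStr) => {
    console.log(`filter push on ${pubsubTopic} from ${peerIdStr}`, wakuMessage);
  },
  pubsubTopics,
  libp2p
);
```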
@@ -1840,14 +1793,41 @@ class PushRpc {
     }
 }
 
+// should match nwaku
+// https://github.com/waku-org/nwaku/blob/c3cb06ac6c03f0f382d3941ea53b330f6a8dd127/waku/waku_rln_relay/rln_relay.nim#L309
+// https://github.com/waku-org/nwaku/blob/c3cb06ac6c03f0f382d3941ea53b330f6a8dd127/tests/waku_rln_relay/rln/waku_rln_relay_utils.nim#L20
+const RLN_GENERATION_PREFIX_ERROR = "could not generate rln-v2 proof";
+const isRLNResponseError = (info) => {
+    if (!info) {
+        return false;
+    }
+    return info.includes(RLN_GENERATION_PREFIX_ERROR);
+};
+const matchRLNErrorMessage = (info) => {
+    const rlnErrorMap = {
+        [ProtocolError.RLN_IDENTITY_MISSING]: ProtocolError.RLN_IDENTITY_MISSING,
+        [ProtocolError.RLN_MEMBERSHIP_INDEX]: ProtocolError.RLN_MEMBERSHIP_INDEX,
+        [ProtocolError.RLN_LIMIT_MISSING]: ProtocolError.RLN_LIMIT_MISSING
+    };
+    const infoLowerCase = info.toLowerCase();
+    for (const errorKey in rlnErrorMap) {
+        if (infoLowerCase.includes(errorKey.toLowerCase())) {
+            return rlnErrorMap[errorKey];
+        }
+    }
+    return ProtocolError.RLN_PROOF_GENERATION;
+};
+
 const log$5 = new Logger("light-push");
 const LightPushCodec = "/vac/waku/lightpush/2.0.0-beta1";
 /**
  * Implements the [Waku v2 Light Push protocol](https://rfc.vac.dev/spec/19/).
  */
 class LightPushCore extends BaseProtocol {
-
-
+    pubsubTopics;
+    constructor(pubsubTopics, libp2p) {
+        super(LightPushCodec, libp2p.components, log$5, pubsubTopics);
+        this.pubsubTopics = pubsubTopics;
     }
     async preparePushMessage(encoder, message) {
         try {
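The two helpers added above classify a light-push rejection by inspecting the `info` string returned by the remote nwaku node. They are internal to the bundle (not exported); the snippet below only traces their behaviour, and the `info` value is a made-up example:

```js
// Example rejection string matching the nwaku RLN prefix checked by isRLNResponseError.
const info = "could not generate rln-v2 proof";

if (isRLNResponseError(info)) {
  // Returns one of ProtocolError.RLN_IDENTITY_MISSING / RLN_MEMBERSHIP_INDEX /
  // RLN_LIMIT_MISSING when the info text names them, otherwise RLN_PROOF_GENERATION.
  const failureError = matchRLNErrorMessage(info);
  // LightPushCore.send() then returns { success: null, failure: { error: failureError, peerId } }.
}
```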
@@ -1940,7 +1920,18 @@ class LightPushCore extends BaseProtocol {
             return {
                 success: null,
                 failure: {
-                    error: ProtocolError.
+                    error: ProtocolError.NO_RESPONSE,
+                    peerId: peer.id
+                }
+            };
+        }
+        if (isRLNResponseError(response.info)) {
+            const rlnErrorCase = matchRLNErrorMessage(response.info);
+            log$5.error("Remote peer rejected the message: ", rlnErrorCase);
+            return {
+                success: null,
+                failure: {
+                    error: rlnErrorCase,
                     peerId: peer.id
                 }
             };
@@ -1979,97 +1970,97 @@ function toProtoMessage(wire) {
     return { ...EmptyMessage, ...wire };
 }
 
-
-
-
-
-
-})(PageDirection || (PageDirection = {}));
-class HistoryRpc {
+// https://github.com/waku-org/nwaku/blob/7205f95cff9f49ca0bb762e8fd0bf56a6a7f3b3b/waku/waku_store/common.nim#L12
+const DEFAULT_PAGE_SIZE = 20;
+const MAX_PAGE_SIZE = 100;
+const ONE_MILLION = 1_000000;
+class StoreQueryRequest {
     proto;
     constructor(proto) {
         this.proto = proto;
     }
-
-
-
-
-
-
-
-
-
-
-
+    static create(params) {
+        const request = new StoreQueryRequest({
+            ...params,
+            requestId: v4(),
+            timeStart: params.timeStart
+                ? BigInt(params.timeStart.getTime() * ONE_MILLION)
+                : undefined,
+            timeEnd: params.timeEnd
+                ? BigInt(params.timeEnd.getTime() * ONE_MILLION)
+                : undefined,
+            messageHashes: params.messageHashes || [],
+            paginationLimit: params.paginationLimit
+                ? BigInt(params.paginationLimit)
+                : undefined
         });
-
-
-
-            direction
-        };
-        let startTime, endTime;
-        if (params.startTime) {
-            // milliseconds 10^-3 to nanoseconds 10^-9
-            startTime = BigInt(params.startTime.valueOf()) * OneMillion;
+        // Validate request parameters based on RFC
+        if ((params.pubsubTopic && !params.contentTopics) ||
+            (!params.pubsubTopic && params.contentTopics)) {
+            throw new Error("Both pubsubTopic and contentTopics must be set or unset");
         }
-        if (params.
-
-
+        if (params.messageHashes &&
+            (params.pubsubTopic ||
+                params.contentTopics ||
+                params.timeStart ||
+                params.timeEnd)) {
+            throw new Error("Message hash lookup queries cannot include content filter criteria");
         }
-        return
-            requestId: v4(),
-            query: {
-                pubsubTopic: params.pubsubTopic,
-                contentFilters,
-                pagingInfo,
-                startTime,
-                endTime
-            },
-            response: undefined
-        });
+        return request;
     }
-    decode(bytes) {
-        const res =
-        return new
+    static decode(bytes) {
+        const res = StoreQueryRequest$1.decode(bytes);
+        return new StoreQueryRequest(res);
     }
     encode() {
-        return
+        return StoreQueryRequest$1.encode(this.proto);
     }
 }
-
-
-
-
-
-
-
+class StoreQueryResponse {
+    proto;
+    constructor(proto) {
+        this.proto = proto;
+    }
+    static decode(bytes) {
+        const res = StoreQueryResponse$1.decode(bytes);
+        return new StoreQueryResponse(res);
+    }
+    encode() {
+        return StoreQueryResponse$1.encode(this.proto);
+    }
+    get statusCode() {
+        return this.proto.statusCode;
+    }
+    get statusDesc() {
+        return this.proto.statusDesc;
+    }
+    get messages() {
+        return this.proto.messages;
+    }
+    get paginationCursor() {
+        return this.proto.paginationCursor;
     }
 }
 
-var HistoryError = HistoryResponse.HistoryError;
 const log$4 = new Logger("store");
-const StoreCodec = "/vac/waku/store/
-/**
- * Implements the [Waku v2 Store protocol](https://rfc.vac.dev/spec/13/).
- *
- * The Waku Store protocol can be used to retrieved historical messages.
- */
+const StoreCodec = "/vac/waku/store-query/3.0.0";
 class StoreCore extends BaseProtocol {
-
-
+    pubsubTopics;
+    constructor(pubsubTopics, libp2p) {
+        super(StoreCodec, libp2p.components, log$4, pubsubTopics);
+        this.pubsubTopics = pubsubTopics;
     }
     async *queryPerPage(queryOpts, decoders, peer) {
         if (queryOpts.contentTopics.toString() !==
             Array.from(decoders.keys()).toString()) {
             throw new Error("Internal error, the decoders should match the query's content topics");
         }
-        let currentCursor = queryOpts.
+        let currentCursor = queryOpts.paginationCursor;
         while (true) {
-
-
+            const storeQueryRequest = StoreQueryRequest.create({
+                ...queryOpts,
+                paginationCursor: currentCursor
+            });
             let stream;
             try {
                 stream = await this.getStream(peer);
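The new store-v3 request builder converts `Date` bounds to nanosecond `BigInt`s (ms × 1 000 000) and enforces two constraints from the RFC: pubsub topic and content topics must be set together or not at all, and message-hash lookups may not carry content-filter criteria. `StoreQueryRequest` lives in the package's internal store RPC module, not the public bundle export; the values below are made up:

```js
// Valid: content-filtered query, pubsubTopic and contentTopics set together.
StoreQueryRequest.create({
  pubsubTopic: "/waku/2/rs/0/0",
  contentTopics: ["/my-app/1/chat/proto"],
  timeStart: new Date(Date.now() - 60_000), // converted to ns BigInt internally
  paginationForward: true
});

// Throws "Message hash lookup queries cannot include content filter criteria".
const someHash = new Uint8Array(32); // placeholder hash
StoreQueryRequest.create({
  messageHashes: [someHash],
  pubsubTopic: "/waku/2/rs/0/0",
  contentTopics: ["/my-app/1/chat/proto"]
});
```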
@@ -2078,50 +2069,48 @@ class StoreCore extends BaseProtocol {
                 log$4.error("Failed to get stream", e);
                 break;
             }
-            const res = await pipe([
+            const res = await pipe([storeQueryRequest.encode()], encode, stream, decode, async (source) => await all(source));
             const bytes = new Uint8ArrayList();
             res.forEach((chunk) => {
                 bytes.append(chunk);
             });
-            const
-            if (!
-
-
-
-
-
-            if (!response.messages || !response.messages.length) {
-                log$4.warn("Stopping pagination due to store `response.messages` field missing or empty");
+            const storeQueryResponse = StoreQueryResponse.decode(bytes);
+            if (!storeQueryResponse.statusCode ||
+                storeQueryResponse.statusCode >= 300) {
+                const errorMessage = `Store query failed with status code: ${storeQueryResponse.statusCode}, description: ${storeQueryResponse.statusDesc}`;
+                log$4.error(errorMessage);
+                throw new Error(errorMessage);
+            }
+            if (!storeQueryResponse.messages || !storeQueryResponse.messages.length) {
+                log$4.warn("Stopping pagination due to empty messages in response");
                 break;
             }
-            log$4.
-
-
-
+            log$4.info(`${storeQueryResponse.messages.length} messages retrieved from store`);
+            const decodedMessages = storeQueryResponse.messages.map((protoMsg) => {
+                if (!protoMsg.message) {
+                    return Promise.resolve(undefined);
+                }
+                const contentTopic = protoMsg.message.contentTopic;
+                if (contentTopic) {
                     const decoder = decoders.get(contentTopic);
                     if (decoder) {
-                        return decoder.fromProtoObj(
+                        return decoder.fromProtoObj(protoMsg.pubsubTopic || "", toProtoMessage(protoMsg.message));
                     }
                 }
                 return Promise.resolve(undefined);
             });
-
-            if (
-
-
-                break;
+            yield decodedMessages;
+            if (queryOpts.paginationForward) {
+                currentCursor =
+                    storeQueryResponse.messages[storeQueryResponse.messages.length - 1]
+                        .messageHash;
             }
-
-
-
-            if (
-
-
-                queryPageSize &&
-                responsePageSize < queryPageSize) {
+            else {
+                currentCursor = storeQueryResponse.messages[0].messageHash;
+            }
+            if (storeQueryResponse.messages.length > MAX_PAGE_SIZE &&
+                storeQueryResponse.messages.length <
+                    (queryOpts.paginationLimit || DEFAULT_PAGE_SIZE)) {
                 break;
             }
         }
@@ -2130,7 +2119,6 @@ class StoreCore extends BaseProtocol {
 
 var index = /*#__PURE__*/Object.freeze({
     __proto__: null,
-    get PageDirection () { return PageDirection; },
     StoreCodec: StoreCodec,
     StoreCore: StoreCore
 });
@@ -2573,38 +2561,22 @@ class TypedEventEmitter extends EventTarget {
         return this.dispatchEvent(new CustomEvent(type, detail));
     }
 }
-
- * CustomEvent is a standard event but it's not supported by node.
- *
- * Remove this when https://github.com/nodejs/node/issues/40678 is closed.
- *
- * Ref: https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent
- */
-class CustomEventPolyfill extends Event {
-    /** Returns any custom data event was created with. Typically used for synthetic events. */
-    detail;
-    constructor(message, data) {
-        super(message, data);
-        // @ts-expect-error could be undefined
-        this.detail = data?.detail;
-    }
-}
-const CustomEvent = globalThis.CustomEvent ?? CustomEventPolyfill;
+const CustomEvent = globalThis.CustomEvent;
 
 const RelayPingContentTopic = "/relay-ping/1/ping/null";
 const log$2 = new Logger("keep-alive");
 class KeepAliveManager {
-    pingKeepAliveTimers;
-    relayKeepAliveTimers;
-    options;
     relay;
-
-
-
+    libp2p;
+    options;
+    pingKeepAliveTimers = new Map();
+    relayKeepAliveTimers = new Map();
+    constructor({ options, relay, libp2p }) {
         this.options = options;
         this.relay = relay;
+        this.libp2p = libp2p;
     }
-    start(peerId
+    start(peerId) {
         // Just in case a timer already exists for this peer
         this.stop(peerId);
         const { pingKeepAlive: pingPeriodSecs, relayKeepAlive: relayPeriodSecs } = this.options;
@@ -2619,7 +2591,7 @@ class KeepAliveManager {
             // ping the peer for keep alive
             // also update the peer store with the latency
             try {
-                ping = await
+                ping = await this.libp2p.services.ping.ping(peerId);
                 log$2.info(`Ping succeeded (${peerIdStr})`, ping);
             }
             catch (error) {
@@ -2629,7 +2601,7 @@ class KeepAliveManager {
                 return;
             }
             try {
-                await peerStore.merge(peerId, {
+                await this.libp2p.peerStore.merge(peerId, {
                     metadata: {
                         ping: utf8ToBytes(ping.toString())
                     }
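`KeepAliveManager` now captures the libp2p instance at construction and resolves the ping service and peer store from it, so `start()`/`stop()` only need a peer id. A sketch of the new construction; `libp2p` and `relay` are assumed to exist in the embedding node and the periods are example values in seconds:

```js
import { KeepAliveManager } from "@waku/core";

const keepAlive = new KeepAliveManager({
  libp2p,
  relay,
  options: { pingKeepAlive: 60, relayKeepAlive: 300 }
});

// The libp2p ping service and peer store are taken from the captured instance.
keepAlive.start(peerId);
keepAlive.stop(peerId);
```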
@@ -2714,25 +2686,12 @@ class ConnectionManager extends TypedEventEmitter {
     dialErrorsForPeer = new Map();
     currentActiveParallelDialCount = 0;
     pendingPeerDialQueue = [];
-
+    isP2PNetworkConnected = false;
     isConnected() {
-
-
-    toggleOnline() {
-        if (!this.online) {
-            this.online = true;
-            this.dispatchEvent(new CustomEvent(EConnectionStateEvents.CONNECTION_STATUS, {
-                detail: this.online
-            }));
-        }
-    }
-    toggleOffline() {
-        if (this.online && this.libp2p.getConnections().length == 0) {
-            this.online = false;
-            this.dispatchEvent(new CustomEvent(EConnectionStateEvents.CONNECTION_STATUS, {
-                detail: this.online
-            }));
+        if (globalThis?.navigator && !globalThis?.navigator?.onLine) {
+            return false;
         }
+        return this.isP2PNetworkConnected;
     }
     static create(peerId, libp2p, keepAliveOptions, pubsubTopics, relay, options) {
         let instance = ConnectionManager.instances.get(peerId);
@@ -2747,6 +2706,7 @@ class ConnectionManager extends TypedEventEmitter {
         this.libp2p.removeEventListener("peer:connect", this.onEventHandlers["peer:connect"]);
         this.libp2p.removeEventListener("peer:disconnect", this.onEventHandlers["peer:disconnect"]);
         this.libp2p.removeEventListener("peer:discovery", this.onEventHandlers["peer:discovery"]);
+        this.stopNetworkStatusListener();
     }
     async dropConnection(peerId) {
         try {
@@ -2818,8 +2778,12 @@ class ConnectionManager extends TypedEventEmitter {
             maxParallelDials: DEFAULT_MAX_PARALLEL_DIALS,
             ...options
         };
-        this.keepAliveManager = new KeepAliveManager(
-
+        this.keepAliveManager = new KeepAliveManager({
+            relay,
+            libp2p,
+            options: keepAliveOptions
+        });
+        this.startEventListeners()
             .then(() => log$1.info(`Connection Manager is now running`))
             .catch((error) => log$1.error(`Unexpected error while running service`, error));
         // libp2p emits `peer:discovery` events during its initialization
@@ -2842,11 +2806,11 @@ class ConnectionManager extends TypedEventEmitter {
             log$1.error(`Unexpected error while dialing peer store peers`, error);
         }
     }
-    async
-    // start event listeners
+    async startEventListeners() {
         this.startPeerDiscoveryListener();
         this.startPeerConnectionListener();
         this.startPeerDisconnectionListener();
+        this.startNetworkStatusListener();
     }
     async dialPeer(peerId) {
         this.currentActiveParallelDialCount += 1;
@@ -2865,6 +2829,7 @@ class ConnectionManager extends TypedEventEmitter {
                 // this helps us keep track of peers that have been dialed before
                 this.dialAttemptsForPeer.set(peerId.toString(), -1);
                 // Dialing succeeded, break the loop
+                this.keepAliveManager.start(peerId);
                 break;
             }
             catch (error) {
@@ -2953,9 +2918,7 @@ class ConnectionManager extends TypedEventEmitter {
             this.pendingPeerDialQueue.push(peerId);
             return;
         }
-        this.dialPeer(peerId)
-            log$1.error(`Error dialing peer ${peerId.toString()} : ${err}`);
-        });
+        await this.dialPeer(peerId);
     }
     onEventHandlers = {
         "peer:discovery": (evt) => {
@@ -2974,7 +2937,7 @@ class ConnectionManager extends TypedEventEmitter {
             void (async () => {
                 log$1.info(`Connected to peer ${evt.detail.toString()}`);
                 const peerId = evt.detail;
-                this.keepAliveManager.start(peerId
+                this.keepAliveManager.start(peerId);
                 const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(Tags.BOOTSTRAP);
                 if (isBootstrap) {
                     const bootstrapConnections = this.libp2p
@@ -2995,14 +2958,17 @@ class ConnectionManager extends TypedEventEmitter {
                         detail: peerId
                     }));
                 }
-                this.
+                this.setP2PNetworkConnected();
             })();
         },
         "peer:disconnect": (evt) => {
             void (async () => {
                 this.keepAliveManager.stop(evt.detail);
-                this.
+                this.setP2PNetworkDisconnected();
             })();
+        },
+        "browser:network": () => {
+            this.dispatchWakuConnectionEvent();
         }
     };
     /**
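With this change `isConnected()` combines the browser's online state with the libp2p connection state, and `CONNECTION_STATUS` events now carry that combined boolean. A sketch of listening for the event; `connectionManager` is the instance a Waku node already holds and the `@waku/interfaces` import path is an assumption:

```js
import { EConnectionStateEvents } from "@waku/interfaces";

// detail is the result of isConnected(): false while navigator.onLine is false,
// otherwise whether the node currently has any libp2p connection.
connectionManager.addEventListener(
  EConnectionStateEvents.CONNECTION_STATUS,
  (evt) => {
    console.log("waku connected:", evt.detail);
  }
);
```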
@@ -3014,26 +2980,25 @@ class ConnectionManager extends TypedEventEmitter {
      * @returns true if the peer should be dialed, false otherwise
      */
     async shouldDialPeer(peerId) {
-        // if we're already connected to the peer, don't dial
         const isConnected = this.libp2p.getConnections(peerId).length > 0;
         if (isConnected) {
             log$1.warn(`Already connected to peer ${peerId.toString()}. Not dialing.`);
             return false;
         }
-
-        if (!
+        const isSameShard = await this.isPeerTopicConfigured(peerId);
+        if (!isSameShard) {
             const shardInfo = await this.getPeerShardInfo(peerId, this.libp2p.peerStore);
             log$1.warn(`Discovered peer ${peerId.toString()} with ShardInfo ${shardInfo} is not part of any of the configured pubsub topics (${this.configuredPubsubTopics}).
             Not dialing.`);
             return false;
         }
-
-        if (!
+        const isPreferredBasedOnBootstrap = await this.isPeerDialableBasedOnBootstrapStatus(peerId);
+        if (!isPreferredBasedOnBootstrap) {
             log$1.warn(`Peer ${peerId.toString()} is not dialable based on bootstrap status. Not dialing.`);
             return false;
         }
-
-        if (
+        const hasBeenDialed = this.dialAttemptsForPeer.has(peerId.toString());
+        if (hasBeenDialed) {
             log$1.warn(`Peer ${peerId.toString()} has already been attempted dial before, or already has a dial attempt in progress, skipping dial`);
             return false;
         }
@@ -3047,19 +3012,15 @@ class ConnectionManager extends TypedEventEmitter {
     async isPeerDialableBasedOnBootstrapStatus(peerId) {
         const tagNames = await this.getTagNamesForPeer(peerId);
         const isBootstrap = tagNames.some((tagName) => tagName === Tags.BOOTSTRAP);
-        if (isBootstrap) {
-            const currentBootstrapConnections = this.libp2p
-                .getConnections()
-                .filter((conn) => {
-                return conn.tags.find((name) => name === Tags.BOOTSTRAP);
-            }).length;
-            if (currentBootstrapConnections < this.options.maxBootstrapPeersAllowed)
-                return true;
-        }
-        else {
+        if (!isBootstrap) {
             return true;
         }
-
+        const currentBootstrapConnections = this.libp2p
+            .getConnections()
+            .filter((conn) => {
+            return conn.tags.find((name) => name === Tags.BOOTSTRAP);
+        }).length;
+        return currentBootstrapConnections < this.options.maxBootstrapPeersAllowed;
     }
     async dispatchDiscoveryEvent(peerId) {
         const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(Tags.BOOTSTRAP);
@@ -3098,45 +3059,132 @@ class ConnectionManager extends TypedEventEmitter {
             return undefined;
         return decodeRelayShard(shardInfoBytes);
     }
+    startNetworkStatusListener() {
+        try {
+            globalThis.addEventListener("online", this.onEventHandlers["browser:network"]);
+            globalThis.addEventListener("offline", this.onEventHandlers["browser:network"]);
+        }
+        catch (err) {
+            log$1.error(`Failed to start network listener: ${err}`);
+        }
+    }
+    stopNetworkStatusListener() {
+        try {
+            globalThis.removeEventListener("online", this.onEventHandlers["browser:network"]);
+            globalThis.removeEventListener("offline", this.onEventHandlers["browser:network"]);
+        }
+        catch (err) {
+            log$1.error(`Failed to stop network listener: ${err}`);
+        }
+    }
+    setP2PNetworkConnected() {
+        if (!this.isP2PNetworkConnected) {
+            this.isP2PNetworkConnected = true;
+            this.dispatchWakuConnectionEvent();
+        }
+    }
+    setP2PNetworkDisconnected() {
+        if (this.isP2PNetworkConnected &&
+            this.libp2p.getConnections().length === 0) {
+            this.isP2PNetworkConnected = false;
+            this.dispatchWakuConnectionEvent();
+        }
+    }
+    dispatchWakuConnectionEvent() {
+        this.dispatchEvent(new CustomEvent(EConnectionStateEvents.CONNECTION_STATUS, {
+            detail: this.isConnected()
+        }));
+    }
+}
+
+class HealthManager {
+    static instance;
+    health;
+    constructor() {
+        this.health = {
+            overallStatus: HealthStatus.Unhealthy,
+            protocolStatuses: new Map()
+        };
+    }
+    static getInstance() {
+        if (!HealthManager.instance) {
+            HealthManager.instance = new HealthManager();
+        }
+        return HealthManager.instance;
+    }
+    getHealthStatus() {
+        return this.health.overallStatus;
+    }
+    getProtocolStatus(protocol) {
+        return this.health.protocolStatuses.get(protocol);
+    }
+    updateProtocolHealth(multicodec, connectedPeers) {
+        const protocol = this.getNameFromMulticodec(multicodec);
+        let status = HealthStatus.Unhealthy;
+        if (connectedPeers == 1) {
+            status = HealthStatus.MinimallyHealthy;
+        }
+        else if (connectedPeers >= 2) {
+            status = HealthStatus.SufficientlyHealthy;
+        }
+        this.health.protocolStatuses.set(protocol, {
+            name: protocol,
+            status: status,
+            lastUpdate: new Date()
+        });
+        this.updateOverallHealth();
+    }
+    getNameFromMulticodec(multicodec) {
+        let name;
+        if (multicodec.includes("filter")) {
+            name = Protocols.Filter;
+        }
+        else if (multicodec.includes("lightpush")) {
+            name = Protocols.LightPush;
+        }
+        else if (multicodec.includes("store")) {
+            name = Protocols.Store;
+        }
+        else {
+            throw new Error(`Unknown protocol: ${multicodec}`);
+        }
+        return name;
+    }
+    updateOverallHealth() {
+        const relevantProtocols = [Protocols.LightPush, Protocols.Filter];
+        const statuses = relevantProtocols.map((p) => this.getProtocolStatus(p)?.status);
+        if (statuses.some((status) => status === HealthStatus.Unhealthy)) {
+            this.health.overallStatus = HealthStatus.Unhealthy;
+        }
+        else if (statuses.some((status) => status === HealthStatus.MinimallyHealthy)) {
+            this.health.overallStatus = HealthStatus.MinimallyHealthy;
+        }
+        else {
+            this.health.overallStatus = HealthStatus.SufficientlyHealthy;
+        }
+    }
 }
+const getHealthManager = () => HealthManager.getInstance();
 
 const log = new Logger("metadata");
 const MetadataCodec = "/vac/waku/metadata/1.0.0";
 class Metadata extends BaseProtocol {
-
+    pubsubTopics;
     libp2pComponents;
     handshakesConfirmed = new Map();
-    constructor(
-        super(MetadataCodec, libp2p.components, log,
-        this.
+    constructor(pubsubTopics, libp2p) {
+        super(MetadataCodec, libp2p.components, log, pubsubTopics);
+        this.pubsubTopics = pubsubTopics;
         this.libp2pComponents = libp2p;
         void libp2p.registrar.handle(MetadataCodec, (streamData) => {
             void this.onRequest(streamData);
         });
     }
-    /**
-     * Handle an incoming metadata request
-     */
-    async onRequest(streamData) {
-        try {
-            const { stream, connection } = streamData;
-            const encodedShardInfo = WakuMetadataResponse.encode(this.shardInfo);
-            const encodedResponse = await pipe([encodedShardInfo], encode, stream, decode, async (source) => await all(source));
-            const { error, shardInfo } = this.decodeMetadataResponse(encodedResponse);
-            if (error) {
-                return;
-            }
-            await this.savePeerShardInfo(connection.remotePeer, shardInfo);
-        }
-        catch (error) {
-            log.error("Error handling metadata request", error);
-        }
-    }
     /**
      * Make a metadata query to a peer
      */
     async query(peerId) {
-        const request = WakuMetadataRequest.encode(this.
+        const request = WakuMetadataRequest.encode(pubsubTopicsToShardInfo(this.pubsubTopics));
         const peer = await this.peerStore.get(peerId);
         if (!peer) {
             return {
@@ -3179,6 +3227,24 @@ class Metadata extends BaseProtocol {
         }
         return await this.query(peerId);
     }
+    /**
+     * Handle an incoming metadata request
+     */
+    async onRequest(streamData) {
+        try {
+            const { stream, connection } = streamData;
+            const encodedShardInfo = WakuMetadataResponse.encode(pubsubTopicsToShardInfo(this.pubsubTopics));
+            const encodedResponse = await pipe([encodedShardInfo], encode, stream, decode, async (source) => await all(source));
+            const { error, shardInfo } = this.decodeMetadataResponse(encodedResponse);
+            if (error) {
+                return;
+            }
+            await this.savePeerShardInfo(connection.remotePeer, shardInfo);
+        }
+        catch (error) {
+            log.error("Error handling metadata request", error);
+        }
+    }
     decodeMetadataResponse(encodedResponse) {
         const bytes = new Uint8ArrayList();
         encodedResponse.forEach((chunk) => {
@@ -3207,8 +3273,8 @@ class Metadata extends BaseProtocol {
         this.handshakesConfirmed.set(peerId.toString(), shardInfo);
     }
 }
-function wakuMetadata(
-    return (components) => new Metadata(
+function wakuMetadata(pubsubTopics) {
+    return (components) => new Metadata(pubsubTopics, components);
 }
 
-export { ConnectionManager, FilterCodecs, FilterCore, KeepAliveManager, LightPushCodec, LightPushCore, MetadataCodec,
+export { ConnectionManager, FilterCodecs, FilterCore, KeepAliveManager, LightPushCodec, LightPushCore, MetadataCodec, StoreCore, createEncoder, getHealthManager, index$3 as message, waitForRemotePeer, wakuMetadata, index$2 as waku_filter, index$1 as waku_light_push, index as waku_store };
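
The bundle now also exports `getHealthManager`, a singleton that derives per-protocol and overall health from connected peer counts (0 peers → Unhealthy, 1 → MinimallyHealthy, 2+ → SufficientlyHealthy; the overall status considers LightPush and Filter). A sketch of its use; the filter multicodec string and peer counts below are example values, and the `@waku/interfaces` import path is an assumption:

```js
import { getHealthManager } from "@waku/core";
import { Protocols } from "@waku/interfaces";

const health = getHealthManager();

// Normally called by the protocol implementations as peers come and go.
health.updateProtocolHealth("/vac/waku/lightpush/2.0.0-beta1", 2);
health.updateProtocolHealth("/vac/waku/filter-subscribe/2.0.0-beta1", 1);

console.log(health.getHealthStatus());                   // MinimallyHealthy overall
console.log(health.getProtocolStatus(Protocols.Filter)); // { name, status, lastUpdate }
```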
|