@waku/core 0.0.36-b7e9b08.0 → 0.0.36-f7778a9.0

This diff shows the changes between two publicly released versions of the package, as they appear in the supported public registries. It is provided for informational purposes only.
Files changed (34)
  1. package/bundle/{base_protocol-Cql7Ssk4.js → base_protocol-DvQrudwy.js} +1 -1
  2. package/bundle/{index-BF77B5GY.js → index-CTo1my9M.js} +1 -1
  3. package/bundle/index.js +186 -188
  4. package/bundle/lib/base_protocol.js +2 -2
  5. package/bundle/lib/message/version_0.js +2 -2
  6. package/bundle/{version_0-Duf8Mz94.js → version_0-CyeTW0Vr.js} +18 -6
  7. package/dist/.tsbuildinfo +1 -1
  8. package/dist/index.d.ts +1 -0
  9. package/dist/index.js +1 -0
  10. package/dist/index.js.map +1 -1
  11. package/dist/lib/filter/filter.d.ts +4 -2
  12. package/dist/lib/filter/filter.js +21 -3
  13. package/dist/lib/filter/filter.js.map +1 -1
  14. package/dist/lib/message/version_0.d.ts +2 -3
  15. package/dist/lib/message/version_0.js +1 -4
  16. package/dist/lib/message/version_0.js.map +1 -1
  17. package/dist/lib/message_hash/index.d.ts +1 -0
  18. package/dist/lib/message_hash/index.js +2 -0
  19. package/dist/lib/message_hash/index.js.map +1 -0
  20. package/dist/lib/message_hash/message_hash.d.ts +52 -0
  21. package/dist/lib/message_hash/message_hash.js +84 -0
  22. package/dist/lib/message_hash/message_hash.js.map +1 -0
  23. package/dist/lib/store/rpc.js +16 -10
  24. package/dist/lib/store/rpc.js.map +1 -1
  25. package/dist/lib/store/store.js +12 -2
  26. package/dist/lib/store/store.js.map +1 -1
  27. package/package.json +1 -1
  28. package/src/index.ts +2 -0
  29. package/src/lib/filter/filter.ts +33 -6
  30. package/src/lib/message/version_0.ts +3 -7
  31. package/src/lib/message_hash/index.ts +1 -0
  32. package/src/lib/message_hash/message_hash.ts +106 -0
  33. package/src/lib/store/rpc.ts +23 -19
  34. package/src/lib/store/store.ts +13 -1
@@ -1,4 +1,4 @@
- import { L as Logger } from './index-BF77B5GY.js';
+ import { L as Logger } from './index-CTo1my9M.js';

  function selectOpenConnection(connections) {
  return connections
@@ -1540,4 +1540,4 @@ class Logger {
  }
  }

- export { Logger as L, base58btc as a, base32 as b, coerce as c, base36 as d, equals as e, base256emoji$1 as f, base64$1 as g, base58 as h, base36$1 as i, base32$1 as j, base16$1 as k, base10$1 as l, base8$1 as m, base2$1 as n, identityBase as o };
+ export { EPeersByDiscoveryEvents as E, Logger as L, ProtocolError as P, Tags as T, base58btc as a, base32 as b, coerce as c, base36 as d, equals as e, EConnectionStateEvents as f, base256emoji$1 as g, base64$1 as h, base58 as i, base36$1 as j, base32$1 as k, base16$1 as l, base10$1 as m, base8$1 as n, base2$1 as o, identityBase as p };
package/bundle/index.js CHANGED
@@ -1,8 +1,8 @@
- import { v as version_0, a as allocUnsafe, b as alloc, e as encodingLength$1, c as encode$2, d as decode$4, F as FilterSubscribeRequest, f as FilterSubscribeResponse$1, M as MessagePush, P as PushRpc$1, g as PushResponse, S as StoreQueryRequest$1, h as StoreQueryResponse$1, t as toString$1, i as bases, j as fromString, u as utf8ToBytes, k as createEncoder, p as pubsubTopicToSingleShardInfo, l as bytesToUtf8, s as shardInfoToPubsubTopics, W as WakuMetadataRequest, m as pubsubTopicsToShardInfo, n as WakuMetadataResponse } from './version_0-Duf8Mz94.js';
- export { o as createDecoder } from './version_0-Duf8Mz94.js';
- import { e as equals$2, c as coerce, b as base32, a as base58btc, d as base36, L as Logger } from './index-BF77B5GY.js';
- import { B as BaseProtocol } from './base_protocol-Cql7Ssk4.js';
- export { S as StreamManager } from './base_protocol-Cql7Ssk4.js';
+ import { v as version_0, a as allocUnsafe, b as alloc, e as encodingLength$1, c as encode$2, d as decode$4, F as FilterSubscribeRequest, f as FilterSubscribeResponse$1, M as MessagePush, P as PushRpc$1, g as PushResponse, S as StoreQueryRequest$1, h as StoreQueryResponse$1, t as toString$1, i as bases, j as fromString, u as utf8ToBytes, k as createEncoder, p as pubsubTopicToSingleShardInfo, l as bytesToUtf8, s as shardInfoToPubsubTopics, W as WakuMetadataRequest, m as pubsubTopicsToShardInfo, n as WakuMetadataResponse, o as concat$1, q as sha256, r as bytesToHex, w as numberToBytes } from './version_0-CyeTW0Vr.js';
+ export { x as createDecoder } from './version_0-CyeTW0Vr.js';
+ import { e as equals$2, c as coerce, b as base32, a as base58btc, d as base36, L as Logger, P as ProtocolError, T as Tags, E as EPeersByDiscoveryEvents, f as EConnectionStateEvents } from './index-CTo1my9M.js';
+ import { B as BaseProtocol } from './base_protocol-DvQrudwy.js';
+ export { S as StreamManager } from './base_protocol-DvQrudwy.js';

  /* eslint-disable */
  var encode_1 = encode$1;
@@ -514,6 +514,10 @@ function encodeCID(version, code, multihash) {
  }
  const cidSymbol = Symbol.for('@ipld/js-cid/CID');

+ function isDefined(value) {
+ return Boolean(value);
+ }
+
  const MB = 1024 ** 2;
  const SIZE_CAP_IN_MB = 1;
  /**
@@ -531,6 +535,8 @@ async function isMessageSizeUnderCap(encoder, message) {
  }
  const isWireSizeUnderCap = (buf) => buf.length / MB <= SIZE_CAP_IN_MB;

+ const DNS_DISCOVERY_TAG = "@waku/bootstrap";
+
  const decodeRelayShard = (bytes) => {
  // explicitly converting to Uint8Array to avoid Buffer
  // https://github.com/libp2p/js-libp2p/issues/2146
@@ -590,128 +596,6 @@ var index$3 = /*#__PURE__*/Object.freeze({
  version_0: version_0
  });

- var Protocols;
- (function (Protocols) {
- Protocols["Relay"] = "relay";
- Protocols["Store"] = "store";
- Protocols["LightPush"] = "lightpush";
- Protocols["Filter"] = "filter";
- })(Protocols || (Protocols = {}));
- var ProtocolError;
- (function (ProtocolError) {
- //
- // GENERAL ERRORS SECTION
- //
- /**
- * Could not determine the origin of the fault. Best to check connectivity and try again
- * */
- ProtocolError["GENERIC_FAIL"] = "Generic error";
- /**
- * The remote peer rejected the message. Information provided by the remote peer
- * is logged. Review message validity, or mitigation for `NO_PEER_AVAILABLE`
- * or `DECODE_FAILED` can be used.
- */
- ProtocolError["REMOTE_PEER_REJECTED"] = "Remote peer rejected";
- /**
- * Failure to protobuf decode the message. May be due to a remote peer issue,
- * ensuring that messages are sent via several peer enable mitigation of this error.
- */
- ProtocolError["DECODE_FAILED"] = "Failed to decode";
- /**
- * Failure to find a peer with suitable protocols. This may due to a connection issue.
- * Mitigation can be: retrying after a given time period, display connectivity issue
- * to user or listening for `peer:connected:bootstrap` or `peer:connected:peer-exchange`
- * on the connection manager before retrying.
- */
- ProtocolError["NO_PEER_AVAILABLE"] = "No peer available";
- /**
- * Failure to find a stream to the peer. This may be because the connection with the peer is not still alive.
- * Mitigation can be: retrying after a given time period, or mitigation for `NO_PEER_AVAILABLE` can be used.
- */
- ProtocolError["NO_STREAM_AVAILABLE"] = "No stream available";
- /**
- * The remote peer did not behave as expected. Mitigation for `NO_PEER_AVAILABLE`
- * or `DECODE_FAILED` can be used.
- */
- ProtocolError["NO_RESPONSE"] = "No response received";
- //
- // SEND ERRORS SECTION
- //
- /**
- * Failure to protobuf encode the message. This is not recoverable and needs
- * further investigation.
- */
- ProtocolError["ENCODE_FAILED"] = "Failed to encode";
- /**
- * The message payload is empty, making the message invalid. Ensure that a non-empty
- * payload is set on the outgoing message.
- */
- ProtocolError["EMPTY_PAYLOAD"] = "Payload is empty";
- /**
- * The message size is above the maximum message size allowed on the Waku Network.
- * Compressing the message or using an alternative strategy for large messages is recommended.
- */
- ProtocolError["SIZE_TOO_BIG"] = "Size is too big";
- /**
- * The PubsubTopic passed to the send function is not configured on the Waku node.
- * Please ensure that the PubsubTopic is used when initializing the Waku node.
- */
- ProtocolError["TOPIC_NOT_CONFIGURED"] = "Topic not configured";
- /**
- * Fails when
- */
- ProtocolError["STREAM_ABORTED"] = "Stream aborted";
- /**
- * General proof generation error message.
- * nwaku: https://github.com/waku-org/nwaku/blob/c3cb06ac6c03f0f382d3941ea53b330f6a8dd127/waku/waku_rln_relay/group_manager/group_manager_base.nim#L201C19-L201C42
- */
- ProtocolError["RLN_PROOF_GENERATION"] = "Proof generation failed";
- //
- // RECEIVE ERRORS SECTION
- //
- /**
- * The pubsub topic configured on the decoder does not match the pubsub topic setup on the protocol.
- * Ensure that the pubsub topic used for decoder creation is the same as the one used for protocol.
- */
- ProtocolError["TOPIC_DECODER_MISMATCH"] = "Topic decoder mismatch";
- /**
- * The topics passed in the decoders do not match each other, or don't exist at all.
- * Ensure that all the pubsub topics used in the decoders are valid and match each other.
- */
- ProtocolError["INVALID_DECODER_TOPICS"] = "Invalid decoder topics";
- })(ProtocolError || (ProtocolError = {}));
-
- var Tags;
- (function (Tags) {
- Tags["BOOTSTRAP"] = "bootstrap";
- Tags["PEER_EXCHANGE"] = "peer-exchange";
- Tags["LOCAL"] = "local-peer-cache";
- })(Tags || (Tags = {}));
- var EPeersByDiscoveryEvents;
- (function (EPeersByDiscoveryEvents) {
- EPeersByDiscoveryEvents["PEER_DISCOVERY_BOOTSTRAP"] = "peer:discovery:bootstrap";
- EPeersByDiscoveryEvents["PEER_DISCOVERY_PEER_EXCHANGE"] = "peer:discovery:peer-exchange";
- EPeersByDiscoveryEvents["PEER_CONNECT_BOOTSTRAP"] = "peer:connected:bootstrap";
- EPeersByDiscoveryEvents["PEER_CONNECT_PEER_EXCHANGE"] = "peer:connected:peer-exchange";
- })(EPeersByDiscoveryEvents || (EPeersByDiscoveryEvents = {}));
- var EConnectionStateEvents;
- (function (EConnectionStateEvents) {
- EConnectionStateEvents["CONNECTION_STATUS"] = "waku:connection";
- })(EConnectionStateEvents || (EConnectionStateEvents = {}));
-
- const DNS_DISCOVERY_TAG = "@waku/bootstrap";
-
- var HealthStatusChangeEvents;
- (function (HealthStatusChangeEvents) {
- HealthStatusChangeEvents["StatusChange"] = "health:change";
- })(HealthStatusChangeEvents || (HealthStatusChangeEvents = {}));
- var HealthStatus;
- (function (HealthStatus) {
- HealthStatus["Unhealthy"] = "Unhealthy";
- HealthStatus["MinimallyHealthy"] = "MinimallyHealthy";
- HealthStatus["SufficientlyHealthy"] = "SufficientlyHealthy";
- })(HealthStatus || (HealthStatus = {}));
-
  /**
  * @packageDocumentation
  *
@@ -2252,60 +2136,59 @@ const duplexPipelineFn = (duplex) => {
  };
  };

- const byteToHex = [];
- for (let i = 0; i < 256; ++i) {
- byteToHex.push((i + 0x100).toString(16).slice(1));
- }
- function unsafeStringify(arr, offset = 0) {
- return (byteToHex[arr[offset + 0]] +
- byteToHex[arr[offset + 1]] +
- byteToHex[arr[offset + 2]] +
- byteToHex[arr[offset + 3]] +
- '-' +
- byteToHex[arr[offset + 4]] +
- byteToHex[arr[offset + 5]] +
- '-' +
- byteToHex[arr[offset + 6]] +
- byteToHex[arr[offset + 7]] +
- '-' +
- byteToHex[arr[offset + 8]] +
- byteToHex[arr[offset + 9]] +
- '-' +
- byteToHex[arr[offset + 10]] +
- byteToHex[arr[offset + 11]] +
- byteToHex[arr[offset + 12]] +
- byteToHex[arr[offset + 13]] +
- byteToHex[arr[offset + 14]] +
- byteToHex[arr[offset + 15]]).toLowerCase();
- }
-
+ // Unique ID creation requires a high quality random # generator. In the browser we therefore
+ // require the crypto API and do not support built-in fallback to lower quality random number
+ // generators (like Math.random()).
  let getRandomValues;
  const rnds8 = new Uint8Array(16);
  function rng() {
+ // lazy load so that environments that need to polyfill have a chance to do so
+ if (!getRandomValues) {
+ // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation.
+ getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto);
+
  if (!getRandomValues) {
- if (typeof crypto === 'undefined' || !crypto.getRandomValues) {
- throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
- }
- getRandomValues = crypto.getRandomValues.bind(crypto);
+ throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
  }
- return getRandomValues(rnds8);
+ }
+
+ return getRandomValues(rnds8);
+ }
+
+ /**
+ * Convert array of 16 byte values to UUID string format of the form:
+ * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
+ */
+
+ const byteToHex = [];
+
+ for (let i = 0; i < 256; ++i) {
+ byteToHex.push((i + 0x100).toString(16).slice(1));
+ }
+
+ function unsafeStringify(arr, offset = 0) {
+ // Note: Be careful editing this code! It's been tuned for performance
+ // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
+ return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]];
  }

  const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto);
- var native = { randomUUID };
+ var native = {
+ randomUUID
+ };

  function v4(options, buf, offset) {
- if (native.randomUUID && true && !options) {
- return native.randomUUID();
- }
- options = options || {};
- const rnds = options.random ?? options.rng?.() ?? rng();
- if (rnds.length < 16) {
- throw new Error('Random bytes length must be >= 16');
- }
- rnds[6] = (rnds[6] & 0x0f) | 0x40;
- rnds[8] = (rnds[8] & 0x3f) | 0x80;
- return unsafeStringify(rnds);
+ if (native.randomUUID && true && !options) {
+ return native.randomUUID();
+ }
+
+ options = options || {};
+ const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
+
+ rnds[6] = rnds[6] & 0x0f | 0x40;
+ rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
+
+ return unsafeStringify(rnds);
  }

  /**
@@ -2421,12 +2304,30 @@ const FilterCodecs = {
  PUSH: "/vac/waku/filter-push/2.0.0-beta1"
  };
  class FilterCore extends BaseProtocol {
- handleIncomingMessage;
  pubsubTopics;
+ static handleIncomingMessage;
  constructor(handleIncomingMessage, pubsubTopics, libp2p) {
  super(FilterCodecs.SUBSCRIBE, libp2p.components, pubsubTopics);
- this.handleIncomingMessage = handleIncomingMessage;
  this.pubsubTopics = pubsubTopics;
+ // TODO(weboko): remove when @waku/sdk 0.0.33 is released
+ const prevHandler = FilterCore.handleIncomingMessage;
+ FilterCore.handleIncomingMessage = !prevHandler
+ ? handleIncomingMessage
+ : async (pubsubTopic, message, peerIdStr) => {
+ try {
+ await prevHandler(pubsubTopic, message, peerIdStr);
+ }
+ catch (e) {
+ log$5.error("Previous FilterCore incoming message handler failed ", e);
+ }
+ try {
+ await handleIncomingMessage(pubsubTopic, message, peerIdStr);
+ }
+ catch (e) {
+ log$5.error("Present FilterCore incoming message handler failed ", e);
+ }
+ return;
+ };
  libp2p
  .handle(FilterCodecs.PUSH, this.onRequest.bind(this), {
  maxInboundStreams: 100
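The constructor change above moves `handleIncomingMessage` to a static field and chains any previously registered handler with the new one, so every `FilterCore` instance constructed in the same runtime sees every pushed message. A minimal TypeScript sketch of that chaining pattern, with an illustrative handler type and sample callbacks (not the package's own API surface):

```typescript
// Illustrative type; mirrors the (pubsubTopic, message, peerIdStr) signature used above.
type IncomingMessageHandler = (
  pubsubTopic: string,
  message: unknown,
  peerIdStr: string
) => Promise<void>;

let chained: IncomingMessageHandler | undefined;

function register(handler: IncomingMessageHandler): void {
  const prevHandler = chained;
  chained = !prevHandler
    ? handler
    : async (pubsubTopic, message, peerIdStr) => {
        // Every registered handler runs; a failure in one does not stop the others.
        try {
          await prevHandler(pubsubTopic, message, peerIdStr);
        } catch (e) {
          console.error("Previous handler failed", e);
        }
        try {
          await handler(pubsubTopic, message, peerIdStr);
        } catch (e) {
          console.error("Present handler failed", e);
        }
      };
}

// Registering twice mirrors constructing two FilterCore instances:
// both callbacks receive each incoming push.
register(async (topic) => console.log("first subscriber", topic));
register(async (topic) => console.log("second subscriber", topic));
void chained?.("/waku/2/rs/0/0", {}, "peer-id");
```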
@@ -2603,7 +2504,7 @@ class FilterCore extends BaseProtocol {
  log$5.error("Pubsub topic missing from push message");
  return;
  }
- await this.handleIncomingMessage(pubsubTopic, wakuMessage, connection.remotePeer.toString());
+ await FilterCore.handleIncomingMessage?.(pubsubTopic, wakuMessage, connection.remotePeer.toString());
  }
  }).then(() => {
  log$5.info("Receiving pipe closed.");
@@ -2835,6 +2736,7 @@ class StoreQueryRequest {
  static create(params) {
  const request = new StoreQueryRequest({
  ...params,
+ contentTopics: params.contentTopics || [],
  requestId: v4(),
  timeStart: params.timeStart
  ? BigInt(params.timeStart.getTime() * ONE_MILLION)
@@ -2847,17 +2749,22 @@ class StoreQueryRequest {
  ? BigInt(params.paginationLimit)
  : undefined
  });
- // Validate request parameters based on RFC
- if ((params.pubsubTopic && !params.contentTopics) ||
- (!params.pubsubTopic && params.contentTopics)) {
- throw new Error("Both pubsubTopic and contentTopics must be set or unset");
- }
- if (params.messageHashes &&
- (params.pubsubTopic ||
- params.contentTopics ||
- params.timeStart ||
- params.timeEnd)) {
- throw new Error("Message hash lookup queries cannot include content filter criteria");
+ const isHashQuery = params.messageHashes && params.messageHashes.length > 0;
+ const hasContentTopics = params.contentTopics && params.contentTopics.length > 0;
+ const hasTimeFilter = params.timeStart || params.timeEnd;
+ if (isHashQuery) {
+ if (hasContentTopics || hasTimeFilter) {
+ throw new Error("Message hash lookup queries cannot include content filter criteria (contentTopics, timeStart, or timeEnd)");
+ }
+ }
+ else {
+ if ((params.pubsubTopic &&
+ (!params.contentTopics || params.contentTopics.length === 0)) ||
+ (!params.pubsubTopic &&
+ params.contentTopics &&
+ params.contentTopics.length > 0)) {
+ throw new Error("Both pubsubTopic and contentTopics must be set together for content-filtered queries");
+ }
  }
  return request;
  }
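The reworked `create` validation above accepts two mutually exclusive request shapes: a message-hash lookup (no content or time filter) and a content-filtered query (pubsub topic and content topics set together). A minimal sketch of both shapes using the same parameter names as the code; the `StoreQueryParams` type name and example values are illustrative:

```typescript
// Illustrative shape only; field names follow the params validated above.
interface StoreQueryParams {
  pubsubTopic?: string;
  contentTopics?: string[];
  timeStart?: Date;
  timeEnd?: Date;
  messageHashes?: Uint8Array[];
  paginationLimit?: number;
}

// 1. Hash lookup: messageHashes only. Adding contentTopics, timeStart or timeEnd
//    would throw "Message hash lookup queries cannot include content filter criteria ...".
const hashLookup: StoreQueryParams = {
  messageHashes: [new Uint8Array(32)]
};

// 2. Content-filtered query: pubsubTopic and contentTopics must be set together,
//    otherwise "Both pubsubTopic and contentTopics must be set together ..." is thrown.
const contentFiltered: StoreQueryParams = {
  pubsubTopic: "/waku/2/rs/0/0",
  contentTopics: ["/my-app/1/chat/proto"],
  timeStart: new Date(Date.now() - 60_000),
  timeEnd: new Date(),
  paginationLimit: 20
};
```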
@@ -2904,8 +2811,12 @@ class StoreCore extends BaseProtocol {
  this.pubsubTopics = pubsubTopics;
  }
  async *queryPerPage(queryOpts, decoders, peerId) {
- if (queryOpts.contentTopics.toString() !==
- Array.from(decoders.keys()).toString()) {
+ // Only validate decoder content topics for content-filtered queries
+ const isHashQuery = queryOpts.messageHashes && queryOpts.messageHashes.length > 0;
+ if (!isHashQuery &&
+ queryOpts.contentTopics &&
+ queryOpts.contentTopics.toString() !==
+ Array.from(decoders.keys()).toString()) {
  throw new Error("Internal error, the decoders should match the query's content topics");
  }
  let currentCursor = queryOpts.paginationCursor;
@@ -2914,6 +2825,12 @@ class StoreCore extends BaseProtocol {
  ...queryOpts,
  paginationCursor: currentCursor
  });
+ log$3.info("Sending store query request:", {
+ hasMessageHashes: !!queryOpts.messageHashes?.length,
+ messageHashCount: queryOpts.messageHashes?.length,
+ pubsubTopic: queryOpts.pubsubTopic,
+ contentTopics: queryOpts.contentTopics
+ });
  let stream;
  try {
  stream = await this.getStream(peerId);
@@ -4975,4 +4892,85 @@ function wakuMetadata(pubsubTopics) {
  return (components) => new Metadata(pubsubTopics, components);
  }

- export { ConnectionManager, FilterCodecs, FilterCore, LightPushCodec, LightPushCore, MetadataCodec, StoreCodec, StoreCore, createEncoder, index$3 as message, wakuMetadata, index$2 as waku_filter, index$1 as waku_light_push, index as waku_store };
+ /**
+ * Deterministic Message Hashing as defined in
+ * [14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/#deterministic-message-hashing)
+ *
+ * Computes a SHA-256 hash of the concatenation of pubsub topic, payload, content topic, meta, and timestamp.
+ *
+ * @param pubsubTopic - The pubsub topic string
+ * @param message - The message to be hashed
+ * @returns A Uint8Array containing the SHA-256 hash
+ *
+ * @example
+ * ```typescript
+ * import { messageHash } from "@waku/core";
+ *
+ * const pubsubTopic = "/waku/2/default-waku/proto";
+ * const message = {
+ * payload: new Uint8Array([1, 2, 3, 4]),
+ * contentTopic: "/waku/2/default-content/proto",
+ * meta: new Uint8Array([5, 6, 7, 8]),
+ * timestamp: new Date()
+ * };
+ *
+ * const hash = messageHash(pubsubTopic, message);
+ * ```
+ */
+ function messageHash(pubsubTopic, message) {
+ const pubsubTopicBytes = utf8ToBytes(pubsubTopic);
+ const contentTopicBytes = utf8ToBytes(message.contentTopic);
+ const timestampBytes = tryConvertTimestampToBytes(message.timestamp);
+ const bytes = concat$1([
+ pubsubTopicBytes,
+ message.payload,
+ contentTopicBytes,
+ message.meta,
+ timestampBytes
+ ].filter(isDefined));
+ return sha256(bytes);
+ }
+ function tryConvertTimestampToBytes(timestamp) {
+ if (!timestamp) {
+ return;
+ }
+ let bigIntTimestamp;
+ if (typeof timestamp === "bigint") {
+ bigIntTimestamp = timestamp;
+ }
+ else {
+ bigIntTimestamp = BigInt(timestamp.valueOf()) * 1000000n;
+ }
+ return numberToBytes(bigIntTimestamp);
+ }
+ /**
+ * Computes a deterministic message hash and returns it as a hexadecimal string.
+ * This is a convenience wrapper around messageHash that converts the result to a hex string.
+ *
+ * @param pubsubTopic - The pubsub topic string
+ * @param message - The message to be hashed
+ * @returns A string containing the hex representation of the SHA-256 hash
+ *
+ * @example
+ * ```typescript
+ * import { messageHashStr } from "@waku/core";
+ *
+ * const pubsubTopic = "/waku/2/default-waku/proto";
+ * const message = {
+ * payload: new Uint8Array([1, 2, 3, 4]),
+ * contentTopic: "/waku/2/default-content/proto",
+ * meta: new Uint8Array([5, 6, 7, 8]),
+ * timestamp: new Date()
+ * };
+ *
+ * const hashString = messageHashStr(pubsubTopic, message);
+ * console.log(hashString); // e.g. "a1b2c3d4..."
+ * ```
+ */
+ function messageHashStr(pubsubTopic, message) {
+ const hash = messageHash(pubsubTopic, message);
+ const hashStr = bytesToHex(hash);
+ return hashStr;
+ }
+
+ export { ConnectionManager, FilterCodecs, FilterCore, LightPushCodec, LightPushCore, MetadataCodec, StoreCodec, StoreCore, createEncoder, index$3 as message, messageHash, messageHashStr, wakuMetadata, index$2 as waku_filter, index$1 as waku_light_push, index as waku_store };
@@ -1,2 +1,2 @@
- export { B as BaseProtocol } from '../base_protocol-Cql7Ssk4.js';
- import '../index-BF77B5GY.js';
+ export { B as BaseProtocol } from '../base_protocol-DvQrudwy.js';
+ import '../index-CTo1my9M.js';
@@ -1,2 +1,2 @@
- export { D as DecodedMessage, r as Decoder, E as Encoder, V as Version, o as createDecoder, k as createEncoder, q as proto } from '../../version_0-Duf8Mz94.js';
- import '../../index-BF77B5GY.js';
+ export { D as DecodedMessage, z as Decoder, E as Encoder, V as Version, x as createDecoder, k as createEncoder, y as proto } from '../../version_0-CyeTW0Vr.js';
+ import '../../index-CTo1my9M.js';
@@ -1,4 +1,4 @@
- import { f as base256emoji, g as base64, h as base58, i as base36, j as base32, k as base16, l as base10, m as base8, n as base2, o as identityBase, L as Logger } from './index-BF77B5GY.js';
+ import { g as base256emoji, h as base64, i as base58, j as base36, k as base32, l as base16, m as base10, n as base8, o as base2, p as identityBase, L as Logger } from './index-CTo1my9M.js';

  /**
  * Returns a `Uint8Array` of the requested size. Referenced memory will
@@ -4350,6 +4350,21 @@ function toString(array, encoding = 'utf8') {
  return base.encoder.encode(array).substring(1);
  }

+ function numberToBytes(value) {
+ const buffer = new ArrayBuffer(8);
+ const view = new DataView(buffer);
+ if (typeof value === "number") {
+ view.setFloat64(0, value, false);
+ }
+ else {
+ view.setBigInt64(0, value, false);
+ }
+ return new Uint8Array(buffer);
+ }
+ /**
+ * Convert byte array to hex string (no `0x` prefix).
+ */
+ const bytesToHex = (bytes) => toString(bytes, "base16");
  /**
  * Decode byte array to utf-8 string.
  */
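The `numberToBytes` helper added above packs a timestamp into 8 big-endian bytes (float64 for a `number`, int64 for a `bigint`); the `messageHash` code in the bundle first converts a `Date` to nanoseconds. A minimal sketch of that conversion, assuming only the behaviour shown in this diff (the `wakuTimestampBytes` name is illustrative):

```typescript
// Sketch of how a Date timestamp becomes the 8-byte value fed into messageHash.
// Mirrors tryConvertTimestampToBytes + numberToBytes from the diff; names are illustrative.
function wakuTimestampBytes(timestamp: Date): Uint8Array {
  // Milliseconds since epoch -> nanoseconds, as a bigint.
  const nanoseconds = BigInt(timestamp.valueOf()) * 1_000_000n;
  // Big-endian int64, matching numberToBytes(value) for bigint input.
  const buffer = new ArrayBuffer(8);
  new DataView(buffer).setBigInt64(0, nanoseconds, false);
  return new Uint8Array(buffer);
}

// The same instant always yields the same bytes, which keeps the
// resulting message hash deterministic across peers.
const bytes = wakuTimestampBytes(new Date("2024-01-01T00:00:00Z"));
console.log(bytes.length); // 8
```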
@@ -4532,9 +4547,6 @@ class DecodedMessage {
  get contentTopic() {
  return this.proto.contentTopic;
  }
- get _rawTimestamp() {
- return this.proto.timestamp;
- }
  get timestamp() {
  // In the case we receive a value that is bigger than JS's max number,
  // we catch the error and return undefined.
@@ -4556,7 +4568,7 @@ class DecodedMessage {
  get version() {
  // https://rfc.vac.dev/spec/14/
  // > If omitted, the value SHOULD be interpreted as version 0.
- return this.proto.version ?? 0;
+ return this.proto.version ?? Version;
  }
  get rateLimitProof() {
  return this.proto.rateLimitProof;
@@ -4665,4 +4677,4 @@ var version_0 = /*#__PURE__*/Object.freeze({
  proto: message
  });

- export { DecodedMessage as D, Encoder as E, FilterSubscribeRequest as F, MessagePush as M, PushRpc$1 as P, StoreQueryRequest$1 as S, Version as V, WakuMetadataRequest as W, allocUnsafe as a, alloc$1 as b, encode as c, decode as d, encodingLength as e, FilterSubscribeResponse$1 as f, PushResponse as g, StoreQueryResponse$1 as h, bases as i, fromString as j, createEncoder as k, bytesToUtf8 as l, pubsubTopicsToShardInfo as m, WakuMetadataResponse as n, createDecoder as o, pubsubTopicToSingleShardInfo as p, message as q, Decoder as r, shardInfoToPubsubTopics as s, toString as t, utf8ToBytes as u, version_0 as v };
+ export { DecodedMessage as D, Encoder as E, FilterSubscribeRequest as F, MessagePush as M, PushRpc$1 as P, StoreQueryRequest$1 as S, Version as V, WakuMetadataRequest as W, allocUnsafe as a, alloc$1 as b, encode as c, decode as d, encodingLength as e, FilterSubscribeResponse$1 as f, PushResponse as g, StoreQueryResponse$1 as h, bases as i, fromString as j, createEncoder as k, bytesToUtf8 as l, pubsubTopicsToShardInfo as m, WakuMetadataResponse as n, concat as o, pubsubTopicToSingleShardInfo as p, sha256 as q, bytesToHex as r, shardInfoToPubsubTopics as s, toString as t, utf8ToBytes as u, version_0 as v, numberToBytes as w, createDecoder as x, message as y, Decoder as z };