@waku/core 0.0.28-b5e8b17.0 → 0.0.28-efe9b8d.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,4 @@
- import { b as bytesToUtf8, L as Logger, k as ensureShardingConfigured } from './index-vlQahmUj.js';
+ import { b as bytesToUtf8, L as Logger, j as ensureShardingConfigured } from './index-BJwgMx4y.js';
  import { T as Tags } from './browser-DoQRY-an.js';

  const decodeRelayShard = (bytes) => {
@@ -1,24 +1,10 @@
  import { i as identityBase, c as base2, d as base8, e as base10, f as base16, h as base32, j as base36, k as base58, l as base64, m as base256emoji, n as debug } from './browser-DoQRY-an.js';

- /**
- * To guarantee Uint8Array semantics, convert nodejs Buffers
- * into vanilla Uint8Arrays
- */
- function asUint8Array(buf) {
- if (globalThis.Buffer != null) {
- return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength);
- }
- return buf;
- }
-
  /**
  * Returns a `Uint8Array` of the requested size. Referenced memory will
  * be initialized to 0.
  */
  function alloc(size = 0) {
- if (globalThis.Buffer?.alloc != null) {
- return asUint8Array(globalThis.Buffer.alloc(size));
- }
  return new Uint8Array(size);
  }
  /**
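A note on what the removed helper was for: Node's `Buffer` subclasses `Uint8Array` but overrides `slice()` to return a view instead of a copy, so `asUint8Array` re-wrapped Buffers to restore standard semantics. A minimal demonstration of the difference it papered over (Node-only, illustrative):

```javascript
// Buffer.slice() returns a view over the same memory; Uint8Array.slice() copies.
const buf = Buffer.from([1, 2, 3]);
const view = buf.slice(0, 2); // shares memory with buf
view[0] = 9;
console.log(buf[0]); // 9 — the mutation is visible through buf

// Re-wrapping as a plain Uint8Array (what asUint8Array did) restores copy semantics.
// byteOffset/byteLength matter because small Buffers live in a shared pool.
const plain = new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength);
const copy = plain.slice(0, 2); // standard Uint8Array.slice copies
copy[0] = 7;
console.log(plain[0]); // still 9
```

With the Buffer fast paths gone, `alloc` and friends behave identically on Node and in browsers, at the cost of a micro-optimization.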
@@ -27,9 +13,6 @@ function alloc(size = 0) {
  * overwrite every value in the returned `Uint8Array`.
  */
  function allocUnsafe(size = 0) {
- if (globalThis.Buffer?.allocUnsafe != null) {
- return asUint8Array(globalThis.Buffer.allocUnsafe(size));
- }
  return new Uint8Array(size);
  }

@@ -92,23 +75,20 @@ function fromString(string, encoding = 'utf8') {
  if (base == null) {
  throw new Error(`Unsupported encoding "${encoding}"`);
  }
- if ((encoding === 'utf8' || encoding === 'utf-8') && globalThis.Buffer != null && globalThis.Buffer.from != null) {
- return asUint8Array(globalThis.Buffer.from(string, 'utf-8'));
- }
  // add multibase prefix
  return base.decoder.decode(`${base.prefix}${string}`); // eslint-disable-line @typescript-eslint/restrict-template-expressions
  }

  // copied from utils
- function isBytes$1(a) {
+ function isBytes(a) {
  return (a instanceof Uint8Array ||
  (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array'));
  }
  function bytes(b, ...lengths) {
- if (!isBytes$1(b))
- throw new Error('Expected Uint8Array');
+ if (!isBytes(b))
+ throw new Error('Uint8Array expected');
  if (lengths.length > 0 && !lengths.includes(b.length))
- throw new Error(`Expected Uint8Array of length ${lengths}, not of length=${b.length}`);
+ throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`);
  }
  function exists(instance, checkFinished = true) {
  if (instance.destroyed)
@@ -131,21 +111,11 @@ function output(out, instance) {
  // from `crypto` to `cryptoNode`, which imports native module.
  // Makes the utils un-importable in browsers without a bundler.
  // Once node.js 18 is deprecated (2025-04-30), we can just drop the import.
- function isBytes(a) {
- return (a instanceof Uint8Array ||
- (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array'));
- }
  // Cast array to view
  const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
  // The rotate right (circular right shift) operation for uint32
  const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift);
- // big-endian hardware is rare. Just in case someone still decides to run hashes:
- // early-throw an error because we don't support BE yet.
- // Other libraries would silently corrupt the data instead of throwing an error,
- // when they don't support it.
- const isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44;
- if (!isLE)
- throw new Error('Non little-endian hardware is not supported');
+ new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44;
  /**
  * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99])
  */
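The deleted block early-threw on big-endian hardware; its probe survives in the new bundle as a bare, unused expression (dead code left behind by the bundler). The probe itself works like this:

```javascript
// Write one 32-bit word, then read its first byte: little-endian machines
// store the least significant byte (0x44) first.
const isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44;
console.log(isLE); // true on x86 and mainstream ARM
```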
@@ -162,8 +132,7 @@ function utf8ToBytes$1(str) {
  function toBytes(data) {
  if (typeof data === 'string')
  data = utf8ToBytes$1(data);
- if (!isBytes(data))
- throw new Error(`expected Uint8Array, got ${typeof data}`);
+ bytes(data);
  return data;
  }
  // For runtime check if class implements interface
@@ -195,8 +164,15 @@ function setBigUint64(view, byteOffset, value, isLE) {
  view.setUint32(byteOffset + h, wh, isLE);
  view.setUint32(byteOffset + l, wl, isLE);
  }
- // Base SHA2 class (RFC 6234)
- class SHA2 extends Hash {
+ // Choice: a ? b : c
+ const Chi = (a, b, c) => (a & b) ^ (~a & c);
+ // Majority function, true if any two inpust is true
+ const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c);
+ /**
+ * Merkle-Damgard hash construction base class.
+ * Could be used to create MD5, RIPEMD, SHA1, SHA2.
+ */
+ class HashMD extends Hash {
  constructor(blockLen, outputLen, padOffset, isLE) {
  super();
  this.blockLen = blockLen;
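`Chi` and `Maj` are the standard SHA-2 round functions, now hoisted above the base class that was renamed from `SHA2` to `HashMD`. Spelled out on small inputs:

```javascript
// Chi picks bits from b where a is 1, and from c where a is 0.
const Chi = (a, b, c) => (a & b) ^ (~a & c);
// Maj is 1 wherever at least two of the three inputs are 1.
const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c);

console.log(Chi(0b1100, 0b1010, 0b0110).toString(2)); // "1010"
console.log(Maj(0b1100, 0b1010, 0b0110).toString(2)); // "1110"
```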
@@ -248,7 +224,8 @@ class SHA2 extends Hash {
  // append the bit '1' to the message
  buffer[pos++] = 0b10000000;
  this.buffer.subarray(pos).fill(0);
- // we have less than padOffset left in buffer, so we cannot put length in current block, need process it and pad again
+ // we have less than padOffset left in buffer, so we cannot put length in
+ // current block, need process it and pad again
  if (this.padOffset > blockLen - pos) {
  this.process(view, 0);
  pos = 0;
@@ -296,10 +273,6 @@ class SHA2 extends Hash {

  // SHA2-256 need to try 2^128 hashes to execute birthday attack.
  // BTC network is doing 2^67 hashes/sec as per early 2023.
- // Choice: a ? b : c
- const Chi = (a, b, c) => (a & b) ^ (~a & c);
- // Majority function, true if any two inpust is true
- const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c);
  // Round constants:
  // first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311)
  // prettier-ignore
@@ -313,27 +286,28 @@ const SHA256_K = /* @__PURE__ */ new Uint32Array([
  0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
  0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
  ]);
- // Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19):
+ // Initial state:
+ // first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19
  // prettier-ignore
- const IV = /* @__PURE__ */ new Uint32Array([
+ const SHA256_IV = /* @__PURE__ */ new Uint32Array([
  0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
  ]);
  // Temporary buffer, not used to store anything between runs
  // Named this way because it matches specification.
  const SHA256_W = /* @__PURE__ */ new Uint32Array(64);
- class SHA256 extends SHA2 {
+ class SHA256 extends HashMD {
  constructor() {
  super(64, 32, 8, false);
  // We cannot use array here since array allows indexing by variable
  // which means optimizer/compiler cannot use registers.
- this.A = IV[0] | 0;
- this.B = IV[1] | 0;
- this.C = IV[2] | 0;
- this.D = IV[3] | 0;
- this.E = IV[4] | 0;
- this.F = IV[5] | 0;
- this.G = IV[6] | 0;
- this.H = IV[7] | 0;
+ this.A = SHA256_IV[0] | 0;
+ this.B = SHA256_IV[1] | 0;
+ this.C = SHA256_IV[2] | 0;
+ this.D = SHA256_IV[3] | 0;
+ this.E = SHA256_IV[4] | 0;
+ this.F = SHA256_IV[5] | 0;
+ this.G = SHA256_IV[6] | 0;
+ this.H = SHA256_IV[7] | 0;
  }
  get() {
  const { A, B, C, D, E, F, G, H } = this;
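The `IV` → `SHA256_IV` rename is mechanical, but the constants are easy to sanity-check: each word is the first 32 bits of the fractional part of a square root, e.g. the first word from √2:

```javascript
// First 32 bits of the fractional part of √2, per the SHA-256 spec.
const frac = Math.sqrt(2) % 1;
const word = Math.floor(frac * 2 ** 32);
console.log(word.toString(16)); // "6a09e667" — matches SHA256_IV[0]
```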
@@ -423,9 +397,6 @@ function toString(array, encoding = 'utf8') {
  if (base == null) {
  throw new Error(`Unsupported encoding "${encoding}"`);
  }
- if ((encoding === 'utf8' || encoding === 'utf-8') && globalThis.Buffer != null && globalThis.Buffer.from != null) {
- return globalThis.Buffer.from(array.buffer, array.byteOffset, array.byteLength).toString('utf8');
- }
  // strip multibase prefix
  return base.encoder.encode(array).substring(1);
  }
@@ -471,7 +442,7 @@ const shardInfoToPubsubTopics = (shardInfo) => {
  else if ("application" in shardInfo && "version" in shardInfo) {
  // Autosharding: single shard from application and version
  return [
- contentTopicToPubsubTopic(`/${shardInfo.application}/${shardInfo.version}/default/default`)
+ contentTopicToPubsubTopic(`/${shardInfo.application}/${shardInfo.version}/default/default`, shardInfo.clusterId)
  ];
  }
  else {
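This one-argument-to-two change is the substantive fix in the release: autosharded pubsub topics embed the cluster id, so deriving them without `shardInfo.clusterId` pinned every application/version pair to the default cluster. A self-contained sketch of the topic shape (the shard index is derived by hashing application and version, not reproduced here; the values are made up):

```javascript
// Static/auto sharding pubsub topics take the shape /waku/2/rs/<clusterId>/<shard>.
const clusterId = 3;
const shard = 7; // pretend the autosharding hash picked shard 7
console.log(`/waku/2/rs/${clusterId}/${shard}`); // "/waku/2/rs/3/7"
// Without the second argument, the old code always derived the default cluster's topic.
```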
@@ -608,7 +579,7 @@ const ensureShardingConfigured = (shardInfo) => {
  };
  }
  if (isApplicationVersionConfigured) {
- const pubsubTopic = contentTopicToPubsubTopic(`/${application}/${version}/default/default`);
+ const pubsubTopic = contentTopicToPubsubTopic(`/${application}/${version}/default/default`, clusterId);
  return {
  shardingParams: { clusterId, application, version },
  shardInfo: {
@@ -649,4 +620,4 @@ class Logger {
  }
  }

- export { DefaultPubsubTopic as D, Logger as L, asUint8Array as a, bytesToUtf8 as b, concat as c, allocUnsafe as d, alloc as e, singleShardInfoToPubsubTopic as f, ensurePubsubTopicIsConfigured as g, shardInfoToPubsubTopics as h, fromString as i, determinePubsubTopic as j, ensureShardingConfigured as k, pubsubTopicToSingleShardInfo as p, sha256 as s, utf8ToBytes as u };
+ export { DefaultPubsubTopic as D, Logger as L, allocUnsafe as a, bytesToUtf8 as b, concat as c, alloc as d, singleShardInfoToPubsubTopic as e, ensurePubsubTopicIsConfigured as f, shardInfoToPubsubTopics as g, fromString as h, determinePubsubTopic as i, ensureShardingConfigured as j, pubsubTopicToSingleShardInfo as p, sha256 as s, utf8ToBytes as u };
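The single-letter aliases in this export map are rollup artifacts, not public API: deleting `asUint8Array` shifted every letter after `a`, which is why the sibling chunks below re-letter their imports and why all chunk hashes change together in this release:

```javascript
// Same symbol, different generated letter across builds:
// 0.0.28-b5e8b17.0: import { k as ensureShardingConfigured } from './index-vlQahmUj.js';
// 0.0.28-efe9b8d.0:
import { j as ensureShardingConfigured } from './index-BJwgMx4y.js';
// Consumers should import by public name from @waku/core, never by bundle letter.
```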
package/bundle/index.js CHANGED
@@ -1,13 +1,9 @@
- import { v as version_0, e as encodingLength, a as encode$1, d as decode$1, M as MessagePush, F as FilterSubscribeRequest, b as FilterSubscribeResponse$1, P as PushRpc$1, c as PushResponse, H as HistoryRpc$1, f as PagingInfo, g as HistoryResponse, h as createEncoder, W as WakuMetadataResponse, i as WakuMetadataRequest } from './version_0-DiakMc1A.js';
- export { j as createDecoder } from './version_0-DiakMc1A.js';
+ import { v as version_0, e as encodingLength, a as encode$1, d as decode$1, M as MessagePush, F as FilterSubscribeRequest, b as FilterSubscribeResponse$1, P as PushRpc$1, c as PushResponse, H as HistoryRpc$1, f as PagingInfo, g as HistoryResponse, h as createEncoder, W as WakuMetadataResponse, i as WakuMetadataRequest } from './version_0-C6o0DvNW.js';
+ export { j as createDecoder } from './version_0-C6o0DvNW.js';
  import { g as getDefaultExportFromCjs, P as ProtocolError, a as Protocols, E as EConnectionStateEvents, T as Tags, b as EPeersByDiscoveryEvents } from './browser-DoQRY-an.js';
- import { b as bytesToUtf8, u as utf8ToBytes, c as concat$1, s as sha256, a as asUint8Array, d as allocUnsafe, e as alloc, L as Logger, f as singleShardInfoToPubsubTopic, g as ensurePubsubTopicIsConfigured, D as DefaultPubsubTopic, p as pubsubTopicToSingleShardInfo, h as shardInfoToPubsubTopics } from './index-vlQahmUj.js';
- import { B as BaseProtocol, d as decodeRelayShard, e as encodeRelayShard } from './base_protocol-BCwLeb-A.js';
- export { S as StreamManager } from './base_protocol-BCwLeb-A.js';
-
- function isDefined(value) {
- return Boolean(value);
- }
+ import { b as bytesToUtf8, u as utf8ToBytes, c as concat$1, s as sha256, a as allocUnsafe, d as alloc, L as Logger, e as singleShardInfoToPubsubTopic, f as ensurePubsubTopicIsConfigured, D as DefaultPubsubTopic, p as pubsubTopicToSingleShardInfo, g as shardInfoToPubsubTopics } from './index-BJwgMx4y.js';
+ import { B as BaseProtocol, d as decodeRelayShard, e as encodeRelayShard } from './base_protocol-D0Zdzb-v.js';
+ export { S as StreamManager } from './base_protocol-D0Zdzb-v.js';

  function groupByContentTopic(values) {
  const groupedDecoders = new Map();
@@ -174,13 +170,18 @@ function all(source) {
  return arr;
  }

+ /**
+ * To guarantee Uint8Array semantics, convert nodejs Buffers
+ * into vanilla Uint8Arrays
+ */
+ function asUint8Array(buf) {
+ return buf;
+ }
+
  /**
  * Returns a new Uint8Array created by concatenating the passed Uint8Arrays
  */
  function concat(arrays, length) {
- if (globalThis.Buffer != null) {
- return asUint8Array(globalThis.Buffer.concat(arrays, length));
- }
  if (length == null) {
  length = arrays.reduce((acc, curr) => acc + curr.length, 0);
  }
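With the `globalThis.Buffer` branch removed, `concat` always takes the pure-JS path that follows: allocate the total length, then copy each chunk at its running offset. An equivalent standalone version for reference:

```javascript
function concatUint8(arrays, length) {
  if (length == null) {
    // Sum chunk lengths unless the caller already knows the total.
    length = arrays.reduce((acc, curr) => acc + curr.length, 0);
  }
  const output = new Uint8Array(length);
  let offset = 0;
  for (const arr of arrays) {
    output.set(arr, offset);
    offset += arr.length;
  }
  return output;
}

console.log(concatUint8([Uint8Array.of(1, 2), Uint8Array.of(3)])); // Uint8Array(3) [ 1, 2, 3 ]
```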
@@ -1358,6 +1359,47 @@ function _pushable(getNext, options) {
  return pushable;
  }

+ /**
+ * @packageDocumentation
+ *
+ * Merge several (async)iterables into one, yield values as they arrive.
+ *
+ * Nb. sources are iterated over in parallel so the order of emitted items is not guaranteed.
+ *
+ * @example
+ *
+ * ```javascript
+ * import merge from 'it-merge'
+ * import all from 'it-all'
+ *
+ * // This can also be an iterator, generator, etc
+ * const values1 = [0, 1, 2, 3, 4]
+ * const values2 = [5, 6, 7, 8, 9]
+ *
+ * const arr = all(merge(values1, values2))
+ *
+ * console.info(arr) // 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
+ * ```
+ *
+ * Async sources must be awaited:
+ *
+ * ```javascript
+ * import merge from 'it-merge'
+ * import all from 'it-all'
+ *
+ * // This can also be an iterator, async iterator, generator, etc
+ * const values1 = async function * () {
+ * yield * [0, 1, 2, 3, 4]
+ * }
+ * const values2 = async function * () {
+ * yield * [5, 6, 7, 8, 9]
+ * }
+ *
+ * const arr = await all(merge(values1(), values2()))
+ *
+ * console.info(arr) // 0, 1, 5, 6, 2, 3, 4, 7, 8, 9 <- nb. order is not guaranteed
+ * ```
+ */
  function isAsyncIterable$1(thing) {
  return thing[Symbol.asyncIterator] != null;
  }
@@ -2128,7 +2170,7 @@ function toProtoMessage(wire) {
  return { ...EmptyMessage, ...wire };
  }

- const OneMillion = BigInt(1000000);
+ const OneMillion = BigInt(1_000_000);
  var PageDirection;
  (function (PageDirection) {
  PageDirection["BACKWARD"] = "backward";
@@ -2201,261 +2243,80 @@ function directionToProto(pageDirection) {
  var HistoryError = HistoryResponse.HistoryError;
  const log$4 = new Logger("store");
  const StoreCodec = "/vac/waku/store/2.0.0-beta4";
- const DefaultPageSize = 10;
  /**
  * Implements the [Waku v2 Store protocol](https://rfc.vac.dev/spec/13/).
  *
  * The Waku Store protocol can be used to retrieved historical messages.
  */
- class Store extends BaseProtocol {
- NUM_PEERS_PROTOCOL = 1;
+ class StoreCore extends BaseProtocol {
  constructor(libp2p, options) {
  super(StoreCodec, libp2p.components, log$4, options.pubsubTopics, options);
  }
- /**
- * Processes messages based on the provided callback and options.
- * @private
- */
- async processMessages(messages, callback, options) {
- let abort = false;
- const messagesOrUndef = await Promise.all(messages);
- let processedMessages = messagesOrUndef.filter(isDefined);
- if (this.shouldReverseOrder(options)) {
- processedMessages = processedMessages.reverse();
+ async *queryPerPage(queryOpts, decoders, peer) {
+ if (queryOpts.contentTopics.toString() !==
+ Array.from(decoders.keys()).toString()) {
+ throw new Error("Internal error, the decoders should match the query's content topics");
  }
- await Promise.all(processedMessages.map(async (msg) => {
- if (msg && !abort) {
- abort = Boolean(await callback(msg));
+ let currentCursor = queryOpts.cursor;
+ while (true) {
+ queryOpts.cursor = currentCursor;
+ const historyRpcQuery = HistoryRpc.createQuery(queryOpts);
+ const stream = await this.getStream(peer);
+ const res = await pipe([historyRpcQuery.encode()], encode, stream, decode, async (source) => await all(source));
+ const bytes = new Uint8ArrayList();
+ res.forEach((chunk) => {
+ bytes.append(chunk);
+ });
+ const reply = historyRpcQuery.decode(bytes);
+ if (!reply.response) {
+ log$4.warn("Stopping pagination due to store `response` field missing");
+ break;
  }
- }));
- return abort;
- }
- /**
- * Determines whether to reverse the order of messages based on the provided options.
- *
- * Messages in pages are ordered from oldest (first) to most recent (last).
- * https://github.com/vacp2p/rfc/issues/533
- *
- * @private
- */
- shouldReverseOrder(options) {
- return (typeof options?.pageDirection === "undefined" ||
- options?.pageDirection === PageDirection.BACKWARD);
- }
- /**
- * @deprecated Use `queryWithOrderedCallback` instead
- **/
- queryOrderedCallback = this.queryWithOrderedCallback;
- /**
- * Do a query to a Waku Store to retrieve historical/missed messages.
- *
- * The callback function takes a `WakuMessage` in input,
- * messages are processed in order:
- * - oldest to latest if `options.pageDirection` == { @link PageDirection.FORWARD }
- * - latest to oldest if `options.pageDirection` == { @link PageDirection.BACKWARD }
- *
- * The ordering may affect performance.
- * The ordering depends on the behavior of the remote store node.
- * If strong ordering is needed, you may need to handle this at application level
- * and set your own timestamps too (the WakuMessage timestamps are not certified).
- *
- * @throws If not able to reach a Waku Store peer to query,
- * or if an error is encountered when processing the reply,
- * or if two decoders with the same content topic are passed.
- */
- async queryWithOrderedCallback(decoders, callback, options) {
- for await (const promises of this.queryGenerator(decoders, options)) {
- if (await this.processMessages(promises, callback, options))
+ const response = reply.response;
+ if (response.error && response.error !== HistoryError.NONE) {
+ throw "History response contains an Error: " + response.error;
+ }
+ if (!response.messages || !response.messages.length) {
+ log$4.warn("Stopping pagination due to store `response.messages` field missing or empty");
  break;
- }
- }
- /**
- * Do a query to a Waku Store to retrieve historical/missed messages.
- * The callback function takes a `Promise<WakuMessage>` in input,
- * useful if messages need to be decrypted and performance matters.
- *
- * The order of the messages passed to the callback is as follows:
- * - within a page, messages are expected to be ordered from oldest to most recent
- * - pages direction depends on { @link QueryOptions.pageDirection }
- *
- * Do note that the resolution of the `Promise<WakuMessage | undefined` may
- * break the order as it may rely on the browser decryption API, which in turn,
- * may have a different speed depending on the type of decryption.
- *
- * @throws If not able to reach a Waku Store peer to query,
- * or if an error is encountered when processing the reply,
- * or if two decoders with the same content topic are passed.
- */
- async queryWithPromiseCallback(decoders, callback, options) {
- let abort = false;
- for await (const page of this.queryGenerator(decoders, options)) {
- const _promises = page.map(async (msgPromise) => {
- if (abort)
- return;
- abort = Boolean(await callback(msgPromise));
+ }
+ log$4.error(`${response.messages.length} messages retrieved from store`);
+ yield response.messages.map((protoMsg) => {
+ const contentTopic = protoMsg.contentTopic;
+ if (typeof contentTopic !== "undefined") {
+ const decoder = decoders.get(contentTopic);
+ if (decoder) {
+ return decoder.fromProtoObj(queryOpts.pubsubTopic, toProtoMessage(protoMsg));
+ }
+ }
+ return Promise.resolve(undefined);
  });
- await Promise.all(_promises);
- if (abort)
+ const nextCursor = response.pagingInfo?.cursor;
+ if (typeof nextCursor === "undefined") {
+ // If the server does not return cursor then there is an issue,
+ // Need to abort, or we end up in an infinite loop
+ log$4.warn("Stopping pagination due to `response.pagingInfo.cursor` missing from store response");
+ break;
+ }
+ currentCursor = nextCursor;
+ const responsePageSize = response.pagingInfo?.pageSize;
+ const queryPageSize = historyRpcQuery.query?.pagingInfo?.pageSize;
+ if (
+ // Response page size smaller than query, meaning this is the last page
+ responsePageSize &&
+ queryPageSize &&
+ responsePageSize < queryPageSize) {
  break;
- }
- }
- /**
- * Do a query to a Waku Store to retrieve historical/missed messages.
- *
- * This is a generator, useful if you want most control on how messages
- * are processed.
- *
- * The order of the messages returned by the remote Waku node SHOULD BE
- * as follows:
- * - within a page, messages SHOULD be ordered from oldest to most recent
- * - pages direction depends on { @link QueryOptions.pageDirection }
- * @throws If not able to reach a Waku Store peer to query,
- * or if an error is encountered when processing the reply,
- * or if two decoders with the same content topic are passed.
- *
- * This API only supports querying a single pubsub topic at a time.
- * If multiple decoders are provided, they must all have the same pubsub topic.
- * @throws If multiple decoders with different pubsub topics are provided.
- * @throws If no decoders are provided.
- * @throws If no decoders are found for the provided pubsub topic.
- */
- async *queryGenerator(decoders, options) {
- if (decoders.length === 0) {
- throw new Error("No decoders provided");
- }
- let startTime, endTime;
- if (options?.timeFilter) {
- startTime = options.timeFilter.startTime;
- endTime = options.timeFilter.endTime;
- }
- // convert array to set to remove duplicates
- const uniquePubsubTopicsInQuery = Array.from(new Set(decoders.map((decoder) => decoder.pubsubTopic)));
- // If multiple pubsub topics are provided, throw an error
- if (uniquePubsubTopicsInQuery.length > 1) {
- throw new Error("API does not support querying multiple pubsub topics at once");
- }
- // we can be certain that there is only one pubsub topic in the query
- const pubsubTopicForQuery = uniquePubsubTopicsInQuery[0];
- ensurePubsubTopicIsConfigured(pubsubTopicForQuery, this.pubsubTopics);
- // check that the pubsubTopic from the Cursor and Decoder match
- if (options?.cursor?.pubsubTopic &&
- options.cursor.pubsubTopic !== pubsubTopicForQuery) {
- throw new Error(`Cursor pubsub topic (${options?.cursor?.pubsubTopic}) does not match decoder pubsub topic (${pubsubTopicForQuery})`);
- }
- const decodersAsMap = new Map();
- decoders.forEach((dec) => {
- if (decodersAsMap.has(dec.contentTopic)) {
- throw new Error("API does not support different decoder per content topic");
- }
- decodersAsMap.set(dec.contentTopic, dec);
- });
- const contentTopics = decoders
- .filter((decoder) => decoder.pubsubTopic === pubsubTopicForQuery)
- .map((dec) => dec.contentTopic);
- if (contentTopics.length === 0) {
- throw new Error("No decoders found for topic " + pubsubTopicForQuery);
- }
- const queryOpts = Object.assign({
- pubsubTopic: pubsubTopicForQuery,
- pageDirection: PageDirection.BACKWARD,
- pageSize: DefaultPageSize
- }, options, { contentTopics, startTime, endTime });
- const peer = (await this.getPeers({
- numPeers: this.NUM_PEERS_PROTOCOL,
- maxBootstrapPeers: 1
- }))[0];
- for await (const messages of paginate(this.getStream.bind(this, peer), queryOpts, decodersAsMap, options?.cursor)) {
- yield messages;
- }
- }
- }
- async function* paginate(streamFactory, queryOpts, decoders, cursor) {
- if (queryOpts.contentTopics.toString() !==
- Array.from(decoders.keys()).toString()) {
- throw new Error("Internal error, the decoders should match the query's content topics");
- }
- let currentCursor = cursor;
- while (true) {
- queryOpts.cursor = currentCursor;
- const historyRpcQuery = HistoryRpc.createQuery(queryOpts);
- log$4.info("Querying store peer", `for (${queryOpts.pubsubTopic})`, queryOpts.contentTopics);
- const stream = await streamFactory();
- const res = await pipe([historyRpcQuery.encode()], encode, stream, decode, async (source) => await all(source));
- const bytes = new Uint8ArrayList();
- res.forEach((chunk) => {
- bytes.append(chunk);
- });
- const reply = historyRpcQuery.decode(bytes);
- if (!reply.response) {
- log$4.warn("Stopping pagination due to store `response` field missing");
- break;
- }
- const response = reply.response;
- if (response.error && response.error !== HistoryError.NONE) {
- throw "History response contains an Error: " + response.error;
- }
- if (!response.messages || !response.messages.length) {
- log$4.warn("Stopping pagination due to store `response.messages` field missing or empty");
- break;
- }
- log$4.error(`${response.messages.length} messages retrieved from store`);
- yield response.messages.map((protoMsg) => {
- const contentTopic = protoMsg.contentTopic;
- if (typeof contentTopic !== "undefined") {
- const decoder = decoders.get(contentTopic);
- if (decoder) {
- return decoder.fromProtoObj(queryOpts.pubsubTopic, toProtoMessage(protoMsg));
- }
  }
- return Promise.resolve(undefined);
- });
- const nextCursor = response.pagingInfo?.cursor;
- if (typeof nextCursor === "undefined") {
- // If the server does not return cursor then there is an issue,
- // Need to abort, or we end up in an infinite loop
- log$4.warn("Stopping pagination due to `response.pagingInfo.cursor` missing from store response");
- break;
- }
- currentCursor = nextCursor;
- const responsePageSize = response.pagingInfo?.pageSize;
- const queryPageSize = historyRpcQuery.query?.pagingInfo?.pageSize;
- if (
- // Response page size smaller than query, meaning this is the last page
- responsePageSize &&
- queryPageSize &&
- responsePageSize < queryPageSize) {
- break;
  }
  }
  }
- async function createCursor(message) {
- if (!message ||
- !message.timestamp ||
- !message.payload ||
- !message.contentTopic) {
- throw new Error("Message is missing required fields");
- }
- const contentTopicBytes = utf8ToBytes(message.contentTopic);
- const digest = sha256(concat$1([contentTopicBytes, message.payload]));
- const messageTime = BigInt(message.timestamp.getTime()) * BigInt(1000000);
- return {
- digest,
- pubsubTopic: message.pubsubTopic,
- senderTime: messageTime,
- receiverTime: messageTime
- };
- }
- function wakuStore(init = {}) {
- return (libp2p) => new Store(libp2p, init);
- }

  var index = /*#__PURE__*/Object.freeze({
  __proto__: null,
- DefaultPageSize: DefaultPageSize,
  get PageDirection () { return PageDirection; },
  StoreCodec: StoreCodec,
- createCursor: createCursor,
- wakuStore: wakuStore
+ StoreCore: StoreCore
  });

  class TimeoutError extends Error {
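The net effect of this hunk: the callback and generator surfaces (`queryWithOrderedCallback`, `queryWithPromiseCallback`, `queryGenerator`), `createCursor`, and the `wakuStore` factory leave `@waku/core`; what remains is `StoreCore.queryPerPage`, an async generator that yields one array of message promises per page and resolves undecodable entries to `undefined`. A self-contained model of that paging contract (toy data, none of the real protocol plumbing):

```javascript
// Stand-in for StoreCore.queryPerPage: each yielded page is an array of
// promises, resolving to undefined where no decoder matched.
async function* queryPerPageModel() {
  yield [Promise.resolve({ payload: "a" }), Promise.resolve(undefined)];
  yield [Promise.resolve({ payload: "b" })];
}

for await (const page of queryPerPageModel()) {
  const messages = (await Promise.all(page)).filter(Boolean); // drop undecoded entries
  messages.forEach((m) => console.log(m.payload)); // "a" then "b"
}
```

The higher-level ordering and callback helpers presumably move up to the SDK layer, which matches the `waku.store.protocol` change further down.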
@@ -2576,8 +2437,8 @@ function pTimeout(promise, options) {
  }

  const normalizeEmitter = emitter => {
- const addListener = emitter.on || emitter.addListener || emitter.addEventListener;
- const removeListener = emitter.off || emitter.removeListener || emitter.removeEventListener;
+ const addListener = emitter.addEventListener || emitter.on || emitter.addListener;
+ const removeListener = emitter.removeEventListener || emitter.off || emitter.removeListener;

  if (!addListener || !removeListener) {
  throw new TypeError('Emitter is not compatible');
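In `normalizeEmitter`, the probe order now prefers EventTarget-style `addEventListener` over Node's `on`/`addListener` when an emitter implements both. The pattern, checked against a plain `EventTarget` (available in Node 15+ and browsers):

```javascript
const target = new EventTarget();
// Same probe as above: addEventListener wins if present.
const addListener = target.addEventListener || target.on || target.addListener;
addListener.call(target, "ready", () => console.log("fired"));
target.dispatchEvent(new Event("ready")); // logs "fired"
```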
@@ -2724,7 +2585,7 @@ async function waitForRemotePeer(waku, protocols, timeoutMs) {
  if (protocols.includes(Protocols.Store)) {
  if (!waku.store)
  throw new Error("Cannot wait for Store peer: protocol not mounted");
- promises.push(waitForConnectedPeer(waku.store, waku.libp2p.services.metadata));
+ promises.push(waitForConnectedPeer(waku.store.protocol, waku.libp2p.services.metadata));
  }
  if (protocols.includes(Protocols.LightPush)) {
  if (!waku.lightPush)
@@ -3524,4 +3385,4 @@ function wakuMetadata(shardInfo) {
  return (components) => new Metadata(shardInfo, components);
  }

- export { ConnectionManager, FilterCodecs, KeepAliveManager, LightPushCodec, LightPushCore, MetadataCodec, PageDirection, createCursor, createEncoder, index$3 as message, waitForRemotePeer, wakuFilter, wakuMetadata, wakuStore, index$2 as waku_filter, index$1 as waku_light_push, index as waku_store };
+ export { ConnectionManager, FilterCodecs, KeepAliveManager, LightPushCodec, LightPushCore, MetadataCodec, PageDirection, StoreCore, createEncoder, index$3 as message, waitForRemotePeer, wakuFilter, wakuMetadata, index$2 as waku_filter, index$1 as waku_light_push, index as waku_store };
@@ -1,3 +1,3 @@
- import '../index-vlQahmUj.js';
+ import '../index-BJwgMx4y.js';
  import '../browser-DoQRY-an.js';
- export { B as BaseProtocol } from '../base_protocol-BCwLeb-A.js';
+ export { B as BaseProtocol } from '../base_protocol-D0Zdzb-v.js';
@@ -1,3 +1,3 @@
- export { D as DecodedMessage, k as Decoder, E as Encoder, V as Version, j as createDecoder, h as createEncoder, m as proto } from '../../version_0-DiakMc1A.js';
- import '../../index-vlQahmUj.js';
+ export { D as DecodedMessage, k as Decoder, E as Encoder, V as Version, j as createDecoder, h as createEncoder, m as proto } from '../../version_0-C6o0DvNW.js';
+ import '../../index-BJwgMx4y.js';
  import '../../browser-DoQRY-an.js';