@wovin/core 0.0.10 → 0.0.11

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/applog.min.js +2 -2
  2. package/dist/{chunk-5UN776TZ.min.js → chunk-6EBQRFQC.min.js} +1 -1
  3. package/dist/{chunk-5UN776TZ.min.js.map → chunk-6EBQRFQC.min.js.map} +1 -1
  4. package/dist/{chunk-BLAIQY2K.min.js → chunk-C75X52ZX.min.js} +1101 -261
  5. package/dist/chunk-C75X52ZX.min.js.map +1 -0
  6. package/dist/chunk-L4AYAZCN.min.js +66 -0
  7. package/dist/chunk-L4AYAZCN.min.js.map +1 -0
  8. package/dist/chunk-RPPZKO5L.min.js +1 -0
  9. package/dist/chunk-RPPZKO5L.min.js.map +1 -0
  10. package/dist/{chunk-4432UCLM.min.js → chunk-TPRWMCU7.min.js} +3 -53
  11. package/dist/chunk-TPRWMCU7.min.js.map +1 -0
  12. package/dist/{chunk-ITYJ7DMX.min.js → chunk-UQJEOS7T.min.js} +11 -3
  13. package/dist/chunk-UQJEOS7T.min.js.map +1 -0
  14. package/dist/chunk-UREBOWFC.min.js +40 -0
  15. package/dist/chunk-UREBOWFC.min.js.map +1 -0
  16. package/dist/index.min.js +18 -11
  17. package/dist/ipfs/car.d.ts +4 -4
  18. package/dist/ipfs/car.d.ts.map +1 -1
  19. package/dist/ipfs/ipfs-utils.d.ts +1 -0
  20. package/dist/ipfs/ipfs-utils.d.ts.map +1 -1
  21. package/dist/ipfs.min.js +7 -6
  22. package/dist/mobx/mobx-utils.d.ts.map +1 -1
  23. package/dist/pubsub/pub-push.d.ts.map +1 -1
  24. package/dist/pubsub.min.js +9 -9
  25. package/dist/query/types.d.ts +3 -3
  26. package/dist/query/types.d.ts.map +1 -1
  27. package/dist/query.min.js +8 -6
  28. package/dist/thread/filters.d.ts +1 -1
  29. package/dist/thread.min.js +2 -2
  30. package/dist/types/typescript-utils.d.ts.map +1 -1
  31. package/dist/types.min.js +1 -1
  32. package/package.json +2 -1
  33. package/dist/chunk-4432UCLM.min.js.map +0 -1
  34. package/dist/chunk-BLAIQY2K.min.js.map +0 -1
  35. package/dist/chunk-ITYJ7DMX.min.js.map +0 -1
@@ -3,7 +3,7 @@ import {
3
3
  } from "./chunk-QPGEBDMJ.min.js";
4
4
  import {
5
5
  lastWriteWins
6
- } from "./chunk-4432UCLM.min.js";
6
+ } from "./chunk-TPRWMCU7.min.js";
7
7
  import {
8
8
  CID,
9
9
  E,
@@ -11,7 +11,6 @@ import {
11
11
  Type,
12
12
  areCidsEqual,
13
13
  containsCid,
14
- cyrb53hash,
15
14
  decode,
16
15
  decode2,
17
16
  decode3,
@@ -24,7 +23,7 @@ import {
24
23
  rollingFilter,
25
24
  src_exports,
26
25
  wrapper_default
27
- } from "./chunk-ITYJ7DMX.min.js";
26
+ } from "./chunk-UQJEOS7T.min.js";
28
27
  import {
29
28
  __commonJS,
30
29
  __toESM
@@ -34,29 +33,29 @@ import {
34
33
  var require_encode = __commonJS({
35
34
  "../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/encode.js"(exports, module) {
36
35
  "use strict";
37
- module.exports = encode3;
38
- var MSB = 128;
39
- var REST = 127;
40
- var MSBALL = ~REST;
41
- var INT = Math.pow(2, 31);
42
- function encode3(num, out, offset) {
36
+ module.exports = encode5;
37
+ var MSB2 = 128;
38
+ var REST2 = 127;
39
+ var MSBALL2 = ~REST2;
40
+ var INT2 = Math.pow(2, 31);
41
+ function encode5(num, out, offset) {
43
42
  if (Number.MAX_SAFE_INTEGER && num > Number.MAX_SAFE_INTEGER) {
44
- encode3.bytes = 0;
43
+ encode5.bytes = 0;
45
44
  throw new RangeError("Could not encode varint");
46
45
  }
47
46
  out = out || [];
48
47
  offset = offset || 0;
49
48
  var oldOffset = offset;
50
- while (num >= INT) {
51
- out[offset++] = num & 255 | MSB;
49
+ while (num >= INT2) {
50
+ out[offset++] = num & 255 | MSB2;
52
51
  num /= 128;
53
52
  }
54
- while (num & MSBALL) {
55
- out[offset++] = num & 255 | MSB;
53
+ while (num & MSBALL2) {
54
+ out[offset++] = num & 255 | MSB2;
56
55
  num >>>= 7;
57
56
  }
58
57
  out[offset] = num | 0;
59
- encode3.bytes = offset - oldOffset + 1;
58
+ encode5.bytes = offset - oldOffset + 1;
60
59
  return out;
61
60
  }
62
61
  }
@@ -66,21 +65,21 @@ var require_encode = __commonJS({
66
65
  var require_decode = __commonJS({
67
66
  "../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/decode.js"(exports, module) {
68
67
  "use strict";
69
- module.exports = read;
70
- var MSB = 128;
71
- var REST = 127;
72
- function read(buf, offset) {
68
+ module.exports = read2;
69
+ var MSB2 = 128;
70
+ var REST2 = 127;
71
+ function read2(buf, offset) {
73
72
  var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf.length;
74
73
  do {
75
74
  if (counter >= l || shift > 49) {
76
- read.bytes = 0;
75
+ read2.bytes = 0;
77
76
  throw new RangeError("Could not decode varint");
78
77
  }
79
78
  b = buf[counter++];
80
- res += shift < 28 ? (b & REST) << shift : (b & REST) * Math.pow(2, shift);
79
+ res += shift < 28 ? (b & REST2) << shift : (b & REST2) * Math.pow(2, shift);
81
80
  shift += 7;
82
- } while (b >= MSB);
83
- read.bytes = counter - offset;
81
+ } while (b >= MSB2);
82
+ read2.bytes = counter - offset;
84
83
  return res;
85
84
  }
86
85
  }
@@ -90,17 +89,17 @@ var require_decode = __commonJS({
90
89
  var require_length = __commonJS({
91
90
  "../../../node_modules/.pnpm/varint@6.0.0/node_modules/varint/length.js"(exports, module) {
92
91
  "use strict";
93
- var N1 = Math.pow(2, 7);
94
- var N2 = Math.pow(2, 14);
95
- var N3 = Math.pow(2, 21);
96
- var N4 = Math.pow(2, 28);
97
- var N5 = Math.pow(2, 35);
98
- var N6 = Math.pow(2, 42);
99
- var N7 = Math.pow(2, 49);
100
- var N8 = Math.pow(2, 56);
101
- var N9 = Math.pow(2, 63);
92
+ var N12 = Math.pow(2, 7);
93
+ var N22 = Math.pow(2, 14);
94
+ var N32 = Math.pow(2, 21);
95
+ var N42 = Math.pow(2, 28);
96
+ var N52 = Math.pow(2, 35);
97
+ var N62 = Math.pow(2, 42);
98
+ var N72 = Math.pow(2, 49);
99
+ var N82 = Math.pow(2, 56);
100
+ var N92 = Math.pow(2, 63);
102
101
  module.exports = function(value) {
103
- return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 9 : 10;
102
+ return value < N12 ? 1 : value < N22 ? 2 : value < N32 ? 3 : value < N42 ? 4 : value < N52 ? 5 : value < N62 ? 6 : value < N72 ? 7 : value < N82 ? 8 : value < N92 ? 9 : 10;
104
103
  };
105
104
  }
106
105
  });
@@ -117,13 +116,879 @@ var require_varint = __commonJS({
117
116
  }
118
117
  });
119
118
 
119
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/bytes.js
120
+ var empty = new Uint8Array(0);
121
+ function equals(aa, bb) {
122
+ if (aa === bb)
123
+ return true;
124
+ if (aa.byteLength !== bb.byteLength) {
125
+ return false;
126
+ }
127
+ for (let ii = 0; ii < aa.byteLength; ii++) {
128
+ if (aa[ii] !== bb[ii]) {
129
+ return false;
130
+ }
131
+ }
132
+ return true;
133
+ }
134
+ function coerce(o) {
135
+ if (o instanceof Uint8Array && o.constructor.name === "Uint8Array")
136
+ return o;
137
+ if (o instanceof ArrayBuffer)
138
+ return new Uint8Array(o);
139
+ if (ArrayBuffer.isView(o)) {
140
+ return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
141
+ }
142
+ throw new Error("Unknown type, must be binary type");
143
+ }
144
+
145
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/vendor/base-x.js
146
+ function base(ALPHABET, name) {
147
+ if (ALPHABET.length >= 255) {
148
+ throw new TypeError("Alphabet too long");
149
+ }
150
+ var BASE_MAP = new Uint8Array(256);
151
+ for (var j = 0; j < BASE_MAP.length; j++) {
152
+ BASE_MAP[j] = 255;
153
+ }
154
+ for (var i = 0; i < ALPHABET.length; i++) {
155
+ var x = ALPHABET.charAt(i);
156
+ var xc = x.charCodeAt(0);
157
+ if (BASE_MAP[xc] !== 255) {
158
+ throw new TypeError(x + " is ambiguous");
159
+ }
160
+ BASE_MAP[xc] = i;
161
+ }
162
+ var BASE = ALPHABET.length;
163
+ var LEADER = ALPHABET.charAt(0);
164
+ var FACTOR = Math.log(BASE) / Math.log(256);
165
+ var iFACTOR = Math.log(256) / Math.log(BASE);
166
+ function encode5(source) {
167
+ if (source instanceof Uint8Array)
168
+ ;
169
+ else if (ArrayBuffer.isView(source)) {
170
+ source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
171
+ } else if (Array.isArray(source)) {
172
+ source = Uint8Array.from(source);
173
+ }
174
+ if (!(source instanceof Uint8Array)) {
175
+ throw new TypeError("Expected Uint8Array");
176
+ }
177
+ if (source.length === 0) {
178
+ return "";
179
+ }
180
+ var zeroes = 0;
181
+ var length2 = 0;
182
+ var pbegin = 0;
183
+ var pend = source.length;
184
+ while (pbegin !== pend && source[pbegin] === 0) {
185
+ pbegin++;
186
+ zeroes++;
187
+ }
188
+ var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
189
+ var b58 = new Uint8Array(size);
190
+ while (pbegin !== pend) {
191
+ var carry = source[pbegin];
192
+ var i2 = 0;
193
+ for (var it1 = size - 1; (carry !== 0 || i2 < length2) && it1 !== -1; it1--, i2++) {
194
+ carry += 256 * b58[it1] >>> 0;
195
+ b58[it1] = carry % BASE >>> 0;
196
+ carry = carry / BASE >>> 0;
197
+ }
198
+ if (carry !== 0) {
199
+ throw new Error("Non-zero carry");
200
+ }
201
+ length2 = i2;
202
+ pbegin++;
203
+ }
204
+ var it2 = size - length2;
205
+ while (it2 !== size && b58[it2] === 0) {
206
+ it2++;
207
+ }
208
+ var str = LEADER.repeat(zeroes);
209
+ for (; it2 < size; ++it2) {
210
+ str += ALPHABET.charAt(b58[it2]);
211
+ }
212
+ return str;
213
+ }
214
+ function decodeUnsafe(source) {
215
+ if (typeof source !== "string") {
216
+ throw new TypeError("Expected String");
217
+ }
218
+ if (source.length === 0) {
219
+ return new Uint8Array();
220
+ }
221
+ var psz = 0;
222
+ if (source[psz] === " ") {
223
+ return;
224
+ }
225
+ var zeroes = 0;
226
+ var length2 = 0;
227
+ while (source[psz] === LEADER) {
228
+ zeroes++;
229
+ psz++;
230
+ }
231
+ var size = (source.length - psz) * FACTOR + 1 >>> 0;
232
+ var b256 = new Uint8Array(size);
233
+ while (source[psz]) {
234
+ var carry = BASE_MAP[source.charCodeAt(psz)];
235
+ if (carry === 255) {
236
+ return;
237
+ }
238
+ var i2 = 0;
239
+ for (var it3 = size - 1; (carry !== 0 || i2 < length2) && it3 !== -1; it3--, i2++) {
240
+ carry += BASE * b256[it3] >>> 0;
241
+ b256[it3] = carry % 256 >>> 0;
242
+ carry = carry / 256 >>> 0;
243
+ }
244
+ if (carry !== 0) {
245
+ throw new Error("Non-zero carry");
246
+ }
247
+ length2 = i2;
248
+ psz++;
249
+ }
250
+ if (source[psz] === " ") {
251
+ return;
252
+ }
253
+ var it4 = size - length2;
254
+ while (it4 !== size && b256[it4] === 0) {
255
+ it4++;
256
+ }
257
+ var vch = new Uint8Array(zeroes + (size - it4));
258
+ var j2 = zeroes;
259
+ while (it4 !== size) {
260
+ vch[j2++] = b256[it4++];
261
+ }
262
+ return vch;
263
+ }
264
+ function decode9(string) {
265
+ var buffer = decodeUnsafe(string);
266
+ if (buffer) {
267
+ return buffer;
268
+ }
269
+ throw new Error(`Non-${name} character`);
270
+ }
271
+ return {
272
+ encode: encode5,
273
+ decodeUnsafe,
274
+ decode: decode9
275
+ };
276
+ }
277
+ var src = base;
278
+ var _brrp__multiformats_scope_baseX = src;
279
+ var base_x_default = _brrp__multiformats_scope_baseX;
280
+
281
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/bases/base.js
282
+ var Encoder = class {
283
+ name;
284
+ prefix;
285
+ baseEncode;
286
+ constructor(name, prefix, baseEncode) {
287
+ this.name = name;
288
+ this.prefix = prefix;
289
+ this.baseEncode = baseEncode;
290
+ }
291
+ encode(bytes) {
292
+ if (bytes instanceof Uint8Array) {
293
+ return `${this.prefix}${this.baseEncode(bytes)}`;
294
+ } else {
295
+ throw Error("Unknown type, must be binary type");
296
+ }
297
+ }
298
+ };
299
+ var Decoder = class {
300
+ name;
301
+ prefix;
302
+ baseDecode;
303
+ prefixCodePoint;
304
+ constructor(name, prefix, baseDecode) {
305
+ this.name = name;
306
+ this.prefix = prefix;
307
+ const prefixCodePoint = prefix.codePointAt(0);
308
+ if (prefixCodePoint === void 0) {
309
+ throw new Error("Invalid prefix character");
310
+ }
311
+ this.prefixCodePoint = prefixCodePoint;
312
+ this.baseDecode = baseDecode;
313
+ }
314
+ decode(text) {
315
+ if (typeof text === "string") {
316
+ if (text.codePointAt(0) !== this.prefixCodePoint) {
317
+ throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
318
+ }
319
+ return this.baseDecode(text.slice(this.prefix.length));
320
+ } else {
321
+ throw Error("Can only multibase decode strings");
322
+ }
323
+ }
324
+ or(decoder) {
325
+ return or(this, decoder);
326
+ }
327
+ };
328
+ var ComposedDecoder = class {
329
+ decoders;
330
+ constructor(decoders) {
331
+ this.decoders = decoders;
332
+ }
333
+ or(decoder) {
334
+ return or(this, decoder);
335
+ }
336
+ decode(input) {
337
+ const prefix = input[0];
338
+ const decoder = this.decoders[prefix];
339
+ if (decoder != null) {
340
+ return decoder.decode(input);
341
+ } else {
342
+ throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
343
+ }
344
+ }
345
+ };
346
+ function or(left, right) {
347
+ return new ComposedDecoder({
348
+ ...left.decoders ?? { [left.prefix]: left },
349
+ ...right.decoders ?? { [right.prefix]: right }
350
+ });
351
+ }
352
+ var Codec = class {
353
+ name;
354
+ prefix;
355
+ baseEncode;
356
+ baseDecode;
357
+ encoder;
358
+ decoder;
359
+ constructor(name, prefix, baseEncode, baseDecode) {
360
+ this.name = name;
361
+ this.prefix = prefix;
362
+ this.baseEncode = baseEncode;
363
+ this.baseDecode = baseDecode;
364
+ this.encoder = new Encoder(name, prefix, baseEncode);
365
+ this.decoder = new Decoder(name, prefix, baseDecode);
366
+ }
367
+ encode(input) {
368
+ return this.encoder.encode(input);
369
+ }
370
+ decode(input) {
371
+ return this.decoder.decode(input);
372
+ }
373
+ };
374
+ function from({ name, prefix, encode: encode5, decode: decode9 }) {
375
+ return new Codec(name, prefix, encode5, decode9);
376
+ }
377
+ function baseX({ name, prefix, alphabet }) {
378
+ const { encode: encode5, decode: decode9 } = base_x_default(alphabet, name);
379
+ return from({
380
+ prefix,
381
+ name,
382
+ encode: encode5,
383
+ decode: (text) => coerce(decode9(text))
384
+ });
385
+ }
386
+ function decode4(string, alphabet, bitsPerChar, name) {
387
+ const codes = {};
388
+ for (let i = 0; i < alphabet.length; ++i) {
389
+ codes[alphabet[i]] = i;
390
+ }
391
+ let end = string.length;
392
+ while (string[end - 1] === "=") {
393
+ --end;
394
+ }
395
+ const out = new Uint8Array(end * bitsPerChar / 8 | 0);
396
+ let bits = 0;
397
+ let buffer = 0;
398
+ let written = 0;
399
+ for (let i = 0; i < end; ++i) {
400
+ const value = codes[string[i]];
401
+ if (value === void 0) {
402
+ throw new SyntaxError(`Non-${name} character`);
403
+ }
404
+ buffer = buffer << bitsPerChar | value;
405
+ bits += bitsPerChar;
406
+ if (bits >= 8) {
407
+ bits -= 8;
408
+ out[written++] = 255 & buffer >> bits;
409
+ }
410
+ }
411
+ if (bits >= bitsPerChar || (255 & buffer << 8 - bits) !== 0) {
412
+ throw new SyntaxError("Unexpected end of data");
413
+ }
414
+ return out;
415
+ }
416
+ function encode2(data, alphabet, bitsPerChar) {
417
+ const pad = alphabet[alphabet.length - 1] === "=";
418
+ const mask = (1 << bitsPerChar) - 1;
419
+ let out = "";
420
+ let bits = 0;
421
+ let buffer = 0;
422
+ for (let i = 0; i < data.length; ++i) {
423
+ buffer = buffer << 8 | data[i];
424
+ bits += 8;
425
+ while (bits > bitsPerChar) {
426
+ bits -= bitsPerChar;
427
+ out += alphabet[mask & buffer >> bits];
428
+ }
429
+ }
430
+ if (bits !== 0) {
431
+ out += alphabet[mask & buffer << bitsPerChar - bits];
432
+ }
433
+ if (pad) {
434
+ while ((out.length * bitsPerChar & 7) !== 0) {
435
+ out += "=";
436
+ }
437
+ }
438
+ return out;
439
+ }
440
+ function rfc4648({ name, prefix, bitsPerChar, alphabet }) {
441
+ return from({
442
+ prefix,
443
+ name,
444
+ encode(input) {
445
+ return encode2(input, alphabet, bitsPerChar);
446
+ },
447
+ decode(input) {
448
+ return decode4(input, alphabet, bitsPerChar, name);
449
+ }
450
+ });
451
+ }
452
+
453
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/bases/base32.js
454
+ var base32 = rfc4648({
455
+ prefix: "b",
456
+ name: "base32",
457
+ alphabet: "abcdefghijklmnopqrstuvwxyz234567",
458
+ bitsPerChar: 5
459
+ });
460
+ var base32upper = rfc4648({
461
+ prefix: "B",
462
+ name: "base32upper",
463
+ alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
464
+ bitsPerChar: 5
465
+ });
466
+ var base32pad = rfc4648({
467
+ prefix: "c",
468
+ name: "base32pad",
469
+ alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
470
+ bitsPerChar: 5
471
+ });
472
+ var base32padupper = rfc4648({
473
+ prefix: "C",
474
+ name: "base32padupper",
475
+ alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
476
+ bitsPerChar: 5
477
+ });
478
+ var base32hex = rfc4648({
479
+ prefix: "v",
480
+ name: "base32hex",
481
+ alphabet: "0123456789abcdefghijklmnopqrstuv",
482
+ bitsPerChar: 5
483
+ });
484
+ var base32hexupper = rfc4648({
485
+ prefix: "V",
486
+ name: "base32hexupper",
487
+ alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
488
+ bitsPerChar: 5
489
+ });
490
+ var base32hexpad = rfc4648({
491
+ prefix: "t",
492
+ name: "base32hexpad",
493
+ alphabet: "0123456789abcdefghijklmnopqrstuv=",
494
+ bitsPerChar: 5
495
+ });
496
+ var base32hexpadupper = rfc4648({
497
+ prefix: "T",
498
+ name: "base32hexpadupper",
499
+ alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
500
+ bitsPerChar: 5
501
+ });
502
+ var base32z = rfc4648({
503
+ prefix: "h",
504
+ name: "base32z",
505
+ alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
506
+ bitsPerChar: 5
507
+ });
508
+
509
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/bases/base36.js
510
+ var base36 = baseX({
511
+ prefix: "k",
512
+ name: "base36",
513
+ alphabet: "0123456789abcdefghijklmnopqrstuvwxyz"
514
+ });
515
+ var base36upper = baseX({
516
+ prefix: "K",
517
+ name: "base36upper",
518
+ alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
519
+ });
520
+
521
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/bases/base58.js
522
+ var base58btc = baseX({
523
+ name: "base58btc",
524
+ prefix: "z",
525
+ alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
526
+ });
527
+ var base58flickr = baseX({
528
+ name: "base58flickr",
529
+ prefix: "Z",
530
+ alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
531
+ });
532
+
533
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/vendor/varint.js
534
+ var encode_1 = encode3;
535
+ var MSB = 128;
536
+ var REST = 127;
537
+ var MSBALL = ~REST;
538
+ var INT = Math.pow(2, 31);
539
+ function encode3(num, out, offset) {
540
+ out = out || [];
541
+ offset = offset || 0;
542
+ var oldOffset = offset;
543
+ while (num >= INT) {
544
+ out[offset++] = num & 255 | MSB;
545
+ num /= 128;
546
+ }
547
+ while (num & MSBALL) {
548
+ out[offset++] = num & 255 | MSB;
549
+ num >>>= 7;
550
+ }
551
+ out[offset] = num | 0;
552
+ encode3.bytes = offset - oldOffset + 1;
553
+ return out;
554
+ }
555
+ var decode5 = read;
556
+ var MSB$1 = 128;
557
+ var REST$1 = 127;
558
+ function read(buf, offset) {
559
+ var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf.length;
560
+ do {
561
+ if (counter >= l) {
562
+ read.bytes = 0;
563
+ throw new RangeError("Could not decode varint");
564
+ }
565
+ b = buf[counter++];
566
+ res += shift < 28 ? (b & REST$1) << shift : (b & REST$1) * Math.pow(2, shift);
567
+ shift += 7;
568
+ } while (b >= MSB$1);
569
+ read.bytes = counter - offset;
570
+ return res;
571
+ }
572
+ var N1 = Math.pow(2, 7);
573
+ var N2 = Math.pow(2, 14);
574
+ var N3 = Math.pow(2, 21);
575
+ var N4 = Math.pow(2, 28);
576
+ var N5 = Math.pow(2, 35);
577
+ var N6 = Math.pow(2, 42);
578
+ var N7 = Math.pow(2, 49);
579
+ var N8 = Math.pow(2, 56);
580
+ var N9 = Math.pow(2, 63);
581
+ var length = function(value) {
582
+ return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 9 : 10;
583
+ };
584
+ var varint = {
585
+ encode: encode_1,
586
+ decode: decode5,
587
+ encodingLength: length
588
+ };
589
+ var _brrp_varint = varint;
590
+ var varint_default = _brrp_varint;
591
+
592
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/varint.js
593
+ function decode6(data, offset = 0) {
594
+ const code = varint_default.decode(data, offset);
595
+ return [code, varint_default.decode.bytes];
596
+ }
597
+ function encodeTo(int, target, offset = 0) {
598
+ varint_default.encode(int, target, offset);
599
+ return target;
600
+ }
601
+ function encodingLength(int) {
602
+ return varint_default.encodingLength(int);
603
+ }
604
+
605
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/hashes/digest.js
606
+ function create(code, digest) {
607
+ const size = digest.byteLength;
608
+ const sizeOffset = encodingLength(code);
609
+ const digestOffset = sizeOffset + encodingLength(size);
610
+ const bytes = new Uint8Array(digestOffset + size);
611
+ encodeTo(code, bytes, 0);
612
+ encodeTo(size, bytes, sizeOffset);
613
+ bytes.set(digest, digestOffset);
614
+ return new Digest(code, size, digest, bytes);
615
+ }
616
+ function decode7(multihash) {
617
+ const bytes = coerce(multihash);
618
+ const [code, sizeOffset] = decode6(bytes);
619
+ const [size, digestOffset] = decode6(bytes.subarray(sizeOffset));
620
+ const digest = bytes.subarray(sizeOffset + digestOffset);
621
+ if (digest.byteLength !== size) {
622
+ throw new Error("Incorrect length");
623
+ }
624
+ return new Digest(code, size, digest, bytes);
625
+ }
626
+ function equals2(a, b) {
627
+ if (a === b) {
628
+ return true;
629
+ } else {
630
+ const data = b;
631
+ return a.code === data.code && a.size === data.size && data.bytes instanceof Uint8Array && equals(a.bytes, data.bytes);
632
+ }
633
+ }
634
+ var Digest = class {
635
+ code;
636
+ size;
637
+ digest;
638
+ bytes;
639
+ /**
640
+ * Creates a multihash digest.
641
+ */
642
+ constructor(code, size, digest, bytes) {
643
+ this.code = code;
644
+ this.size = size;
645
+ this.digest = digest;
646
+ this.bytes = bytes;
647
+ }
648
+ };
649
+
650
+ // ../../../node_modules/.pnpm/multiformats@13.3.1/node_modules/multiformats/dist/src/cid.js
651
+ function format(link, base2) {
652
+ const { bytes, version } = link;
653
+ switch (version) {
654
+ case 0:
655
+ return toStringV0(bytes, baseCache(link), base2 ?? base58btc.encoder);
656
+ default:
657
+ return toStringV1(bytes, baseCache(link), base2 ?? base32.encoder);
658
+ }
659
+ }
660
+ var cache = /* @__PURE__ */ new WeakMap();
661
+ function baseCache(cid) {
662
+ const baseCache2 = cache.get(cid);
663
+ if (baseCache2 == null) {
664
+ const baseCache3 = /* @__PURE__ */ new Map();
665
+ cache.set(cid, baseCache3);
666
+ return baseCache3;
667
+ }
668
+ return baseCache2;
669
+ }
670
+ var CID2 = class _CID {
671
+ code;
672
+ version;
673
+ multihash;
674
+ bytes;
675
+ "/";
676
+ /**
677
+ * @param version - Version of the CID
678
+ * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
679
+ * @param multihash - (Multi)hash of the of the content.
680
+ */
681
+ constructor(version, code, multihash, bytes) {
682
+ this.code = code;
683
+ this.version = version;
684
+ this.multihash = multihash;
685
+ this.bytes = bytes;
686
+ this["/"] = bytes;
687
+ }
688
+ /**
689
+ * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
690
+ * please either use `CID.asCID(cid)` or switch to new signalling mechanism
691
+ *
692
+ * @deprecated
693
+ */
694
+ get asCID() {
695
+ return this;
696
+ }
697
+ // ArrayBufferView
698
+ get byteOffset() {
699
+ return this.bytes.byteOffset;
700
+ }
701
+ // ArrayBufferView
702
+ get byteLength() {
703
+ return this.bytes.byteLength;
704
+ }
705
+ toV0() {
706
+ switch (this.version) {
707
+ case 0: {
708
+ return this;
709
+ }
710
+ case 1: {
711
+ const { code, multihash } = this;
712
+ if (code !== DAG_PB_CODE) {
713
+ throw new Error("Cannot convert a non dag-pb CID to CIDv0");
714
+ }
715
+ if (multihash.code !== SHA_256_CODE) {
716
+ throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
717
+ }
718
+ return _CID.createV0(multihash);
719
+ }
720
+ default: {
721
+ throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`);
722
+ }
723
+ }
724
+ }
725
+ toV1() {
726
+ switch (this.version) {
727
+ case 0: {
728
+ const { code, digest } = this.multihash;
729
+ const multihash = create(code, digest);
730
+ return _CID.createV1(this.code, multihash);
731
+ }
732
+ case 1: {
733
+ return this;
734
+ }
735
+ default: {
736
+ throw Error(`Can not convert CID version ${this.version} to version 1. This is a bug please report`);
737
+ }
738
+ }
739
+ }
740
+ equals(other) {
741
+ return _CID.equals(this, other);
742
+ }
743
+ static equals(self, other) {
744
+ const unknown = other;
745
+ return unknown != null && self.code === unknown.code && self.version === unknown.version && equals2(self.multihash, unknown.multihash);
746
+ }
747
+ toString(base2) {
748
+ return format(this, base2);
749
+ }
750
+ toJSON() {
751
+ return { "/": format(this) };
752
+ }
753
+ link() {
754
+ return this;
755
+ }
756
+ [Symbol.toStringTag] = "CID";
757
+ // Legacy
758
+ [Symbol.for("nodejs.util.inspect.custom")]() {
759
+ return `CID(${this.toString()})`;
760
+ }
761
+ /**
762
+ * Takes any input `value` and returns a `CID` instance if it was
763
+ * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
764
+ * it will return value back. If `value` is not instance of this CID
765
+ * class, but is compatible CID it will return new instance of this
766
+ * `CID` class. Otherwise returns null.
767
+ *
768
+ * This allows two different incompatible versions of CID library to
769
+ * co-exist and interop as long as binary interface is compatible.
770
+ */
771
+ static asCID(input) {
772
+ if (input == null) {
773
+ return null;
774
+ }
775
+ const value = input;
776
+ if (value instanceof _CID) {
777
+ return value;
778
+ } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
779
+ const { version, code, multihash, bytes } = value;
780
+ return new _CID(version, code, multihash, bytes ?? encodeCID(version, code, multihash.bytes));
781
+ } else if (value[cidSymbol] === true) {
782
+ const { version, multihash, code } = value;
783
+ const digest = decode7(multihash);
784
+ return _CID.create(version, code, digest);
785
+ } else {
786
+ return null;
787
+ }
788
+ }
789
+ /**
790
+ * @param version - Version of the CID
791
+ * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
792
+ * @param digest - (Multi)hash of the of the content.
793
+ */
794
+ static create(version, code, digest) {
795
+ if (typeof code !== "number") {
796
+ throw new Error("String codecs are no longer supported");
797
+ }
798
+ if (!(digest.bytes instanceof Uint8Array)) {
799
+ throw new Error("Invalid digest");
800
+ }
801
+ switch (version) {
802
+ case 0: {
803
+ if (code !== DAG_PB_CODE) {
804
+ throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`);
805
+ } else {
806
+ return new _CID(version, code, digest, digest.bytes);
807
+ }
808
+ }
809
+ case 1: {
810
+ const bytes = encodeCID(version, code, digest.bytes);
811
+ return new _CID(version, code, digest, bytes);
812
+ }
813
+ default: {
814
+ throw new Error("Invalid version");
815
+ }
816
+ }
817
+ }
818
+ /**
819
+ * Simplified version of `create` for CIDv0.
820
+ */
821
+ static createV0(digest) {
822
+ return _CID.create(0, DAG_PB_CODE, digest);
823
+ }
824
+ /**
825
+ * Simplified version of `create` for CIDv1.
826
+ *
827
+ * @param code - Content encoding format code.
828
+ * @param digest - Multihash of the content.
829
+ */
830
+ static createV1(code, digest) {
831
+ return _CID.create(1, code, digest);
832
+ }
833
+ /**
834
+ * Decoded a CID from its binary representation. The byte array must contain
835
+ * only the CID with no additional bytes.
836
+ *
837
+ * An error will be thrown if the bytes provided do not contain a valid
838
+ * binary representation of a CID.
839
+ */
840
+ static decode(bytes) {
841
+ const [cid, remainder] = _CID.decodeFirst(bytes);
842
+ if (remainder.length !== 0) {
843
+ throw new Error("Incorrect length");
844
+ }
845
+ return cid;
846
+ }
847
+ /**
848
+ * Decoded a CID from its binary representation at the beginning of a byte
849
+ * array.
850
+ *
851
+ * Returns an array with the first element containing the CID and the second
852
+ * element containing the remainder of the original byte array. The remainder
853
+ * will be a zero-length byte array if the provided bytes only contained a
854
+ * binary CID representation.
855
+ */
856
+ static decodeFirst(bytes) {
857
+ const specs = _CID.inspectBytes(bytes);
858
+ const prefixSize = specs.size - specs.multihashSize;
859
+ const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize));
860
+ if (multihashBytes.byteLength !== specs.multihashSize) {
861
+ throw new Error("Incorrect length");
862
+ }
863
+ const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize);
864
+ const digest = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes);
865
+ const cid = specs.version === 0 ? _CID.createV0(digest) : _CID.createV1(specs.codec, digest);
866
+ return [cid, bytes.subarray(specs.size)];
867
+ }
868
+ /**
869
+ * Inspect the initial bytes of a CID to determine its properties.
870
+ *
871
+ * Involves decoding up to 4 varints. Typically this will require only 4 to 6
872
+ * bytes but for larger multicodec code values and larger multihash digest
873
+ * lengths these varints can be quite large. It is recommended that at least
874
+ * 10 bytes be made available in the `initialBytes` argument for a complete
875
+ * inspection.
876
+ */
877
+ static inspectBytes(initialBytes) {
878
+ let offset = 0;
879
+ const next = () => {
880
+ const [i, length2] = decode6(initialBytes.subarray(offset));
881
+ offset += length2;
882
+ return i;
883
+ };
884
+ let version = next();
885
+ let codec = DAG_PB_CODE;
886
+ if (version === 18) {
887
+ version = 0;
888
+ offset = 0;
889
+ } else {
890
+ codec = next();
891
+ }
892
+ if (version !== 0 && version !== 1) {
893
+ throw new RangeError(`Invalid CID version ${version}`);
894
+ }
895
+ const prefixSize = offset;
896
+ const multihashCode = next();
897
+ const digestSize = next();
898
+ const size = offset + digestSize;
899
+ const multihashSize = size - prefixSize;
900
+ return { version, codec, multihashCode, digestSize, multihashSize, size };
901
+ }
902
+ /**
903
+ * Takes cid in a string representation and creates an instance. If `base`
904
+ * decoder is not provided will use a default from the configuration. It will
905
+ * throw an error if encoding of the CID is not compatible with supplied (or
906
+ * a default decoder).
907
+ */
908
+ static parse(source, base2) {
909
+ const [prefix, bytes] = parseCIDtoBytes(source, base2);
910
+ const cid = _CID.decode(bytes);
911
+ if (cid.version === 0 && source[0] !== "Q") {
912
+ throw Error("Version 0 CID string must not include multibase prefix");
913
+ }
914
+ baseCache(cid).set(prefix, source);
915
+ return cid;
916
+ }
917
+ };
918
+ function parseCIDtoBytes(source, base2) {
919
+ switch (source[0]) {
920
+ case "Q": {
921
+ const decoder = base2 ?? base58btc;
922
+ return [
923
+ base58btc.prefix,
924
+ decoder.decode(`${base58btc.prefix}${source}`)
925
+ ];
926
+ }
927
+ case base58btc.prefix: {
928
+ const decoder = base2 ?? base58btc;
929
+ return [base58btc.prefix, decoder.decode(source)];
930
+ }
931
+ case base32.prefix: {
932
+ const decoder = base2 ?? base32;
933
+ return [base32.prefix, decoder.decode(source)];
934
+ }
935
+ case base36.prefix: {
936
+ const decoder = base2 ?? base36;
937
+ return [base36.prefix, decoder.decode(source)];
938
+ }
939
+ default: {
940
+ if (base2 == null) {
941
+ throw Error("To parse non base32, base36 or base58btc encoded CID multibase decoder must be provided");
942
+ }
943
+ return [source[0], base2.decode(source)];
944
+ }
945
+ }
946
+ }
947
+ function toStringV0(bytes, cache2, base2) {
948
+ const { prefix } = base2;
949
+ if (prefix !== base58btc.prefix) {
950
+ throw Error(`Cannot string encode V0 in ${base2.name} encoding`);
951
+ }
952
+ const cid = cache2.get(prefix);
953
+ if (cid == null) {
954
+ const cid2 = base2.encode(bytes).slice(1);
955
+ cache2.set(prefix, cid2);
956
+ return cid2;
957
+ } else {
958
+ return cid;
959
+ }
960
+ }
961
+ function toStringV1(bytes, cache2, base2) {
962
+ const { prefix } = base2;
963
+ const cid = cache2.get(prefix);
964
+ if (cid == null) {
965
+ const cid2 = base2.encode(bytes);
966
+ cache2.set(prefix, cid2);
967
+ return cid2;
968
+ } else {
969
+ return cid;
970
+ }
971
+ }
972
+ var DAG_PB_CODE = 112;
973
+ var SHA_256_CODE = 18;
974
+ function encodeCID(version, code, multihash) {
975
+ const codeOffset = encodingLength(version);
976
+ const hashOffset = codeOffset + encodingLength(code);
977
+ const bytes = new Uint8Array(hashOffset + multihash.byteLength);
978
+ encodeTo(version, bytes, 0);
979
+ encodeTo(code, bytes, codeOffset);
980
+ bytes.set(multihash, hashOffset);
981
+ return bytes;
982
+ }
983
+ var cidSymbol = Symbol.for("@ipld/js-cid/CID");
984
+
120
985
  // ../../../node_modules/.pnpm/@ipld+dag-cbor@9.1.0/node_modules/@ipld/dag-cbor/src/index.js
121
986
  var CID_CBOR_TAG = 42;
122
987
  function cidEncoder(obj) {
123
988
  if (obj.asCID !== obj && obj["/"] !== obj.bytes) {
124
989
  return null;
125
990
  }
126
- const cid = CID.asCID(obj);
991
+ const cid = CID2.asCID(obj);
127
992
  if (!cid) {
128
993
  return null;
129
994
  }
@@ -164,7 +1029,7 @@ function cidDecoder(bytes) {
164
1029
  if (bytes[0] !== 0) {
165
1030
  throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00");
166
1031
  }
167
- return CID.decode(bytes.subarray(1));
1032
+ return CID2.decode(bytes.subarray(1));
168
1033
  }
169
1034
  var _decodeOptions = {
170
1035
  allowIndefinite: false,
@@ -185,11 +1050,11 @@ var decodeOptions = {
185
1050
  ..._decodeOptions,
186
1051
  tags: _decodeOptions.tags.slice()
187
1052
  };
188
- var encode2 = (node) => encode(node, _encodeOptions);
189
- var decode4 = (data) => decode(data, _decodeOptions);
1053
+ var encode4 = (node) => encode(node, _encodeOptions);
1054
+ var decode8 = (data) => decode(data, _decodeOptions);
190
1055
 
191
1056
  // ../../../node_modules/.pnpm/@ipld+car@5.2.6/node_modules/@ipld/car/src/decoder-common.js
192
- var import_varint = __toESM(require_varint(), 1);
1057
+ var import_varint2 = __toESM(require_varint(), 1);
193
1058
  var CIDV0_BYTES = {
194
1059
  SHA2_256: 18,
195
1060
  LENGTH: 32,
@@ -203,10 +1068,10 @@ function decodeVarint(bytes, seeker) {
203
1068
  if (!bytes.length) {
204
1069
  throw new Error("Unexpected end of data");
205
1070
  }
206
- const i = import_varint.default.decode(bytes);
1071
+ const i = import_varint2.default.decode(bytes);
207
1072
  seeker.seek(
208
1073
  /** @type {number} */
209
- import_varint.default.decode.bytes
1074
+ import_varint2.default.decode.bytes
210
1075
  );
211
1076
  return i;
212
1077
  }
@@ -227,17 +1092,17 @@ function decodeV2Header(bytes) {
227
1092
  return header;
228
1093
  }
229
1094
  function getMultihashLength(bytes) {
230
- import_varint.default.decode(bytes);
1095
+ import_varint2.default.decode(bytes);
231
1096
  const codeLength = (
232
1097
  /** @type {number} */
233
- import_varint.default.decode.bytes
1098
+ import_varint2.default.decode.bytes
234
1099
  );
235
- const length = import_varint.default.decode(bytes.subarray(import_varint.default.decode.bytes));
1100
+ const length2 = import_varint2.default.decode(bytes.subarray(import_varint2.default.decode.bytes));
236
1101
  const lengthLength = (
237
1102
  /** @type {number} */
238
- import_varint.default.decode.bytes
1103
+ import_varint2.default.decode.bytes
239
1104
  );
240
- const mhLength = codeLength + lengthLength + length;
1105
+ const mhLength = codeLength + lengthLength + length2;
241
1106
  return mhLength;
242
1107
  }
243
1108
 
@@ -467,7 +1332,7 @@ var CarV1HeaderOrV2Pragma = {
467
1332
  var cborEncoders = makeCborEncoders();
468
1333
 
469
1334
  // ../../../node_modules/.pnpm/@ipld+car@5.2.6/node_modules/@ipld/car/src/buffer-writer.js
470
- var import_varint2 = __toESM(require_varint(), 1);
1335
+ var import_varint3 = __toESM(require_varint(), 1);
471
1336
  var headerPreludeTokens = [
472
1337
  new Token(Type.map, 2),
473
1338
  new Token(Type.string, "version"),
@@ -478,12 +1343,12 @@ var CID_TAG = new Token(Type.tag, 42);
478
1343
 
479
1344
  // ../../../node_modules/.pnpm/@ipld+car@5.2.6/node_modules/@ipld/car/src/decoder.js
480
1345
  async function readHeader(reader, strictVersion) {
481
- const length = decodeVarint(await reader.upTo(8), reader);
482
- if (length === 0) {
1346
+ const length2 = decodeVarint(await reader.upTo(8), reader);
1347
+ if (length2 === 0) {
483
1348
  throw new Error("Invalid CAR header (zero length)");
484
1349
  }
485
- const header = await reader.exactly(length, true);
486
- const block = decode4(header);
1350
+ const header = await reader.exactly(length2, true);
1351
+ const block = decode8(header);
487
1352
  if (CarV1HeaderOrV2Pragma.toTyped(block) === void 0) {
488
1353
  throw new Error("Invalid CAR header format");
489
1354
  }
@@ -522,14 +1387,14 @@ async function readCid(reader) {
522
1387
  }
523
1388
  async function readBlockHead(reader) {
524
1389
  const start = reader.pos;
525
- let length = decodeVarint(await reader.upTo(8), reader);
526
- if (length === 0) {
1390
+ let length2 = decodeVarint(await reader.upTo(8), reader);
1391
+ if (length2 === 0) {
527
1392
  throw new Error("Invalid CAR section (zero length)");
528
1393
  }
529
- length += reader.pos - start;
1394
+ length2 += reader.pos - start;
530
1395
  const cid = await readCid(reader);
531
- const blockLength = length - Number(reader.pos - start);
532
- return { cid, length, blockLength };
1396
+ const blockLength = length2 - Number(reader.pos - start);
1397
+ return { cid, length: length2, blockLength };
533
1398
  }
534
1399
  async function readBlock(reader) {
535
1400
  const { cid, blockLength } = await readBlockHead(reader);
@@ -538,8 +1403,8 @@ async function readBlock(reader) {
538
1403
  }
539
1404
  async function readBlockIndex(reader) {
540
1405
  const offset = reader.pos;
541
- const { cid, length, blockLength } = await readBlockHead(reader);
542
- const index = { cid, length, blockLength, offset, blockOffset: reader.pos };
1406
+ const { cid, length: length2, blockLength } = await readBlockHead(reader);
1407
+ const index = { cid, length: length2, blockLength, offset, blockOffset: reader.pos };
543
1408
  reader.seek(index.blockLength);
544
1409
  return index;
545
1410
  }
@@ -571,22 +1436,22 @@ function createDecoder(reader) {
571
1436
  function bytesReader(bytes) {
572
1437
  let pos = 0;
573
1438
  return {
574
- async upTo(length) {
575
- const out = bytes.subarray(pos, pos + Math.min(length, bytes.length - pos));
1439
+ async upTo(length2) {
1440
+ const out = bytes.subarray(pos, pos + Math.min(length2, bytes.length - pos));
576
1441
  return out;
577
1442
  },
578
- async exactly(length, seek = false) {
579
- if (length > bytes.length - pos) {
1443
+ async exactly(length2, seek = false) {
1444
+ if (length2 > bytes.length - pos) {
580
1445
  throw new Error("Unexpected end of data");
581
1446
  }
582
- const out = bytes.subarray(pos, pos + length);
1447
+ const out = bytes.subarray(pos, pos + length2);
583
1448
  if (seek) {
584
- pos += length;
1449
+ pos += length2;
585
1450
  }
586
1451
  return out;
587
1452
  },
588
- seek(length) {
589
- pos += length;
1453
+ seek(length2) {
1454
+ pos += length2;
590
1455
  },
591
1456
  get pos() {
592
1457
  return pos;
@@ -598,10 +1463,10 @@ function chunkReader(readChunk) {
598
1463
  let have = 0;
599
1464
  let offset = 0;
600
1465
  let currentChunk = new Uint8Array(0);
601
- const read = async (length) => {
1466
+ const read2 = async (length2) => {
602
1467
  have = currentChunk.length - offset;
603
1468
  const bufa = [currentChunk.subarray(offset)];
604
- while (have < length) {
1469
+ while (have < length2) {
605
1470
  const chunk = await readChunk();
606
1471
  if (chunk == null) {
607
1472
  break;
@@ -624,29 +1489,29 @@ function chunkReader(readChunk) {
624
1489
  offset = 0;
625
1490
  };
626
1491
  return {
627
- async upTo(length) {
628
- if (currentChunk.length - offset < length) {
629
- await read(length);
1492
+ async upTo(length2) {
1493
+ if (currentChunk.length - offset < length2) {
1494
+ await read2(length2);
630
1495
  }
631
- return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length));
1496
+ return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length2));
632
1497
  },
633
- async exactly(length, seek = false) {
634
- if (currentChunk.length - offset < length) {
635
- await read(length);
1498
+ async exactly(length2, seek = false) {
1499
+ if (currentChunk.length - offset < length2) {
1500
+ await read2(length2);
636
1501
  }
637
- if (currentChunk.length - offset < length) {
1502
+ if (currentChunk.length - offset < length2) {
638
1503
  throw new Error("Unexpected end of data");
639
1504
  }
640
- const out = currentChunk.subarray(offset, offset + length);
1505
+ const out = currentChunk.subarray(offset, offset + length2);
641
1506
  if (seek) {
642
- pos += length;
643
- offset += length;
1507
+ pos += length2;
1508
+ offset += length2;
644
1509
  }
645
1510
  return out;
646
1511
  },
647
- seek(length) {
648
- pos += length;
649
- offset += length;
1512
+ seek(length2) {
1513
+ pos += length2;
1514
+ offset += length2;
650
1515
  },
651
1516
  get pos() {
652
1517
  return pos;
@@ -667,26 +1532,26 @@ function asyncIterableReader(asyncIterable) {
667
1532
  function limitReader(reader, byteLimit) {
668
1533
  let bytesRead = 0;
669
1534
  return {
670
- async upTo(length) {
671
- let bytes = await reader.upTo(length);
1535
+ async upTo(length2) {
1536
+ let bytes = await reader.upTo(length2);
672
1537
  if (bytes.length + bytesRead > byteLimit) {
673
1538
  bytes = bytes.subarray(0, byteLimit - bytesRead);
674
1539
  }
675
1540
  return bytes;
676
1541
  },
677
- async exactly(length, seek = false) {
678
- const bytes = await reader.exactly(length, seek);
1542
+ async exactly(length2, seek = false) {
1543
+ const bytes = await reader.exactly(length2, seek);
679
1544
  if (bytes.length + bytesRead > byteLimit) {
680
1545
  throw new Error("Unexpected end of data");
681
1546
  }
682
1547
  if (seek) {
683
- bytesRead += length;
1548
+ bytesRead += length2;
684
1549
  }
685
1550
  return bytes;
686
1551
  },
687
- seek(length) {
688
- bytesRead += length;
689
- reader.seek(length);
1552
+ seek(length2) {
1553
+ bytesRead += length2;
1554
+ reader.seek(length2);
690
1555
  },
691
1556
  get pos() {
692
1557
  return reader.pos;
@@ -842,10 +1707,10 @@ async function decodeReaderComplete(reader) {
842
1707
  }
843
1708
 
844
1709
  // ../../../node_modules/.pnpm/@ipld+car@5.2.6/node_modules/@ipld/car/src/encoder.js
845
- var import_varint3 = __toESM(require_varint(), 1);
1710
+ var import_varint4 = __toESM(require_varint(), 1);
846
1711
  function createHeader(roots) {
847
- const headerBytes = encode2({ version: 1, roots });
848
- const varintBytes = import_varint3.default.encode(headerBytes.length);
1712
+ const headerBytes = encode4({ version: 1, roots });
1713
+ const varintBytes = import_varint4.default.encode(headerBytes.length);
849
1714
  const header = new Uint8Array(varintBytes.length + headerBytes.length);
850
1715
  header.set(varintBytes, 0);
851
1716
  header.set(headerBytes, varintBytes.length);
@@ -867,7 +1732,7 @@ function createEncoder(writer) {
867
1732
  */
868
1733
  async writeBlock(block) {
869
1734
  const { cid, bytes } = block;
870
- await writer.write(new Uint8Array(import_varint3.default.encode(cid.bytes.length + bytes.length)));
1735
+ await writer.write(new Uint8Array(import_varint4.default.encode(cid.bytes.length + bytes.length)));
871
1736
  await writer.write(cid.bytes);
872
1737
  if (bytes.length) {
873
1738
  await writer.write(bytes);
@@ -885,7 +1750,7 @@ function createEncoder(writer) {
885
1750
  // ../../../node_modules/.pnpm/@ipld+car@5.2.6/node_modules/@ipld/car/src/iterator-channel.js
886
1751
  function noop() {
887
1752
  }
888
- function create() {
1753
+ function create2() {
889
1754
  const chunkQueue = [];
890
1755
  let drainer = null;
891
1756
  let drainerResolver = noop;
@@ -1094,7 +1959,7 @@ var CarWriterOut = class {
1094
1959
  }
1095
1960
  };
1096
1961
  function encodeWriter() {
1097
- const iw = create();
1962
+ const iw = create2();
1098
1963
  const { writer, iterator } = iw;
1099
1964
  const encoder = createEncoder(writer);
1100
1965
  return { encoder, iterator };
@@ -1121,24 +1986,133 @@ function toRoots(roots) {
1121
1986
  return _roots;
1122
1987
  }
1123
1988
 
1124
- // src/pubsub/pub-pull.ts
1125
- var { WARN, LOG, DEBUG, VERBOSE, ERROR } = E.setup(E.DEBUG);
1126
- function integratePub({ targetThread, agentHash, subID, pubData }) {
1127
- const newLogs = pubData.thread.applogs.filter((log) => !targetThread.hasApplog(log, false));
1128
- LOG(`[integratePub] integrating ${newLogs.length} logs`, { targetThread, subID, pubData });
1129
- let toInsert = newLogs;
1130
- if (subID) {
1131
- toInsert = toInsert.concat(ensureTsPvAndFinalizeApplog(
1132
- { en: subID, at: "subscription/cid", vl: pubData.cid.toString(), ag: agentHash },
1133
- targetThread
1134
- ));
1135
- }
1136
- targetThread.insertRaw(toInsert);
1137
- return newLogs;
1989
+ // src/ipfs/car.ts
1990
+ var { WARN, LOG, DEBUG, VERBOSE, ERROR } = E.setup(E.INFO);
1991
+ async function decodePubFromCar(car) {
1992
+ const decoded = await getBlocksOfCar(car);
1993
+ return await decodePubFromBlocks(decoded);
1994
+ }
1995
+ async function decodePubFromBlocks({ rootCID, blockStore }, recursionTrace = []) {
1996
+ if (!rootCID || !blockStore) {
1997
+ throw ERROR("Empty roots/blocks", { rootCID, blockStore });
1998
+ }
1999
+ const root = await getDecodedBlock(blockStore, rootCID);
2000
+ let pubLogsArray;
2001
+ let applogsCID = null;
2002
+ let info = null;
2003
+ VERBOSE(`[decodePubFromBlocks] root:`, rootCID.toString(), root, { blockStore });
2004
+ if (!root)
2005
+ throw ERROR("root not found in blockStore", { blockStore, rootCID });
2006
+ if (root?.info) {
2007
+ applogsCID = root.applogs;
2008
+ const applogsBlock = await getDecodedBlock(blockStore, applogsCID);
2009
+ pubLogsArray = await unchunkApplogsBlock(applogsBlock, blockStore);
2010
+ info = await getDecodedBlock(blockStore, root.info);
2011
+ DEBUG(`new format - infoLogs`, info.logs.map((l) => ({ [l.toString()]: l })));
2012
+ } else {
2013
+ pubLogsArray = root.applogs;
2014
+ }
2015
+ const resolveLogFromCidLink = async (cidOrLink) => {
2016
+ const cid = cidOrLink;
2017
+ const applog = await getDecodedBlock(blockStore, cid);
2018
+ if (!applog) {
2019
+ ERROR(`Could not find applog CID in pub blocks:`, cid.toString(), { cid, root, blockStore });
2020
+ throw new Error(`Could not find applog CID in pub blocks: ${cid.toString()}`);
2021
+ }
2022
+ if (applog.pv instanceof CID)
2023
+ applog.pv = applog.pv.toV1().toString();
2024
+ return {
2025
+ ...applog,
2026
+ cid: cid.toV1().toString()
2027
+ };
2028
+ };
2029
+ let applogs;
2030
+ applogs = await Promise.all(pubLogsArray.map(resolveLogFromCidLink));
2031
+ if (root.prev) {
2032
+ if (areCidsEqual(root.prev, rootCID) || containsCid(recursionTrace, root.prev)) {
2033
+ throw ERROR(`[pubFromBlocks] pub chain has a loop`, { rootCID, prev: root.prev, recursionTrace });
2034
+ }
2035
+ applogs.push(...(await decodePubFromBlocks({ rootCID: root.prev, blockStore }, [...recursionTrace, rootCID])).applogs);
2036
+ }
2037
+ const result = {
2038
+ cid: rootCID,
2039
+ info: {
2040
+ ...info,
2041
+ logs: await Promise.all(info.logs.map(resolveLogFromCidLink))
2042
+ },
2043
+ applogsCID,
2044
+ applogs
2045
+ };
2046
+ DEBUG("[decodePubFromBlocks] result:", result, { rootCID: rootCID.toString(), root, blockStore, applogs, info });
2047
+ return result;
2048
+ }
2049
+ async function getBlocksOfCar(car) {
2050
+ const rootsFromCar = await car.getRoots();
2051
+ const roots = rootsFromCar.map((c) => (typeof c.toV1 === "function" ? c : CID.decode(c.bytes)).toV1().toString());
2052
+ const blocks = /* @__PURE__ */ new Map();
2053
+ for await (const { cid: cidFromCarblocks, bytes } of car.blocks()) {
2054
+ const cid = typeof cidFromCarblocks.toV1 === "function" ? cidFromCarblocks : CID.decode(cidFromCarblocks.bytes);
2055
+ VERBOSE({ cidFromCarblocks, cid });
2056
+ blocks.set(cid.toV1().toString(), bytes);
2057
+ }
2058
+ if (roots.length !== 1) {
2059
+ WARN("Unexpected roots count:", roots);
2060
+ }
2061
+ return {
2062
+ rootCID: CID.parse(roots[0]),
2063
+ blockStore: {
2064
+ get: (cid) => blocks.get(cid.toV1().toString())
2065
+ }
2066
+ };
2067
+ }
2068
+ async function getDecodedBlock(blockStore, cid) {
2069
+ try {
2070
+ var blob = await blockStore.get(cid);
2071
+ if (!blob) {
2072
+ WARN("returning null");
2073
+ return null;
2074
+ }
2075
+ } catch (err) {
2076
+ if (err.message === "Not Found")
2077
+ return null;
2078
+ throw err;
2079
+ }
2080
+ return decode3(blob);
2081
+ }
2082
+ async function makeCarOut(roots, blocks) {
2083
+ const { writer, out } = CarWriter.create(Array.isArray(roots) ? roots : [roots]);
2084
+ VERBOSE(`Writing ${blocks.length} blocks to CAR`, { roots, blocks });
2085
+ blocks.forEach((b) => writer.put(b));
2086
+ writer.close();
2087
+ return out;
2088
+ }
2089
+ async function makeCarBlob(roots, blocks) {
2090
+ const carOut = await makeCarOut(roots, blocks);
2091
+ const chunks = [];
2092
+ for await (const chunk of carOut) {
2093
+ chunks.push(chunk);
2094
+ }
2095
+ const blob = new Blob(chunks);
2096
+ return blob;
2097
+ }
2098
+ async function carFromBlob(blob) {
2099
+ return CarReader.fromBytes(new Uint8Array(await blob.arrayBuffer()));
2100
+ }
2101
+ function streamReaderToIterable(bodyReader) {
2102
+ return async function* () {
2103
+ while (true) {
2104
+ const { done, value } = await bodyReader.read();
2105
+ VERBOSE(`[car] chunk`, { done, value });
2106
+ if (done) {
2107
+ break;
2108
+ }
2109
+ yield value;
2110
+ }
2111
+ }();
1138
2112
  }
1139
2113
 
1140
2114
  // src/pubsub/pub-push.ts
1141
- var { WARN: WARN2, LOG: LOG2, DEBUG: DEBUG2, VERBOSE: VERBOSE2, ERROR: ERROR2 } = E.setup(E.DEBUG);
2115
+ var { WARN: WARN2, LOG: LOG2, DEBUG: DEBUG2, VERBOSE: VERBOSE2, ERROR: ERROR2 } = E.setup(E.INFO);
1142
2116
  async function preparePubForPush(agent, appThread, threadToPublish, publication, prevPubCID) {
1143
2117
  let logsToPublish = getLogsFromThread(threadToPublish);
1144
2118
  DEBUG2(`[preparePubForPush] Collected ${logsToPublish.length} logs :`, {
@@ -1176,7 +2150,7 @@ async function preparePubForPush(agent, appThread, threadToPublish, publication,
1176
2150
  if (!sharedKey || !sharedKeyMap) {
1177
2151
  throw ERROR2("sharedAgents but no Keys/Map", { sharedAgents, sharedKeyMap, sharedKey });
1178
2152
  }
1179
- LOG2("encrypting", { sharedAgents, sharedKeyMap });
2153
+ VERBOSE2("encrypting", { sharedAgents, sharedKeyMap });
1180
2154
  for (const [eachAgent, eachEncKey] of Array.from(sharedKeyMap.entries())) {
1181
2155
  VERBOSE2.force("adding key", { eachAgent, eachEncKey });
1182
2156
  agentSharedKeyLogs.push({
@@ -1199,7 +2173,13 @@ async function preparePubForPush(agent, appThread, threadToPublish, publication,
1199
2173
  } else {
1200
2174
  maybeEncryptedApplogs = logsToPublish;
1201
2175
  }
1202
- DEBUG2.force("adding all agent info and pubAtoms", { publication, agent, logsToPublish, threadToPublish, agentSharedKeyLogs });
2176
+ DEBUG2("adding all agent info and pubAtoms", {
2177
+ publication,
2178
+ agent,
2179
+ logsToPublish,
2180
+ // threadToPublish, - very verbose
2181
+ agentSharedKeyLogs
2182
+ });
1203
2183
  const infoLogs = [
1204
2184
  ...rollingFilter(lastWriteWins(appThread), {
1205
2185
  // TODO: use static filter for performance
@@ -1221,7 +2201,7 @@ async function preparePubForPush(agent, appThread, threadToPublish, publication,
1221
2201
  if (!infologsToEncode.length) {
1222
2202
  throw ERROR2("no valid infologs", { agent, maybeEncryptedApplogs, infoLogs, applogsToEncode, infologsToEncode, prevPubCID });
1223
2203
  }
1224
- const encodedPub = encodePubAsCar(agent, applogsToEncode, infologsToEncode, prevPubCID);
2204
+ const encodedPub = await encodePubAsCar(agent, applogsToEncode, infologsToEncode, prevPubCID);
1225
2205
  DEBUG2("inPreparePubForPush", { encodedPub });
1226
2206
  return encodedPub;
1227
2207
  }
@@ -1315,143 +2295,13 @@ async function encodeApplogsAsIPLD(applogs) {
1315
2295
  return { cids, encodedApplogs };
1316
2296
  }
1317
2297
 
1318
- // src/pubsub/pubsub-types.ts
1319
- function isPublication(obj) {
1320
- return obj?.pk !== void 0 && obj?.lastPush !== void 0;
1321
- }
1322
- function isSubscription(obj) {
1323
- return obj?.lastPull !== void 0;
1324
- }
1325
- function agentToShortHash(agentString) {
1326
- return cyrb53hash(agentString, 31, 7);
1327
- }
1328
-
1329
- // src/ipfs/car.ts
1330
- var { WARN: WARN3, LOG: LOG3, DEBUG: DEBUG3, VERBOSE: VERBOSE3, ERROR: ERROR3 } = E.setup(E.DEBUG);
1331
- async function decodePubFromCar(car) {
1332
- const decoded = await getBlocksOfCar(car);
1333
- return await decodePubFromBlocks(decoded);
1334
- }
1335
- async function decodePubFromBlocks({ rootCID, blockStore }, recursionTrace = []) {
1336
- if (!rootCID || !blockStore) {
1337
- throw ERROR3("Empty roots/blocks", { rootCID, blockStore });
1338
- }
1339
- const root = await getDecodedBlock(blockStore, rootCID);
1340
- let pubLogsArray;
1341
- let applogsCID = null;
1342
- let info = null;
1343
- VERBOSE3(`[decodePubFromBlocks] root:`, rootCID.toString(), root, { blockStore });
1344
- if (!root)
1345
- throw ERROR3("root not found in blockStore", { blockStore, rootCID });
1346
- if (root?.info) {
1347
- applogsCID = root.applogs;
1348
- const applogsBlock = await getDecodedBlock(blockStore, applogsCID);
1349
- pubLogsArray = await unchunkApplogsBlock(applogsBlock, blockStore);
1350
- info = await getDecodedBlock(blockStore, root.info);
1351
- DEBUG3(`new format - infoLogs`, info.logs.map((l) => ({ [l.toString()]: l })));
1352
- } else {
1353
- pubLogsArray = root.applogs;
1354
- }
1355
- const resolveLogFromCidLink = async (cidOrLink) => {
1356
- const cid = cidOrLink;
1357
- const applog = await getDecodedBlock(blockStore, cid);
1358
- if (!applog) {
1359
- ERROR3(`Could not find applog CID in pub blocks:`, cid.toString(), { cid, root, blockStore });
1360
- throw new Error(`Could not find applog CID in pub blocks: ${cid.toString()}`);
1361
- }
1362
- if (applog.pv instanceof CID)
1363
- applog.pv = applog.pv.toV1().toString();
1364
- return {
1365
- ...applog,
1366
- cid: cid.toV1().toString()
1367
- };
1368
- };
1369
- let applogs;
1370
- applogs = await Promise.all(pubLogsArray.map(resolveLogFromCidLink));
1371
- if (root.prev) {
1372
- if (areCidsEqual(root.prev, rootCID) || containsCid(recursionTrace, root.prev)) {
1373
- throw ERROR3(`[pubFromBlocks] pub chain has a loop`, { rootCID, prev: root.prev, recursionTrace });
1374
- }
1375
- applogs.push(...(await decodePubFromBlocks({ rootCID: root.prev, blockStore }, [...recursionTrace, rootCID])).applogs);
1376
- }
1377
- const result = {
1378
- cid: rootCID,
1379
- info: {
1380
- ...info,
1381
- logs: await Promise.all(info.logs.map(resolveLogFromCidLink))
1382
- },
1383
- applogsCID,
1384
- applogs
1385
- };
1386
- DEBUG3("[decodePubFromBlocks] result:", result, { rootCID: rootCID.toString(), root, blockStore, applogs, info });
1387
- return result;
1388
- }
1389
- async function getBlocksOfCar(car) {
1390
- const rootsFromCar = await car.getRoots();
1391
- const roots = rootsFromCar.map((c) => (typeof c.toV1 === "function" ? c : CID.decode(c.bytes)).toV1().toString());
1392
- const blocks = /* @__PURE__ */ new Map();
1393
- for await (const { cid: cidFromCarblocks, bytes } of car.blocks()) {
1394
- const cid = typeof cidFromCarblocks.toV1 === "function" ? cidFromCarblocks : CID.decode(cidFromCarblocks.bytes);
1395
- VERBOSE3({ cidFromCarblocks, cid });
1396
- blocks.set(cid.toV1().toString(), bytes);
1397
- }
1398
- if (roots.length !== 1) {
1399
- WARN3("Unexpected roots count:", roots);
1400
- }
1401
- return {
1402
- rootCID: CID.parse(roots[0]),
1403
- blockStore: {
1404
- get: (cid) => blocks.get(cid.toV1().toString())
1405
- }
1406
- };
1407
- }
1408
- async function getDecodedBlock(blockStore, cid) {
1409
- try {
1410
- var blob = await blockStore.get(cid);
1411
- if (!blob) {
1412
- WARN3("returning null");
1413
- return null;
1414
- }
1415
- } catch (err) {
1416
- if (err.message === "Not Found")
1417
- return null;
1418
- throw err;
1419
- }
1420
- return decode3(blob);
1421
- }
1422
- async function makeCarOut(roots, blocks) {
1423
- const { writer, out } = CarWriter.create(Array.isArray(roots) ? roots : [roots]);
1424
- VERBOSE3(`Writing ${blocks.length} blocks to CAR`, { roots, blocks });
1425
- blocks.forEach((b) => writer.put(b));
1426
- writer.close();
1427
- return out;
1428
- }
1429
- async function makeCarBlob(roots, blocks) {
1430
- const carOut = await makeCarOut(roots, blocks);
1431
- const chunks = [];
1432
- for await (const chunk of carOut) {
1433
- chunks.push(chunk);
1434
- }
1435
- const blob = new Blob(chunks);
1436
- return blob;
1437
- }
1438
- async function carFromBlob(blob) {
1439
- return CarReader.fromBytes(new Uint8Array(await blob.arrayBuffer()));
1440
- }
1441
- function streamReaderToIterable(bodyReader) {
1442
- return async function* () {
1443
- while (true) {
1444
- const { done, value } = await bodyReader.read();
1445
- VERBOSE3(`[car] chunk`, { done, value });
1446
- if (done) {
1447
- break;
1448
- }
1449
- yield value;
1450
- }
1451
- }();
1452
- }
1453
-
1454
2298
  export {
2299
+ preparePubForPush,
2300
+ encodePubAsCar,
2301
+ chunkApplogs,
2302
+ unchunkApplogsBlock,
2303
+ isPubBlockChunks,
2304
+ encodeApplogsAsCar,
1455
2305
  decodePubFromCar,
1456
2306
  decodePubFromBlocks,
1457
2307
  getBlocksOfCar,
@@ -1459,16 +2309,6 @@ export {
1459
2309
  makeCarOut,
1460
2310
  makeCarBlob,
1461
2311
  carFromBlob,
1462
- streamReaderToIterable,
1463
- integratePub,
1464
- preparePubForPush,
1465
- encodePubAsCar,
1466
- chunkApplogs,
1467
- unchunkApplogsBlock,
1468
- isPubBlockChunks,
1469
- encodeApplogsAsCar,
1470
- isPublication,
1471
- isSubscription,
1472
- agentToShortHash
2312
+ streamReaderToIterable
1473
2313
  };
1474
- //# sourceMappingURL=chunk-BLAIQY2K.min.js.map
2314
+ //# sourceMappingURL=chunk-C75X52ZX.min.js.map