@fireproof/core 0.5.16 → 0.5.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2318,7 +2318,7 @@ const coerce$1 = o => {
2318
2318
  * @param {Code} code
2319
2319
  * @param {Uint8Array} digest
2320
2320
  */
2321
- const create$6 = (code, digest) => {
2321
+ const create$8 = (code, digest) => {
2322
2322
  const size = digest.byteLength;
2323
2323
  const sizeOffset = encodingLength$1(code);
2324
2324
  const digestOffset = sizeOffset + encodingLength$1(size);
@@ -3078,7 +3078,7 @@ let CID$1 = class CID {
3078
3078
  switch (this.version) {
3079
3079
  case 0: {
3080
3080
  const { code, digest } = this.multihash;
3081
- const multihash = create$6(code, digest);
3081
+ const multihash = create$8(code, digest);
3082
3082
  return /** @type {CID<Data, Format, Alg, 1>} */ (
3083
3083
  CID.createV1(this.code, multihash)
3084
3084
  )
@@ -3552,9 +3552,9 @@ class Hasher {
3552
3552
  if (input instanceof Uint8Array) {
3553
3553
  const result = this.encode(input);
3554
3554
  return result instanceof Uint8Array
3555
- ? create$6(this.code, result)
3555
+ ? create$8(this.code, result)
3556
3556
  /* c8 ignore next 1 */
3557
- : result.then(digest => create$6(this.code, digest))
3557
+ : result.then(digest => create$8(this.code, digest))
3558
3558
  } else {
3559
3559
  throw Error('Unknown type, must be binary type')
3560
3560
  /* c8 ignore next 1 */
@@ -3821,7 +3821,7 @@ function createUnsafe ({ bytes, cid, value: maybeValue, codec }) {
3821
3821
  * @param {API.MultihashHasher<Alg>} options.hasher
3822
3822
  * @returns {Promise<API.BlockView<T, Code, Alg, V>>}
3823
3823
  */
3824
- async function create$5 ({ bytes, cid, hasher, codec }) {
3824
+ async function create$7 ({ bytes, cid, hasher, codec }) {
3825
3825
  if (!bytes) throw new Error('Missing required argument "bytes"')
3826
3826
  if (!hasher) throw new Error('Missing required argument "hasher"')
3827
3827
  const value = codec.decode(bytes);
@@ -3978,8 +3978,8 @@ class Token {
3978
3978
  }
3979
3979
 
3980
3980
  const useBuffer = globalThis.process && !globalThis.process.browser && globalThis.Buffer && typeof globalThis.Buffer.isBuffer === 'function';
3981
- const textDecoder = new TextDecoder();
3982
- const textEncoder = new TextEncoder();
3981
+ const textDecoder$1 = new TextDecoder();
3982
+ const textEncoder$1 = new TextEncoder();
3983
3983
  function isBuffer$1(buf) {
3984
3984
  return useBuffer && globalThis.Buffer.isBuffer(buf);
3985
3985
  }
@@ -3992,12 +3992,12 @@ function asU8A(buf) {
3992
3992
  const toString = useBuffer ? (bytes, start, end) => {
3993
3993
  return end - start > 64 ? globalThis.Buffer.from(bytes.subarray(start, end)).toString('utf8') : utf8Slice(bytes, start, end);
3994
3994
  } : (bytes, start, end) => {
3995
- return end - start > 64 ? textDecoder.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end);
3995
+ return end - start > 64 ? textDecoder$1.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end);
3996
3996
  };
3997
3997
  const fromString = useBuffer ? string => {
3998
3998
  return string.length > 64 ? globalThis.Buffer.from(string) : utf8ToBytes(string);
3999
3999
  } : string => {
4000
- return string.length > 64 ? textEncoder.encode(string) : utf8ToBytes(string);
4000
+ return string.length > 64 ? textEncoder$1.encode(string) : utf8ToBytes(string);
4001
4001
  };
4002
4002
  const fromArray$1 = arr => {
4003
4003
  return Uint8Array.from(arr);
@@ -5715,12 +5715,13 @@ async function findEventsToSync (blocks, head) {
5715
5715
  // console.time(callTag + '.findCommonAncestorWithSortedEvents')
5716
5716
  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
5717
5717
  // console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
5718
- // console.log('sorted', !!ancestor, sorted.length)
5718
+ // console.log('sorted', !!ancestor, sorted)
5719
5719
  // console.time(callTag + '.contains')
5720
5720
 
5721
5721
  const toSync = ancestor ? await asyncFilter(sorted, async uks => !(await contains(events, ancestor, uks.cid))) : sorted;
5722
5722
  // console.timeEnd(callTag + '.contains')
5723
- // console.log('optimize sorted', !!ancestor, sorted.length - toSync.length)
5723
+ const sortDifference = sorted.length - toSync.length;
5724
+ if (sortDifference / sorted.length > 0.6) console.log('optimize sorted', !!ancestor, sortDifference);
5724
5725
 
5725
5726
  return { cids: events, events: toSync }
5726
5727
  }
@@ -6756,7 +6757,7 @@ class IPLDLeaf extends IPLDNode {
6756
6757
  return true;
6757
6758
  }
6758
6759
  }
6759
- const create$4 = async function* (obj) {
6760
+ const create$6 = async function* (obj) {
6760
6761
  let {LeafClass, LeafEntryClass, BranchClass, BranchEntryClass, list, chunker, compare, ...opts} = obj;
6761
6762
  list = list.map(value => new LeafEntryClass(value, opts));
6762
6763
  opts.compare = compare;
@@ -6788,35 +6789,36 @@ const create$4 = async function* (obj) {
6788
6789
  };
6789
6790
 
6790
6791
  class MapEntry extends Entry {
6791
- async identity() {
6792
+ async identity () {
6792
6793
  const encoded = await this.codec.encode(await this.encodeNode());
6793
6794
  const hash = await this.hasher.encode(encoded);
6794
- return readUInt32LE$1(hash);
6795
+ return readUInt32LE$1(hash)
6795
6796
  }
6796
6797
  }
6797
6798
  class MapLeafEntry extends MapEntry {
6798
- constructor(node, opts) {
6799
+ constructor (node, opts) {
6799
6800
  super(node, opts);
6800
6801
  this.value = node.value;
6801
6802
  }
6802
- encodeNode() {
6803
+
6804
+ encodeNode () {
6803
6805
  return [
6804
6806
  this.key,
6805
6807
  this.value
6806
- ];
6808
+ ]
6807
6809
  }
6808
6810
  }
6809
6811
  class MapBranchEntry extends MapEntry {
6810
- constructor(node, opts) {
6811
- if (!node.address)
6812
- throw new Error('Cannot create MapBranchEntry without address');
6812
+ constructor (node, opts) {
6813
+ if (!node.address) { throw new Error('Cannot create MapBranchEntry without address') }
6813
6814
  super(node, opts);
6814
6815
  }
6815
- async encodeNode() {
6816
+
6817
+ async encodeNode () {
6816
6818
  return [
6817
6819
  this.key,
6818
6820
  await this.address
6819
- ];
6821
+ ]
6820
6822
  }
6821
6823
  }
6822
6824
  const getValue = async (node, key) => {
@@ -6827,7 +6829,7 @@ const getValue = async (node, key) => {
6827
6829
  return {
6828
6830
  result: entry.value,
6829
6831
  cids
6830
- };
6832
+ }
6831
6833
  };
6832
6834
  const getManyValues = async (node, keys) => {
6833
6835
  const {
@@ -6837,34 +6839,38 @@ const getManyValues = async (node, keys) => {
6837
6839
  return {
6838
6840
  result: entries.map(entry => entry.value),
6839
6841
  cids
6840
- };
6842
+ }
6841
6843
  };
6842
6844
  class MapLeaf extends IPLDLeaf {
6843
- get(key) {
6844
- return getValue(this, key);
6845
+ get (key) {
6846
+ return getValue(this, key)
6845
6847
  }
6846
- getMany(keys) {
6847
- return getManyValues(this, keys);
6848
+
6849
+ getMany (keys) {
6850
+ return getManyValues(this, keys)
6848
6851
  }
6849
- bulk(bulk, opts = {}, isRoot = true) {
6852
+
6853
+ bulk (bulk, opts = {}, isRoot = true) {
6850
6854
  return super.bulk(bulk, {
6851
6855
  ...classes$1,
6852
6856
  ...opts
6853
- }, isRoot);
6857
+ }, isRoot)
6854
6858
  }
6855
6859
  }
6856
6860
  class MapBranch extends IPLDBranch {
6857
- get(key) {
6858
- return getValue(this, key);
6861
+ get (key) {
6862
+ return getValue(this, key)
6859
6863
  }
6860
- getMany(keys) {
6861
- return getManyValues(this, keys);
6864
+
6865
+ getMany (keys) {
6866
+ return getManyValues(this, keys)
6862
6867
  }
6863
- bulk(bulk, opts = {}, isRoot = true) {
6868
+
6869
+ bulk (bulk, opts = {}, isRoot = true) {
6864
6870
  return super.bulk(bulk, {
6865
6871
  ...classes$1,
6866
6872
  ...opts
6867
- }, isRoot);
6873
+ }, isRoot)
6868
6874
  }
6869
6875
  }
6870
6876
  const classes$1 = {
@@ -6879,9 +6885,8 @@ const createGetNode$1 = (get, cache, chunker, codec, hasher, compare, opts) => {
6879
6885
  const BranchClass = opts.BranchClass || MapBranch;
6880
6886
  const BranchEntryClass = opts.BranchEntryClass || MapBranchEntry;
6881
6887
  const getNode = async cid => {
6882
- if (cache.has(cid))
6883
- return cache.get(cid);
6884
- return get(cid).then(block => decoder(block));
6888
+ if (cache.has(cid)) { return cache.get(cid) }
6889
+ return get(cid).then(block => decoder(block))
6885
6890
  };
6886
6891
  const decoder = makeDecoder({
6887
6892
  chunker,
@@ -6895,11 +6900,10 @@ const createGetNode$1 = (get, cache, chunker, codec, hasher, compare, opts) => {
6895
6900
  BranchEntryClass,
6896
6901
  BranchClass
6897
6902
  });
6898
- return getNode;
6903
+ return getNode
6899
6904
  };
6900
- const create$3 = ({get, cache, chunker, list, codec, hasher, sorted, compare, ...opts}) => {
6901
- if (!sorted)
6902
- list = list.sort(({key: a}, {key: b}) => compare(a, b));
6905
+ const create$5 = ({ get, cache, chunker, list, codec, hasher, sorted, compare, ...opts }) => {
6906
+ if (!sorted) { list = list.sort(({ key: a }, { key: b }) => compare(a, b)); }
6903
6907
  const getNode = createGetNode$1(get, cache, chunker, codec, hasher, compare, opts);
6904
6908
  const _opts = {
6905
6909
  list,
@@ -6915,19 +6919,19 @@ const create$3 = ({get, cache, chunker, list, codec, hasher, sorted, compare, ..
6915
6919
  BranchClass: opts.BranchClass || MapBranch,
6916
6920
  BranchEntryClass: opts.BranchEntryClass || MapBranchEntry
6917
6921
  };
6918
- return create$4(_opts);
6922
+ return create$6(_opts)
6919
6923
  };
6920
- const load$2 = ({cid, get, cache, chunker, codec, hasher, compare, ...opts}) => {
6924
+ const load$4 = ({ cid, get, cache, chunker, codec, hasher, compare, ...opts }) => {
6921
6925
  const getNode = createGetNode$1(get, cache, chunker, codec, hasher, compare, opts);
6922
- return getNode(cid);
6926
+ return getNode(cid)
6923
6927
  };
6924
- function makeDecoder({chunker, cache, getNode, codec, hasher, compare, LeafEntryClass, LeafClass, BranchEntryClass, BranchClass}) {
6928
+ function makeDecoder ({ chunker, cache, getNode, codec, hasher, compare, LeafEntryClass, LeafClass, BranchEntryClass, BranchClass }) {
6925
6929
  const entryOpts = {
6926
6930
  codec,
6927
6931
  hasher
6928
6932
  };
6929
6933
  return block => {
6930
- const {value} = block;
6934
+ const { value } = block;
6931
6935
  const opts = {
6932
6936
  chunker,
6933
6937
  cache,
@@ -6954,7 +6958,7 @@ function makeDecoder({chunker, cache, getNode, codec, hasher, compare, LeafEntry
6954
6958
  }, entryOpts));
6955
6959
  CLS = BranchClass;
6956
6960
  } else {
6957
- throw new Error('Unknown block data, does not match schema');
6961
+ throw new Error('Unknown block data, does not match schema')
6958
6962
  }
6959
6963
  const entryList = new EntryList({
6960
6964
  entries,
@@ -6965,8 +6969,8 @@ function makeDecoder({chunker, cache, getNode, codec, hasher, compare, LeafEntry
6965
6969
  ...opts
6966
6970
  });
6967
6971
  cache.set(node);
6968
- return node;
6969
- };
6972
+ return node
6973
+ }
6970
6974
  }
6971
6975
 
6972
6976
  const nocache = {
@@ -7010,7 +7014,7 @@ const parse = (source, base) => CID$1.parse(source, base);
7010
7014
  *
7011
7015
  */
7012
7016
 
7013
- const Kinds = {
7017
+ const Kinds$1 = {
7014
7018
  Null: /**
7015
7019
  * @param {any} obj
7016
7020
  * @returns {boolean}
@@ -7038,7 +7042,7 @@ const Kinds = {
7038
7042
  Link: /**
7039
7043
  * @param {any} obj
7040
7044
  * @returns {boolean}
7041
- */ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID === obj,
7045
+ */ (/** @type {any} */ obj) => !Kinds$1.Null(obj) && typeof obj === 'object' && obj.asCID === obj,
7042
7046
  List: /**
7043
7047
  * @param {any} obj
7044
7048
  * @returns {boolean}
@@ -7046,31 +7050,31 @@ const Kinds = {
7046
7050
  Map: /**
7047
7051
  * @param {any} obj
7048
7052
  * @returns {boolean}
7049
- */ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID !== obj && !Kinds.List(obj) && !Kinds.Bytes(obj)
7053
+ */ (/** @type {any} */ obj) => !Kinds$1.Null(obj) && typeof obj === 'object' && obj.asCID !== obj && !Kinds$1.List(obj) && !Kinds$1.Bytes(obj)
7050
7054
  };
7051
7055
  /** @type {{ [k in string]: (obj:any)=>boolean}} */
7052
- const Types = {
7053
- Int: Kinds.Int,
7056
+ const Types$1 = {
7057
+ Int: Kinds$1.Int,
7054
7058
  'CarHeader > version': /**
7055
7059
  * @param {any} obj
7056
7060
  * @returns {boolean}
7057
- */ (/** @type {any} */ obj) => Types.Int(obj),
7058
- 'CarHeader > roots (anon) > valueType (anon)': Kinds.Link,
7061
+ */ (/** @type {any} */ obj) => Types$1.Int(obj),
7062
+ 'CarHeader > roots (anon) > valueType (anon)': Kinds$1.Link,
7059
7063
  'CarHeader > roots (anon)': /**
7060
7064
  * @param {any} obj
7061
7065
  * @returns {boolean}
7062
- */ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types['CarHeader > roots (anon) > valueType (anon)']),
7066
+ */ (/** @type {any} */ obj) => Kinds$1.List(obj) && Array.prototype.every.call(obj, Types$1['CarHeader > roots (anon) > valueType (anon)']),
7063
7067
  'CarHeader > roots': /**
7064
7068
  * @param {any} obj
7065
7069
  * @returns {boolean}
7066
- */ (/** @type {any} */ obj) => Types['CarHeader > roots (anon)'](obj),
7070
+ */ (/** @type {any} */ obj) => Types$1['CarHeader > roots (anon)'](obj),
7067
7071
  CarHeader: /**
7068
7072
  * @param {any} obj
7069
7073
  * @returns {boolean}
7070
- */ (/** @type {any} */ obj) => { const keys = obj && Object.keys(obj); return Kinds.Map(obj) && ['version'].every((k) => keys.includes(k)) && Object.entries(obj).every(([name, value]) => Types['CarHeader > ' + name] && Types['CarHeader > ' + name](value)) }
7074
+ */ (/** @type {any} */ obj) => { const keys = obj && Object.keys(obj); return Kinds$1.Map(obj) && ['version'].every((k) => keys.includes(k)) && Object.entries(obj).every(([name, value]) => Types$1['CarHeader > ' + name] && Types$1['CarHeader > ' + name](value)) }
7071
7075
  };
7072
7076
 
7073
- const CarHeader = Types.CarHeader;
7077
+ const CarHeader = Types$1.CarHeader;
7074
7078
 
7075
7079
  var encode_1$1 = encode$4;
7076
7080
 
@@ -9812,15 +9816,15 @@ var versions = {};
9812
9816
  var release = {};
9813
9817
  var config = {};
9814
9818
 
9815
- function noop$2() {}
9819
+ function noop$3() {}
9816
9820
 
9817
- var on = noop$2;
9818
- var addListener = noop$2;
9819
- var once$2 = noop$2;
9820
- var off = noop$2;
9821
- var removeListener = noop$2;
9822
- var removeAllListeners = noop$2;
9823
- var emit = noop$2;
9821
+ var on = noop$3;
9822
+ var addListener = noop$3;
9823
+ var once$2 = noop$3;
9824
+ var off = noop$3;
9825
+ var removeListener = noop$3;
9826
+ var removeAllListeners = noop$3;
9827
+ var emit = noop$3;
9824
9828
 
9825
9829
  function binding(name) {
9826
9830
  throw new Error('process.binding is not supported');
@@ -12059,14 +12063,14 @@ function once$1(callback) {
12059
12063
  callback.apply(this, args);
12060
12064
  };
12061
12065
  }
12062
- function noop$1() {}
12066
+ function noop$2() {}
12063
12067
  function isRequest$1(stream) {
12064
12068
  return stream.setHeader && typeof stream.abort === 'function';
12065
12069
  }
12066
12070
  function eos$1(stream, opts, callback) {
12067
12071
  if (typeof opts === 'function') return eos$1(stream, null, opts);
12068
12072
  if (!opts) opts = {};
12069
- callback = once$1(callback || noop$1);
12073
+ callback = once$1(callback || noop$2);
12070
12074
  let readable = opts.readable || opts.readable !== false && stream.readable;
12071
12075
  let writable = opts.writable || opts.writable !== false && stream.writable;
12072
12076
  const onlegacyfinish = () => {
@@ -13495,7 +13499,7 @@ function once(callback) {
13495
13499
  const _require$codes = errorsBrowser.codes,
13496
13500
  ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
13497
13501
  ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
13498
- function noop(err) {
13502
+ function noop$1(err) {
13499
13503
  // Rethrow the error if it exists to avoid swallowing it
13500
13504
  if (err) throw err;
13501
13505
  }
@@ -13536,8 +13540,8 @@ function pipe(from, to) {
13536
13540
  return from.pipe(to);
13537
13541
  }
13538
13542
  function popCallback(streams) {
13539
- if (!streams.length) return noop;
13540
- if (typeof streams[streams.length - 1] !== 'function') return noop;
13543
+ if (!streams.length) return noop$1;
13544
+ if (typeof streams[streams.length - 1] !== 'function') return noop$1;
13541
13545
  return streams.pop();
13542
13546
  }
13543
13547
  function pipeline() {
@@ -17451,19 +17455,19 @@ utils$n.padSplit = function padSplit(num, size, group) {
17451
17455
  return out.join(' ');
17452
17456
  };
17453
17457
 
17454
- var minimalisticAssert = assert$j;
17458
+ var minimalisticAssert = assert$k;
17455
17459
 
17456
- function assert$j(val, msg) {
17460
+ function assert$k(val, msg) {
17457
17461
  if (!val)
17458
17462
  throw new Error(msg || 'Assertion failed');
17459
17463
  }
17460
17464
 
17461
- assert$j.equal = function assertEqual(l, r, msg) {
17465
+ assert$k.equal = function assertEqual(l, r, msg) {
17462
17466
  if (l != r)
17463
17467
  throw new Error(msg || ('Assertion failed: ' + l + ' != ' + r));
17464
17468
  };
17465
17469
 
17466
- var assert$i = minimalisticAssert;
17470
+ var assert$j = minimalisticAssert;
17467
17471
 
17468
17472
  function Cipher$3(options) {
17469
17473
  this.options = options;
@@ -17596,14 +17600,14 @@ Cipher$3.prototype._unpad = function _unpad(buffer) {
17596
17600
  };
17597
17601
 
17598
17602
  Cipher$3.prototype._finalDecrypt = function _finalDecrypt() {
17599
- assert$i.equal(this.bufferOff, this.blockSize, 'Not enough data to decrypt');
17603
+ assert$j.equal(this.bufferOff, this.blockSize, 'Not enough data to decrypt');
17600
17604
  var out = new Array(this.blockSize);
17601
17605
  this._flushBuffer(out, 0);
17602
17606
 
17603
17607
  return this._unpad(out);
17604
17608
  };
17605
17609
 
17606
- var assert$h = minimalisticAssert;
17610
+ var assert$i = minimalisticAssert;
17607
17611
  var inherits$h = require$$3$1;
17608
17612
 
17609
17613
  var utils$m = utils$n;
@@ -17637,7 +17641,7 @@ var shiftTable = [
17637
17641
  DES$3.prototype.deriveKeys = function deriveKeys(state, key) {
17638
17642
  state.keys = new Array(16 * 2);
17639
17643
 
17640
- assert$h.equal(key.length, this.blockSize, 'Invalid key length');
17644
+ assert$i.equal(key.length, this.blockSize, 'Invalid key length');
17641
17645
 
17642
17646
  var kL = utils$m.readUInt32BE(key, 0);
17643
17647
  var kR = utils$m.readUInt32BE(key, 4);
@@ -17687,7 +17691,7 @@ DES$3.prototype._pad = function _pad(buffer, off) {
17687
17691
  DES$3.prototype._unpad = function _unpad(buffer) {
17688
17692
  var pad = buffer[buffer.length - 1];
17689
17693
  for (var i = buffer.length - pad; i < buffer.length; i++)
17690
- assert$h.equal(buffer[i], pad);
17694
+ assert$i.equal(buffer[i], pad);
17691
17695
 
17692
17696
  return buffer.slice(0, buffer.length - pad);
17693
17697
  };
@@ -17746,13 +17750,13 @@ DES$3.prototype._decrypt = function _decrypt(state, lStart, rStart, out, off) {
17746
17750
 
17747
17751
  var cbc$1 = {};
17748
17752
 
17749
- var assert$g = minimalisticAssert;
17753
+ var assert$h = minimalisticAssert;
17750
17754
  var inherits$g = require$$3$1;
17751
17755
 
17752
17756
  var proto = {};
17753
17757
 
17754
17758
  function CBCState(iv) {
17755
- assert$g.equal(iv.length, 8, 'Invalid IV length');
17759
+ assert$h.equal(iv.length, 8, 'Invalid IV length');
17756
17760
 
17757
17761
  this.iv = new Array(8);
17758
17762
  for (var i = 0; i < this.iv.length; i++)
@@ -17810,14 +17814,14 @@ proto._update = function _update(inp, inOff, out, outOff) {
17810
17814
  }
17811
17815
  };
17812
17816
 
17813
- var assert$f = minimalisticAssert;
17817
+ var assert$g = minimalisticAssert;
17814
17818
  var inherits$f = require$$3$1;
17815
17819
 
17816
17820
  var Cipher$1 = cipher;
17817
17821
  var DES$2 = des$1;
17818
17822
 
17819
17823
  function EDEState(type, key) {
17820
- assert$f.equal(key.length, 24, 'Invalid key length');
17824
+ assert$g.equal(key.length, 24, 'Invalid key length');
17821
17825
 
17822
17826
  var k1 = key.slice(0, 8);
17823
17827
  var k2 = key.slice(8, 16);
@@ -26921,7 +26925,7 @@ var BN$9 = bnExports$1;
26921
26925
  var utils$j = utils$l;
26922
26926
  var getNAF = utils$j.getNAF;
26923
26927
  var getJSF = utils$j.getJSF;
26924
- var assert$e = utils$j.assert;
26928
+ var assert$f = utils$j.assert;
26925
26929
 
26926
26930
  function BaseCurve(type, conf) {
26927
26931
  this.type = type;
@@ -26967,7 +26971,7 @@ BaseCurve.prototype.validate = function validate() {
26967
26971
  };
26968
26972
 
26969
26973
  BaseCurve.prototype._fixedNafMul = function _fixedNafMul(p, k) {
26970
- assert$e(p.precomputed);
26974
+ assert$f(p.precomputed);
26971
26975
  var doubles = p._getDoubles();
26972
26976
 
26973
26977
  var naf = getNAF(k, 1, this._bitLength);
@@ -27024,7 +27028,7 @@ BaseCurve.prototype._wnafMul = function _wnafMul(p, k) {
27024
27028
  if (i < 0)
27025
27029
  break;
27026
27030
  var z = naf[i];
27027
- assert$e(z !== 0);
27031
+ assert$f(z !== 0);
27028
27032
  if (p.type === 'affine') {
27029
27033
  // J +- P
27030
27034
  if (z > 0)
@@ -27192,9 +27196,9 @@ BaseCurve.prototype.decodePoint = function decodePoint(bytes, enc) {
27192
27196
  if ((bytes[0] === 0x04 || bytes[0] === 0x06 || bytes[0] === 0x07) &&
27193
27197
  bytes.length - 1 === 2 * len) {
27194
27198
  if (bytes[0] === 0x06)
27195
- assert$e(bytes[bytes.length - 1] % 2 === 0);
27199
+ assert$f(bytes[bytes.length - 1] % 2 === 0);
27196
27200
  else if (bytes[0] === 0x07)
27197
- assert$e(bytes[bytes.length - 1] % 2 === 1);
27201
+ assert$f(bytes[bytes.length - 1] % 2 === 1);
27198
27202
 
27199
27203
  var res = this.point(bytes.slice(1, 1 + len),
27200
27204
  bytes.slice(1 + len, 1 + 2 * len));
@@ -27301,7 +27305,7 @@ var BN$8 = bnExports$1;
27301
27305
  var inherits$9 = require$$3$1;
27302
27306
  var Base$2 = base$2;
27303
27307
 
27304
- var assert$d = utils$i.assert;
27308
+ var assert$e = utils$i.assert;
27305
27309
 
27306
27310
  function ShortCurve(conf) {
27307
27311
  Base$2.call(this, 'short', conf);
@@ -27346,7 +27350,7 @@ ShortCurve.prototype._getEndomorphism = function _getEndomorphism(conf) {
27346
27350
  lambda = lambdas[0];
27347
27351
  } else {
27348
27352
  lambda = lambdas[1];
27349
- assert$d(this.g.mul(lambda).x.cmp(this.g.x.redMul(beta)) === 0);
27353
+ assert$e(this.g.mul(lambda).x.cmp(this.g.x.redMul(beta)) === 0);
27350
27354
  }
27351
27355
  }
27352
27356
 
@@ -28415,7 +28419,7 @@ var BN$6 = bnExports$1;
28415
28419
  var inherits$7 = require$$3$1;
28416
28420
  var Base = base$2;
28417
28421
 
28418
- var assert$c = utils$g.assert;
28422
+ var assert$d = utils$g.assert;
28419
28423
 
28420
28424
  function EdwardsCurve(conf) {
28421
28425
  // NOTE: Important as we are creating point in Base.call()
@@ -28432,7 +28436,7 @@ function EdwardsCurve(conf) {
28432
28436
  this.d = new BN$6(conf.d, 16).toRed(this.red);
28433
28437
  this.dd = this.d.redAdd(this.d);
28434
28438
 
28435
- assert$c(!this.twisted || this.c.fromRed().cmpn(1) === 0);
28439
+ assert$d(!this.twisted || this.c.fromRed().cmpn(1) === 0);
28436
28440
  this.oneC = (conf.c | 0) === 1;
28437
28441
  }
28438
28442
  inherits$7(EdwardsCurve, Base);
@@ -28860,7 +28864,7 @@ var hash$2 = {};
28860
28864
 
28861
28865
  var utils$f = {};
28862
28866
 
28863
- var assert$b = minimalisticAssert;
28867
+ var assert$c = minimalisticAssert;
28864
28868
  var inherits$6 = require$$3$1;
28865
28869
 
28866
28870
  utils$f.inherits = inherits$6;
@@ -28981,7 +28985,7 @@ utils$f.zero8 = zero8;
28981
28985
 
28982
28986
  function join32(msg, start, end, endian) {
28983
28987
  var len = end - start;
28984
- assert$b(len % 4 === 0);
28988
+ assert$c(len % 4 === 0);
28985
28989
  var res = new Array(len / 4);
28986
28990
  for (var i = 0, k = start; i < res.length; i++, k += 4) {
28987
28991
  var w;
@@ -29140,7 +29144,7 @@ utils$f.shr64_lo = shr64_lo$1;
29140
29144
  var common$6 = {};
29141
29145
 
29142
29146
  var utils$e = utils$f;
29143
- var assert$a = minimalisticAssert;
29147
+ var assert$b = minimalisticAssert;
29144
29148
 
29145
29149
  function BlockHash$4() {
29146
29150
  this.pending = null;
@@ -29185,7 +29189,7 @@ BlockHash$4.prototype.update = function update(msg, enc) {
29185
29189
 
29186
29190
  BlockHash$4.prototype.digest = function digest(enc) {
29187
29191
  this.update(this._pad());
29188
- assert$a(this.pending === null);
29192
+ assert$b(this.pending === null);
29189
29193
 
29190
29194
  return this._digest(enc);
29191
29195
  };
@@ -29358,7 +29362,7 @@ SHA1.prototype._digest = function digest(enc) {
29358
29362
  var utils$b = utils$f;
29359
29363
  var common$3 = common$6;
29360
29364
  var shaCommon = common$5;
29361
- var assert$9 = minimalisticAssert;
29365
+ var assert$a = minimalisticAssert;
29362
29366
 
29363
29367
  var sum32$1 = utils$b.sum32;
29364
29368
  var sum32_4$1 = utils$b.sum32_4;
@@ -29428,7 +29432,7 @@ SHA256$1.prototype._update = function _update(msg, start) {
29428
29432
  var g = this.h[6];
29429
29433
  var h = this.h[7];
29430
29434
 
29431
- assert$9(this.k.length === W.length);
29435
+ assert$a(this.k.length === W.length);
29432
29436
  for (i = 0; i < W.length; i++) {
29433
29437
  var T1 = sum32_5(h, s1_256(e), ch32(e, f, g), this.k[i], W[i]);
29434
29438
  var T2 = sum32$1(s0_256(a), maj32(a, b, c));
@@ -29489,7 +29493,7 @@ SHA224.prototype._digest = function digest(enc) {
29489
29493
 
29490
29494
  var utils$9 = utils$f;
29491
29495
  var common$2 = common$6;
29492
- var assert$8 = minimalisticAssert;
29496
+ var assert$9 = minimalisticAssert;
29493
29497
 
29494
29498
  var rotr64_hi = utils$9.rotr64_hi;
29495
29499
  var rotr64_lo = utils$9.rotr64_lo;
@@ -29624,7 +29628,7 @@ SHA512$1.prototype._update = function _update(msg, start) {
29624
29628
  var hh = this.h[14];
29625
29629
  var hl = this.h[15];
29626
29630
 
29627
- assert$8(this.k.length === W.length);
29631
+ assert$9(this.k.length === W.length);
29628
29632
  for (var i = 0; i < W.length; i += 2) {
29629
29633
  var c0_hi = hh;
29630
29634
  var c0_lo = hl;
@@ -30004,7 +30008,7 @@ var sh = [
30004
30008
  ];
30005
30009
 
30006
30010
  var utils$6 = utils$f;
30007
- var assert$7 = minimalisticAssert;
30011
+ var assert$8 = minimalisticAssert;
30008
30012
 
30009
30013
  function Hmac(hash, key, enc) {
30010
30014
  if (!(this instanceof Hmac))
@@ -30023,7 +30027,7 @@ Hmac.prototype._init = function init(key) {
30023
30027
  // Shorten key, if needed
30024
30028
  if (key.length > this.blockSize)
30025
30029
  key = new this.Hash().update(key).digest();
30026
- assert$7(key.length <= this.blockSize);
30030
+ assert$8(key.length <= this.blockSize);
30027
30031
 
30028
30032
  // Add padding to key
30029
30033
  for (var i = key.length; i < this.blockSize; i++)
@@ -31066,7 +31070,7 @@ function requireSecp256k1 () {
31066
31070
 
31067
31071
  var hash$1 = hash$2;
31068
31072
  var utils$5 = utils$k;
31069
- var assert$6 = minimalisticAssert;
31073
+ var assert$7 = minimalisticAssert;
31070
31074
 
31071
31075
  function HmacDRBG(options) {
31072
31076
  if (!(this instanceof HmacDRBG))
@@ -31085,7 +31089,7 @@ function HmacDRBG(options) {
31085
31089
  var entropy = utils$5.toArray(options.entropy, options.entropyEnc || 'hex');
31086
31090
  var nonce = utils$5.toArray(options.nonce, options.nonceEnc || 'hex');
31087
31091
  var pers = utils$5.toArray(options.pers, options.persEnc || 'hex');
31088
- assert$6(entropy.length >= (this.minEntropy / 8),
31092
+ assert$7(entropy.length >= (this.minEntropy / 8),
31089
31093
  'Not enough entropy. Minimum is: ' + this.minEntropy + ' bits');
31090
31094
  this._init(entropy, nonce, pers);
31091
31095
  }
@@ -31140,7 +31144,7 @@ HmacDRBG.prototype.reseed = function reseed(entropy, entropyEnc, add, addEnc) {
31140
31144
  entropy = utils$5.toArray(entropy, entropyEnc);
31141
31145
  add = utils$5.toArray(add, addEnc);
31142
31146
 
31143
- assert$6(entropy.length >= (this.minEntropy / 8),
31147
+ assert$7(entropy.length >= (this.minEntropy / 8),
31144
31148
  'Not enough entropy. Minimum is: ' + this.minEntropy + ' bits');
31145
31149
 
31146
31150
  this._update(entropy.concat(add || []));
@@ -31178,7 +31182,7 @@ HmacDRBG.prototype.generate = function generate(len, enc, add, addEnc) {
31178
31182
 
31179
31183
  var BN$5 = bnExports$1;
31180
31184
  var utils$4 = utils$l;
31181
- var assert$5 = utils$4.assert;
31185
+ var assert$6 = utils$4.assert;
31182
31186
 
31183
31187
  function KeyPair$2(ec, options) {
31184
31188
  this.ec = ec;
@@ -31263,10 +31267,10 @@ KeyPair$2.prototype._importPublic = function _importPublic(key, enc) {
31263
31267
  // Weierstrass/Edwards points on the other hand have both `x` and
31264
31268
  // `y` coordinates.
31265
31269
  if (this.ec.curve.type === 'mont') {
31266
- assert$5(key.x, 'Need x coordinate');
31270
+ assert$6(key.x, 'Need x coordinate');
31267
31271
  } else if (this.ec.curve.type === 'short' ||
31268
31272
  this.ec.curve.type === 'edwards') {
31269
- assert$5(key.x && key.y, 'Need both x and y coordinate');
31273
+ assert$6(key.x && key.y, 'Need both x and y coordinate');
31270
31274
  }
31271
31275
  this.pub = this.ec.curve.point(key.x, key.y);
31272
31276
  return;
@@ -31277,7 +31281,7 @@ KeyPair$2.prototype._importPublic = function _importPublic(key, enc) {
31277
31281
  // ECDH
31278
31282
  KeyPair$2.prototype.derive = function derive(pub) {
31279
31283
  if(!pub.validate()) {
31280
- assert$5(pub.validate(), 'public point not validated');
31284
+ assert$6(pub.validate(), 'public point not validated');
31281
31285
  }
31282
31286
  return pub.mul(this.priv).getX();
31283
31287
  };
@@ -31299,7 +31303,7 @@ KeyPair$2.prototype.inspect = function inspect() {
31299
31303
  var BN$4 = bnExports$1;
31300
31304
 
31301
31305
  var utils$3 = utils$l;
31302
- var assert$4 = utils$3.assert;
31306
+ var assert$5 = utils$3.assert;
31303
31307
 
31304
31308
  function Signature$2(options, enc) {
31305
31309
  if (options instanceof Signature$2)
@@ -31308,7 +31312,7 @@ function Signature$2(options, enc) {
31308
31312
  if (this._importDER(options, enc))
31309
31313
  return;
31310
31314
 
31311
- assert$4(options.r && options.s, 'Signature without r or s');
31315
+ assert$5(options.r && options.s, 'Signature without r or s');
31312
31316
  this.r = new BN$4(options.r, 16);
31313
31317
  this.s = new BN$4(options.s, 16);
31314
31318
  if (options.recoveryParam === undefined)
@@ -31713,7 +31717,7 @@ function requireEc () {
31713
31717
  }
31714
31718
 
31715
31719
  var utils$2 = utils$l;
31716
- var assert$3 = utils$2.assert;
31720
+ var assert$4 = utils$2.assert;
31717
31721
  var parseBytes$2 = utils$2.parseBytes;
31718
31722
  var cachedProperty$1 = utils$2.cachedProperty;
31719
31723
 
@@ -31787,7 +31791,7 @@ cachedProperty$1(KeyPair$1, 'messagePrefix', function messagePrefix() {
31787
31791
  });
31788
31792
 
31789
31793
  KeyPair$1.prototype.sign = function sign(message) {
31790
- assert$3(this._secret, 'KeyPair can only verify');
31794
+ assert$4(this._secret, 'KeyPair can only verify');
31791
31795
  return this.eddsa.sign(message, this);
31792
31796
  };
31793
31797
 
@@ -31796,7 +31800,7 @@ KeyPair$1.prototype.verify = function verify(message, sig) {
31796
31800
  };
31797
31801
 
31798
31802
  KeyPair$1.prototype.getSecret = function getSecret(enc) {
31799
- assert$3(this._secret, 'KeyPair is public only');
31803
+ assert$4(this._secret, 'KeyPair is public only');
31800
31804
  return utils$2.encode(this.secret(), enc);
31801
31805
  };
31802
31806
 
@@ -31808,7 +31812,7 @@ var key$1 = KeyPair$1;
31808
31812
 
31809
31813
  var BN$3 = bnExports$1;
31810
31814
  var utils$1 = utils$l;
31811
- var assert$2 = utils$1.assert;
31815
+ var assert$3 = utils$1.assert;
31812
31816
  var cachedProperty = utils$1.cachedProperty;
31813
31817
  var parseBytes$1 = utils$1.parseBytes;
31814
31818
 
@@ -31833,7 +31837,7 @@ function Signature$1(eddsa, sig) {
31833
31837
  };
31834
31838
  }
31835
31839
 
31836
- assert$2(sig.R && sig.S, 'Signature without R or S');
31840
+ assert$3(sig.R && sig.S, 'Signature without R or S');
31837
31841
 
31838
31842
  if (eddsa.isPoint(sig.R))
31839
31843
  this._R = sig.R;
@@ -31873,13 +31877,13 @@ var signature = Signature$1;
31873
31877
  var hash = hash$2;
31874
31878
  var curves = curves$1;
31875
31879
  var utils = utils$l;
31876
- var assert$1 = utils.assert;
31880
+ var assert$2 = utils.assert;
31877
31881
  var parseBytes = utils.parseBytes;
31878
31882
  var KeyPair = key$1;
31879
31883
  var Signature = signature;
31880
31884
 
31881
31885
  function EDDSA(curve) {
31882
- assert$1(curve === 'ed25519', 'only tested with ed25519 so far');
31886
+ assert$2(curve === 'ed25519', 'only tested with ed25519 so far');
31883
31887
 
31884
31888
  if (!(this instanceof EDDSA))
31885
31889
  return new EDDSA(curve);
@@ -32374,7 +32378,7 @@ EncoderBuffer$1.prototype.join = function join(out, offset) {
32374
32378
  const Reporter = reporter.Reporter;
32375
32379
  const EncoderBuffer = buffer.EncoderBuffer;
32376
32380
  const DecoderBuffer$1 = buffer.DecoderBuffer;
32377
- const assert = minimalisticAssert;
32381
+ const assert$1 = minimalisticAssert;
32378
32382
 
32379
32383
  // Supported tags
32380
32384
  const tags = [
@@ -32465,14 +32469,14 @@ Node$2.prototype._wrap = function wrap() {
32465
32469
  Node$2.prototype._init = function init(body) {
32466
32470
  const state = this._baseState;
32467
32471
 
32468
- assert(state.parent === null);
32472
+ assert$1(state.parent === null);
32469
32473
  body.call(this);
32470
32474
 
32471
32475
  // Filter children
32472
32476
  state.children = state.children.filter(function(child) {
32473
32477
  return child._baseState.parent === this;
32474
32478
  }, this);
32475
- assert.equal(state.children.length, 1, 'Root node can have only one child');
32479
+ assert$1.equal(state.children.length, 1, 'Root node can have only one child');
32476
32480
  };
32477
32481
 
32478
32482
  Node$2.prototype._useArgs = function useArgs(args) {
@@ -32487,7 +32491,7 @@ Node$2.prototype._useArgs = function useArgs(args) {
32487
32491
  }, this);
32488
32492
 
32489
32493
  if (children.length !== 0) {
32490
- assert(state.children === null);
32494
+ assert$1(state.children === null);
32491
32495
  state.children = children;
32492
32496
 
32493
32497
  // Replace parent to maintain backward link
@@ -32496,7 +32500,7 @@ Node$2.prototype._useArgs = function useArgs(args) {
32496
32500
  }, this);
32497
32501
  }
32498
32502
  if (args.length !== 0) {
32499
- assert(state.args === null);
32503
+ assert$1(state.args === null);
32500
32504
  state.args = args;
32501
32505
  state.reverseArgs = args.map(function(arg) {
32502
32506
  if (typeof arg !== 'object' || arg.constructor !== Object)
@@ -32534,7 +32538,7 @@ tags.forEach(function(tag) {
32534
32538
  const state = this._baseState;
32535
32539
  const args = Array.prototype.slice.call(arguments);
32536
32540
 
32537
- assert(state.tag === null);
32541
+ assert$1(state.tag === null);
32538
32542
  state.tag = tag;
32539
32543
 
32540
32544
  this._useArgs(args);
@@ -32544,10 +32548,10 @@ tags.forEach(function(tag) {
32544
32548
  });
32545
32549
 
32546
32550
  Node$2.prototype.use = function use(item) {
32547
- assert(item);
32551
+ assert$1(item);
32548
32552
  const state = this._baseState;
32549
32553
 
32550
- assert(state.use === null);
32554
+ assert$1(state.use === null);
32551
32555
  state.use = item;
32552
32556
 
32553
32557
  return this;
@@ -32564,7 +32568,7 @@ Node$2.prototype.optional = function optional() {
32564
32568
  Node$2.prototype.def = function def(val) {
32565
32569
  const state = this._baseState;
32566
32570
 
32567
- assert(state['default'] === null);
32571
+ assert$1(state['default'] === null);
32568
32572
  state['default'] = val;
32569
32573
  state.optional = true;
32570
32574
 
@@ -32574,7 +32578,7 @@ Node$2.prototype.def = function def(val) {
32574
32578
  Node$2.prototype.explicit = function explicit(num) {
32575
32579
  const state = this._baseState;
32576
32580
 
32577
- assert(state.explicit === null && state.implicit === null);
32581
+ assert$1(state.explicit === null && state.implicit === null);
32578
32582
  state.explicit = num;
32579
32583
 
32580
32584
  return this;
@@ -32583,7 +32587,7 @@ Node$2.prototype.explicit = function explicit(num) {
32583
32587
  Node$2.prototype.implicit = function implicit(num) {
32584
32588
  const state = this._baseState;
32585
32589
 
32586
- assert(state.explicit === null && state.implicit === null);
32590
+ assert$1(state.explicit === null && state.implicit === null);
32587
32591
  state.implicit = num;
32588
32592
 
32589
32593
  return this;
@@ -32604,7 +32608,7 @@ Node$2.prototype.obj = function obj() {
32604
32608
  Node$2.prototype.key = function key(newKey) {
32605
32609
  const state = this._baseState;
32606
32610
 
32607
- assert(state.key === null);
32611
+ assert$1(state.key === null);
32608
32612
  state.key = newKey;
32609
32613
 
32610
32614
  return this;
@@ -32621,7 +32625,7 @@ Node$2.prototype.any = function any() {
32621
32625
  Node$2.prototype.choice = function choice(obj) {
32622
32626
  const state = this._baseState;
32623
32627
 
32624
- assert(state.choice === null);
32628
+ assert$1(state.choice === null);
32625
32629
  state.choice = obj;
32626
32630
  this._useArgs(Object.keys(obj).map(function(key) {
32627
32631
  return obj[key];
@@ -32633,7 +32637,7 @@ Node$2.prototype.choice = function choice(obj) {
32633
32637
  Node$2.prototype.contains = function contains(item) {
32634
32638
  const state = this._baseState;
32635
32639
 
32636
- assert(state.use === null);
32640
+ assert$1(state.use === null);
32637
32641
  state.contains = item;
32638
32642
 
32639
32643
  return this;
@@ -32806,7 +32810,7 @@ Node$2.prototype._getUse = function _getUse(entity, obj) {
32806
32810
  const state = this._baseState;
32807
32811
  // Create altered use decoder if implicit is set
32808
32812
  state.useDecoder = this._use(entity, obj);
32809
- assert(state.useDecoder._baseState.parent === null);
32813
+ assert$1(state.useDecoder._baseState.parent === null);
32810
32814
  state.useDecoder = state.useDecoder._baseState.children[0];
32811
32815
  if (state.implicit !== state.useDecoder._baseState.implicit) {
32812
32816
  state.useDecoder = state.useDecoder.clone();
@@ -32967,7 +32971,7 @@ Node$2.prototype._encodeChoice = function encodeChoice(data, reporter) {
32967
32971
 
32968
32972
  const node = state.choice[data.type];
32969
32973
  if (!node) {
32970
- assert(
32974
+ assert$1(
32971
32975
  false,
32972
32976
  data.type + ' not found in ' +
32973
32977
  JSON.stringify(Object.keys(state.choice)));
@@ -36085,7 +36089,7 @@ const coerce = o => {
36085
36089
  throw new Error('Unknown type, must be binary type');
36086
36090
  };
36087
36091
 
36088
- const create$2 = (code, digest) => {
36092
+ const create$4 = (code, digest) => {
36089
36093
  const size = digest.byteLength;
36090
36094
  const sizeOffset = encodingLength(code);
36091
36095
  const digestOffset = sizeOffset + encodingLength(size);
@@ -36461,7 +36465,7 @@ class CID {
36461
36465
  switch (this.version) {
36462
36466
  case 0: {
36463
36467
  const {code, digest} = this.multihash;
36464
- const multihash = create$2(code, digest);
36468
+ const multihash = create$4(code, digest);
36465
36469
  return CID.createV1(this.code, multihash);
36466
36470
  }
36467
36471
  case 1: {
@@ -36884,7 +36888,7 @@ const createGetNode = (get, cache, chunker, codec, hasher) => {
36884
36888
  };
36885
36889
  return getNode;
36886
36890
  };
36887
- const create$1 = ({get, cache, chunker, list, codec, hasher, sorted}) => {
36891
+ const create$3 = ({get, cache, chunker, list, codec, hasher, sorted}) => {
36888
36892
  if (!sorted)
36889
36893
  list = list.sort(compare$2);
36890
36894
  const getNode = createGetNode(get, cache, chunker, codec, hasher);
@@ -36902,16 +36906,16 @@ const create$1 = ({get, cache, chunker, list, codec, hasher, sorted}) => {
36902
36906
  BranchClass: CIDSetBranch,
36903
36907
  BranchEntryClass: CIDNodeEntry
36904
36908
  };
36905
- return create$4(opts);
36909
+ return create$6(opts);
36906
36910
  };
36907
- const load$1 = ({cid, get, cache, chunker, codec, hasher, ...opts}) => {
36911
+ const load$3 = ({cid, get, cache, chunker, codec, hasher, ...opts}) => {
36908
36912
  const getNode = createGetNode(get, cache, chunker, codec, hasher);
36909
36913
  return getNode(cid);
36910
36914
  };
36911
36915
 
36912
36916
  // @ts-nocheck
36913
36917
 
36914
- const createBlock = (bytes, cid) => create$5({ cid, bytes, hasher: sha256$2, codec });
36918
+ const createBlock = (bytes, cid) => create$7({ cid, bytes, hasher: sha256$2, codec });
36915
36919
 
36916
36920
  const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
36917
36921
  const set = new Set();
@@ -36928,7 +36932,7 @@ const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root
36928
36932
  if (!eroot) throw new Error('cids does not include root')
36929
36933
  const list = [...set].map(s => CID$1.parse(s));
36930
36934
  let last;
36931
- for await (const node of create$1({ list, get, cache, chunker, hasher, codec: codec$1 })) {
36935
+ for await (const node of create$3({ list, get, cache, chunker, hasher, codec: codec$1 })) {
36932
36936
  const block = await node.block;
36933
36937
  yield block;
36934
36938
  last = block;
@@ -36944,7 +36948,7 @@ const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
36944
36948
  // console.log('decodedRoot', decodedRoot)
36945
36949
  const { value: [eroot, tree] } = decodedRoot;
36946
36950
  const rootBlock = await get(eroot); // should I decrypt?
36947
- const cidset = await load$1({ cid: tree, get, cache, chunker, codec, hasher });
36951
+ const cidset = await load$3({ cid: tree, get, cache, chunker, codec, hasher });
36948
36952
  const { result: nodes } = await cidset.getAllEntries();
36949
36953
  const unwrap = async (eblock) => {
36950
36954
  const { bytes, cid } = await decrypt$1({ ...eblock, key }).catch(e => {
@@ -36962,321 +36966,2925 @@ const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
36962
36966
  yield unwrap(rootBlock);
36963
36967
  };
36964
36968
 
36965
- // @ts-nocheck
36966
- // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
36967
- // MIT License Copyright (c) 2020 Dumitru Uzun
36968
- // Permission is hereby granted, free of charge, to any person obtaining a copy
36969
- // of this software and associated documentation files (the "Software"), to deal
36970
- // in the Software without restriction, including without limitation the rights
36971
- // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
36972
- // copies of the Software, and to permit persons to whom the Software is
36973
- // furnished to do so, subject to the following conditions:
36969
+ var bitUtils = {};
36974
36970
 
36975
- // The above copyright notice and this permission notice shall be included in all
36976
- // copies or substantial portions of the Software.
36971
+ /**
36972
+ * @param {Uint8Array} bytes
36973
+ * @param {number} bitStart
36974
+ * @param {number} bitLength
36975
+ * @returns {number}
36976
+ */
36977
36977
 
36978
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
36979
- // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
36980
- // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
36981
- // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
36982
- // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
36983
- // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
36984
- // SOFTWARE.
36978
+ function bitSequence$1 (bytes, bitStart, bitLength) {
36979
+ // making an assumption that bytes is an Array-like that will give us one
36980
+ // byte per element, so either an Array full of 8-bit integers or a
36981
+ // Uint8Array or a Node.js Buffer, or something like that
36985
36982
 
36986
- // import {
36987
- // isLittleEndian, switchEndianness32
36988
- // } from 'string-encode'
36983
+ const startOffset = bitStart % 8;
36984
+ const byteCount = Math.ceil((startOffset + bitLength) / 8);
36985
+ const byteStart = bitStart >> 3;
36986
+ const endOffset = byteCount * 8 - bitLength - startOffset;
36987
+
36988
+ let result = 0;
36989
+
36990
+ for (let i = 0; i < byteCount; i++) {
36991
+ let local = bytes[byteStart + i];
36992
+ let shift = 0;
36993
+ let localBitLength = 8; // how many bits of this byte we are extracting
36994
+
36995
+ if (i === 0) {
36996
+ localBitLength -= startOffset;
36997
+ }
36998
+
36999
+ if (i === byteCount - 1) {
37000
+ localBitLength -= endOffset;
37001
+ shift = endOffset;
37002
+ local >>= shift; // take off the trailing bits
37003
+ }
37004
+
37005
+ if (localBitLength < 8) {
37006
+ const mask = (1 << localBitLength) - 1;
37007
+ local &= mask; // mask off the leading bits
37008
+ }
37009
+
37010
+ if (i < 3) {
37011
+ if (shift < 8) {
37012
+ result = result << (8 - shift);
37013
+ }
37014
+ result |= local;
37015
+ } else {
37016
+ // once we start shifting beyond the 24-bit range we get to signed integers
37017
+ // and our bitwise operations break down, because JavaScript. But if we do
37018
+ // it without bitwise operations then we can cheat into very large numbers
37019
+ if (shift < 8) {
37020
+ result = result * Math.pow(2, (8 - shift));
37021
+ }
37022
+ result += local;
37023
+ }
37024
+ }
37025
+
37026
+ return result
37027
+ }
37028
+
37029
+ var bitSequence_1 = bitSequence$1;
37030
+
37031
+ // Copyright Rod Vagg; Licensed under the Apache License, Version 2.0, see README.md for more information
37032
+
37033
+ const bitSequence = bitSequence_1;
36989
37034
 
36990
37035
  /**
36991
- * SHA1 on binary array
36992
- *
36993
- * @param {Uint8Array} b Data to hash
36994
- *
36995
- * @return {Uint8Array} sha1 hash
36996
- */
36997
- function rawSha1 (b) {
36998
- let i = b.byteLength;
36999
- let bs = 0;
37000
- let A; let B; let C; let D; let G;
37001
- const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
37002
- const W = new Uint32Array(80);
37003
- const nrWords = (i / 4 + 2) | 15;
37004
- const words = new Uint32Array(nrWords + 1);
37005
- let j;
37036
+ * @param {Uint8Array} hash
37037
+ * @param {number} depth
37038
+ * @param {number} nbits
37039
+ * @returns {number}
37040
+ */
37041
+ function mask$1 (hash, depth, nbits) {
37042
+ return bitSequence(hash, depth * nbits, nbits)
37043
+ }
37006
37044
 
37007
- words[nrWords] = i * 8;
37008
- words[i >> 2] |= 0x80 << (~i << 3);
37009
- for (;i--;) {
37010
- words[i >> 2] |= b[i] << (~i << 3);
37045
+ /**
37046
+ * set the `position` bit in the given `bitmap` to be `set` (truthy=1, falsey=0)
37047
+ * @param {Uint8Array} bitmap
37048
+ * @param {number} position
37049
+ * @param {boolean|0|1} set
37050
+ * @returns {Uint8Array}
37051
+ */
37052
+ function setBit$1 (bitmap, position, set) {
37053
+ // if we assume that `bitmap` is already the opposite of `set`, we could skip this check
37054
+ const byte = Math.floor(position / 8);
37055
+ const offset = position % 8;
37056
+ const has = bitmapHas$1(bitmap, undefined, byte, offset);
37057
+ if ((set && !has) || (!set && has)) {
37058
+ const newBitmap = Uint8Array.from(bitmap);
37059
+ let b = bitmap[byte];
37060
+ if (set) {
37061
+ b |= (1 << offset);
37062
+ } else {
37063
+ b ^= (1 << offset);
37064
+ }
37065
+ newBitmap[byte] = b;
37066
+ return newBitmap
37011
37067
  }
37068
+ return bitmap
37069
+ }
37012
37070
 
37013
- for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
37014
- for (i = 0; i < 80;
37015
- A[0] = (
37016
- G = ((b = A[0]) << 5 | b >>> 27) +
37017
- A[4] +
37018
- (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
37019
- 0x5A827999,
37020
- B = A[1],
37021
- C = A[2],
37022
- D = A[3],
37023
- G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
37024
- ? j !== 2
37025
- ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
37026
- : (B & C | B & D | C & D) + 0x34994343
37027
- : B & C | ~B & D
37028
- )
37029
- )
37030
- , A[1] = b
37031
- , A[2] = B << 30 | B >>> 2
37032
- , A[3] = C
37033
- , A[4] = D
37034
- , ++i
37035
- ) {
37036
- G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
37071
+ /**
37072
+ * check whether `bitmap` has a `1` at the given `position` bit
37073
+ * @param {Uint8Array} bitmap
37074
+ * @param {number} [position]
37075
+ * @param {number} [byte]
37076
+ * @param {number} [offset]
37077
+ * @returns {boolean}
37078
+ */
37079
+ function bitmapHas$1 (bitmap, position, byte, offset) {
37080
+ if (typeof byte !== 'number' || typeof offset !== 'number') {
37081
+ /* c8 ignore next 3 */
37082
+ if (position === undefined) {
37083
+ throw new Error('`position` expected')
37037
37084
  }
37085
+ byte = Math.floor(position / 8);
37086
+ offset = position % 8;
37087
+ }
37088
+ return ((bitmap[byte] >> offset) & 1) === 1
37089
+ }
37038
37090
 
37039
- for (i = 5; i;) H[--i] = H[i] + A[i];
37091
+ /**
37092
+ * count how many `1` bits are in `bitmap up until `position`
37093
+ * tells us where in the compacted element array an element should live
37094
+ * TODO: optimize with a popcount on a `position` shifted bitmap?
37095
+ * assumes bitmapHas(bitmap, position) == true, hence the i<position and +1 in the return
37096
+ * @param {Uint8Array} bitmap
37097
+ * @param {number} position
37098
+ * @returns {number}
37099
+ */
37100
+ function index$1 (bitmap, position) {
37101
+ let t = 0;
37102
+ for (let i = 0; i < position; i++) {
37103
+ if (bitmapHas$1(bitmap, i)) {
37104
+ t++;
37105
+ }
37040
37106
  }
37107
+ return t
37108
+ }
37041
37109
 
37042
- // if (isLittleEndian()) {
37043
- // H = H.map(switchEndianness32)
37044
- // }
37110
+ bitUtils.mask = mask$1;
37111
+ bitUtils.setBit = setBit$1;
37112
+ bitUtils.bitmapHas = bitmapHas$1;
37113
+ bitUtils.index = index$1;
37045
37114
 
37046
- return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
37115
+ // Copyright Rod Vagg; Licensed under the Apache License, Version 2.0, see README.md for more information
37116
+
37117
+ const { mask, setBit, bitmapHas, index } = bitUtils;
37118
+
37119
+ const defaultBitWidth = 8; // 2^8 = 256 buckets or children per node
37120
+ const defaultBucketSize = 5; // array size for a bucket of values
37121
+
37122
+ /**
37123
+ * @template T
37124
+ * @typedef {import('./interface').Store<T>} Store<T>
37125
+ */
37126
+ /**
37127
+ * @typedef {import('./interface').Config} Config
37128
+ * @typedef {import('./interface').Options} Options
37129
+ * @typedef {import('./interface').SerializedKV} SerializedKV
37130
+ * @typedef {import('./interface').SerializedElement} SerializedElement
37131
+ * @typedef {import('./interface').SerializedNode} SerializedNode
37132
+ * @typedef {import('./interface').SerializedRoot} SerializedRoot
37133
+ * @typedef {(inp:Uint8Array)=>(Uint8Array|Promise<Uint8Array>)} Hasher
37134
+ * @typedef {{ hasher: Hasher, hashBytes: number }[]} Registry
37135
+ * @typedef {(link:any)=>boolean} IsLink
37136
+ * @typedef {readonly Element[]} ReadonlyElement
37137
+ * @typedef {{data?: { found: boolean, elementAt: number, element: Element, bucketIndex?: number, bucketEntry?: KV }, link?: { elementAt: number, element: Element }}} FoundElement
37138
+ */
37139
+
37140
+ /**
37141
+ * @type {Registry}
37142
+ * @ignore
37143
+ */
37144
+ const hasherRegistry = [];
37145
+
37146
+ const textEncoder = new TextEncoder();
37147
+
37148
+ /**
37149
+ * @ignore
37150
+ * @param {boolean} condition
37151
+ * @param {string} [message]
37152
+ */
37153
+ function assert (condition, message) {
37154
+ if (!condition) {
37155
+ throw new Error(message || 'Unexpected error')
37156
+ }
37047
37157
  }
37048
37158
 
37049
- const chunker = bf(30);
37159
+ /**
37160
+ * ```js
37161
+ * let map = await iamap.create(store, options)
37162
+ * ```
37163
+ *
37164
+ * Create a new IAMap instance with a backing store. This operation is asynchronous and returns a `Promise` that
37165
+ * resolves to a `IAMap` instance.
37166
+ *
37167
+ * @name iamap.create
37168
+ * @function
37169
+ * @async
37170
+ * @template T
37171
+ * @param {Store<T>} store - A backing store for this Map. The store should be able to save and load a serialised
37172
+ * form of a single node of a IAMap which is provided as a plain object representation. `store.save(node)` takes
37173
+ * a serialisable node and should return a content address / ID for the node. `store.load(id)` serves the inverse
37174
+ * purpose, taking a content address / ID as provided by a `save()` operation and returning the serialised form
37175
+ * of a node which can be instantiated by IAMap. In addition, two identifier handling methods are needed:
37176
+ * `store.isEqual(id1, id2)` is required to check the equality of the two content addresses / IDs
37177
+ * (which may be custom for that data type). `store.isLink(obj)` is used to determine if an object is a link type
37178
+ * that can be used for `load()` operations on the store. It is important that link types be different to standard
37179
+ * JavaScript arrays and don't share properties used by the serialized form of an IAMap (e.g. such that a
37180
+ * `typeof obj === 'object' && Array.isArray(obj.data)`) .This is because a node data element may either be a link to
37181
+ * a child node, or an inlined child node, so `isLink()` should be able to determine if an object is a link, and if not,
37182
+ * `Array.isArray(obj)` will determine if that data element is a bucket of elements, or the above object check be able
37183
+ * to determine that an inline child node exists at the data element.
37184
+ * The `store` object should take the following form:
37185
+ * `{ async save(node):id, async load(id):node, isEqual(id,id):boolean, isLink(obj):boolean }`
37186
+ * A `store` should throw an appropriately informative error when a node that is requested does not exist in the backing
37187
+ * store.
37188
+ *
37189
+ * Options:
37190
+ * - hashAlg (number) - A [multicodec](https://github.com/multiformats/multicodec/blob/master/table.csv)
37191
+ * hash function identifier, e.g. `0x23` for `murmur3-32`. Hash functions must be registered with {@link iamap.registerHasher}.
37192
+ * - bitWidth (number, default 8) - The number of bits to extract from the hash to form a data element index at
37193
+ * each level of the Map, e.g. a bitWidth of 5 will extract 5 bits to be used as the data element index, since 2^5=32,
37194
+ * each node will store up to 32 data elements (child nodes and/or entry buckets). The maximum depth of the Map is
37195
+ * determined by `floor((hashBytes * 8) / bitWidth)` where `hashBytes` is the number of bytes the hash function
37196
+ * produces, e.g. `hashBytes=32` and `bitWidth=5` yields a maximum depth of 51 nodes. The maximum `bitWidth`
37197
+ * currently allowed is `8` which will store 256 data elements in each node.
37198
+ * - bucketSize (number, default 5) - The maximum number of collisions acceptable at each level of the Map. A
37199
+ * collision in the `bitWidth` index at a given depth will result in entries stored in a bucket (array). Once the
37200
+ * bucket exceeds `bucketSize`, a new child node is created for that index and all entries in the bucket are
37201
+ * pushed
37202
+ *
37203
+ * @param {Options} options - Options for this IAMap
37204
+ * @param {Uint8Array} [map] - for internal use
37205
+ * @param {number} [depth] - for internal use
37206
+ * @param {Element[]} [data] - for internal use
37207
+ */
37208
+ async function create$2 (store, options, map, depth, data) {
37209
+ // map, depth and data are intended for internal use
37210
+ const newNode = new IAMap(store, options, map, depth, data);
37211
+ return save(store, newNode)
37212
+ }
37050
37213
 
37051
- const NO_ENCRYPT = typeof process !== 'undefined' && !!process.env?.NO_ENCRYPT;
37052
- // ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT
37214
+ /**
37215
+ * ```js
37216
+ * let map = await iamap.load(store, id)
37217
+ * ```
37218
+ *
37219
+ * Create a IAMap instance loaded from a serialised form in a backing store. See {@link iamap.create}.
37220
+ *
37221
+ * @name iamap.load
37222
+ * @function
37223
+ * @async
37224
+ * @template T
37225
+ * @param {Store<T>} store - A backing store for this Map. See {@link iamap.create}.
37226
+ * @param {any} id - An content address / ID understood by the backing `store`.
37227
+ * @param {number} [depth=0]
37228
+ * @param {Options} [options]
37229
+ */
37230
+ async function load$2 (store, id, depth = 0, options) {
37231
+ // depth and options are internal arguments that the user doesn't need to interact with
37232
+ if (depth !== 0 && typeof options !== 'object') {
37233
+ throw new Error('Cannot load() without options at depth > 0')
37234
+ }
37235
+ const serialized = await store.load(id);
37236
+ return fromSerializable(store, id, serialized, options, depth)
37237
+ }
37053
37238
 
37054
- class Valet {
37055
- idb = null
37056
- name = null
37057
- uploadQueue = null
37058
- alreadyEnqueued = new Set()
37059
- keyMaterial = null
37060
- keyId = 'null'
37239
+ /**
37240
+ * ```js
37241
+ * iamap.registerHasher(hashAlg, hashBytes, hasher)
37242
+ * ```
37243
+ *
37244
+ * Register a new hash function. IAMap has no hash functions by default, at least one is required to create a new
37245
+ * IAMap.
37246
+ *
37247
+ * @name iamap.registerHasher
37248
+ * @function
37249
+ * @param {number} hashAlg - A [multicodec](https://github.com/multiformats/multicodec/blob/master/table.csv) hash
37250
+ * function identifier **number**, e.g. `0x23` for `murmur3-32`.
37251
+ * @param {number} hashBytes - The number of bytes to use from the result of the `hasher()` function (e.g. `32`)
37252
+ * @param {Hasher} hasher - A hash function that takes a `Uint8Array` derived from the `key` values used for this
37253
+ * Map and returns a `Uint8Array` (or a `Uint8Array`-like, such that each data element of the array contains a single byte value). The function
37254
+ * may or may not be asynchronous but will be called with an `await`.
37255
+ */
37256
+ function registerHasher (hashAlg, hashBytes, hasher) {
37257
+ if (!Number.isInteger(hashAlg)) {
37258
+ throw new Error('Invalid `hashAlg`')
37259
+ }
37260
+ if (!Number.isInteger(hashBytes)) {
37261
+ throw new TypeError('Invalid `hashBytes`')
37262
+ }
37263
+ if (typeof hasher !== 'function') {
37264
+ throw new TypeError('Invalid `hasher` function }')
37265
+ }
37266
+ hasherRegistry[hashAlg] = { hashBytes, hasher };
37267
+ }
37061
37268
 
37269
+ // simple stable key/value representation
37270
+ /**
37271
+ * @ignore
37272
+ */
37273
+ class KV {
37062
37274
  /**
37063
- * Function installed by the database to upload car files
37064
- * @type {null|function(string, Uint8Array):Promise<void>}
37275
+ * @ignore
37276
+ * @param {Uint8Array} key
37277
+ * @param {any} value
37065
37278
  */
37066
- uploadFunction = null
37067
-
37068
- constructor (name = 'default', keyMaterial) {
37069
- this.name = name;
37070
- this.setKeyMaterial(keyMaterial);
37071
- this.uploadQueue = cargoQueue(async (tasks, callback) => {
37072
- // console.log(
37073
- // 'queue worker',
37074
- // tasks.length,
37075
- // tasks.reduce((acc, t) => acc + t.value.length, 0)
37076
- // )
37077
- if (this.uploadFunction) {
37078
- // todo we can coalesce these into a single car file
37079
- return await this.withDB(async db => {
37080
- for (const task of tasks) {
37081
- await this.uploadFunction(task.carCid, task.value);
37082
- // update the indexedb to mark this car as no longer pending
37083
- const carMeta = await db.get('cidToCar', task.carCid);
37084
- delete carMeta.pending;
37085
- await db.put('cidToCar', carMeta);
37086
- }
37087
- })
37088
- }
37089
- callback();
37090
- });
37279
+ constructor (key, value) {
37280
+ this.key = key;
37281
+ this.value = value;
37282
+ }
37091
37283
 
37092
- this.uploadQueue.drain(async () => {
37093
- return await this.withDB(async db => {
37094
- const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
37095
- for (const carKey of carKeys) {
37096
- await this.uploadFunction(carKey, await db.get('cars', carKey));
37097
- const carMeta = await db.get('cidToCar', carKey);
37098
- delete carMeta.pending;
37099
- await db.put('cidToCar', carMeta);
37100
- }
37101
- })
37102
- });
37284
+ /**
37285
+ * @ignore
37286
+ * @returns {SerializedKV}
37287
+ */
37288
+ toSerializable () {
37289
+ return [this.key, this.value]
37103
37290
  }
37291
+ }
37104
37292
 
37105
- getKeyMaterial () {
37106
- return this.keyMaterial
37293
+ /**
37294
+ * @ignore
37295
+ * @param {SerializedKV} obj
37296
+ * @returns {KV}
37297
+ */
37298
+ KV.fromSerializable = function (obj) {
37299
+ assert(Array.isArray(obj));
37300
+ assert(obj.length === 2);
37301
+ return new KV(obj[0], obj[1])
37302
+ };
37303
+
37304
+ // a element in the data array that each node holds, each element could be either a container of
37305
+ // an array (bucket) of KVs or a link to a child node
37306
+ class Element {
37307
+ /**
37308
+ * @ignore
37309
+ * @param {KV[]} [bucket]
37310
+ * @param {any} [link]
37311
+ */
37312
+ constructor (bucket, link) {
37313
+ this.bucket = bucket || null;
37314
+ this.link = link !== undefined ? link : null;
37315
+ assert((this.bucket === null) === (this.link !== null));
37107
37316
  }
37108
37317
 
37109
- setKeyMaterial (km) {
37110
- if (km && !NO_ENCRYPT) {
37111
- const hex = Uint8Array.from(Buffer$G.from(km, 'hex'));
37112
- this.keyMaterial = km;
37113
- const hash = rawSha1(hex);
37114
- this.keyId = Buffer$G.from(hash).toString('hex');
37318
+ /**
37319
+ * @ignore
37320
+ * @returns {SerializedElement}
37321
+ */
37322
+ toSerializable () {
37323
+ if (this.bucket) {
37324
+ return this.bucket.map((c) => {
37325
+ return c.toSerializable()
37326
+ })
37115
37327
  } else {
37116
- this.keyMaterial = null;
37117
- this.keyId = 'null';
37328
+ assert(!IAMap.isIAMap(this.link)); // TODO: inline here
37329
+ return this.link
37118
37330
  }
37119
- // console.trace('keyId', this.name, this.keyId)
37120
37331
  }
37332
+ }
37333
+
37334
+ /**
37335
+ * @ignore
37336
+ * @param {IsLink} isLink
37337
+ * @param {any} obj
37338
+ * @returns {Element}
37339
+ */
37340
+ Element.fromSerializable = function (isLink, obj) {
37341
+ if (isLink(obj)) {
37342
+ return new Element(undefined, obj)
37343
+ } else if (Array.isArray(obj)) {
37344
+ return new Element(obj.map(KV.fromSerializable))
37345
+ }
37346
+ throw new Error('Unexpected error: badly formed data element')
37347
+ };
37121
37348
 
37349
+ /**
37350
+ * Immutable Asynchronous Map
37351
+ *
37352
+ * The `IAMap` constructor should not be used directly. Use `iamap.create()` or `iamap.load()` to instantiate.
37353
+ *
37354
+ * @class
37355
+ * @template T
37356
+ * @property {any} id - A unique identifier for this `IAMap` instance. IDs are generated by the backing store and
37357
+ * are returned on `save()` operations.
37358
+ * @property {number} config.hashAlg - The hash function used by this `IAMap` instance. See {@link iamap.create} for more
37359
+ * details.
37360
+ * @property {number} config.bitWidth - The number of bits used at each level of this `IAMap`. See {@link iamap.create}
37361
+ * for more details.
37362
+ * @property {number} config.bucketSize - TThe maximum number of collisions acceptable at each level of the Map.
37363
+ * @property {Uint8Array} [map=Uint8Array] - Bitmap indicating which slots are occupied by data entries or child node links,
37364
+ * each data entry contains an bucket of entries. Must be the appropriate size for `config.bitWidth`
37365
+ * (`2 ** config.bitWith / 8` bytes).
37366
+ * @property {number} [depth=0] - Depth of the current node in the IAMap, `depth` is used to extract bits from the
37367
+ * key hashes to locate slots
37368
+ * @property {Array} [data=[]] - Array of data elements (an internal `Element` type), each of which contains a
37369
+ * bucket of entries or an ID of a child node
37370
+ * See {@link iamap.create} for more details.
37371
+ */
37372
+ class IAMap {
37122
37373
  /**
37123
- * Group the blocks into a car and write it to the valet.
37124
- * @param {import('./blockstore.js').InnerBlockstore} innerBlockstore
37125
- * @param {Set<string>} cids
37126
- * @returns {Promise<void>}
37127
- * @memberof Valet
37374
+ * @ignore
37375
+ * @param {Store<T>} store
37376
+ * @param {Options} [options]
37377
+ * @param {Uint8Array} [map]
37378
+ * @param {number} [depth]
37379
+ * @param {Element[]} [data]
37128
37380
  */
37129
- async writeTransaction (innerBlockstore, cids) {
37130
- if (innerBlockstore.lastCid) {
37131
- if (this.keyMaterial) {
37132
- // console.log('encrypting car', innerBlockstore.label)
37133
- // should we pass cids in instead of iterating frin innerBlockstore?
37134
- const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
37135
- await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
37136
- } else {
37137
- const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
37138
- await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
37381
+ constructor (store, options, map, depth, data) {
37382
+ if (!store || typeof store.save !== 'function' ||
37383
+ typeof store.load !== 'function' ||
37384
+ typeof store.isLink !== 'function' ||
37385
+ typeof store.isEqual !== 'function') {
37386
+ throw new TypeError('Invalid `store` option, must be of type: { save(node):id, load(id):node, isEqual(id,id):boolean, isLink(obj):boolean }')
37387
+ }
37388
+ this.store = store;
37389
+
37390
+ /**
37391
+ * @ignore
37392
+ * @type {any|null}
37393
+ */
37394
+ this.id = null;
37395
+ this.config = buildConfig(options);
37396
+
37397
+ const hashBytes = hasherRegistry[this.config.hashAlg].hashBytes;
37398
+
37399
+ if (map !== undefined && !(map instanceof Uint8Array)) {
37400
+ throw new TypeError('`map` must be a Uint8Array')
37401
+ }
37402
+ const mapLength = Math.ceil(Math.pow(2, this.config.bitWidth) / 8);
37403
+ if (map !== undefined && map.length !== mapLength) {
37404
+ throw new Error('`map` must be a Uint8Array of length ' + mapLength)
37405
+ }
37406
+ this.map = map || new Uint8Array(mapLength);
37407
+
37408
+ if (depth !== undefined && (!Number.isInteger(depth) || depth < 0)) {
37409
+ throw new TypeError('`depth` must be an integer >= 0')
37410
+ }
37411
+ this.depth = depth || 0;
37412
+ if (this.depth > Math.floor((hashBytes * 8) / this.config.bitWidth)) {
37413
+ // our hasher only has `hashBytes` to work with and we take off `bitWidth` bits with each level
37414
+ // e.g. 32-byte hash gives us a maximum depth of 51 levels
37415
+ throw new Error('Overflow: maximum tree depth reached')
37416
+ }
37417
+
37418
+ /**
37419
+ * @ignore
37420
+ * @type {ReadonlyElement}
37421
+ */
37422
+ this.data = Object.freeze(data || []);
37423
+ for (const e of this.data) {
37424
+ if (!(e instanceof Element)) {
37425
+ throw new TypeError('`data` array must contain only `Element` types')
37139
37426
  }
37140
- } else {
37141
- throw new Error('missing lastCid for car header')
37142
37427
  }
37143
37428
  }
37144
37429
 
37145
- withDB = async dbWorkFun => {
37146
- if (!this.idb) {
37147
- this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
37148
- upgrade (db, oldVersion, newVersion, transaction) {
37149
- if (oldVersion < 1) {
37150
- db.createObjectStore('cars'); // todo use database name
37151
- const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
37152
- cidToCar.createIndex('cids', 'cids', { multiEntry: true });
37430
+ /**
37431
+ * Asynchronously create a new `IAMap` instance identical to this one but with `key` set to `value`.
37432
+ *
37433
+ * @param {(string|Uint8Array)} key - A key for the `value` being set whereby that same `value` may
37434
+ * be retrieved with a `get()` operation with the same `key`. The type of the `key` object should either be a
37435
+ * `Uint8Array` or be convertable to a `Uint8Array` via `TextEncoder.
37436
+ * @param {any} value - Any value that can be stored in the backing store. A value could be a serialisable object
37437
+ * or an address or content address or other kind of link to the actual value.
37438
+ * @param {Uint8Array} [_cachedHash] - for internal use
37439
+ * @returns {Promise<IAMap<T>>} A `Promise` containing a new `IAMap` that contains the new key/value pair.
37440
+ * @async
37441
+ */
37442
+ async set (key, value, _cachedHash) {
37443
+ if (!(key instanceof Uint8Array)) {
37444
+ key = textEncoder.encode(key);
37445
+ }
37446
+ const hash = _cachedHash instanceof Uint8Array ? _cachedHash : await hasher(this)(key);
37447
+ const bitpos = mask(hash, this.depth, this.config.bitWidth);
37448
+
37449
+ if (bitmapHas(this.map, bitpos)) { // should be in a bucket in this node
37450
+ const { data, link } = findElement(this, bitpos, key);
37451
+ if (data) {
37452
+ if (data.found) {
37453
+ /* c8 ignore next 3 */
37454
+ if (data.bucketIndex === undefined || data.bucketEntry === undefined) {
37455
+ throw new Error('Unexpected error')
37153
37456
  }
37154
- if (oldVersion < 2) {
37155
- const cidToCar = transaction.objectStore('cidToCar');
37156
- cidToCar.createIndex('pending', 'pending');
37457
+ if (data.bucketEntry.value === value) {
37458
+ return this // no change, identical value
37459
+ }
37460
+ // replace entry for this key with a new value
37461
+ // note that === will fail for two complex objects representing the same data so we may end up
37462
+ // with a node of the same ID anyway
37463
+ return updateBucket(this, data.elementAt, data.bucketIndex, key, value)
37464
+ } else {
37465
+ /* c8 ignore next 3 */
37466
+ if (!data.element.bucket) {
37467
+ throw new Error('Unexpected error')
37157
37468
  }
37469
+ if (data.element.bucket.length >= this.config.bucketSize) {
37470
+ // too many collisions at this level, replace a bucket with a child node
37471
+ return (await replaceBucketWithNode(this, data.elementAt)).set(key, value, hash)
37472
+ }
37473
+ // insert into the bucket and sort it
37474
+ return updateBucket(this, data.elementAt, -1, key, value)
37158
37475
  }
37159
- });
37476
+ } else if (link) {
37477
+ const child = await load$2(this.store, link.element.link, this.depth + 1, this.config);
37478
+ assert(!!child);
37479
+ const newChild = await child.set(key, value, hash);
37480
+ return updateNode(this, link.elementAt, newChild)
37481
+ /* c8 ignore next 3 */
37482
+ } else {
37483
+ throw new Error('Unexpected error')
37484
+ }
37485
+ } else { // we don't have an element for this hash portion, make one
37486
+ return addNewElement(this, bitpos, key, value)
37160
37487
  }
37161
- return await dbWorkFun(this.idb)
37162
37488
  }
37163
37489
 
37164
37490
  /**
37165
- * Iterate over all blocks in the store.
37491
+ * Asynchronously find and return a value for the given `key` if it exists within this `IAMap`.
37166
37492
  *
37167
- * @yields {{cid: string, value: Uint8Array}}
37168
- * @returns {AsyncGenerator<any, any, any>}
37493
+ * @param {string|Uint8Array} key - A key for the value being sought. See {@link IAMap#set} for
37494
+ * details about acceptable `key` types.
37495
+ * @param {Uint8Array} [_cachedHash] - for internal use
37496
+ * @returns {Promise<any>} A `Promise` that resolves to the value being sought if that value exists within this `IAMap`. If the
37497
+ * key is not found in this `IAMap`, the `Promise` will resolve to `undefined`.
37498
+ * @async
37169
37499
  */
37170
- async * cids () {
37171
- // console.log('valet cids')
37172
- const db = await this.withDB(async db => db);
37173
- const tx = db.transaction(['cidToCar'], 'readonly');
37174
- let cursor = await tx.store.openCursor();
37175
- while (cursor) {
37176
- yield { cid: cursor.key, car: cursor.value.car };
37177
- cursor = await cursor.continue();
37500
+ async get (key, _cachedHash) {
37501
+ if (!(key instanceof Uint8Array)) {
37502
+ key = textEncoder.encode(key);
37503
+ }
37504
+ const hash = _cachedHash instanceof Uint8Array ? _cachedHash : await hasher(this)(key);
37505
+ const bitpos = mask(hash, this.depth, this.config.bitWidth);
37506
+ if (bitmapHas(this.map, bitpos)) { // should be in a bucket in this node
37507
+ const { data, link } = findElement(this, bitpos, key);
37508
+ if (data) {
37509
+ if (data.found) {
37510
+ /* c8 ignore next 3 */
37511
+ if (data.bucketIndex === undefined || data.bucketEntry === undefined) {
37512
+ throw new Error('Unexpected error')
37513
+ }
37514
+ return data.bucketEntry.value
37515
+ }
37516
+ return undefined // not found
37517
+ } else if (link) {
37518
+ const child = await load$2(this.store, link.element.link, this.depth + 1, this.config);
37519
+ assert(!!child);
37520
+ return await child.get(key, hash)
37521
+ /* c8 ignore next 3 */
37522
+ } else {
37523
+ throw new Error('Unexpected error')
37524
+ }
37525
+ } else { // we don't have an element for this hash portion, not found
37526
+ return undefined
37527
+ }
37528
+
37529
+ /*
37530
+ const traversal = traverseGet(this, key, this.store.isEqual, this.store.isLink, this.depth)
37531
+ while (true) {
37532
+ const nextId = traversal.traverse()
37533
+ if (!nextId) {
37534
+ return traversal.value()
37535
+ }
37536
+ const child = await this.store.load(nextId)
37537
+ assert(!!child)
37538
+ traversal.next(child)
37178
37539
  }
37540
+ */
37179
37541
  }
37180
37542
 
37181
37543
  /**
37544
+ * Asynchronously find and return a boolean indicating whether the given `key` exists within this `IAMap`
37182
37545
  *
37183
- * @param {string} carCid
37184
- * @param {*} value
37546
+ * @param {string|Uint8Array} key - A key to check for existence within this `IAMap`. See
37547
+ * {@link IAMap#set} for details about acceptable `key` types.
37548
+ * @returns {Promise<boolean>} A `Promise` that resolves to either `true` or `false` depending on whether the `key` exists
37549
+ * within this `IAMap`.
37550
+ * @async
37185
37551
  */
37186
- async parkCar (carCid, value, cids) {
37187
- await this.withDB(async db => {
37188
- const tx = db.transaction(['cars', 'cidToCar'], 'readwrite');
37189
- await tx.objectStore('cars').put(value, carCid);
37190
- await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) });
37191
- return await tx.done
37192
- });
37193
- // console.log('parked car', carCid, value.length, Array.from(cids))
37194
- // upload to web3.storage if we have credentials
37195
- if (this.uploadFunction) {
37196
- if (this.alreadyEnqueued.has(carCid)) {
37197
- // console.log('already enqueued', carCid)
37198
- return
37552
+ async has (key) {
37553
+ return (await this.get(key)) !== undefined
37554
+ }
37555
+
37556
+ /**
37557
+ * Asynchronously create a new `IAMap` instance identical to this one but with `key` and its associated
37558
+ * value removed. If the `key` does not exist within this `IAMap`, this instance of `IAMap` is returned.
37559
+ *
37560
+ * @param {string|Uint8Array} key - A key to remove. See {@link IAMap#set} for details about
37561
+ * acceptable `key` types.
37562
+ * @param {Uint8Array} [_cachedHash] - for internal use
37563
+ * @returns {Promise<IAMap<T>>} A `Promise` that resolves to a new `IAMap` instance without the given `key` or the same `IAMap`
37564
+ * instance if `key` does not exist within it.
37565
+ * @async
37566
+ */
37567
+ async delete (key, _cachedHash) {
37568
+ if (!(key instanceof Uint8Array)) {
37569
+ key = textEncoder.encode(key);
37570
+ }
37571
+ const hash = _cachedHash instanceof Uint8Array ? _cachedHash : await hasher(this)(key);
37572
+ assert(hash instanceof Uint8Array);
37573
+ const bitpos = mask(hash, this.depth, this.config.bitWidth);
37574
+
37575
+ if (bitmapHas(this.map, bitpos)) { // should be in a bucket in this node
37576
+ const { data, link } = findElement(this, bitpos, key);
37577
+ if (data) {
37578
+ if (data.found) {
37579
+ /* c8 ignore next 3 */
37580
+ if (data.bucketIndex === undefined) {
37581
+ throw new Error('Unexpected error')
37582
+ }
37583
+ if (this.depth !== 0 && this.directNodeCount() === 0 && this.directEntryCount() === this.config.bucketSize + 1) {
37584
+ // current node will only have this.config.bucketSize entries spread across its buckets
37585
+ // and no child nodes, so wrap up the remaining nodes in a fresh IAMap at depth 0, it will
37586
+ // bubble up to either become the new root node or be unpacked by a higher level
37587
+ return collapseIntoSingleBucket(this, hash, data.elementAt, data.bucketIndex)
37588
+ } else {
37589
+ // we'll either have more entries left than this.config.bucketSize or we're at the root node
37590
+ // so this is a simple bucket removal, no collapsing needed (root nodes can't be collapsed)
37591
+ const lastInBucket = this.data.length === 1;
37592
+ // we have at least one child node or too many entries in buckets to be collapsed
37593
+ const newData = removeFromBucket(this.data, data.elementAt, lastInBucket, data.bucketIndex);
37594
+ let newMap = this.map;
37595
+ if (lastInBucket) {
37596
+ newMap = setBit(newMap, bitpos, false);
37597
+ }
37598
+ return create$2(this.store, this.config, newMap, this.depth, newData)
37599
+ }
37600
+ } else {
37601
+ // key would be located here according to hash, but we don't have it
37602
+ return this
37603
+ }
37604
+ } else if (link) {
37605
+ const child = await load$2(this.store, link.element.link, this.depth + 1, this.config);
37606
+ assert(!!child);
37607
+ const newChild = await child.delete(key, hash);
37608
+ if (this.store.isEqual(newChild.id, link.element.link)) { // no modification
37609
+ return this
37610
+ }
37611
+
37612
+ assert(newChild.data.length > 0); // something probably went wrong in the map block above
37613
+
37614
+ if (newChild.directNodeCount() === 0 && newChild.directEntryCount() === this.config.bucketSize) {
37615
+ // child got collapsed
37616
+ if (this.directNodeCount() === 1 && this.directEntryCount() === 0) {
37617
+ // we only had one node to collapse and the child was collapsible so end up acting the same
37618
+ // as the child, bubble it up and it either becomes the new root or finds a parent to collapse
37619
+ // in to (next section)
37620
+ return newChild
37621
+ } else {
37622
+ // extract data elements from this returned node and merge them into ours
37623
+ return collapseNodeInline(this, bitpos, newChild)
37624
+ }
37625
+ } else {
37626
+ // simple node replacement with edited child
37627
+ return updateNode(this, link.elementAt, newChild)
37628
+ }
37629
+ /* c8 ignore next 3 */
37630
+ } else {
37631
+ throw new Error('Unexpected error')
37199
37632
  }
37200
- // don't await this, it will be done in the queue
37201
- // console.log('add to queue', carCid, value.length)
37202
- this.uploadQueue.push({ carCid, value });
37203
- this.alreadyEnqueued.add(carCid);
37633
+ } else { // we don't have an element for this hash portion
37634
+ return this
37204
37635
  }
37205
37636
  }
37206
37637
 
37207
- remoteBlockFunction = null
37208
-
37209
- async getBlock (dataCID) {
37210
- return await this.withDB(async db => {
37211
- const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
37212
- const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID);
37213
- const carCid = indexResp?.car;
37214
- if (!carCid) {
37215
- throw new Error('Missing block: ' + dataCID)
37638
+ /**
37639
+ * Asynchronously count the number of key/value pairs contained within this `IAMap`, including its children.
37640
+ *
37641
+ * @returns {Promise<number>} A `Promise` with a `number` indicating the number of key/value pairs within this `IAMap` instance.
37642
+ * @async
37643
+ */
37644
+ async size () {
37645
+ let c = 0;
37646
+ for (const e of this.data) {
37647
+ if (e.bucket) {
37648
+ c += e.bucket.length;
37649
+ } else {
37650
+ const child = await load$2(this.store, e.link, this.depth + 1, this.config);
37651
+ c += await child.size();
37216
37652
  }
37217
- const carBytes = await tx.objectStore('cars').get(carCid);
37218
- const reader = await CarReader.fromBytes(carBytes);
37219
- if (this.keyMaterial) {
37220
- const roots = await reader.getRoots();
37221
- const readerGetWithCodec = async cid => {
37222
- const got = await reader.get(cid);
37223
- // console.log('got.', cid.toString())
37224
- let useCodec = codec;
37225
- if (cid.toString().indexOf('bafy') === 0) {
37226
- useCodec = codec$1;
37227
- }
37228
- const decoded = await decode$9({
37229
- ...got,
37230
- codec: useCodec,
37231
- hasher: sha256$2
37232
- });
37233
- // console.log('decoded', decoded.value)
37234
- return decoded
37235
- };
37236
- const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
37237
- const block = blocks.find(b => b.cid.toString() === dataCID);
37238
- if (block) {
37239
- return block.bytes
37653
+ }
37654
+ return c
37655
+ }
37656
+
37657
+ /**
37658
+ * Asynchronously emit all keys that exist within this `IAMap`, including its children. This will cause a full
37659
+ * traversal of all nodes.
37660
+ *
37661
+ * @returns {AsyncGenerator<Uint8Array>} An async iterator that yields keys. All keys will be in `Uint8Array` format regardless of which
37662
+ * format they were inserted via `set()`.
37663
+ * @async
37664
+ */
37665
+ async * keys () {
37666
+ for (const e of this.data) {
37667
+ if (e.bucket) {
37668
+ for (const kv of e.bucket) {
37669
+ yield kv.key;
37240
37670
  }
37241
37671
  } else {
37242
- const gotBlock = await reader.get(CID$1.parse(dataCID));
37243
- if (gotBlock) {
37244
- return gotBlock.bytes
37672
+ const child = await load$2(this.store, e.link, this.depth + 1, this.config);
37673
+ yield * child.keys();
37674
+ }
37675
+ }
37676
+
37677
+ // yield * traverseKV(this, 'keys', this.store.isLink)
37678
+ }
37679
+
37680
+ /**
37681
+ * Asynchronously emit all values that exist within this `IAMap`, including its children. This will cause a full
37682
+ * traversal of all nodes.
37683
+ *
37684
+ * @returns {AsyncGenerator<any>} An async iterator that yields values.
37685
+ * @async
37686
+ */
37687
+ async * values () {
37688
+ for (const e of this.data) {
37689
+ if (e.bucket) {
37690
+ for (const kv of e.bucket) {
37691
+ yield kv.value;
37245
37692
  }
37693
+ } else {
37694
+ const child = await load$2(this.store, e.link, this.depth + 1, this.config);
37695
+ yield * child.values();
37246
37696
  }
37247
- })
37697
+ }
37698
+
37699
+ // yield * traverseKV(this, 'values', this.store.isLink)
37248
37700
  }
37249
- }
37250
37701
 
37251
- const blocksToCarBlock = async (rootCids, blocks) => {
37252
- let size = 0;
37253
- if (!Array.isArray(rootCids)) {
37254
- rootCids = [rootCids];
37702
+ /**
37703
+ * Asynchronously emit all { key, value } pairs that exist within this `IAMap`, including its children. This will
37704
+ * cause a full traversal of all nodes.
37705
+ *
37706
+ * @returns {AsyncGenerator<{ key: Uint8Array, value: any}>} An async iterator that yields objects with the properties `key` and `value`.
37707
+ * @async
37708
+ */
37709
+ async * entries () {
37710
+ for (const e of this.data) {
37711
+ if (e.bucket) {
37712
+ for (const kv of e.bucket) {
37713
+ yield { key: kv.key, value: kv.value };
37714
+ }
37715
+ } else {
37716
+ const child = await load$2(this.store, e.link, this.depth + 1, this.config);
37717
+ yield * child.entries();
37718
+ }
37719
+ }
37720
+
37721
+ // yield * traverseKV(this, 'entries', this.store.isLink)
37255
37722
  }
37256
- const headerSize = headerLength({ roots: rootCids });
37257
- size += headerSize;
37258
- if (!Array.isArray(blocks)) {
37259
- blocks = Array.from(blocks.entries());
37723
+
37724
+ /**
37725
+ * Asynchronously emit the IDs of this `IAMap` and all of its children.
37726
+ *
37727
+ * @returns {AsyncGenerator<any>} An async iterator that yields the ID of this `IAMap` and all of its children. The type of ID is
37728
+ * determined by the backing store which is responsible for generating IDs upon `save()` operations.
37729
+ */
37730
+ async * ids () {
37731
+ yield this.id;
37732
+ for (const e of this.data) {
37733
+ if (e.link) {
37734
+ const child = await load$2(this.store, e.link, this.depth + 1, this.config);
37735
+ yield * child.ids();
37736
+ }
37737
+ }
37260
37738
  }
37261
- for (const { cid, bytes } of blocks) {
37262
- // console.log(cid, bytes)
37263
- size += blockLength({ cid, bytes });
37739
+
37740
+ /**
37741
+ * Returns a serialisable form of this `IAMap` node. The internal representation of this local node is copied into a plain
37742
+ * JavaScript `Object` including a representation of its data array that the key/value pairs it contains as well as
37743
+ * the identifiers of child nodes.
37744
+ * Root nodes (depth==0) contain the full map configuration information, while intermediate and leaf nodes contain only
37745
+ * data that cannot be inferred by traversal from a root node that already has this data (hashAlg and bucketSize -- bitWidth
37746
+ * is inferred by the length of the `map` byte array).
37747
+ * The backing store can use this representation to create a suitable serialised form. When loading from the backing store,
37748
+ * `IAMap` expects to receive an object with the same layout from which it can instantiate a full `IAMap` object. Where
37749
+ * root nodes contain the full set of data and intermediate and leaf nodes contain just the required data.
37750
+ * For content addressable backing stores, it is expected that the same data in this serialisable form will always produce
37751
+ * the same identifier.
37752
+ * Note that the `map` property is a `Uint8Array` so will need special handling for some serialization forms (e.g. JSON).
37753
+ *
37754
+ * Root node form:
37755
+ * ```
37756
+ * {
37757
+ * hashAlg: number
37758
+ * bucketSize: number
37759
+ * hamt: [Uint8Array, Array]
37760
+ * }
37761
+ * ```
37762
+ *
37763
+ * Intermediate and leaf node form:
37764
+ * ```
37765
+ * [Uint8Array, Array]
37766
+ * ```
37767
+ *
37768
+ * The `Uint8Array` in both forms is the 'map' used to identify the presence of an element in this node.
37769
+ *
37770
+ * The second element in the tuple in both forms, `Array`, is an elements array a mix of either buckets or links:
37771
+ *
37772
+ * * A bucket is an array of two elements, the first being a `key` of type `Uint8Array` and the second a `value`
37773
+ * or whatever type has been provided in `set()` operations for this `IAMap`.
37774
+ * * A link is an object of the type that the backing store provides upon `save()` operations and can be identified
37775
+ * with `isLink()` calls.
37776
+ *
37777
+ * Buckets and links are differentiated by their "kind": a bucket is an array while a link is a "link" kind as dictated
37778
+ * by the backing store. We use `Array.isArray()` and `store.isLink()` to perform this differentiation.
37779
+ *
37780
+ * @returns {SerializedNode|SerializedRoot} An object representing the internal state of this local `IAMap` node, including its links to child nodes
37781
+ * if any.
37782
+ */
37783
+ toSerializable () {
37784
+ const map = this.map;
37785
+ const data = this.data.map((/** @type {Element} */ e) => {
37786
+ return e.toSerializable()
37787
+ });
37788
+ /**
37789
+ * @ignore
37790
+ * @type {SerializedNode}
37791
+ */
37792
+ const hamt = [map, data];
37793
+ if (this.depth !== 0) {
37794
+ return hamt
37795
+ }
37796
+ /**
37797
+ * @ignore
37798
+ * @type {SerializedElement}
37799
+ */
37800
+ return {
37801
+ hashAlg: this.config.hashAlg,
37802
+ bucketSize: this.config.bucketSize,
37803
+ hamt
37804
+ }
37264
37805
  }
37265
- const buffer = new Uint8Array(size);
37266
- const writer = await createWriter(buffer, { headerSize });
37267
37806
 
37268
- for (const cid of rootCids) {
37269
- writer.addRoot(cid);
37807
+ /**
37808
+ * Calculate the number of entries locally stored by this node. Performs a scan of local buckets and adds up
37809
+ * their size.
37810
+ *
37811
+ * @returns {number} A number representing the number of local entries.
37812
+ */
37813
+ directEntryCount () {
37814
+ return this.data.reduce((/** @type {number} */ p, /** @type {Element} */ c) => {
37815
+ return p + (c.bucket ? c.bucket.length : 0)
37816
+ }, 0)
37270
37817
  }
37271
37818
 
37272
- for (const { cid, bytes } of blocks) {
37273
- writer.write({ cid, bytes });
37819
+ /**
37820
+ * Calculate the number of child nodes linked by this node. Performs a scan of the local entries and tallies up the
37821
+ * ones containing links to child nodes.
37822
+ *
37823
+ * @returns {number} A number representing the number of direct child nodes
37824
+ */
37825
+ directNodeCount () {
37826
+ return this.data.reduce((/** @type {number} */ p, /** @type {Element} */ c) => {
37827
+ return p + (c.link ? 1 : 0)
37828
+ }, 0)
37274
37829
  }
37275
- await writer.close();
37276
- return await encode$7({ value: writer.bytes, hasher: sha256$2, codec: raw })
37277
- };
37278
37830
 
37279
- const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
37831
+ /**
37832
+ * Asynchronously perform a check on this node and its children that it is in canonical format for the current data.
37833
+ * As this uses `size()` to calculate the total number of entries in this node and its children, it performs a full
37834
+ * scan of nodes and therefore incurs a load and deserialisation cost for each child node.
37835
+ * A `false` result from this method suggests a flaw in the implemetation.
37836
+ *
37837
+ * @async
37838
+ * @returns {Promise<boolean>} A Promise with a boolean value indicating whether this IAMap is correctly formatted.
37839
+ */
37840
+ async isInvariant () {
37841
+ const size = await this.size();
37842
+ const entryArity = this.directEntryCount();
37843
+ const nodeArity = this.directNodeCount();
37844
+ const arity = entryArity + nodeArity;
37845
+ let sizePredicate = 2; // 2 == 'more than one'
37846
+ if (nodeArity === 0) {
37847
+ sizePredicate = Math.min(2, entryArity); // 0, 1 or 2=='more than one'
37848
+ }
37849
+
37850
+ const inv1 = size - entryArity >= 2 * (arity - entryArity);
37851
+ const inv2 = arity === 0 ? sizePredicate === 0 : true;
37852
+ const inv3 = (arity === 1 && entryArity === 1) ? sizePredicate === 1 : true;
37853
+ const inv4 = arity >= 2 ? sizePredicate === 2 : true;
37854
+ const inv5 = nodeArity >= 0 && entryArity >= 0 && ((entryArity + nodeArity) === arity);
37855
+
37856
+ return inv1 && inv2 && inv3 && inv4 && inv5
37857
+ }
37858
+
37859
+ /**
37860
+ * A convenience shortcut to {@link iamap.fromSerializable} that uses this IAMap node instance's backing `store` and
37861
+ * configuration `options`. Intended to be used to instantiate child IAMap nodes from a root IAMap node.
37862
+ *
37863
+ * @param {any} id An optional ID for the instantiated IAMap node. See {@link iamap.fromSerializable}.
37864
+ * @param {any} serializable The serializable form of an IAMap node to be instantiated.
37865
+ * @param {number} [depth=0] The depth of the IAMap node. See {@link iamap.fromSerializable}.
37866
+ */
37867
+ fromChildSerializable (id, serializable, depth) {
37868
+ return fromSerializable(this.store, id, serializable, this.config, depth)
37869
+ }
37870
+ }
37871
+
37872
+ /**
37873
+ * store a new node and assign it an ID
37874
+ * @ignore
37875
+ * @template T
37876
+ * @param {Store<T>} store
37877
+ * @param {IAMap<T>} newNode
37878
+ * @returns {Promise<IAMap<T>>}
37879
+ */
37880
+ async function save (store, newNode) {
37881
+ const id = await store.save(newNode.toSerializable());
37882
+ newNode.id = id;
37883
+ return newNode
37884
+ }
37885
+
37886
+ /**
37887
+ * // utility function to avoid duplication since it's used across get(), set() and delete()
37888
+ * { bucket: { found: true, elementAt, element, bucketIndex, bucketEntry } }
37889
+ * { bucket: { found: false, elementAt, element } }
37890
+ * { link: { elementAt, element } }
37891
+ * @ignore
37892
+ * @template T
37893
+ * @param {IAMap<T>} node
37894
+ * @param {number} bitpos
37895
+ * @param {Uint8Array} key
37896
+ * @returns {FoundElement}
37897
+ */
37898
+ function findElement (node, bitpos, key) {
37899
+ const elementAt = index(node.map, bitpos);
37900
+ const element = node.data[elementAt];
37901
+ assert(!!element);
37902
+ if (element.bucket) { // data element
37903
+ for (let bucketIndex = 0; bucketIndex < element.bucket.length; bucketIndex++) {
37904
+ const bucketEntry = element.bucket[bucketIndex];
37905
+ if (byteCompare(bucketEntry.key, key) === 0) {
37906
+ return { data: { found: true, elementAt, element, bucketIndex, bucketEntry } }
37907
+ }
37908
+ }
37909
+ return { data: { found: false, elementAt, element } }
37910
+ }
37911
+ assert(!!element.link);
37912
+ return { link: { elementAt, element } }
37913
+ }
37914
+
37915
+ /**
37916
+ * new element for this node, i.e. first time this hash portion has been seen here
37917
+ * @ignore
37918
+ * @template T
37919
+ * @param {IAMap<T>} node
37920
+ * @param {number} bitpos
37921
+ * @param {Uint8Array} key
37922
+ * @param {any} value
37923
+ * @returns {Promise<IAMap<T>>}
37924
+ */
37925
+ async function addNewElement (node, bitpos, key, value) {
37926
+ const insertAt = index(node.map, bitpos);
37927
+ const newData = node.data.slice();
37928
+ newData.splice(insertAt, 0, new Element([new KV(key, value)]));
37929
+ const newMap = setBit(node.map, bitpos, true);
37930
+ return create$2(node.store, node.config, newMap, node.depth, newData)
37931
+ }
37932
+
37933
+ /**
37934
+ * update an existing bucket with a new k/v pair
37935
+ * @ignore
37936
+ * @template T
37937
+ * @param {IAMap<T>} node
37938
+ * @param {number} elementAt
37939
+ * @param {number} bucketAt
37940
+ * @param {Uint8Array} key
37941
+ * @param {any} value
37942
+ * @returns {Promise<IAMap<T>>}
37943
+ */
37944
+ async function updateBucket (node, elementAt, bucketAt, key, value) {
37945
+ const oldElement = node.data[elementAt];
37946
+ /* c8 ignore next 3 */
37947
+ if (!oldElement.bucket) {
37948
+ throw new Error('Unexpected error')
37949
+ }
37950
+ const newElement = new Element(oldElement.bucket.slice());
37951
+ const newKv = new KV(key, value);
37952
+ /* c8 ignore next 3 */
37953
+ if (!newElement.bucket) {
37954
+ throw new Error('Unexpected error')
37955
+ }
37956
+ if (bucketAt === -1) {
37957
+ newElement.bucket.push(newKv);
37958
+ // in-bucket sort is required to maintain a canonical state
37959
+ newElement.bucket.sort((/** @type {KV} */ a, /** @type {KV} */ b) => byteCompare(a.key, b.key));
37960
+ } else {
37961
+ newElement.bucket[bucketAt] = newKv;
37962
+ }
37963
+ const newData = node.data.slice();
37964
+ newData[elementAt] = newElement;
37965
+ return create$2(node.store, node.config, node.map, node.depth, newData)
37966
+ }
37967
+
37968
/**
 * overflow of a bucket means it has to be replaced with a child node, tricky surgery
 * @ignore
 * @template T
 * @param {IAMap<T>} node
 * @param {number} elementAt - position of the overflowing bucket in `node.data`
 * @returns {Promise<IAMap<T>>}
 */
async function replaceBucketWithNode (node, elementAt) {
  const element = node.data[elementAt];
  assert(!!element);
  /* c8 ignore next 3 */
  if (!element.bucket) {
    throw new Error('Unexpected error')
  }
  // build a fresh child node one level deeper and re-insert every bucket entry
  let child = new IAMap(node.store, node.config, undefined, node.depth + 1);
  for (const { key, value } of element.bucket) {
    child = await child.set(key, value);
  }
  child = await save(node.store, child);
  const updatedData = node.data.slice();
  updatedData[elementAt] = new Element(undefined, child.id);
  return create$2(node.store, node.config, node.map, node.depth, updatedData)
}
37992
+
37993
/**
 * similar to addNewElement() but for new child nodes
 * @ignore
 * @template T
 * @param {IAMap<T>} node
 * @param {number} elementAt - position in `node.data` to point at the new child
 * @param {IAMap<T>} newChild - a saved child node (must have an `id`)
 * @returns {Promise<IAMap<T>>}
 */
async function updateNode (node, elementAt, newChild) {
  assert(!!newChild.id);
  const updatedData = node.data.slice();
  updatedData[elementAt] = new Element(undefined, newChild.id);
  return create$2(node.store, node.config, node.map, node.depth, updatedData)
}
38009
+
38010
// take a node, extract all of its local entries and put them into a new node with a single
// bucket; used for collapsing a node and sending it upward
/**
 * @ignore
 * @template T
 * @param {IAMap<T>} node
 * @param {Uint8Array} hash - hash of the key being deleted, used to position the single bucket
 * @param {number} elementAt - index in `node.data` of the element the deletion applies to
 * @param {number} bucketIndex - index within that element's bucket of the entry being removed
 * @returns {Promise<IAMap<T>>}
 */
function collapseIntoSingleBucket (node, hash, elementAt, bucketIndex) {
  // pretend it's depth=0 (it may end up being) and only 1 bucket
  const newMap = setBit(new Uint8Array(node.map.length), mask(hash, 0, node.config.bitWidth), true);
  /**
   * @ignore
   * @type {KV[]}
   */
  // fold every entry of every bucket into one flat KV list, skipping the
  // entry being deleted
  const newBucket = node.data.reduce((/** @type {KV[]} */ p, /** @type {Element} */ c, /** @type {number} */ i) => {
    if (i === elementAt) {
      /* c8 ignore next 3 */
      if (!c.bucket) {
        throw new Error('Unexpected error')
      }
      if (c.bucket.length === 1) { // only element in bucket, skip it
        return p
      } else {
        // there's more in this bucket, make a temporary one, remove it and concat it
        const tmpBucket = c.bucket.slice();
        tmpBucket.splice(bucketIndex, 1);
        return p.concat(tmpBucket)
      }
    } else {
      /* c8 ignore next 3 */
      if (!c.bucket) {
        throw new Error('Unexpected error')
      }
      return p.concat(c.bucket)
    }
  }, /** @type {KV[]} */ []);
  // canonical ordering of the merged entries
  newBucket.sort((a, b) => byteCompare(a.key, b.key));
  const newElement = new Element(newBucket);
  return create$2(node.store, node.config, newMap, 0, [newElement])
}
38054
+
38055
// simple delete from an existing bucket in this node
/**
 * @ignore
 * @param {ReadonlyElement} data
 * @param {number} elementAt - index of the element holding the target bucket
 * @param {boolean} lastInBucket - true when the entry is the bucket's only member
 * @param {number} bucketIndex - index of the entry to remove within the bucket
 * @returns {Element[]} a copy of `data` with the entry (or whole element) removed
 */
function removeFromBucket (data, elementAt, lastInBucket, bucketIndex) {
  const updated = data.slice();
  if (lastInBucket) {
    // the bucket would become empty, so drop the whole element
    updated.splice(elementAt, 1);
    return updated
  }
  // bucket will not be empty, remove only the entry from a copied bucket
  const current = data[elementAt];
  /* c8 ignore next 3 */
  if (!current.bucket) {
    throw new Error('Unexpected error')
  }
  const replacement = new Element(current.bucket.slice());
  /* c8 ignore next 3 */
  if (!replacement.bucket) {
    throw new Error('Unexpected error')
  }
  replacement.bucket.splice(bucketIndex, 1);
  updated.splice(elementAt, 1, replacement); // replace old bucket
  return updated
}
38086
+
38087
/**
 * a node has bubbled up from a recursive delete() and we need to extract its
 * contents and insert it into ours
 * @ignore
 * @template T
 * @param {IAMap<T>} node
 * @param {number} bitpos
 * @param {IAMap<T>} newNode - must contain exactly one, already-sorted bucket
 * @returns {Promise<IAMap<T>>}
 */
async function collapseNodeInline (node, bitpos, newNode) {
  assert(newNode.data.length === 1);
  /* c8 ignore next 3 */
  if (!newNode.data[0].bucket) {
    throw new Error('Unexpected error')
  }
  // copy newNode's single bucket into the slot where the child link used to sit
  const inlined = new Element(newNode.data[0].bucket.slice());
  const at = index(node.map, bitpos);
  const updatedData = node.data.slice();
  updatedData[at] = inlined;
  return create$2(node.store, node.config, node.map, node.depth, updatedData)
}
38113
+
38114
/**
 * Validate an options object and fill in defaults to produce a complete Config.
 * @ignore
 * @param {Options} [options]
 * @returns {Config}
 * @throws {TypeError} when options are missing or any field is out of range
 */
function buildConfig (options) {
  if (!options) {
    throw new TypeError('Invalid `options` object')
  }

  if (!Number.isInteger(options.hashAlg)) {
    throw new TypeError('Invalid `hashAlg` option')
  }
  if (!hasherRegistry[options.hashAlg]) {
    throw new TypeError(`Unknown hashAlg: '${options.hashAlg}'`)
  }

  /**
   * @ignore
   * @type {Config}
   */
  const config = { hashAlg: options.hashAlg };

  if (options.bitWidth === undefined) {
    config.bitWidth = defaultBitWidth;
  } else if (!Number.isInteger(options.bitWidth)) {
    throw new TypeError('Invalid `bitWidth` option')
  } else if (options.bitWidth < 3 || options.bitWidth > 16) {
    throw new TypeError('Invalid `bitWidth` option, must be between 3 and 16')
  } else {
    config.bitWidth = options.bitWidth;
  }

  if (options.bucketSize === undefined) {
    config.bucketSize = defaultBucketSize;
  } else if (!Number.isInteger(options.bucketSize)) {
    throw new TypeError('Invalid `bucketSize` option')
  } else if (options.bucketSize < 2) {
    throw new TypeError('Invalid `bucketSize` option')
  } else {
    config.bucketSize = options.bucketSize;
  }

  return config
}
38166
+
38167
/**
 * Determine if a serializable object is an IAMap root type, can be used to assert whether a data block is
 * an IAMap before trying to instantiate it.
 *
 * @name iamap.isRootSerializable
 * @function
 * @param {any} serializable An object that may be a serialisable form of an IAMap root node
 * @returns {boolean} An indication that the serialisable form is or is not an IAMap root node
 */
function isRootSerializable (serializable) {
  if (typeof serializable !== 'object') {
    return false
  }
  // a root carries its config inline plus the serialised node under `hamt`
  return Number.isInteger(serializable.hashAlg) &&
    Number.isInteger(serializable.bucketSize) &&
    Array.isArray(serializable.hamt) &&
    isSerializable(serializable.hamt)
}
38183
+
38184
/**
 * Determine if a serializable object is an IAMap node type, can be used to assert whether a data block is
 * an IAMap node before trying to instantiate it.
 * This should pass for both root nodes as well as child nodes
 *
 * @name iamap.isSerializable
 * @function
 * @param {any} serializable An object that may be a serialisable form of an IAMap node
 * @returns {boolean} An indication that the serialisable form is or is not an IAMap node
 */
function isSerializable (serializable) {
  if (!Array.isArray(serializable)) {
    // not a plain node tuple; it may still be a root object
    return isRootSerializable(serializable)
  }
  // a serialised node is a [bitmap, elements] pair
  const [map, data] = serializable;
  return serializable.length === 2 && map instanceof Uint8Array && Array.isArray(data)
}
38200
+
38201
/**
 * Instantiate an IAMap from a valid serialisable form of an IAMap node. The serializable should be the same as
 * produced by {@link IAMap#toSerializable}.
 * Serialised forms of root nodes must satisfy both {@link iamap.isRootSerializable} and {@link iamap.isSerializable}. For
 * root nodes, the `options` parameter will be ignored and the `depth` parameter must be the default value of `0`.
 * Serialised forms of non-root nodes must satisfy {@link iamap.isSerializable} and have a valid `options` parameter and
 * a non-`0` `depth` parameter.
 *
 * @name iamap.fromSerializable
 * @function
 * @template T
 * @param {Store<T>} store A backing store for this Map. See {@link iamap.create}.
 * @param {any} id An optional ID for the instantiated IAMap node. Unlike {@link iamap.create},
 * `fromSerializable()` does not `save()` a newly created IAMap node so an ID is not generated for it. If one is
 * required for downstream purposes it should be provided, if the value is `null` or `undefined`, `node.id` will
 * be `null` but will remain writable.
 * @param {any} serializable The serializable form of an IAMap node to be instantiated
 * @param {Options} [options=null] An options object for IAMap child node instantiation. Will be ignored for root
 * node instantiation (where `depth` = `0`) See {@link iamap.create}.
 * @param {number} [depth=0] The depth of the IAMap node. Where `0` is the root node and any `>0` number is a child
 * node.
 * @returns {IAMap<T>}
 */
function fromSerializable (store, id, serializable, options, depth = 0) {
  /**
   * @ignore
   * @type {SerializedNode}
   */
  let hamt;
  if (depth === 0) { // even if options were supplied, ignore them and use what's in the serializable
    if (!isRootSerializable(serializable)) {
      throw new Error('Loaded object does not appear to be an IAMap root (depth==0)')
    }
    // don't use passed-in options
    options = serializableToOptions(serializable);
    hamt = serializable.hamt;
  } else {
    if (!isSerializable(serializable)) {
      throw new Error('Loaded object does not appear to be an IAMap node (depth>0)')
    }
    hamt = serializable;
  }
  // hamt[0] is the bitmap, hamt[1] the element array; each serialised element
  // is revived via Element.fromSerializable with store.isLink bound as its first argument
  const data = hamt[1].map(Element.fromSerializable.bind(null, store.isLink));
  const node = new IAMap(store, options, hamt[0], depth, data);
  if (id != null) {
    node.id = id;
  }
  return node
}
38250
+
38251
/**
 * Derive an options object from a serialised IAMap root.
 * @ignore
 * @param {any} serializable
 * @returns {Config}
 */
function serializableToOptions (serializable) {
  // the root bitmap occupies (2**bitWidth) / 8 bytes, so its byte length
  // lets us recover bitWidth: log2(bytes * 8)
  const bitWidth = Math.log2(serializable.hamt[0].length * 8);
  return {
    hashAlg: serializable.hashAlg,
    bitWidth,
    bucketSize: serializable.bucketSize
  }
}
38263
+
38264
/**
 * Check whether an arbitrary value is an IAMap instance.
 * @template T
 * @param {IAMap<T> | any} value
 * @returns {boolean}
 */
IAMap.isIAMap = function isIAMap (value) {
  return value instanceof IAMap
};
38272
+
38273
/**
 * internal utility to fetch a map instance's hash function
 *
 * @ignore
 * @template T
 * @param {IAMap<T>} map
 * @returns {Hasher}
 */
function hasher (map) {
  const { hashAlg } = map.config;
  return hasherRegistry[hashAlg].hasher
}
38284
+
38285
/**
 * Lexicographic comparison of two byte arrays; when one is a prefix of the
 * other, the shorter sorts first.
 * @ignore
 * @param {Uint8Array} b1
 * @param {Uint8Array} b2
 * @returns {number} -1, 0 or 1
 */
function byteCompare (b1, b2) {
  const shared = Math.min(b1.length, b2.length);
  for (let i = 0; i < shared; i++) {
    const diff = b1[i] - b2[i];
    if (diff !== 0) {
      return diff < 0 ? -1 : 1
    }
  }
  // all shared bytes equal: order by length
  if (b1.length === b2.length) {
    return 0
  }
  return b1.length < b2.length ? -1 : 1
}
38310
+
38311
// aliases for the primary entry points (create / load / registerHasher)
var create_1 = create$2;
var load_1 = load$2;
var registerHasher_1 = registerHasher;
38314
+
38315
+ /** Auto-generated with ipld-schema-validator@1.0.1 at Tue Aug 16 2022 from IPLD Schema:
38316
+ *
38317
+ * # Root node layout
38318
+ * type HashMapRoot struct {
38319
+ * hashAlg Int
38320
+ * bucketSize Int
38321
+ * hamt HashMapNode
38322
+ * }
38323
+ *
38324
+ * # Non-root node layout
38325
+ * type HashMapNode struct {
38326
+ * map Bytes
38327
+ * data [ Element ]
38328
+ * } representation tuple
38329
+ *
38330
+ * type Element union {
38331
+ * | &HashMapNode link
38332
+ * | Bucket list
38333
+ * } representation kinded
38334
+ *
38335
+ * type Bucket [ BucketEntry ]
38336
+ *
38337
+ * type BucketEntry struct {
38338
+ * key Bytes
38339
+ * value Any
38340
+ * } representation tuple
38341
+ *
38342
+ */
38343
+
38344
// Runtime predicates for each IPLD data-model kind; consumed by the generated
// `Types` validators below. `Link` identifies CID objects via the
// `obj.asCID === obj` self-reference convention; `Map` is the catch-all for
// plain objects that are not null, CID, list or bytes.
const Kinds = {
  Null: /** @returns {boolean} */ (/** @type {any} */ obj) => obj === null,
  Int: /** @returns {boolean} */ (/** @type {any} */ obj) => Number.isInteger(obj),
  Float: /** @returns {boolean} */ (/** @type {any} */ obj) => typeof obj === 'number' && Number.isFinite(obj),
  String: /** @returns {boolean} */ (/** @type {any} */ obj) => typeof obj === 'string',
  Bool: /** @returns {boolean} */ (/** @type {any} */ obj) => typeof obj === 'boolean',
  Bytes: /** @returns {boolean} */ (/** @type {any} */ obj) => obj instanceof Uint8Array,
  Link: /** @returns {boolean} */ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID === obj,
  List: /** @returns {boolean} */ (/** @type {any} */ obj) => Array.isArray(obj),
  Map: /** @returns {boolean} */ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID !== obj && !Kinds.List(obj) && !Kinds.Bytes(obj)
};
38355
/** @type {{ [k in string]: (obj:any)=>boolean}} */
// Generated validators for the HashMapRoot / HashMapNode schema (see the IPLD
// Schema comment above); one entry per named type or struct field, each
// delegating to the `Kinds` predicates.
const Types = {
  Int: Kinds.Int,
  'HashMapRoot > hashAlg': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Int(obj),
  'HashMapRoot > bucketSize': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Int(obj),
  Bytes: Kinds.Bytes,
  'HashMapNode > map': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Bytes(obj),
  'Element > HashMapNode (anon)': Kinds.Link,
  'BucketEntry > key': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Bytes(obj),
  Any: /** @returns {boolean} */ (/** @type {any} */ obj) => (Kinds.Bool(obj) && Types.Bool(obj)) || (Kinds.String(obj) && Types.String(obj)) || (Kinds.Bytes(obj) && Types.Bytes(obj)) || (Kinds.Int(obj) && Types.Int(obj)) || (Kinds.Float(obj) && Types.Float(obj)) || (Kinds.Null(obj) && Types.Null(obj)) || (Kinds.Link(obj) && Types.Link(obj)) || (Kinds.Map(obj) && Types.AnyMap(obj)) || (Kinds.List(obj) && Types.AnyList(obj)),
  Bool: Kinds.Bool,
  String: Kinds.String,
  Float: Kinds.Float,
  Null: Kinds.Null,
  Link: Kinds.Link,
  AnyMap: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.Map(obj) && Array.prototype.every.call(Object.values(obj), Types.Any),
  AnyList: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types.Any),
  'BucketEntry > value': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Any(obj),
  // tuple representation: [key, value]
  BucketEntry: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && obj.length === 2 && Types['BucketEntry > key'](obj[0]) && Types['BucketEntry > value'](obj[1]),
  Bucket: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types.BucketEntry),
  // kinded union: a link (child node) or a list (bucket)
  Element: /** @returns {boolean} */ (/** @type {any} */ obj) => (Kinds.Link(obj) && Types['Element > HashMapNode (anon)'](obj)) || (Kinds.List(obj) && Types.Bucket(obj)),
  'HashMapNode > data (anon)': /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types.Element),
  'HashMapNode > data': /** @returns {boolean} */ (/** @type {any} */ obj) => Types['HashMapNode > data (anon)'](obj),
  // tuple representation: [map, data]
  HashMapNode: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && obj.length === 2 && Types['HashMapNode > map'](obj[0]) && Types['HashMapNode > data'](obj[1]),
  'HashMapRoot > hamt': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.HashMapNode(obj),
  HashMapRoot: /** @returns {boolean} */ (/** @type {any} */ obj) => { const keys = obj && Object.keys(obj); return Kinds.Map(obj) && ['hashAlg', 'bucketSize', 'hamt'].every((k) => keys.includes(k)) && Object.entries(obj).every(([name, value]) => Types['HashMapRoot > ' + name] && Types['HashMapRoot > ' + name](value)) }
};
38382
+
38383
// validators for the two node layouts
const HashMapRoot = Types.HashMapRoot;
const HashMapNode = Types.HashMapNode;
// bare property accesses — evaluated and discarded, no runtime effect
Types.Element;
Types.Bucket;
Types.BucketEntry;
38388
+
38389
+ /**
38390
+ * @typedef {'bool'|'string'|'bytes'|'int'|'float'|'list'|'map'|'null'|'link'} RepresentationKindString
38391
+ */
38392
+
38393
/**
 * Determine the IPLD representation kind of a JavaScript value.
 * @param {any} obj
 * @returns {RepresentationKindString}
 * @throws {TypeError} for values with no IPLD kind (e.g. undefined, functions)
 */
function kind (obj) {
  switch (typeof obj) {
    case 'number':
      return Number.isInteger(obj) ? 'int' : 'float'
    case 'string':
      return 'string'
    case 'boolean':
      return 'bool'
  }
  if (obj === null) {
    return 'null'
  }
  if (typeof obj === 'object') {
    // CID objects self-reference via `asCID`
    if (obj.asCID === obj) {
      return 'link'
    }
    if (obj instanceof Uint8Array) {
      return 'bytes'
    }
    if (Array.isArray(obj)) {
      return 'list'
    }
    return 'map'
  }
  throw new TypeError(`Unknown IPLD kind for value: ${JSON.stringify(obj)}`)
}
38427
+
38428
+ /**
38429
+ * @typedef {import('ipld-schema/schema-schema').Schema} Schema
38430
+ * @typedef {import('ipld-schema/schema-schema').TypeDefnLink} TypeDefnLink
38431
+ * @typedef {import('ipld-schema/schema-schema').TypeDefnList} TypeDefnList
38432
+ * @typedef {import('ipld-schema/schema-schema').TypeDefnMap} TypeDefnMap
38433
+ */
38434
+
38435
/**
 * Infer an IPLD schema description for an arbitrary JS value, returning the
 * accumulated typedefs and the name of the root type.
 * @param {any} obj
 * @returns {{ schema: Schema, root: string }}
 */
function describe (obj) {
  const description = describeObject(obj, { types: {} });
  if (!Object.keys(description.schema.types).length) {
    // when `obj` is a terminal type, make up a typedef for that kind so we have
    // something to point to for our root rather than the plain typed kind

    // special case for links
    if (typeof description.root === 'object' && typeof description.root.link === 'object') {
      const name = 'Link';
      description.schema.types[name] = { link: {} };
      description.root = name;
    } else if (typeof description.root === 'string') {
      // terminal kind: register e.g. `type Int int` and point the root at it
      const name = `${description.root}`;
      // @ts-ignore
      description.schema.types[name] = { [description.root.toLowerCase()]: {} };
      description.root = name;
      /* c8 ignore next 3 */
    } else {
      throw new Error('internal error')
    }
  }
  /* c8 ignore next 3 */
  if (typeof description.root !== 'string') {
    throw new Error('internal error')
  }
  return { schema: description.schema, root: description.root }
}
38466
+
38467
/**
 * Recursively describe `obj`, accumulating typedefs into `schema.types` and
 * returning the type name (or inline link form) describing `obj`.
 * @param {any} obj
 * @param {Schema} schema
 * @returns {{ schema: Schema, root: string|{ link: TypeDefnLink } }}
 */
function describeObject (obj, schema) {
  const objKind = kind(obj);
  // capitalised kind name, e.g. 'int' -> 'Int'
  let name = `${objKind.charAt(0).toUpperCase()}${objKind.substring(1)}`;

  // terminals
  if (objKind === 'null' ||
    objKind === 'int' ||
    objKind === 'bool' ||
    objKind === 'float' ||
    objKind === 'string' ||
    objKind === 'bytes') {
    return { schema, root: name }
  }

  if (objKind === 'link') {
    return { schema, root: { link: {} } }
  }

  // 'map' || 'list'

  // describe each entry; list entries get synthetic field names f0, f1, ...
  /** @type {{ fieldName: string, root: string|{ link: TypeDefnLink }}[]} */
  const fieldNames = [];
  const entries = objKind === 'map'
    ? Object.entries(obj)
    : obj.map((/** @type {any} */ e, /** @type {number} */ i) => [`f${i}`, e]);
  for (const [fieldName, value] of entries) {
    fieldNames.push({ fieldName, root: describeObject(value, schema).root });
  }
  // homogeneous entries -> plain map/list type; mixed -> struct
  let unique = true;
  for (let i = 1; i < fieldNames.length; i++) {
    // this is a shallow assumption - that the name tells us the uniqueness, it doesn't
    // and this will have to be improved
    if (fieldNames[i].root !== fieldNames[i - 1].root) {
      unique = false;
      break
    }
  }

  name = `${name}_1`;
  /** @type {{ map: { keyType?: string, valueType?: string|{ link: TypeDefnLink } } }|{ list: { valueType?: string|{ link: TypeDefnLink } } }|{ struct: { fields: { [ k in string]: { type: string | { link: TypeDefnLink } } }, representation?: { tuple: {} } } } } */
  let type;

  if (unique) { // a pure map or list
    const valueType = fieldNames.length ? fieldNames[0].root : 'Any';
    if (objKind === 'map') {
      type = { map: { keyType: 'String', valueType } };
    } else if (objKind === 'list') {
      type = { list: { valueType } };
      /* c8 ignore next 4 */
    } else {
      throw new Error(`Unexpected object kind: ${objKind}`)
    }
  } else { // a struct with varying types
    name = 'Struct_1';
    type = {
      struct: { fields: {} }
    };
    for (const field of fieldNames) {
      type.struct.fields[field.fieldName] = { type: field.root };
    }
    // a list described as a struct serialises positionally
    if (objKind === 'list') {
      type.struct.representation = { tuple: {} };
    }
  }

  // find a free (or structurally identical) name: Map_1, Map_2, ...
  while (schema.types[name]) {
    if (deepEqual(schema.types[name], type)) {
      break
    }
    name = name.split('_').map((s, i) => i ? parseInt(s, 10) + 1 : s).join('_');
  }
  // too hard
  // @ts-ignore
  schema.types[name] = type;

  return { schema, root: name }
}
38549
+
38550
/**
 * Structural equality over IPLD-kind values. Scalars compare with `===`;
 * maps compare via their entry lists; lists compare element-wise. Values of
 * kind bytes or link are treated as equal once their kinds match (their
 * contents are not inspected here).
 * @param {any} o1
 * @param {any} o2
 * @returns {boolean}
 */
function deepEqual (o1, o2) {
  const k1 = kind(o1);
  /* c8 ignore next 3 */
  if (k1 !== kind(o2)) {
    return false
  }
  if (k1 === 'bool' || k1 === 'string' || k1 === 'int' || k1 === 'float' || k1 === 'null') {
    return o1 === o2
  }
  if (k1 === 'map') {
    return deepEqual(Object.entries(o1), Object.entries(o2))
  }
  if (k1 === 'list') {
    if (o1.length !== o2.length) {
      return false
    }
    for (let i = 0; i < o1.length; i++) {
      if (!deepEqual(o1[i], o2[i])) {
        return false
      }
    }
  }
  return true
}
38584
+
38585
// identity transform used when no highlight callback is supplied for a category
const noop = (s) => s;

// based on prism.js syntax categories, except 'class-name' -> className
const noopHighlighter = Object.fromEntries(
  ['keyword', 'builtin', 'operator', 'number', 'string', 'className', 'punctuation']
    .map((category) => [category, noop])
);
38598
+
38599
/**
 * Render a full IPLD schema object to its DSL text form: any `advanced`
 * declarations followed by all type definitions.
 * @param {object} schema - must have a `types` object
 * @param {string} [indent] - indentation unit for nested lines
 * @param {object} [highlighter] - per-category callbacks, merged over no-op defaults
 * @returns {string}
 */
function print (schema, indent = ' ', highlighter = {}) {
  if (!schema || typeof schema.types !== 'object') {
    throw new Error('Invalid schema')
  }

  const hl = Object.assign({}, noopHighlighter, highlighter);
  return printAdvanced(schema, indent, hl) + printTypes(schema, indent, hl)
}
38613
+
38614
/**
 * Render the `advanced` declarations of a schema, one per line, each followed
 * by a blank line; returns '' when the schema has none.
 * @param {object} schema
 * @param {string} indent - unused here, kept for signature parity
 * @param {object} highlighter
 * @returns {string}
 */
function printAdvanced (schema, indent, highlighter) {
  if (typeof schema.advanced !== 'object') {
    return ''
  }
  return Object.keys(schema.advanced)
    .map((name) => `${highlighter.keyword('advanced')} ${highlighter.className(name)}\n\n`)
    .join('')
}
38625
+
38626
/**
 * Render every type definition in the schema as `type Name <defn>` blocks
 * separated by blank lines.
 * @param {object} schema
 * @param {string} indent
 * @param {object} highlighter
 * @returns {string}
 */
function printTypes (schema, indent, highlighter) {
  const rendered = Object.entries(schema.types)
    .map(([name, defn]) => `${highlighter.keyword('type')} ${highlighter.className(name)} ${printType(defn, indent, highlighter)}\n\n`)
    .join('');
  // trim the trailing blank line left after the last entry
  return rendered.replace(/\n\n$/m, '')
}
38635
+
38636
/**
 * Extract the single representation-kind key from a type definition object.
 * @param {object} defn
 * @returns {string} the definition's one key
 * @throws {Error} when the definition has no usable key or more than one key
 */
function kindFromDefinition (defn) {
  const keys = Object.keys(defn);
  if (!keys[0]) {
    throw new Error('Invalid schema, missing kind')
  }
  if (keys.length > 1) {
    throw new Error('Invalid schema more than one kind')
  }
  return keys[0]
}
38646
+
38647
/**
 * Render one named type definition body. Term-like kinds delegate straight to
 * printTypeTerm; structured kinds get their keyword prefix; bytes/string may
 * carry an `advanced` representation annotation; anything else prints bare.
 * @param {object} defn
 * @param {string} indent
 * @param {object} highlighter
 * @returns {string}
 */
function printType (defn, indent, highlighter) {
  const typeKind = kindFromDefinition(defn);

  if (['map', 'list', 'link', 'copy'].includes(typeKind)) {
    return printTypeTerm(defn, indent, highlighter)
  }

  if (['struct', 'union', 'enum'].includes(typeKind)) {
    return `${highlighter.builtin(typeKind)} ${printTypeTerm(defn, indent, highlighter)}`
  }

  const body = defn[typeKind];
  if ((typeKind === 'bytes' || typeKind === 'string') && body.representation && typeof body.representation.advanced === 'string') {
    return `${typeKind} ${highlighter.builtin('representation')} advanced ${body.representation.advanced}`
  }

  return typeKind
}
38664
+
38665
+ function printTypeTerm (defn, indent, highlighter) {
38666
+ if (typeof defn === 'string') {
38667
+ return defn
38668
+ }
38669
+
38670
+ const kind = kindFromDefinition(defn);
38671
+
38672
+ if (typeof printTypeTerm[kind] !== 'function') {
38673
+ throw new Error(`Invalid schema unsupported kind (${kind})`)
38674
+ }
38675
+
38676
+ return printTypeTerm[kind](defn[kind], indent, highlighter)
38677
+ }
38678
+
38679
// link terms render as `&ExpectedType`, defaulting to `&Any`
printTypeTerm.link = function link (defn, indent, highlighter) {
  const target = defn.expectedType || 'Any';
  return `${highlighter.punctuation('&')}${printTypeTerm(target, indent, highlighter)}`
};
38682
+
38683
// copy terms render as `= FromType`
printTypeTerm.copy = function copy (defn, indent, highlighter) {
  const { fromType } = defn;
  return `${highlighter.operator('=')} ${fromType}`
};
38686
+
38687
/**
 * Render a map type term, e.g. `{String:Int}`, including any
 * listpairs / stringpairs / advanced representation suffix.
 * @param {object} defn - the `map` body of the type definition
 * @param {string} indent
 * @param {object} highlighter
 * @returns {string}
 * @throws {Error} when keyType or valueType is missing
 */
printTypeTerm.map = function map (defn, indent, highlighter) {
  if (typeof defn.keyType !== 'string') {
    throw new Error('Invalid schema, map definition needs a "keyType"')
  }
  if (!defn.valueType) {
    // fixed: this message previously said "keyType" (copy-paste error)
    throw new Error('Invalid schema, map definition needs a "valueType"')
  }

  const nullable = defn.valueNullable === true ? 'nullable ' : '';
  let str = `${highlighter.punctuation('{')}${printTypeTerm(defn.keyType, indent, highlighter)}:${nullable}${printTypeTerm(defn.valueType, indent, highlighter)}${highlighter.punctuation('}')}`;
  if (defn.representation) {
    const repr = reprStrategy(defn);
    if (repr === 'listpairs') {
      str += ` ${highlighter.builtin('representation')} listpairs`;
    } else if (repr === 'stringpairs') {
      str += stringpairs(indent, 'map', defn.representation.stringpairs, highlighter);
    } else if (repr === 'advanced') {
      str += ` ${highlighter.builtin('representation')} advanced ${defn.representation.advanced}`;
    }
  }
  return str
};
38709
+
38710
/**
 * Render a list type term, e.g. `[Int]`, including any advanced
 * representation suffix.
 * @param {object} defn - the `list` body of the type definition
 * @param {string} indent
 * @param {object} highlighter
 * @returns {string}
 * @throws {Error} when valueType is missing
 */
printTypeTerm.list = function list (defn, indent, highlighter) {
  if (!defn.valueType) {
    // fixed: this message previously said "keyType", but list definitions
    // have no keyType — the missing field is "valueType"
    throw new Error('Invalid schema, list definition needs a "valueType"')
  }

  const nullable = defn.valueNullable === true ? 'nullable ' : '';
  let str = `${highlighter.punctuation('[')}${nullable}${printTypeTerm(defn.valueType, indent, highlighter)}${highlighter.punctuation(']')}`;

  if (defn.representation) {
    if (reprStrategy(defn) === 'advanced') {
      str += ` ${highlighter.builtin('representation')} advanced ${defn.representation.advanced}`;
    }
  }

  return str
};
38726
+
38727
/**
 * Render a struct type term: the `{ ... }` field block (with optional/nullable
 * modifiers and per-field rename/implicit representation annotations) followed
 * by any listpairs / stringjoin / stringpairs / tuple / advanced
 * representation suffix.
 * @param {object} defn - the `struct` body of the type definition
 * @param {string} indent
 * @param {object} highlighter
 * @returns {string}
 */
printTypeTerm.struct = function struct (defn, indent, highlighter) {
  if (typeof defn.fields !== 'object') {
    throw new Error('Invalid schema, struct requires a "fields" map')
  }

  let str = highlighter.punctuation('{');

  for (const [name, fieldDefn] of Object.entries(defn.fields)) {
    const optional = fieldDefn.optional === true ? highlighter.keyword('optional') + ' ' : '';
    const nullable = fieldDefn.nullable === true ? highlighter.keyword('nullable') + ' ' : '';
    // per-field `(rename "..." implicit ...)` annotation from a map representation
    let fieldRepr = '';
    if (defn.representation && defn.representation.map && typeof defn.representation.map.fields === 'object') {
      const fr = defn.representation.map.fields[name];
      if (typeof fr === 'object') {
        const hasRename = typeof fr.rename === 'string';
        const hasImplicit = fr.implicit !== undefined;
        if (hasRename || hasImplicit) {
          fieldRepr = ` ${highlighter.punctuation('(')}`;
          if (hasRename) {
            fieldRepr += `${highlighter.keyword('rename')} ${highlighter.string(`"${fr.rename}"`)}`;
            if (hasImplicit) {
              fieldRepr += ' ';
            }
          }
          if (hasImplicit) {
            // implicit values are quoted strings, numbers, or keywords (e.g. true/false)
            const impl = typeof fr.implicit === 'string'
              ? highlighter.string(`"${fr.implicit}"`)
              : typeof fr.implicit === 'number'
                ? highlighter.number(fr.implicit)
                : highlighter.keyword(fr.implicit);
            fieldRepr += `${highlighter.keyword('implicit')} ${impl}`;
          }
          fieldRepr += highlighter.punctuation(')');
        }
      }
    }

    const fieldType = typeof fieldDefn.type === 'string' ? fieldDefn.type : printTypeTerm(fieldDefn.type, indent, highlighter);
    str += `\n${indent}${name} ${optional}${nullable}${fieldType}${fieldRepr}`;
  }

  // close the brace on its own line only when at least one field was printed
  if (str[str.length - 1] !== highlighter.punctuation('{')) {
    str += '\n';
  }
  str += highlighter.punctuation('}');

  if (defn.representation) {
    const repr = reprStrategy(defn);
    if (repr === 'listpairs') {
      str += ` ${highlighter.builtin('representation')} listpairs`;
    } else if (repr === 'stringjoin') {
      if (typeof defn.representation.stringjoin.join !== 'string') {
        throw new Error('Invalid schema, struct stringjoin representations require an join string')
      }
      str += ` ${highlighter.builtin('representation')} stringjoin ${highlighter.punctuation('{')}\n`;
      str += `${indent}join ${highlighter.string(`"${defn.representation.stringjoin.join}"`)}\n`;
      str += fieldOrder(indent, defn.representation.stringjoin.fieldOrder, highlighter);
      str += highlighter.punctuation('}');
    } else if (repr === 'stringpairs') {
      str += stringpairs(indent, 'struct', defn.representation.stringpairs, highlighter);
    } else if (repr === 'tuple') {
      str += ` ${highlighter.builtin('representation')} tuple`;
      // an explicit fieldOrder gets its own `{ ... }` block
      if (Array.isArray(defn.representation.tuple.fieldOrder)) {
        str += ` ${highlighter.punctuation('{')}\n`;
        str += fieldOrder(indent, defn.representation.tuple.fieldOrder, highlighter);
        str += highlighter.punctuation('}');
      }
    } else if (repr === 'advanced') {
      str += ` ${highlighter.builtin('representation')} advanced ${defn.representation.advanced}`;
    }
  }

  return str
};
38801
+
38802
/**
 * Render a `fieldOrder ["a", "b"]` line for a struct/tuple representation
 * block. Produces an empty string when no explicit field-order array is given.
 * @ignore
 * @param {string} indent - indentation prefix for the emitted line
 * @param {string[]|undefined} order - field names in representation order
 * @param {object} highlighter - token-decorating callbacks
 * @returns {string}
 */
function fieldOrder (indent, order, highlighter) {
  if (!Array.isArray(order)) {
    return ''
  }
  const quoted = order.map((f) => highlighter.string(`"${f}"`)).join(', ');
  return `${indent}fieldOrder ${highlighter.punctuation('[')}${quoted}${highlighter.punctuation(']')}\n`
}
38810
+
38811
/**
 * Render a ` representation stringpairs { ... }` tail for a map or struct.
 * @ignore
 * @param {string} indent - indentation prefix for the delimiter lines
 * @param {string} kind - the owning type kind ('map' or 'struct'), used in error messages
 * @param {{innerDelim: string, entryDelim: string}} pairs - stringpairs representation definition
 * @param {object} highlighter - token-decorating callbacks
 * @returns {string}
 * @throws {Error} when either delimiter is missing or not a string
 */
function stringpairs (indent, kind, pairs, highlighter) {
  if (typeof pairs.innerDelim !== 'string') {
    throw new Error(`Invalid schema, ${kind} stringpairs representations require an innerDelim string`)
  }
  if (typeof pairs.entryDelim !== 'string') {
    throw new Error(`Invalid schema, ${kind} stringpairs representations require an entryDelim string`)
  }
  const open = highlighter.punctuation('{');
  const close = highlighter.punctuation('}');
  return ` ${highlighter.builtin('representation')} stringpairs ${open}\n` +
    `${indent}innerDelim ${highlighter.string(`"${pairs.innerDelim}"`)}\n` +
    `${indent}entryDelim ${highlighter.string(`"${pairs.entryDelim}"`)}\n` +
    close
}
38825
+
38826
/**
 * Extract the single representation-strategy name from a type definition.
 * The definition must carry exactly one key under `representation`; for the
 * 'advanced' strategy the value must be a string (the advanced layout name),
 * for every other strategy it must be an object.
 * @ignore
 * @param {object} defn - type definition with a `representation` property
 * @returns {string} the representation strategy name
 * @throws {Error} when the representation is absent, ambiguous, or malformed
 */
function reprStrategy (defn) {
  if (typeof defn.representation !== 'object') {
    throw new Error('Expected \'representation\' property of definition')
  }
  const keys = Object.keys(defn.representation);
  if (keys.length !== 1) {
    throw new Error('Expected exactly one \'representation\' field')
  }
  const [repr] = keys;
  const value = defn.representation[repr];
  if (repr === 'advanced') {
    if (typeof value !== 'string') {
      // (message text preserved verbatim from upstream)
      throw new Error('Expected representation \'advanced\' to be an string')
    }
    return repr
  }
  if (typeof value !== 'object') {
    throw new Error(`Expected representation '${repr}' to be an object`)
  }
  return repr
}
38846
+
38847
/**
 * Print a union type definition as IPLD schema DSL text.
 * Emits one `| Type discriminant` line per member, then a `representation`
 * tail specific to the strategy: kinded, stringprefix/bytesprefix, keyed,
 * inline or envelope.
 * @param {object} defn - union type definition; must carry a `representation` object
 * @param {string} indent - indentation prefix for member lines
 * @param {object} highlighter - token-decorating callbacks (punctuation/string/builtin)
 * @returns {string}
 * @throws {Error} on a missing or unknown representation, or missing required fields
 */
printTypeTerm.union = function union (defn, indent, highlighter) {
  if (typeof defn.representation !== 'object') {
    throw new Error('Invalid schema, unions require a representation')
  }

  let str = highlighter.punctuation('{');
  const repr = reprStrategy(defn);

  if (repr === 'kinded') {
    // kinded: discriminant is the data-model kind of each member type
    for (const [kind, type] of Object.entries(defn.representation.kinded)) {
      str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${kind}`;
    }
    str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} kinded`;
  } else if (repr === 'stringprefix' || repr === 'bytesprefix') {
    // prefix strategies: discriminant is a quoted string/bytes prefix per member
    if (typeof defn.representation[repr].prefixes !== 'object') {
      throw new Error(`Invalid schema, ${repr} unions require a representation prefixes map`)
    }
    for (const [key, type] of Object.entries(defn.representation[repr].prefixes)) {
      str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
    }
    str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} ${repr}`;
  } else if (repr === 'keyed') {
    // keyed: discriminant is a map key wrapping the member value
    if (typeof defn.representation[repr] !== 'object') {
      throw new Error(`Invalid schema, ${repr} unions require a representation keyed map`)
    }
    for (const [key, type] of Object.entries(defn.representation[repr])) {
      str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
    }
    str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} ${repr}`;
  } else if (repr === 'inline') {
    // inline: discriminant lives alongside member fields under discriminantKey
    if (typeof defn.representation.inline.discriminantTable !== 'object') {
      throw new Error('Invalid schema, inline unions require a discriminantTable map')
    }
    if (typeof defn.representation.inline.discriminantKey !== 'string') {
      throw new Error('Invalid schema, inline unions require a discriminantKey string')
    }
    for (const [key, type] of Object.entries(defn.representation.inline.discriminantTable)) {
      str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
    }
    str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} inline ${highlighter.punctuation('{')}\n${indent}discriminantKey ${highlighter.string(`"${defn.representation.inline.discriminantKey}"`)}\n${highlighter.punctuation('}')}`;
  } else if (repr === 'envelope') {
    // envelope: discriminant and content live under two sibling keys
    if (typeof defn.representation.envelope.discriminantTable !== 'object') {
      throw new Error('Invalid schema, envelope unions require a discriminantTable map')
    }
    if (typeof defn.representation.envelope.discriminantKey !== 'string') {
      throw new Error('Invalid schema, envelope unions require a discriminantKey string')
    }
    if (typeof defn.representation.envelope.contentKey !== 'string') {
      throw new Error('Invalid schema, envelope unions require a contentKey string')
    }
    for (const [key, type] of Object.entries(defn.representation.envelope.discriminantTable)) {
      str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
    }
    str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} envelope ${highlighter.punctuation('{')}`;
    str += `\n${indent}discriminantKey ${highlighter.string(`"${defn.representation.envelope.discriminantKey}"`)}`;
    str += `\n${indent}contentKey ${highlighter.string(`"${defn.representation.envelope.contentKey}"`)}`;
    str += `\n${highlighter.punctuation('}')}`;
  } else {
    throw new Error(`Invalid schema, unknown union representation type ${Object.keys(defn.representation)[0]}`)
  }

  return str
};
38910
+
38911
/**
 * Print an enum type definition as IPLD schema DSL text: one `| Member`
 * line per member, with a `("mapped")` suffix when the representation maps
 * the member to an explicit string/int value, and a ` representation int`
 * tail only for int-represented enums (string is the implied default).
 * @param {object} defn - enum definition with `members` and a `representation` map
 * @param {string} indent - indentation prefix for member lines
 * @param {object} highlighter - token-decorating callbacks
 * @returns {string}
 * @throws {Error} unless the definition carries a string or int representation map
 */
printTypeTerm.enum = function _enum (defn, indent, highlighter) {
  if (typeof defn.representation !== 'object') {
    throw new Error('Invalid schema, enum requires a "representation" map')
  }
  const repr = reprStrategy(defn);
  if (repr !== 'string' && repr !== 'int') {
    throw new Error('Invalid schema, enum requires a "string" or "int" representation map')
  }

  const { string: strMap, int: intMap } = defn.representation;
  let out = highlighter.punctuation('{');
  for (const member of defn.members) {
    out += `\n${indent}${highlighter.punctuation('|')} ${member}`;
    // Same lookup order as upstream: string mapping wins over int mapping.
    const mapped = (strMap && strMap[member]) || (intMap && intMap[member]);
    if (mapped !== undefined) {
      out += ` ${highlighter.punctuation('(')}${highlighter.string(`"${mapped}"`)}${highlighter.punctuation(')')}`;
    }
  }

  out += `\n${highlighter.punctuation('}')}`;
  if (intMap) {
    out += ` ${highlighter.builtin('representation')} int`;
  }
  return out
};
38937
+
38938
// Hasher used for IAMap node hashing when the caller does not supply one.
const DEFAULT_HASHER = sha256$2;
// sha2-256 digest length in bytes.
const DEFAULT_HASH_BYTES = 32;
// 5/3 seems to offer best perf characteristics in terms of raw speed
// (Filecoin found this too for their HAMT usage)
// but amount and size of garbage will change with different parameters
const DEFAULT_BITWIDTH = 5;
const DEFAULT_BUCKET_SIZE = 3;

// Shared decoder for converting stored Uint8Array keys back to strings
// in HashMap#keys()/#entries().
const textDecoder = new TextDecoder();
38947
+
38948
+ /**
38949
+ * @template V
38950
+ * @typedef {import('iamap').IAMap<V>} IAMap<V>
38951
+ */
38952
+ /**
38953
+ * @template V
38954
+ * @typedef {import('iamap').Store<V>} Store<V>
38955
+ */
38956
+ /**
38957
+ * @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher
38958
+ */
38959
+ /**
38960
+ * @template V
38961
+ * @typedef {import('./interface').HashMap<V>} HashMap<V>
38962
+ */
38963
+ /**
38964
+ * @template {number} Codec
38965
+ * @template V
38966
+ * @typedef {import('./interface').CreateOptions<Codec,V>} CreateOptions<Codec,V>
38967
+ */
38968
+ /**
38969
+ * @typedef {import('./interface').Loader} Loader<V>
38970
+ */
38971
+
38972
/**
 * @classdesc
 * An IPLD HashMap. Obtain instances through the async {@link HashMap.create}
 * or {@link HashMap.load} factory methods rather than the constructor.
 *
 * This is a thin IPLD-flavored wrapper around
 * [IAMap](https://github.com/rvagg/iamap), which implements the bulk of the
 * IPLD HashMap spec logic. IAMap itself is immutable — every mutation yields
 * a new IAMap — so this wrapper holds the latest IAMap in `this._iamap` and
 * swaps it out whenever a `set()` or `delete()` actually changes the
 * structure. Consequently the `cid` property reflects the current root and
 * changes with each effective mutation (a `set()` of an identical value or a
 * `delete()` of an absent key leaves it unchanged).
 *
 * With TypeScript typings, `HashMap` is generic over the value type `V`.
 *
 * @name HashMap
 * @template V
 * @implements {HashMap<V>}
 * @class
 * @hideconstructor
 * @property {CID} cid - The _current_ root CID of this HashMap.
 */
class HashMapImpl {
  /**
   * @ignore
   * @param {IAMap<V>} iamap - the current IAMap root to wrap
   */
  constructor (iamap) {
    // Replaced (never mutated) on each effective set()/delete().
    this._iamap = iamap;
  }

  /**
   * Fetch the value stored for `key`, or `undefined` when the key is absent.
   * @param {string|Uint8Array} key
   * @returns {Promise<V|undefined>}
   */
  async get (key) {
    return this._iamap.get(key)
  }

  /**
   * Whether `key` exists in this HashMap; equivalent to
   * `map.get(key) !== undefined`.
   * @param {string|Uint8Array} key
   * @returns {Promise<boolean>}
   */
  async has (key) {
    return this._iamap.has(key)
  }

  /**
   * Count the key/value pairs stored in this HashMap.
   * @returns {Promise<number>} a non-negative integer
   */
  async size () {
    return this._iamap.size()
  }

  /**
   * Store `value` (an IPLD-serializable object or a CID link) under `key`,
   * replacing any existing entry. An effective change produces a new root,
   * so `cid` will differ afterwards; storing an identical value is a no-op.
   * @param {string|Uint8Array} key
   * @param {V} value
   * @returns {Promise<void>}
   */
  async set (key, value) {
    this._iamap = await this._iamap.set(key, value);
  }

  /**
   * Remove the entry for `key` if present; a no-op otherwise. An effective
   * removal produces a new root, so `cid` will differ afterwards.
   * @param {string|Uint8Array} key
   * @returns {Promise<void>}
   */
  async delete (key) {
    this._iamap = await this._iamap.delete(key);
  }

  /**
   * Yield every stored value. This walks the whole structure, so large
   * collections may incur many block loads from the backing store.
   * @returns {AsyncIterable<V>}
   */
  async * values () {
    yield * this._iamap.values();
  }

  /**
   * Yield every key, decoded from its stored bytes to a string. Full
   * traversal, as with {@link HashMap#values}.
   * @returns {AsyncIterable<string>}
   */
  async * keys () {
    for await (const rawKey of this._iamap.keys()) {
      // IAMap stores keys as bytes; expose them as strings here.
      yield textDecoder.decode(rawKey);
    }
  }

  /**
   * Yield every key in its raw stored byte form. Full traversal.
   * @returns {AsyncIterable<Uint8Array>}
   */
  async * keysRaw () {
    yield * this._iamap.keys();
  }

  /**
   * Yield `[key, value]` tuples with string keys, in the style of
   * `Map#entries()`. Full traversal.
   * @returns {AsyncIterable<[string, V]>}
   */
  async * entries () {
    for await (const entry of this._iamap.entries()) {
      yield [textDecoder.decode(entry.key), entry.value];
    }
  }

  /**
   * Yield `[key, value]` tuples with raw byte keys. Full traversal.
   * @returns {AsyncIterable<[Uint8Array, V]>}
   */
  async * entriesRaw () {
    for await (const entry of this._iamap.entries()) {
      yield [entry.key, entry.value];
    }
  }

  /**
   * Yield the CID of every block that makes up this HashMap. Full traversal.
   * @returns {AsyncIterable<CID>}
   */
  async * cids () {
    yield * this._iamap.ids();
  }

  get cid () {
    return this._iamap.id
  }

  /**
   * Create a new, empty HashMap backed by `loader` — an object exposing
   * `get(cid)` returning a block's raw bytes and `put(cid, bytes)` storing
   * them.
   * @async
   * @template V
   * @template {number} Codec
   * @param {Loader} loader
   * @param {CreateOptions<Codec, V>} options
   * @returns {Promise<HashMap<V>>}
   */
  static async create (loader, options) {
    return _load(loader, null, options)
  }

  /**
   * Load an existing HashMap rooted at `root` from `loader`.
   * @async
   * @template V
   * @template {number} Codec
   * @param {Loader} loader
   * @param {CID} root - root CID of existing HashMap data
   * @param {CreateOptions<Codec, V>} options
   * @returns {Promise<HashMap<V>>}
   */
  static async load (loader, root, options) {
    return _load(loader, root, options)
  }
}
39266
+
39267
/**
 * Validate options, wire an IAMap-compatible block store onto `loader`, and
 * either create a fresh IAMap or load one rooted at `root`, wrapped in a
 * HashMapImpl.
 * @ignore
 * @template V
 * @template {number} Codec
 * @param {Loader} loader - object with `get(cid)` / `put(cid, bytes)` block access
 * @param {CID|null} root - existing root to load, or null to create empty
 * @param {CreateOptions<Codec, V>} options - requires blockCodec and blockHasher;
 * hasher, hashBytes, bitWidth and bucketSize are optional with defaults
 * @returns {Promise<HashMap<V>>}
 * @throws {TypeError} on a missing/malformed loader or option
 */
async function _load (loader, root, options) {
  // Normalize root: null stays null (create), anything CID-like becomes a CID.
  const cid = CID$1.asCID(root);

  if (!loader || typeof loader.get !== 'function' || typeof loader.put !== 'function') {
    throw new TypeError('\'loader\' object with get() and put() methods is required')
  }

  if (typeof options !== 'object') {
    throw new TypeError('An \'options\' argument is required')
  }

  // blockCodec encodes/decodes the IPLD blocks themselves (e.g. dag-cbor).
  if (!('blockCodec' in options) ||
    typeof options.blockCodec !== 'object' ||
    typeof options.blockCodec.code !== 'number' ||
    typeof options.blockCodec.encode !== 'function' ||
    typeof options.blockCodec.decode !== 'function') {
    throw new TypeError('A valid \'blockCodec\' option is required')
  }
  const codec = options.blockCodec;
  // blockHasher produces the multihash for each stored block's CID.
  if (!('blockHasher' in options) ||
    typeof options.blockHasher !== 'object' ||
    typeof options.blockHasher.digest !== 'function' ||
    typeof options.blockHasher.code !== 'number') {
    throw new TypeError('A valid \'blockHasher\' option is required')
  }
  const hasher = options.blockHasher;

  /**
   * Hasher used for the HAMT's internal key hashing (distinct from the
   * block-level hasher above); defaults to sha2-256.
   * @ignore
   * @type {MultihashHasher}
   */
  const hamtHasher = (() => {
    if ('hasher' in options) {
      if (typeof options.hasher !== 'object' ||
        typeof options.hasher.digest !== 'function' ||
        typeof options.hasher.code !== 'number') {
        throw new TypeError('\'hasher\' option must be a Multihasher')
      }
      return options.hasher
    }
    return DEFAULT_HASHER
  })();
  const hashBytes = (() => {
    if ('hashBytes' in options) {
      if (typeof options.hashBytes !== 'number') {
        throw new TypeError('\'hashBytes\' option must be a number')
      }
      /* c8 ignore next 2 */
      return options.hashBytes
    }
    return DEFAULT_HASH_BYTES
  })();
  /**
   * @ignore
   * @param {Uint8Array} bytes
   */
  const hashFn = async (bytes) => {
    // NOTE(review): this always digests with sha256$2 even when a custom
    // 'hasher' option was supplied, yet it is registered under
    // hamtHasher.code below — confirm whether custom hashers are intended
    // to be supported here.
    const hash = await sha256$2.digest(bytes);
    return hash.digest
  };
  registerHasher_1(hamtHasher.code, hashBytes, hashFn);

  const bitWidth = (() => {
    if ('bitWidth' in options) {
      if (typeof options.bitWidth !== 'number') {
        throw new TypeError('\'bitWidth\' option must be a number')
      }
      return options.bitWidth
    }
    return DEFAULT_BITWIDTH
  })();

  const bucketSize = (() => {
    if ('bucketSize' in options) {
      if (typeof options.bucketSize !== 'number') {
        throw new TypeError('\'bucketSize\' option must be a number')
      }
      return options.bucketSize
    }
    return DEFAULT_BUCKET_SIZE
  })();

  const iamapOptions = { hashAlg: hamtHasher.code, bitWidth, bucketSize };

  // Adapter translating IAMap's Store interface onto the provided loader.
  const store = {
    /**
     * Load, decode and schema-validate the block at `cid`.
     * @ignore
     * @param {CID} cid
     * @returns {Promise<V>}
     */
    async load (cid) {
      const bytes = await loader.get(cid);
      if (!bytes) {
        throw new Error(`Could not load block for: ${cid}`)
      }
      // create() validates the block for us
      const block = await create$7({ bytes, cid, hasher, codec });
      validateBlock(block.value);
      return block.value
    },

    /**
     * Schema-validate, encode and persist `value`; returns its CID.
     * @ignore
     * @param {V} value
     * @returns {Promise<CID>}
     */
    async save (value) {
      validateBlock(value);
      const block = await encode$7({ value, codec, hasher });
      await loader.put(block.cid, block.bytes);
      return block.cid
    },

    /**
     * @ignore
     * @param {CID} cid1
     * @param {CID} cid2
     * @returns {boolean}
     */
    isEqual (cid1, cid2) {
      return cid1.equals(cid2)
    },

    /**
     * @ignore
     * @param {any} obj
     * @returns {boolean}
     */
    isLink (obj) {
      return CID$1.asCID(obj) != null
    }
  };

  let iamap;
  if (cid) {
    // load existing, ignoring bitWidth & bucketSize, they are loaded from the existing root
    iamap = await load_1(store, cid);
  } else {
    // create new
    iamap = await create_1(store, iamapOptions);
  }

  return new HashMapImpl(iamap)
}
39420
+
39421
/**
 * Assert that a decoded block matches either the HashMap root or node schema.
 * @ignore
 * @param {any} block - decoded block value to validate
 * @throws {Error} when the block fits neither schema; the message includes a
 * printed schema description of what the block actually looks like
 */
function validateBlock (block) {
  if (HashMapNode(block) || HashMapRoot(block)) {
    return
  }
  const description = print(describe(block).schema);
  throw new Error(`Internal error: unexpected layout for HashMap block does not match schema, got:\n${description}`)
}
39431
+
39432
// Public aliases for the HashMap factory entry points.
const create$1 = HashMapImpl.create;
const load$1 = HashMapImpl.load;
39434
+
39435
+ // @ts-nocheck
39436
+ // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
39437
+ // MIT License Copyright (c) 2020 Dumitru Uzun
39438
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
39439
+ // of this software and associated documentation files (the "Software"), to deal
39440
+ // in the Software without restriction, including without limitation the rights
39441
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
39442
+ // copies of the Software, and to permit persons to whom the Software is
39443
+ // furnished to do so, subject to the following conditions:
39444
+
39445
+ // The above copyright notice and this permission notice shall be included in all
39446
+ // copies or substantial portions of the Software.
39447
+
39448
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
39449
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
39450
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
39451
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
39452
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
39453
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
39454
+ // SOFTWARE.
39455
+
39456
+ // import {
39457
+ // isLittleEndian, switchEndianness32
39458
+ // } from 'string-encode'
39459
+
39460
/**
 * Synchronous SHA1 over a byte array.
 *
 * NOTE: the 20-byte result exposes the five 32-bit state words in the
 * platform's NATIVE byte order (the endianness-swap code from the upstream
 * source is commented out), so on little-endian machines each word's bytes
 * are reversed relative to the standard big-endian SHA-1 digest. Callers in
 * this file only use it as a stable fingerprint, so this is fine as long as
 * it stays consistent.
 *
 * @param {Uint8Array} b Data to hash
 *
 * @return {Uint8Array} sha1 hash (native-endian words)
 */
function rawSha1 (b) {
  const msgLen = b.byteLength;

  // Pad to whole 512-bit blocks: message + 0x80 terminator + 64-bit length
  // field. `(x | 15) + 1` rounds the word count up to a multiple of 16.
  const totalWords = ((msgLen / 4 + 2) | 15) + 1;
  const words = new Uint32Array(totalWords);
  for (let i = 0; i < msgLen; i++) {
    // Big-endian byte packing into 32-bit message words.
    words[i >> 2] |= b[i] << ((3 - (i & 3)) << 3);
  }
  words[msgLen >> 2] |= 0x80 << ((3 - (msgLen & 3)) << 3);
  // Bit length goes in the final word (low 32 bits; the high word stays 0).
  words[totalWords - 1] = msgLen * 8;

  const H = Uint32Array.from([0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0]);
  const W = new Uint32Array(80);

  for (let base = 0; base < totalWords; base += 16) {
    let a = H[0];
    let bb = H[1];
    let c = H[2];
    let d = H[3];
    let e = H[4];

    for (let t = 0; t < 80; t++) {
      if (t < 16) {
        W[t] = words[base + t];
      } else {
        const x = W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16];
        W[t] = (x << 1) | (x >>> 31); // rotl(x, 1)
      }

      // Standard per-round nonlinear function and additive constant.
      let f;
      let k;
      if (t < 20) {
        f = (bb & c) | (~bb & d);
        k = 0x5A827999;
      } else if (t < 40) {
        f = bb ^ c ^ d;
        k = 0x6ED9EBA1;
      } else if (t < 60) {
        f = (bb & c) | (bb & d) | (c & d);
        k = 0x8F1BBCDC;
      } else {
        f = bb ^ c ^ d;
        k = 0xCA62C1D6;
      }

      const next = (((a << 5) | (a >>> 27)) + f + e + k + W[t]) | 0;
      e = d;
      d = c;
      c = (bb << 30) | (bb >>> 2); // rotl(b, 30)
      bb = a;
      a = next;
    }

    // Uint32Array element assignment wraps mod 2^32 for us.
    H[0] += a;
    H[1] += bb;
    H[2] += c;
    H[3] += d;
    H[4] += e;
  }

  return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
}
39518
+
39519
// Content-defined chunker for prolly-tree nodes. NOTE(review): bf(30)
// presumably sets the chunking/branching factor — confirm against the
// prolly-trees documentation.
const chunker = bf(30);

// Shared prolly-tree block options: no caching, the chunker above, the
// dag-cbor codec (codec$1) and sha2-256 hashing with a simple comparator.
const blockOpts$1 = { cache: nocache, chunker, codec: codec$1, hasher: sha256$2, compare: simpleCompare };

// Escape hatch to disable at-rest encryption; only consulted in environments
// where a `process` global exists (i.e. Node, not the browser bundle).
const NO_ENCRYPT = typeof process !== 'undefined' && !!process.env?.NO_ENCRYPT;
// ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT
39525
+
39526
+ class Valet {
39527
+ idb = null
39528
+ name = null
39529
+ uploadQueue = null
39530
+ alreadyEnqueued = new Set()
39531
+ keyMaterial = null
39532
+ keyId = 'null'
39533
+ valetRoot = null
39534
+ valetRootCid = null // set by hydrate
39535
+ valetRootCarCid = null // most recent diff
39536
+
39537
+ valetCidBlocks = new VMemoryBlockstore()
39538
+ instanceId = Math.random().toString(36).slice(2)
39539
+
39540
+ /**
39541
+ * Function installed by the database to upload car files
39542
+ * @type {null|function(string, Uint8Array):Promise<void>}
39543
+ */
39544
+ uploadFunction = null
39545
+
39546
+ constructor (name = 'default', keyMaterial) {
39547
+ this.name = name;
39548
+ this.setKeyMaterial(keyMaterial);
39549
+ this.uploadQueue = cargoQueue(async (tasks, callback) => {
39550
+ // console.log(
39551
+ // 'queue worker',
39552
+ // tasks.length,
39553
+ // tasks.reduce((acc, t) => acc + t.value.length, 0)
39554
+ // )
39555
+ if (this.uploadFunction) {
39556
+ // todo we can coalesce these into a single car file
39557
+ return await this.withDB(async db => {
39558
+ for (const task of tasks) {
39559
+ await this.uploadFunction(task.carCid, task.value);
39560
+ // update the indexedb to mark this car as no longer pending
39561
+ const carMeta = await db.get('cidToCar', task.carCid);
39562
+ delete carMeta.pending;
39563
+ await db.put('cidToCar', carMeta);
39564
+ }
39565
+ })
39566
+ }
39567
+ callback();
39568
+ });
39569
+
39570
+ this.uploadQueue.drain(async () => {
39571
+ return await this.withDB(async db => {
39572
+ const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
39573
+ for (const carKey of carKeys) {
39574
+ await this.uploadFunction(carKey, await db.get('cars', carKey));
39575
+ const carMeta = await db.get('cidToCar', carKey);
39576
+ delete carMeta.pending;
39577
+ await db.put('cidToCar', carMeta);
39578
+ }
39579
+ })
39580
+ });
39581
+ }
39582
+
39583
+ getKeyMaterial () {
39584
+ return this.keyMaterial
39585
+ }
39586
+
39587
+ setKeyMaterial (km) {
39588
+ if (km && !NO_ENCRYPT) {
39589
+ const hex = Uint8Array.from(Buffer$G.from(km, 'hex'));
39590
+ this.keyMaterial = km;
39591
+ const hash = rawSha1(hex);
39592
+ this.keyId = Buffer$G.from(hash).toString('hex');
39593
+ } else {
39594
+ this.keyMaterial = null;
39595
+ this.keyId = 'null';
39596
+ }
39597
+ // console.trace('keyId', this.name, this.keyId)
39598
+ }
39599
+
39600
+ /**
39601
+ * Group the blocks into a car and write it to the valet.
39602
+ * @param {import('./blockstore.js').InnerBlockstore} innerBlockstore
39603
+ * @param {Set<string>} cids
39604
+ * @returns {Promise<void>}
39605
+ * @memberof Valet
39606
+ */
39607
+ async writeTransaction (innerBlockstore, cids) {
39608
+ if (innerBlockstore.lastCid) {
39609
+ if (this.keyMaterial) {
39610
+ // console.log('encrypting car', innerBlockstore.label)
39611
+ // should we pass cids in instead of iterating frin innerBlockstore?
39612
+ const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
39613
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
39614
+ } else {
39615
+ const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
39616
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
39617
+ }
39618
+ } else {
39619
+ throw new Error('missing lastCid for car header')
39620
+ }
39621
+ }
39622
+
39623
+ withDB = async dbWorkFun => {
39624
+ if (!this.idb) {
39625
+ this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
39626
+ upgrade (db, oldVersion, newVersion, transaction) {
39627
+ if (oldVersion < 1) {
39628
+ db.createObjectStore('cars'); // todo use database name
39629
+ const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
39630
+ cidToCar.createIndex('cids', 'cids', { multiEntry: true });
39631
+ }
39632
+ if (oldVersion < 2) {
39633
+ const cidToCar = transaction.objectStore('cidToCar');
39634
+ cidToCar.createIndex('pending', 'pending');
39635
+ }
39636
+ }
39637
+ });
39638
+ }
39639
+ return await dbWorkFun(this.idb)
39640
+ }
39641
+
39642
+ /**
39643
+ * Iterate over all blocks in the store.
39644
+ *
39645
+ * @yields {{cid: string, value: Uint8Array}}
39646
+ * @returns {AsyncGenerator<any, any, any>}
39647
+ */
39648
+ async * cids () {
39649
+ // console.log('valet cids')
39650
+ const db = await this.withDB(async db => db);
39651
+ const tx = db.transaction(['cidToCar'], 'readonly');
39652
+ let cursor = await tx.store.openCursor();
39653
+ while (cursor) {
39654
+ yield { cid: cursor.key, car: cursor.value.car };
39655
+ cursor = await cursor.continue();
39656
+ }
39657
+ }
39658
+
39659
+ setRootCarCid (cid) {
39660
+ this.valetRootCarCid = cid;
39661
+ this.valetRoot = null;
39662
+ this.valetRootCid = null;
39663
+ }
39664
+
39665
+ async getCarCIDForCID (cid) {
39666
+ // make a car reader for this.valetRootCarCid
39667
+ if (!this.valetRootCarCid) return
39668
+
39669
+ let indexNode;
39670
+ if (this.valetRoot) {
39671
+ indexNode = this.valetRoot;
39672
+ } else {
39673
+ const combinedReader = await this.getCombinedReader(this.valetRootCarCid);
39674
+ if (!this.valetRootCid) {
39675
+ const root = combinedReader.root.cid;
39676
+ // console.log('roots', this.instanceId, this.name, root, this.valetRootCarCid, this.valetRootCid)
39677
+ this.valetRootCid = root;
39678
+ }
39679
+ indexNode = await load$1(combinedReader, this.valetRootCid, {
39680
+ blockHasher: blockOpts$1.hasher,
39681
+ blockCodec: blockOpts$1.codec
39682
+ });
39683
+ }
39684
+
39685
+ const got = await indexNode.get(cid);
39686
+ // console.log('getCarCIDForCID', cid, got)
39687
+ return { result: got }
39688
+ }
39689
+
39690
+ async OLDgetCarCIDForCID (cid) {
39691
+ const carCid = await this.withDB(async db => {
39692
+ const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
39693
+ const indexResp = await tx.objectStore('cidToCar').index('cids').get(cid);
39694
+ return indexResp?.car
39695
+ });
39696
+ return { result: carCid }
39697
+ }
39698
+
39699
+ async getCombinedReader (carCid) {
39700
+ let carMapReader;
39701
+ if (this.valetRootCarCid) {
39702
+ // todo only need this if we are cold starting
39703
+ carMapReader = await this.getCarReader(this.valetRootCarCid);
39704
+ }
39705
+
39706
+ const theseValetCidBlocks = this.valetCidBlocks;
39707
+ // console.log('theseValetCidBlocks', theseValetCidBlocks)
39708
+ const combinedReader = {
39709
+ root: carMapReader?.root,
39710
+ put: async (cid, bytes) => {
39711
+ // console.log('mapPut', cid, bytes.length)
39712
+ return await theseValetCidBlocks.put(cid, bytes)
39713
+ },
39714
+ get: async cid => {
39715
+ // console.log('mapGet', cid)
39716
+ try {
39717
+ const got = await theseValetCidBlocks.get(cid);
39718
+ return got.bytes
39719
+ } catch (e) {
39720
+ // console.log('get from car', cid, carMapReader)
39721
+ if (!carMapReader) throw e
39722
+ const bytes = await carMapReader.get(cid);
39723
+ await theseValetCidBlocks.put(cid, bytes);
39724
+ // console.log('mapGet', cid, bytes.length, bytes.constructor.name)
39725
+ return bytes
39726
+ }
39727
+ }
39728
+ };
39729
+ return combinedReader
39730
+ }
39731
+
39732
+ /**
39733
+ *
39734
+ * @param {string} carCid
39735
+ * @param {*} value
39736
+ */
39737
+ async parkCar (carCid, value, cids) {
39738
+ // console.log('parkCar', this.instanceId, this.name, carCid, cids)
39739
+ const combinedReader = await this.getCombinedReader(carCid);
39740
+ const mapNode = await addCidsToCarIndex(
39741
+ combinedReader,
39742
+ this.valetRoot,
39743
+ this.valetRootCid,
39744
+ Array.from(cids).map(cid => ({ key: cid.toString(), value: carCid.toString() }))
39745
+ );
39746
+
39747
+ this.valetRoot = mapNode;
39748
+ this.valetRootCid = mapNode.cid;
39749
+ // make a block set with all the cids of the map
39750
+ const saveValetBlocks = new VMemoryBlockstore(); // todo this blockstore should read from the last valetCid car also
39751
+
39752
+ for await (const cidx of mapNode.cids()) {
39753
+ const bytes = await combinedReader.get(cidx);
39754
+ saveValetBlocks.put(cidx, bytes);
39755
+ }
39756
+ let newValetCidCar;
39757
+ if (this.keyMaterial) {
39758
+ newValetCidCar = await blocksToEncryptedCarBlock(this.valetRootCid, saveValetBlocks, this.keyMaterial);
39759
+ } else {
39760
+ newValetCidCar = await blocksToCarBlock(this.valetRootCid, saveValetBlocks);
39761
+ }
39762
+ // console.log('newValetCidCar', this.name, Math.floor(newValetCidCar.bytes.length / 1024))
39763
+ await this.withDB(async db => {
39764
+ const tx = db.transaction(['cars'], 'readwrite');
39765
+ await tx.objectStore('cars').put(value, carCid.toString());
39766
+ if (newValetCidCar) {
39767
+ if (this.valetRootCarCid) ;
39768
+ await tx.objectStore('cars').put(newValetCidCar.bytes, newValetCidCar.cid.toString());
39769
+ }
39770
+ return await tx.done
39771
+ });
39772
+ this.valetRootCarCid = newValetCidCar.cid; // goes to clock
39773
+
39774
+ // console.log('parked car', carCid, value.length, Array.from(cids))
39775
+ // upload to web3.storage if we have credentials
39776
+ if (this.uploadFunction) {
39777
+ if (this.alreadyEnqueued.has(carCid)) {
39778
+ // console.log('already enqueued', carCid)
39779
+ return
39780
+ }
39781
+ // don't await this, it will be done in the queue
39782
+ // console.log('add to queue', carCid, value.length)
39783
+ this.uploadQueue.push({ carCid, value });
39784
+ this.alreadyEnqueued.add(carCid);
39785
+ }
39786
+ }
39787
+
39788
+ remoteBlockFunction = null
39789
+
39790
+ async getCarReader (carCid) {
39791
+ carCid = carCid.toString();
39792
+ const carBytes = await this.withDB(async db => {
39793
+ const tx = db.transaction(['cars'], 'readonly');
39794
+ // console.log('getCarReader', carCid)
39795
+ return await tx.objectStore('cars').get(carCid)
39796
+ });
39797
+ const reader = await CarReader.fromBytes(carBytes);
39798
+ if (this.keyMaterial) {
39799
+ const roots = await reader.getRoots();
39800
+ const readerGetWithCodec = async cid => {
39801
+ const got = await reader.get(cid);
39802
+ // console.log('got.', cid.toString())
39803
+ let useCodec = codec;
39804
+ if (cid.toString().indexOf('bafy') === 0) {
39805
+ // todo cleanup types
39806
+ useCodec = codec$1;
39807
+ }
39808
+ const decoded = await decode$9({
39809
+ ...got,
39810
+ codec: useCodec,
39811
+ hasher: sha256$2
39812
+ });
39813
+ // console.log('decoded', decoded.value)
39814
+ return decoded
39815
+ };
39816
+ const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
39817
+
39818
+ // last block is the root ???
39819
+ const rootBlock = blocks[blocks.length - 1];
39820
+
39821
+ return {
39822
+ root: rootBlock,
39823
+ get: async dataCID => {
39824
+ // console.log('getCarReader dataCID', dataCID)
39825
+ dataCID = dataCID.toString();
39826
+ const block = blocks.find(b => b.cid.toString() === dataCID);
39827
+ // console.log('getCarReader block', block)
39828
+ if (block) {
39829
+ return block.bytes
39830
+ }
39831
+ }
39832
+ }
39833
+ } else {
39834
+ return {
39835
+ root: reader.getRoots()[0],
39836
+ get: async dataCID => {
39837
+ const gotBlock = await reader.get(CID$1.parse(dataCID));
39838
+ if (gotBlock) {
39839
+ return gotBlock.bytes
39840
+ }
39841
+ }
39842
+ }
39843
+ }
39844
+ }
39845
+
39846
+ // todo memoize this
39847
+ async getValetBlock (dataCID) {
39848
+ // console.log('get valet block', dataCID)
39849
+ const { result: carCid } = await this.getCarCIDForCID(dataCID);
39850
+ if (!carCid) {
39851
+ throw new Error('Missing block: ' + dataCID)
39852
+ }
39853
+ const reader = await this.getCarReader(carCid);
39854
+ return await reader.get(dataCID)
39855
+ }
39856
+ }
39857
+
39858
+ const blocksToCarBlock = async (rootCids, blocks) => {
39859
+ // console.log('blocksToCarBlock', rootCids, blocks.constructor.name)
39860
+ let size = 0;
39861
+ if (!Array.isArray(rootCids)) {
39862
+ rootCids = [rootCids];
39863
+ }
39864
+ const headerSize = headerLength({ roots: rootCids });
39865
+ size += headerSize;
39866
+ if (!Array.isArray(blocks)) {
39867
+ blocks = Array.from(blocks.entries());
39868
+ }
39869
+ for (const { cid, bytes } of blocks) {
39870
+ // console.log(cid, bytes)
39871
+ size += blockLength({ cid, bytes });
39872
+ }
39873
+ const buffer = new Uint8Array(size);
39874
+ const writer = await createWriter(buffer, { headerSize });
39875
+
39876
+ for (const cid of rootCids) {
39877
+ writer.addRoot(cid);
39878
+ }
39879
+
39880
+ for (const { cid, bytes } of blocks) {
39881
+ writer.write({ cid, bytes });
39882
+ }
39883
+ await writer.close();
39884
+ return await encode$7({ value: writer.bytes, hasher: sha256$2, codec: raw })
39885
+ };
39886
+
39887
+ const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
37280
39888
  const encryptionKey = Buffer$G.from(keyMaterial, 'hex');
37281
39889
  const encryptedBlocks = [];
37282
39890
  const theCids = [];
@@ -37334,6 +39942,58 @@ const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
37334
39942
  }
37335
39943
  };
37336
39944
 
39945
+ const addCidsToCarIndex = async (blockstore, valetRoot, valetRootCid, bulkOperations) => {
39946
+ let indexNode;
39947
+ if (valetRootCid) {
39948
+ if (valetRoot) {
39949
+ indexNode = valetRoot;
39950
+ } else {
39951
+ indexNode = await load$1(blockstore, valetRootCid, { blockHasher: blockOpts$1.hasher, blockCodec: blockOpts$1.codec });
39952
+ }
39953
+ } else {
39954
+ indexNode = await create$1(blockstore, {
39955
+ bitWidth: 4,
39956
+ bucketSize: 2,
39957
+ blockHasher: blockOpts$1.hasher,
39958
+ blockCodec: blockOpts$1.codec
39959
+ });
39960
+ }
39961
+ // console.log('adding', bulkOperations.length, 'cids to index')
39962
+ for (const { key, value } of bulkOperations) {
39963
+ // console.log('adding', key, value)
39964
+ await indexNode.set(key, value);
39965
+ }
39966
+ return indexNode
39967
+ };
39968
+
39969
+ class VMemoryBlockstore {
39970
+ /** @type {Map<string, Uint8Array>} */
39971
+ blocks = new Map()
39972
+ instanceId = Math.random().toString(36).slice(2)
39973
+
39974
+ async get (cid) {
39975
+ const bytes = this.blocks.get(cid.toString());
39976
+ // console.log('getvm', bytes.constructor.name, this.instanceId, cid, bytes && bytes.length)
39977
+ if (bytes.length === 253) ;
39978
+ if (!bytes) throw new Error('block not found ' + cid.toString())
39979
+ return { cid, bytes }
39980
+ }
39981
+
39982
+ /**
39983
+ * @param {import('../src/link').AnyLink} cid
39984
+ * @param {Uint8Array} bytes
39985
+ */
39986
+ async put (cid, bytes) {
39987
+ this.blocks.set(cid.toString(), bytes);
39988
+ }
39989
+
39990
+ * entries () {
39991
+ for (const [str, bytes] of this.blocks) {
39992
+ yield { cid: parse(str), bytes };
39993
+ }
39994
+ }
39995
+ }
39996
+
37337
39997
  // const sleep = ms => new Promise(r => setTimeout(r, ms))
37338
39998
 
37339
39999
  const husherMap = new Map();
@@ -37412,7 +40072,7 @@ class TransactionBlockstore {
37412
40072
  // console.log('committedGet: ' + key + ' ' + this.instanceId, old.length)
37413
40073
  if (old) return old
37414
40074
  if (!this.valet) throw new Error('Missing block: ' + key)
37415
- const got = await this.valet.getBlock(key);
40075
+ const got = await this.valet.getValetBlock(key);
37416
40076
  this.committedBlocks.set(key, got);
37417
40077
  return got
37418
40078
  }
@@ -37627,7 +40287,7 @@ const makeGetBlock = blocks => {
37627
40287
  // const { cid, bytes } = await withLog(address, () => blocks.get(address))
37628
40288
  const { cid, bytes } = await blocks.get(address);
37629
40289
  // cids.add({ address: cid })
37630
- return create$5({ cid, bytes, hasher: sha256$2, codec: codec$1 })
40290
+ return create$7({ cid, bytes, hasher: sha256$2, codec: codec$1 })
37631
40291
  };
37632
40292
  return {
37633
40293
  // cids,
@@ -37754,7 +40414,7 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
37754
40414
  const event = await events.get(ancestor);
37755
40415
  const { root } = event.value.data;
37756
40416
  if (root) {
37757
- return load$2({ cid: root, get: getBlock, ...blockOpts })
40417
+ return load$4({ cid: root, get: getBlock, ...blockOpts })
37758
40418
  } else {
37759
40419
  // console.log('no root', root) // false means no common ancestor. null means empty database.
37760
40420
  return root
@@ -37808,7 +40468,7 @@ const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
37808
40468
  if (bulkOperations.every(op => op.del)) {
37809
40469
  return { root: null, blocks: [], clockCIDs: await events.all() }
37810
40470
  }
37811
- for await (const node of create$3({ get: getBlock, list: bulkOperations, ...blockOpts })) {
40471
+ for await (const node of create$5({ get: getBlock, list: bulkOperations, ...blockOpts })) {
37812
40472
  // root = await node.block
37813
40473
  root = node;
37814
40474
  newBlocks.push(await node.block);
@@ -38006,7 +40666,7 @@ async function visMerkleTree (blocks, head) {
38006
40666
  // if (!head) return
38007
40667
  if (head && !Array.isArray(head)) {
38008
40668
  const getBl = makeGetBlock(blocks);
38009
- const prollyRootNode = await load$2({
40669
+ const prollyRootNode = await load$4({
38010
40670
  cid: head,
38011
40671
  get: getBl.getBlock,
38012
40672
  ...blockOpts
@@ -38290,7 +40950,6 @@ const parseCID = cid => (typeof cid === 'string' ? CID$1.parse(cid) : cid);
38290
40950
  * This is the main class for saving and loading JSON and other documents with the database. You can find additional examples and
38291
40951
  * usage guides in the repository README.
38292
40952
  *
38293
- * @param {import('./blockstore.js').TransactionBlockstore} blocks - The block storage instance to use documents and indexes
38294
40953
  * @param {CID[]} clock - The Merkle clock head to use for the Fireproof instance.
38295
40954
  * @param {object} [config] - Optional configuration options for the Fireproof instance.
38296
40955
  * @param {object} [authCtx] - Optional authorization context object to use for any authentication checks.
@@ -38302,10 +40961,11 @@ class Database {
38302
40961
  rootCache = null
38303
40962
  eventsCache = new Map()
38304
40963
 
38305
- constructor (blocks, clock, config = {}) {
38306
- this.name = config.name;
40964
+ constructor (name, clock, config = {}) {
40965
+ this.name = name;
38307
40966
  this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
38308
- this.blocks = blocks;
40967
+ this.blocks = new TransactionBlockstore(name, config.key);
40968
+ this.indexBlocks = new TransactionBlockstore(name + '.indexes', config.key);
38309
40969
  this.clock = clock;
38310
40970
  this.config = config;
38311
40971
  }
@@ -38322,6 +40982,8 @@ class Database {
38322
40982
  clock: this.clockToJSON(),
38323
40983
  name: this.name,
38324
40984
  key: this.blocks.valet?.getKeyMaterial(),
40985
+ car: this.blocks.valet?.valetRootCarCid.toString(),
40986
+ indexCar: this.indexBlocks.valet?.valetRootCarCid?.toString(),
38325
40987
  indexes: [...this.indexes.values()].map(index => index.toJSON())
38326
40988
  }
38327
40989
  }
@@ -38336,11 +40998,14 @@ class Database {
38336
40998
  return (clock || this.clock).map(cid => cid.toString())
38337
40999
  }
38338
41000
 
38339
- hydrate ({ clock, name, key }) {
41001
+ hydrate ({ clock, name, key, car, indexCar }) {
38340
41002
  this.name = name;
38341
41003
  this.clock = clock;
38342
41004
  this.blocks.valet?.setKeyMaterial(key);
38343
- this.indexBlocks = null;
41005
+ this.blocks.valet?.setRootCarCid(car); // maybe
41006
+ this.indexBlocks.valet?.setKeyMaterial(key);
41007
+ this.indexBlocks.valet?.setRootCarCid(indexCar); // maybe
41008
+ // this.indexBlocks = null
38344
41009
  }
38345
41010
 
38346
41011
  maybeSaveClock () {
@@ -38379,7 +41044,7 @@ class Database {
38379
41044
  let rows, dataCIDs, clockCIDs;
38380
41045
  // if (!aClock) aClock = []
38381
41046
  if (aClock && aClock.length > 0) {
38382
- aClock = aClock.map((cid) => cid.toString());
41047
+ aClock = aClock.map(cid => cid.toString());
38383
41048
  const eventKey = JSON.stringify([...this.clockToJSON(aClock), ...this.clockToJSON()]);
38384
41049
 
38385
41050
  let resp;
@@ -38547,6 +41212,7 @@ class Database {
38547
41212
  * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
38548
41213
  */
38549
41214
  async putToProllyTree (decodedEvent, clock = null) {
41215
+ // console.log('putToProllyTree', decodedEvent)
38550
41216
  const event = encodeEvent(decodedEvent);
38551
41217
  if (clock && JSON.stringify(this.clockToJSON(clock)) !== JSON.stringify(this.clockToJSON())) {
38552
41218
  // console.log('this.clock', this.clockToJSON())
@@ -38664,7 +41330,9 @@ class Database {
38664
41330
 
38665
41331
  async function cidsToProof (cids) {
38666
41332
  if (!cids) return []
38667
- if (!cids.all) { return [...cids] }
41333
+ if (!cids.all) {
41334
+ return [...cids]
41335
+ }
38668
41336
 
38669
41337
  const all = await cids.all();
38670
41338
  return [...all].map(cid => cid.toString())
@@ -38914,14 +41582,14 @@ const create = opts => {
38914
41582
  ...defaults,
38915
41583
  ...opts
38916
41584
  };
38917
- return create$3(opts);
41585
+ return create$5(opts);
38918
41586
  };
38919
41587
  const load = opts => {
38920
41588
  opts = {
38921
41589
  ...defaults,
38922
41590
  ...opts
38923
41591
  };
38924
- return load$2(opts);
41592
+ return load$4(opts);
38925
41593
  };
38926
41594
 
38927
41595
  // @ts-ignore
@@ -38976,21 +41644,21 @@ const makeDoc = ({ key, value }) => ({ _id: key, ...value });
38976
41644
  */
38977
41645
  const indexEntriesForChanges = (changes, mapFn) => {
38978
41646
  const indexEntries = [];
38979
- changes.forEach(({ key, value, del }) => {
41647
+ changes.forEach(({ key: _id, value, del }) => {
38980
41648
  // key is _id, value is the document
38981
41649
  if (del || !value) return
38982
41650
  let mapCalled = false;
38983
- const mapReturn = mapFn(makeDoc({ key, value }), (k, v) => {
41651
+ const mapReturn = mapFn(makeDoc({ key: _id, value }), (k, v) => {
38984
41652
  mapCalled = true;
38985
41653
  if (typeof k === 'undefined') return
38986
41654
  indexEntries.push({
38987
- key: [charwise.encode(k), key],
41655
+ key: [charwise.encode(k), _id],
38988
41656
  value: v || null
38989
41657
  });
38990
41658
  });
38991
41659
  if (!mapCalled && mapReturn) {
38992
41660
  indexEntries.push({
38993
- key: [charwise.encode(mapReturn), key],
41661
+ key: [charwise.encode(mapReturn), _id],
38994
41662
  value: null
38995
41663
  });
38996
41664
  }
@@ -39014,12 +41682,6 @@ class DbIndex {
39014
41682
  */
39015
41683
  constructor (database, name, mapFn, clock = null, opts = {}) {
39016
41684
  this.database = database;
39017
- if (!database.indexBlocks) {
39018
- database.indexBlocks = new TransactionBlockstore(
39019
- database?.name + '.indexes',
39020
- database.blocks.valet?.getKeyMaterial()
39021
- );
39022
- }
39023
41685
  if (typeof name === 'function') {
39024
41686
  // app is using deprecated API, remove in 0.7
39025
41687
  opts = clock || {};
@@ -39172,7 +41834,14 @@ class DbIndex {
39172
41834
  await loadIndex(this.database.indexBlocks, this.indexByKey, dbIndexOpts);
39173
41835
  if (!this.indexByKey.root) return { result: [] }
39174
41836
  if (query.includeDocs === undefined) query.includeDocs = this.includeDocsDefault;
39175
- if (query.range) {
41837
+ if (query.prefix) {
41838
+ // ensure prefix is an array
41839
+ if (!Array.isArray(query.prefix)) query.prefix = [query.prefix];
41840
+ const start = [...query.prefix, NaN];
41841
+ const end = [...query.prefix, Infinity];
41842
+ const prefixRange = [start, end].map(key => charwise.encode(key));
41843
+ return await this.applyQuery(await this.indexByKey.root.range(...prefixRange), query)
41844
+ } else if (query.range) {
39176
41845
  const encodedRange = query.range.map(key => charwise.encode(key));
39177
41846
  return await this.applyQuery(await this.indexByKey.root.range(...encodedRange), query)
39178
41847
  } else if (query.key) {
@@ -41339,7 +44008,7 @@ class Sync {
41339
44008
  destroy () {
41340
44009
  this.database.blocks.syncs.delete(this);
41341
44010
  this.status = 'destroyed';
41342
- this.peer.destroy();
44011
+ // this.peer.destroy() todo
41343
44012
  }
41344
44013
 
41345
44014
  async sendUpdate (blockstore) {
@@ -41430,22 +44099,29 @@ class Fireproof {
41430
44099
  static storage = (name = null, opts = {}) => {
41431
44100
  if (name) {
41432
44101
  opts.name = name;
44102
+ // todo this can come from a registry also
41433
44103
  const existing = localGet('fp.' + name);
41434
44104
  if (existing) {
41435
44105
  const existingConfig = JSON.parse(existing);
41436
- const fp = new Database(new TransactionBlockstore(name, existingConfig.key), [], opts);
41437
- return Fireproof.fromJSON(existingConfig, fp)
44106
+ return Fireproof.fromConfig(name, existingConfig, opts)
41438
44107
  } else {
41439
44108
  const instanceKey = browserExports$1(32).toString('hex'); // pass null to disable encryption
41440
- return new Database(new TransactionBlockstore(name, instanceKey), [], opts)
44109
+ opts.key = instanceKey;
44110
+ return new Database(name, [], opts)
41441
44111
  }
41442
44112
  } else {
41443
- return new Database(new TransactionBlockstore(), [], opts)
44113
+ return new Database(null, [], opts)
41444
44114
  }
41445
44115
  }
41446
44116
 
44117
+ static fromConfig (name, existingConfig, opts = {}) {
44118
+ opts.key = existingConfig.key;
44119
+ const fp = new Database(name, [], opts);
44120
+ return Fireproof.fromJSON(existingConfig, fp)
44121
+ }
44122
+
41447
44123
  static fromJSON (json, database) {
41448
- database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
44124
+ database.hydrate({ car: json.car, indexCar: json.indexCar, clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
41449
44125
  if (json.indexes) {
41450
44126
  for (const {
41451
44127
  name,
@@ -41468,7 +44144,8 @@ class Fireproof {
41468
44144
 
41469
44145
  static snapshot (database, clock) {
41470
44146
  const definition = database.toJSON();
41471
- const withBlocks = new Database(database.blocks);
44147
+ const withBlocks = new Database(database.name);
44148
+ withBlocks.blocks = database.blocks;
41472
44149
  if (clock) {
41473
44150
  definition.clock = clock.map(c => parseCID(c));
41474
44151
  definition.indexes.forEach(index => {