@fireproof/core 0.5.16 → 0.5.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/src/blockstore.js +1 -1
- package/dist/src/clock.js +4 -2
- package/dist/src/database.js +14 -8
- package/dist/src/db-index.js +15 -9
- package/dist/src/fireproof.d.ts +198 -6
- package/dist/src/fireproof.js +3102 -425
- package/dist/src/fireproof.js.map +1 -1
- package/dist/src/fireproof.mjs +3102 -425
- package/dist/src/fireproof.mjs.map +1 -1
- package/dist/src/sync.js +1 -1
- package/dist/src/valet.js +222 -41
- package/package.json +2 -1
- package/src/blockstore.js +1 -1
- package/src/clock.js +3 -2
- package/src/database.js +17 -9
- package/src/db-index.js +13 -12
- package/src/fireproof.js +15 -7
- package/src/sync.js +1 -1
- package/src/valet.js +239 -38
package/dist/src/fireproof.mjs
CHANGED
@@ -2316,7 +2316,7 @@ const coerce$1 = o => {
 * @param {Code} code
 * @param {Uint8Array} digest
 */
-const create$
+const create$8 = (code, digest) => {
 const size = digest.byteLength;
 const sizeOffset = encodingLength$1(code);
 const digestOffset = sizeOffset + encodingLength$1(size);
@@ -3076,7 +3076,7 @@ let CID$1 = class CID {
 switch (this.version) {
 case 0: {
 const { code, digest } = this.multihash;
-const multihash = create$
+const multihash = create$8(code, digest);
 return /** @type {CID<Data, Format, Alg, 1>} */ (
 CID.createV1(this.code, multihash)
 )
@@ -3550,9 +3550,9 @@ class Hasher {
 if (input instanceof Uint8Array) {
 const result = this.encode(input);
 return result instanceof Uint8Array
-? create$
+? create$8(this.code, result)
 /* c8 ignore next 1 */
-: result.then(digest => create$
+: result.then(digest => create$8(this.code, digest))
 } else {
 throw Error('Unknown type, must be binary type')
 /* c8 ignore next 1 */
@@ -3819,7 +3819,7 @@ function createUnsafe ({ bytes, cid, value: maybeValue, codec }) {
 * @param {API.MultihashHasher<Alg>} options.hasher
 * @returns {Promise<API.BlockView<T, Code, Alg, V>>}
 */
-async function create$
+async function create$7 ({ bytes, cid, hasher, codec }) {
 if (!bytes) throw new Error('Missing required argument "bytes"')
 if (!hasher) throw new Error('Missing required argument "hasher"')
 const value = codec.decode(bytes);
@@ -3976,8 +3976,8 @@ class Token {
 }

 const useBuffer = globalThis.process && !globalThis.process.browser && globalThis.Buffer && typeof globalThis.Buffer.isBuffer === 'function';
-const textDecoder = new TextDecoder();
-const textEncoder = new TextEncoder();
+const textDecoder$1 = new TextDecoder();
+const textEncoder$1 = new TextEncoder();
 function isBuffer$1(buf) {
 return useBuffer && globalThis.Buffer.isBuffer(buf);
 }
@@ -3990,12 +3990,12 @@ function asU8A(buf) {
 const toString = useBuffer ? (bytes, start, end) => {
 return end - start > 64 ? globalThis.Buffer.from(bytes.subarray(start, end)).toString('utf8') : utf8Slice(bytes, start, end);
 } : (bytes, start, end) => {
-return end - start > 64 ? textDecoder.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end);
+return end - start > 64 ? textDecoder$1.decode(bytes.subarray(start, end)) : utf8Slice(bytes, start, end);
 };
 const fromString = useBuffer ? string => {
 return string.length > 64 ? globalThis.Buffer.from(string) : utf8ToBytes(string);
 } : string => {
-return string.length > 64 ? textEncoder.encode(string) : utf8ToBytes(string);
+return string.length > 64 ? textEncoder$1.encode(string) : utf8ToBytes(string);
 };
 const fromArray$1 = arr => {
 return Uint8Array.from(arr);
@@ -5713,12 +5713,13 @@ async function findEventsToSync (blocks, head) {
 // console.time(callTag + '.findCommonAncestorWithSortedEvents')
 const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head);
 // console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
-// console.log('sorted', !!ancestor, sorted
+// console.log('sorted', !!ancestor, sorted)
 // console.time(callTag + '.contains')

 const toSync = ancestor ? await asyncFilter(sorted, async uks => !(await contains(events, ancestor, uks.cid))) : sorted;
 // console.timeEnd(callTag + '.contains')
-
+const sortDifference = sorted.length - toSync.length;
+if (sortDifference / sorted.length > 0.6) console.log('optimize sorted', !!ancestor, sortDifference);

 return { cids: events, events: toSync }
 }
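The two added lines in this hunk introduce a logging heuristic in `findEventsToSync`: when more than 60% of the sorted events are already contained under the common ancestor and get filtered out, the size of that gap is logged as a hint that the ancestor search could be tightened. A minimal standalone sketch of the same check, assuming `sorted` and `toSync` are plain arrays (the names are reused from the hunk for illustration only):

```js
// Sketch of the heuristic added above: measure how many sorted events were
// filtered out by the ancestor containment check, and log when the gap is
// large (over 60%), which suggests an optimization opportunity.
function reportSortDifference (sorted, toSync) {
  const sortDifference = sorted.length - toSync.length
  if (sorted.length > 0 && sortDifference / sorted.length > 0.6) {
    console.log('optimize sorted', sortDifference)
  }
  return sortDifference
}

// Example: 8 of 10 sorted events are filtered out, so the gap is logged.
reportSortDifference(new Array(10).fill({}), new Array(2).fill({}))
```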
@@ -6754,7 +6755,7 @@ class IPLDLeaf extends IPLDNode {
 return true;
 }
 }
-const create$
+const create$6 = async function* (obj) {
 let {LeafClass, LeafEntryClass, BranchClass, BranchEntryClass, list, chunker, compare, ...opts} = obj;
 list = list.map(value => new LeafEntryClass(value, opts));
 opts.compare = compare;
@@ -6786,35 +6787,36 @@ const create$4 = async function* (obj) {
 };

 class MapEntry extends Entry {
-async identity() {
+async identity () {
 const encoded = await this.codec.encode(await this.encodeNode());
 const hash = await this.hasher.encode(encoded);
-return readUInt32LE$1(hash)
+return readUInt32LE$1(hash)
 }
 }
 class MapLeafEntry extends MapEntry {
-constructor(node, opts) {
+constructor (node, opts) {
 super(node, opts);
 this.value = node.value;
 }
-
+
+encodeNode () {
 return [
 this.key,
 this.value
-]
+]
 }
 }
 class MapBranchEntry extends MapEntry {
-constructor(node, opts) {
-if (!node.address)
-throw new Error('Cannot create MapBranchEntry without address');
+constructor (node, opts) {
+if (!node.address) { throw new Error('Cannot create MapBranchEntry without address') }
 super(node, opts);
 }
-
+
+async encodeNode () {
 return [
 this.key,
 await this.address
-]
+]
 }
 }
 const getValue = async (node, key) => {
@@ -6825,7 +6827,7 @@ const getValue = async (node, key) => {
 return {
 result: entry.value,
 cids
-}
+}
 };
 const getManyValues = async (node, keys) => {
 const {
@@ -6835,34 +6837,38 @@ const getManyValues = async (node, keys) => {
 return {
 result: entries.map(entry => entry.value),
 cids
-}
+}
 };
 class MapLeaf extends IPLDLeaf {
-get(key) {
-return getValue(this, key)
+get (key) {
+return getValue(this, key)
 }
-
-
+
+getMany (keys) {
+return getManyValues(this, keys)
 }
-
+
+bulk (bulk, opts = {}, isRoot = true) {
 return super.bulk(bulk, {
 ...classes$1,
 ...opts
-}, isRoot)
+}, isRoot)
 }
 }
 class MapBranch extends IPLDBranch {
-get(key) {
-return getValue(this, key)
+get (key) {
+return getValue(this, key)
 }
-
-
+
+getMany (keys) {
+return getManyValues(this, keys)
 }
-
+
+bulk (bulk, opts = {}, isRoot = true) {
 return super.bulk(bulk, {
 ...classes$1,
 ...opts
-}, isRoot)
+}, isRoot)
 }
 }
 const classes$1 = {
@@ -6877,9 +6883,8 @@ const createGetNode$1 = (get, cache, chunker, codec, hasher, compare, opts) => {
 const BranchClass = opts.BranchClass || MapBranch;
 const BranchEntryClass = opts.BranchEntryClass || MapBranchEntry;
 const getNode = async cid => {
-if (cache.has(cid))
-
-return get(cid).then(block => decoder(block));
+if (cache.has(cid)) { return cache.get(cid) }
+return get(cid).then(block => decoder(block))
 };
 const decoder = makeDecoder({
 chunker,
@@ -6893,11 +6898,10 @@ const createGetNode$1 = (get, cache, chunker, codec, hasher, compare, opts) => {
 BranchEntryClass,
 BranchClass
 });
-return getNode
+return getNode
 };
-const create$
-if (!sorted)
-list = list.sort(({key: a}, {key: b}) => compare(a, b));
+const create$5 = ({ get, cache, chunker, list, codec, hasher, sorted, compare, ...opts }) => {
+if (!sorted) { list = list.sort(({ key: a }, { key: b }) => compare(a, b)); }
 const getNode = createGetNode$1(get, cache, chunker, codec, hasher, compare, opts);
 const _opts = {
 list,
@@ -6913,19 +6917,19 @@ const create$3 = ({get, cache, chunker, list, codec, hasher, sorted, compare, ..
 BranchClass: opts.BranchClass || MapBranch,
 BranchEntryClass: opts.BranchEntryClass || MapBranchEntry
 };
-return create$
+return create$6(_opts)
 };
-const load$
+const load$4 = ({ cid, get, cache, chunker, codec, hasher, compare, ...opts }) => {
 const getNode = createGetNode$1(get, cache, chunker, codec, hasher, compare, opts);
-return getNode(cid)
+return getNode(cid)
 };
-function makeDecoder({chunker, cache, getNode, codec, hasher, compare, LeafEntryClass, LeafClass, BranchEntryClass, BranchClass}) {
+function makeDecoder ({ chunker, cache, getNode, codec, hasher, compare, LeafEntryClass, LeafClass, BranchEntryClass, BranchClass }) {
 const entryOpts = {
 codec,
 hasher
 };
 return block => {
-const {value} = block;
+const { value } = block;
 const opts = {
 chunker,
 cache,
@@ -6952,7 +6956,7 @@ function makeDecoder({chunker, cache, getNode, codec, hasher, compare, LeafEntry
 }, entryOpts));
 CLS = BranchClass;
 } else {
-throw new Error('Unknown block data, does not match schema')
+throw new Error('Unknown block data, does not match schema')
 }
 const entryList = new EntryList({
 entries,
@@ -6963,8 +6967,8 @@ function makeDecoder({chunker, cache, getNode, codec, hasher, compare, LeafEntry
 ...opts
 });
 cache.set(node);
-return node
-}
+return node
+}
 }

 const nocache = {
@@ -7008,7 +7012,7 @@ const parse = (source, base) => CID$1.parse(source, base);
 *
 */

-const Kinds = {
+const Kinds$1 = {
 Null: /**
 * @param {any} obj
 * @returns {boolean}
@@ -7036,7 +7040,7 @@ const Kinds = {
 Link: /**
 * @param {any} obj
 * @returns {boolean}
-*/ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID === obj,
+*/ (/** @type {any} */ obj) => !Kinds$1.Null(obj) && typeof obj === 'object' && obj.asCID === obj,
 List: /**
 * @param {any} obj
 * @returns {boolean}
@@ -7044,31 +7048,31 @@ const Kinds = {
 Map: /**
 * @param {any} obj
 * @returns {boolean}
-*/ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID !== obj && !Kinds.List(obj) && !Kinds.Bytes(obj)
+*/ (/** @type {any} */ obj) => !Kinds$1.Null(obj) && typeof obj === 'object' && obj.asCID !== obj && !Kinds$1.List(obj) && !Kinds$1.Bytes(obj)
 };
 /** @type {{ [k in string]: (obj:any)=>boolean}} */
-const Types = {
-Int: Kinds.Int,
+const Types$1 = {
+Int: Kinds$1.Int,
 'CarHeader > version': /**
 * @param {any} obj
 * @returns {boolean}
-*/ (/** @type {any} */ obj) => Types.Int(obj),
-'CarHeader > roots (anon) > valueType (anon)': Kinds.Link,
+*/ (/** @type {any} */ obj) => Types$1.Int(obj),
+'CarHeader > roots (anon) > valueType (anon)': Kinds$1.Link,
 'CarHeader > roots (anon)': /**
 * @param {any} obj
 * @returns {boolean}
-*/ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types['CarHeader > roots (anon) > valueType (anon)']),
+*/ (/** @type {any} */ obj) => Kinds$1.List(obj) && Array.prototype.every.call(obj, Types$1['CarHeader > roots (anon) > valueType (anon)']),
 'CarHeader > roots': /**
 * @param {any} obj
 * @returns {boolean}
-*/ (/** @type {any} */ obj) => Types['CarHeader > roots (anon)'](obj),
+*/ (/** @type {any} */ obj) => Types$1['CarHeader > roots (anon)'](obj),
 CarHeader: /**
 * @param {any} obj
 * @returns {boolean}
-*/ (/** @type {any} */ obj) => { const keys = obj && Object.keys(obj); return Kinds.Map(obj) && ['version'].every((k) => keys.includes(k)) && Object.entries(obj).every(([name, value]) => Types['CarHeader > ' + name] && Types['CarHeader > ' + name](value)) }
+*/ (/** @type {any} */ obj) => { const keys = obj && Object.keys(obj); return Kinds$1.Map(obj) && ['version'].every((k) => keys.includes(k)) && Object.entries(obj).every(([name, value]) => Types$1['CarHeader > ' + name] && Types$1['CarHeader > ' + name](value)) }
 };

-const CarHeader = Types.CarHeader;
+const CarHeader = Types$1.CarHeader;

 var encode_1$1 = encode$4;

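The renamed `Kinds$1`/`Types$1` validators in this hunk are the bundled CAR header schema check: a header must be a map whose only recognized keys are `version` (an integer) and `roots` (a list of CID links). A condensed standalone sketch of that shape check; `isCID` is a hypothetical stand-in for the bundle's `Kinds$1.Link` test (`obj.asCID === obj`):

```js
// Condensed sketch of the CarHeader shape check shown above.
// `isCID` is a hypothetical stand-in for the bundle's Kinds$1.Link test.
const isCID = (obj) => obj !== null && typeof obj === 'object' && obj.asCID === obj

function isValidCarHeader (obj) {
  if (obj === null || typeof obj !== 'object' || Array.isArray(obj)) return false
  const allowed = ['version', 'roots']
  if (!Object.keys(obj).every(k => allowed.includes(k))) return false
  if (!Number.isInteger(obj.version)) return false // 'CarHeader > version'
  if ('roots' in obj && !(Array.isArray(obj.roots) && obj.roots.every(isCID))) return false
  return true
}

// Example: a version-1 header with an empty roots list passes the check.
console.log(isValidCarHeader({ version: 1, roots: [] })) // true
console.log(isValidCarHeader({ version: '1' }))          // false
```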
@@ -9810,15 +9814,15 @@ var versions = {};
 var release = {};
 var config = {};

-function noop$
+function noop$3() {}

-var on = noop$
-var addListener = noop$
-var once$2 = noop$
-var off = noop$
-var removeListener = noop$
-var removeAllListeners = noop$
-var emit = noop$
+var on = noop$3;
+var addListener = noop$3;
+var once$2 = noop$3;
+var off = noop$3;
+var removeListener = noop$3;
+var removeAllListeners = noop$3;
+var emit = noop$3;

 function binding(name) {
 throw new Error('process.binding is not supported');
@@ -12057,14 +12061,14 @@ function once$1(callback) {
 callback.apply(this, args);
 };
 }
-function noop$
+function noop$2() {}
 function isRequest$1(stream) {
 return stream.setHeader && typeof stream.abort === 'function';
 }
 function eos$1(stream, opts, callback) {
 if (typeof opts === 'function') return eos$1(stream, null, opts);
 if (!opts) opts = {};
-callback = once$1(callback || noop$
+callback = once$1(callback || noop$2);
 let readable = opts.readable || opts.readable !== false && stream.readable;
 let writable = opts.writable || opts.writable !== false && stream.writable;
 const onlegacyfinish = () => {
@@ -13493,7 +13497,7 @@ function once(callback) {
 const _require$codes = errorsBrowser.codes,
 ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
 ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
-function noop(err) {
+function noop$1(err) {
 // Rethrow the error if it exists to avoid swallowing it
 if (err) throw err;
 }
@@ -13534,8 +13538,8 @@ function pipe(from, to) {
 return from.pipe(to);
 }
 function popCallback(streams) {
-if (!streams.length) return noop;
-if (typeof streams[streams.length - 1] !== 'function') return noop;
+if (!streams.length) return noop$1;
+if (typeof streams[streams.length - 1] !== 'function') return noop$1;
 return streams.pop();
 }
 function pipeline() {
@@ -17449,19 +17453,19 @@ utils$n.padSplit = function padSplit(num, size, group) {
 return out.join(' ');
 };

-var minimalisticAssert = assert$
+var minimalisticAssert = assert$k;

-function assert$
+function assert$k(val, msg) {
 if (!val)
 throw new Error(msg || 'Assertion failed');
 }

-assert$
+assert$k.equal = function assertEqual(l, r, msg) {
 if (l != r)
 throw new Error(msg || ('Assertion failed: ' + l + ' != ' + r));
 };

-var assert$
+var assert$j = minimalisticAssert;

 function Cipher$3(options) {
 this.options = options;
@@ -17594,14 +17598,14 @@ Cipher$3.prototype._unpad = function _unpad(buffer) {
 };

 Cipher$3.prototype._finalDecrypt = function _finalDecrypt() {
-assert$
+assert$j.equal(this.bufferOff, this.blockSize, 'Not enough data to decrypt');
 var out = new Array(this.blockSize);
 this._flushBuffer(out, 0);

 return this._unpad(out);
 };

-var assert$
+var assert$i = minimalisticAssert;
 var inherits$h = require$$3$1;

 var utils$m = utils$n;
@@ -17635,7 +17639,7 @@ var shiftTable = [
 DES$3.prototype.deriveKeys = function deriveKeys(state, key) {
 state.keys = new Array(16 * 2);

-assert$
+assert$i.equal(key.length, this.blockSize, 'Invalid key length');

 var kL = utils$m.readUInt32BE(key, 0);
 var kR = utils$m.readUInt32BE(key, 4);
@@ -17685,7 +17689,7 @@ DES$3.prototype._pad = function _pad(buffer, off) {
 DES$3.prototype._unpad = function _unpad(buffer) {
 var pad = buffer[buffer.length - 1];
 for (var i = buffer.length - pad; i < buffer.length; i++)
-assert$
+assert$i.equal(buffer[i], pad);

 return buffer.slice(0, buffer.length - pad);
 };
@@ -17744,13 +17748,13 @@ DES$3.prototype._decrypt = function _decrypt(state, lStart, rStart, out, off) {

 var cbc$1 = {};

-var assert$
+var assert$h = minimalisticAssert;
 var inherits$g = require$$3$1;

 var proto = {};

 function CBCState(iv) {
-assert$
+assert$h.equal(iv.length, 8, 'Invalid IV length');

 this.iv = new Array(8);
 for (var i = 0; i < this.iv.length; i++)
@@ -17808,14 +17812,14 @@ proto._update = function _update(inp, inOff, out, outOff) {
 }
 };

-var assert$
+var assert$g = minimalisticAssert;
 var inherits$f = require$$3$1;

 var Cipher$1 = cipher;
 var DES$2 = des$1;

 function EDEState(type, key) {
-assert$
+assert$g.equal(key.length, 24, 'Invalid key length');

 var k1 = key.slice(0, 8);
 var k2 = key.slice(8, 16);
@@ -26919,7 +26923,7 @@ var BN$9 = bnExports$1;
 var utils$j = utils$l;
 var getNAF = utils$j.getNAF;
 var getJSF = utils$j.getJSF;
-var assert$
+var assert$f = utils$j.assert;

 function BaseCurve(type, conf) {
 this.type = type;
@@ -26965,7 +26969,7 @@ BaseCurve.prototype.validate = function validate() {
 };

 BaseCurve.prototype._fixedNafMul = function _fixedNafMul(p, k) {
-assert$
+assert$f(p.precomputed);
 var doubles = p._getDoubles();

 var naf = getNAF(k, 1, this._bitLength);
@@ -27022,7 +27026,7 @@ BaseCurve.prototype._wnafMul = function _wnafMul(p, k) {
 if (i < 0)
 break;
 var z = naf[i];
-assert$
+assert$f(z !== 0);
 if (p.type === 'affine') {
 // J +- P
 if (z > 0)
@@ -27190,9 +27194,9 @@ BaseCurve.prototype.decodePoint = function decodePoint(bytes, enc) {
 if ((bytes[0] === 0x04 || bytes[0] === 0x06 || bytes[0] === 0x07) &&
 bytes.length - 1 === 2 * len) {
 if (bytes[0] === 0x06)
-assert$
+assert$f(bytes[bytes.length - 1] % 2 === 0);
 else if (bytes[0] === 0x07)
-assert$
+assert$f(bytes[bytes.length - 1] % 2 === 1);

 var res = this.point(bytes.slice(1, 1 + len),
 bytes.slice(1 + len, 1 + 2 * len));
@@ -27299,7 +27303,7 @@ var BN$8 = bnExports$1;
 var inherits$9 = require$$3$1;
 var Base$2 = base$2;

-var assert$
+var assert$e = utils$i.assert;

 function ShortCurve(conf) {
 Base$2.call(this, 'short', conf);
@@ -27344,7 +27348,7 @@ ShortCurve.prototype._getEndomorphism = function _getEndomorphism(conf) {
 lambda = lambdas[0];
 } else {
 lambda = lambdas[1];
-assert$
+assert$e(this.g.mul(lambda).x.cmp(this.g.x.redMul(beta)) === 0);
 }
 }

@@ -28413,7 +28417,7 @@ var BN$6 = bnExports$1;
 var inherits$7 = require$$3$1;
 var Base = base$2;

-var assert$
+var assert$d = utils$g.assert;

 function EdwardsCurve(conf) {
 // NOTE: Important as we are creating point in Base.call()
@@ -28430,7 +28434,7 @@ function EdwardsCurve(conf) {
 this.d = new BN$6(conf.d, 16).toRed(this.red);
 this.dd = this.d.redAdd(this.d);

-assert$
+assert$d(!this.twisted || this.c.fromRed().cmpn(1) === 0);
 this.oneC = (conf.c | 0) === 1;
 }
 inherits$7(EdwardsCurve, Base);
@@ -28858,7 +28862,7 @@ var hash$2 = {};

 var utils$f = {};

-var assert$
+var assert$c = minimalisticAssert;
 var inherits$6 = require$$3$1;

 utils$f.inherits = inherits$6;
@@ -28979,7 +28983,7 @@ utils$f.zero8 = zero8;

 function join32(msg, start, end, endian) {
 var len = end - start;
-assert$
+assert$c(len % 4 === 0);
 var res = new Array(len / 4);
 for (var i = 0, k = start; i < res.length; i++, k += 4) {
 var w;
@@ -29138,7 +29142,7 @@ utils$f.shr64_lo = shr64_lo$1;
 var common$6 = {};

 var utils$e = utils$f;
-var assert$
+var assert$b = minimalisticAssert;

 function BlockHash$4() {
 this.pending = null;
@@ -29183,7 +29187,7 @@ BlockHash$4.prototype.update = function update(msg, enc) {

 BlockHash$4.prototype.digest = function digest(enc) {
 this.update(this._pad());
-assert$
+assert$b(this.pending === null);

 return this._digest(enc);
 };
@@ -29356,7 +29360,7 @@ SHA1.prototype._digest = function digest(enc) {
 var utils$b = utils$f;
 var common$3 = common$6;
 var shaCommon = common$5;
-var assert$
+var assert$a = minimalisticAssert;

 var sum32$1 = utils$b.sum32;
 var sum32_4$1 = utils$b.sum32_4;
@@ -29426,7 +29430,7 @@ SHA256$1.prototype._update = function _update(msg, start) {
 var g = this.h[6];
 var h = this.h[7];

-assert$
+assert$a(this.k.length === W.length);
 for (i = 0; i < W.length; i++) {
 var T1 = sum32_5(h, s1_256(e), ch32(e, f, g), this.k[i], W[i]);
 var T2 = sum32$1(s0_256(a), maj32(a, b, c));
@@ -29487,7 +29491,7 @@ SHA224.prototype._digest = function digest(enc) {

 var utils$9 = utils$f;
 var common$2 = common$6;
-var assert$
+var assert$9 = minimalisticAssert;

 var rotr64_hi = utils$9.rotr64_hi;
 var rotr64_lo = utils$9.rotr64_lo;
@@ -29622,7 +29626,7 @@ SHA512$1.prototype._update = function _update(msg, start) {
 var hh = this.h[14];
 var hl = this.h[15];

-assert$
+assert$9(this.k.length === W.length);
 for (var i = 0; i < W.length; i += 2) {
 var c0_hi = hh;
 var c0_lo = hl;
@@ -30002,7 +30006,7 @@ var sh = [
 ];

 var utils$6 = utils$f;
-var assert$
+var assert$8 = minimalisticAssert;

 function Hmac(hash, key, enc) {
 if (!(this instanceof Hmac))
@@ -30021,7 +30025,7 @@ Hmac.prototype._init = function init(key) {
 // Shorten key, if needed
 if (key.length > this.blockSize)
 key = new this.Hash().update(key).digest();
-assert$
+assert$8(key.length <= this.blockSize);

 // Add padding to key
 for (var i = key.length; i < this.blockSize; i++)
@@ -31064,7 +31068,7 @@ function requireSecp256k1 () {

 var hash$1 = hash$2;
 var utils$5 = utils$k;
-var assert$
+var assert$7 = minimalisticAssert;

 function HmacDRBG(options) {
 if (!(this instanceof HmacDRBG))
@@ -31083,7 +31087,7 @@ function HmacDRBG(options) {
 var entropy = utils$5.toArray(options.entropy, options.entropyEnc || 'hex');
 var nonce = utils$5.toArray(options.nonce, options.nonceEnc || 'hex');
 var pers = utils$5.toArray(options.pers, options.persEnc || 'hex');
-assert$
+assert$7(entropy.length >= (this.minEntropy / 8),
 'Not enough entropy. Minimum is: ' + this.minEntropy + ' bits');
 this._init(entropy, nonce, pers);
 }
@@ -31138,7 +31142,7 @@ HmacDRBG.prototype.reseed = function reseed(entropy, entropyEnc, add, addEnc) {
 entropy = utils$5.toArray(entropy, entropyEnc);
 add = utils$5.toArray(add, addEnc);

-assert$
+assert$7(entropy.length >= (this.minEntropy / 8),
 'Not enough entropy. Minimum is: ' + this.minEntropy + ' bits');

 this._update(entropy.concat(add || []));
@@ -31176,7 +31180,7 @@ HmacDRBG.prototype.generate = function generate(len, enc, add, addEnc) {

 var BN$5 = bnExports$1;
 var utils$4 = utils$l;
-var assert$
+var assert$6 = utils$4.assert;

 function KeyPair$2(ec, options) {
 this.ec = ec;
@@ -31261,10 +31265,10 @@ KeyPair$2.prototype._importPublic = function _importPublic(key, enc) {
 // Weierstrass/Edwards points on the other hand have both `x` and
 // `y` coordinates.
 if (this.ec.curve.type === 'mont') {
-assert$
+assert$6(key.x, 'Need x coordinate');
 } else if (this.ec.curve.type === 'short' ||
 this.ec.curve.type === 'edwards') {
-assert$
+assert$6(key.x && key.y, 'Need both x and y coordinate');
 }
 this.pub = this.ec.curve.point(key.x, key.y);
 return;
@@ -31275,7 +31279,7 @@ KeyPair$2.prototype._importPublic = function _importPublic(key, enc) {
 // ECDH
 KeyPair$2.prototype.derive = function derive(pub) {
 if(!pub.validate()) {
-assert$
+assert$6(pub.validate(), 'public point not validated');
 }
 return pub.mul(this.priv).getX();
 };
@@ -31297,7 +31301,7 @@ KeyPair$2.prototype.inspect = function inspect() {
 var BN$4 = bnExports$1;

 var utils$3 = utils$l;
-var assert$
+var assert$5 = utils$3.assert;

 function Signature$2(options, enc) {
 if (options instanceof Signature$2)
@@ -31306,7 +31310,7 @@ function Signature$2(options, enc) {
 if (this._importDER(options, enc))
 return;

-assert$
+assert$5(options.r && options.s, 'Signature without r or s');
 this.r = new BN$4(options.r, 16);
 this.s = new BN$4(options.s, 16);
 if (options.recoveryParam === undefined)
@@ -31711,7 +31715,7 @@ function requireEc () {
 }

 var utils$2 = utils$l;
-var assert$
+var assert$4 = utils$2.assert;
 var parseBytes$2 = utils$2.parseBytes;
 var cachedProperty$1 = utils$2.cachedProperty;

@@ -31785,7 +31789,7 @@ cachedProperty$1(KeyPair$1, 'messagePrefix', function messagePrefix() {
 });

 KeyPair$1.prototype.sign = function sign(message) {
-assert$
+assert$4(this._secret, 'KeyPair can only verify');
 return this.eddsa.sign(message, this);
 };

@@ -31794,7 +31798,7 @@ KeyPair$1.prototype.verify = function verify(message, sig) {
 };

 KeyPair$1.prototype.getSecret = function getSecret(enc) {
-assert$
+assert$4(this._secret, 'KeyPair is public only');
 return utils$2.encode(this.secret(), enc);
 };

@@ -31806,7 +31810,7 @@ var key$1 = KeyPair$1;

 var BN$3 = bnExports$1;
 var utils$1 = utils$l;
-var assert$
+var assert$3 = utils$1.assert;
 var cachedProperty = utils$1.cachedProperty;
 var parseBytes$1 = utils$1.parseBytes;

@@ -31831,7 +31835,7 @@ function Signature$1(eddsa, sig) {
 };
 }

-assert$
+assert$3(sig.R && sig.S, 'Signature without R or S');

 if (eddsa.isPoint(sig.R))
 this._R = sig.R;
@@ -31871,13 +31875,13 @@ var signature = Signature$1;
 var hash = hash$2;
 var curves = curves$1;
 var utils = utils$l;
-var assert$
+var assert$2 = utils.assert;
 var parseBytes = utils.parseBytes;
 var KeyPair = key$1;
 var Signature = signature;

 function EDDSA(curve) {
-assert$
+assert$2(curve === 'ed25519', 'only tested with ed25519 so far');

 if (!(this instanceof EDDSA))
 return new EDDSA(curve);
@@ -32372,7 +32376,7 @@ EncoderBuffer$1.prototype.join = function join(out, offset) {
 const Reporter = reporter.Reporter;
 const EncoderBuffer = buffer.EncoderBuffer;
 const DecoderBuffer$1 = buffer.DecoderBuffer;
-const assert = minimalisticAssert;
+const assert$1 = minimalisticAssert;

 // Supported tags
 const tags = [
@@ -32463,14 +32467,14 @@ Node$2.prototype._wrap = function wrap() {
 Node$2.prototype._init = function init(body) {
 const state = this._baseState;

-assert(state.parent === null);
+assert$1(state.parent === null);
 body.call(this);

 // Filter children
 state.children = state.children.filter(function(child) {
 return child._baseState.parent === this;
 }, this);
-assert.equal(state.children.length, 1, 'Root node can have only one child');
+assert$1.equal(state.children.length, 1, 'Root node can have only one child');
 };

 Node$2.prototype._useArgs = function useArgs(args) {
@@ -32485,7 +32489,7 @@ Node$2.prototype._useArgs = function useArgs(args) {
 }, this);

 if (children.length !== 0) {
-assert(state.children === null);
+assert$1(state.children === null);
 state.children = children;

 // Replace parent to maintain backward link
@@ -32494,7 +32498,7 @@ Node$2.prototype._useArgs = function useArgs(args) {
 }, this);
 }
 if (args.length !== 0) {
-assert(state.args === null);
+assert$1(state.args === null);
 state.args = args;
 state.reverseArgs = args.map(function(arg) {
 if (typeof arg !== 'object' || arg.constructor !== Object)
@@ -32532,7 +32536,7 @@ tags.forEach(function(tag) {
 const state = this._baseState;
 const args = Array.prototype.slice.call(arguments);

-assert(state.tag === null);
+assert$1(state.tag === null);
 state.tag = tag;

 this._useArgs(args);
@@ -32542,10 +32546,10 @@ tags.forEach(function(tag) {
 });

 Node$2.prototype.use = function use(item) {
-assert(item);
+assert$1(item);
 const state = this._baseState;

-assert(state.use === null);
+assert$1(state.use === null);
 state.use = item;

 return this;
@@ -32562,7 +32566,7 @@ Node$2.prototype.optional = function optional() {
 Node$2.prototype.def = function def(val) {
 const state = this._baseState;

-assert(state['default'] === null);
+assert$1(state['default'] === null);
 state['default'] = val;
 state.optional = true;

@@ -32572,7 +32576,7 @@ Node$2.prototype.def = function def(val) {
 Node$2.prototype.explicit = function explicit(num) {
 const state = this._baseState;

-assert(state.explicit === null && state.implicit === null);
+assert$1(state.explicit === null && state.implicit === null);
 state.explicit = num;

 return this;
@@ -32581,7 +32585,7 @@ Node$2.prototype.explicit = function explicit(num) {
 Node$2.prototype.implicit = function implicit(num) {
 const state = this._baseState;

-assert(state.explicit === null && state.implicit === null);
+assert$1(state.explicit === null && state.implicit === null);
 state.implicit = num;

 return this;
@@ -32602,7 +32606,7 @@ Node$2.prototype.obj = function obj() {
 Node$2.prototype.key = function key(newKey) {
 const state = this._baseState;

-assert(state.key === null);
+assert$1(state.key === null);
 state.key = newKey;

 return this;
@@ -32619,7 +32623,7 @@ Node$2.prototype.any = function any() {
 Node$2.prototype.choice = function choice(obj) {
 const state = this._baseState;

-assert(state.choice === null);
+assert$1(state.choice === null);
 state.choice = obj;
 this._useArgs(Object.keys(obj).map(function(key) {
 return obj[key];
@@ -32631,7 +32635,7 @@ Node$2.prototype.choice = function choice(obj) {
 Node$2.prototype.contains = function contains(item) {
 const state = this._baseState;

-assert(state.use === null);
+assert$1(state.use === null);
 state.contains = item;

 return this;
@@ -32804,7 +32808,7 @@ Node$2.prototype._getUse = function _getUse(entity, obj) {
 const state = this._baseState;
 // Create altered use decoder if implicit is set
 state.useDecoder = this._use(entity, obj);
-assert(state.useDecoder._baseState.parent === null);
+assert$1(state.useDecoder._baseState.parent === null);
 state.useDecoder = state.useDecoder._baseState.children[0];
 if (state.implicit !== state.useDecoder._baseState.implicit) {
 state.useDecoder = state.useDecoder.clone();
@@ -32965,7 +32969,7 @@ Node$2.prototype._encodeChoice = function encodeChoice(data, reporter) {

 const node = state.choice[data.type];
 if (!node) {
-assert(
+assert$1(
 false,
 data.type + ' not found in ' +
 JSON.stringify(Object.keys(state.choice)));
@@ -36083,7 +36087,7 @@ const coerce = o => {
 throw new Error('Unknown type, must be binary type');
 };

-const create$
+const create$4 = (code, digest) => {
 const size = digest.byteLength;
 const sizeOffset = encodingLength(code);
 const digestOffset = sizeOffset + encodingLength(size);
@@ -36459,7 +36463,7 @@ class CID {
 switch (this.version) {
 case 0: {
 const {code, digest} = this.multihash;
-const multihash = create$
+const multihash = create$4(code, digest);
 return CID.createV1(this.code, multihash);
 }
 case 1: {
@@ -36882,7 +36886,7 @@ const createGetNode = (get, cache, chunker, codec, hasher) => {
 };
 return getNode;
 };
-const create$
+const create$3 = ({get, cache, chunker, list, codec, hasher, sorted}) => {
 if (!sorted)
 list = list.sort(compare$2);
 const getNode = createGetNode(get, cache, chunker, codec, hasher);
@@ -36900,16 +36904,16 @@ const create$1 = ({get, cache, chunker, list, codec, hasher, sorted}) => {
 BranchClass: CIDSetBranch,
 BranchEntryClass: CIDNodeEntry
 };
-return create$
+return create$6(opts);
 };
-const load$
+const load$3 = ({cid, get, cache, chunker, codec, hasher, ...opts}) => {
 const getNode = createGetNode(get, cache, chunker, codec, hasher);
 return getNode(cid);
 };

 // @ts-nocheck

-const createBlock = (bytes, cid) => create$
+const createBlock = (bytes, cid) => create$7({ cid, bytes, hasher: sha256$2, codec });

 const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
 const set = new Set();
@@ -36926,7 +36930,7 @@ const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root
 if (!eroot) throw new Error('cids does not include root')
 const list = [...set].map(s => CID$1.parse(s));
 let last;
-for await (const node of create$
+for await (const node of create$3({ list, get, cache, chunker, hasher, codec: codec$1 })) {
 const block = await node.block;
 yield block;
 last = block;
@@ -36942,7 +36946,7 @@ const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
 // console.log('decodedRoot', decodedRoot)
 const { value: [eroot, tree] } = decodedRoot;
 const rootBlock = await get(eroot); // should I decrypt?
-const cidset = await load$
+const cidset = await load$3({ cid: tree, get, cache, chunker, codec, hasher });
 const { result: nodes } = await cidset.getAllEntries();
 const unwrap = async (eblock) => {
 const { bytes, cid } = await decrypt$1({ ...eblock, key }).catch(e => {
|
@@ -36960,321 +36964,2925 @@ const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
|
|
36960
36964
|
yield unwrap(rootBlock);
|
36961
36965
|
};
|
36962
36966
|
|
36963
|
-
|
36964
|
-
// from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
|
36965
|
-
// MIT License Copyright (c) 2020 Dumitru Uzun
|
36966
|
-
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
36967
|
-
// of this software and associated documentation files (the "Software"), to deal
|
36968
|
-
// in the Software without restriction, including without limitation the rights
|
36969
|
-
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
36970
|
-
// copies of the Software, and to permit persons to whom the Software is
|
36971
|
-
// furnished to do so, subject to the following conditions:
|
36967
|
+
var bitUtils = {};
|
36972
36968
|
|
36973
|
-
|
36974
|
-
|
36969
|
+
/**
|
36970
|
+
* @param {Uint8Array} bytes
|
36971
|
+
* @param {number} bitStart
|
36972
|
+
* @param {number} bitLength
|
36973
|
+
* @returns {number}
|
36974
|
+
*/
|
36975
36975
|
|
36976
|
-
|
36977
|
-
//
|
36978
|
-
//
|
36979
|
-
//
|
36980
|
-
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
36981
|
-
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
36982
|
-
// SOFTWARE.
|
36976
|
+
function bitSequence$1 (bytes, bitStart, bitLength) {
|
36977
|
+
// making an assumption that bytes is an Array-like that will give us one
|
36978
|
+
// byte per element, so either an Array full of 8-bit integers or a
|
36979
|
+
// Uint8Array or a Node.js Buffer, or something like that
|
36983
36980
|
|
36984
|
-
|
36985
|
-
|
36986
|
-
|
36981
|
+
const startOffset = bitStart % 8;
|
36982
|
+
const byteCount = Math.ceil((startOffset + bitLength) / 8);
|
36983
|
+
const byteStart = bitStart >> 3;
|
36984
|
+
const endOffset = byteCount * 8 - bitLength - startOffset;
|
36985
|
+
|
36986
|
+
let result = 0;
|
36987
|
+
|
36988
|
+
for (let i = 0; i < byteCount; i++) {
|
36989
|
+
let local = bytes[byteStart + i];
|
36990
|
+
let shift = 0;
|
36991
|
+
let localBitLength = 8; // how many bits of this byte we are extracting
|
36992
|
+
|
36993
|
+
if (i === 0) {
|
36994
|
+
localBitLength -= startOffset;
|
36995
|
+
}
|
36996
|
+
|
36997
|
+
if (i === byteCount - 1) {
|
36998
|
+
localBitLength -= endOffset;
|
36999
|
+
shift = endOffset;
|
37000
|
+
local >>= shift; // take off the trailing bits
|
37001
|
+
}
|
37002
|
+
|
37003
|
+
if (localBitLength < 8) {
|
37004
|
+
const mask = (1 << localBitLength) - 1;
|
37005
|
+
local &= mask; // mask off the leading bits
|
37006
|
+
}
|
37007
|
+
|
37008
|
+
if (i < 3) {
|
37009
|
+
if (shift < 8) {
|
37010
|
+
result = result << (8 - shift);
|
37011
|
+
}
|
37012
|
+
result |= local;
|
37013
|
+
} else {
|
37014
|
+
// once we start shifting beyond the 24-bit range we get to signed integers
|
37015
|
+
// and our bitwise operations break down, because JavaScript. But if we do
|
37016
|
+
// it without bitwise operations then we can cheat into very large numbers
|
37017
|
+
if (shift < 8) {
|
37018
|
+
result = result * Math.pow(2, (8 - shift));
|
37019
|
+
}
|
37020
|
+
result += local;
|
37021
|
+
}
|
37022
|
+
}
|
37023
|
+
|
37024
|
+
return result
|
37025
|
+
}
|
37026
|
+
|
37027
|
+
var bitSequence_1 = bitSequence$1;
|
37028
|
+
|
37029
|
+
// Copyright Rod Vagg; Licensed under the Apache License, Version 2.0, see README.md for more information
|
37030
|
+
|
37031
|
+
const bitSequence = bitSequence_1;
|
36987
37032
|
|
36988
37033
|
/**
|
36989
|
-
*
|
36990
|
-
*
|
36991
|
-
* @param
|
36992
|
-
*
|
36993
|
-
|
36994
|
-
|
36995
|
-
|
36996
|
-
|
36997
|
-
let bs = 0;
|
36998
|
-
let A; let B; let C; let D; let G;
|
36999
|
-
const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
|
37000
|
-
const W = new Uint32Array(80);
|
37001
|
-
const nrWords = (i / 4 + 2) | 15;
|
37002
|
-
const words = new Uint32Array(nrWords + 1);
|
37003
|
-
let j;
|
37034
|
+
* @param {Uint8Array} hash
|
37035
|
+
* @param {number} depth
|
37036
|
+
* @param {number} nbits
|
37037
|
+
* @returns {number}
|
37038
|
+
*/
|
37039
|
+
function mask$1 (hash, depth, nbits) {
|
37040
|
+
return bitSequence(hash, depth * nbits, nbits)
|
37041
|
+
}
|
37004
37042
|
|
37005
|
-
|
37006
|
-
|
37007
|
-
|
37008
|
-
|
37043
|
+
/**
|
37044
|
+
* set the `position` bit in the given `bitmap` to be `set` (truthy=1, falsey=0)
|
37045
|
+
* @param {Uint8Array} bitmap
|
37046
|
+
* @param {number} position
|
37047
|
+
* @param {boolean|0|1} set
|
37048
|
+
* @returns {Uint8Array}
|
37049
|
+
*/
|
37050
|
+
function setBit$1 (bitmap, position, set) {
|
37051
|
+
// if we assume that `bitmap` is already the opposite of `set`, we could skip this check
|
37052
|
+
const byte = Math.floor(position / 8);
|
37053
|
+
const offset = position % 8;
|
37054
|
+
const has = bitmapHas$1(bitmap, undefined, byte, offset);
|
37055
|
+
if ((set && !has) || (!set && has)) {
|
37056
|
+
const newBitmap = Uint8Array.from(bitmap);
|
37057
|
+
let b = bitmap[byte];
|
37058
|
+
if (set) {
|
37059
|
+
b |= (1 << offset);
|
37060
|
+
} else {
|
37061
|
+
b ^= (1 << offset);
|
37062
|
+
}
|
37063
|
+
newBitmap[byte] = b;
|
37064
|
+
return newBitmap
|
37009
37065
|
}
|
37066
|
+
return bitmap
|
37067
|
+
}
|
37010
37068
|
|
37011
|
-
|
37012
|
-
|
37013
|
-
|
37014
|
-
|
37015
|
-
|
37016
|
-
|
37017
|
-
|
37018
|
-
|
37019
|
-
|
37020
|
-
|
37021
|
-
|
37022
|
-
|
37023
|
-
|
37024
|
-
: (B & C | B & D | C & D) + 0x34994343
|
37025
|
-
: B & C | ~B & D
|
37026
|
-
)
|
37027
|
-
)
|
37028
|
-
, A[1] = b
|
37029
|
-
, A[2] = B << 30 | B >>> 2
|
37030
|
-
, A[3] = C
|
37031
|
-
, A[4] = D
|
37032
|
-
, ++i
|
37033
|
-
) {
|
37034
|
-
G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
|
37069
|
+
/**
|
37070
|
+
* check whether `bitmap` has a `1` at the given `position` bit
|
37071
|
+
* @param {Uint8Array} bitmap
|
37072
|
+
* @param {number} [position]
|
37073
|
+
* @param {number} [byte]
|
37074
|
+
* @param {number} [offset]
|
37075
|
+
* @returns {boolean}
|
37076
|
+
*/
|
37077
|
+
function bitmapHas$1 (bitmap, position, byte, offset) {
|
37078
|
+
if (typeof byte !== 'number' || typeof offset !== 'number') {
|
37079
|
+
/* c8 ignore next 3 */
|
37080
|
+
if (position === undefined) {
|
37081
|
+
throw new Error('`position` expected')
|
37035
37082
|
}
|
37083
|
+
byte = Math.floor(position / 8);
|
37084
|
+
offset = position % 8;
|
37085
|
+
}
|
37086
|
+
return ((bitmap[byte] >> offset) & 1) === 1
|
37087
|
+
}
|
37036
37088
|
|
37037
|
-
|
37089
|
+
/**
|
37090
|
+
* count how many `1` bits are in `bitmap up until `position`
|
37091
|
+
* tells us where in the compacted element array an element should live
|
37092
|
+
* TODO: optimize with a popcount on a `position` shifted bitmap?
|
37093
|
+
* assumes bitmapHas(bitmap, position) == true, hence the i<position and +1 in the return
|
37094
|
+
* @param {Uint8Array} bitmap
|
37095
|
+
* @param {number} position
|
37096
|
+
* @returns {number}
|
37097
|
+
*/
|
37098
|
+
function index$1 (bitmap, position) {
|
37099
|
+
let t = 0;
|
37100
|
+
for (let i = 0; i < position; i++) {
|
37101
|
+
if (bitmapHas$1(bitmap, i)) {
|
37102
|
+
t++;
|
37103
|
+
}
|
37038
37104
|
}
|
37105
|
+
return t
|
37106
|
+
}
|
37039
37107
|
|
37040
|
-
|
37041
|
-
|
37042
|
-
|
37108
|
+
bitUtils.mask = mask$1;
|
37109
|
+
bitUtils.setBit = setBit$1;
|
37110
|
+
bitUtils.bitmapHas = bitmapHas$1;
|
37111
|
+
bitUtils.index = index$1;
|
37043
37112
|
|
37044
|
-
|
37113
|
+
// Copyright Rod Vagg; Licensed under the Apache License, Version 2.0, see README.md for more information
|
37114
|
+
|
37115
|
+
const { mask, setBit, bitmapHas, index } = bitUtils;
|
37116
|
+
|
37117
|
+
const defaultBitWidth = 8; // 2^8 = 256 buckets or children per node
|
37118
|
+
const defaultBucketSize = 5; // array size for a bucket of values
|
37119
|
+
|
37120
|
+
/**
|
37121
|
+
* @template T
|
37122
|
+
* @typedef {import('./interface').Store<T>} Store<T>
|
37123
|
+
*/
|
37124
|
+
/**
|
37125
|
+
* @typedef {import('./interface').Config} Config
|
37126
|
+
* @typedef {import('./interface').Options} Options
|
37127
|
+
* @typedef {import('./interface').SerializedKV} SerializedKV
|
37128
|
+
* @typedef {import('./interface').SerializedElement} SerializedElement
|
37129
|
+
* @typedef {import('./interface').SerializedNode} SerializedNode
|
37130
|
+
* @typedef {import('./interface').SerializedRoot} SerializedRoot
|
37131
|
+
* @typedef {(inp:Uint8Array)=>(Uint8Array|Promise<Uint8Array>)} Hasher
|
37132
|
+
* @typedef {{ hasher: Hasher, hashBytes: number }[]} Registry
|
37133
|
+
* @typedef {(link:any)=>boolean} IsLink
|
37134
|
+
* @typedef {readonly Element[]} ReadonlyElement
|
37135
|
+
* @typedef {{data?: { found: boolean, elementAt: number, element: Element, bucketIndex?: number, bucketEntry?: KV }, link?: { elementAt: number, element: Element }}} FoundElement
|
37136
|
+
*/
|
37137
|
+
|
37138
|
+
/**
|
37139
|
+
* @type {Registry}
|
37140
|
+
* @ignore
|
37141
|
+
*/
|
37142
|
+
const hasherRegistry = [];
|
37143
|
+
|
37144
|
+
const textEncoder = new TextEncoder();
|
37145
|
+
|
37146
|
+
/**
|
37147
|
+
* @ignore
|
37148
|
+
* @param {boolean} condition
|
37149
|
+
* @param {string} [message]
|
37150
|
+
*/
|
37151
|
+
function assert (condition, message) {
|
37152
|
+
if (!condition) {
|
37153
|
+
throw new Error(message || 'Unexpected error')
|
37154
|
+
}
|
37045
37155
|
}
|
37046
37156
|
|
37047
|
-
|
37157
|
+
/**
|
37158
|
+
* ```js
|
37159
|
+
* let map = await iamap.create(store, options)
|
37160
|
+
* ```
|
+ *
+ * Create a new IAMap instance with a backing store. This operation is asynchronous and returns a `Promise` that
+ * resolves to a `IAMap` instance.
+ *
+ * @name iamap.create
+ * @function
+ * @async
+ * @template T
+ * @param {Store<T>} store - A backing store for this Map. The store should be able to save and load a serialised
+ * form of a single node of a IAMap which is provided as a plain object representation. `store.save(node)` takes
+ * a serialisable node and should return a content address / ID for the node. `store.load(id)` serves the inverse
+ * purpose, taking a content address / ID as provided by a `save()` operation and returning the serialised form
+ * of a node which can be instantiated by IAMap. In addition, two identifier handling methods are needed:
+ * `store.isEqual(id1, id2)` is required to check the equality of the two content addresses / IDs
+ * (which may be custom for that data type). `store.isLink(obj)` is used to determine if an object is a link type
+ * that can be used for `load()` operations on the store. It is important that link types be different to standard
+ * JavaScript arrays and don't share properties used by the serialized form of an IAMap (e.g. such that a
+ * `typeof obj === 'object' && Array.isArray(obj.data)`) .This is because a node data element may either be a link to
+ * a child node, or an inlined child node, so `isLink()` should be able to determine if an object is a link, and if not,
+ * `Array.isArray(obj)` will determine if that data element is a bucket of elements, or the above object check be able
+ * to determine that an inline child node exists at the data element.
+ * The `store` object should take the following form:
+ * `{ async save(node):id, async load(id):node, isEqual(id,id):boolean, isLink(obj):boolean }`
+ * A `store` should throw an appropriately informative error when a node that is requested does not exist in the backing
+ * store.
+ *
+ * Options:
+ * - hashAlg (number) - A [multicodec](https://github.com/multiformats/multicodec/blob/master/table.csv)
+ * hash function identifier, e.g. `0x23` for `murmur3-32`. Hash functions must be registered with {@link iamap.registerHasher}.
+ * - bitWidth (number, default 8) - The number of bits to extract from the hash to form a data element index at
+ * each level of the Map, e.g. a bitWidth of 5 will extract 5 bits to be used as the data element index, since 2^5=32,
+ * each node will store up to 32 data elements (child nodes and/or entry buckets). The maximum depth of the Map is
+ * determined by `floor((hashBytes * 8) / bitWidth)` where `hashBytes` is the number of bytes the hash function
+ * produces, e.g. `hashBytes=32` and `bitWidth=5` yields a maximum depth of 51 nodes. The maximum `bitWidth`
+ * currently allowed is `8` which will store 256 data elements in each node.
+ * - bucketSize (number, default 5) - The maximum number of collisions acceptable at each level of the Map. A
+ * collision in the `bitWidth` index at a given depth will result in entries stored in a bucket (array). Once the
+ * bucket exceeds `bucketSize`, a new child node is created for that index and all entries in the bucket are
+ * pushed
+ *
+ * @param {Options} options - Options for this IAMap
+ * @param {Uint8Array} [map] - for internal use
+ * @param {number} [depth] - for internal use
+ * @param {Element[]} [data] - for internal use
+ */
+ async function create$2 (store, options, map, depth, data) {
+ // map, depth and data are intended for internal use
+ const newNode = new IAMap(store, options, map, depth, data);
+ return save(store, newNode)
+ }
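For reference, a minimal sketch of a backing store that satisfies the `{ save, load, isEqual, isLink }` contract documented above, using integer IDs so that links are trivially distinguishable from buckets. The `memoryStore` helper and the surrounding usage are illustrative assumptions, not part of this package.

```js
// Hypothetical in-memory store sketch. Integer IDs act as "links": they are not
// Arrays and share no properties with a serialised IAMap node, as required above.
function memoryStore () {
  const nodes = []
  return {
    async save (node) { nodes.push(node); return nodes.length - 1 },
    async load (id) {
      if (nodes[id] === undefined) throw new Error(`No node found for ID ${id}`)
      return nodes[id]
    },
    isEqual (id1, id2) { return id1 === id2 },
    isLink (obj) { return Number.isInteger(obj) }
  }
}

// Usage sketch, assuming the upstream `iamap` module and a hash function
// registered beforehand (0x12 is the multicodec code for sha2-256):
// const map = await iamap.create(memoryStore(), { hashAlg: 0x12, bitWidth: 5, bucketSize: 3 })
```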

-
-
+ /**
+ * ```js
+ * let map = await iamap.load(store, id)
+ * ```
+ *
+ * Create a IAMap instance loaded from a serialised form in a backing store. See {@link iamap.create}.
+ *
+ * @name iamap.load
+ * @function
+ * @async
+ * @template T
+ * @param {Store<T>} store - A backing store for this Map. See {@link iamap.create}.
+ * @param {any} id - An content address / ID understood by the backing `store`.
+ * @param {number} [depth=0]
+ * @param {Options} [options]
+ */
+ async function load$2 (store, id, depth = 0, options) {
+ // depth and options are internal arguments that the user doesn't need to interact with
+ if (depth !== 0 && typeof options !== 'object') {
+ throw new Error('Cannot load() without options at depth > 0')
+ }
+ const serialized = await store.load(id);
+ return fromSerializable(store, id, serialized, options, depth)
+ }
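A brief sketch of the save/load round trip implied by the two functions above; it assumes the store and map from the earlier in-memory store example, and that the library is importable as `iamap`.

```js
import * as iamap from 'iamap' // assumed import path; this bundle inlines the library

// Hypothetical continuation of the previous sketch: the ID the store assigned to
// the root node during create()/set() is what load() expects back.
async function roundTrip (store, map) {
  const rootId = map.id // assigned by store.save() inside create()
  return iamap.load(store, rootId)
}
```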

-
-
-
-
-
-
-
+ /**
+ * ```js
+ * iamap.registerHasher(hashAlg, hashBytes, hasher)
+ * ```
+ *
+ * Register a new hash function. IAMap has no hash functions by default, at least one is required to create a new
+ * IAMap.
+ *
+ * @name iamap.registerHasher
+ * @function
+ * @param {number} hashAlg - A [multicodec](https://github.com/multiformats/multicodec/blob/master/table.csv) hash
+ * function identifier **number**, e.g. `0x23` for `murmur3-32`.
+ * @param {number} hashBytes - The number of bytes to use from the result of the `hasher()` function (e.g. `32`)
+ * @param {Hasher} hasher - A hash function that takes a `Uint8Array` derived from the `key` values used for this
+ * Map and returns a `Uint8Array` (or a `Uint8Array`-like, such that each data element of the array contains a single byte value). The function
+ * may or may not be asynchronous but will be called with an `await`.
+ */
+ function registerHasher (hashAlg, hashBytes, hasher) {
+ if (!Number.isInteger(hashAlg)) {
+ throw new Error('Invalid `hashAlg`')
+ }
+ if (!Number.isInteger(hashBytes)) {
+ throw new TypeError('Invalid `hashBytes`')
+ }
+ if (typeof hasher !== 'function') {
+ throw new TypeError('Invalid `hasher` function }')
+ }
+ hasherRegistry[hashAlg] = { hashBytes, hasher };
+ }
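A hedged sketch of registering a hash function as described above, using SHA2-256 (multicodec code 0x12) via Node's crypto module rather than the murmur3 example from the docs; the `iamap` import path is an assumption.

```js
import { createHash } from 'node:crypto'
import * as iamap from 'iamap' // assumed import path

// The hasher receives a Uint8Array key and must return (at least) hashBytes bytes.
iamap.registerHasher(0x12, 32, (bytes) => {
  return new Uint8Array(createHash('sha256').update(bytes).digest())
})
```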
37265
|
+
}
|
37059
37266
|
|
37267
|
+
// simple stable key/value representation
|
37268
|
+
/**
|
37269
|
+
* @ignore
|
37270
|
+
*/
|
37271
|
+
class KV {
|
37060
37272
|
/**
|
37061
|
-
*
|
37062
|
-
* @
|
37273
|
+
* @ignore
|
37274
|
+
* @param {Uint8Array} key
|
37275
|
+
* @param {any} value
|
37063
37276
|
*/
|
37064
|
-
|
37065
|
-
|
37066
|
-
|
37067
|
-
|
37068
|
-
this.setKeyMaterial(keyMaterial);
|
37069
|
-
this.uploadQueue = cargoQueue(async (tasks, callback) => {
|
37070
|
-
// console.log(
|
37071
|
-
// 'queue worker',
|
37072
|
-
// tasks.length,
|
37073
|
-
// tasks.reduce((acc, t) => acc + t.value.length, 0)
|
37074
|
-
// )
|
37075
|
-
if (this.uploadFunction) {
|
37076
|
-
// todo we can coalesce these into a single car file
|
37077
|
-
return await this.withDB(async db => {
|
37078
|
-
for (const task of tasks) {
|
37079
|
-
await this.uploadFunction(task.carCid, task.value);
|
37080
|
-
// update the indexedb to mark this car as no longer pending
|
37081
|
-
const carMeta = await db.get('cidToCar', task.carCid);
|
37082
|
-
delete carMeta.pending;
|
37083
|
-
await db.put('cidToCar', carMeta);
|
37084
|
-
}
|
37085
|
-
})
|
37086
|
-
}
|
37087
|
-
callback();
|
37088
|
-
});
|
37277
|
+
constructor (key, value) {
|
37278
|
+
this.key = key;
|
37279
|
+
this.value = value;
|
37280
|
+
}
|
37089
37281
|
|
37090
|
-
|
37091
|
-
|
37092
|
-
|
37093
|
-
|
37094
|
-
|
37095
|
-
|
37096
|
-
delete carMeta.pending;
|
37097
|
-
await db.put('cidToCar', carMeta);
|
37098
|
-
}
|
37099
|
-
})
|
37100
|
-
});
|
37282
|
+
/**
|
37283
|
+
* @ignore
|
37284
|
+
* @returns {SerializedKV}
|
37285
|
+
*/
|
37286
|
+
toSerializable () {
|
37287
|
+
return [this.key, this.value]
|
37101
37288
|
}
|
37289
|
+
}
|
37102
37290
|
|
37103
|
-
|
37104
|
-
|
+ /**
+ * @ignore
+ * @param {SerializedKV} obj
+ * @returns {KV}
+ */
+ KV.fromSerializable = function (obj) {
+ assert(Array.isArray(obj));
+ assert(obj.length === 2);
+ return new KV(obj[0], obj[1])
+ };
+
+ // a element in the data array that each node holds, each element could be either a container of
+ // an array (bucket) of KVs or a link to a child node
+ class Element {
+ /**
+ * @ignore
+ * @param {KV[]} [bucket]
+ * @param {any} [link]
+ */
+ constructor (bucket, link) {
+ this.bucket = bucket || null;
+ this.link = link !== undefined ? link : null;
+ assert((this.bucket === null) === (this.link !== null));
}

-
-
-
-
-
-
+ /**
+ * @ignore
+ * @returns {SerializedElement}
+ */
+ toSerializable () {
+ if (this.bucket) {
+ return this.bucket.map((c) => {
+ return c.toSerializable()
+ })
} else {
- this.
- this.
+ assert(!IAMap.isIAMap(this.link)); // TODO: inline here
+ return this.link
}
- // console.trace('keyId', this.name, this.keyId)
}
+ }
+
+ /**
+ * @ignore
+ * @param {IsLink} isLink
+ * @param {any} obj
+ * @returns {Element}
+ */
+ Element.fromSerializable = function (isLink, obj) {
+ if (isLink(obj)) {
+ return new Element(undefined, obj)
+ } else if (Array.isArray(obj)) {
+ return new Element(obj.map(KV.fromSerializable))
+ }
+ throw new Error('Unexpected error: badly formed data element')
+ };

+ /**
+ * Immutable Asynchronous Map
+ *
+ * The `IAMap` constructor should not be used directly. Use `iamap.create()` or `iamap.load()` to instantiate.
+ *
+ * @class
+ * @template T
+ * @property {any} id - A unique identifier for this `IAMap` instance. IDs are generated by the backing store and
+ * are returned on `save()` operations.
+ * @property {number} config.hashAlg - The hash function used by this `IAMap` instance. See {@link iamap.create} for more
+ * details.
+ * @property {number} config.bitWidth - The number of bits used at each level of this `IAMap`. See {@link iamap.create}
+ * for more details.
+ * @property {number} config.bucketSize - TThe maximum number of collisions acceptable at each level of the Map.
+ * @property {Uint8Array} [map=Uint8Array] - Bitmap indicating which slots are occupied by data entries or child node links,
+ * each data entry contains an bucket of entries. Must be the appropriate size for `config.bitWidth`
+ * (`2 ** config.bitWith / 8` bytes).
+ * @property {number} [depth=0] - Depth of the current node in the IAMap, `depth` is used to extract bits from the
+ * key hashes to locate slots
+ * @property {Array} [data=[]] - Array of data elements (an internal `Element` type), each of which contains a
+ * bucket of entries or an ID of a child node
+ * See {@link iamap.create} for more details.
+ */
+ class IAMap {
/**
- *
- * @param {
- * @param {
- * @
- * @
+ * @ignore
+ * @param {Store<T>} store
+ * @param {Options} [options]
+ * @param {Uint8Array} [map]
+ * @param {number} [depth]
+ * @param {Element[]} [data]
*/
-
- if (
-
-
-
-
-
-
-
-
+ constructor (store, options, map, depth, data) {
+ if (!store || typeof store.save !== 'function' ||
+ typeof store.load !== 'function' ||
+ typeof store.isLink !== 'function' ||
+ typeof store.isEqual !== 'function') {
+ throw new TypeError('Invalid `store` option, must be of type: { save(node):id, load(id):node, isEqual(id,id):boolean, isLink(obj):boolean }')
+ }
+ this.store = store;
+
+ /**
+ * @ignore
+ * @type {any|null}
+ */
+ this.id = null;
+ this.config = buildConfig(options);
+
+ const hashBytes = hasherRegistry[this.config.hashAlg].hashBytes;
+
+ if (map !== undefined && !(map instanceof Uint8Array)) {
+ throw new TypeError('`map` must be a Uint8Array')
+ }
+ const mapLength = Math.ceil(Math.pow(2, this.config.bitWidth) / 8);
+ if (map !== undefined && map.length !== mapLength) {
+ throw new Error('`map` must be a Uint8Array of length ' + mapLength)
+ }
+ this.map = map || new Uint8Array(mapLength);
+
+ if (depth !== undefined && (!Number.isInteger(depth) || depth < 0)) {
+ throw new TypeError('`depth` must be an integer >= 0')
+ }
+ this.depth = depth || 0;
+ if (this.depth > Math.floor((hashBytes * 8) / this.config.bitWidth)) {
+ // our hasher only has `hashBytes` to work with and we take off `bitWidth` bits with each level
+ // e.g. 32-byte hash gives us a maximum depth of 51 levels
+ throw new Error('Overflow: maximum tree depth reached')
+ }
+
+ /**
+ * @ignore
+ * @type {ReadonlyElement}
+ */
+ this.data = Object.freeze(data || []);
+ for (const e of this.data) {
+ if (!(e instanceof Element)) {
+ throw new TypeError('`data` array must contain only `Element` types')
}
- } else {
- throw new Error('missing lastCid for car header')
}
}

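The constructor's size checks above follow directly from the configuration; a small worked sketch of the arithmetic (plain JavaScript, no library calls):

```js
// Numbers for bitWidth=5 with a 32-byte hash, matching the checks above.
const bitWidth = 5
const hashBytes = 32
const mapLength = Math.ceil(Math.pow(2, bitWidth) / 8)   // 32 slots -> 4-byte bitmap
const maxDepth = Math.floor((hashBytes * 8) / bitWidth)  // 256 bits / 5 = 51 levels
console.log({ mapLength, maxDepth })                     // { mapLength: 4, maxDepth: 51 }
```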
-
-
-
-
-
-
-
-
+ /**
+ * Asynchronously create a new `IAMap` instance identical to this one but with `key` set to `value`.
+ *
+ * @param {(string|Uint8Array)} key - A key for the `value` being set whereby that same `value` may
+ * be retrieved with a `get()` operation with the same `key`. The type of the `key` object should either be a
+ * `Uint8Array` or be convertable to a `Uint8Array` via `TextEncoder.
+ * @param {any} value - Any value that can be stored in the backing store. A value could be a serialisable object
+ * or an address or content address or other kind of link to the actual value.
+ * @param {Uint8Array} [_cachedHash] - for internal use
+ * @returns {Promise<IAMap<T>>} A `Promise` containing a new `IAMap` that contains the new key/value pair.
+ * @async
+ */
+ async set (key, value, _cachedHash) {
+ if (!(key instanceof Uint8Array)) {
+ key = textEncoder.encode(key);
+ }
+ const hash = _cachedHash instanceof Uint8Array ? _cachedHash : await hasher(this)(key);
+ const bitpos = mask(hash, this.depth, this.config.bitWidth);
+
+ if (bitmapHas(this.map, bitpos)) { // should be in a bucket in this node
+ const { data, link } = findElement(this, bitpos, key);
+ if (data) {
+ if (data.found) {
+ /* c8 ignore next 3 */
+ if (data.bucketIndex === undefined || data.bucketEntry === undefined) {
+ throw new Error('Unexpected error')
}
- if (
-
-
+ if (data.bucketEntry.value === value) {
+ return this // no change, identical value
+ }
+ // replace entry for this key with a new value
+ // note that === will fail for two complex objects representing the same data so we may end up
+ // with a node of the same ID anyway
+ return updateBucket(this, data.elementAt, data.bucketIndex, key, value)
+ } else {
+ /* c8 ignore next 3 */
+ if (!data.element.bucket) {
+ throw new Error('Unexpected error')
}
+ if (data.element.bucket.length >= this.config.bucketSize) {
+ // too many collisions at this level, replace a bucket with a child node
+ return (await replaceBucketWithNode(this, data.elementAt)).set(key, value, hash)
+ }
+ // insert into the bucket and sort it
+ return updateBucket(this, data.elementAt, -1, key, value)
}
- })
+ } else if (link) {
+ const child = await load$2(this.store, link.element.link, this.depth + 1, this.config);
+ assert(!!child);
+ const newChild = await child.set(key, value, hash);
+ return updateNode(this, link.elementAt, newChild)
+ /* c8 ignore next 3 */
+ } else {
+ throw new Error('Unexpected error')
+ }
+ } else { // we don't have an element for this hash portion, make one
+ return addNewElement(this, bitpos, key, value)
}
- return await dbWorkFun(this.idb)
}
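A short usage sketch for the immutable `set()` documented above; `map` is assumed to come from `iamap.create()` as in the earlier examples.

```js
// set() never mutates: each call returns a new root IAMap.
async function addTwo (map) {
  const a = await map.set('alpha', 1)                        // string keys are TextEncoder-encoded
  const b = await a.set('beta', { any: 'serialisable value' })
  // `map` is unchanged; `b` is the root containing both entries
  return b
}
```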

/**
- *
+ * Asynchronously find and return a value for the given `key` if it exists within this `IAMap`.
*
- * @
- *
+ * @param {string|Uint8Array} key - A key for the value being sought. See {@link IAMap#set} for
+ * details about acceptable `key` types.
+ * @param {Uint8Array} [_cachedHash] - for internal use
+ * @returns {Promise<any>} A `Promise` that resolves to the value being sought if that value exists within this `IAMap`. If the
+ * key is not found in this `IAMap`, the `Promise` will resolve to `undefined`.
+ * @async
*/
- async
-
-
-
-
-
-
-
+ async get (key, _cachedHash) {
+ if (!(key instanceof Uint8Array)) {
+ key = textEncoder.encode(key);
+ }
+ const hash = _cachedHash instanceof Uint8Array ? _cachedHash : await hasher(this)(key);
+ const bitpos = mask(hash, this.depth, this.config.bitWidth);
+ if (bitmapHas(this.map, bitpos)) { // should be in a bucket in this node
+ const { data, link } = findElement(this, bitpos, key);
+ if (data) {
+ if (data.found) {
+ /* c8 ignore next 3 */
+ if (data.bucketIndex === undefined || data.bucketEntry === undefined) {
+ throw new Error('Unexpected error')
+ }
+ return data.bucketEntry.value
+ }
+ return undefined // not found
+ } else if (link) {
+ const child = await load$2(this.store, link.element.link, this.depth + 1, this.config);
+ assert(!!child);
+ return await child.get(key, hash)
+ /* c8 ignore next 3 */
+ } else {
+ throw new Error('Unexpected error')
+ }
+ } else { // we don't have an element for this hash portion, not found
+ return undefined
+ }
+
+ /*
+ const traversal = traverseGet(this, key, this.store.isEqual, this.store.isLink, this.depth)
+ while (true) {
+ const nextId = traversal.traverse()
+ if (!nextId) {
+ return traversal.value()
+ }
+ const child = await this.store.load(nextId)
+ assert(!!child)
+ traversal.next(child)
}
+ */
}

/**
+ * Asynchronously find and return a boolean indicating whether the given `key` exists within this `IAMap`
*
- * @param {string}
- * @
+ * @param {string|Uint8Array} key - A key to check for existence within this `IAMap`. See
+ * {@link IAMap#set} for details about acceptable `key` types.
+ * @returns {Promise<boolean>} A `Promise` that resolves to either `true` or `false` depending on whether the `key` exists
+ * within this `IAMap`.
+ * @async
*/
- async
- await this.
-
-
-
-
-
-
-
-
-
-
-
+ async has (key) {
+ return (await this.get(key)) !== undefined
+ }
+
+ /**
+ * Asynchronously create a new `IAMap` instance identical to this one but with `key` and its associated
+ * value removed. If the `key` does not exist within this `IAMap`, this instance of `IAMap` is returned.
+ *
+ * @param {string|Uint8Array} key - A key to remove. See {@link IAMap#set} for details about
+ * acceptable `key` types.
+ * @param {Uint8Array} [_cachedHash] - for internal use
+ * @returns {Promise<IAMap<T>>} A `Promise` that resolves to a new `IAMap` instance without the given `key` or the same `IAMap`
+ * instance if `key` does not exist within it.
+ * @async
+ */
+ async delete (key, _cachedHash) {
+ if (!(key instanceof Uint8Array)) {
+ key = textEncoder.encode(key);
+ }
+ const hash = _cachedHash instanceof Uint8Array ? _cachedHash : await hasher(this)(key);
+ assert(hash instanceof Uint8Array);
+ const bitpos = mask(hash, this.depth, this.config.bitWidth);
+
+ if (bitmapHas(this.map, bitpos)) { // should be in a bucket in this node
+ const { data, link } = findElement(this, bitpos, key);
+ if (data) {
+ if (data.found) {
+ /* c8 ignore next 3 */
+ if (data.bucketIndex === undefined) {
+ throw new Error('Unexpected error')
+ }
+ if (this.depth !== 0 && this.directNodeCount() === 0 && this.directEntryCount() === this.config.bucketSize + 1) {
+ // current node will only have this.config.bucketSize entries spread across its buckets
+ // and no child nodes, so wrap up the remaining nodes in a fresh IAMap at depth 0, it will
+ // bubble up to either become the new root node or be unpacked by a higher level
+ return collapseIntoSingleBucket(this, hash, data.elementAt, data.bucketIndex)
+ } else {
+ // we'll either have more entries left than this.config.bucketSize or we're at the root node
+ // so this is a simple bucket removal, no collapsing needed (root nodes can't be collapsed)
+ const lastInBucket = this.data.length === 1;
+ // we have at least one child node or too many entries in buckets to be collapsed
+ const newData = removeFromBucket(this.data, data.elementAt, lastInBucket, data.bucketIndex);
+ let newMap = this.map;
+ if (lastInBucket) {
+ newMap = setBit(newMap, bitpos, false);
+ }
+ return create$2(this.store, this.config, newMap, this.depth, newData)
+ }
+ } else {
+ // key would be located here according to hash, but we don't have it
+ return this
+ }
+ } else if (link) {
+ const child = await load$2(this.store, link.element.link, this.depth + 1, this.config);
+ assert(!!child);
+ const newChild = await child.delete(key, hash);
+ if (this.store.isEqual(newChild.id, link.element.link)) { // no modification
+ return this
+ }
+
+ assert(newChild.data.length > 0); // something probably went wrong in the map block above
+
+ if (newChild.directNodeCount() === 0 && newChild.directEntryCount() === this.config.bucketSize) {
+ // child got collapsed
+ if (this.directNodeCount() === 1 && this.directEntryCount() === 0) {
+ // we only had one node to collapse and the child was collapsible so end up acting the same
+ // as the child, bubble it up and it either becomes the new root or finds a parent to collapse
+ // in to (next section)
+ return newChild
+ } else {
+ // extract data elements from this returned node and merge them into ours
+ return collapseNodeInline(this, bitpos, newChild)
+ }
+ } else {
+ // simple node replacement with edited child
+ return updateNode(this, link.elementAt, newChild)
+ }
+ /* c8 ignore next 3 */
+ } else {
+ throw new Error('Unexpected error')
}
-
-
- this.uploadQueue.push({ carCid, value });
- this.alreadyEnqueued.add(carCid);
+ } else { // we don't have an element for this hash portion
+ return this
}
}

|
-
|
37206
|
-
|
37207
|
-
|
37208
|
-
|
37209
|
-
|
37210
|
-
|
37211
|
-
|
37212
|
-
|
37213
|
-
|
37636
|
+
/**
|
37637
|
+
* Asynchronously count the number of key/value pairs contained within this `IAMap`, including its children.
|
37638
|
+
*
|
37639
|
+
* @returns {Promise<number>} A `Promise` with a `number` indicating the number of key/value pairs within this `IAMap` instance.
|
37640
|
+
* @async
|
37641
|
+
*/
|
37642
|
+
async size () {
|
37643
|
+
let c = 0;
|
37644
|
+
for (const e of this.data) {
|
37645
|
+
if (e.bucket) {
|
37646
|
+
c += e.bucket.length;
|
37647
|
+
} else {
|
37648
|
+
const child = await load$2(this.store, e.link, this.depth + 1, this.config);
|
37649
|
+
c += await child.size();
|
37214
37650
|
}
|
37215
|
-
|
37216
|
-
|
37217
|
-
|
37218
|
-
|
37219
|
-
|
37220
|
-
|
37221
|
-
|
37222
|
-
|
37223
|
-
|
37224
|
-
|
37225
|
-
|
37226
|
-
|
37227
|
-
|
37228
|
-
|
37229
|
-
|
37230
|
-
|
37231
|
-
|
37232
|
-
return decoded
|
37233
|
-
};
|
37234
|
-
const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
|
37235
|
-
const block = blocks.find(b => b.cid.toString() === dataCID);
|
37236
|
-
if (block) {
|
37237
|
-
return block.bytes
|
37651
|
+
}
|
37652
|
+
return c
|
37653
|
+
}
|
37654
|
+
|
37655
|
+
/**
|
37656
|
+
* Asynchronously emit all keys that exist within this `IAMap`, including its children. This will cause a full
|
37657
|
+
* traversal of all nodes.
|
37658
|
+
*
|
37659
|
+
* @returns {AsyncGenerator<Uint8Array>} An async iterator that yields keys. All keys will be in `Uint8Array` format regardless of which
|
37660
|
+
* format they were inserted via `set()`.
|
37661
|
+
* @async
|
37662
|
+
*/
|
37663
|
+
async * keys () {
|
37664
|
+
for (const e of this.data) {
|
37665
|
+
if (e.bucket) {
|
37666
|
+
for (const kv of e.bucket) {
|
37667
|
+
yield kv.key;
|
37238
37668
|
}
|
37239
37669
|
} else {
|
37240
|
-
const
|
37241
|
-
|
37242
|
-
|
37670
|
+
const child = await load$2(this.store, e.link, this.depth + 1, this.config);
|
37671
|
+
yield * child.keys();
|
37672
|
+
}
|
37673
|
+
}
|
37674
|
+
|
37675
|
+
// yield * traverseKV(this, 'keys', this.store.isLink)
|
37676
|
+
}
|
37677
|
+
|
37678
|
+
/**
|
37679
|
+
* Asynchronously emit all values that exist within this `IAMap`, including its children. This will cause a full
|
37680
|
+
* traversal of all nodes.
|
37681
|
+
*
|
37682
|
+
* @returns {AsyncGenerator<any>} An async iterator that yields values.
|
37683
|
+
* @async
|
37684
|
+
*/
|
37685
|
+
async * values () {
|
37686
|
+
for (const e of this.data) {
|
37687
|
+
if (e.bucket) {
|
37688
|
+
for (const kv of e.bucket) {
|
37689
|
+
yield kv.value;
|
37243
37690
|
}
|
37691
|
+
} else {
|
37692
|
+
const child = await load$2(this.store, e.link, this.depth + 1, this.config);
|
37693
|
+
yield * child.values();
|
37244
37694
|
}
|
37245
|
-
}
|
37695
|
+
}
|
37696
|
+
|
37697
|
+
// yield * traverseKV(this, 'values', this.store.isLink)
|
37246
37698
|
}
|
37247
|
-
}
|
37248
37699
|
|
37249
|
-
|
37250
|
-
|
37251
|
-
|
37252
|
-
|
37700
|
+
/**
|
37701
|
+
* Asynchronously emit all { key, value } pairs that exist within this `IAMap`, including its children. This will
|
37702
|
+
* cause a full traversal of all nodes.
|
37703
|
+
*
|
37704
|
+
* @returns {AsyncGenerator<{ key: Uint8Array, value: any}>} An async iterator that yields objects with the properties `key` and `value`.
|
37705
|
+
* @async
|
37706
|
+
*/
|
37707
|
+
async * entries () {
|
37708
|
+
for (const e of this.data) {
|
37709
|
+
if (e.bucket) {
|
37710
|
+
for (const kv of e.bucket) {
|
37711
|
+
yield { key: kv.key, value: kv.value };
|
37712
|
+
}
|
37713
|
+
} else {
|
37714
|
+
const child = await load$2(this.store, e.link, this.depth + 1, this.config);
|
37715
|
+
yield * child.entries();
|
37716
|
+
}
|
37717
|
+
}
|
37718
|
+
|
37719
|
+
// yield * traverseKV(this, 'entries', this.store.isLink)
|
37253
37720
|
}
|
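The `keys()`, `values()` and `entries()` methods documented above are async generators; a short consumption sketch, assuming `map` as in the earlier examples:

```js
// Full-traversal iteration over an IAMap instance.
async function dump (map) {
  for await (const key of map.keys()) console.log('key', key)         // always Uint8Array
  for await (const value of map.values()) console.log('value', value)
  for await (const { key, value } of map.entries()) console.log(key, value)
}
```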
37254
|
-
|
37255
|
-
|
37256
|
-
|
37257
|
-
|
37721
|
+
|
37722
|
+
/**
|
37723
|
+
* Asynchronously emit the IDs of this `IAMap` and all of its children.
|
37724
|
+
*
|
37725
|
+
* @returns {AsyncGenerator<any>} An async iterator that yields the ID of this `IAMap` and all of its children. The type of ID is
|
37726
|
+
* determined by the backing store which is responsible for generating IDs upon `save()` operations.
|
37727
|
+
*/
|
37728
|
+
async * ids () {
|
37729
|
+
yield this.id;
|
37730
|
+
for (const e of this.data) {
|
37731
|
+
if (e.link) {
|
37732
|
+
const child = await load$2(this.store, e.link, this.depth + 1, this.config);
|
37733
|
+
yield * child.ids();
|
37734
|
+
}
|
37735
|
+
}
|
37258
37736
|
}
|
37259
|
-
|
37260
|
-
|
37261
|
-
|
37737
|
+
|
37738
|
+
/**
|
37739
|
+
* Returns a serialisable form of this `IAMap` node. The internal representation of this local node is copied into a plain
|
37740
|
+
* JavaScript `Object` including a representation of its data array that the key/value pairs it contains as well as
|
37741
|
+
* the identifiers of child nodes.
|
37742
|
+
* Root nodes (depth==0) contain the full map configuration information, while intermediate and leaf nodes contain only
|
37743
|
+
* data that cannot be inferred by traversal from a root node that already has this data (hashAlg and bucketSize -- bitWidth
|
37744
|
+
* is inferred by the length of the `map` byte array).
|
37745
|
+
* The backing store can use this representation to create a suitable serialised form. When loading from the backing store,
|
37746
|
+
* `IAMap` expects to receive an object with the same layout from which it can instantiate a full `IAMap` object. Where
|
37747
|
+
* root nodes contain the full set of data and intermediate and leaf nodes contain just the required data.
|
37748
|
+
* For content addressable backing stores, it is expected that the same data in this serialisable form will always produce
|
37749
|
+
* the same identifier.
|
37750
|
+
* Note that the `map` property is a `Uint8Array` so will need special handling for some serialization forms (e.g. JSON).
|
37751
|
+
*
|
37752
|
+
* Root node form:
|
37753
|
+
* ```
|
37754
|
+
* {
|
37755
|
+
* hashAlg: number
|
37756
|
+
* bucketSize: number
|
37757
|
+
* hamt: [Uint8Array, Array]
|
37758
|
+
* }
|
37759
|
+
* ```
|
37760
|
+
*
|
37761
|
+
* Intermediate and leaf node form:
|
37762
|
+
* ```
|
37763
|
+
* [Uint8Array, Array]
|
37764
|
+
* ```
|
37765
|
+
*
|
37766
|
+
* The `Uint8Array` in both forms is the 'map' used to identify the presence of an element in this node.
|
37767
|
+
*
|
37768
|
+
* The second element in the tuple in both forms, `Array`, is an elements array a mix of either buckets or links:
|
37769
|
+
*
|
37770
|
+
* * A bucket is an array of two elements, the first being a `key` of type `Uint8Array` and the second a `value`
|
37771
|
+
* or whatever type has been provided in `set()` operations for this `IAMap`.
|
37772
|
+
* * A link is an object of the type that the backing store provides upon `save()` operations and can be identified
|
37773
|
+
* with `isLink()` calls.
|
37774
|
+
*
|
37775
|
+
* Buckets and links are differentiated by their "kind": a bucket is an array while a link is a "link" kind as dictated
|
37776
|
+
* by the backing store. We use `Array.isArray()` and `store.isLink()` to perform this differentiation.
|
37777
|
+
*
|
37778
|
+
* @returns {SerializedNode|SerializedRoot} An object representing the internal state of this local `IAMap` node, including its links to child nodes
|
37779
|
+
* if any.
|
37780
|
+
*/
|
37781
|
+
toSerializable () {
|
37782
|
+
const map = this.map;
|
37783
|
+
const data = this.data.map((/** @type {Element} */ e) => {
|
37784
|
+
return e.toSerializable()
|
37785
|
+
});
|
37786
|
+
/**
|
37787
|
+
* @ignore
|
37788
|
+
* @type {SerializedNode}
|
37789
|
+
*/
|
37790
|
+
const hamt = [map, data];
|
37791
|
+
if (this.depth !== 0) {
|
37792
|
+
return hamt
|
37793
|
+
}
|
37794
|
+
/**
|
37795
|
+
* @ignore
|
37796
|
+
* @type {SerializedElement}
|
37797
|
+
*/
|
37798
|
+
return {
|
37799
|
+
hashAlg: this.config.hashAlg,
|
37800
|
+
bucketSize: this.config.bucketSize,
|
37801
|
+
hamt
|
37802
|
+
}
|
37262
37803
|
}
|
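A sketch of how a backing store might distinguish the two serialised shapes described above when its `save()` is called; the `describeSerialized` helper is illustrative, not part of this package.

```js
// Root nodes arrive as { hashAlg, bucketSize, hamt: [map, data] };
// intermediate and leaf nodes arrive as the bare [map, data] tuple.
function describeSerialized (serialized) {
  if (Array.isArray(serialized)) {
    const [map, data] = serialized // intermediate or leaf node
    return { kind: 'node', mapBytes: map.length, elements: data.length }
  }
  const { hashAlg, bucketSize, hamt } = serialized // root node
  return { kind: 'root', hashAlg, bucketSize, elements: hamt[1].length }
}
```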
37263
|
-
const buffer = new Uint8Array(size);
|
37264
|
-
const writer = await createWriter(buffer, { headerSize });
|
37265
37804
|
|
37266
|
-
|
37267
|
-
|
37805
|
+
/**
|
37806
|
+
* Calculate the number of entries locally stored by this node. Performs a scan of local buckets and adds up
|
37807
|
+
* their size.
|
37808
|
+
*
|
37809
|
+
* @returns {number} A number representing the number of local entries.
|
37810
|
+
*/
|
37811
|
+
directEntryCount () {
|
37812
|
+
return this.data.reduce((/** @type {number} */ p, /** @type {Element} */ c) => {
|
37813
|
+
return p + (c.bucket ? c.bucket.length : 0)
|
37814
|
+
}, 0)
|
37268
37815
|
}
|
37269
37816
|
|
37270
|
-
|
37271
|
-
|
37817
|
+
/**
|
37818
|
+
* Calculate the number of child nodes linked by this node. Performs a scan of the local entries and tallies up the
|
37819
|
+
* ones containing links to child nodes.
|
37820
|
+
*
|
37821
|
+
* @returns {number} A number representing the number of direct child nodes
|
37822
|
+
*/
|
37823
|
+
directNodeCount () {
|
37824
|
+
return this.data.reduce((/** @type {number} */ p, /** @type {Element} */ c) => {
|
37825
|
+
return p + (c.link ? 1 : 0)
|
37826
|
+
}, 0)
|
37272
37827
|
}
|
37273
|
-
await writer.close();
|
37274
|
-
return await encode$7({ value: writer.bytes, hasher: sha256$2, codec: raw })
|
37275
|
-
};
|
37276
37828
|
|
37277
|
-
|
37829
|
+
/**
|
37830
|
+
* Asynchronously perform a check on this node and its children that it is in canonical format for the current data.
|
37831
|
+
* As this uses `size()` to calculate the total number of entries in this node and its children, it performs a full
|
37832
|
+
* scan of nodes and therefore incurs a load and deserialisation cost for each child node.
|
37833
|
+
* A `false` result from this method suggests a flaw in the implemetation.
|
37834
|
+
*
|
37835
|
+
* @async
|
37836
|
+
* @returns {Promise<boolean>} A Promise with a boolean value indicating whether this IAMap is correctly formatted.
|
37837
|
+
*/
|
37838
|
+
async isInvariant () {
|
37839
|
+
const size = await this.size();
|
37840
|
+
const entryArity = this.directEntryCount();
|
37841
|
+
const nodeArity = this.directNodeCount();
|
37842
|
+
const arity = entryArity + nodeArity;
|
37843
|
+
let sizePredicate = 2; // 2 == 'more than one'
|
37844
|
+
if (nodeArity === 0) {
|
37845
|
+
sizePredicate = Math.min(2, entryArity); // 0, 1 or 2=='more than one'
|
37846
|
+
}
|
37847
|
+
|
37848
|
+
const inv1 = size - entryArity >= 2 * (arity - entryArity);
|
37849
|
+
const inv2 = arity === 0 ? sizePredicate === 0 : true;
|
37850
|
+
const inv3 = (arity === 1 && entryArity === 1) ? sizePredicate === 1 : true;
|
37851
|
+
const inv4 = arity >= 2 ? sizePredicate === 2 : true;
|
37852
|
+
const inv5 = nodeArity >= 0 && entryArity >= 0 && ((entryArity + nodeArity) === arity);
|
37853
|
+
|
37854
|
+
return inv1 && inv2 && inv3 && inv4 && inv5
|
37855
|
+
}
|
37856
|
+
|
37857
|
+
/**
|
37858
|
+
* A convenience shortcut to {@link iamap.fromSerializable} that uses this IAMap node instance's backing `store` and
|
37859
|
+
* configuration `options`. Intended to be used to instantiate child IAMap nodes from a root IAMap node.
|
37860
|
+
*
|
37861
|
+
* @param {any} id An optional ID for the instantiated IAMap node. See {@link iamap.fromSerializable}.
|
37862
|
+
* @param {any} serializable The serializable form of an IAMap node to be instantiated.
|
37863
|
+
* @param {number} [depth=0] The depth of the IAMap node. See {@link iamap.fromSerializable}.
|
37864
|
+
*/
|
37865
|
+
fromChildSerializable (id, serializable, depth) {
|
37866
|
+
return fromSerializable(this.store, id, serializable, this.config, depth)
|
37867
|
+
}
|
37868
|
+
}
|
37869
|
+
|
37870
|
+
/**
|
37871
|
+
* store a new node and assign it an ID
|
37872
|
+
* @ignore
|
37873
|
+
* @template T
|
37874
|
+
* @param {Store<T>} store
|
37875
|
+
* @param {IAMap<T>} newNode
|
37876
|
+
* @returns {Promise<IAMap<T>>}
|
37877
|
+
*/
|
37878
|
+
async function save (store, newNode) {
|
37879
|
+
const id = await store.save(newNode.toSerializable());
|
37880
|
+
newNode.id = id;
|
37881
|
+
return newNode
|
37882
|
+
}
|
37883
|
+
|
37884
|
+
/**
|
37885
|
+
* // utility function to avoid duplication since it's used across get(), set() and delete()
|
37886
|
+
* { bucket: { found: true, elementAt, element, bucketIndex, bucketEntry } }
|
37887
|
+
* { bucket: { found: false, elementAt, element } }
|
37888
|
+
* { link: { elementAt, element } }
|
37889
|
+
* @ignore
|
37890
|
+
* @template T
|
37891
|
+
* @param {IAMap<T>} node
|
37892
|
+
* @param {number} bitpos
|
37893
|
+
* @param {Uint8Array} key
|
37894
|
+
* @returns {FoundElement}
|
37895
|
+
*/
|
37896
|
+
function findElement (node, bitpos, key) {
|
37897
|
+
const elementAt = index(node.map, bitpos);
|
37898
|
+
const element = node.data[elementAt];
|
37899
|
+
assert(!!element);
|
37900
|
+
if (element.bucket) { // data element
|
37901
|
+
for (let bucketIndex = 0; bucketIndex < element.bucket.length; bucketIndex++) {
|
37902
|
+
const bucketEntry = element.bucket[bucketIndex];
|
37903
|
+
if (byteCompare(bucketEntry.key, key) === 0) {
|
37904
|
+
return { data: { found: true, elementAt, element, bucketIndex, bucketEntry } }
|
37905
|
+
}
|
37906
|
+
}
|
37907
|
+
return { data: { found: false, elementAt, element } }
|
37908
|
+
}
|
37909
|
+
assert(!!element.link);
|
37910
|
+
return { link: { elementAt, element } }
|
37911
|
+
}
|
37912
|
+
|
37913
|
+
/**
|
37914
|
+
* new element for this node, i.e. first time this hash portion has been seen here
|
37915
|
+
* @ignore
|
37916
|
+
* @template T
|
37917
|
+
* @param {IAMap<T>} node
|
37918
|
+
* @param {number} bitpos
|
37919
|
+
* @param {Uint8Array} key
|
37920
|
+
* @param {any} value
|
37921
|
+
* @returns {Promise<IAMap<T>>}
|
37922
|
+
*/
|
37923
|
+
async function addNewElement (node, bitpos, key, value) {
|
37924
|
+
const insertAt = index(node.map, bitpos);
|
37925
|
+
const newData = node.data.slice();
|
37926
|
+
newData.splice(insertAt, 0, new Element([new KV(key, value)]));
|
37927
|
+
const newMap = setBit(node.map, bitpos, true);
|
37928
|
+
return create$2(node.store, node.config, newMap, node.depth, newData)
|
37929
|
+
}
|
37930
|
+
|
37931
|
+
/**
|
37932
|
+
* update an existing bucket with a new k/v pair
|
37933
|
+
* @ignore
|
37934
|
+
* @template T
|
37935
|
+
* @param {IAMap<T>} node
|
37936
|
+
* @param {number} elementAt
|
37937
|
+
* @param {number} bucketAt
|
37938
|
+
* @param {Uint8Array} key
|
37939
|
+
* @param {any} value
|
37940
|
+
* @returns {Promise<IAMap<T>>}
|
37941
|
+
*/
|
37942
|
+
async function updateBucket (node, elementAt, bucketAt, key, value) {
|
37943
|
+
const oldElement = node.data[elementAt];
|
37944
|
+
/* c8 ignore next 3 */
|
37945
|
+
if (!oldElement.bucket) {
|
37946
|
+
throw new Error('Unexpected error')
|
37947
|
+
}
|
37948
|
+
const newElement = new Element(oldElement.bucket.slice());
|
37949
|
+
const newKv = new KV(key, value);
|
37950
|
+
/* c8 ignore next 3 */
|
37951
|
+
if (!newElement.bucket) {
|
37952
|
+
throw new Error('Unexpected error')
|
37953
|
+
}
|
37954
|
+
if (bucketAt === -1) {
|
37955
|
+
newElement.bucket.push(newKv);
|
37956
|
+
// in-bucket sort is required to maintain a canonical state
|
37957
|
+
newElement.bucket.sort((/** @type {KV} */ a, /** @type {KV} */ b) => byteCompare(a.key, b.key));
|
37958
|
+
} else {
|
37959
|
+
newElement.bucket[bucketAt] = newKv;
|
37960
|
+
}
|
37961
|
+
const newData = node.data.slice();
|
37962
|
+
newData[elementAt] = newElement;
|
37963
|
+
return create$2(node.store, node.config, node.map, node.depth, newData)
|
37964
|
+
}
|
37965
|
+
|
37966
|
+
/**
|
37967
|
+
* overflow of a bucket means it has to be replaced with a child node, tricky surgery
|
37968
|
+
* @ignore
|
37969
|
+
* @template T
|
37970
|
+
* @param {IAMap<T>} node
|
37971
|
+
* @param {number} elementAt
|
37972
|
+
* @returns {Promise<IAMap<T>>}
|
37973
|
+
*/
|
37974
|
+
async function replaceBucketWithNode (node, elementAt) {
|
37975
|
+
let newNode = new IAMap(node.store, node.config, undefined, node.depth + 1);
|
37976
|
+
const element = node.data[elementAt];
|
37977
|
+
assert(!!element);
|
37978
|
+
/* c8 ignore next 3 */
|
37979
|
+
if (!element.bucket) {
|
37980
|
+
throw new Error('Unexpected error')
|
37981
|
+
}
|
37982
|
+
for (const c of element.bucket) {
|
37983
|
+
newNode = await newNode.set(c.key, c.value);
|
37984
|
+
}
|
37985
|
+
newNode = await save(node.store, newNode);
|
37986
|
+
const newData = node.data.slice();
|
37987
|
+
newData[elementAt] = new Element(undefined, newNode.id);
|
37988
|
+
return create$2(node.store, node.config, node.map, node.depth, newData)
|
37989
|
+
}
|
37990
|
+
|
37991
|
+
/**
|
37992
|
+
* similar to addNewElement() but for new child nodes
|
37993
|
+
* @ignore
|
37994
|
+
* @template T
|
37995
|
+
* @param {IAMap<T>} node
|
37996
|
+
* @param {number} elementAt
|
37997
|
+
* @param {IAMap<T>} newChild
|
37998
|
+
* @returns {Promise<IAMap<T>>}
|
37999
|
+
*/
|
38000
|
+
async function updateNode (node, elementAt, newChild) {
|
38001
|
+
assert(!!newChild.id);
|
38002
|
+
const newElement = new Element(undefined, newChild.id);
|
38003
|
+
const newData = node.data.slice();
|
38004
|
+
newData[elementAt] = newElement;
|
38005
|
+
return create$2(node.store, node.config, node.map, node.depth, newData)
|
38006
|
+
}
|
38007
|
+
|
38008
|
+
// take a node, extract all of its local entries and put them into a new node with a single
|
38009
|
+
// bucket; used for collapsing a node and sending it upward
|
38010
|
+
/**
|
38011
|
+
* @ignore
|
38012
|
+
* @template T
|
38013
|
+
* @param {IAMap<T>} node
|
38014
|
+
* @param {Uint8Array} hash
|
38015
|
+
* @param {number} elementAt
|
38016
|
+
* @param {number} bucketIndex
|
38017
|
+
* @returns {Promise<IAMap<T>>}
|
38018
|
+
*/
|
38019
|
+
function collapseIntoSingleBucket (node, hash, elementAt, bucketIndex) {
|
38020
|
+
// pretend it's depth=0 (it may end up being) and only 1 bucket
|
38021
|
+
const newMap = setBit(new Uint8Array(node.map.length), mask(hash, 0, node.config.bitWidth), true);
|
38022
|
+
/**
|
38023
|
+
* @ignore
|
38024
|
+
* @type {KV[]}
|
38025
|
+
*/
|
38026
|
+
const newBucket = node.data.reduce((/** @type {KV[]} */ p, /** @type {Element} */ c, /** @type {number} */ i) => {
|
38027
|
+
if (i === elementAt) {
|
38028
|
+
/* c8 ignore next 3 */
|
38029
|
+
if (!c.bucket) {
|
38030
|
+
throw new Error('Unexpected error')
|
38031
|
+
}
|
38032
|
+
if (c.bucket.length === 1) { // only element in bucket, skip it
|
38033
|
+
return p
|
38034
|
+
} else {
|
38035
|
+
// there's more in this bucket, make a temporary one, remove it and concat it
|
38036
|
+
const tmpBucket = c.bucket.slice();
|
38037
|
+
tmpBucket.splice(bucketIndex, 1);
|
38038
|
+
return p.concat(tmpBucket)
|
38039
|
+
}
|
38040
|
+
} else {
|
38041
|
+
/* c8 ignore next 3 */
|
38042
|
+
if (!c.bucket) {
|
38043
|
+
throw new Error('Unexpected error')
|
38044
|
+
}
|
38045
|
+
return p.concat(c.bucket)
|
38046
|
+
}
|
38047
|
+
}, /** @type {KV[]} */ []);
|
38048
|
+
newBucket.sort((a, b) => byteCompare(a.key, b.key));
|
38049
|
+
const newElement = new Element(newBucket);
|
38050
|
+
return create$2(node.store, node.config, newMap, 0, [newElement])
|
38051
|
+
}
|
38052
|
+
|
38053
|
+
// simple delete from an existing bucket in this node
|
38054
|
+
/**
|
38055
|
+
* @ignore
|
38056
|
+
* @param {ReadonlyElement} data
|
38057
|
+
* @param {number} elementAt
|
38058
|
+
* @param {boolean} lastInBucket
|
38059
|
+
* @param {number} bucketIndex
|
38060
|
+
* @returns {Element[]}
|
38061
|
+
*/
|
38062
|
+
function removeFromBucket (data, elementAt, lastInBucket, bucketIndex) {
|
38063
|
+
const newData = data.slice();
|
38064
|
+
if (!lastInBucket) {
|
38065
|
+
// bucket will not be empty, remove only the element from it
|
38066
|
+
const oldElement = data[elementAt];
|
38067
|
+
/* c8 ignore next 3 */
|
38068
|
+
if (!oldElement.bucket) {
|
38069
|
+
throw new Error('Unexpected error')
|
38070
|
+
}
|
38071
|
+
const newElement = new Element(oldElement.bucket.slice());
|
38072
|
+
/* c8 ignore next 3 */
|
38073
|
+
if (!newElement.bucket) {
|
38074
|
+
throw new Error('Unexpected error')
|
38075
|
+
}
|
38076
|
+
newElement.bucket.splice(bucketIndex, 1);
|
38077
|
+
newData.splice(elementAt, 1, newElement); // replace old bucket
|
38078
|
+
} else {
|
38079
|
+
// empty bucket, just remove it
|
38080
|
+
newData.splice(elementAt, 1);
|
38081
|
+
}
|
38082
|
+
return newData
|
38083
|
+
}
|
38084
|
+
|
38085
|
+
/**
|
38086
|
+
* a node has bubbled up from a recursive delete() and we need to extract its
|
38087
|
+
* contents and insert it into ours
|
38088
|
+
* @ignore
|
38089
|
+
* @template T
|
38090
|
+
* @param {IAMap<T>} node
|
38091
|
+
* @param {number} bitpos
|
38092
|
+
* @param {IAMap<T>} newNode
|
38093
|
+
* @returns {Promise<IAMap<T>>}
|
38094
|
+
*/
|
38095
|
+
async function collapseNodeInline (node, bitpos, newNode) {
|
38096
|
+
// assume the newNode has a single bucket and it's sorted and ready to replace the place
|
38097
|
+
// it had in node's element array
|
38098
|
+
assert(newNode.data.length === 1);
|
38099
|
+
/* c8 ignore next 3 */
|
38100
|
+
if (!newNode.data[0].bucket) {
|
38101
|
+
throw new Error('Unexpected error')
|
38102
|
+
}
|
38103
|
+
const newBucket = newNode.data[0].bucket.slice();
|
38104
|
+
const newElement = new Element(newBucket);
|
38105
|
+
const elementIndex = index(node.map, bitpos);
|
38106
|
+
const newData = node.data.slice();
|
38107
|
+
newData[elementIndex] = newElement;
|
38108
|
+
|
38109
|
+
return create$2(node.store, node.config, node.map, node.depth, newData)
|
38110
|
+
}
|
38111
|
+
|
38112
|
+
/**
|
38113
|
+
* @ignore
|
38114
|
+
* @param {Options} [options]
|
38115
|
+
* @returns {Config}
|
38116
|
+
*/
|
38117
|
+
function buildConfig (options) {
|
38118
|
+
/**
|
38119
|
+
* @ignore
|
38120
|
+
* @type {Config}
|
38121
|
+
*/
|
38122
|
+
const config = {};
|
38123
|
+
|
38124
|
+
if (!options) {
|
38125
|
+
throw new TypeError('Invalid `options` object')
|
38126
|
+
}
|
38127
|
+
|
38128
|
+
if (!Number.isInteger(options.hashAlg)) {
|
38129
|
+
throw new TypeError('Invalid `hashAlg` option')
|
38130
|
+
}
|
38131
|
+
if (!hasherRegistry[options.hashAlg]) {
|
38132
|
+
throw new TypeError(`Unknown hashAlg: '${options.hashAlg}'`)
|
38133
|
+
}
|
38134
|
+
config.hashAlg = options.hashAlg;
|
38135
|
+
|
38136
|
+
if (options.bitWidth !== undefined) {
|
38137
|
+
if (Number.isInteger(options.bitWidth)) {
|
38138
|
+
if (options.bitWidth < 3 || options.bitWidth > 16) {
|
38139
|
+
throw new TypeError('Invalid `bitWidth` option, must be between 3 and 16')
|
38140
|
+
}
|
38141
|
+
config.bitWidth = options.bitWidth;
|
38142
|
+
} else {
|
38143
|
+
throw new TypeError('Invalid `bitWidth` option')
|
38144
|
+
}
|
38145
|
+
} else {
|
38146
|
+
config.bitWidth = defaultBitWidth;
|
38147
|
+
}
|
38148
|
+
|
38149
|
+
if (options.bucketSize !== undefined) {
|
38150
|
+
if (Number.isInteger(options.bucketSize)) {
|
38151
|
+
if (options.bucketSize < 2) {
|
38152
|
+
throw new TypeError('Invalid `bucketSize` option')
|
38153
|
+
}
|
38154
|
+
config.bucketSize = options.bucketSize;
|
38155
|
+
} else {
|
38156
|
+
throw new TypeError('Invalid `bucketSize` option')
|
38157
|
+
}
|
38158
|
+
} else {
|
38159
|
+
config.bucketSize = defaultBucketSize;
|
38160
|
+
}
|
38161
|
+
|
38162
|
+
return config
|
38163
|
+
}
|
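The option ranges enforced by `buildConfig()` above are: `hashAlg` must be a registered integer code, `bitWidth` an integer between 3 and 16, and `bucketSize` an integer of at least 2. A hedged sketch of a valid options object (0x12 assumes the SHA2-256 registration from the earlier example):

```js
// Valid configuration under the checks above; omitting bitWidth or bucketSize
// falls back to the module defaults (defaultBitWidth / defaultBucketSize).
const options = { hashAlg: 0x12, bitWidth: 8, bucketSize: 5 }
```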
38164
|
+
|
38165
|
+
/**
|
38166
|
+
* Determine if a serializable object is an IAMap root type, can be used to assert whether a data block is
|
38167
|
+
* an IAMap before trying to instantiate it.
|
38168
|
+
*
|
38169
|
+
* @name iamap.isRootSerializable
|
38170
|
+
* @function
|
38171
|
+
* @param {any} serializable An object that may be a serialisable form of an IAMap root node
|
38172
|
+
* @returns {boolean} An indication that the serialisable form is or is not an IAMap root node
|
38173
|
+
*/
|
38174
|
+
function isRootSerializable (serializable) {
|
38175
|
+
return typeof serializable === 'object' &&
|
38176
|
+
Number.isInteger(serializable.hashAlg) &&
|
38177
|
+
Number.isInteger(serializable.bucketSize) &&
|
38178
|
+
Array.isArray(serializable.hamt) &&
|
38179
|
+
isSerializable(serializable.hamt)
|
38180
|
+
}
|
38181
|
+
|
38182
|
+
/**
|
38183
|
+
* Determine if a serializable object is an IAMap node type, can be used to assert whether a data block is
|
38184
|
+
* an IAMap node before trying to instantiate it.
|
38185
|
+
* This should pass for both root nodes as well as child nodes
|
38186
|
+
*
|
38187
|
+
* @name iamap.isSerializable
|
38188
|
+
* @function
|
38189
|
+
* @param {any} serializable An object that may be a serialisable form of an IAMap node
|
38190
|
+
* @returns {boolean} An indication that the serialisable form is or is not an IAMap node
|
38191
|
+
*/
|
38192
|
+
function isSerializable (serializable) {
|
38193
|
+
if (Array.isArray(serializable)) {
|
38194
|
+
return serializable.length === 2 && serializable[0] instanceof Uint8Array && Array.isArray(serializable[1])
|
38195
|
+
}
|
38196
|
+
return isRootSerializable(serializable)
|
38197
|
+
}
|
38198
|
+
|
38199
|
+
/**
|
38200
|
+
* Instantiate an IAMap from a valid serialisable form of an IAMap node. The serializable should be the same as
|
38201
|
+
* produced by {@link IAMap#toSerializable}.
|
38202
|
+
* Serialised forms of root nodes must satisfy both {@link iamap.isRootSerializable} and {@link iamap.isSerializable}. For
|
38203
|
+
* root nodes, the `options` parameter will be ignored and the `depth` parameter must be the default value of `0`.
|
38204
|
+
* Serialised forms of non-root nodes must satisfy {@link iamap.isSerializable} and have a valid `options` parameter and
|
38205
|
+
* a non-`0` `depth` parameter.
|
38206
|
+
*
|
38207
|
+
* @name iamap.fromSerializable
|
38208
|
+
* @function
|
38209
|
+
* @template T
|
38210
|
+
* @param {Store<T>} store A backing store for this Map. See {@link iamap.create}.
|
38211
|
+
* @param {any} id An optional ID for the instantiated IAMap node. Unlike {@link iamap.create},
|
38212
|
+
* `fromSerializable()` does not `save()` a newly created IAMap node so an ID is not generated for it. If one is
|
38213
|
+
* required for downstream purposes it should be provided, if the value is `null` or `undefined`, `node.id` will
|
38214
|
+
* be `null` but will remain writable.
|
38215
|
+
* @param {any} serializable The serializable form of an IAMap node to be instantiated
|
38216
|
+
* @param {Options} [options=null] An options object for IAMap child node instantiation. Will be ignored for root
|
38217
|
+
* node instantiation (where `depth` = `0`) See {@link iamap.create}.
|
38218
|
+
* @param {number} [depth=0] The depth of the IAMap node. Where `0` is the root node and any `>0` number is a child
|
38219
|
+
* node.
|
38220
|
+
* @returns {IAMap<T>}
|
38221
|
+
*/
|
38222
|
+
function fromSerializable (store, id, serializable, options, depth = 0) {
|
38223
|
+
/**
|
38224
|
+
* @ignore
|
38225
|
+
* @type {SerializedNode}
|
38226
|
+
*/
|
38227
|
+
let hamt;
|
38228
|
+
if (depth === 0) { // even if options were supplied, ignore them and use what's in the serializable
|
38229
|
+
if (!isRootSerializable(serializable)) {
|
38230
|
+
throw new Error('Loaded object does not appear to be an IAMap root (depth==0)')
|
38231
|
+
}
|
38232
|
+
// don't use passed-in options
|
38233
|
+
options = serializableToOptions(serializable);
|
38234
|
+
hamt = serializable.hamt;
|
38235
|
+
} else {
|
38236
|
+
if (!isSerializable(serializable)) {
|
38237
|
+
throw new Error('Loaded object does not appear to be an IAMap node (depth>0)')
|
38238
|
+
}
|
38239
|
+
hamt = serializable;
|
38240
|
+
}
|
38241
|
+
const data = hamt[1].map(Element.fromSerializable.bind(null, store.isLink));
|
38242
|
+
const node = new IAMap(store, options, hamt[0], depth, data);
|
38243
|
+
if (id != null) {
|
38244
|
+
node.id = id;
|
38245
|
+
}
|
38246
|
+
return node
|
38247
|
+
}
|
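A sketch of re-instantiating a root node from a block handed back by a store, guarding with `isRootSerializable()` first. The `iamap` import path and the availability of these functions as named exports are assumptions.

```js
import * as iamap from 'iamap' // assumed import path

async function loadRoot (store, rootId) {
  const serialized = await store.load(rootId)
  if (!iamap.isRootSerializable(serialized)) {
    throw new Error('block is not an IAMap root')
  }
  // depth defaults to 0, so the options embedded in the root block are used
  return iamap.fromSerializable(store, rootId, serialized)
}
```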
38248
|
+
|
38249
|
+
/**
|
38250
|
+
* @ignore
|
38251
|
+
* @param {any} serializable
|
38252
|
+
* @returns {Config}
|
38253
|
+
*/
|
38254
|
+
function serializableToOptions (serializable) {
|
38255
|
+
return {
|
38256
|
+
hashAlg: serializable.hashAlg,
|
38257
|
+
bitWidth: Math.log2(serializable.hamt[0].length * 8), // inverse of (2**bitWidth) / 8
|
38258
|
+
bucketSize: serializable.bucketSize
|
38259
|
+
}
|
38260
|
+
}
|
38261
|
+
|
38262
|
+
/**
|
38263
|
+
* @template T
|
38264
|
+
* @param {IAMap<T> | any} node
|
38265
|
+
* @returns {boolean}
|
38266
|
+
*/
|
38267
|
+
IAMap.isIAMap = function isIAMap (node) {
|
38268
|
+
return node instanceof IAMap
|
38269
|
+
};
|
38270
|
+
|
38271
|
+
/**
|
38272
|
+
* internal utility to fetch a map instance's hash function
|
38273
|
+
*
|
38274
|
+
* @ignore
|
38275
|
+
* @template T
|
38276
|
+
* @param {IAMap<T>} map
|
38277
|
+
* @returns {Hasher}
|
38278
|
+
*/
|
38279
|
+
function hasher (map) {
|
38280
|
+
return hasherRegistry[map.config.hashAlg].hasher
|
38281
|
+
}
|
38282
|
+
|
38283
|
+
/**
|
38284
|
+
* @ignore
|
38285
|
+
* @param {Uint8Array} b1
|
38286
|
+
* @param {Uint8Array} b2
|
38287
|
+
* @returns {number}
|
38288
|
+
*/
|
38289
|
+
function byteCompare (b1, b2) {
|
38290
|
+
let x = b1.length;
|
38291
|
+
let y = b2.length;
|
38292
|
+
|
38293
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
38294
|
+
if (b1[i] !== b2[i]) {
|
38295
|
+
x = b1[i];
|
38296
|
+
y = b2[i];
|
38297
|
+
break
|
38298
|
+
}
|
38299
|
+
}
|
38300
|
+
if (x < y) {
|
38301
|
+
return -1
|
38302
|
+
}
|
38303
|
+
if (y < x) {
|
38304
|
+
return 1
|
38305
|
+
}
|
38306
|
+
return 0
|
38307
|
+
}
|
38308
|
+
|
38309
|
+
var create_1 = create$2;
|
38310
|
+
var load_1 = load$2;
|
38311
|
+
var registerHasher_1 = registerHasher;
|
38312
|
+
|
38313
|
+
/** Auto-generated with ipld-schema-validator@1.0.1 at Tue Aug 16 2022 from IPLD Schema:
|
38314
|
+
*
|
38315
|
+
* # Root node layout
|
38316
|
+
* type HashMapRoot struct {
|
38317
|
+
* hashAlg Int
|
38318
|
+
* bucketSize Int
|
38319
|
+
* hamt HashMapNode
|
38320
|
+
* }
|
38321
|
+
*
|
38322
|
+
* # Non-root node layout
|
38323
|
+
* type HashMapNode struct {
|
38324
|
+
* map Bytes
|
38325
|
+
* data [ Element ]
|
38326
|
+
* } representation tuple
|
38327
|
+
*
|
38328
|
+
* type Element union {
|
38329
|
+
* | &HashMapNode link
|
38330
|
+
* | Bucket list
|
38331
|
+
* } representation kinded
|
38332
|
+
*
|
38333
|
+
* type Bucket [ BucketEntry ]
|
38334
|
+
*
|
38335
|
+
* type BucketEntry struct {
|
38336
|
+
* key Bytes
|
38337
|
+
* value Any
|
38338
|
+
* } representation tuple
|
38339
|
+
*
|
38340
|
+
*/
|
38341
|
+
|
38342
|
+
const Kinds = {
|
38343
|
+
Null: /** @returns {boolean} */ (/** @type {any} */ obj) => obj === null,
|
38344
|
+
Int: /** @returns {boolean} */ (/** @type {any} */ obj) => Number.isInteger(obj),
|
38345
|
+
Float: /** @returns {boolean} */ (/** @type {any} */ obj) => typeof obj === 'number' && Number.isFinite(obj),
|
38346
|
+
String: /** @returns {boolean} */ (/** @type {any} */ obj) => typeof obj === 'string',
|
38347
|
+
Bool: /** @returns {boolean} */ (/** @type {any} */ obj) => typeof obj === 'boolean',
|
38348
|
+
Bytes: /** @returns {boolean} */ (/** @type {any} */ obj) => obj instanceof Uint8Array,
|
38349
|
+
Link: /** @returns {boolean} */ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID === obj,
|
38350
|
+
List: /** @returns {boolean} */ (/** @type {any} */ obj) => Array.isArray(obj),
|
38351
|
+
Map: /** @returns {boolean} */ (/** @type {any} */ obj) => !Kinds.Null(obj) && typeof obj === 'object' && obj.asCID !== obj && !Kinds.List(obj) && !Kinds.Bytes(obj)
|
38352
|
+
};
|
38353
|
+
/** @type {{ [k in string]: (obj:any)=>boolean}} */
|
38354
|
+
const Types = {
|
38355
|
+
Int: Kinds.Int,
|
38356
|
+
'HashMapRoot > hashAlg': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Int(obj),
|
38357
|
+
'HashMapRoot > bucketSize': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Int(obj),
|
38358
|
+
Bytes: Kinds.Bytes,
|
38359
|
+
'HashMapNode > map': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Bytes(obj),
|
38360
|
+
'Element > HashMapNode (anon)': Kinds.Link,
|
38361
|
+
'BucketEntry > key': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Bytes(obj),
|
38362
|
+
Any: /** @returns {boolean} */ (/** @type {any} */ obj) => (Kinds.Bool(obj) && Types.Bool(obj)) || (Kinds.String(obj) && Types.String(obj)) || (Kinds.Bytes(obj) && Types.Bytes(obj)) || (Kinds.Int(obj) && Types.Int(obj)) || (Kinds.Float(obj) && Types.Float(obj)) || (Kinds.Null(obj) && Types.Null(obj)) || (Kinds.Link(obj) && Types.Link(obj)) || (Kinds.Map(obj) && Types.AnyMap(obj)) || (Kinds.List(obj) && Types.AnyList(obj)),
|
38363
|
+
Bool: Kinds.Bool,
|
38364
|
+
String: Kinds.String,
|
38365
|
+
Float: Kinds.Float,
|
38366
|
+
Null: Kinds.Null,
|
38367
|
+
Link: Kinds.Link,
|
38368
|
+
AnyMap: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.Map(obj) && Array.prototype.every.call(Object.values(obj), Types.Any),
|
38369
|
+
AnyList: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types.Any),
|
38370
|
+
'BucketEntry > value': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.Any(obj),
|
38371
|
+
BucketEntry: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && obj.length === 2 && Types['BucketEntry > key'](obj[0]) && Types['BucketEntry > value'](obj[1]),
|
38372
|
+
Bucket: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types.BucketEntry),
|
38373
|
+
Element: /** @returns {boolean} */ (/** @type {any} */ obj) => (Kinds.Link(obj) && Types['Element > HashMapNode (anon)'](obj)) || (Kinds.List(obj) && Types.Bucket(obj)),
|
38374
|
+
'HashMapNode > data (anon)': /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && Array.prototype.every.call(obj, Types.Element),
|
38375
|
+
'HashMapNode > data': /** @returns {boolean} */ (/** @type {any} */ obj) => Types['HashMapNode > data (anon)'](obj),
|
38376
|
+
HashMapNode: /** @returns {boolean} */ (/** @type {any} */ obj) => Kinds.List(obj) && obj.length === 2 && Types['HashMapNode > map'](obj[0]) && Types['HashMapNode > data'](obj[1]),
|
38377
|
+
'HashMapRoot > hamt': /** @returns {boolean} */ (/** @type {any} */ obj) => Types.HashMapNode(obj),
|
38378
|
+
HashMapRoot: /** @returns {boolean} */ (/** @type {any} */ obj) => { const keys = obj && Object.keys(obj); return Kinds.Map(obj) && ['hashAlg', 'bucketSize', 'hamt'].every((k) => keys.includes(k)) && Object.entries(obj).every(([name, value]) => Types['HashMapRoot > ' + name] && Types['HashMapRoot > ' + name](value)) }
|
38379
|
+
};
|
38380
|
+
|
38381
|
+
const HashMapRoot = Types.HashMapRoot;
|
38382
|
+
const HashMapNode = Types.HashMapNode;
|
38383
|
+
Types.Element;
|
38384
|
+
Types.Bucket;
|
38385
|
+
Types.BucketEntry;
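// Illustrative sketch (not part of the released bundle): exercising the
// HashMapRoot predicate above on a hand-built, hypothetical root value
// (0x12 is the sha2-256 multihash code; the hamt is an empty root node).
// eslint-disable-next-line no-unused-vars
function exampleValidateRoot () {
  const root = { hashAlg: 0x12, bucketSize: 3, hamt: [new Uint8Array(1), []] };
  return HashMapRoot(root) // true - the shape matches the schema
}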
|
38386
|
+
|
38387
|
+
/**
|
38388
|
+
* @typedef {'bool'|'string'|'bytes'|'int'|'float'|'list'|'map'|'null'|'link'} RepresentationKindString
|
38389
|
+
*/
|
38390
|
+
|
38391
|
+
/**
|
38392
|
+
* @param {any} obj
|
38393
|
+
* @returns {RepresentationKindString}
|
38394
|
+
*/
|
38395
|
+
function kind (obj) {
|
38396
|
+
if (typeof obj === 'number') {
|
38397
|
+
if (Number.isInteger(obj)) {
|
38398
|
+
return 'int'
|
38399
|
+
}
|
38400
|
+
return 'float'
|
38401
|
+
}
|
38402
|
+
if (typeof obj === 'string') {
|
38403
|
+
return 'string'
|
38404
|
+
}
|
38405
|
+
if (obj === null) {
|
38406
|
+
return 'null'
|
38407
|
+
}
|
38408
|
+
if (typeof obj === 'boolean') {
|
38409
|
+
return 'bool'
|
38410
|
+
}
|
38411
|
+
if (typeof obj === 'object' && obj.asCID === obj) {
|
38412
|
+
return 'link'
|
38413
|
+
}
|
38414
|
+
if (obj instanceof Uint8Array) {
|
38415
|
+
return 'bytes'
|
38416
|
+
}
|
38417
|
+
if (Array.isArray(obj)) {
|
38418
|
+
return 'list'
|
38419
|
+
}
|
38420
|
+
if (typeof obj === 'object') {
|
38421
|
+
return 'map'
|
38422
|
+
}
|
38423
|
+
throw new TypeError(`Unknown IPLD kind for value: ${JSON.stringify(obj)}`)
|
38424
|
+
}
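// Illustrative sketch (not part of the released bundle): how kind() maps a
// few representative JavaScript values onto IPLD representation kinds.
// eslint-disable-next-line no-unused-vars
function exampleKinds () {
  return [
    kind(7),                 // 'int'
    kind(7.5),               // 'float'
    kind('hi'),              // 'string'
    kind(true),              // 'bool'
    kind(new Uint8Array(2)), // 'bytes'
    kind([1, 2]),            // 'list'
    kind({ a: 1 })           // 'map'
  ]
}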
|
38425
|
+
|
38426
|
+
/**
|
38427
|
+
* @typedef {import('ipld-schema/schema-schema').Schema} Schema
|
38428
|
+
* @typedef {import('ipld-schema/schema-schema').TypeDefnLink} TypeDefnLink
|
38429
|
+
* @typedef {import('ipld-schema/schema-schema').TypeDefnList} TypeDefnList
|
38430
|
+
* @typedef {import('ipld-schema/schema-schema').TypeDefnMap} TypeDefnMap
|
38431
|
+
*/
|
38432
|
+
|
38433
|
+
/**
|
38434
|
+
* @param {any} obj
|
38435
|
+
* @returns {{ schema: Schema, root: string }}
|
38436
|
+
*/
|
38437
|
+
function describe (obj) {
|
38438
|
+
const description = describeObject(obj, { types: {} });
|
38439
|
+
if (!Object.keys(description.schema.types).length) {
|
38440
|
+
// when `obj` is a terminal type, make up a typedef for that kind so we have
|
38441
|
+
// something to point to for our root rather than the plain typed kind
|
38442
|
+
|
38443
|
+
// special case for links
|
38444
|
+
if (typeof description.root === 'object' && typeof description.root.link === 'object') {
|
38445
|
+
const name = 'Link';
|
38446
|
+
description.schema.types[name] = { link: {} };
|
38447
|
+
description.root = name;
|
38448
|
+
} else if (typeof description.root === 'string') {
|
38449
|
+
const name = `${description.root}`;
|
38450
|
+
// @ts-ignore
|
38451
|
+
description.schema.types[name] = { [description.root.toLowerCase()]: {} };
|
38452
|
+
description.root = name;
|
38453
|
+
/* c8 ignore next 3 */
|
38454
|
+
} else {
|
38455
|
+
throw new Error('internal error')
|
38456
|
+
}
|
38457
|
+
}
|
38458
|
+
/* c8 ignore next 3 */
|
38459
|
+
if (typeof description.root !== 'string') {
|
38460
|
+
throw new Error('internal error')
|
38461
|
+
}
|
38462
|
+
return { schema: description.schema, root: description.root }
|
38463
|
+
}
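// Illustrative sketch (not part of the released bundle): describe() infers a
// schema and a root type name from an arbitrary value; print() (defined below)
// renders the result as IPLD schema text.
// eslint-disable-next-line no-unused-vars
function exampleDescribe () {
  const { schema, root } = describe({ name: 'fireproof', stars: 100 });
  // root is 'Struct_1', whose fields come out as `name String` and `stars Int`
  return `${print(schema)}\n# root: ${root}`
}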
|
38464
|
+
|
38465
|
+
/**
|
38466
|
+
* @param {any} obj
|
38467
|
+
* @param {Schema} schema
|
38468
|
+
* @returns {{ schema: Schema, root: string|{ link: TypeDefnLink } }}
|
38469
|
+
*/
|
38470
|
+
function describeObject (obj, schema) {
|
38471
|
+
const objKind = kind(obj);
|
38472
|
+
let name = `${objKind.charAt(0).toUpperCase()}${objKind.substring(1)}`;
|
38473
|
+
|
38474
|
+
// terminals
|
38475
|
+
if (objKind === 'null' ||
|
38476
|
+
objKind === 'int' ||
|
38477
|
+
objKind === 'bool' ||
|
38478
|
+
objKind === 'float' ||
|
38479
|
+
objKind === 'string' ||
|
38480
|
+
objKind === 'bytes') {
|
38481
|
+
return { schema, root: name }
|
38482
|
+
}
|
38483
|
+
|
38484
|
+
if (objKind === 'link') {
|
38485
|
+
return { schema, root: { link: {} } }
|
38486
|
+
}
|
38487
|
+
|
38488
|
+
// 'map' || 'list'
|
38489
|
+
|
38490
|
+
/** @type {{ fieldName: string, root: string|{ link: TypeDefnLink }}[]} */
|
38491
|
+
const fieldNames = [];
|
38492
|
+
const entries = objKind === 'map'
|
38493
|
+
? Object.entries(obj)
|
38494
|
+
: obj.map((/** @type {any} */ e, /** @type {number} */ i) => [`f${i}`, e]);
|
38495
|
+
for (const [fieldName, value] of entries) {
|
38496
|
+
fieldNames.push({ fieldName, root: describeObject(value, schema).root });
|
38497
|
+
}
|
38498
|
+
let unique = true;
|
38499
|
+
for (let i = 1; i < fieldNames.length; i++) {
|
38500
|
+
// this is a shallow assumption - the name alone doesn't actually tell us uniqueness,
|
38501
|
+
// and this will have to be improved
|
38502
|
+
if (fieldNames[i].root !== fieldNames[i - 1].root) {
|
38503
|
+
unique = false;
|
38504
|
+
break
|
38505
|
+
}
|
38506
|
+
}
|
38507
|
+
|
38508
|
+
name = `${name}_1`;
|
38509
|
+
/** @type {{ map: { keyType?: string, valueType?: string|{ link: TypeDefnLink } } }|{ list: { valueType?: string|{ link: TypeDefnLink } } }|{ struct: { fields: { [ k in string]: { type: string | { link: TypeDefnLink } } }, representation?: { tuple: {} } } } } */
|
38510
|
+
let type;
|
38511
|
+
|
38512
|
+
if (unique) { // a pure map or list
|
38513
|
+
const valueType = fieldNames.length ? fieldNames[0].root : 'Any';
|
38514
|
+
if (objKind === 'map') {
|
38515
|
+
type = { map: { keyType: 'String', valueType } };
|
38516
|
+
} else if (objKind === 'list') {
|
38517
|
+
type = { list: { valueType } };
|
38518
|
+
/* c8 ignore next 4 */
|
38519
|
+
} else {
|
38520
|
+
throw new Error(`Unexpected object kind: ${objKind}`)
|
38521
|
+
}
|
38522
|
+
} else { // a struct with varying types
|
38523
|
+
name = 'Struct_1';
|
38524
|
+
type = {
|
38525
|
+
struct: { fields: {} }
|
38526
|
+
};
|
38527
|
+
for (const field of fieldNames) {
|
38528
|
+
type.struct.fields[field.fieldName] = { type: field.root };
|
38529
|
+
}
|
38530
|
+
if (objKind === 'list') {
|
38531
|
+
type.struct.representation = { tuple: {} };
|
38532
|
+
}
|
38533
|
+
}
|
38534
|
+
|
38535
|
+
while (schema.types[name]) {
|
38536
|
+
if (deepEqual(schema.types[name], type)) {
|
38537
|
+
break
|
38538
|
+
}
|
38539
|
+
name = name.split('_').map((s, i) => i ? parseInt(s, 10) + 1 : s).join('_');
|
38540
|
+
}
|
38541
|
+
// too hard
|
38542
|
+
// @ts-ignore
|
38543
|
+
schema.types[name] = type;
|
38544
|
+
|
38545
|
+
return { schema, root: name }
|
38546
|
+
}
|
38547
|
+
|
38548
|
+
/**
|
38549
|
+
* @param {any} o1
|
38550
|
+
* @param {any} o2
|
38551
|
+
* @returns {boolean}
|
38552
|
+
*/
|
38553
|
+
function deepEqual (o1, o2) {
|
38554
|
+
const k1 = kind(o1);
|
38555
|
+
const k2 = kind(o2);
|
38556
|
+
/* c8 ignore next 3 */
|
38557
|
+
if (k1 !== k2) {
|
38558
|
+
return false
|
38559
|
+
}
|
38560
|
+
switch (k1) {
|
38561
|
+
/* c8 ignore next 1 */
|
38562
|
+
case 'bool':
|
38563
|
+
case 'string':
|
38564
|
+
case 'int':
|
38565
|
+
case 'float':
|
38566
|
+
case 'null':
|
38567
|
+
return o1 === o2
|
38568
|
+
case 'map':
|
38569
|
+
return deepEqual(Object.entries(o1), Object.entries(o2))
|
38570
|
+
case 'list':
|
38571
|
+
if (o1.length !== o2.length) {
|
38572
|
+
return false
|
38573
|
+
}
|
38574
|
+
for (let i = 0; i < o1.length; i++) {
|
38575
|
+
if (!deepEqual(o1[i], o2[i])) {
|
38576
|
+
return false
|
38577
|
+
}
|
38578
|
+
}
|
38579
|
+
}
|
38580
|
+
return true
|
38581
|
+
}
|
38582
|
+
|
38583
|
+
const noop = (s) => s;
|
38584
|
+
|
38585
|
+
// based on prism.js syntax categories, except 'class-name' -> className
|
38586
|
+
const noopHighlighter = {
|
38587
|
+
keyword: noop,
|
38588
|
+
builtin: noop,
|
38589
|
+
operator: noop,
|
38590
|
+
number: noop,
|
38591
|
+
string: noop,
|
38592
|
+
// comment: noop,
|
38593
|
+
className: noop,
|
38594
|
+
punctuation: noop
|
38595
|
+
};
|
38596
|
+
|
38597
|
+
function print (schema, indent = ' ', highlighter = {}) {
|
38598
|
+
if (!schema || typeof schema.types !== 'object') {
|
38599
|
+
throw new Error('Invalid schema')
|
38600
|
+
}
|
38601
|
+
|
38602
|
+
highlighter = Object.assign({}, noopHighlighter, highlighter);
|
38603
|
+
|
38604
|
+
let str = '';
|
38605
|
+
|
38606
|
+
str += printAdvanced(schema, indent, highlighter);
|
38607
|
+
str += printTypes(schema, indent, highlighter);
|
38608
|
+
|
38609
|
+
return str
|
38610
|
+
}
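// Illustrative sketch (not part of the released bundle): print() renders a
// schema object as IPLD schema DSL text, here for a simple map type.
// eslint-disable-next-line no-unused-vars
function examplePrintMap () {
  const schema = { types: { Scores: { map: { keyType: 'String', valueType: 'Int' } } } };
  return print(schema) // 'type Scores {String:Int}'
}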
|
38611
|
+
|
38612
|
+
function printAdvanced (schema, indent, highlighter) {
|
38613
|
+
let str = '';
|
38614
|
+
|
38615
|
+
if (typeof schema.advanced === 'object') {
|
38616
|
+
for (const advanced of Object.keys(schema.advanced)) {
|
38617
|
+
str += `${highlighter.keyword('advanced')} ${highlighter.className(advanced)}\n\n`;
|
38618
|
+
}
|
38619
|
+
}
|
38620
|
+
|
38621
|
+
return str
|
38622
|
+
}
|
38623
|
+
|
38624
|
+
function printTypes (schema, indent, highlighter) {
|
38625
|
+
let str = '';
|
38626
|
+
|
38627
|
+
for (const [type, defn] of Object.entries(schema.types)) {
|
38628
|
+
str += `${highlighter.keyword('type')} ${highlighter.className(type)} ${printType(defn, indent, highlighter)}\n\n`;
|
38629
|
+
}
|
38630
|
+
|
38631
|
+
return str.replace(/\n\n$/m, '')
|
38632
|
+
}
|
38633
|
+
|
38634
|
+
function kindFromDefinition (defn) {
|
38635
|
+
const [kind, more] = Object.keys(defn);
|
38636
|
+
if (!kind) {
|
38637
|
+
throw new Error('Invalid schema, missing kind')
|
38638
|
+
}
|
38639
|
+
if (more !== undefined) {
|
38640
|
+
throw new Error('Invalid schema, more than one kind')
|
38641
|
+
}
|
38642
|
+
return kind
|
38643
|
+
}
|
38644
|
+
|
38645
|
+
function printType (defn, indent, highlighter) {
|
38646
|
+
const kind = kindFromDefinition(defn);
|
38647
|
+
|
38648
|
+
if (['map', 'list', 'link', 'copy'].includes(kind)) {
|
38649
|
+
return printTypeTerm(defn, indent, highlighter)
|
38650
|
+
}
|
38651
|
+
|
38652
|
+
if (['struct', 'union', 'enum'].includes(kind)) {
|
38653
|
+
return `${highlighter.builtin(kind)} ${printTypeTerm(defn, indent, highlighter)}`
|
38654
|
+
}
|
38655
|
+
|
38656
|
+
if ((kind === 'bytes' || kind === 'string') && defn[kind].representation && typeof defn[kind].representation.advanced === 'string') {
|
38657
|
+
return `${kind} ${highlighter.builtin('representation')} advanced ${defn[kind].representation.advanced}`
|
38658
|
+
}
|
38659
|
+
|
38660
|
+
return kind
|
38661
|
+
}
|
38662
|
+
|
38663
|
+
function printTypeTerm (defn, indent, highlighter) {
|
38664
|
+
if (typeof defn === 'string') {
|
38665
|
+
return defn
|
38666
|
+
}
|
38667
|
+
|
38668
|
+
const kind = kindFromDefinition(defn);
|
38669
|
+
|
38670
|
+
if (typeof printTypeTerm[kind] !== 'function') {
|
38671
|
+
throw new Error(`Invalid schema, unsupported kind (${kind})`)
|
38672
|
+
}
|
38673
|
+
|
38674
|
+
return printTypeTerm[kind](defn[kind], indent, highlighter)
|
38675
|
+
}
|
38676
|
+
|
38677
|
+
printTypeTerm.link = function link (defn, indent, highlighter) {
|
38678
|
+
return `${highlighter.punctuation('&')}${printTypeTerm(defn.expectedType || 'Any', indent, highlighter)}`
|
38679
|
+
};
|
38680
|
+
|
38681
|
+
printTypeTerm.copy = function copy (defn, indent, highlighter) {
|
38682
|
+
return `${highlighter.operator('=')} ${defn.fromType}`
|
38683
|
+
};
|
38684
|
+
|
38685
|
+
printTypeTerm.map = function map (defn, indent, highlighter) {
|
38686
|
+
if (typeof defn.keyType !== 'string') {
|
38687
|
+
throw new Error('Invalid schema, map definition needs a "keyType"')
|
38688
|
+
}
|
38689
|
+
if (!defn.valueType) {
|
38690
|
+
throw new Error('Invalid schema, map definition needs a "valueType"')
|
38691
|
+
}
|
38692
|
+
|
38693
|
+
const nullable = defn.valueNullable === true ? 'nullable ' : '';
|
38694
|
+
let str = `${highlighter.punctuation('{')}${printTypeTerm(defn.keyType, indent, highlighter)}:${nullable}${printTypeTerm(defn.valueType, indent, highlighter)}${highlighter.punctuation('}')}`;
|
38695
|
+
if (defn.representation) {
|
38696
|
+
const repr = reprStrategy(defn);
|
38697
|
+
if (repr === 'listpairs') {
|
38698
|
+
str += ` ${highlighter.builtin('representation')} listpairs`;
|
38699
|
+
} else if (repr === 'stringpairs') {
|
38700
|
+
str += stringpairs(indent, 'map', defn.representation.stringpairs, highlighter);
|
38701
|
+
} else if (repr === 'advanced') {
|
38702
|
+
str += ` ${highlighter.builtin('representation')} advanced ${defn.representation.advanced}`;
|
38703
|
+
}
|
38704
|
+
}
|
38705
|
+
return str
|
38706
|
+
};
|
38707
|
+
|
38708
|
+
printTypeTerm.list = function list (defn, indent, highlighter) {
|
38709
|
+
if (!defn.valueType) {
|
38710
|
+
throw new Error('Invalid schema, list definition needs a "valueType"')
|
38711
|
+
}
|
38712
|
+
|
38713
|
+
const nullable = defn.valueNullable === true ? 'nullable ' : '';
|
38714
|
+
let str = `${highlighter.punctuation('[')}${nullable}${printTypeTerm(defn.valueType, indent, highlighter)}${highlighter.punctuation(']')}`;
|
38715
|
+
|
38716
|
+
if (defn.representation) {
|
38717
|
+
if (reprStrategy(defn) === 'advanced') {
|
38718
|
+
str += ` ${highlighter.builtin('representation')} advanced ${defn.representation.advanced}`;
|
38719
|
+
}
|
38720
|
+
}
|
38721
|
+
|
38722
|
+
return str
|
38723
|
+
};
|
38724
|
+
|
38725
|
+
printTypeTerm.struct = function struct (defn, indent, highlighter) {
|
38726
|
+
if (typeof defn.fields !== 'object') {
|
38727
|
+
throw new Error('Invalid schema, struct requires a "fields" map')
|
38728
|
+
}
|
38729
|
+
|
38730
|
+
let str = highlighter.punctuation('{');
|
38731
|
+
|
38732
|
+
for (const [name, fieldDefn] of Object.entries(defn.fields)) {
|
38733
|
+
const optional = fieldDefn.optional === true ? highlighter.keyword('optional') + ' ' : '';
|
38734
|
+
const nullable = fieldDefn.nullable === true ? highlighter.keyword('nullable') + ' ' : '';
|
38735
|
+
let fieldRepr = '';
|
38736
|
+
if (defn.representation && defn.representation.map && typeof defn.representation.map.fields === 'object') {
|
38737
|
+
const fr = defn.representation.map.fields[name];
|
38738
|
+
if (typeof fr === 'object') {
|
38739
|
+
const hasRename = typeof fr.rename === 'string';
|
38740
|
+
const hasImplicit = fr.implicit !== undefined;
|
38741
|
+
if (hasRename || hasImplicit) {
|
38742
|
+
fieldRepr = ` ${highlighter.punctuation('(')}`;
|
38743
|
+
if (hasRename) {
|
38744
|
+
fieldRepr += `${highlighter.keyword('rename')} ${highlighter.string(`"${fr.rename}"`)}`;
|
38745
|
+
if (hasImplicit) {
|
38746
|
+
fieldRepr += ' ';
|
38747
|
+
}
|
38748
|
+
}
|
38749
|
+
if (hasImplicit) {
|
38750
|
+
const impl = typeof fr.implicit === 'string'
|
38751
|
+
? highlighter.string(`"${fr.implicit}"`)
|
38752
|
+
: typeof fr.implicit === 'number'
|
38753
|
+
? highlighter.number(fr.implicit)
|
38754
|
+
: highlighter.keyword(fr.implicit);
|
38755
|
+
fieldRepr += `${highlighter.keyword('implicit')} ${impl}`;
|
38756
|
+
}
|
38757
|
+
fieldRepr += highlighter.punctuation(')');
|
38758
|
+
}
|
38759
|
+
}
|
38760
|
+
}
|
38761
|
+
|
38762
|
+
const fieldType = typeof fieldDefn.type === 'string' ? fieldDefn.type : printTypeTerm(fieldDefn.type, indent, highlighter);
|
38763
|
+
str += `\n${indent}${name} ${optional}${nullable}${fieldType}${fieldRepr}`;
|
38764
|
+
}
|
38765
|
+
|
38766
|
+
if (str[str.length - 1] !== highlighter.punctuation('{')) {
|
38767
|
+
str += '\n';
|
38768
|
+
}
|
38769
|
+
str += highlighter.punctuation('}');
|
38770
|
+
|
38771
|
+
if (defn.representation) {
|
38772
|
+
const repr = reprStrategy(defn);
|
38773
|
+
if (repr === 'listpairs') {
|
38774
|
+
str += ` ${highlighter.builtin('representation')} listpairs`;
|
38775
|
+
} else if (repr === 'stringjoin') {
|
38776
|
+
if (typeof defn.representation.stringjoin.join !== 'string') {
|
38777
|
+
throw new Error('Invalid schema, struct stringjoin representations require a join string')
|
38778
|
+
}
|
38779
|
+
str += ` ${highlighter.builtin('representation')} stringjoin ${highlighter.punctuation('{')}\n`;
|
38780
|
+
str += `${indent}join ${highlighter.string(`"${defn.representation.stringjoin.join}"`)}\n`;
|
38781
|
+
str += fieldOrder(indent, defn.representation.stringjoin.fieldOrder, highlighter);
|
38782
|
+
str += highlighter.punctuation('}');
|
38783
|
+
} else if (repr === 'stringpairs') {
|
38784
|
+
str += stringpairs(indent, 'struct', defn.representation.stringpairs, highlighter);
|
38785
|
+
} else if (repr === 'tuple') {
|
38786
|
+
str += ` ${highlighter.builtin('representation')} tuple`;
|
38787
|
+
if (Array.isArray(defn.representation.tuple.fieldOrder)) {
|
38788
|
+
str += ` ${highlighter.punctuation('{')}\n`;
|
38789
|
+
str += fieldOrder(indent, defn.representation.tuple.fieldOrder, highlighter);
|
38790
|
+
str += highlighter.punctuation('}');
|
38791
|
+
}
|
38792
|
+
} else if (repr === 'advanced') {
|
38793
|
+
str += ` ${highlighter.builtin('representation')} advanced ${defn.representation.advanced}`;
|
38794
|
+
}
|
38795
|
+
}
|
38796
|
+
|
38797
|
+
return str
|
38798
|
+
};
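// Illustrative sketch (not part of the released bundle): a struct with tuple
// representation, mirroring the BucketEntry type from the schema comment above.
// eslint-disable-next-line no-unused-vars
function examplePrintTupleStruct () {
  const schema = {
    types: {
      BucketEntry: {
        struct: {
          fields: { key: { type: 'Bytes' }, value: { type: 'Any' } },
          representation: { tuple: {} }
        }
      }
    }
  };
  return print(schema)
  // type BucketEntry struct {
  //   key Bytes
  //   value Any
  // } representation tuple
}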
|
38799
|
+
|
38800
|
+
function fieldOrder (indent, fieldOrder, highlighter) {
|
38801
|
+
let str = '';
|
38802
|
+
if (Array.isArray(fieldOrder)) {
|
38803
|
+
const fo = fieldOrder.map((f) => highlighter.string(`"${f}"`)).join(', ');
|
38804
|
+
str += `${indent}fieldOrder ${highlighter.punctuation('[')}${fo}${highlighter.punctuation(']')}\n`;
|
38805
|
+
}
|
38806
|
+
return str
|
38807
|
+
}
|
38808
|
+
|
38809
|
+
function stringpairs (indent, kind, stringpairs, highlighter) {
|
38810
|
+
let str = '';
|
38811
|
+
if (typeof stringpairs.innerDelim !== 'string') {
|
38812
|
+
throw new Error(`Invalid schema, ${kind} stringpairs representations require an innerDelim string`)
|
38813
|
+
}
|
38814
|
+
if (typeof stringpairs.entryDelim !== 'string') {
|
38815
|
+
throw new Error(`Invalid schema, ${kind} stringpairs representations require an entryDelim string`)
|
38816
|
+
}
|
38817
|
+
str += ` ${highlighter.builtin('representation')} stringpairs ${highlighter.punctuation('{')}\n`;
|
38818
|
+
str += `${indent}innerDelim ${highlighter.string(`"${stringpairs.innerDelim}"`)}\n`;
|
38819
|
+
str += `${indent}entryDelim ${highlighter.string(`"${stringpairs.entryDelim}"`)}\n`;
|
38820
|
+
str += highlighter.punctuation('}');
|
38821
|
+
return str
|
38822
|
+
}
|
38823
|
+
|
38824
|
+
function reprStrategy (defn) {
|
38825
|
+
if (typeof defn.representation !== 'object') {
|
38826
|
+
throw new Error('Expected \'representation\' property of definition')
|
38827
|
+
}
|
38828
|
+
const keys = Object.keys(defn.representation);
|
38829
|
+
if (keys.length !== 1) {
|
38830
|
+
throw new Error('Expected exactly one \'representation\' field')
|
38831
|
+
}
|
38832
|
+
const repr = keys[0];
|
38833
|
+
if (repr === 'advanced') {
|
38834
|
+
if (typeof defn.representation[repr] !== 'string') {
|
38835
|
+
throw new Error('Expected representation \'advanced\' to be a string')
|
38836
|
+
}
|
38837
|
+
} else {
|
38838
|
+
if (typeof defn.representation[repr] !== 'object') {
|
38839
|
+
throw new Error(`Expected representation '${repr}' to be an object`)
|
38840
|
+
}
|
38841
|
+
}
|
38842
|
+
return repr
|
38843
|
+
}
|
38844
|
+
|
38845
|
+
printTypeTerm.union = function union (defn, indent, highlighter) {
|
38846
|
+
if (typeof defn.representation !== 'object') {
|
38847
|
+
throw new Error('Invalid schema, unions require a representation')
|
38848
|
+
}
|
38849
|
+
|
38850
|
+
let str = highlighter.punctuation('{');
|
38851
|
+
const repr = reprStrategy(defn);
|
38852
|
+
|
38853
|
+
if (repr === 'kinded') {
|
38854
|
+
for (const [kind, type] of Object.entries(defn.representation.kinded)) {
|
38855
|
+
str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${kind}`;
|
38856
|
+
}
|
38857
|
+
str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} kinded`;
|
38858
|
+
} else if (repr === 'stringprefix' || repr === 'bytesprefix') {
|
38859
|
+
if (typeof defn.representation[repr].prefixes !== 'object') {
|
38860
|
+
throw new Error(`Invalid schema, ${repr} unions require a representation prefixes map`)
|
38861
|
+
}
|
38862
|
+
for (const [key, type] of Object.entries(defn.representation[repr].prefixes)) {
|
38863
|
+
str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
|
38864
|
+
}
|
38865
|
+
str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} ${repr}`;
|
38866
|
+
} else if (repr === 'keyed') {
|
38867
|
+
if (typeof defn.representation[repr] !== 'object') {
|
38868
|
+
throw new Error(`Invalid schema, ${repr} unions require a representation keyed map`)
|
38869
|
+
}
|
38870
|
+
for (const [key, type] of Object.entries(defn.representation[repr])) {
|
38871
|
+
str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
|
38872
|
+
}
|
38873
|
+
str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} ${repr}`;
|
38874
|
+
} else if (repr === 'inline') {
|
38875
|
+
if (typeof defn.representation.inline.discriminantTable !== 'object') {
|
38876
|
+
throw new Error('Invalid schema, inline unions require a discriminantTable map')
|
38877
|
+
}
|
38878
|
+
if (typeof defn.representation.inline.discriminantKey !== 'string') {
|
38879
|
+
throw new Error('Invalid schema, inline unions require a discriminantKey string')
|
38880
|
+
}
|
38881
|
+
for (const [key, type] of Object.entries(defn.representation.inline.discriminantTable)) {
|
38882
|
+
str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
|
38883
|
+
}
|
38884
|
+
str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} inline ${highlighter.punctuation('{')}\n${indent}discriminantKey ${highlighter.string(`"${defn.representation.inline.discriminantKey}"`)}\n${highlighter.punctuation('}')}`;
|
38885
|
+
} else if (repr === 'envelope') {
|
38886
|
+
if (typeof defn.representation.envelope.discriminantTable !== 'object') {
|
38887
|
+
throw new Error('Invalid schema, envelope unions require a discriminantTable map')
|
38888
|
+
}
|
38889
|
+
if (typeof defn.representation.envelope.discriminantKey !== 'string') {
|
38890
|
+
throw new Error('Invalid schema, envelope unions require a discriminantKey string')
|
38891
|
+
}
|
38892
|
+
if (typeof defn.representation.envelope.contentKey !== 'string') {
|
38893
|
+
throw new Error('Invalid schema, envelope unions require a contentKey string')
|
38894
|
+
}
|
38895
|
+
for (const [key, type] of Object.entries(defn.representation.envelope.discriminantTable)) {
|
38896
|
+
str += `\n${indent}${highlighter.punctuation('|')} ${printTypeTerm(type, indent, highlighter)} ${highlighter.string(`"${key}"`)}`;
|
38897
|
+
}
|
38898
|
+
str += `\n${highlighter.punctuation('}')} ${highlighter.builtin('representation')} envelope ${highlighter.punctuation('{')}`;
|
38899
|
+
str += `\n${indent}discriminantKey ${highlighter.string(`"${defn.representation.envelope.discriminantKey}"`)}`;
|
38900
|
+
str += `\n${indent}contentKey ${highlighter.string(`"${defn.representation.envelope.contentKey}"`)}`;
|
38901
|
+
str += `\n${highlighter.punctuation('}')}`;
|
38902
|
+
} else {
|
38903
|
+
throw new Error(`Invalid schema, unknown union representation type ${Object.keys(defn.representation)[0]}`)
|
38904
|
+
}
|
38905
|
+
|
38906
|
+
return str
|
38907
|
+
};
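// Illustrative sketch (not part of the released bundle): a hand-built union
// definition in the shape this printer expects, reproducing the kinded union
// from the schema comment at the top of this section.
// eslint-disable-next-line no-unused-vars
function examplePrintKindedUnion () {
  const schema = {
    types: {
      Element: {
        union: {
          representation: {
            kinded: {
              link: { link: { expectedType: 'HashMapNode' } },
              list: 'Bucket'
            }
          }
        }
      }
    }
  };
  return print(schema)
  // type Element union {
  //   | &HashMapNode link
  //   | Bucket list
  // } representation kinded
}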
|
38908
|
+
|
38909
|
+
printTypeTerm.enum = function _enum (defn, indent, highlighter) {
|
38910
|
+
if (typeof defn.representation !== 'object') {
|
38911
|
+
throw new Error('Invalid schema, enum requires a "representation" map')
|
38912
|
+
}
|
38913
|
+
const repr = reprStrategy(defn);
|
38914
|
+
if (repr !== 'string' && repr !== 'int') {
|
38915
|
+
throw new Error('Invalid schema, enum requires a "string" or "int" representation map')
|
38916
|
+
}
|
38917
|
+
|
38918
|
+
let str = highlighter.punctuation('{');
|
38919
|
+
|
38920
|
+
for (const ev of defn.members) {
|
38921
|
+
str += `\n${indent}${highlighter.punctuation('|')} ${ev}`;
|
38922
|
+
const sv = (defn.representation.string && defn.representation.string[ev]) ||
|
38923
|
+
(defn.representation.int && defn.representation.int[ev]);
|
38924
|
+
if (sv !== undefined) {
|
38925
|
+
str += ` ${highlighter.punctuation('(')}${highlighter.string(`"${sv}"`)}${highlighter.punctuation(')')}`;
|
38926
|
+
}
|
38927
|
+
}
|
38928
|
+
|
38929
|
+
str += `\n${highlighter.punctuation('}')}`;
|
38930
|
+
if (defn.representation.int) {
|
38931
|
+
str += ` ${highlighter.builtin('representation')} int`;
|
38932
|
+
}
|
38933
|
+
return str
|
38934
|
+
};
|
38935
|
+
|
38936
|
+
const DEFAULT_HASHER = sha256$2;
|
38937
|
+
const DEFAULT_HASH_BYTES = 32;
|
38938
|
+
// 5/3 seems to offer best perf characteristics in terms of raw speed
|
38939
|
+
// (Filecoin found this too for their HAMT usage)
|
38940
|
+
// but amount and size of garbage will change with different parameters
|
38941
|
+
const DEFAULT_BITWIDTH = 5;
|
38942
|
+
const DEFAULT_BUCKET_SIZE = 3;
|
38943
|
+
|
38944
|
+
const textDecoder = new TextDecoder();
|
38945
|
+
|
38946
|
+
/**
|
38947
|
+
* @template V
|
38948
|
+
* @typedef {import('iamap').IAMap<V>} IAMap<V>
|
38949
|
+
*/
|
38950
|
+
/**
|
38951
|
+
* @template V
|
38952
|
+
* @typedef {import('iamap').Store<V>} Store<V>
|
38953
|
+
*/
|
38954
|
+
/**
|
38955
|
+
* @typedef {import('multiformats/hashes/interface').MultihashHasher} MultihashHasher
|
38956
|
+
*/
|
38957
|
+
/**
|
38958
|
+
* @template V
|
38959
|
+
* @typedef {import('./interface').HashMap<V>} HashMap<V>
|
38960
|
+
*/
|
38961
|
+
/**
|
38962
|
+
* @template {number} Codec
|
38963
|
+
* @template V
|
38964
|
+
* @typedef {import('./interface').CreateOptions<Codec,V>} CreateOptions<Codec,V>
|
38965
|
+
*/
|
38966
|
+
/**
|
38967
|
+
* @typedef {import('./interface').Loader} Loader<V>
|
38968
|
+
*/
|
38969
|
+
|
38970
|
+
/**
|
38971
|
+
* @classdesc
|
38972
|
+
* An IPLD HashMap object. Create a new HashMap or load an existing one with the asynchronous
|
38973
|
+
* {@link HashMap.create} factory method.
|
38974
|
+
*
|
38975
|
+
* This class serves mostly as an IPLD usability wrapper for
|
38976
|
+
* [IAMap](https://github.com/rvagg/iamap) which implements the majority of the logic behind the
|
38977
|
+
* IPLD HashMap specification, without being IPLD-specific. IAMap is immutable, in that each
|
38978
|
+
* mutation (delete or set) returns a new IAMap instance. `HashMap`, however, is mutable, and
|
38979
|
+
* mutation operations may be performed on the same object but its `cid` property will change
|
38980
|
+
* with mutations.
|
38981
|
+
*
|
38982
|
+
* If consumed with TypeScript typings, `HashMap` is generic over value template type `V`, where various
|
38983
|
+
* operations will accept or return template type `V`.
|
38984
|
+
*
|
38985
|
+
* @name HashMap
|
38986
|
+
* @template V
|
38987
|
+
* @implements {HashMap<V>}
|
38988
|
+
* @class
|
38989
|
+
* @hideconstructor
|
38990
|
+
* @property {CID} cid - The _current_ CID of this HashMap. It is important to note that this CID
|
38991
|
+
* will change when successfully performing mutation operations `set()` or
|
38992
|
+
* `delete()`. Where a `set()` does not change an existing value (because
|
38993
|
+
* a key already exists with that value) or `delete()` does not delete an existing
|
38994
|
+
* key/value pair (because it doesn't already exist in this HashMap), the `cid` will not change.
|
38995
|
+
*/
|
38996
|
+
class HashMapImpl {
|
38997
|
+
/**
|
38998
|
+
* @ignore
|
38999
|
+
* @param {IAMap<V>} iamap
|
39000
|
+
*/
|
39001
|
+
constructor (iamap) {
|
39002
|
+
// IAMap is immutable, so mutation operations return a new instance so
|
39003
|
+
// we use `this._iamap` as the _current_ instance and wrap around that,
|
39004
|
+
// switching it out as we mutate
|
39005
|
+
this._iamap = iamap;
|
39006
|
+
}
|
39007
|
+
|
39008
|
+
/**
|
39009
|
+
* @name HashMap#get
|
39010
|
+
* @description
|
39011
|
+
* Fetches the value of the provided `key` stored in this HashMap, if it exists.
|
39012
|
+
* @function
|
39013
|
+
* @async
|
39014
|
+
* @memberof HashMap
|
39015
|
+
* @param {string|Uint8Array} key - The key of the key/value pair entry to look up in this HashMap.
|
39016
|
+
* @return {Promise<V|undefined>}
|
39017
|
+
* The value (of template type `V`) stored for the given `key` which may be any type serializable
|
39018
|
+
* by IPLD, or a CID to an existing IPLD object. This should match what was provided by
|
39019
|
+
* {@link HashMap#set} as the `value` for this `key`. If the `key` is not stored in this HashMap,
|
39020
|
+
* `undefined` will be returned.
|
39021
|
+
*/
|
39022
|
+
async get (key) {
|
39023
|
+
return this._iamap.get(key)
|
39024
|
+
}
|
39025
|
+
|
39026
|
+
/**
|
39027
|
+
* @name HashMap#has
|
39028
|
+
* @description
|
39029
|
+
* Check whether the provided `key` exists in this HashMap. The equivalent of performing
|
39030
|
+
* `map.get(key) !== undefined`.
|
39031
|
+
* @function
|
39032
|
+
* @async
|
39033
|
+
* @memberof HashMap
|
39034
|
+
* @param {string|Uint8Array} key - The key of the key/value pair entry to look up in this HashMap.
|
39035
|
+
* @return {Promise<boolean>}
|
39036
|
+
* `true` if the `key` exists in this HashMap, `false` otherwise.
|
39037
|
+
*/
|
39038
|
+
async has (key) {
|
39039
|
+
return this._iamap.has(key)
|
39040
|
+
}
|
39041
|
+
|
39042
|
+
/**
|
39043
|
+
* @name HashMap#size
|
39044
|
+
* @description
|
39045
|
+
* Count the number of key/value pairs stored in this HashMap.
|
39046
|
+
* @function
|
39047
|
+
* @async
|
39048
|
+
* @memberof HashMap
|
39049
|
+
* @return {Promise<number>}
|
39050
|
+
* An integer greater than or equal to zero indicating the number of key/value pairs stored
|
39051
|
+
* in this HashMap.
|
39052
|
+
*/
|
39053
|
+
async size () {
|
39054
|
+
return this._iamap.size()
|
39055
|
+
}
|
39056
|
+
|
39057
|
+
/**
|
39058
|
+
* @name HashMap#set
|
39059
|
+
* @description
|
39060
|
+
* Add a key/value pair to this HashMap. The value may be any object that can be serialized by
|
39061
|
+
* IPLD, or a CID to a more complex (or larger) object. {@link HashMap#get} operations on the
|
39062
|
+
* same `key` will retrieve the `value` as it was set as long as serialization and deserialization
|
39063
|
+
* results in the same object.
|
39064
|
+
*
|
39065
|
+
* If the `key` already exists in this HashMap, the existing entry will have the `value` replaced
|
39066
|
+
* with the new one provided. If the `value` is the same, the HashMap will remain unchanged.
|
39067
|
+
*
|
39068
|
+
* As a mutation operation, performing a successful `set()` where a new key/value pair or new
|
39069
|
+
* `value` for a given `key` is set, a new root node will be generated so `map.cid` will be a
|
39070
|
+
* different CID. This CID should be used to refer to this collection in the backing store where
|
39071
|
+
* persistence is required.
|
39072
|
+
* @function
|
39073
|
+
* @async
|
39074
|
+
* @memberof HashMap
|
39075
|
+
* @param {string|Uint8Array} key - The key of the new key/value pair entry to store in this HashMap.
|
39076
|
+
* @param {V} value - The value (of template type `V`) to store, either an object that can be
|
39077
|
+
* serialized inline via IPLD or a CID pointing to another object.
|
39078
|
+
* @returns {Promise<void>}
|
39079
|
+
*/
|
39080
|
+
async set (key, value) {
|
39081
|
+
this._iamap = await this._iamap.set(key, value);
|
39082
|
+
}
|
39083
|
+
|
39084
|
+
/**
|
39085
|
+
* @name HashMap#delete
|
39086
|
+
* @description
|
39087
|
+
* Remove a key/value pair from this HashMap.
|
39088
|
+
*
|
39089
|
+
* If the `key` exists in this HashMap, its entry will be entirely removed. If the `key` does not
|
39090
|
+
* exist in this HashMap, no changes will occur.
|
39091
|
+
*
|
39092
|
+
* As a mutation operation, performing a successful `delete()` where an existing key/value pair
|
39093
|
+
* is removed from the collection, a new root node will be generated so `map.cid` will be a
|
39094
|
+
* different CID. This CID should be used to refer to this collection in the backing store where
|
39095
|
+
* persistence is required.
|
39096
|
+
* @function
|
39097
|
+
* @async
|
39098
|
+
* @memberof HashMap
|
39099
|
+
* @param {string|Uint8Array} key - The key of the key/value pair entry to remove from this HashMap.
|
39100
|
+
* @returns {Promise<void>}
|
39101
|
+
*/
|
39102
|
+
async delete (key) {
|
39103
|
+
this._iamap = await this._iamap.delete(key);
|
39104
|
+
}
|
39105
|
+
|
39106
|
+
/**
|
39107
|
+
* @name HashMap#values
|
39108
|
+
* @description
|
39109
|
+
* Asynchronously emit all values that exist within this HashMap collection.
|
39110
|
+
*
|
39111
|
+
* This will cause a full traversal of all nodes that make up this collection so may result in
|
39112
|
+
* many block loads from the backing store if the collection is large.
|
39113
|
+
* @function
|
39114
|
+
* @async
|
39115
|
+
* @returns {AsyncIterable<V>}
|
39116
|
+
* An async iterator that yields values (of template type `V`) of the type stored in this
|
39117
|
+
* collection, either inlined objects or CIDs.
|
39118
|
+
*/
|
39119
|
+
async * values () {
|
39120
|
+
yield * this._iamap.values();
|
39121
|
+
}
|
39122
|
+
|
39123
|
+
/**
|
39124
|
+
* @name HashMap#keys
|
39125
|
+
* @description
|
39126
|
+
* Asynchronously emit all keys that exist within this HashMap collection **as strings** rather
|
39127
|
+
* than the stored bytes.
|
39128
|
+
*
|
39129
|
+
* This will cause a full traversal of all nodes that make up this
|
39130
|
+
* collection so may result in many block loads from the backing store if the collection is large.
|
39131
|
+
* @function
|
39132
|
+
* @async
|
39133
|
+
* @returns {AsyncIterable<string>}
|
39134
|
+
* An async iterator that yields string keys stored in this collection.
|
39135
|
+
*/
|
39136
|
+
async * keys () {
|
39137
|
+
for await (const key of this._iamap.keys()) {
|
39138
|
+
// IAMap keys are Uint8Arrays, make them strings
|
39139
|
+
yield textDecoder.decode(key);
|
39140
|
+
}
|
39141
|
+
}
|
39142
|
+
|
39143
|
+
/**
|
39144
|
+
* @name HashMap#keysRaw
|
39145
|
+
* @description
|
39146
|
+
* Asynchronously emit all keys that exist within this HashMap collection **as their raw bytes**
|
39147
|
+
* rather than being converted to a string.
|
39148
|
+
*
|
39149
|
+
* This will cause a full traversal of all nodes that make up this collection so may result in
|
39150
|
+
* many block loads from the backing store if the collection is large.
|
39151
|
+
* @function
|
39152
|
+
* @async
|
39153
|
+
* @returns {AsyncIterable<Uint8Array>}
|
39154
|
+
* An async iterator that yields the raw `Uint8Array` keys stored in this collection.
|
39155
|
+
*/
|
39156
|
+
async * keysRaw () {
|
39157
|
+
yield * this._iamap.keys();
|
39158
|
+
}
|
39159
|
+
|
39160
|
+
/**
|
39161
|
+
* @name HashMap#entries
|
39162
|
+
* @description
|
39163
|
+
* Asynchronously emit all key/value pairs that exist within this HashMap collection. Keys will be
|
39164
|
+
* given **as strings** rather than their raw byte form as stored.
|
39165
|
+
*
|
39166
|
+
* This will cause a full traversal of all nodes that make up this collection so may result in
|
39167
|
+
* many block loads from the backing store if the collection is large.
|
39168
|
+
*
|
39169
|
+
* Entries are returned in tuple form like
|
39170
|
+
* [Map#entries()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/entries),
|
39171
|
+
* an array of key/value pairs where element `0` is the key and `1` is the value.
|
39172
|
+
* @function
|
39173
|
+
* @async
|
39174
|
+
* @returns {AsyncIterable<[string, V]>}
|
39175
|
+
* An async iterator that yields key/value pair tuples.
|
39176
|
+
*/
|
39177
|
+
async * entries () {
|
39178
|
+
for await (const { key, value } of this._iamap.entries()) {
|
39179
|
+
// IAMap keys are Uint8Arrays, make them strings
|
39180
|
+
yield [textDecoder.decode(key), value];
|
39181
|
+
}
|
39182
|
+
}
|
39183
|
+
|
39184
|
+
/**
|
39185
|
+
* @name HashMap#entriesRaw
|
39186
|
+
* @description
|
39187
|
+
* Asynchronously emit all key/value pairs that exist within this HashMap collection. Keys will be
|
39188
|
+
* given **as raw bytes** as stored rather than being converted to strings.
|
39189
|
+
*
|
39190
|
+
* This will cause a full traversal of all nodes that make up this collection so may result in
|
39191
|
+
* many block loads from the backing store if the collection is large.
|
39192
|
+
*
|
39193
|
+
* Entries are returned in tuple form like
|
39194
|
+
* [Map#entries()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/entries),
|
39195
|
+
* an array of key/value pairs where element `0` is the key and `1` is the value.
|
39196
|
+
* @function
|
39197
|
+
* @async
|
39198
|
+
* @returns {AsyncIterable<[Uint8Array, V]>}
|
39199
|
+
* An async iterator that yields key/value pair tuples.
|
39200
|
+
*/
|
39201
|
+
async * entriesRaw () {
|
39202
|
+
for await (const { key, value } of this._iamap.entries()) {
|
39203
|
+
yield [key, value];
|
39204
|
+
}
|
39205
|
+
}
|
39206
|
+
|
39207
|
+
/**
|
39208
|
+
* @name HashMap#cids
|
39209
|
+
* @description
|
39210
|
+
* Asynchronously emit all CIDs for blocks that make up this HashMap.
|
39211
|
+
*
|
39212
|
+
* This will cause a full traversal of all nodes that make up this collection so may result in
|
39213
|
+
* many block loads from the backing store if the collection is large.
|
39214
|
+
* @function
|
39215
|
+
* @async
|
39216
|
+
* @returns {AsyncIterable<CID>}
|
39217
|
+
* An async iterator that yields CIDs for the blocks that comprise this HashMap.
|
39218
|
+
*/
|
39219
|
+
async * cids () {
|
39220
|
+
yield * this._iamap.ids();
|
39221
|
+
}
|
39222
|
+
|
39223
|
+
get cid () {
|
39224
|
+
return this._iamap.id
|
39225
|
+
}
|
39226
|
+
|
39227
|
+
/**
|
39228
|
+
* Create a new {@link HashMap} instance, beginning empty, or loading from existing data in a
|
39229
|
+
* backing store.
|
39230
|
+
*
|
39231
|
+
* A backing store must be provided to make use of a HashMap; an interface to the store is given
|
39232
|
+
* through the mandatory `loader` parameter. The backing store stores IPLD blocks, referenced by
|
39233
|
+
* CIDs. `loader` must have two functions: `get(cid)` which should return the raw bytes (`Buffer`
|
39234
|
+
* or `Uint8Array`) of a block matching the given CID, and `put(cid, block)` that will store the
|
39235
|
+
* provided raw bytes of a block (`block`) under the associated CID.
|
39236
|
+
*
|
39237
|
+
* @async
|
39238
|
+
* @template V
|
39239
|
+
* @template {number} Codec
|
39240
|
+
* @param {Loader} loader - A loader with `get(cid):block` and `put(cid, block)` functions for
|
39241
|
+
* loading and storing block data by CID.
|
39242
|
+
* @param {CreateOptions<Codec, V>} options - Options for the HashMap. Defaults are provided but you can tweak
|
39243
|
+
* behavior according to your needs with these options.
|
39244
|
+
* @return {Promise<HashMap<V>>} - A HashMap instance, either loaded from an existing root block CID, or a new,
|
39245
|
+
* empty HashMap if no CID is provided.
|
39246
|
+
*/
|
39247
|
+
static async create (loader, options) {
|
39248
|
+
return _load(loader, null, options)
|
39249
|
+
}
|
39250
|
+
|
39251
|
+
/**
|
39252
|
+
* @template V
|
39253
|
+
* @template {number} Codec
|
39254
|
+
* @param {Loader} loader
|
39255
|
+
* @param {CID} root - A root of an existing HashMap. Provide a CID if you want to load existing
|
39256
|
+
* data.
|
39257
|
+
* @param {CreateOptions<Codec, V>} options
|
39258
|
+
* @returns {Promise<HashMap<V>>}
|
39259
|
+
*/
|
39260
|
+
static async load (loader, root, options) {
|
39261
|
+
return _load(loader, root, options)
|
39262
|
+
}
|
39263
|
+
}
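// Illustrative usage sketch (not part of the released bundle): creating a
// HashMap over a minimal in-memory loader. `codec$1` and `sha256$2` are the
// codec and hasher this bundle already uses elsewhere; everything else here
// is hypothetical.
// eslint-disable-next-line no-unused-vars
async function exampleHashMapUsage () {
  const blocks = new Map();
  const loader = {
    get: async cid => blocks.get(cid.toString()),
    put: async (cid, bytes) => { blocks.set(cid.toString(), bytes); }
  };
  const map = await HashMapImpl.create(loader, { blockCodec: codec$1, blockHasher: sha256$2 });
  await map.set('hello', 'world');
  // map.cid now points at the new root block and can be passed to HashMapImpl.load()
  return map.cid
}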
|
39264
|
+
|
39265
|
+
/**
|
39266
|
+
* @ignore
|
39267
|
+
* @template V
|
39268
|
+
* @template {number} Codec
|
39269
|
+
* @param {Loader} loader
|
39270
|
+
* @param {CID|null} root
|
39271
|
+
* @param {CreateOptions<Codec, V>} options
|
39272
|
+
* @returns {Promise<HashMap<V>>}
|
39273
|
+
*/
|
39274
|
+
async function _load (loader, root, options) {
|
39275
|
+
const cid = CID$1.asCID(root);
|
39276
|
+
|
39277
|
+
if (!loader || typeof loader.get !== 'function' || typeof loader.put !== 'function') {
|
39278
|
+
throw new TypeError('\'loader\' object with get() and put() methods is required')
|
39279
|
+
}
|
39280
|
+
|
39281
|
+
if (typeof options !== 'object') {
|
39282
|
+
throw new TypeError('An \'options\' argument is required')
|
39283
|
+
}
|
39284
|
+
|
39285
|
+
if (!('blockCodec' in options) ||
|
39286
|
+
typeof options.blockCodec !== 'object' ||
|
39287
|
+
typeof options.blockCodec.code !== 'number' ||
|
39288
|
+
typeof options.blockCodec.encode !== 'function' ||
|
39289
|
+
typeof options.blockCodec.decode !== 'function') {
|
39290
|
+
throw new TypeError('A valid \'blockCodec\' option is required')
|
39291
|
+
}
|
39292
|
+
const codec = options.blockCodec;
|
39293
|
+
if (!('blockHasher' in options) ||
|
39294
|
+
typeof options.blockHasher !== 'object' ||
|
39295
|
+
typeof options.blockHasher.digest !== 'function' ||
|
39296
|
+
typeof options.blockHasher.code !== 'number') {
|
39297
|
+
throw new TypeError('A valid \'blockHasher\' option is required')
|
39298
|
+
}
|
39299
|
+
const hasher = options.blockHasher;
|
39300
|
+
|
39301
|
+
/**
|
39302
|
+
* @ignore
|
39303
|
+
* @type {MultihashHasher}
|
39304
|
+
*/
|
39305
|
+
const hamtHasher = (() => {
|
39306
|
+
if ('hasher' in options) {
|
39307
|
+
if (typeof options.hasher !== 'object' ||
|
39308
|
+
typeof options.hasher.digest !== 'function' ||
|
39309
|
+
typeof options.hasher.code !== 'number') {
|
39310
|
+
throw new TypeError('\'hasher\' option must be a Multihasher')
|
39311
|
+
}
|
39312
|
+
return options.hasher
|
39313
|
+
}
|
39314
|
+
return DEFAULT_HASHER
|
39315
|
+
})();
|
39316
|
+
const hashBytes = (() => {
|
39317
|
+
if ('hashBytes' in options) {
|
39318
|
+
if (typeof options.hashBytes !== 'number') {
|
39319
|
+
throw new TypeError('\'hashBytes\' option must be a number')
|
39320
|
+
}
|
39321
|
+
/* c8 ignore next 2 */
|
39322
|
+
return options.hashBytes
|
39323
|
+
}
|
39324
|
+
return DEFAULT_HASH_BYTES
|
39325
|
+
})();
|
39326
|
+
/**
|
39327
|
+
* @ignore
|
39328
|
+
* @param {Uint8Array} bytes
|
39329
|
+
*/
|
39330
|
+
const hashFn = async (bytes) => {
|
39331
|
+
const hash = await sha256$2.digest(bytes);
|
39332
|
+
return hash.digest
|
39333
|
+
};
|
39334
|
+
registerHasher_1(hamtHasher.code, hashBytes, hashFn);
|
39335
|
+
|
39336
|
+
const bitWidth = (() => {
|
39337
|
+
if ('bitWidth' in options) {
|
39338
|
+
if (typeof options.bitWidth !== 'number') {
|
39339
|
+
throw new TypeError('\'bitWidth\' option must be a number')
|
39340
|
+
}
|
39341
|
+
return options.bitWidth
|
39342
|
+
}
|
39343
|
+
return DEFAULT_BITWIDTH
|
39344
|
+
})();
|
39345
|
+
|
39346
|
+
const bucketSize = (() => {
|
39347
|
+
if ('bucketSize' in options) {
|
39348
|
+
if (typeof options.bucketSize !== 'number') {
|
39349
|
+
throw new TypeError('\'bucketSize\' option must be a number')
|
39350
|
+
}
|
39351
|
+
return options.bucketSize
|
39352
|
+
}
|
39353
|
+
return DEFAULT_BUCKET_SIZE
|
39354
|
+
})();
|
39355
|
+
|
39356
|
+
const iamapOptions = { hashAlg: hamtHasher.code, bitWidth, bucketSize };
|
39357
|
+
|
39358
|
+
const store = {
|
39359
|
+
/**
|
39360
|
+
* @ignore
|
39361
|
+
* @param {CID} cid
|
39362
|
+
* @returns {Promise<V>}
|
39363
|
+
*/
|
39364
|
+
async load (cid) {
|
39365
|
+
const bytes = await loader.get(cid);
|
39366
|
+
if (!bytes) {
|
39367
|
+
throw new Error(`Could not load block for: ${cid}`)
|
39368
|
+
}
|
39369
|
+
// create() validates the block for us
|
39370
|
+
const block = await create$7({ bytes, cid, hasher, codec });
|
39371
|
+
validateBlock(block.value);
|
39372
|
+
return block.value
|
39373
|
+
},
|
39374
|
+
|
39375
|
+
/**
|
39376
|
+
* @ignore
|
39377
|
+
* @param {V} value
|
39378
|
+
* @returns {Promise<CID>}
|
39379
|
+
*/
|
39380
|
+
async save (value) {
|
39381
|
+
validateBlock(value);
|
39382
|
+
const block = await encode$7({ value, codec, hasher });
|
39383
|
+
await loader.put(block.cid, block.bytes);
|
39384
|
+
return block.cid
|
39385
|
+
},
|
39386
|
+
|
39387
|
+
/**
|
39388
|
+
* @ignore
|
39389
|
+
* @param {CID} cid1
|
39390
|
+
* @param {CID} cid2
|
39391
|
+
* @returns {boolean}
|
39392
|
+
*/
|
39393
|
+
isEqual (cid1, cid2) {
|
39394
|
+
return cid1.equals(cid2)
|
39395
|
+
},
|
39396
|
+
|
39397
|
+
/**
|
39398
|
+
* @ignore
|
39399
|
+
* @param {any} obj
|
39400
|
+
* @returns {boolean}
|
39401
|
+
*/
|
39402
|
+
isLink (obj) {
|
39403
|
+
return CID$1.asCID(obj) != null
|
39404
|
+
}
|
39405
|
+
};
|
39406
|
+
|
39407
|
+
let iamap;
|
39408
|
+
if (cid) {
|
39409
|
+
// load existing, ignoring bitWidth & bucketSize, they are loaded from the existing root
|
39410
|
+
iamap = await load_1(store, cid);
|
39411
|
+
} else {
|
39412
|
+
// create new
|
39413
|
+
iamap = await create_1(store, iamapOptions);
|
39414
|
+
}
|
39415
|
+
|
39416
|
+
return new HashMapImpl(iamap)
|
39417
|
+
}
|
39418
|
+
|
39419
|
+
/**
|
39420
|
+
* @ignore
|
39421
|
+
* @param {any} block
|
39422
|
+
*/
|
39423
|
+
function validateBlock (block) {
|
39424
|
+
if (!HashMapNode(block) && !HashMapRoot(block)) {
|
39425
|
+
const description = print(describe(block).schema);
|
39426
|
+
throw new Error(`Internal error: unexpected layout for HashMap block does not match schema, got:\n${description}`)
|
39427
|
+
}
|
39428
|
+
}
|
39429
|
+
|
39430
|
+
const create$1 = HashMapImpl.create;
|
39431
|
+
const load$1 = HashMapImpl.load;
|
39432
|
+
|
39433
|
+
// @ts-nocheck
|
39434
|
+
// from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
|
39435
|
+
// MIT License Copyright (c) 2020 Dumitru Uzun
|
39436
|
+
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
39437
|
+
// of this software and associated documentation files (the "Software"), to deal
|
39438
|
+
// in the Software without restriction, including without limitation the rights
|
39439
|
+
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
39440
|
+
// copies of the Software, and to permit persons to whom the Software is
|
39441
|
+
// furnished to do so, subject to the following conditions:
|
39442
|
+
|
39443
|
+
// The above copyright notice and this permission notice shall be included in all
|
39444
|
+
// copies or substantial portions of the Software.
|
39445
|
+
|
39446
|
+
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
39447
|
+
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
39448
|
+
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
39449
|
+
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
39450
|
+
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
39451
|
+
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
39452
|
+
// SOFTWARE.
|
39453
|
+
|
39454
|
+
// import {
|
39455
|
+
// isLittleEndian, switchEndianness32
|
39456
|
+
// } from 'string-encode'
|
39457
|
+
|
39458
|
+
/**
|
39459
|
+
* SHA1 on binary array
|
39460
|
+
*
|
39461
|
+
* @param {Uint8Array} b Data to hash
|
39462
|
+
*
|
39463
|
+
* @return {Uint8Array} sha1 hash
|
39464
|
+
*/
|
39465
|
+
function rawSha1 (b) {
|
39466
|
+
let i = b.byteLength;
|
39467
|
+
let bs = 0;
|
39468
|
+
let A; let B; let C; let D; let G;
|
39469
|
+
const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
|
39470
|
+
const W = new Uint32Array(80);
|
39471
|
+
const nrWords = (i / 4 + 2) | 15;
|
39472
|
+
const words = new Uint32Array(nrWords + 1);
|
39473
|
+
let j;
|
39474
|
+
|
39475
|
+
words[nrWords] = i * 8;
|
39476
|
+
words[i >> 2] |= 0x80 << (~i << 3);
|
39477
|
+
for (;i--;) {
|
39478
|
+
words[i >> 2] |= b[i] << (~i << 3);
|
39479
|
+
}
|
39480
|
+
|
39481
|
+
for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
|
39482
|
+
for (i = 0; i < 80;
|
39483
|
+
A[0] = (
|
39484
|
+
G = ((b = A[0]) << 5 | b >>> 27) +
|
39485
|
+
A[4] +
|
39486
|
+
(W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
|
39487
|
+
0x5A827999,
|
39488
|
+
B = A[1],
|
39489
|
+
C = A[2],
|
39490
|
+
D = A[3],
|
39491
|
+
G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
|
39492
|
+
? j !== 2
|
39493
|
+
? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
|
39494
|
+
: (B & C | B & D | C & D) + 0x34994343
|
39495
|
+
: B & C | ~B & D
|
39496
|
+
)
|
39497
|
+
)
|
39498
|
+
, A[1] = b
|
39499
|
+
, A[2] = B << 30 | B >>> 2
|
39500
|
+
, A[3] = C
|
39501
|
+
, A[4] = D
|
39502
|
+
, ++i
|
39503
|
+
) {
|
39504
|
+
G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
|
39505
|
+
}
|
39506
|
+
|
39507
|
+
for (i = 5; i;) H[--i] = H[i] + A[i];
|
39508
|
+
}
|
39509
|
+
|
39510
|
+
// if (isLittleEndian()) {
|
39511
|
+
// H = H.map(switchEndianness32)
|
39512
|
+
// }
|
39513
|
+
|
39514
|
+
return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
|
39515
|
+
}
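// Illustrative sketch (not part of the released bundle): rawSha1 returns a
// 20-byte digest; the Valet below hex-encodes it to derive `keyId` from the
// key material. `exampleKeyId` is hypothetical.
// eslint-disable-next-line no-unused-vars
function exampleKeyId (bytes) {
  const digest = rawSha1(bytes); // Uint8Array of 20 bytes
  return Array.from(digest).map(b => b.toString(16).padStart(2, '0')).join('')
}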
|
39516
|
+
|
39517
|
+
const chunker = bf(30);
|
39518
|
+
|
39519
|
+
const blockOpts$1 = { cache: nocache, chunker, codec: codec$1, hasher: sha256$2, compare: simpleCompare };
|
39520
|
+
|
39521
|
+
const NO_ENCRYPT = typeof process !== 'undefined' && !!process.env?.NO_ENCRYPT;
|
39522
|
+
// ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT
|
39523
|
+
|
39524
|
+
class Valet {
|
39525
|
+
idb = null
|
39526
|
+
name = null
|
39527
|
+
uploadQueue = null
|
39528
|
+
alreadyEnqueued = new Set()
|
39529
|
+
keyMaterial = null
|
39530
|
+
keyId = 'null'
|
39531
|
+
valetRoot = null
|
39532
|
+
valetRootCid = null // set by hydrate
|
39533
|
+
valetRootCarCid = null // most recent diff
|
39534
|
+
|
39535
|
+
valetCidBlocks = new VMemoryBlockstore()
|
39536
|
+
instanceId = Math.random().toString(36).slice(2)
|
39537
|
+
|
39538
|
+
/**
|
39539
|
+
* Function installed by the database to upload car files
|
39540
|
+
* @type {null|function(string, Uint8Array):Promise<void>}
|
39541
|
+
*/
|
39542
|
+
uploadFunction = null
|
39543
|
+
|
39544
|
+
constructor (name = 'default', keyMaterial) {
|
39545
|
+
this.name = name;
|
39546
|
+
this.setKeyMaterial(keyMaterial);
|
39547
|
+
this.uploadQueue = cargoQueue(async (tasks, callback) => {
|
39548
|
+
// console.log(
|
39549
|
+
// 'queue worker',
|
39550
|
+
// tasks.length,
|
39551
|
+
// tasks.reduce((acc, t) => acc + t.value.length, 0)
|
39552
|
+
// )
|
39553
|
+
if (this.uploadFunction) {
|
39554
|
+
// todo we can coalesce these into a single car file
|
39555
|
+
return await this.withDB(async db => {
|
39556
|
+
for (const task of tasks) {
|
39557
|
+
await this.uploadFunction(task.carCid, task.value);
|
39558
|
+
// update the IndexedDB record to mark this car as no longer pending
|
39559
|
+
const carMeta = await db.get('cidToCar', task.carCid);
|
39560
|
+
delete carMeta.pending;
|
39561
|
+
await db.put('cidToCar', carMeta);
|
39562
|
+
}
|
39563
|
+
})
|
39564
|
+
}
|
39565
|
+
callback();
|
39566
|
+
});
|
39567
|
+
|
39568
|
+
this.uploadQueue.drain(async () => {
|
39569
|
+
return await this.withDB(async db => {
|
39570
|
+
const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car);
|
39571
|
+
for (const carKey of carKeys) {
|
39572
|
+
await this.uploadFunction(carKey, await db.get('cars', carKey));
|
39573
|
+
const carMeta = await db.get('cidToCar', carKey);
|
39574
|
+
delete carMeta.pending;
|
39575
|
+
await db.put('cidToCar', carMeta);
|
39576
|
+
}
|
39577
|
+
})
|
39578
|
+
});
|
39579
|
+
}
|
39580
|
+
|
39581
|
+
getKeyMaterial () {
|
39582
|
+
return this.keyMaterial
|
39583
|
+
}
|
39584
|
+
|
39585
|
+
setKeyMaterial (km) {
|
39586
|
+
if (km && !NO_ENCRYPT) {
|
39587
|
+
const hex = Uint8Array.from(Buffer$G.from(km, 'hex'));
|
39588
|
+
this.keyMaterial = km;
|
39589
|
+
const hash = rawSha1(hex);
|
39590
|
+
this.keyId = Buffer$G.from(hash).toString('hex');
|
39591
|
+
} else {
|
39592
|
+
this.keyMaterial = null;
|
39593
|
+
this.keyId = 'null';
|
39594
|
+
}
|
39595
|
+
// console.trace('keyId', this.name, this.keyId)
|
39596
|
+
}
|
39597
|
+
|
39598
|
+
/**
|
39599
|
+
* Group the blocks into a car and write it to the valet.
|
39600
|
+
* @param {import('./blockstore.js').InnerBlockstore} innerBlockstore
|
39601
|
+
* @param {Set<string>} cids
|
39602
|
+
* @returns {Promise<void>}
|
39603
|
+
* @memberof Valet
|
39604
|
+
*/
|
39605
|
+
async writeTransaction (innerBlockstore, cids) {
|
39606
|
+
if (innerBlockstore.lastCid) {
|
39607
|
+
if (this.keyMaterial) {
|
39608
|
+
// console.log('encrypting car', innerBlockstore.label)
|
39609
|
+
// should we pass cids in instead of iterating from innerBlockstore?
|
39610
|
+
const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.keyMaterial);
|
39611
|
+
await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
|
39612
|
+
} else {
|
39613
|
+
const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore);
|
39614
|
+
await this.parkCar(newCar.cid.toString(), newCar.bytes, cids);
|
39615
|
+
}
|
39616
|
+
} else {
|
39617
|
+
throw new Error('missing lastCid for car header')
|
39618
|
+
}
|
39619
|
+
}
|
39620
|
+
|
39621
|
+
withDB = async dbWorkFun => {
|
39622
|
+
if (!this.idb) {
|
39623
|
+
this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
|
39624
|
+
upgrade (db, oldVersion, newVersion, transaction) {
|
39625
|
+
if (oldVersion < 1) {
|
39626
|
+
db.createObjectStore('cars'); // todo use database name
|
39627
|
+
const cidToCar = db.createObjectStore('cidToCar', { keyPath: 'car' });
|
39628
|
+
cidToCar.createIndex('cids', 'cids', { multiEntry: true });
|
39629
|
+
}
|
39630
|
+
if (oldVersion < 2) {
|
39631
|
+
const cidToCar = transaction.objectStore('cidToCar');
|
39632
|
+
cidToCar.createIndex('pending', 'pending');
|
39633
|
+
}
|
39634
|
+
}
|
39635
|
+
});
|
39636
|
+
}
|
39637
|
+
return await dbWorkFun(this.idb)
|
39638
|
+
}
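// Note: the IndexedDB database opened above is named `fp.<keyId>.<name>.valet`
// and holds two object stores: 'cars' (raw CAR bytes keyed by car CID string)
// and 'cidToCar' (one record per car, keyed by 'car', with a multi-entry
// 'cids' index for block-CID lookups and a 'pending' index for cars that have
// not yet been uploaded).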
|
39639
|
+
|
39640
|
+
/**
|
39641
|
+
* Iterate over all blocks in the store.
|
39642
|
+
*
|
39643
|
+
* @yields {{cid: string, value: Uint8Array}}
|
39644
|
+
* @returns {AsyncGenerator<any, any, any>}
|
39645
|
+
*/
|
39646
|
+
async * cids () {
|
39647
|
+
// console.log('valet cids')
|
39648
|
+
const db = await this.withDB(async db => db);
|
39649
|
+
const tx = db.transaction(['cidToCar'], 'readonly');
|
39650
|
+
let cursor = await tx.store.openCursor();
|
39651
|
+
while (cursor) {
|
39652
|
+
yield { cid: cursor.key, car: cursor.value.car };
|
39653
|
+
cursor = await cursor.continue();
|
39654
|
+
}
|
39655
|
+
}
|
39656
|
+
|
39657
|
+
setRootCarCid (cid) {
|
39658
|
+
this.valetRootCarCid = cid;
|
39659
|
+
this.valetRoot = null;
|
39660
|
+
this.valetRootCid = null;
|
39661
|
+
}
|
39662
|
+
|
39663
|
+
async getCarCIDForCID (cid) {
|
39664
|
+
// make a car reader for this.valetRootCarCid
|
39665
|
+
if (!this.valetRootCarCid) return
|
39666
|
+
|
39667
|
+
let indexNode;
|
39668
|
+
if (this.valetRoot) {
|
39669
|
+
indexNode = this.valetRoot;
|
39670
|
+
} else {
|
39671
|
+
const combinedReader = await this.getCombinedReader(this.valetRootCarCid);
|
39672
|
+
if (!this.valetRootCid) {
|
39673
|
+
const root = combinedReader.root.cid;
|
39674
|
+
// console.log('roots', this.instanceId, this.name, root, this.valetRootCarCid, this.valetRootCid)
|
39675
|
+
this.valetRootCid = root;
|
39676
|
+
}
|
39677
|
+
indexNode = await load$1(combinedReader, this.valetRootCid, {
|
39678
|
+
blockHasher: blockOpts$1.hasher,
|
39679
|
+
blockCodec: blockOpts$1.codec
|
39680
|
+
});
|
39681
|
+
}
|
39682
|
+
|
39683
|
+
const got = await indexNode.get(cid);
|
39684
|
+
// console.log('getCarCIDForCID', cid, got)
|
39685
|
+
return { result: got }
|
39686
|
+
}
|
39687
|
+
|
39688
|
+
async OLDgetCarCIDForCID (cid) {
|
39689
|
+
const carCid = await this.withDB(async db => {
|
39690
|
+
const tx = db.transaction(['cars', 'cidToCar'], 'readonly');
|
39691
|
+
const indexResp = await tx.objectStore('cidToCar').index('cids').get(cid);
|
39692
|
+
return indexResp?.car
|
39693
|
+
});
|
39694
|
+
return { result: carCid }
|
39695
|
+
}
|
39696
|
+
|
39697
|
+
async getCombinedReader (carCid) {
|
39698
|
+
let carMapReader;
|
39699
|
+
if (this.valetRootCarCid) {
|
39700
|
+
// todo only need this if we are cold starting
|
39701
|
+
carMapReader = await this.getCarReader(this.valetRootCarCid);
|
39702
|
+
}
|
39703
|
+
|
39704
|
+
const theseValetCidBlocks = this.valetCidBlocks;
|
39705
|
+
// console.log('theseValetCidBlocks', theseValetCidBlocks)
|
39706
|
+
const combinedReader = {
|
39707
|
+
root: carMapReader?.root,
|
39708
|
+
put: async (cid, bytes) => {
|
39709
|
+
// console.log('mapPut', cid, bytes.length)
|
39710
|
+
return await theseValetCidBlocks.put(cid, bytes)
|
39711
|
+
},
|
39712
|
+
get: async cid => {
|
39713
|
+
// console.log('mapGet', cid)
|
39714
|
+
try {
|
39715
|
+
const got = await theseValetCidBlocks.get(cid);
|
39716
|
+
return got.bytes
|
39717
|
+
} catch (e) {
|
39718
|
+
// console.log('get from car', cid, carMapReader)
|
39719
|
+
if (!carMapReader) throw e
|
39720
|
+
const bytes = await carMapReader.get(cid);
|
39721
|
+
await theseValetCidBlocks.put(cid, bytes);
|
39722
|
+
// console.log('mapGet', cid, bytes.length, bytes.constructor.name)
|
39723
|
+
return bytes
|
39724
|
+
}
|
39725
|
+
}
|
39726
|
+
};
|
39727
|
+
return combinedReader
|
39728
|
+
}
|
39729
|
+
|
39730
|
+
/**
|
39731
|
+
*
|
39732
|
+
* @param {string} carCid
|
39733
|
+
* @param {*} value
|
39734
|
+
*/
|
39735
|
+
async parkCar (carCid, value, cids) {
|
39736
|
+
// console.log('parkCar', this.instanceId, this.name, carCid, cids)
|
39737
|
+
const combinedReader = await this.getCombinedReader(carCid);
|
39738
|
+
const mapNode = await addCidsToCarIndex(
|
39739
|
+
combinedReader,
|
39740
|
+
this.valetRoot,
|
39741
|
+
this.valetRootCid,
|
39742
|
+
Array.from(cids).map(cid => ({ key: cid.toString(), value: carCid.toString() }))
|
39743
|
+
);
|
39744
|
+
|
39745
|
+
this.valetRoot = mapNode;
|
39746
|
+
this.valetRootCid = mapNode.cid;
|
39747
|
+
// make a block set with all the cids of the map
|
39748
|
+
const saveValetBlocks = new VMemoryBlockstore(); // todo this blockstore should read from the last valetCid car also
|
39749
|
+
|
39750
|
+
for await (const cidx of mapNode.cids()) {
|
39751
|
+
const bytes = await combinedReader.get(cidx);
|
39752
|
+
saveValetBlocks.put(cidx, bytes);
|
39753
|
+
}
|
39754
|
+
let newValetCidCar;
|
39755
|
+
if (this.keyMaterial) {
|
39756
|
+
newValetCidCar = await blocksToEncryptedCarBlock(this.valetRootCid, saveValetBlocks, this.keyMaterial);
|
39757
|
+
} else {
|
39758
|
+
newValetCidCar = await blocksToCarBlock(this.valetRootCid, saveValetBlocks);
|
39759
|
+
}
|
39760
|
+
// console.log('newValetCidCar', this.name, Math.floor(newValetCidCar.bytes.length / 1024))
|
39761
|
+
await this.withDB(async db => {
|
39762
|
+
const tx = db.transaction(['cars'], 'readwrite');
|
39763
|
+
await tx.objectStore('cars').put(value, carCid.toString());
|
39764
|
+
if (newValetCidCar) {
|
39765
|
+
if (this.valetRootCarCid) ;
|
39766
|
+
await tx.objectStore('cars').put(newValetCidCar.bytes, newValetCidCar.cid.toString());
|
39767
|
+
}
|
39768
|
+
return await tx.done
|
39769
|
+
});
|
39770
|
+
this.valetRootCarCid = newValetCidCar.cid; // goes to clock
|
39771
|
+
|
39772
|
+
// console.log('parked car', carCid, value.length, Array.from(cids))
|
39773
|
+
// upload to web3.storage if we have credentials
|
39774
|
+
if (this.uploadFunction) {
|
39775
|
+
if (this.alreadyEnqueued.has(carCid)) {
|
39776
|
+
// console.log('already enqueued', carCid)
|
39777
|
+
return
|
39778
|
+
}
|
39779
|
+
// don't await this, it will be done in the queue
|
39780
|
+
// console.log('add to queue', carCid, value.length)
|
39781
|
+
this.uploadQueue.push({ carCid, value });
|
39782
|
+
this.alreadyEnqueued.add(carCid);
|
39783
|
+
}
|
39784
|
+
}
|
39785
|
+
|
39786
|
+
remoteBlockFunction = null
|
39787
|
+
|
39788
|
+
async getCarReader (carCid) {
|
39789
|
+
carCid = carCid.toString();
|
39790
|
+
const carBytes = await this.withDB(async db => {
|
39791
|
+
const tx = db.transaction(['cars'], 'readonly');
|
39792
|
+
// console.log('getCarReader', carCid)
|
39793
|
+
return await tx.objectStore('cars').get(carCid)
|
39794
|
+
});
|
39795
|
+
const reader = await CarReader.fromBytes(carBytes);
|
39796
|
+
if (this.keyMaterial) {
|
39797
|
+
const roots = await reader.getRoots();
|
39798
|
+
const readerGetWithCodec = async cid => {
|
39799
|
+
const got = await reader.get(cid);
|
39800
|
+
// console.log('got.', cid.toString())
|
39801
|
+
let useCodec = codec;
|
39802
|
+
if (cid.toString().indexOf('bafy') === 0) {
|
39803
|
+
// todo cleanup types
|
39804
|
+
useCodec = codec$1;
|
39805
|
+
}
|
39806
|
+
const decoded = await decode$9({
|
39807
|
+
...got,
|
39808
|
+
codec: useCodec,
|
39809
|
+
hasher: sha256$2
|
39810
|
+
});
|
39811
|
+
// console.log('decoded', decoded.value)
|
39812
|
+
return decoded
|
39813
|
+
};
|
39814
|
+
const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.keyMaterial);
|
39815
|
+
|
39816
|
+
// last block is the root ???
|
39817
|
+
const rootBlock = blocks[blocks.length - 1];
|
39818
|
+
|
39819
|
+
return {
|
39820
|
+
root: rootBlock,
|
39821
|
+
get: async dataCID => {
|
39822
|
+
// console.log('getCarReader dataCID', dataCID)
|
39823
|
+
dataCID = dataCID.toString();
|
39824
|
+
const block = blocks.find(b => b.cid.toString() === dataCID);
|
39825
|
+
// console.log('getCarReader block', block)
|
39826
|
+
if (block) {
|
39827
|
+
return block.bytes
|
39828
|
+
}
|
39829
|
+
}
|
39830
|
+
}
|
39831
|
+
} else {
|
39832
|
+
return {
|
39833
|
+
root: reader.getRoots()[0],
|
39834
|
+
get: async dataCID => {
|
39835
|
+
const gotBlock = await reader.get(CID$1.parse(dataCID));
|
39836
|
+
if (gotBlock) {
|
39837
|
+
return gotBlock.bytes
|
39838
|
+
}
|
39839
|
+
}
|
39840
|
+
}
|
39841
|
+
}
|
39842
|
+
}
|
39843
|
+
|
39844
|
+
// todo memoize this
|
39845
|
+
async getValetBlock (dataCID) {
|
39846
|
+
// console.log('get valet block', dataCID)
|
39847
|
+
const { result: carCid } = await this.getCarCIDForCID(dataCID);
|
39848
|
+
if (!carCid) {
|
39849
|
+
throw new Error('Missing block: ' + dataCID)
|
39850
|
+
}
|
39851
|
+
const reader = await this.getCarReader(carCid);
|
39852
|
+
return await reader.get(dataCID)
|
39853
|
+
}
|
39854
|
+
}
|
39855
|
+
|
39856
|
+
const blocksToCarBlock = async (rootCids, blocks) => {
|
39857
|
+
// console.log('blocksToCarBlock', rootCids, blocks.constructor.name)
|
39858
|
+
let size = 0;
|
39859
|
+
if (!Array.isArray(rootCids)) {
|
39860
|
+
rootCids = [rootCids];
|
39861
|
+
}
|
39862
|
+
const headerSize = headerLength({ roots: rootCids });
|
39863
|
+
size += headerSize;
|
39864
|
+
if (!Array.isArray(blocks)) {
|
39865
|
+
blocks = Array.from(blocks.entries());
|
39866
|
+
}
|
39867
|
+
for (const { cid, bytes } of blocks) {
|
39868
|
+
// console.log(cid, bytes)
|
39869
|
+
size += blockLength({ cid, bytes });
|
39870
|
+
}
|
39871
|
+
const buffer = new Uint8Array(size);
|
39872
|
+
const writer = await createWriter(buffer, { headerSize });
|
39873
|
+
|
39874
|
+
for (const cid of rootCids) {
|
39875
|
+
writer.addRoot(cid);
|
39876
|
+
}
|
39877
|
+
|
39878
|
+
for (const { cid, bytes } of blocks) {
|
39879
|
+
writer.write({ cid, bytes });
|
39880
|
+
}
|
39881
|
+
await writer.close();
|
39882
|
+
return await encode$7({ value: writer.bytes, hasher: sha256$2, codec: raw })
|
39883
|
+
};
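`blocksToCarBlock` sizes a buffer with `headerLength`/`blockLength`, streams the roots and blocks through the CAR buffer writer, and wraps the finished CAR bytes in a raw-codec block. Those bundled identifiers correspond to the public `@ipld/car` and `multiformats` APIs; here is a small round-trip sketch using those packages directly (illustrative only, not code from this package):

```js
// CAR write/read round trip with the public @ipld/car buffer-writer and CarReader APIs.
import * as CarBufferWriter from '@ipld/car/buffer-writer'
import { CarReader } from '@ipld/car'
import * as Block from 'multiformats/block'
import * as raw from 'multiformats/codecs/raw'
import { sha256 } from 'multiformats/hashes/sha2'

const block = await Block.encode({ value: new TextEncoder().encode('hello'), codec: raw, hasher: sha256 })

const headerSize = CarBufferWriter.headerLength({ roots: [block.cid] })
const buffer = new Uint8Array(headerSize + CarBufferWriter.blockLength(block))
const writer = CarBufferWriter.createWriter(buffer, { headerSize })
writer.addRoot(block.cid)
writer.write(block)
const carBytes = writer.close()

const reader = await CarReader.fromBytes(carBytes)
console.log((await reader.getRoots())[0].toString(), (await reader.get(block.cid)).bytes)
```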
+
+const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
   const encryptionKey = Buffer$G.from(keyMaterial, 'hex');
   const encryptedBlocks = [];
   const theCids = [];
@@ -37332,6 +39940,58 @@ const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
   }
 };
 
+const addCidsToCarIndex = async (blockstore, valetRoot, valetRootCid, bulkOperations) => {
+  let indexNode;
+  if (valetRootCid) {
+    if (valetRoot) {
+      indexNode = valetRoot;
+    } else {
+      indexNode = await load$1(blockstore, valetRootCid, { blockHasher: blockOpts$1.hasher, blockCodec: blockOpts$1.codec });
+    }
+  } else {
+    indexNode = await create$1(blockstore, {
+      bitWidth: 4,
+      bucketSize: 2,
+      blockHasher: blockOpts$1.hasher,
+      blockCodec: blockOpts$1.codec
+    });
+  }
+  // console.log('adding', bulkOperations.length, 'cids to index')
+  for (const { key, value } of bulkOperations) {
+    // console.log('adding', key, value)
+    await indexNode.set(key, value);
+  }
+  return indexNode
+};
+
+class VMemoryBlockstore {
+  /** @type {Map<string, Uint8Array>} */
+  blocks = new Map()
+  instanceId = Math.random().toString(36).slice(2)
+
+  async get (cid) {
+    const bytes = this.blocks.get(cid.toString());
+    // console.log('getvm', bytes.constructor.name, this.instanceId, cid, bytes && bytes.length)
+    if (bytes.length === 253) ;
+    if (!bytes) throw new Error('block not found ' + cid.toString())
+    return { cid, bytes }
+  }
+
+  /**
+   * @param {import('../src/link').AnyLink} cid
+   * @param {Uint8Array} bytes
+   */
+  async put (cid, bytes) {
+    this.blocks.set(cid.toString(), bytes);
+  }
+
+  * entries () {
+    for (const [str, bytes] of this.blocks) {
+      yield { cid: parse(str), bytes };
+    }
+  }
+}
+
 // const sleep = ms => new Promise(r => setTimeout(r, ms))
 
 const husherMap = new Map();
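`addCidsToCarIndex` maintains the CID-to-CAR mapping as a content-addressed hash map: load (or create) the map from the current root, `set` each new block CID to the CAR that contains it, and let the caller persist the map's own blocks into the next valet CAR. The bundled `create$1`/`load$1` calls and the `bitWidth`/`bucketSize`/`blockHasher`/`blockCodec` options match the `ipld-hashmap` API; the sketch below assumes that library, so treat the import and options as an inference from this bundle rather than a documented dependency:

```js
// Illustrative sketch of the cid -> car index pattern, assuming the ipld-hashmap API.
import { create, load } from 'ipld-hashmap'
import { sha256 } from 'multiformats/hashes/sha2'
import * as dagCbor from '@ipld/dag-cbor'

// Toy in-memory block loader playing the role of the combined reader above.
const store = new Map()
const loader = {
  async get (cid) { return store.get(cid.toString()) },
  async put (cid, bytes) { store.set(cid.toString(), bytes) }
}
const opts = { bitWidth: 4, bucketSize: 2, blockHasher: sha256, blockCodec: dagCbor }

let index = await create(loader, opts)
await index.set('<data block CID>', '<car CID>') // placeholder strings stand in for real CIDs
const rootCid = index.cid                        // persisted as the valet root between sessions

index = await load(loader, rootCid, opts)
console.log(await index.get('<data block CID>')) // -> '<car CID>'
```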
@@ -37410,7 +40070,7 @@ class TransactionBlockstore {
     // console.log('committedGet: ' + key + ' ' + this.instanceId, old.length)
     if (old) return old
     if (!this.valet) throw new Error('Missing block: ' + key)
-    const got = await this.valet.
+    const got = await this.valet.getValetBlock(key);
     this.committedBlocks.set(key, got);
     return got
   }
@@ -37625,7 +40285,7 @@ const makeGetBlock = blocks => {
     // const { cid, bytes } = await withLog(address, () => blocks.get(address))
     const { cid, bytes } = await blocks.get(address);
     // cids.add({ address: cid })
-    return create$
+    return create$7({ cid, bytes, hasher: sha256$2, codec: codec$1 })
   };
   return {
     // cids,
@@ -37752,7 +40412,7 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
   const event = await events.get(ancestor);
   const { root } = event.value.data;
   if (root) {
-    return load$
+    return load$4({ cid: root, get: getBlock, ...blockOpts })
   } else {
     // console.log('no root', root) // false means no common ancestor. null means empty database.
     return root
@@ -37806,7 +40466,7 @@ const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
     if (bulkOperations.every(op => op.del)) {
       return { root: null, blocks: [], clockCIDs: await events.all() }
     }
-    for await (const node of create$
+    for await (const node of create$5({ get: getBlock, list: bulkOperations, ...blockOpts })) {
      // root = await node.block
      root = node;
      newBlocks.push(await node.block);
@@ -38004,7 +40664,7 @@ async function visMerkleTree (blocks, head) {
   // if (!head) return
   if (head && !Array.isArray(head)) {
     const getBl = makeGetBlock(blocks);
-    const prollyRootNode = await load$
+    const prollyRootNode = await load$4({
       cid: head,
       get: getBl.getBlock,
       ...blockOpts
@@ -38288,7 +40948,6 @@ const parseCID = cid => (typeof cid === 'string' ? CID$1.parse(cid) : cid);
  * This is the main class for saving and loading JSON and other documents with the database. You can find additional examples and
  * usage guides in the repository README.
  *
- * @param {import('./blockstore.js').TransactionBlockstore} blocks - The block storage instance to use documents and indexes
  * @param {CID[]} clock - The Merkle clock head to use for the Fireproof instance.
  * @param {object} [config] - Optional configuration options for the Fireproof instance.
  * @param {object} [authCtx] - Optional authorization context object to use for any authentication checks.
@@ -38300,10 +40959,11 @@ class Database {
   rootCache = null
   eventsCache = new Map()
 
-  constructor (
-    this.name =
+  constructor (name, clock, config = {}) {
+    this.name = name;
     this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`;
-    this.blocks =
+    this.blocks = new TransactionBlockstore(name, config.key);
+    this.indexBlocks = new TransactionBlockstore(name + '.indexes', config.key);
     this.clock = clock;
     this.config = config;
   }
@@ -38320,6 +40980,8 @@ class Database {
       clock: this.clockToJSON(),
       name: this.name,
       key: this.blocks.valet?.getKeyMaterial(),
+      car: this.blocks.valet?.valetRootCarCid.toString(),
+      indexCar: this.indexBlocks.valet?.valetRootCarCid?.toString(),
       indexes: [...this.indexes.values()].map(index => index.toJSON())
     }
   }
@@ -38334,11 +40996,14 @@ class Database {
     return (clock || this.clock).map(cid => cid.toString())
   }
 
-  hydrate ({ clock, name, key }) {
+  hydrate ({ clock, name, key, car, indexCar }) {
     this.name = name;
     this.clock = clock;
     this.blocks.valet?.setKeyMaterial(key);
-    this.
+    this.blocks.valet?.setRootCarCid(car); // maybe
+    this.indexBlocks.valet?.setKeyMaterial(key);
+    this.indexBlocks.valet?.setRootCarCid(indexCar); // maybe
+    // this.indexBlocks = null
   }
 
   maybeSaveClock () {
@@ -38377,7 +41042,7 @@ class Database {
     let rows, dataCIDs, clockCIDs;
     // if (!aClock) aClock = []
     if (aClock && aClock.length > 0) {
-      aClock = aClock.map(
+      aClock = aClock.map(cid => cid.toString());
       const eventKey = JSON.stringify([...this.clockToJSON(aClock), ...this.clockToJSON()]);
 
       let resp;
@@ -38545,6 +41210,7 @@ class Database {
   * @returns {Promise<{ proof:{}, id: string, clock: CID[] }>} - The result of adding the event to storage
   */
   async putToProllyTree (decodedEvent, clock = null) {
+    // console.log('putToProllyTree', decodedEvent)
     const event = encodeEvent(decodedEvent);
     if (clock && JSON.stringify(this.clockToJSON(clock)) !== JSON.stringify(this.clockToJSON())) {
       // console.log('this.clock', this.clockToJSON())
@@ -38662,7 +41328,9 @@ class Database {
 
 async function cidsToProof (cids) {
   if (!cids) return []
-  if (!cids.all) {
+  if (!cids.all) {
+    return [...cids]
+  }
 
   const all = await cids.all();
   return [...all].map(cid => cid.toString())
@@ -38912,14 +41580,14 @@ const create = opts => {
     ...defaults,
     ...opts
   };
-  return create$
+  return create$5(opts);
 };
 const load = opts => {
   opts = {
     ...defaults,
     ...opts
   };
-  return load$
+  return load$4(opts);
 };
 
 // @ts-ignore
@@ -38974,21 +41642,21 @@ const makeDoc = ({ key, value }) => ({ _id: key, ...value });
  */
 const indexEntriesForChanges = (changes, mapFn) => {
   const indexEntries = [];
-  changes.forEach(({ key, value, del }) => {
+  changes.forEach(({ key: _id, value, del }) => {
     // key is _id, value is the document
     if (del || !value) return
     let mapCalled = false;
-    const mapReturn = mapFn(makeDoc({ key, value }), (k, v) => {
+    const mapReturn = mapFn(makeDoc({ key: _id, value }), (k, v) => {
      mapCalled = true;
      if (typeof k === 'undefined') return
      indexEntries.push({
-        key: [charwise.encode(k),
+        key: [charwise.encode(k), _id],
        value: v || null
      });
    });
    if (!mapCalled && mapReturn) {
      indexEntries.push({
-        key: [charwise.encode(mapReturn),
+        key: [charwise.encode(mapReturn), _id],
        value: null
      });
    }
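Renaming the destructured `key` to `_id` makes the index key shape explicit: every entry is the pair `[charwise.encode(mapKey), _id]`, so two documents that emit the same map key still produce distinct index entries. A small sketch of the resulting entries for a hypothetical map function (the documents and values below are made up for illustration, and `indexEntriesForChanges` is the bundle-internal helper shown above, not a public export):

```js
// Hypothetical input: two docs emit the same map key 'todo'.
const changes = [
  { key: 'doc-1', value: { type: 'todo', text: 'water plants' } },
  { key: 'doc-2', value: { type: 'todo', text: 'feed cat' } }
]
const mapFn = (doc, emit) => emit(doc.type)

const entries = indexEntriesForChanges(changes, mapFn)
// -> [ { key: [charwise.encode('todo'), 'doc-1'], value: null },
//      { key: [charwise.encode('todo'), 'doc-2'], value: null } ]
```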
@@ -39012,12 +41680,6 @@ class DbIndex {
   */
  constructor (database, name, mapFn, clock = null, opts = {}) {
    this.database = database;
-    if (!database.indexBlocks) {
-      database.indexBlocks = new TransactionBlockstore(
-        database?.name + '.indexes',
-        database.blocks.valet?.getKeyMaterial()
-      );
-    }
    if (typeof name === 'function') {
      // app is using deprecated API, remove in 0.7
      opts = clock || {};
@@ -39170,7 +41832,14 @@ class DbIndex {
     await loadIndex(this.database.indexBlocks, this.indexByKey, dbIndexOpts);
     if (!this.indexByKey.root) return { result: [] }
     if (query.includeDocs === undefined) query.includeDocs = this.includeDocsDefault;
-    if (query.
+    if (query.prefix) {
+      // ensure prefix is an array
+      if (!Array.isArray(query.prefix)) query.prefix = [query.prefix];
+      const start = [...query.prefix, NaN];
+      const end = [...query.prefix, Infinity];
+      const prefixRange = [start, end].map(key => charwise.encode(key));
+      return await this.applyQuery(await this.indexByKey.root.range(...prefixRange), query)
+    } else if (query.range) {
       const encodedRange = query.range.map(key => charwise.encode(key));
       return await this.applyQuery(await this.indexByKey.root.range(...encodedRange), query)
     } else if (query.key) {
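The new `prefix` branch turns a prefix into a range query: the prefix is wrapped in an array if needed, then padded with `NaN` and `Infinity` sentinels so that, after `charwise` encoding, the range covers every compound key that starts with that prefix. A hedged usage sketch; it assumes the index is keyed on an array and that `DbIndex` is reachable as the package's `Index` export, as in the README examples:

```js
// Usage sketch for the new prefix queries, assuming an index keyed on [type, date].
const byTypeAndDate = new Index(database, 'byTypeAndDate', doc => [doc.type, doc.date])

// All rows whose key starts with 'todo', regardless of date:
const todos = await byTypeAndDate.query({ prefix: 'todo' })

// Equivalent explicit form; single values are wrapped into a one-element array internally:
const sameTodos = await byTypeAndDate.query({ prefix: ['todo'] })
```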
@@ -41337,7 +44006,7 @@ class Sync {
   destroy () {
     this.database.blocks.syncs.delete(this);
     this.status = 'destroyed';
-    this.peer.destroy()
+    // this.peer.destroy() todo
   }
 
   async sendUpdate (blockstore) {
@@ -41428,22 +44097,29 @@ class Fireproof {
   static storage = (name = null, opts = {}) => {
     if (name) {
       opts.name = name;
+      // todo this can come from a registry also
       const existing = localGet('fp.' + name);
       if (existing) {
         const existingConfig = JSON.parse(existing);
-
-        return Fireproof.fromJSON(existingConfig, fp)
+        return Fireproof.fromConfig(name, existingConfig, opts)
       } else {
         const instanceKey = browserExports$1(32).toString('hex'); // pass null to disable encryption
-
+        opts.key = instanceKey;
+        return new Database(name, [], opts)
       }
     } else {
-      return new Database(
+      return new Database(null, [], opts)
     }
   }
 
+  static fromConfig (name, existingConfig, opts = {}) {
+    opts.key = existingConfig.key;
+    const fp = new Database(name, [], opts);
+    return Fireproof.fromJSON(existingConfig, fp)
+  }
+
   static fromJSON (json, database) {
-    database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
+    database.hydrate({ car: json.car, indexCar: json.indexCar, clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key });
     if (json.indexes) {
       for (const {
         name,
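`Fireproof.storage` now persists enough to reopen a named database: on first use it generates a random encryption key and constructs a fresh `Database`, and on later calls `fromConfig` restores the key before `fromJSON`/`hydrate` restore the clock plus the new `car` and `indexCar` roots emitted by `toJSON`. A hedged usage sketch; it assumes `Fireproof` is the package's named export, and the explicit `localStorage.setItem` is a conservative assumption since this hunk only shows the config being read back from the `fp.<name>` key, not written:

```js
// Round-trip sketch: create a named database, then reopen it in a later session.
import { Fireproof } from '@fireproof/core'

const db = Fireproof.storage('todos')                   // first run: fresh key, empty clock
await db.put({ _id: 'welcome', hello: 'world' })
localStorage.setItem('fp.todos', JSON.stringify(db.toJSON()))

// later, e.g. after a page reload:
const reopened = Fireproof.storage('todos')             // finds 'fp.todos' and hydrates key, clock, car roots
console.log(await reopened.get('welcome'))
```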
@@ -41466,7 +44142,8 @@ class Fireproof {
 
   static snapshot (database, clock) {
     const definition = database.toJSON();
-    const withBlocks = new Database(database.
+    const withBlocks = new Database(database.name);
+    withBlocks.blocks = database.blocks;
     if (clock) {
       definition.clock = clock.map(c => parseCID(c));
       definition.indexes.forEach(index => {