omnipin 2.1.1 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +842 -509
- package/dist/rslib-runtime.js +3 -3
- package/package.json +10 -12
package/dist/index.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
var dag_pb_src_namespaceObject, car_namespaceObject, src_cbor_namespaceObject, schema_link_namespaceObject, request_namespaceObject, response_namespaceObject, sha256_namespaceObject, base10_namespaceObject, base16_namespaceObject, base2_namespaceObject, base256emoji_namespaceObject, base32_namespaceObject, base36_namespaceObject, base58_namespaceObject, base64_namespaceObject, base8_namespaceObject, bases_identity_namespaceObject, raw_namespaceObject, identity_namespaceObject, hashes_sha2_namespaceObject, MSB, MSBALL, INT, MSB$1, REST$1, _computedKey,
|
|
2
|
+
var dag_pb_src_namespaceObject, car_namespaceObject, src_cbor_namespaceObject, schema_link_namespaceObject, request_namespaceObject, response_namespaceObject, sha256_namespaceObject, base10_namespaceObject, base16_namespaceObject, base2_namespaceObject, base256emoji_namespaceObject, base32_namespaceObject, base36_namespaceObject, base58_namespaceObject, base64_namespaceObject, base8_namespaceObject, bases_identity_namespaceObject, raw_namespaceObject, identity_namespaceObject, hashes_sha2_namespaceObject, MSB, MSBALL, INT, MSB$1, REST$1, _computedKey, __require, Queue, Counter, Aborter, Walker, APIBuilder, Builder, codec_CODEC_TYPES, unixfs_Data, unixfs_UnixTime, unixfs_Metadata, COMPRESSED$1, COMPRESSED, curveDef, CODEC_TYPES, Data, DataType, __DataTypeValues, UnixTime, Metadata;
|
|
3
3
|
let _sha512Sync, _fsReadFn, _fsWriteFn, SHIFTED_RANK, EXCLUSIONS, DECOMP, RECOMP, MAPPED, IGNORED, CM, NSM, ESCAPE, GROUPS, WHOLE_VALID, WHOLE_MAP, VALID, EMOJI_LIST, EMOJI_ROOT, create, slab, SIZE, MAX, offset, __DataTypeValues1, _codec, _codec1, _codec2;
|
|
4
4
|
function makeFullPath(cli, path = []) {
|
|
5
5
|
return cli.parent ? makeFullPath(cli.parent, path) : path;
|
|
@@ -1430,7 +1430,7 @@ function parseError(error) {
|
|
|
1430
1430
|
stack: error instanceof Error ? error.stack : void 0
|
|
1431
1431
|
});
|
|
1432
1432
|
}
|
|
1433
|
-
function Provider_from(provider
|
|
1433
|
+
function Provider_from(provider) {
|
|
1434
1434
|
if (!provider) throw new IsUndefinedError();
|
|
1435
1435
|
return {
|
|
1436
1436
|
...provider,
|
|
@@ -2128,18 +2128,18 @@ function AbiParameters_decode(parameters, data, options = {}) {
|
|
|
2128
2128
|
if ('address' === param.type) return function(cursor, options = {}) {
|
|
2129
2129
|
let address, { checksum = !1 } = options;
|
|
2130
2130
|
return [
|
|
2131
|
-
(address = Hex_fromBytes(function(value,
|
|
2131
|
+
(address = Hex_fromBytes(function(value, options = {}) {
|
|
2132
2132
|
let { strict } = options;
|
|
2133
2133
|
!1;
|
|
2134
2134
|
let value_ = value.slice(-20, void 0);
|
|
2135
|
-
return strict && function(value,
|
|
2135
|
+
return strict && function(value, end) {
|
|
2136
2136
|
if ('number' == typeof end && Bytes_size(value) !== end - -20) throw new Bytes_SliceOffsetOutOfBoundsError({
|
|
2137
2137
|
offset: end,
|
|
2138
2138
|
position: 'end',
|
|
2139
2139
|
size: Bytes_size(value)
|
|
2140
2140
|
});
|
|
2141
|
-
}(value_,
|
|
2142
|
-
}(cursor.readBytes(32)
|
|
2141
|
+
}(value_, void 0), value_;
|
|
2142
|
+
}(cursor.readBytes(32))), checksum ? Address_checksum(address) : address),
|
|
2143
2143
|
32
|
|
2144
2144
|
];
|
|
2145
2145
|
}(cursor, {
|
|
@@ -2431,7 +2431,7 @@ function AbiError_decode(...parameters) {
|
|
|
2431
2431
|
if (Array.isArray(parameters[0])) {
|
|
2432
2432
|
let [abi, name, data, options] = parameters;
|
|
2433
2433
|
return [
|
|
2434
|
-
function(abi, name
|
|
2434
|
+
function(abi, name) {
|
|
2435
2435
|
if ('Error' === name) return solidityError;
|
|
2436
2436
|
if ('Panic' === name) return solidityPanic;
|
|
2437
2437
|
if (validate(name, {
|
|
@@ -2982,7 +2982,7 @@ async function esm_verify(sig, message, publicKey) {
|
|
|
2982
2982
|
return k = modlLE(hashed), kA = ExtendedPoint.fromAffine(pub).multiplyUnsafe(k), ExtendedPoint.fromAffine(r).add(kA).subtract(SB).multiplyUnsafe(esm_CURVE.h).equals(ExtendedPoint.ZERO);
|
|
2983
2983
|
}
|
|
2984
2984
|
function bytesEncoder(bytes) {
|
|
2985
|
-
let bytesString =
|
|
2985
|
+
let bytesString = base64_base64.encode(bytes).slice(1);
|
|
2986
2986
|
return [
|
|
2987
2987
|
new Token(Type.map, 1 / 0, 1),
|
|
2988
2988
|
new Token(Type.string, '/', 1),
|
|
@@ -3027,35 +3027,20 @@ function parseAbility(ability) {
|
|
|
3027
3027
|
};
|
|
3028
3028
|
}
|
|
3029
3029
|
async function setup({ pk, proof }) {
|
|
3030
|
-
let
|
|
3030
|
+
let agentData = {
|
|
3031
3031
|
meta: {
|
|
3032
3032
|
name: 'agent',
|
|
3033
3033
|
type: 'device'
|
|
3034
3034
|
},
|
|
3035
|
-
principal,
|
|
3035
|
+
principal: signer_parse(pk),
|
|
3036
3036
|
delegations: new Map()
|
|
3037
3037
|
}, agent = new Agent(agentData);
|
|
3038
3038
|
try {
|
|
3039
3039
|
let delegation = await proof_parse(proof), space = fromDelegation(delegation);
|
|
3040
|
-
agentData.delegations.set(delegation.cid.toString(), {
|
|
3040
|
+
return agentData.delegations.set(delegation.cid.toString(), {
|
|
3041
3041
|
delegation,
|
|
3042
3042
|
meta: {}
|
|
3043
|
-
})
|
|
3044
|
-
let raw = {
|
|
3045
|
-
meta: agentData.meta,
|
|
3046
|
-
principal: principal.toArchive(),
|
|
3047
|
-
delegations: new Map()
|
|
3048
|
-
};
|
|
3049
|
-
for (let [key, value] of agentData.delegations)raw.delegations.set(key, {
|
|
3050
|
-
meta: value.meta,
|
|
3051
|
-
delegation: [
|
|
3052
|
-
...value.delegation.export()
|
|
3053
|
-
].map((b)=>({
|
|
3054
|
-
cid: b.cid.toString(),
|
|
3055
|
-
bytes: b.bytes.buffer.slice(b.bytes.byteOffset, b.bytes.byteOffset + b.bytes.byteLength)
|
|
3056
|
-
}))
|
|
3057
|
-
});
|
|
3058
|
-
return {
|
|
3043
|
+
}), {
|
|
3059
3044
|
agent,
|
|
3060
3045
|
space
|
|
3061
3046
|
};
|
|
@@ -3082,20 +3067,15 @@ async function retry(fn, options = {}) {
|
|
|
3082
3067
|
throw Error('Unexpected retry loop end');
|
|
3083
3068
|
}
|
|
3084
3069
|
async function poll(taskCid) {
    // Give the receipts service a beat before asking, then fetch once;
    // the surrounding retry() wrapper handles repeated attempts.
    await new Promise((resolve) => setTimeout(resolve, WAIT_MS));
    const receipt = await receipts_get(taskCid);
    if (receipt.error) {
        throw receipt.error;
    }
    return receipt.ok;
}
|
|
3095
3075
|
async function receipts_get(taskCid) {
|
|
3096
3076
|
let url = new URL(taskCid.toString(), receiptsEndpoint), workflowResponse = await fetch(url);
|
|
3097
3077
|
if (404 === workflowResponse.status) return {
|
|
3098
|
-
error: new
|
|
3078
|
+
error: new DOMException('Receipt not found', 'NotFoundError')
|
|
3099
3079
|
};
|
|
3100
3080
|
let agentMessageBytes = new Uint8Array(await workflowResponse.arrayBuffer()), agentMessage = await request_decode({
|
|
3101
3081
|
body: agentMessageBytes,
|
|
@@ -3119,7 +3099,7 @@ async function receipts_get(taskCid) {
|
|
|
3119
3099
|
};
|
|
3120
3100
|
}
|
|
3121
3101
|
return {
|
|
3122
|
-
error: new
|
|
3102
|
+
error: new DOMException(`failed to fetch receipt for task: ${taskCid}`, 'AbortError')
|
|
3123
3103
|
};
|
|
3124
3104
|
}
|
|
3125
3105
|
return {
|
|
@@ -3582,7 +3562,7 @@ function normalizePattern(pattern, expandDirectories, cwd, props, isIgnore) {
|
|
|
3582
3562
|
newCommonPath.pop();
|
|
3583
3563
|
break;
|
|
3584
3564
|
}
|
|
3585
|
-
if (part !== props.commonPath[i] || function(pattern
|
|
3565
|
+
if (part !== props.commonPath[i] || function(pattern) {
|
|
3586
3566
|
let scan = picomatch.scan(pattern);
|
|
3587
3567
|
return scan.isGlob || scan.negated;
|
|
3588
3568
|
}(part) || i === parts.length - 1) break;
|
|
@@ -3685,12 +3665,12 @@ async function glob(patternsOrOptions, options) {
|
|
|
3685
3665
|
return "." !== relativePath && !partialMatcher(relativePath) || ignore(relativePath);
|
|
3686
3666
|
},
|
|
3687
3667
|
fs: options.fs ? {
|
|
3688
|
-
readdir: options.fs.readdir ||
|
|
3689
|
-
readdirSync: options.fs.readdirSync ||
|
|
3690
|
-
realpath: options.fs.realpath ||
|
|
3691
|
-
realpathSync: options.fs.realpathSync ||
|
|
3692
|
-
stat: options.fs.stat ||
|
|
3693
|
-
statSync: options.fs.statSync ||
|
|
3668
|
+
readdir: options.fs.readdir || fs_0.readdir,
|
|
3669
|
+
readdirSync: options.fs.readdirSync || fs_0.readdirSync,
|
|
3670
|
+
realpath: options.fs.realpath || fs_0.realpath,
|
|
3671
|
+
realpathSync: options.fs.realpathSync || fs_0.realpathSync,
|
|
3672
|
+
stat: options.fs.stat || fs_0.stat,
|
|
3673
|
+
statSync: options.fs.statSync || fs_0.statSync
|
|
3694
3674
|
} : void 0,
|
|
3695
3675
|
pathSeparator: "/",
|
|
3696
3676
|
relativePaths: !0,
|
|
@@ -4135,6 +4115,30 @@ async function* buildFileBatch(file, blockstore, options) {
|
|
|
4135
4115
|
// An import result is "single block" only when its `single` flag is literally `true`
// (truthy non-boolean values do not count).
function isSingleBlockImport(result) {
    return result.single === true;
}
|
|
4118
|
+
// Number of bytes needed to encode `x` as a varint.
// Mirrors the Go-style sov() trick: forcing x odd means x === 0 still
// reports 1 byte, without a special case.
function varintLen(x) {
    if (x % 2 === 0) {
        x++;
    }
    // Accumulate the bit length of x via successive range checks, using the
    // shared len8tab lookup for the final low byte.
    let bits = 0;
    let v = x;
    if (v >= pb_size_maxInt32) {
        v = Math.floor(v / pb_size_maxInt32);
        bits = 32;
    }
    if (v >= 65536) {
        v >>>= 16;
        bits += 16;
    }
    if (v >= 256) {
        v >>>= 8;
        bits += 8;
    }
    // Convert bit length to varint byte count (7 payload bits per byte).
    return Math.floor((bits + pb_size_len8tab[v] + 6) / 7);
}
|
|
4123
|
+
// Byte length of `str` when encoded as UTF-8, computed without allocating
// an encoded buffer. Walks UTF-16 code units; a high surrogate followed by
// any further unit is counted as a 4-byte pair (two units consumed), while
// a trailing lone surrogate falls through to the 3-byte case.
function utf8ByteLength(str) {
    let total = 0;
    for (let i = 0; i < str.length; i++) {
        const code = str.charCodeAt(i);
        if (code < 0x80) {
            total += 1;
        } else if (code < 0x800) {
            total += 2;
        } else if (code >= 0xd800 && code <= 0xdbff && i + 1 < str.length) {
            i++;
            total += 4;
        } else {
            total += 3;
        }
    }
    return total;
}
|
|
4131
|
+
// Cheap size estimate for a directory link: name bytes plus the child's CID
// length. A child missing either a CID or a size contributes nothing.
function estimateLinkSize(nameBytes, child) {
    if (child?.cid == null || child?.size == null) {
        return 0;
    }
    return nameBytes + child.cid.byteLength;
}
|
|
4134
|
+
// Exact serialized size of a directory link (presumably a dag-pb PBLink —
// matches the tag + varint-length + payload wire layout). Returns 0 when
// the child has no CID or no size yet.
function calculateLinkSize(nameBytes, child) {
    if (child?.cid == null || child?.size == null) {
        return 0;
    }
    const cidLen = child.cid.byteLength;
    const tsize = Number(child.size);
    // Each field: 1-byte tag, then (for length-delimited fields) a varint
    // length prefix and the payload; Tsize is a bare varint after its tag.
    const hashField = 1 + varintLen(cidLen) + cidLen;
    const nameField = 1 + varintLen(nameBytes) + nameBytes;
    const tsizeField = 1 + varintLen(tsize);
    const linkLen = hashField + nameField + tsizeField;
    // The whole link is itself embedded length-delimited in the parent node.
    return 1 + varintLen(linkLen) + linkLen;
}
|
|
4138
4142
|
function add64(a, b) {
|
|
4139
4143
|
let lo = a.lo + b.lo >>> 0, hi = a.hi + b.hi + +(lo < a.lo) >>> 0;
|
|
4140
4144
|
return {
|
|
@@ -4290,13 +4294,13 @@ async function* dir_sharded_flush(bucket, blockstore, shardRoot, options) {
|
|
|
4290
4294
|
if (child instanceof Bucket) {
|
|
4291
4295
|
let shard;
|
|
4292
4296
|
for await (let subShard of dir_sharded_flush(child, blockstore, null, options))shard = subShard;
|
|
4293
|
-
if (null == shard) throw Error('Could not flush sharded directory, no
|
|
4297
|
+
if (null == shard) throw Error('Could not flush sharded directory, no sub-shard found');
|
|
4294
4298
|
links.push({
|
|
4295
4299
|
Name: labelPrefix,
|
|
4296
4300
|
Tsize: Number(shard.size),
|
|
4297
4301
|
Hash: shard.cid
|
|
4298
4302
|
}), childrenSize += shard.size;
|
|
4299
|
-
} else if (
|
|
4303
|
+
} else if (dir_sharded_isDir(child.value)) {
|
|
4300
4304
|
let flushedDir;
|
|
4301
4305
|
for await (let entry of child.value.flush(blockstore))flushedDir = entry, yield flushedDir;
|
|
4302
4306
|
if (null == flushedDir) throw Error('Did not flush dir');
|
|
@@ -4321,7 +4325,7 @@ async function* dir_sharded_flush(bucket, blockstore, shardRoot, options) {
|
|
|
4321
4325
|
type: 'hamt-sharded-directory',
|
|
4322
4326
|
data: Uint8Array.from(children.bitField().reverse()),
|
|
4323
4327
|
fanout: BigInt(bucket.tableSize()),
|
|
4324
|
-
hashType:
|
|
4328
|
+
hashType: DEFAULT_SHARD_HASH_CODE,
|
|
4325
4329
|
mtime: shardRoot?.mtime,
|
|
4326
4330
|
mode: shardRoot?.mode
|
|
4327
4331
|
}), buffer = dag_pb_src_encode(util_prepare({
|
|
@@ -4334,20 +4338,69 @@ async function* dir_sharded_flush(bucket, blockstore, shardRoot, options) {
|
|
|
4334
4338
|
size
|
|
4335
4339
|
};
|
|
4336
4340
|
}
|
|
4337
|
-
|
|
4338
|
-
|
|
4339
|
-
|
|
4341
|
+
// Duck-type check: anything exposing a callable `flush` is treated as a
// directory node here.
function dir_sharded_isDir(obj) {
    return typeof obj.flush === 'function';
}
|
|
4344
|
+
// Persist a HAMT shard node built from `bucket` and report its CID plus a
// size figure. Under the 'links-bytes' shard-split strategy the size is a
// label+CID byte estimate; otherwise it is the encoded block's length.
async function calculateSize(bucket, shardRoot, blocks, options) {
    const children = bucket._children;
    // Labels are zero-padded hex indexes wide enough for the bucket table.
    const padLength = (bucket.tableSize() - 1).toString(16).length;
    const links = [];
    let estimatedBytes = 0;

    for (let index = 0; index < children.length; index++) {
        const child = children.get(index);
        if (child == null) {
            continue;
        }
        const labelPrefix = index.toString(16).toUpperCase().padStart(padLength, '0');

        if (child instanceof Bucket) {
            // Sub-shard: recurse to persist it and link to the result.
            const { size, cid } = await calculateSize(child, null, blocks, options);
            links.push({
                Name: labelPrefix,
                Tsize: Number(size),
                Hash: cid
            });
            estimatedBytes += labelPrefix.length + cid.byteLength;
        } else if (dir_sharded_isDir(child.value)) {
            // Nested directory: it must already have been flushed to a CID.
            const childDir = child.value;
            const dirSize = childDir.nodeSize;
            if (childDir.cid == null) {
                throw Error('Child directory has not been persisted');
            }
            links.push({
                Name: labelPrefix + child.key,
                Tsize: Number(dirSize),
                Hash: childDir.cid
            });
            estimatedBytes += labelPrefix.length + childDir.cid.byteLength;
        } else {
            // Leaf entry (file or raw block).
            const leaf = child.value;
            links.push({
                Name: labelPrefix + child.key,
                Tsize: Number(leaf.size),
                Hash: leaf.cid
            });
            estimatedBytes += labelPrefix.length + leaf.cid.byteLength;
        }
    }

    const buffer = dag_pb_src_encode(util_prepare({
        Data: new UnixFS({
            type: 'hamt-sharded-directory',
            data: Uint8Array.from(children.bitField().reverse()),
            fanout: BigInt(bucket.tableSize()),
            hashType: DEFAULT_SHARD_HASH_CODE,
            mtime: shardRoot?.mtime,
            mode: shardRoot?.mode
        }).marshal(),
        Links: links
    }));

    return {
        cid: await persist(buffer, blocks, options),
        size: options.shardSplitStrategy === 'links-bytes' ? estimatedBytes : buffer.length
    };
}
|
|
4390
|
+
// Walk up from `dir` to the root, converting any flat directory whose
// estimated node size exceeds the shard-split threshold into a sharded one,
// and re-linking `child` / parents as conversions happen.
async function flatToShard(child, dir) {
    let current = dir;
    const threshold = dir.options.shardSplitThresholdBytes ?? 262144;
    if (dir instanceof DirFlat && await dir.estimateNodeSize() > threshold) {
        current = await convertToShard(dir);
    }
    const parent = current.parent;
    if (parent == null) {
        return current;
    }
    if (current !== dir) {
        // The directory was replaced by a shard: repoint the child and
        // re-register the new node under its key in the parent.
        if (child != null) {
            child.parent = current;
        }
        if (current.parentKey == null) {
            throw Error('No parent key found');
        }
        await parent.put(current.parentKey, current);
    }
    return flatToShard(current, parent);
}
|
|
4350
|
-
async function convertToShard(oldDir
|
|
4403
|
+
async function convertToShard(oldDir) {
|
|
4351
4404
|
let newDir = new dir_sharded({
|
|
4352
4405
|
root: oldDir.root,
|
|
4353
4406
|
dir: !0,
|
|
@@ -4358,7 +4411,7 @@ async function convertToShard(oldDir, options) {
|
|
|
4358
4411
|
flat: !1,
|
|
4359
4412
|
mtime: oldDir.mtime,
|
|
4360
4413
|
mode: oldDir.mode
|
|
4361
|
-
}, options);
|
|
4414
|
+
}, oldDir.options);
|
|
4362
4415
|
for (let { key, child } of oldDir.eachChildSeries())await newDir.put(key, child);
|
|
4363
4416
|
return newDir;
|
|
4364
4417
|
}
|
|
@@ -4368,7 +4421,7 @@ async function addToTree(elem, tree, options) {
|
|
|
4368
4421
|
let pathElem = pathElems[i];
|
|
4369
4422
|
currentPath += `${'' !== currentPath ? '/' : ''}${pathElem}`;
|
|
4370
4423
|
let last = i === lastIndex;
|
|
4371
|
-
if (parent.dirty = !0, parent.cid = void 0, parent.size = void 0, last) await parent.put(pathElem, elem), tree = await flatToShard(null, parent
|
|
4424
|
+
if (parent.dirty = !0, parent.cid = void 0, parent.size = void 0, last) await parent.put(pathElem, elem), tree = await flatToShard(null, parent);
|
|
4372
4425
|
else {
|
|
4373
4426
|
let dir = await parent.get(pathElem);
|
|
4374
4427
|
null != dir && dir instanceof Dir || (dir = new DirFlat({
|
|
@@ -4394,21 +4447,25 @@ async function* flushAndYield(tree, blockstore) {
|
|
|
4394
4447
|
yield* tree.flush(blockstore);
|
|
4395
4448
|
}
|
|
4396
4449
|
async function* src_importer(source, blockstore, options = {}) {
|
|
4397
|
-
var options1, options2, options3;
|
|
4398
|
-
let candidates, maxChildrenPerNode;
|
|
4450
|
+
var options1, options2, options3, options4;
|
|
4451
|
+
let candidates, chunkSize, maxChildrenPerNode, maxChildrenPerNode1;
|
|
4399
4452
|
candidates = Symbol.asyncIterator in source || Symbol.iterator in source ? source : [
|
|
4400
4453
|
source
|
|
4401
|
-
];
|
|
4402
|
-
let wrapWithDirectory = options.wrapWithDirectory ?? !1, shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144, shardFanoutBits = options.shardFanoutBits ?? 8, cidVersion = options.cidVersion ?? 1, rawLeaves = options.rawLeaves ?? !0, leafType = options.leafType ??
|
|
4454
|
+
], 'unixfs-v0-2015' === options.profile ? (options.shardSplitStrategy = options.shardSplitStrategy ?? 'links-bytes', options.cidVersion = options.cidVersion ?? 0, options.rawLeaves = options.rawLeaves ?? !1, chunkSize = 262144, maxChildrenPerNode = 174) : 'unixfs-v1-2025' === options.profile && (options.shardSplitStrategy = options.shardSplitStrategy ?? 'block-bytes', options.cidVersion = options.cidVersion ?? 1, options.rawLeaves = options.rawLeaves ?? !0, chunkSize = DEFAULT_CHUNK_SIZE_1MIB, maxChildrenPerNode = 1024);
|
|
4455
|
+
let wrapWithDirectory = options.wrapWithDirectory ?? !1, shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144, shardSplitStrategy = options.shardSplitStrategy ?? DEFAULT_SHARD_SPLIT_STRATEGY, shardFanoutBits = options.shardFanoutBits ?? 8, cidVersion = options.cidVersion ?? 1, rawLeaves = options.rawLeaves ?? !0, leafType = options.leafType ?? "file", fileImportConcurrency = options.fileImportConcurrency ?? 50, blockWriteConcurrency = options.blockWriteConcurrency ?? 10, reduceSingleLeafToSelf = options.reduceSingleLeafToSelf ?? !0, chunker = options.chunker ?? fixedSize({
|
|
4456
|
+
chunkSize
|
|
4457
|
+
}), chunkValidator = options.chunkValidator ?? defaultChunkValidator(), buildDag = options.dagBuilder ?? (options3 = {
|
|
4403
4458
|
chunker,
|
|
4404
4459
|
chunkValidator,
|
|
4405
4460
|
wrapWithDirectory,
|
|
4406
|
-
layout: options.layout ?? (
|
|
4461
|
+
layout: options.layout ?? (options1 = {
|
|
4462
|
+
maxChildrenPerNode
|
|
4463
|
+
}, maxChildrenPerNode1 = options1?.maxChildrenPerNode ?? DEFAULT_MAX_CHILDREN_PER_NODE, async function balancedLayout(source, reduce) {
|
|
4407
4464
|
let roots = [];
|
|
4408
|
-
for await (let chunked of dist_src(source,
|
|
4465
|
+
for await (let chunked of dist_src(source, maxChildrenPerNode1))roots.push(await reduce(chunked));
|
|
4409
4466
|
return roots.length > 1 ? balancedLayout(roots, reduce) : roots[0];
|
|
4410
4467
|
}),
|
|
4411
|
-
bufferImporter: options.bufferImporter ?? (
|
|
4468
|
+
bufferImporter: options.bufferImporter ?? (options2 = {
|
|
4412
4469
|
cidVersion,
|
|
4413
4470
|
rawLeaves,
|
|
4414
4471
|
leafType,
|
|
@@ -4418,18 +4475,18 @@ async function* src_importer(source, blockstore, options = {}) {
|
|
|
4418
4475
|
for await (let block of file.content)yield async ()=>{
|
|
4419
4476
|
let unixfs, opts = {
|
|
4420
4477
|
codec: dag_pb_src_namespaceObject,
|
|
4421
|
-
cidVersion:
|
|
4422
|
-
onProgress:
|
|
4478
|
+
cidVersion: options2.cidVersion,
|
|
4479
|
+
onProgress: options2.onProgress
|
|
4423
4480
|
};
|
|
4424
|
-
|
|
4481
|
+
options2.rawLeaves ? (opts.codec = raw_namespaceObject, opts.cidVersion = 1) : block = dag_pb_src_encode({
|
|
4425
4482
|
Data: (unixfs = new UnixFS({
|
|
4426
|
-
type:
|
|
4483
|
+
type: options2.leafType,
|
|
4427
4484
|
data: block
|
|
4428
4485
|
})).marshal(),
|
|
4429
4486
|
Links: []
|
|
4430
4487
|
});
|
|
4431
4488
|
let cid = await persist(block, blockstore, opts);
|
|
4432
|
-
return bytesWritten += BigInt(block.byteLength),
|
|
4489
|
+
return bytesWritten += BigInt(block.byteLength), options2.onProgress?.(new CustomProgressEvent('unixfs:importer:progress:file:write', {
|
|
4433
4490
|
bytesWritten,
|
|
4434
4491
|
cid,
|
|
4435
4492
|
path: file.path
|
|
@@ -4457,7 +4514,7 @@ async function* src_importer(source, blockstore, options = {}) {
|
|
|
4457
4514
|
mode: entry.mode,
|
|
4458
4515
|
content: async function*() {
|
|
4459
4516
|
let bytesRead = 0n;
|
|
4460
|
-
for await (let chunk of
|
|
4517
|
+
for await (let chunk of options3.chunker(options3.chunkValidator(function(content) {
|
|
4461
4518
|
try {
|
|
4462
4519
|
if (content instanceof Uint8Array) return async function*() {
|
|
4463
4520
|
yield content;
|
|
@@ -4470,7 +4527,7 @@ async function* src_importer(source, blockstore, options = {}) {
|
|
|
4470
4527
|
throw new InvalidContentError('Content was invalid');
|
|
4471
4528
|
}(entry.content)))){
|
|
4472
4529
|
let currentChunkSize = BigInt(chunk.byteLength);
|
|
4473
|
-
bytesRead += currentChunkSize,
|
|
4530
|
+
bytesRead += currentChunkSize, options3.onProgress?.(new CustomProgressEvent('unixfs:importer:progress:file:read', {
|
|
4474
4531
|
bytesRead,
|
|
4475
4532
|
chunkSize: currentChunkSize,
|
|
4476
4533
|
path: entry.path
|
|
@@ -4478,22 +4535,23 @@ async function* src_importer(source, blockstore, options = {}) {
|
|
|
4478
4535
|
}
|
|
4479
4536
|
}(),
|
|
4480
4537
|
originalPath
|
|
4481
|
-
}, fileBuilder =
|
|
4482
|
-
yield async ()=>fileBuilder(file, blockstore,
|
|
4538
|
+
}, fileBuilder = options3.fileBuilder ?? defaultFileBuilder;
|
|
4539
|
+
yield async ()=>fileBuilder(file, blockstore, options3);
|
|
4483
4540
|
} else if (null != entry.path) {
|
|
4484
4541
|
let dir = {
|
|
4485
4542
|
path: entry.path,
|
|
4486
4543
|
mtime: entry.mtime,
|
|
4487
4544
|
mode: entry.mode,
|
|
4488
4545
|
originalPath
|
|
4489
|
-
}, dirBuilder =
|
|
4490
|
-
yield async ()=>dirBuilder(dir, blockstore,
|
|
4546
|
+
}, dirBuilder = options3.dirBuilder ?? defaultDirBuilder;
|
|
4547
|
+
yield async ()=>dirBuilder(dir, blockstore, options3);
|
|
4491
4548
|
} else throw Error('Import candidate must have content or path or both');
|
|
4492
4549
|
}
|
|
4493
4550
|
});
|
|
4494
|
-
for await (let entry of (options.treeBuilder ?? (
|
|
4551
|
+
for await (let entry of (options.treeBuilder ?? (options4 = {
|
|
4495
4552
|
wrapWithDirectory,
|
|
4496
4553
|
shardSplitThresholdBytes,
|
|
4554
|
+
shardSplitStrategy,
|
|
4497
4555
|
shardFanoutBits,
|
|
4498
4556
|
cidVersion,
|
|
4499
4557
|
onProgress: options.onProgress
|
|
@@ -4504,13 +4562,13 @@ async function* src_importer(source, blockstore, options = {}) {
|
|
|
4504
4562
|
path: '',
|
|
4505
4563
|
dirty: !0,
|
|
4506
4564
|
flat: !0
|
|
4507
|
-
},
|
|
4565
|
+
}, options4), singleRoot = !1;
|
|
4508
4566
|
for await (let entry of source){
|
|
4509
4567
|
if (null == entry) continue;
|
|
4510
4568
|
let dir = `${entry.originalPath ?? ''}`.split('/')[0];
|
|
4511
|
-
null != dir && '' !== dir && (null == rootDir ? (rootDir = dir, singleRoot = !0) : rootDir !== dir && (singleRoot = !1)), tree = await addToTree(entry, tree,
|
|
4569
|
+
null != dir && '' !== dir && (null == rootDir ? (rootDir = dir, singleRoot = !0) : rootDir !== dir && (singleRoot = !1)), tree = await addToTree(entry, tree, options4), entry.unixfs?.isDirectory() !== !0 && (yield entry);
|
|
4512
4570
|
}
|
|
4513
|
-
if (
|
|
4571
|
+
if (options4.wrapWithDirectory || singleRoot && tree.childCount() > 1) yield* flushAndYield(tree, block);
|
|
4514
4572
|
else for (let unwrapped of tree.eachChildSeries())null != unwrapped && (yield* flushAndYield(unwrapped.child, block));
|
|
4515
4573
|
}))(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore))yield {
|
|
4516
4574
|
cid: entry.cid,
|
|
@@ -4935,12 +4993,13 @@ function _writeString(buffer, str, offset, size) {
|
|
|
4935
4993
|
function _leftPad(input, targetLength) {
|
|
4936
4994
|
return String(input).padStart(targetLength, "0");
|
|
4937
4995
|
}
|
|
4938
|
-
import * as __rspack_external_node_crypto_9ba42079 from "node:crypto";
|
|
4939
|
-
import * as __rspack_external_crypto from "crypto";
|
|
4940
|
-
import * as __rspack_external_fs from "fs";
|
|
4941
4996
|
import { __webpack_require__ } from "./rslib-runtime.js";
|
|
4942
4997
|
import { parseArgs, styleText } from "node:util";
|
|
4998
|
+
import node_crypto, * as __rspack_external_node_crypto_9ba42079 from "node:crypto";
|
|
4999
|
+
import { createHash, createHmac, randomInt } from "node:crypto";
|
|
4943
5000
|
import { setTimeout as promises_setTimeout } from "node:timers/promises";
|
|
5001
|
+
import crypto_0, * as __rspack_external_crypto from "crypto";
|
|
5002
|
+
import fs_0, * as __rspack_external_fs from "fs";
|
|
4944
5003
|
import { promisify } from "util";
|
|
4945
5004
|
import { Buffer } from "node:buffer";
|
|
4946
5005
|
import { constants, createReadStream, createWriteStream } from "node:fs";
|
|
@@ -6279,9 +6338,9 @@ __webpack_require__.add({
|
|
|
6279
6338
|
base58btc: ()=>base58_base58btc,
|
|
6280
6339
|
base58flickr: ()=>base58flickr
|
|
6281
6340
|
}), base64_namespaceObject = {}, __webpack_require__.r(base64_namespaceObject), __webpack_require__.d(base64_namespaceObject, {
|
|
6282
|
-
base64: ()=>
|
|
6283
|
-
base64pad: ()=>
|
|
6284
|
-
base64url: ()=>
|
|
6341
|
+
base64: ()=>base64_base64,
|
|
6342
|
+
base64pad: ()=>base64_base64pad,
|
|
6343
|
+
base64url: ()=>base64_base64url,
|
|
6285
6344
|
base64urlpad: ()=>base64urlpad
|
|
6286
6345
|
}), base8_namespaceObject = {}, __webpack_require__.r(base8_namespaceObject), __webpack_require__.d(base8_namespaceObject, {
|
|
6287
6346
|
base8: ()=>base8
|
|
@@ -7518,8 +7577,8 @@ class HMAC extends utils_Hash {
|
|
|
7518
7577
|
this.destroyed = !0, this.oHash.destroy(), this.iHash.destroy();
|
|
7519
7578
|
}
|
|
7520
7579
|
}
|
|
7521
|
-
let
|
|
7522
|
-
|
|
7580
|
+
let hmac_hmac = (hash, key, message)=>new HMAC(hash, key).update(message).digest();
|
|
7581
|
+
hmac_hmac.create = (hash, key)=>new HMAC(hash, key);
|
|
7523
7582
|
let utils_0n = BigInt(0), utils_1n = BigInt(1), utils_hasHexBuiltin = 'function' == typeof Uint8Array.from([]).toHex && 'function' == typeof Uint8Array.fromHex, abstract_utils_hexes = Array.from({
|
|
7524
7583
|
length: 256
|
|
7525
7584
|
}, (_, i)=>i.toString(16).padStart(2, '0')), utils_asciis = {
|
|
@@ -8203,7 +8262,7 @@ let utils_0n = BigInt(0), utils_1n = BigInt(1), utils_hasHexBuiltin = 'function'
|
|
|
8203
8262
|
})({
|
|
8204
8263
|
...curveDef,
|
|
8205
8264
|
hash: hash,
|
|
8206
|
-
hmac: (key, ...msgs)=>
|
|
8265
|
+
hmac: (key, ...msgs)=>hmac_hmac(hash, key, function(...arrays) {
|
|
8207
8266
|
let sum = 0;
|
|
8208
8267
|
for(let i = 0; i < arrays.length; i++){
|
|
8209
8268
|
let a = arrays[i];
|
|
@@ -8371,7 +8430,7 @@ let aleph_te = new TextEncoder(), pinToAleph = async ({ cid, token, chain, verbo
|
|
|
8371
8430
|
return requestOptions = ((token, requestOptions)=>{
|
|
8372
8431
|
let [accessKeyId, secretAccessKey] = atob(token).split(':');
|
|
8373
8432
|
if (!accessKeyId || !secretAccessKey) throw Error('Missing access key ID and secret access key');
|
|
8374
|
-
let amzDate = new Date().toISOString().replace(/[:-]|\.\d{3}/g, '') + 'Z', dateStamp = amzDate.slice(0, 8), payloadHash =
|
|
8433
|
+
let amzDate = new Date().toISOString().replace(/[:-]|\.\d{3}/g, '') + 'Z', dateStamp = amzDate.slice(0, 8), payloadHash = createHash('sha256').update('').digest('hex'), canonicalHeaders = `content-length:${requestOptions.headers['Content-Length']}\nhost:${requestOptions.host}\nx-amz-content-sha256:${payloadHash}\nx-amz-date:${amzDate}\n`, signedHeaders = 'content-length;host;x-amz-content-sha256;x-amz-date', canonicalRequest = [
|
|
8375
8434
|
requestOptions.method,
|
|
8376
8435
|
requestOptions.path,
|
|
8377
8436
|
'',
|
|
@@ -8379,16 +8438,16 @@ let aleph_te = new TextEncoder(), pinToAleph = async ({ cid, token, chain, verbo
|
|
|
8379
8438
|
signedHeaders,
|
|
8380
8439
|
payloadHash
|
|
8381
8440
|
].join(`
|
|
8382
|
-
`), hashedCanonicalRequest =
|
|
8441
|
+
`), hashedCanonicalRequest = createHash('sha256').update(canonicalRequest).digest('hex'), credentialScope = `${dateStamp}/${requestOptions.region}/${requestOptions.service}/aws4_request`, stringToSign = [
|
|
8383
8442
|
'AWS4-HMAC-SHA256',
|
|
8384
8443
|
amzDate,
|
|
8385
8444
|
credentialScope,
|
|
8386
8445
|
hashedCanonicalRequest
|
|
8387
8446
|
].join(`
|
|
8388
|
-
`), kDate =
|
|
8447
|
+
`), kDate = createHmac('sha256', 'AWS4' + secretAccessKey).update(dateStamp).digest(), kRegion = createHmac('sha256', kDate).update(requestOptions.region).digest(), kService = createHmac('sha256', kRegion).update(requestOptions.service).digest(), kSigning = createHmac('sha256', kService).update('aws4_request').digest(), signature = createHmac('sha256', kSigning).update(stringToSign).digest('hex');
|
|
8389
8448
|
return requestOptions.headers['X-Amz-Content-Sha256'] = payloadHash, requestOptions.headers['X-Amz-Date'] = amzDate, requestOptions.headers.Authorization = `AWS4-HMAC-SHA256 Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`, requestOptions;
|
|
8390
8449
|
})(token, requestOptions), await fetch(`https://${requestOptions.host}/`, requestOptions).then((res)=>200 == res.status);
|
|
8391
|
-
}, utils_sign = (key, msg)=>new Uint8Array(
|
|
8450
|
+
}, utils_sign = (key, msg)=>new Uint8Array(createHmac('sha256', key).update(msg).digest());
|
|
8392
8451
|
class HttpRequest {
|
|
8393
8452
|
method;
|
|
8394
8453
|
protocol;
|
|
@@ -8462,9 +8521,9 @@ let createPresignedUrl = async ({ bucketName, apiUrl, file, token })=>{
|
|
|
8462
8521
|
'AWS4-HMAC-SHA256',
|
|
8463
8522
|
amzDate,
|
|
8464
8523
|
credentialScope,
|
|
8465
|
-
|
|
8524
|
+
createHash('sha256').update(canonicalRequest).digest('hex')
|
|
8466
8525
|
].join(`
|
|
8467
|
-
`),
|
|
8526
|
+
`), signature = createHmac('sha256', (kDate = utils_sign(`AWS4${secretAccessKey}`, dateStamp), kRegion = utils_sign(kDate, region), kService = utils_sign(kRegion, 's3'), utils_sign(kService, 'aws4_request'))).update(stringToSign).digest('hex');
|
|
8468
8527
|
return {
|
|
8469
8528
|
...request,
|
|
8470
8529
|
query: {
|
|
@@ -8586,7 +8645,7 @@ class digest_Digest {
|
|
|
8586
8645
|
}
|
|
8587
8646
|
let node_digest = (payload)=>{
|
|
8588
8647
|
let digest = new Uint8Array(digest_prefix.length + 32);
|
|
8589
|
-
return digest.set(digest_prefix, 0), digest.set(
|
|
8648
|
+
return digest.set(digest_prefix, 0), digest.set(node_crypto.createHash('sha256').update(payload).digest(), digest_prefix.length), new digest_Digest(digest);
|
|
8590
8649
|
}, sha256_code = 18, objectTypeNames = [
|
|
8591
8650
|
'Object',
|
|
8592
8651
|
'RegExp',
|
|
@@ -11280,7 +11339,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11280
11339
|
to,
|
|
11281
11340
|
data,
|
|
11282
11341
|
value,
|
|
11283
|
-
gas:
|
|
11342
|
+
gas: SIMULATION_GAS_LIMIT
|
|
11284
11343
|
},
|
|
11285
11344
|
'latest'
|
|
11286
11345
|
]
|
|
@@ -11291,23 +11350,12 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11291
11350
|
to,
|
|
11292
11351
|
data,
|
|
11293
11352
|
from,
|
|
11294
|
-
gas:
|
|
11353
|
+
gas: SIMULATION_GAS_LIMIT
|
|
11295
11354
|
},
|
|
11296
11355
|
'latest'
|
|
11297
11356
|
]
|
|
11298
11357
|
}), sendTransaction = async ({ provider, chainId, privateKey, to, data, from })=>{
|
|
11299
|
-
let
|
|
11300
|
-
method: 'eth_feeHistory',
|
|
11301
|
-
params: [
|
|
11302
|
-
'0x5',
|
|
11303
|
-
'latest',
|
|
11304
|
-
[
|
|
11305
|
-
10,
|
|
11306
|
-
50,
|
|
11307
|
-
90
|
|
11308
|
-
]
|
|
11309
|
-
]
|
|
11310
|
-
}), estimatedGas = await estimateGas({
|
|
11358
|
+
let estimatedGas = await estimateGas({
|
|
11311
11359
|
provider,
|
|
11312
11360
|
from,
|
|
11313
11361
|
to,
|
|
@@ -11320,9 +11368,17 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11320
11368
|
from,
|
|
11321
11369
|
'latest'
|
|
11322
11370
|
]
|
|
11323
|
-
})),
|
|
11324
|
-
|
|
11325
|
-
|
|
11371
|
+
})), block = await provider.request({
|
|
11372
|
+
method: 'eth_getBlockByNumber',
|
|
11373
|
+
params: [
|
|
11374
|
+
'latest',
|
|
11375
|
+
!1
|
|
11376
|
+
]
|
|
11377
|
+
});
|
|
11378
|
+
if (!block) throw Error('Failed to fetch latest block');
|
|
11379
|
+
let baseFeePerGas = toBigInt(block.baseFeePerGas || '0x0'), maxPriorityFeePerGas = toBigInt(await provider.request({
|
|
11380
|
+
method: 'eth_maxPriorityFeePerGas'
|
|
11381
|
+
})), envelope = function(envelope, options = {}) {
|
|
11326
11382
|
let { signature } = options, envelope_ = 'string' == typeof envelope ? function(serialized) {
|
|
11327
11383
|
var value;
|
|
11328
11384
|
let to_, transactionArray = (value = Hex_slice(serialized, 1), to_ = 'Hex', function decodeRlpCursor(cursor, to = 'Hex') {
|
|
@@ -11407,8 +11463,8 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11407
11463
|
};
|
|
11408
11464
|
}({
|
|
11409
11465
|
chainId,
|
|
11410
|
-
maxFeePerGas: 2n * baseFeePerGas +
|
|
11411
|
-
maxPriorityFeePerGas
|
|
11466
|
+
maxFeePerGas: 2n * baseFeePerGas + maxPriorityFeePerGas,
|
|
11467
|
+
maxPriorityFeePerGas,
|
|
11412
11468
|
to,
|
|
11413
11469
|
data,
|
|
11414
11470
|
value: 0n,
|
|
@@ -11441,43 +11497,44 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11441
11497
|
});
|
|
11442
11498
|
}, waitForTransaction = async (provider, hash)=>{
|
|
11443
11499
|
for(let attempt = 0; attempt < 10; attempt++){
|
|
11444
|
-
|
|
11445
|
-
|
|
11446
|
-
|
|
11447
|
-
|
|
11448
|
-
|
|
11449
|
-
|
|
11450
|
-
if (rawReceipt) {
|
|
11451
|
-
var receipt;
|
|
11452
|
-
if ('0x0' === rawReceipt.status) throw Error(`Transaction ${hash} reverted`);
|
|
11453
|
-
let chainId = await provider.request({
|
|
11454
|
-
method: 'eth_chainId'
|
|
11500
|
+
try {
|
|
11501
|
+
let rawReceipt = await provider.request({
|
|
11502
|
+
method: 'eth_getTransactionReceipt',
|
|
11503
|
+
params: [
|
|
11504
|
+
hash
|
|
11505
|
+
]
|
|
11455
11506
|
});
|
|
11456
|
-
|
|
11457
|
-
|
|
11458
|
-
|
|
11459
|
-
|
|
11460
|
-
|
|
11461
|
-
|
|
11462
|
-
|
|
11463
|
-
|
|
11464
|
-
|
|
11465
|
-
|
|
11466
|
-
|
|
11467
|
-
|
|
11468
|
-
|
|
11507
|
+
if (rawReceipt) {
|
|
11508
|
+
var receipt;
|
|
11509
|
+
if ('0x0' === rawReceipt.status) throw Error(`Transaction ${hash} reverted`);
|
|
11510
|
+
let chainId = await provider.request({
|
|
11511
|
+
method: 'eth_chainId'
|
|
11512
|
+
});
|
|
11513
|
+
return {
|
|
11514
|
+
...receipt = {
|
|
11515
|
+
...rawReceipt,
|
|
11516
|
+
chainId
|
|
11517
|
+
},
|
|
11518
|
+
blobGasPrice: receipt.blobGasPrice ? BigInt(receipt.blobGasPrice) : void 0,
|
|
11519
|
+
blobGasUsed: receipt.blobGasUsed ? BigInt(receipt.blobGasUsed) : void 0,
|
|
11520
|
+
blockNumber: BigInt(receipt.blockNumber ?? 0n),
|
|
11521
|
+
cumulativeGasUsed: BigInt(receipt.cumulativeGasUsed ?? 0n),
|
|
11522
|
+
effectiveGasPrice: BigInt(receipt.effectiveGasPrice ?? 0n),
|
|
11523
|
+
gasUsed: BigInt(receipt.gasUsed ?? 0n),
|
|
11524
|
+
logs: receipt.logs.map((log)=>({
|
|
11469
11525
|
...log,
|
|
11470
11526
|
blockNumber: log.blockNumber ? BigInt(log.blockNumber) : null,
|
|
11471
11527
|
logIndex: log.logIndex ? Number(log.logIndex) : null,
|
|
11472
11528
|
transactionIndex: log.transactionIndex ? Number(log.transactionIndex) : null
|
|
11473
|
-
}
|
|
11474
|
-
|
|
11475
|
-
|
|
11476
|
-
|
|
11477
|
-
|
|
11478
|
-
|
|
11479
|
-
|
|
11480
|
-
|
|
11529
|
+
})),
|
|
11530
|
+
status: fromRpcStatus[receipt.status],
|
|
11531
|
+
transactionIndex: Number(receipt.transactionIndex ?? 0),
|
|
11532
|
+
type: fromRpcType[receipt.type] || receipt.type
|
|
11533
|
+
};
|
|
11534
|
+
}
|
|
11535
|
+
} catch (error) {
|
|
11536
|
+
if (-32603 === error.code || error.message?.includes('receipt not found')) ;
|
|
11537
|
+
else throw error;
|
|
11481
11538
|
}
|
|
11482
11539
|
let delay = Math.min(1000 * 2 ** attempt, 30000);
|
|
11483
11540
|
await promises_setTimeout(delay);
|
|
@@ -11527,7 +11584,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11527
11584
|
calls
|
|
11528
11585
|
]),
|
|
11529
11586
|
to: chain.contracts.multicall3.address,
|
|
11530
|
-
gas:
|
|
11587
|
+
gas: SIMULATION_GAS_LIMIT
|
|
11531
11588
|
},
|
|
11532
11589
|
'latest'
|
|
11533
11590
|
]
|
|
@@ -11839,7 +11896,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11839
11896
|
key: 'withIPFSIndexing',
|
|
11840
11897
|
value: ''
|
|
11841
11898
|
}
|
|
11842
|
-
], createDataSet_keys = metadata.map((item)=>item.key), createDataSet_values = metadata.map((item)=>item.value), priceBuffer = function(
|
|
11899
|
+
], createDataSet_keys = metadata.map((item)=>item.key), createDataSet_values = metadata.map((item)=>item.value), priceBuffer = function(decimals = 0) {
|
|
11843
11900
|
if (!/^(-?)([0-9]*)\.?([0-9]*)$/.test('0.5')) throw new InvalidDecimalNumberError({
|
|
11844
11901
|
value: '0.5'
|
|
11845
11902
|
});
|
|
@@ -11854,7 +11911,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11854
11911
|
(fraction = rounded > 9 ? `${BigInt(left) + BigInt(1)}0`.padStart(left.length + 1, '0') : `${left}${rounded}`).length > decimals && (fraction = fraction.slice(1), integer = `${BigInt(integer) + 1n}`), fraction = fraction.slice(0, decimals);
|
|
11855
11912
|
} else fraction = fraction.padEnd(decimals, '0');
|
|
11856
11913
|
return BigInt(`${negative ? '-' : ''}${integer}${fraction}`);
|
|
11857
|
-
}(
|
|
11914
|
+
}(18), createDataSet = async ({ providerURL, privateKey, payee, address: payer, verbose, chain, perMonth })=>{
|
|
11858
11915
|
let provider = filProvider[chain.id], [funds] = await getAccountInfo({
|
|
11859
11916
|
address: payer,
|
|
11860
11917
|
chain
|
|
@@ -11870,7 +11927,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
11870
11927
|
});
|
|
11871
11928
|
logger.info(`Transaction pending: ${chain.blockExplorer}/tx/${hash}`), await waitForTransaction(provider, hash), logger.success('Transaction succeeded');
|
|
11872
11929
|
}
|
|
11873
|
-
let clientDataSetId = BigInt(
|
|
11930
|
+
let clientDataSetId = BigInt(randomInt(100000000));
|
|
11874
11931
|
logger.info(`Client data set ID: ${clientDataSetId}`);
|
|
11875
11932
|
let recordKeeper = chain.contracts.storage.address;
|
|
11876
11933
|
logger.info(`Record keeper address: ${recordKeeper}`);
|
|
@@ -12736,7 +12793,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
|
|
|
12736
12793
|
verbose,
|
|
12737
12794
|
datasetId,
|
|
12738
12795
|
privateKey,
|
|
12739
|
-
nonce: BigInt(
|
|
12796
|
+
nonce: BigInt(randomInt(100000000)),
|
|
12740
12797
|
clientDataSetId,
|
|
12741
12798
|
chain
|
|
12742
12799
|
});
|
|
@@ -13196,17 +13253,17 @@ class DIDKey extends did_DID {
|
|
|
13196
13253
|
return `did:key:${base58_base58btc.encode(this)}`;
|
|
13197
13254
|
}
|
|
13198
13255
|
}
|
|
13199
|
-
let
|
|
13256
|
+
let base64_base64 = rfc4648({
|
|
13200
13257
|
prefix: 'm',
|
|
13201
13258
|
name: 'base64',
|
|
13202
13259
|
alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/',
|
|
13203
13260
|
bitsPerChar: 6
|
|
13204
|
-
}),
|
|
13261
|
+
}), base64_base64pad = rfc4648({
|
|
13205
13262
|
prefix: 'M',
|
|
13206
13263
|
name: 'base64pad',
|
|
13207
13264
|
alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=',
|
|
13208
13265
|
bitsPerChar: 6
|
|
13209
|
-
}),
|
|
13266
|
+
}), base64_base64url = rfc4648({
|
|
13210
13267
|
prefix: 'u',
|
|
13211
13268
|
name: 'base64url',
|
|
13212
13269
|
alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_',
|
|
@@ -13318,7 +13375,7 @@ let signature_algorithm = (signature)=>{
|
|
|
13318
13375
|
return signature_view(bytes);
|
|
13319
13376
|
}, signature_toJSON = (signature)=>({
|
|
13320
13377
|
"/": {
|
|
13321
|
-
bytes:
|
|
13378
|
+
bytes: base64_base64.baseEncode(signature)
|
|
13322
13379
|
}
|
|
13323
13380
|
}), withDID = (key, id)=>new VerifierWithDID(id, key);
|
|
13324
13381
|
class VerifierWithDID {
|
|
@@ -13424,7 +13481,7 @@ let PRIVATE_TAG_SIZE = encodingLength(0x1300), signer_SIZE = PRIVATE_TAG_SIZE +
|
|
|
13424
13481
|
if (0xed !== code) throw Error("Given bytes must contain public key in multiformats with 237 tag");
|
|
13425
13482
|
}
|
|
13426
13483
|
return new Ed25519Signer(bytes);
|
|
13427
|
-
}, signer_parse = (principal, decoder)=>signer_decode((decoder ||
|
|
13484
|
+
}, signer_parse = (principal, decoder)=>signer_decode((decoder || base64_base64pad).decode(principal));
|
|
13428
13485
|
class Ed25519Signer extends Uint8Array {
|
|
13429
13486
|
get code() {
|
|
13430
13487
|
return 0x1300;
|
|
@@ -13951,7 +14008,7 @@ class DagJsonTokenizer extends Tokenizer {
|
|
|
13951
14008
|
let breakToken = this._next();
|
|
13952
14009
|
if (!Type.equals(breakToken.type, Type.break)) throw Error('Invalid encoded Bytes form');
|
|
13953
14010
|
}
|
|
13954
|
-
let bytes =
|
|
14011
|
+
let bytes = base64_base64.decode(`m${innerValueToken.value}`);
|
|
13955
14012
|
return new Token(Type.bytes, bytes, innerValueToken.value.length);
|
|
13956
14013
|
}
|
|
13957
14014
|
this.tokenBuffer.push(innerValueToken);
|
|
@@ -13990,7 +14047,7 @@ let dag_json_src_encode = (node)=>{
|
|
|
13990
14047
|
}, options), decode_decode(buf, options);
|
|
13991
14048
|
};
|
|
13992
14049
|
new TextDecoder(), new TextEncoder();
|
|
13993
|
-
let formatHeader = (version, alg)=>
|
|
14050
|
+
let formatHeader = (version, alg)=>base64_base64url.baseEncode(encodeHeader(version, alg)), formatPayload = (data)=>base64_base64url.baseEncode(encodePayload(data)), encodeHeader = (v, alg)=>dag_json_src_encode({
|
|
13994
14051
|
alg,
|
|
13995
14052
|
ucv: v,
|
|
13996
14053
|
typ: "JWT"
|
|
@@ -14134,7 +14191,7 @@ class CBORView extends View {
|
|
|
14134
14191
|
format() {
|
|
14135
14192
|
var model;
|
|
14136
14193
|
let header, payload, signature, signature1;
|
|
14137
|
-
return header = formatHeader((model = this.model).v, model.s.algorithm), payload = formatPayload(model), signature1 = model.s, signature =
|
|
14194
|
+
return header = formatHeader((model = this.model).v, model.s.algorithm), payload = formatPayload(model), signature1 = model.s, signature = base64_base64url.baseEncode(signature1.raw), `${header}.${payload}.${signature}`;
|
|
14138
14195
|
}
|
|
14139
14196
|
encode() {
|
|
14140
14197
|
return codec_cbor_encode(this.model);
|
|
@@ -14158,11 +14215,11 @@ class JWTView extends View {
|
|
|
14158
14215
|
let hashes_sha2_sha256 = hasher_from({
|
|
14159
14216
|
name: 'sha2-256',
|
|
14160
14217
|
code: 0x12,
|
|
14161
|
-
encode: (input)=>coerce(
|
|
14218
|
+
encode: (input)=>coerce(crypto_0.createHash('sha256').update(input).digest())
|
|
14162
14219
|
}), sha2_sha512 = hasher_from({
|
|
14163
14220
|
name: 'sha2-512',
|
|
14164
14221
|
code: 0x13,
|
|
14165
|
-
encode: (input)=>coerce(
|
|
14222
|
+
encode: (input)=>coerce(crypto_0.createHash('sha512').update(input).digest())
|
|
14166
14223
|
}), defaultHasher = hashes_sha2_sha256, write = async (ucan, { hasher = defaultHasher } = {})=>{
|
|
14167
14224
|
let [code, bytes] = ucan.jwt ? [
|
|
14168
14225
|
raw_code,
|
|
@@ -14481,7 +14538,7 @@ class CarBufferReader {
|
|
|
14481
14538
|
return new CarBufferReader(header, blocks);
|
|
14482
14539
|
}
|
|
14483
14540
|
}
|
|
14484
|
-
let fsread =
|
|
14541
|
+
let fsread = fs_0.readSync;
|
|
14485
14542
|
class buffer_reader_CarBufferReader extends CarBufferReader {
|
|
14486
14543
|
static readRaw(fd, blockIndex) {
|
|
14487
14544
|
let read, { cid, blockLength, blockOffset } = blockIndex, bytes = new Uint8Array(blockLength);
|
|
@@ -15592,7 +15649,7 @@ let delegation_archive = async (delegation)=>{
|
|
|
15592
15649
|
...((jwt)=>{
|
|
15593
15650
|
var raw;
|
|
15594
15651
|
let source, code, segments = jwt.split("."), [header, payload, signature] = 3 === segments.length ? segments : schema_fail(`Can't parse UCAN: ${jwt}: Expected JWT format: 3 dot-separated base64url-encoded values.`), { ucv, alg } = ((header)=>{
|
|
15595
|
-
let { ucv, alg, typ } = dag_json_src_decode(
|
|
15652
|
+
let { ucv, alg, typ } = dag_json_src_decode(base64_base64url.baseDecode(header));
|
|
15596
15653
|
return {
|
|
15597
15654
|
typ: "JWT" === typ ? "JWT" : schema_ParseError.throw(`Expected typ to be a ${JSON.stringify("JWT")} instead got ${JSON.stringify(typ)}`),
|
|
15598
15655
|
ucv: readVersion(ucv, "ucv"),
|
|
@@ -15600,12 +15657,12 @@ let delegation_archive = async (delegation)=>{
|
|
|
15600
15657
|
};
|
|
15601
15658
|
})(header);
|
|
15602
15659
|
return {
|
|
15603
|
-
...(source = payload, readPayloadWith(dag_json_src_decode(
|
|
15660
|
+
...(source = payload, readPayloadWith(dag_json_src_decode(base64_base64url.baseDecode(source)), {
|
|
15604
15661
|
readPrincipal: readStringPrincipal,
|
|
15605
15662
|
readProof: readStringProof
|
|
15606
15663
|
})),
|
|
15607
15664
|
v: ucv,
|
|
15608
|
-
s: (raw =
|
|
15665
|
+
s: (raw = base64_base64url.baseDecode(signature), 0xd000 === (code = ((name)=>{
|
|
15609
15666
|
switch(name){
|
|
15610
15667
|
case "ES256K":
|
|
15611
15668
|
return 0xd0e7;
|
|
@@ -16222,7 +16279,7 @@ class Agent {
|
|
|
16222
16279
|
}
|
|
16223
16280
|
}
|
|
16224
16281
|
let proof_parse = async (str)=>{
|
|
16225
|
-
let cid = link_parse(str,
|
|
16282
|
+
let cid = link_parse(str, base64_base64);
|
|
16226
16283
|
if (cid.code !== car_code) throw Error(`non CAR codec found: 0x${cid.code.toString(16)}`);
|
|
16227
16284
|
let { ok, error } = await delegation_extract(cid.multihash.digest);
|
|
16228
16285
|
if (error) throw Error('failed to extract delegation', {
|
|
@@ -16235,9 +16292,6 @@ class SharedSpace {
|
|
|
16235
16292
|
constructor(model){
|
|
16236
16293
|
this.model = model;
|
|
16237
16294
|
}
|
|
16238
|
-
get meta() {
|
|
16239
|
-
return this.model.meta;
|
|
16240
|
-
}
|
|
16241
16295
|
did() {
|
|
16242
16296
|
return this.model.id;
|
|
16243
16297
|
}
|
|
@@ -16249,14 +16303,12 @@ let fromDelegation = ({ facts, capabilities })=>{
|
|
|
16249
16303
|
if (result.error) throw Error(`Invalid delegation, expected capabilities[0].with to be DID, ${result.error}`, {
|
|
16250
16304
|
cause: result.error
|
|
16251
16305
|
});
|
|
16252
|
-
let meta = facts[0]?.space ?? {};
|
|
16253
16306
|
return new SharedSpace({
|
|
16254
16307
|
id: result.ok,
|
|
16255
16308
|
delegation: {
|
|
16256
16309
|
facts,
|
|
16257
16310
|
capabilities
|
|
16258
|
-
}
|
|
16259
|
-
meta
|
|
16311
|
+
}
|
|
16260
16312
|
});
|
|
16261
16313
|
};
|
|
16262
16314
|
class Piece {
|
|
@@ -17207,26 +17259,7 @@ capability_capability({
|
|
|
17207
17259
|
}),
|
|
17208
17260
|
derives: (claim, from)=>utils_and(equalWith(claim, from)) ?? checkLink(claim.nb.proof, from.nb.proof, 'nb.proof')
|
|
17209
17261
|
});
|
|
17210
|
-
|
|
17211
|
-
name = 'ReceiptNotFound';
|
|
17212
|
-
taskCid;
|
|
17213
|
-
constructor(taskCid){
|
|
17214
|
-
super(), this.taskCid = taskCid;
|
|
17215
|
-
}
|
|
17216
|
-
get reason() {
|
|
17217
|
-
return `receipt not found for task ${this.taskCid} in the indexed workflow`;
|
|
17218
|
-
}
|
|
17219
|
-
}
|
|
17220
|
-
class ReceiptMissing extends Error {
|
|
17221
|
-
name = 'ReceiptMissing';
|
|
17222
|
-
taskCid;
|
|
17223
|
-
constructor(taskCid){
|
|
17224
|
-
super(), this.taskCid = taskCid;
|
|
17225
|
-
}
|
|
17226
|
-
get reason() {
|
|
17227
|
-
return `receipt missing for task ${this.taskCid}`;
|
|
17228
|
-
}
|
|
17229
|
-
}
|
|
17262
|
+
let WAIT_MS = 3000;
|
|
17230
17263
|
blob_add.can;
|
|
17231
17264
|
let blob_add_input = (digest, size)=>({
|
|
17232
17265
|
blob: {
|
|
@@ -17234,7 +17267,6 @@ let blob_add_input = (digest, size)=>({
|
|
|
17234
17267
|
size
|
|
17235
17268
|
}
|
|
17236
17269
|
}), CAR_V1_VERSION = 1;
|
|
17237
|
-
__rspack_external_fs.default;
|
|
17238
17270
|
class CarWriter {
|
|
17239
17271
|
constructor(roots, encoder){
|
|
17240
17272
|
this._encoder = encoder, this._mutex = encoder.setRoots(roots), this._ended = !1;
|
|
@@ -17322,7 +17354,7 @@ class writer_CarWriter extends CarWriter {
|
|
|
17322
17354
|
let bytes, readChunk, offset = 0;
|
|
17323
17355
|
if ('number' == typeof fd) readChunk = async ()=>{
|
|
17324
17356
|
var buffer, position;
|
|
17325
|
-
return (await (buffer = bytes, position = offset, !_fsReadFn && (_fsReadFn = promisify(
|
|
17357
|
+
return (await (buffer = bytes, position = offset, !_fsReadFn && (_fsReadFn = promisify(fs_0.read)), _fsReadFn(fd, buffer, 0, 256, position))).bytesRead;
|
|
17326
17358
|
};
|
|
17327
17359
|
else if ('object' == typeof fd && 'function' == typeof fd.read) readChunk = async ()=>(await fd.read(bytes, 0, 256, offset)).bytesRead;
|
|
17328
17360
|
else throw TypeError('Bad fd');
|
|
@@ -17336,7 +17368,7 @@ class writer_CarWriter extends CarWriter {
|
|
|
17336
17368
|
if (fdReader.pos !== newHeader.length) throw Error(`updateRoots() can only overwrite a header of the same length (old header is ${fdReader.pos} bytes, new header is ${newHeader.length} bytes)`);
|
|
17337
17369
|
if ('number' == typeof fd) {
|
|
17338
17370
|
var length;
|
|
17339
|
-
await (length = newHeader.length, !_fsWriteFn && (_fsWriteFn = promisify(
|
|
17371
|
+
await (length = newHeader.length, !_fsWriteFn && (_fsWriteFn = promisify(fs_0.write)), _fsWriteFn(fd, newHeader, 0, length, 0));
|
|
17340
17372
|
} else 'object' == typeof fd && 'function' == typeof fd.read && await fd.write(newHeader, 0, newHeader.length, 0);
|
|
17341
17373
|
}
|
|
17342
17374
|
}
|
|
@@ -17525,18 +17557,12 @@ let filecoin_offer_filecoinOffer = async (conf, piece, content)=>filecoinOffer.i
|
|
|
17525
17557
|
});
|
|
17526
17558
|
}
|
|
17527
17559
|
};
|
|
17528
|
-
_computedKey = Symbol.
|
|
17560
|
+
_computedKey = Symbol.iterator;
|
|
17529
17561
|
class DigestMap {
|
|
17530
17562
|
#data;
|
|
17531
17563
|
constructor(entries){
|
|
17532
17564
|
for (let [k, v] of (this.#data = new Map(), entries ?? []))this.set(k, v);
|
|
17533
17565
|
}
|
|
17534
|
-
get [_computedKey]() {
|
|
17535
|
-
return 'DigestMap';
|
|
17536
|
-
}
|
|
17537
|
-
clear() {
|
|
17538
|
-
this.#data.clear();
|
|
17539
|
-
}
|
|
17540
17566
|
get(key) {
|
|
17541
17567
|
let data = this.#data.get(toBase58String(key));
|
|
17542
17568
|
if (data) return data[1];
|
|
@@ -17547,7 +17573,7 @@ class DigestMap {
|
|
|
17547
17573
|
value
|
|
17548
17574
|
]), this;
|
|
17549
17575
|
}
|
|
17550
|
-
[
|
|
17576
|
+
[_computedKey]() {
|
|
17551
17577
|
return this.entries();
|
|
17552
17578
|
}
|
|
17553
17579
|
*entries() {
|
|
@@ -18385,14 +18411,17 @@ let fixedSize = (options = {})=>{
|
|
|
18385
18411
|
let chunkSize = options.chunkSize ?? 262144;
|
|
18386
18412
|
return async function*(source) {
|
|
18387
18413
|
let list = new Uint8ArrayList(), currentLength = 0, emitted = !1;
|
|
18388
|
-
for await (let buffer of source)for(list.append(buffer), currentLength += buffer.length; currentLength >= chunkSize;)
|
|
18389
|
-
|
|
18390
|
-
|
|
18391
|
-
|
|
18414
|
+
for await (let buffer of source)for(list.append(buffer), currentLength += buffer.length; currentLength >= chunkSize;){
|
|
18415
|
+
let buf = list.subarray(0, chunkSize);
|
|
18416
|
+
if (yield buf, emitted = !0, chunkSize === list.length) list = new Uint8ArrayList(), currentLength = 0;
|
|
18417
|
+
else {
|
|
18418
|
+
let newBl = new Uint8ArrayList();
|
|
18419
|
+
newBl.append(list.sublist(chunkSize)), list = newBl, currentLength -= chunkSize;
|
|
18420
|
+
}
|
|
18392
18421
|
}
|
|
18393
18422
|
(!emitted || currentLength > 0) && (yield list.subarray(0, currentLength));
|
|
18394
18423
|
};
|
|
18395
|
-
}, pb_decode_textDecoder = new TextDecoder(), pb_encode_textEncoder = new TextEncoder(), pb_encode_maxInt32 = 4294967296, maxUInt32 = 2147483648, len8tab = [
|
|
18424
|
+
}, DEFAULT_SHARD_HASH_CODE = BigInt(0x22), DEFAULT_SHARD_SPLIT_STRATEGY = 'links-bytes', DEFAULT_CHUNK_SIZE_1MIB = 1048576, pb_decode_textDecoder = new TextDecoder(), pb_encode_textEncoder = new TextEncoder(), pb_encode_maxInt32 = 4294967296, maxUInt32 = 2147483648, len8tab = [
|
|
18396
18425
|
0,
|
|
18397
18426
|
1,
|
|
18398
18427
|
2,
|
|
@@ -19336,7 +19365,7 @@ class CustomProgressEvent extends Event {
|
|
|
19336
19365
|
}
|
|
19337
19366
|
let persist = async (buffer, blockstore, options)=>{
|
|
19338
19367
|
null == options.codec && (options.codec = dag_pb_src_namespaceObject);
|
|
19339
|
-
let multihash = await hashes_sha2_sha256.digest(buffer), cid = cid_CID.create(options.cidVersion, options.codec.code, multihash);
|
|
19368
|
+
let multihash = await hashes_sha2_sha256.digest(buffer), cid = cid_CID.create(options.cidVersion ?? 1, options.codec.code, multihash);
|
|
19340
19369
|
return await blockstore.put(cid, buffer, options), cid;
|
|
19341
19370
|
};
|
|
19342
19371
|
class errors_InvalidParametersError extends Error {
|
|
@@ -19384,6 +19413,12 @@ class InvalidContentError extends Error {
|
|
|
19384
19413
|
super(message);
|
|
19385
19414
|
}
|
|
19386
19415
|
}
|
|
19416
|
+
class InvalidShardingStrategyError extends Error {
|
|
19417
|
+
static name = 'InvalidShardingStrategyError';
|
|
19418
|
+
static code = 'ERR_SHARDING_STRATEGY';
|
|
19419
|
+
name = InvalidContentError.name;
|
|
19420
|
+
code = InvalidContentError.code;
|
|
19421
|
+
}
|
|
19387
19422
|
let defaultDirBuilder = async (dir, blockstore, options)=>{
|
|
19388
19423
|
let unixfs = new UnixFS({
|
|
19389
19424
|
type: 'directory',
|
|
@@ -19416,13 +19451,13 @@ let defaultDirBuilder = async (dir, blockstore, options)=>{
|
|
|
19416
19451
|
cidVersion: options.cidVersion
|
|
19417
19452
|
}), leaf.size = BigInt(leaf.block.length)), options.onProgress?.(new CustomProgressEvent('unixfs:importer:progress:file:layout', {
|
|
19418
19453
|
cid: leaf.cid,
|
|
19419
|
-
path:
|
|
19454
|
+
path: file.originalPath
|
|
19420
19455
|
})), {
|
|
19421
19456
|
cid: leaf.cid,
|
|
19422
19457
|
path: file.path,
|
|
19423
19458
|
unixfs: leaf.unixfs,
|
|
19424
19459
|
size: leaf.size,
|
|
19425
|
-
originalPath:
|
|
19460
|
+
originalPath: file.originalPath
|
|
19426
19461
|
};
|
|
19427
19462
|
}
|
|
19428
19463
|
let f = new UnixFS({
|
|
@@ -19476,177 +19511,588 @@ class Dir {
|
|
|
19476
19511
|
this.options = options ?? {}, this.root = props.root, this.dir = props.dir, this.path = props.path, this.dirty = props.dirty, this.flat = props.flat, this.parent = props.parent, this.parentKey = props.parentKey, this.unixfs = props.unixfs, this.mode = props.mode, this.mtime = props.mtime;
|
|
19477
19512
|
}
|
|
19478
19513
|
}
|
|
19479
|
-
let
|
|
19480
|
-
|
|
19481
|
-
|
|
19482
|
-
|
|
19483
|
-
|
|
19484
|
-
|
|
19485
|
-
|
|
19486
|
-
|
|
19487
|
-
|
|
19488
|
-
|
|
19489
|
-
|
|
19490
|
-
|
|
19491
|
-
|
|
19492
|
-
|
|
19493
|
-
|
|
19494
|
-
|
|
19495
|
-
|
|
19496
|
-
|
|
19497
|
-
|
|
19498
|
-
|
|
19499
|
-
|
|
19500
|
-
|
|
19501
|
-
|
|
19502
|
-
|
|
19503
|
-
|
|
19504
|
-
|
|
19505
|
-
|
|
19506
|
-
|
|
19507
|
-
|
|
19508
|
-
|
|
19509
|
-
|
|
19510
|
-
|
|
19511
|
-
|
|
19512
|
-
|
|
19513
|
-
|
|
19514
|
-
|
|
19515
|
-
|
|
19516
|
-
|
|
19517
|
-
|
|
19518
|
-
|
|
19519
|
-
|
|
19520
|
-
|
|
19521
|
-
|
|
19522
|
-
|
|
19523
|
-
|
|
19524
|
-
|
|
19525
|
-
|
|
19526
|
-
|
|
19527
|
-
|
|
19528
|
-
|
|
19529
|
-
|
|
19530
|
-
|
|
19531
|
-
|
|
19532
|
-
|
|
19533
|
-
|
|
19534
|
-
|
|
19535
|
-
|
|
19536
|
-
|
|
19537
|
-
|
|
19538
|
-
|
|
19539
|
-
|
|
19540
|
-
|
|
19541
|
-
|
|
19542
|
-
|
|
19543
|
-
|
|
19544
|
-
|
|
19545
|
-
|
|
19546
|
-
|
|
19547
|
-
|
|
19548
|
-
|
|
19549
|
-
|
|
19550
|
-
|
|
19551
|
-
|
|
19552
|
-
|
|
19553
|
-
|
|
19554
|
-
|
|
19555
|
-
|
|
19556
|
-
|
|
19557
|
-
|
|
19558
|
-
|
|
19559
|
-
|
|
19560
|
-
|
|
19561
|
-
|
|
19562
|
-
|
|
19563
|
-
|
|
19564
|
-
|
|
19565
|
-
|
|
19566
|
-
|
|
19567
|
-
|
|
19568
|
-
|
|
19569
|
-
|
|
19570
|
-
|
|
19571
|
-
|
|
19572
|
-
|
|
19573
|
-
|
|
19574
|
-
|
|
19575
|
-
|
|
19576
|
-
|
|
19577
|
-
|
|
19578
|
-
|
|
19579
|
-
|
|
19580
|
-
|
|
19581
|
-
|
|
19582
|
-
|
|
19583
|
-
|
|
19584
|
-
|
|
19585
|
-
|
|
19586
|
-
|
|
19587
|
-
|
|
19588
|
-
|
|
19589
|
-
|
|
19590
|
-
|
|
19591
|
-
|
|
19592
|
-
|
|
19593
|
-
|
|
19594
|
-
|
|
19595
|
-
|
|
19596
|
-
|
|
19597
|
-
|
|
19598
|
-
|
|
19599
|
-
|
|
19600
|
-
|
|
19601
|
-
|
|
19602
|
-
|
|
19603
|
-
|
|
19604
|
-
|
|
19605
|
-
|
|
19606
|
-
|
|
19607
|
-
|
|
19608
|
-
|
|
19609
|
-
|
|
19610
|
-
|
|
19611
|
-
|
|
19612
|
-
|
|
19613
|
-
|
|
19614
|
-
|
|
19615
|
-
|
|
19616
|
-
|
|
19617
|
-
|
|
19618
|
-
|
|
19619
|
-
|
|
19620
|
-
|
|
19621
|
-
|
|
19622
|
-
|
|
19623
|
-
|
|
19624
|
-
|
|
19625
|
-
|
|
19626
|
-
|
|
19627
|
-
|
|
19628
|
-
|
|
19629
|
-
|
|
19630
|
-
|
|
19631
|
-
|
|
19632
|
-
|
|
19633
|
-
|
|
19634
|
-
|
|
19635
|
-
|
|
19636
|
-
|
|
19637
|
-
|
|
19638
|
-
|
|
19639
|
-
|
|
19640
|
-
|
|
19641
|
-
|
|
19642
|
-
|
|
19643
|
-
|
|
19644
|
-
|
|
19645
|
-
|
|
19646
|
-
|
|
19647
|
-
|
|
19648
|
-
|
|
19649
|
-
|
|
19514
|
+
let pb_size_maxInt32 = 4294967296, pb_size_len8tab = [
|
|
19515
|
+
0,
|
|
19516
|
+
1,
|
|
19517
|
+
2,
|
|
19518
|
+
2,
|
|
19519
|
+
3,
|
|
19520
|
+
3,
|
|
19521
|
+
3,
|
|
19522
|
+
3,
|
|
19523
|
+
4,
|
|
19524
|
+
4,
|
|
19525
|
+
4,
|
|
19526
|
+
4,
|
|
19527
|
+
4,
|
|
19528
|
+
4,
|
|
19529
|
+
4,
|
|
19530
|
+
4,
|
|
19531
|
+
5,
|
|
19532
|
+
5,
|
|
19533
|
+
5,
|
|
19534
|
+
5,
|
|
19535
|
+
5,
|
|
19536
|
+
5,
|
|
19537
|
+
5,
|
|
19538
|
+
5,
|
|
19539
|
+
5,
|
|
19540
|
+
5,
|
|
19541
|
+
5,
|
|
19542
|
+
5,
|
|
19543
|
+
5,
|
|
19544
|
+
5,
|
|
19545
|
+
5,
|
|
19546
|
+
5,
|
|
19547
|
+
6,
|
|
19548
|
+
6,
|
|
19549
|
+
6,
|
|
19550
|
+
6,
|
|
19551
|
+
6,
|
|
19552
|
+
6,
|
|
19553
|
+
6,
|
|
19554
|
+
6,
|
|
19555
|
+
6,
|
|
19556
|
+
6,
|
|
19557
|
+
6,
|
|
19558
|
+
6,
|
|
19559
|
+
6,
|
|
19560
|
+
6,
|
|
19561
|
+
6,
|
|
19562
|
+
6,
|
|
19563
|
+
6,
|
|
19564
|
+
6,
|
|
19565
|
+
6,
|
|
19566
|
+
6,
|
|
19567
|
+
6,
|
|
19568
|
+
6,
|
|
19569
|
+
6,
|
|
19570
|
+
6,
|
|
19571
|
+
6,
|
|
19572
|
+
6,
|
|
19573
|
+
6,
|
|
19574
|
+
6,
|
|
19575
|
+
6,
|
|
19576
|
+
6,
|
|
19577
|
+
6,
|
|
19578
|
+
6,
|
|
19579
|
+
7,
|
|
19580
|
+
7,
|
|
19581
|
+
7,
|
|
19582
|
+
7,
|
|
19583
|
+
7,
|
|
19584
|
+
7,
|
|
19585
|
+
7,
|
|
19586
|
+
7,
|
|
19587
|
+
7,
|
|
19588
|
+
7,
|
|
19589
|
+
7,
|
|
19590
|
+
7,
|
|
19591
|
+
7,
|
|
19592
|
+
7,
|
|
19593
|
+
7,
|
|
19594
|
+
7,
|
|
19595
|
+
7,
|
|
19596
|
+
7,
|
|
19597
|
+
7,
|
|
19598
|
+
7,
|
|
19599
|
+
7,
|
|
19600
|
+
7,
|
|
19601
|
+
7,
|
|
19602
|
+
7,
|
|
19603
|
+
7,
|
|
19604
|
+
7,
|
|
19605
|
+
7,
|
|
19606
|
+
7,
|
|
19607
|
+
7,
|
|
19608
|
+
7,
|
|
19609
|
+
7,
|
|
19610
|
+
7,
|
|
19611
|
+
7,
|
|
19612
|
+
7,
|
|
19613
|
+
7,
|
|
19614
|
+
7,
|
|
19615
|
+
7,
|
|
19616
|
+
7,
|
|
19617
|
+
7,
|
|
19618
|
+
7,
|
|
19619
|
+
7,
|
|
19620
|
+
7,
|
|
19621
|
+
7,
|
|
19622
|
+
7,
|
|
19623
|
+
7,
|
|
19624
|
+
7,
|
|
19625
|
+
7,
|
|
19626
|
+
7,
|
|
19627
|
+
7,
|
|
19628
|
+
7,
|
|
19629
|
+
7,
|
|
19630
|
+
7,
|
|
19631
|
+
7,
|
|
19632
|
+
7,
|
|
19633
|
+
7,
|
|
19634
|
+
7,
|
|
19635
|
+
7,
|
|
19636
|
+
7,
|
|
19637
|
+
7,
|
|
19638
|
+
7,
|
|
19639
|
+
7,
|
|
19640
|
+
7,
|
|
19641
|
+
7,
|
|
19642
|
+
7,
|
|
19643
|
+
8,
|
|
19644
|
+
8,
|
|
19645
|
+
8,
|
|
19646
|
+
8,
|
|
19647
|
+
8,
|
|
19648
|
+
8,
|
|
19649
|
+
8,
|
|
19650
|
+
8,
|
|
19651
|
+
8,
|
|
19652
|
+
8,
|
|
19653
|
+
8,
|
|
19654
|
+
8,
|
|
19655
|
+
8,
|
|
19656
|
+
8,
|
|
19657
|
+
8,
|
|
19658
|
+
8,
|
|
19659
|
+
8,
|
|
19660
|
+
8,
|
|
19661
|
+
8,
|
|
19662
|
+
8,
|
|
19663
|
+
8,
|
|
19664
|
+
8,
|
|
19665
|
+
8,
|
|
19666
|
+
8,
|
|
19667
|
+
8,
|
|
19668
|
+
8,
|
|
19669
|
+
8,
|
|
19670
|
+
8,
|
|
19671
|
+
8,
|
|
19672
|
+
8,
|
|
19673
|
+
8,
|
|
19674
|
+
8,
|
|
19675
|
+
8,
|
|
19676
|
+
8,
|
|
19677
|
+
8,
|
|
19678
|
+
8,
|
|
19679
|
+
8,
|
|
19680
|
+
8,
|
|
19681
|
+
8,
|
|
19682
|
+
8,
|
|
19683
|
+
8,
|
|
19684
|
+
8,
|
|
19685
|
+
8,
|
|
19686
|
+
8,
|
|
19687
|
+
8,
|
|
19688
|
+
8,
|
|
19689
|
+
8,
|
|
19690
|
+
8,
|
|
19691
|
+
8,
|
|
19692
|
+
8,
|
|
19693
|
+
8,
|
|
19694
|
+
8,
|
|
19695
|
+
8,
|
|
19696
|
+
8,
|
|
19697
|
+
8,
|
|
19698
|
+
8,
|
|
19699
|
+
8,
|
|
19700
|
+
8,
|
|
19701
|
+
8,
|
|
19702
|
+
8,
|
|
19703
|
+
8,
|
|
19704
|
+
8,
|
|
19705
|
+
8,
|
|
19706
|
+
8,
|
|
19707
|
+
8,
|
|
19708
|
+
8,
|
|
19709
|
+
8,
|
|
19710
|
+
8,
|
|
19711
|
+
8,
|
|
19712
|
+
8,
|
|
19713
|
+
8,
|
|
19714
|
+
8,
|
|
19715
|
+
8,
|
|
19716
|
+
8,
|
|
19717
|
+
8,
|
|
19718
|
+
8,
|
|
19719
|
+
8,
|
|
19720
|
+
8,
|
|
19721
|
+
8,
|
|
19722
|
+
8,
|
|
19723
|
+
8,
|
|
19724
|
+
8,
|
|
19725
|
+
8,
|
|
19726
|
+
8,
|
|
19727
|
+
8,
|
|
19728
|
+
8,
|
|
19729
|
+
8,
|
|
19730
|
+
8,
|
|
19731
|
+
8,
|
|
19732
|
+
8,
|
|
19733
|
+
8,
|
|
19734
|
+
8,
|
|
19735
|
+
8,
|
|
19736
|
+
8,
|
|
19737
|
+
8,
|
|
19738
|
+
8,
|
|
19739
|
+
8,
|
|
19740
|
+
8,
|
|
19741
|
+
8,
|
|
19742
|
+
8,
|
|
19743
|
+
8,
|
|
19744
|
+
8,
|
|
19745
|
+
8,
|
|
19746
|
+
8,
|
|
19747
|
+
8,
|
|
19748
|
+
8,
|
|
19749
|
+
8,
|
|
19750
|
+
8,
|
|
19751
|
+
8,
|
|
19752
|
+
8,
|
|
19753
|
+
8,
|
|
19754
|
+
8,
|
|
19755
|
+
8,
|
|
19756
|
+
8,
|
|
19757
|
+
8,
|
|
19758
|
+
8,
|
|
19759
|
+
8,
|
|
19760
|
+
8,
|
|
19761
|
+
8,
|
|
19762
|
+
8,
|
|
19763
|
+
8,
|
|
19764
|
+
8,
|
|
19765
|
+
8,
|
|
19766
|
+
8,
|
|
19767
|
+
8,
|
|
19768
|
+
8,
|
|
19769
|
+
8,
|
|
19770
|
+
8
|
|
19771
|
+
];
|
|
19772
|
+
class DirFlat extends Dir {
|
|
19773
|
+
_children;
|
|
19774
|
+
constructor(props, options){
|
|
19775
|
+
super(props, options), this._children = new Map();
|
|
19776
|
+
}
|
|
19777
|
+
async put(name, value) {
|
|
19778
|
+
if (void 0 !== this.nodeSize) {
|
|
19779
|
+
let oldChild = this._children.get(name), nameBytes = utf8ByteLength(name), strategy = this.options?.shardSplitStrategy;
|
|
19780
|
+
if ('links-bytes' === strategy) this.nodeSize -= estimateLinkSize(nameBytes, oldChild), this.nodeSize += estimateLinkSize(nameBytes, value);
|
|
19781
|
+
else if ('block-bytes' === strategy) this.nodeSize -= calculateLinkSize(nameBytes, oldChild), this.nodeSize += calculateLinkSize(nameBytes, value);
|
|
19782
|
+
else throw new InvalidShardingStrategyError(`Invalid shardSplitStrategy: ${strategy}`);
|
|
19783
|
+
this.nodeSize < 0 && (this.nodeSize = void 0);
|
|
19784
|
+
}
|
|
19785
|
+
this.cid = void 0, this.size = void 0, this._children.set(name, value);
|
|
19786
|
+
}
|
|
19787
|
+
async get(name) {
|
|
19788
|
+
return Promise.resolve(this._children.get(name));
|
|
19789
|
+
}
|
|
19790
|
+
childCount() {
|
|
19791
|
+
return this._children.size;
|
|
19792
|
+
}
|
|
19793
|
+
directChildrenCount() {
|
|
19794
|
+
return this.childCount();
|
|
19795
|
+
}
|
|
19796
|
+
onlyChild() {
|
|
19797
|
+
return this._children.values().next().value;
|
|
19798
|
+
}
|
|
19799
|
+
*eachChildSeries() {
|
|
19800
|
+
for (let [key, child] of this._children.entries())yield {
|
|
19801
|
+
key,
|
|
19802
|
+
child
|
|
19803
|
+
};
|
|
19804
|
+
}
|
|
19805
|
+
marshal() {
|
|
19806
|
+
let unixfs = new UnixFS({
|
|
19807
|
+
type: 'directory',
|
|
19808
|
+
mtime: this.mtime,
|
|
19809
|
+
mode: this.mode
|
|
19810
|
+
}), links = [];
|
|
19811
|
+
for (let [name, child] of this._children.entries())if (null != child.size && null != child.cid) {
|
|
19812
|
+
if (null == child.cid) throw Error('Directory contents must be flushed before marshaling');
|
|
19813
|
+
links.push({
|
|
19814
|
+
Hash: child.cid,
|
|
19815
|
+
Name: name,
|
|
19816
|
+
Tsize: null == child.size ? void 0 : Number(child.size)
|
|
19817
|
+
});
|
|
19818
|
+
}
|
|
19819
|
+
return dag_pb_src_encode(util_prepare({
|
|
19820
|
+
Data: unixfs.marshal(),
|
|
19821
|
+
Links: links
|
|
19822
|
+
}));
|
|
19823
|
+
}
|
|
19824
|
+
async estimateNodeSize() {
|
|
19825
|
+
if (void 0 !== this.nodeSize) return this.nodeSize;
|
|
19826
|
+
let strategy = this.options?.shardSplitStrategy;
|
|
19827
|
+
if ('links-bytes' === strategy) for (let [name, child] of (this.nodeSize = 0, this._children.entries()))this.nodeSize += estimateLinkSize(utf8ByteLength(name), child);
|
|
19828
|
+
else if ('block-bytes' === strategy) for (let [name, child] of (this.nodeSize = function(mode, mtime) {
|
|
19829
|
+
let innerSize = 2;
|
|
19830
|
+
if (void 0 !== mode && 493 !== mode && (innerSize += 1 + varintLen(mode)), null != mtime) {
|
|
19831
|
+
let mtimeInner = 0, secs = Number(mtime.secs);
|
|
19832
|
+
secs < 0 ? mtimeInner += 11 : mtimeInner += 1 + varintLen(secs), null != mtime.nsecs && (mtimeInner += 5), innerSize += 1 + varintLen(mtimeInner) + mtimeInner;
|
|
19833
|
+
}
|
|
19834
|
+
return 1 + varintLen(innerSize) + innerSize;
|
|
19835
|
+
}(this.mode, this.mtime), this._children.entries()))this.nodeSize += calculateLinkSize(utf8ByteLength(name), child);
|
|
19836
|
+
else throw new InvalidShardingStrategyError(`Invalid shardSplitStrategy: ${strategy}`);
|
|
19837
|
+
return this.nodeSize;
|
|
19838
|
+
}
|
|
19839
|
+
async *flush(block) {
|
|
19840
|
+
let links = [];
|
|
19841
|
+
for (let [name, child] of this._children.entries()){
|
|
19842
|
+
let result = child;
|
|
19843
|
+
if (child instanceof Dir) for await (let entry of child.flush(block))result = entry, yield entry;
|
|
19844
|
+
null != result.size && null != result.cid && links.push({
|
|
19845
|
+
Name: name,
|
|
19846
|
+
Tsize: Number(result.size),
|
|
19847
|
+
Hash: result.cid
|
|
19848
|
+
});
|
|
19849
|
+
}
|
|
19850
|
+
let unixfs = new UnixFS({
|
|
19851
|
+
type: 'directory',
|
|
19852
|
+
mtime: this.mtime,
|
|
19853
|
+
mode: this.mode
|
|
19854
|
+
}), node = {
|
|
19855
|
+
Data: unixfs.marshal(),
|
|
19856
|
+
Links: links
|
|
19857
|
+
}, buffer = dag_pb_src_encode(util_prepare(node)), cid = await persist(buffer, block, this.options), size = buffer.length + node.Links.reduce((acc, curr)=>acc + (curr.Tsize ?? 0), 0);
|
|
19858
|
+
this.cid = cid, this.size = size, yield {
|
|
19859
|
+
cid,
|
|
19860
|
+
unixfs,
|
|
19861
|
+
path: this.path,
|
|
19862
|
+
size: BigInt(size)
|
|
19863
|
+
};
|
|
19864
|
+
}
|
|
19865
|
+
}
|
|
19866
|
+
hasher_from({
|
|
19867
|
+
name: 'murmur3-32',
|
|
19868
|
+
code: 0x23,
|
|
19869
|
+
encode: (input)=>(function(number) {
|
|
19870
|
+
let bytes = [
|
|
19871
|
+
,
|
|
19872
|
+
,
|
|
19873
|
+
,
|
|
19874
|
+
,
|
|
19875
|
+
];
|
|
19876
|
+
for(let i = 0; i < 4; i++)bytes[i] = 0xff & number, number >>= 8;
|
|
19877
|
+
return new Uint8Array(bytes);
|
|
19878
|
+
})(function(key, seed = 0) {
|
|
19879
|
+
var k;
|
|
19880
|
+
let h1 = seed >>> 0, length = key.length, blocks = Math.floor(length / 4);
|
|
19881
|
+
for(let i = 0; i < blocks; i++){
|
|
19882
|
+
let k1 = function(key, i) {
|
|
19883
|
+
let offset = 4 * i;
|
|
19884
|
+
return (key[offset] | key[offset + 1] << 8 | key[offset + 2] << 16 | key[offset + 3] << 24) >>> 0;
|
|
19885
|
+
}(key, i);
|
|
19886
|
+
k1 = rotl32(k1 = mul32(k1, 0xcc9e2d51), 15), h1 ^= k1 = mul32(k1, 0x1b873593), h1 = mul32(h1 = rotl32(h1, 13), 5) + 0xe6546b64 >>> 0;
|
|
19887
|
+
}
|
|
19888
|
+
let tail = key.slice(4 * blocks), k1 = 0;
|
|
19889
|
+
switch(tail.length){
|
|
19890
|
+
case 3:
|
|
19891
|
+
k1 ^= tail[2] << 16;
|
|
19892
|
+
case 2:
|
|
19893
|
+
k1 ^= tail[1] << 8;
|
|
19894
|
+
case 1:
|
|
19895
|
+
k1 ^= tail[0], k1 = rotl32(k1 = mul32(k1, 0xcc9e2d51), 15), h1 ^= k1 = mul32(k1, 0x1b873593);
|
|
19896
|
+
}
|
|
19897
|
+
return h1 ^= length, k = h1, k ^= k >>> 16, k = mul32(k, 0x85ebca6b), k ^= k >>> 13, k = mul32(k, 0xc2b2ae35), h1 = k ^= k >>> 16;
|
|
19898
|
+
}(input))
|
|
19899
|
+
});
|
|
19900
|
+
let murmur3128 = hasher_from({
|
|
19901
|
+
name: 'murmur3-128',
|
|
19902
|
+
code: 0x22,
|
|
19903
|
+
encode: (input)=>murmurHash3_x64_128(input)
|
|
19904
|
+
});
|
|
19905
|
+
hasher_from({
|
|
19906
|
+
name: 'murmur3-x64-64',
|
|
19907
|
+
code: 0x22,
|
|
19908
|
+
encode: (input)=>murmurHash3_x64_128(input).subarray(0, 8)
|
|
19909
|
+
});
|
|
19910
|
+
class OpenFailedError extends Error {
|
|
19911
|
+
static name = 'OpenFailedError';
|
|
19912
|
+
static code = 'ERR_OPEN_FAILED';
|
|
19913
|
+
name = OpenFailedError.name;
|
|
19914
|
+
code = OpenFailedError.code;
|
|
19915
|
+
constructor(message = 'Open failed'){
|
|
19916
|
+
super(message);
|
|
19917
|
+
}
|
|
19918
|
+
}
|
|
19919
|
+
class CloseFailedError extends Error {
|
|
19920
|
+
static name = 'CloseFailedError';
|
|
19921
|
+
static code = 'ERR_CLOSE_FAILED';
|
|
19922
|
+
name = CloseFailedError.name;
|
|
19923
|
+
code = CloseFailedError.code;
|
|
19924
|
+
constructor(message = 'Close failed'){
|
|
19925
|
+
super(message);
|
|
19926
|
+
}
|
|
19927
|
+
}
|
|
19928
|
+
class PutFailedError extends Error {
|
|
19929
|
+
static name = 'PutFailedError';
|
|
19930
|
+
static code = 'ERR_PUT_FAILED';
|
|
19931
|
+
name = PutFailedError.name;
|
|
19932
|
+
code = PutFailedError.code;
|
|
19933
|
+
constructor(message = 'Put failed'){
|
|
19934
|
+
super(message);
|
|
19935
|
+
}
|
|
19936
|
+
}
|
|
19937
|
+
class GetFailedError extends Error {
|
|
19938
|
+
static name = 'GetFailedError';
|
|
19939
|
+
static code = 'ERR_GET_FAILED';
|
|
19940
|
+
name = GetFailedError.name;
|
|
19941
|
+
code = GetFailedError.code;
|
|
19942
|
+
constructor(message = 'Get failed'){
|
|
19943
|
+
super(message);
|
|
19944
|
+
}
|
|
19945
|
+
}
|
|
19946
|
+
class DeleteFailedError extends Error {
|
|
19947
|
+
static name = 'DeleteFailedError';
|
|
19948
|
+
static code = 'ERR_DELETE_FAILED';
|
|
19949
|
+
name = DeleteFailedError.name;
|
|
19950
|
+
code = DeleteFailedError.code;
|
|
19951
|
+
constructor(message = 'Delete failed'){
|
|
19952
|
+
super(message);
|
|
19953
|
+
}
|
|
19954
|
+
}
|
|
19955
|
+
class HasFailedError extends Error {
|
|
19956
|
+
static name = 'HasFailedError';
|
|
19957
|
+
static code = 'ERR_HAS_FAILED';
|
|
19958
|
+
name = HasFailedError.name;
|
|
19959
|
+
code = HasFailedError.code;
|
|
19960
|
+
constructor(message = 'Has failed'){
|
|
19961
|
+
super(message);
|
|
19962
|
+
}
|
|
19963
|
+
}
|
|
19964
|
+
class errors_NotFoundError extends Error {
|
|
19965
|
+
static name = 'NotFoundError';
|
|
19966
|
+
static code = 'ERR_NOT_FOUND';
|
|
19967
|
+
name = errors_NotFoundError.name;
|
|
19968
|
+
code = errors_NotFoundError.code;
|
|
19969
|
+
constructor(message = 'Not Found'){
|
|
19970
|
+
super(message);
|
|
19971
|
+
}
|
|
19972
|
+
}
|
|
19973
|
+
class AbortError extends Error {
|
|
19974
|
+
static name = 'AbortError';
|
|
19975
|
+
static code = 'ERR_ABORTED';
|
|
19976
|
+
name = AbortError.name;
|
|
19977
|
+
code = AbortError.code;
|
|
19978
|
+
constructor(message = 'Aborted'){
|
|
19979
|
+
super(message);
|
|
19980
|
+
}
|
|
19981
|
+
}
|
|
19982
|
+
class BaseBlockstore {
|
|
19983
|
+
has(key, options) {
|
|
19984
|
+
return Promise.reject(Error('.has is not implemented'));
|
|
19985
|
+
}
|
|
19986
|
+
put(key, val, options) {
|
|
19987
|
+
return Promise.reject(Error('.put is not implemented'));
|
|
19988
|
+
}
|
|
19989
|
+
async *putMany(source, options) {
|
|
19990
|
+
for await (let { cid, bytes } of source)await this.put(cid, bytes, options), yield cid;
|
|
19991
|
+
}
|
|
19992
|
+
get(key, options) {
|
|
19993
|
+
throw Error('.get is not implemented');
|
|
19994
|
+
}
|
|
19995
|
+
async *getMany(source, options) {
|
|
19996
|
+
for await (let key of source)yield {
|
|
19997
|
+
cid: key,
|
|
19998
|
+
bytes: this.get(key, options)
|
|
19999
|
+
};
|
|
20000
|
+
}
|
|
20001
|
+
delete(key, options) {
|
|
20002
|
+
return Promise.reject(Error('.delete is not implemented'));
|
|
20003
|
+
}
|
|
20004
|
+
async *deleteMany(source, options) {
|
|
20005
|
+
for await (let key of source)await this.delete(key, options), yield key;
|
|
20006
|
+
}
|
|
20007
|
+
async *getAll(options) {
|
|
20008
|
+
throw Error('.getAll is not implemented');
|
|
20009
|
+
}
|
|
20010
|
+
}
|
|
20011
|
+
class BlackHoleBlockstore extends BaseBlockstore {
|
|
20012
|
+
put(key, value, options) {
|
|
20013
|
+
return options?.signal?.throwIfAborted(), key;
|
|
20014
|
+
}
|
|
20015
|
+
get(key, options) {
|
|
20016
|
+
throw options?.signal?.throwIfAborted(), new errors_NotFoundError();
|
|
20017
|
+
}
|
|
20018
|
+
has(key, options) {
|
|
20019
|
+
return options?.signal?.throwIfAborted(), !1;
|
|
20020
|
+
}
|
|
20021
|
+
async delete(cid, options) {
|
|
20022
|
+
options?.signal?.throwIfAborted();
|
|
20023
|
+
}
|
|
20024
|
+
async *getAll(options) {
|
|
20025
|
+
options?.signal?.throwIfAborted();
|
|
20026
|
+
}
|
|
20027
|
+
}
|
|
20028
|
+
let sparse_array = __webpack_require__("./node_modules/sparse-array/index.js");
|
|
20029
|
+
class Bucket {
|
|
20030
|
+
_options;
|
|
20031
|
+
_popCount;
|
|
20032
|
+
_parent;
|
|
20033
|
+
_posAtParent;
|
|
20034
|
+
_children;
|
|
20035
|
+
key;
|
|
20036
|
+
constructor(options, parent, posAtParent = 0){
|
|
20037
|
+
this._options = options, this._popCount = 0, this._parent = parent, this._posAtParent = posAtParent, this._children = new sparse_array(), this.key = null;
|
|
20038
|
+
}
|
|
20039
|
+
async put(key, value) {
|
|
20040
|
+
let place = await this._findNewBucketAndPos(key);
|
|
20041
|
+
place.bucket._putAt(place, key, value);
|
|
20042
|
+
}
|
|
20043
|
+
async get(key) {
|
|
20044
|
+
let child = await this._findChild(key);
|
|
20045
|
+
if (null != child) return child.value;
|
|
20046
|
+
}
|
|
20047
|
+
async del(key) {
|
|
20048
|
+
let place = await this._findPlace(key), child = place.bucket._at(place.pos);
|
|
20049
|
+
null != child && child.key === key && place.bucket._delAt(place.pos);
|
|
20050
|
+
}
|
|
20051
|
+
leafCount() {
|
|
20052
|
+
return this._children.compactArray().reduce((acc, child)=>child instanceof Bucket ? acc + child.leafCount() : acc + 1, 0);
|
|
20053
|
+
}
|
|
20054
|
+
childrenCount() {
|
|
20055
|
+
return this._children.length;
|
|
20056
|
+
}
|
|
20057
|
+
onlyChild() {
|
|
20058
|
+
return this._children.get(0);
|
|
20059
|
+
}
|
|
20060
|
+
*eachLeafSeries() {
|
|
20061
|
+
for (let child of this._children.compactArray())child instanceof Bucket ? yield* child.eachLeafSeries() : yield child;
|
|
20062
|
+
}
|
|
20063
|
+
serialize(map, reduce) {
|
|
20064
|
+
return reduce(this._children.reduce((acc, child, index)=>(null != child && (child instanceof Bucket ? acc.push(child.serialize(map, reduce)) : acc.push(map(child, index))), acc), []));
|
|
20065
|
+
}
|
|
20066
|
+
async asyncTransform(asyncMap, asyncReduce) {
|
|
20067
|
+
return asyncTransformBucket(this, asyncMap, asyncReduce);
|
|
20068
|
+
}
|
|
20069
|
+
toJSON() {
|
|
20070
|
+
return this.serialize(mapNode, reduceNodes);
|
|
20071
|
+
}
|
|
20072
|
+
prettyPrint() {
|
|
20073
|
+
return JSON.stringify(this.toJSON(), null, ' ');
|
|
20074
|
+
}
|
|
20075
|
+
tableSize() {
|
|
20076
|
+
return Math.pow(2, this._options.bits);
|
|
20077
|
+
}
|
|
20078
|
+
async _findChild(key) {
|
|
20079
|
+
let result = await this._findPlace(key), child = result.bucket._at(result.pos);
|
|
20080
|
+
if (!(child instanceof Bucket) && null != child && child.key === key) return child;
|
|
20081
|
+
}
|
|
20082
|
+
async _findPlace(key) {
|
|
20083
|
+
let hashValue = this._options.hash('string' == typeof key ? from_string_node_fromString(key) : key), index = await hashValue.take(this._options.bits), child = this._children.get(index);
|
|
20084
|
+
return child instanceof Bucket ? child._findPlace(hashValue) : {
|
|
20085
|
+
bucket: this,
|
|
20086
|
+
pos: index,
|
|
20087
|
+
hash: hashValue,
|
|
20088
|
+
existingChild: child
|
|
20089
|
+
};
|
|
20090
|
+
}
|
|
20091
|
+
async _findNewBucketAndPos(key) {
|
|
20092
|
+
let place = await this._findPlace(key);
|
|
20093
|
+
if (null != place.existingChild && place.existingChild.key !== key) {
|
|
20094
|
+
let bucket = new Bucket(this._options, place.bucket, place.pos);
|
|
20095
|
+
place.bucket._putObjectAt(place.pos, bucket);
|
|
19650
20096
|
let newPlace = await bucket._findPlace(place.existingChild.hash);
|
|
19651
20097
|
return newPlace.bucket._putAt(newPlace, place.existingChild.key, place.existingChild.value), bucket._findNewBucketAndPos(place.hash);
|
|
19652
20098
|
}
|
|
@@ -19772,10 +20218,10 @@ class InfiniteHash {
|
|
|
19772
20218
|
this._buffers.push(buffer), this._availableBits += buffer.availableBits();
|
|
19773
20219
|
}
|
|
19774
20220
|
}
|
|
19775
|
-
let
|
|
19776
|
-
|
|
20221
|
+
let dir_sharded = class extends Dir {
|
|
20222
|
+
bucket;
|
|
19777
20223
|
constructor(props, options){
|
|
19778
|
-
super(props, options), this.
|
|
20224
|
+
super(props, options), this.bucket = function(options) {
|
|
19779
20225
|
var hashFn;
|
|
19780
20226
|
if (null == options || null == options.hashFn) throw Error('please define an options.hashFn');
|
|
19781
20227
|
return new Bucket({
|
|
@@ -19790,151 +20236,38 @@ let HAMT_HASH_CODE = BigInt(0x22), dir_sharded = class extends Dir {
|
|
|
19790
20236
|
});
|
|
19791
20237
|
}
|
|
19792
20238
|
async put(name, value) {
|
|
19793
|
-
this.cid = void 0, this.size = void 0, this.nodeSize = void 0, await this.
|
|
20239
|
+
this.cid = void 0, this.size = void 0, this.nodeSize = void 0, await this.bucket.put(name, value);
|
|
19794
20240
|
}
|
|
19795
20241
|
async get(name) {
|
|
19796
|
-
return this.
|
|
20242
|
+
return this.bucket.get(name);
|
|
19797
20243
|
}
|
|
19798
20244
|
childCount() {
|
|
19799
|
-
return this.
|
|
20245
|
+
return this.bucket.leafCount();
|
|
19800
20246
|
}
|
|
19801
20247
|
directChildrenCount() {
|
|
19802
|
-
return this.
|
|
20248
|
+
return this.bucket.childrenCount();
|
|
19803
20249
|
}
|
|
19804
20250
|
onlyChild() {
|
|
19805
|
-
return this.
|
|
20251
|
+
return this.bucket.onlyChild();
|
|
19806
20252
|
}
|
|
19807
20253
|
*eachChildSeries() {
|
|
19808
|
-
for (let { key, value } of this.
|
|
20254
|
+
for (let { key, value } of this.bucket.eachLeafSeries())yield {
|
|
19809
20255
|
key,
|
|
19810
20256
|
child: value
|
|
19811
20257
|
};
|
|
19812
20258
|
}
|
|
19813
|
-
estimateNodeSize() {
|
|
19814
|
-
|
|
19815
|
-
|
|
19816
|
-
|
|
19817
|
-
let child = children.get(i);
|
|
19818
|
-
if (null == child) continue;
|
|
19819
|
-
let labelPrefix = i.toString(16).toUpperCase().padStart(padLength, '0');
|
|
19820
|
-
if (child instanceof Bucket) {
|
|
19821
|
-
let size = calculateSize(child, null, options);
|
|
19822
|
-
links.push({
|
|
19823
|
-
Name: labelPrefix,
|
|
19824
|
-
Tsize: Number(size),
|
|
19825
|
-
Hash: 0 === options.cidVersion ? CID_V0 : CID_V1
|
|
19826
|
-
});
|
|
19827
|
-
} else if ('function' == typeof child.value.flush) {
|
|
19828
|
-
let size = child.value.nodeSize();
|
|
19829
|
-
links.push({
|
|
19830
|
-
Name: labelPrefix + child.key,
|
|
19831
|
-
Tsize: Number(size),
|
|
19832
|
-
Hash: 0 === options.cidVersion ? CID_V0 : CID_V1
|
|
19833
|
-
});
|
|
19834
|
-
} else {
|
|
19835
|
-
let value = child.value;
|
|
19836
|
-
if (null == value.cid) continue;
|
|
19837
|
-
let label = labelPrefix + child.key, size = value.size;
|
|
19838
|
-
links.push({
|
|
19839
|
-
Name: label,
|
|
19840
|
-
Tsize: Number(size),
|
|
19841
|
-
Hash: value.cid
|
|
19842
|
-
});
|
|
19843
|
-
}
|
|
19844
|
-
}
|
|
19845
|
-
return dag_pb_src_encode(util_prepare({
|
|
19846
|
-
Data: new UnixFS({
|
|
19847
|
-
type: 'hamt-sharded-directory',
|
|
19848
|
-
data: Uint8Array.from(children.bitField().reverse()),
|
|
19849
|
-
fanout: BigInt(bucket.tableSize()),
|
|
19850
|
-
hashType: HAMT_HASH_CODE,
|
|
19851
|
-
mtime: shardRoot?.mtime,
|
|
19852
|
-
mode: shardRoot?.mode
|
|
19853
|
-
}).marshal(),
|
|
19854
|
-
Links: links
|
|
19855
|
-
})).length;
|
|
19856
|
-
}(this._bucket, this, this.options)), this.nodeSize;
|
|
20259
|
+
async estimateNodeSize() {
|
|
20260
|
+
if (void 0 !== this.nodeSize) return this.nodeSize;
|
|
20261
|
+
let blockstore = new BlackHoleBlockstore(), result = await calculateSize(this.bucket, this, blockstore, this.options);
|
|
20262
|
+
return this.nodeSize = result.size, this.nodeSize;
|
|
19857
20263
|
}
|
|
19858
20264
|
async *flush(blockstore) {
|
|
19859
|
-
for await (let entry of dir_sharded_flush(this.
|
|
20265
|
+
for await (let entry of dir_sharded_flush(this.bucket, blockstore, this, this.options))yield {
|
|
19860
20266
|
...entry,
|
|
19861
20267
|
path: this.path
|
|
19862
20268
|
};
|
|
19863
20269
|
}
|
|
19864
|
-
}, toPathComponents = (path = '')=>path.split(/(?<!\\)\//).filter(Boolean)
|
|
19865
|
-
class OpenFailedError extends Error {
|
|
19866
|
-
static name = 'OpenFailedError';
|
|
19867
|
-
static code = 'ERR_OPEN_FAILED';
|
|
19868
|
-
name = OpenFailedError.name;
|
|
19869
|
-
code = OpenFailedError.code;
|
|
19870
|
-
constructor(message = 'Open failed'){
|
|
19871
|
-
super(message);
|
|
19872
|
-
}
|
|
19873
|
-
}
|
|
19874
|
-
class CloseFailedError extends Error {
|
|
19875
|
-
static name = 'CloseFailedError';
|
|
19876
|
-
static code = 'ERR_CLOSE_FAILED';
|
|
19877
|
-
name = CloseFailedError.name;
|
|
19878
|
-
code = CloseFailedError.code;
|
|
19879
|
-
constructor(message = 'Close failed'){
|
|
19880
|
-
super(message);
|
|
19881
|
-
}
|
|
19882
|
-
}
|
|
19883
|
-
class PutFailedError extends Error {
|
|
19884
|
-
static name = 'PutFailedError';
|
|
19885
|
-
static code = 'ERR_PUT_FAILED';
|
|
19886
|
-
name = PutFailedError.name;
|
|
19887
|
-
code = PutFailedError.code;
|
|
19888
|
-
constructor(message = 'Put failed'){
|
|
19889
|
-
super(message);
|
|
19890
|
-
}
|
|
19891
|
-
}
|
|
19892
|
-
class GetFailedError extends Error {
|
|
19893
|
-
static name = 'GetFailedError';
|
|
19894
|
-
static code = 'ERR_GET_FAILED';
|
|
19895
|
-
name = GetFailedError.name;
|
|
19896
|
-
code = GetFailedError.code;
|
|
19897
|
-
constructor(message = 'Get failed'){
|
|
19898
|
-
super(message);
|
|
19899
|
-
}
|
|
19900
|
-
}
|
|
19901
|
-
class DeleteFailedError extends Error {
|
|
19902
|
-
static name = 'DeleteFailedError';
|
|
19903
|
-
static code = 'ERR_DELETE_FAILED';
|
|
19904
|
-
name = DeleteFailedError.name;
|
|
19905
|
-
code = DeleteFailedError.code;
|
|
19906
|
-
constructor(message = 'Delete failed'){
|
|
19907
|
-
super(message);
|
|
19908
|
-
}
|
|
19909
|
-
}
|
|
19910
|
-
class HasFailedError extends Error {
|
|
19911
|
-
static name = 'HasFailedError';
|
|
19912
|
-
static code = 'ERR_HAS_FAILED';
|
|
19913
|
-
name = HasFailedError.name;
|
|
19914
|
-
code = HasFailedError.code;
|
|
19915
|
-
constructor(message = 'Has failed'){
|
|
19916
|
-
super(message);
|
|
19917
|
-
}
|
|
19918
|
-
}
|
|
19919
|
-
class errors_NotFoundError extends Error {
|
|
19920
|
-
static name = 'NotFoundError';
|
|
19921
|
-
static code = 'ERR_NOT_FOUND';
|
|
19922
|
-
name = errors_NotFoundError.name;
|
|
19923
|
-
code = errors_NotFoundError.code;
|
|
19924
|
-
constructor(message = 'Not Found'){
|
|
19925
|
-
super(message);
|
|
19926
|
-
}
|
|
19927
|
-
}
|
|
19928
|
-
class AbortError extends Error {
|
|
19929
|
-
static name = 'AbortError';
|
|
19930
|
-
static code = 'ERR_ABORTED';
|
|
19931
|
-
name = AbortError.name;
|
|
19932
|
-
code = AbortError.code;
|
|
19933
|
-
constructor(message = 'Aborted'){
|
|
19934
|
-
super(message);
|
|
19935
|
-
}
|
|
19936
|
-
}
|
|
19937
|
-
let it_all_dist_src = function(source) {
|
|
20270
|
+
}, toPathComponents = (path = '')=>path.split(/(?<!\\)\//).filter(Boolean), it_all_dist_src = function(source) {
|
|
19938
20271
|
if (null != source[Symbol.asyncIterator]) return (async ()=>{
|
|
19939
20272
|
let arr = [];
|
|
19940
20273
|
for await (let entry of source)arr.push(entry);
|
|
@@ -20549,7 +20882,7 @@ let ENS_DEPLOYER_ROLE = keccak256(Bytes_fromString('ENS_DEPLOYER')), execTransac
|
|
|
20549
20882
|
let type, tokens = info.tokens = function(input, nf, ef) {
|
|
20550
20883
|
let ret = [], chars = [];
|
|
20551
20884
|
for(input = input.slice().reverse(); input.length;){
|
|
20552
|
-
let emoji = function(cps
|
|
20885
|
+
let emoji = function(cps) {
|
|
20553
20886
|
let emoji, node = EMOJI_ROOT, pos = cps.length;
|
|
20554
20887
|
for(; pos && (node = node.get(cps[--pos]));){
|
|
20555
20888
|
let { V } = node;
|