omnipin 2.2.0 → 2.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +70 -61
  2. package/package.json +2 -2
package/dist/index.js CHANGED
@@ -2139,10 +2139,8 @@ function toZeroPaddedSize(payloadSize) {
  let size = Math.max(payloadSize, MIN_PAYLOAD_SIZE), highestBit = Math.floor(Math.log2(size)), bound = Math.ceil(calculatePieceCID_FR_RATIO * 2 ** (highestBit + 1));
  return size <= bound ? bound : Math.ceil(calculatePieceCID_FR_RATIO * 2 ** (highestBit + 2));
  }
- function trunc254InPlace(node) {
- let last = node[31];
- if (void 0 === last) throw Error('invalid node size');
- node[31] = 63 & last;
+ function trunc254AtOffset(buf, offset) {
+ buf[offset + 31] = 63 & buf[offset + 31];
  }
  function readLength(cursor, prefix, offset) {
  if (0x80 === offset && prefix < 0x80) return 1;
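Note on the change above: `trunc254InPlace` validated and mutated a standalone 32-byte node, while the new `trunc254AtOffset` masks a node in place inside a larger buffer. Both clear the top two bits of the node's last byte (63 = 0b00111111), the usual Fr32 truncation that keeps each 32-byte node within 254 bits. A minimal sketch of the new helper:

```js
// Clears the two high bits of the last byte of the 32-byte node at `offset`,
// so the node stays a valid 254-bit field element (Fr32).
function trunc254AtOffset(buf, offset) {
  buf[offset + 31] = 63 & buf[offset + 31];
}

const buf = new Uint8Array(64).fill(0xff); // two 32-byte nodes
trunc254AtOffset(buf, 0);
trunc254AtOffset(buf, 32);
console.log(buf[31], buf[63]); // 63 63
```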
@@ -8419,10 +8417,12 @@ let createPresignedUrl = async ({ bucketName, apiUrl, file, token })=>{
  'x-amz-meta-import': 'car'
  }
  });
- }, uploadOnS3 = async ({ name, car, token, bucketName, apiUrl, providerName, verbose })=>{
+ }, uploadOnS3 = async ({ name, bytes, token, bucketName, apiUrl, providerName, verbose })=>{
  let file = new File([
- car
- ], name), res = await uploadCar({
+ bytes.buffer
+ ], name, {
+ type: 'application/vnd.ipld.car'
+ }), res = await uploadCar({
  apiUrl,
  file,
  token,
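This hunk is the first of several that switch provider uploads from a prepacked `car` Blob to raw `bytes` (a Uint8Array), wrapping them in a File or Blob at the call site with an explicit CAR media type. A sketch of the pattern, with placeholder bytes and file name:

```js
// Wrap raw CAR bytes in a File for the upload request; the explicit
// MIME type marks the payload as a CAR archive.
const bytes = new Uint8Array([0x0a, 0x01]); // placeholder CAR bytes
const file = new File([bytes.buffer], 'site.car', {
  type: 'application/vnd.ipld.car',
});
console.log(file.size, file.type); // 2 'application/vnd.ipld.car'
```

The same wrapping recurs below for Filebase, Lighthouse, Pinata, Storacha, Swarmy, and Bee.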
@@ -8430,7 +8430,7 @@ let createPresignedUrl = async ({ bucketName, apiUrl, file, token })=>{
  }), text = await res.text();
  if (!res.ok) throw new DeployError(providerName, text);
  return verbose && logger.request('PUT', res.url, res.status), res;
- }, filebase_providerName = 'Filebase', filebase_baseURL = 'https://rpc.filebase.io/api/v0', uploadOnFilebase = async ({ first, car, name, token, bucketName, verbose, cid })=>{
+ }, filebase_providerName = 'Filebase', filebase_baseURL = 'https://rpc.filebase.io/api/v0', uploadOnFilebase = async ({ first, bytes, name, token, bucketName, verbose, cid, size })=>{
  if (first) {
  if (!bucketName) throw new MissingKeyError("FILEBASE_BUCKET_NAME");
  return {
@@ -8440,8 +8440,8 @@ let createPresignedUrl = async ({ bucketName, apiUrl, file, token })=>{
  providerName: filebase_providerName,
  verbose,
  name,
- car,
- size: car.size,
+ bytes,
+ size,
  token
  })).headers.get('x-amz-meta-cid'),
  status: 'queued'
@@ -11960,7 +11960,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  await promises_setTimeout(delay);
  }
  throw Error(`Transaction ${hash} not mined within timeout period`);
- }, filecoin_providerName = 'Filecoin', uploadToFilecoin = async ({ providerAddress, providerURL, cid, car, token: privateKey, verbose, filecoinChain = 'mainnet', filecoinForceNewDataset, size })=>{
+ }, filecoin_providerName = 'Filecoin', uploadToFilecoin = async ({ providerAddress, providerURL, cid, bytes, token: privateKey, verbose, filecoinChain = 'mainnet', filecoinForceNewDataset, size })=>{
  if (!providerAddress && providerURL) throw new MissingKeyError('FILECOIN_SP_ADDRESS');
  let address = fromPublicKey(Secp256k1_getPublicKey({
  privateKey
@@ -11994,7 +11994,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  chain
  });
  logger.info(`Price for storage: ${Value_format(perMonth, 18)} USDfc/month`);
- let carBytes = new Uint8Array(await car.arrayBuffer()), pieceCid = link_create(raw_code, digest_create(0x1011, function(data) {
+ let pieceCid = link_create(raw_code, digest_create(0x1011, function(data) {
  let fr32 = function(source) {
  let output = new Uint8Array(Math.floor(toZeroPaddedSize(source.length) / calculatePieceCID_FR_RATIO)), quadCount = toZeroPaddedSize(source.length) / 127;
  for(let n = 0; n < quadCount; n++){
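Since `bytes` now arrives as a Uint8Array, the `new Uint8Array(await car.arrayBuffer())` materialization is gone and the piece-CID digest consumes the bytes directly. The surrounding context sizes the Fr32 expansion via `toZeroPaddedSize` (shown in the first hunk). Assuming `calculatePieceCID_FR_RATIO` is the standard Fr32 ratio 127/128 and a 127-byte minimum payload, a worked sketch:

```js
// Standalone sketch of toZeroPaddedSize; the ratio and minimum are
// assumptions matching Filecoin's Fr32 layout (127 data bytes -> 128 bytes).
const FR_RATIO = 127 / 128;
const MIN_PAYLOAD_SIZE = 127; // assumed value of the bundle's constant

function toZeroPaddedSize(payloadSize) {
  const size = Math.max(payloadSize, MIN_PAYLOAD_SIZE);
  const highestBit = Math.floor(Math.log2(size));
  const bound = Math.ceil(FR_RATIO * 2 ** (highestBit + 1));
  return size <= bound ? bound : Math.ceil(FR_RATIO * 2 ** (highestBit + 2));
}

console.log(toZeroPaddedSize(100)); // 127 -> expands to a 128-byte piece
console.log(toZeroPaddedSize(200)); // 254 -> expands to a 256-byte piece
console.log(toZeroPaddedSize(300)); // 508 -> expands to a 512-byte piece
```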
@@ -12031,33 +12031,26 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  return output;
  }(data), root = function(fr32) {
  if (0 === fr32.length || fr32.length % 32 != 0) throw Error(`invalid fr32 payload size: ${fr32.length}`);
- let targetLeaves = 1 << Math.ceil(Math.log2(fr32.length / 32)), level = Array(targetLeaves);
- for(let i = 0; i < targetLeaves; i++){
- let leaf = new Uint8Array(32), start = 32 * i;
- start < fr32.length && leaf.set(fr32.subarray(start, start + 32)), trunc254InPlace(leaf), level[i] = leaf;
- }
- for(; level.length > 1;){
- let next = Array(level.length / 2);
- for(let i = 0; i < level.length; i += 2){
- let left = level[i], right = level[i + 1];
- if (!left || !right) throw Error('invalid tree state');
- next[i / 2] = function(left, right) {
- let input = new Uint8Array(64);
- input.set(left, 0), input.set(right, 32);
- let out = new Uint8Array(hashes_sha2_sha256(input));
- return trunc254InPlace(out), out;
- }(left, right);
+ let targetLeaves = 1 << Math.ceil(Math.log2(fr32.length / 32)), buf = new Uint8Array(32 * targetLeaves);
+ buf.set(fr32, 0);
+ for(let i = 0; i < targetLeaves; i++)trunc254AtOffset(buf, 32 * i);
+ let hashInput = new Uint8Array(64), count = targetLeaves;
+ for(; count > 1;){
+ let half = count / 2;
+ for(let i = 0; i < half; i++){
+ let leftOff = 2 * i * 32, rightOff = leftOff + 32;
+ hashInput.set(buf.subarray(leftOff, leftOff + 32), 0), hashInput.set(buf.subarray(rightOff, rightOff + 32), 32);
+ let hash = hashes_sha2_sha256(hashInput), destOff = 32 * i;
+ buf.set(hash, destOff), trunc254AtOffset(buf, destOff);
  }
- level = next;
+ count = half;
  }
- let root = level[0];
- if (!root) throw Error('failed to build piece root');
- return root;
+ return buf.slice(0, 32);
  }(fr32), pieceSize = fr32.length, height = Math.log2(pieceSize / 32);
  if (!Number.isInteger(height) || height < 0 || height > 255) throw Error(`invalid piece height for piece size ${pieceSize}`);
  let payloadPadding = toZeroPaddedSize(data.length) - data.length, paddingVarint = Uint8Array.from(varint_0.encode(payloadPadding)), out = new Uint8Array(paddingVarint.length + 1 + 32);
  return out.set(paddingVarint, 0), out[paddingVarint.length] = height, out.set(root, paddingVarint.length + 1), out;
- }(carBytes))), providerActiveDataSets = (await getClientDataSets({
+ }(bytes))), providerActiveDataSets = (await getClientDataSets({
  chain,
  address
  })).filter((ds)=>ds.providerId === providerId && 0n === ds.pdpEndEpoch), isNewDataSet = 0 === providerActiveDataSets.length || filecoinForceNewDataset, sybilFee = isNewDataSet ? 100000000000000000n : 0n, totalRequired = perMonth + sybilFee;
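The piece-root rewrite above computes the same binary Merkle root, but instead of allocating an array of 32-byte leaves plus a fresh node per hash, it lays all leaves out in one flat buffer and reduces it in place, reusing a single 64-byte hash input. A self-contained sketch of the same technique, with Node's crypto standing in for the bundle's `hashes_sha2_sha256`:

```js
import { createHash } from 'node:crypto';

// In-place binary Merkle reduction over a flat buffer of 32-byte nodes.
function merkleRootInPlace(buf /* Uint8Array, length = 32 * 2^k */) {
  const trunc254 = (b, off) => { b[off + 31] &= 63; }; // 254-bit truncation
  let count = buf.length / 32;
  for (let i = 0; i < count; i++) trunc254(buf, 32 * i);
  const input = new Uint8Array(64);
  while (count > 1) {
    const half = count / 2;
    for (let i = 0; i < half; i++) {
      const left = 2 * i * 32; // children at 64*i and 64*i + 32
      input.set(buf.subarray(left, left + 32), 0);
      input.set(buf.subarray(left + 32, left + 64), 32);
      buf.set(createHash('sha256').update(input).digest(), 32 * i);
      trunc254(buf, 32 * i); // parents overwrite the front of the buffer
    }
    count = half;
  }
  return buf.slice(0, 32);
}

console.log(merkleRootInPlace(new Uint8Array(128))); // root of four zero leaves
```

The write at `32 * i` never clobbers an unread child, since the children for step `i` live at offset `64 * i` and beyond.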
@@ -12091,7 +12084,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  logger.info('Creating new data set'), verbose && logger.info('Uploading piece to provider'), await uploadPiece({
  providerURL,
  pieceCid: pieceCid.toString(),
- bytes: carBytes
+ bytes: bytes
  }), verbose && logger.info('Waiting for piece to be stored at provider'), await findPiece(providerURL, pieceCid.toString(), {
  verbose
  });
@@ -12186,7 +12179,7 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  verbose && logger.info(`Data set ID: ${dataset.dataSetId}`), verbose && logger.info('Uploading piece to provider'), await uploadPiece({
  providerURL,
  pieceCid: pieceCid.toString(),
- bytes: carBytes
+ bytes: bytes
  }), logger.info('Waiting for piece to be stored at provider'), await findPiece(providerURL, pieceCid.toString(), {
  verbose
  });
@@ -12211,10 +12204,14 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  }), logger.success('Piece found'), {
  cid
  };
- }, lighthouse_providerName = 'Lighthouse', uploadOnLighthouse = async ({ car, name, first, token, verbose, cid })=>{
+ }, lighthouse_providerName = 'Lighthouse', uploadOnLighthouse = async ({ bytes, name, first, token, verbose, cid })=>{
  if (first) {
  let fd = new FormData();
- fd.append('file', car, `${name}.car`);
+ fd.append('file', new Blob([
+ Uint8Array.from(bytes).buffer
+ ], {
+ type: 'application/vnd.ipld.car'
+ }), `${name}.car`);
  let res = await fetch('https://upload.lighthouse.storage/api/v0/dag/import', {
  method: 'POST',
  headers: {
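One detail worth flagging in the Lighthouse hunk: it copies the bytes (`Uint8Array.from(bytes)`) before taking `.buffer`, while the other call sites pass `bytes.buffer` directly. The difference matters when the Uint8Array is a view into a larger ArrayBuffer, because `.buffer` is the whole underlying buffer, not just the viewed slice:

```js
// A view's .buffer exposes the entire underlying ArrayBuffer.
const pool = new ArrayBuffer(16);
const view = new Uint8Array(pool, 0, 4); // 4-byte view into a 16-byte buffer
console.log(new Blob([view.buffer]).size);                  // 16 — whole pool
console.log(new Blob([Uint8Array.from(view).buffer]).size); // 4 — exact copy
```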
@@ -12246,10 +12243,14 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  cid,
  status: 'queued'
  };
- }, pinata_providerName = 'Pinata', uploadOnPinata = async ({ car, name, token, verbose, first, cid })=>{
+ }, pinata_providerName = 'Pinata', uploadOnPinata = async ({ bytes, name, token, verbose, first, cid })=>{
  if (first) {
  let fd = new FormData();
- fd.append('file', car), fd.append('network', 'public'), fd.append('name', `${name}.car`), fd.append('car', 'true');
+ fd.append('file', new Blob([
+ bytes.buffer
+ ], {
+ type: 'application/vnd.ipld.car'
+ })), fd.append('network', 'public'), fd.append('name', `${name}.car`), fd.append('car', 'true');
  let res = await fetch('https://uploads.pinata.cloud/v3/files', {
  method: 'POST',
  headers: {
@@ -12291,10 +12292,10 @@ let serializedType = '0x02', TxEnvelopeEip1559_type = 'eip1559', SIMULATION_GAS_
  status: json.status,
  cid: json.pin.cid
  };
- }, uploadToSimplePage = async ({ token, car, name })=>{
+ }, uploadToSimplePage = async ({ token, bytes, name })=>{
  let fd = new FormData();
  fd.append('file', new File([
- car
+ bytes.buffer
  ], `${name}.car`, {
  type: 'application/vnd.ipld.car'
  }));
@@ -17710,13 +17711,18 @@ let uploadCAR = async (conf, car)=>{
  }, swarmy_providerName = 'Swarmy', PROVIDERS = {
  STORACHA_TOKEN: {
  name: 'Storacha',
- upload: async ({ token, car, proof })=>{
+ upload: async ({ token, bytes, proof })=>{
  if (!proof) throw new MissingKeyError("STORACHA_PROOF");
  let { agent, space } = await setup({
  pk: token,
  proof
  });
  if (!space) throw Error('No space found');
+ let blob = new Blob([
+ bytes.buffer
+ ], {
+ type: 'application/vnd.ipld.car'
+ });
  try {
  return {
  cid: (await uploadCAR({
@@ -17726,7 +17732,7 @@ let uploadCAR = async (conf, car)=>{
  with: space.did()
  }))),
  with: space.did()
- }, car)).toString()
+ }, blob)).toString()
  };
  } catch (e) {
  throw new DeployError('Storacha', e.message, {
@@ -17789,10 +17795,12 @@ let uploadCAR = async (conf, car)=>{
  },
  SWARMY_TOKEN: {
  name: 'Swarmy',
- upload: async ({ token, car, verbose, first })=>{
+ upload: async ({ token, bytes, verbose, first })=>{
  if (!first) throw new PinningNotSupportedError(swarmy_providerName);
  let body = new FormData();
- body.append('file', car);
+ body.append('file', new Blob([
+ bytes.buffer
+ ]));
  let res = await fetch('https://api.swarmy.cloud/api/files?website=true', {
  body,
  headers: {
@@ -17811,10 +17819,12 @@ let uploadCAR = async (conf, car)=>{
  },
  BEE_TOKEN: {
  name: 'Bee',
- upload: async ({ token, car, verbose, beeURL, first })=>{
+ upload: async ({ token, bytes, verbose, beeURL, first })=>{
  if (!first) throw new PinningNotSupportedError('Bee');
  let res = await fetch(`${beeURL}/bzz`, {
- body: car,
+ body: new Blob([
+ bytes.buffer
+ ]),
  headers: {
  'Swarm-Postage-Batch-Id': token,
  'Content-Type': 'application/x-tar',
@@ -20379,6 +20389,9 @@ class MemoryBlockstore {
  async delete(key, options) {
  options?.signal?.throwIfAborted(), this.data.delete(base32.encode(key.multihash.bytes));
  }
+ clear() {
+ this.data.clear();
+ }
  *getAll(options) {
  for (let [key, value] of (options?.signal?.throwIfAborted(), this.data.entries()))yield {
  cid: cid_CID.createV1(raw_code, digest_decode(base32.decode(key))),
@@ -20415,12 +20428,10 @@ let ipfs_tmp = tmpdir(), ipfs_concatBytes = (chunks)=>{
  } catch (error) {
  console.warn(`Failed to add block ${cid.toString()} to CAR:`, error);
  }
- return await writer.close(), await writePromise, writeStream.end(), await new Promise((resolve)=>writeStream.on('close', resolve)), {
- blob: new Blob([
- await readFile(output)
- ], {
- type: 'application/vnd.ipld.car'
- }),
+ await writer.close(), await writePromise, writeStream.end(), await new Promise((resolve)=>writeStream.on('close', resolve));
+ let file = await readFile(output);
+ return blockstore.clear(), {
+ bytes: file,
  rootCID
  };
  }, assertCID = (cid)=>{
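Together with the `clear()` method added to `MemoryBlockstore` above, this hunk changes the CAR packer to return the raw file bytes instead of a typed Blob, and to empty the blockstore once the CAR has been written to disk and read back, releasing the packed blocks for garbage collection. A minimal sketch of that lifecycle, assuming a Map-backed store like the one in the diff:

```js
// Map-backed blockstore with the 2.2.2 clear() method.
class MemoryBlockstore {
  data = new Map();
  put(key, value) { this.data.set(key, value); }
  clear() { this.data.clear(); }
}

const blockstore = new MemoryBlockstore();
blockstore.put('bafy-example', new Uint8Array(1024)); // hypothetical block
// ... CAR written to a temp file and read back as `file` ...
blockstore.clear(); // drop block references so they can be GC'd
console.log(blockstore.data.size); // 0
```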
@@ -21232,17 +21243,15 @@ let ENS_DEPLOYER_ROLE = keccak256(Bytes_fromString('ENS_DEPLOYER')), execTransac
  'dist'
  ].includes(dir) ? name : dir;
  if (onlyHash || logger.start(`Packing ${isTTY ? styleText('cyan', distName) : distName} (${fileSize(size, 2)})`), tar) return {
- blob: new Blob([
- await packTAR(files)
- ]),
+ bytes: await packTAR(files),
  size
  };
  {
- let { rootCID, blob } = await packCAR(files, name, dist), cid = rootCID.toString();
+ let { rootCID, bytes } = await packCAR(files, name, dist), cid = rootCID.toString();
  return onlyHash ? console.log(cid) : logger.info(`Root CID: ${isTTY ? styleText('white', cid) : cid}`), {
  name,
  cid,
- blob,
+ bytes,
  files,
  size
  };
@@ -21251,7 +21260,7 @@ let ENS_DEPLOYER_ROLE = keccak256(Bytes_fromString('ENS_DEPLOYER')), execTransac
  let cid, { strict, ens, chain = 'mainnet', name: customName, dist, verbose = !1, providers: providersList, dnslink, 'progress-bar': progressBar, 'filecoin-chain': filecoinChain, 'filecoin-force-new-dataset': filecoinForceNewDataset, ...opts } = options, apiTokens = parseTokensFromEnv(), allProviders = (providersList ? providersList.split(',') : tokensToProviderNames(apiTokens.keys())).map((providerName)=>PROVIDERS[findEnvVarProviderName(providerName)]), ipfsProviders = allProviders.filter((p)=>'ipfs' === p.protocol), swarmProviders = allProviders.filter((p)=>'swarm' === p.protocol);
  if (ipfsProviders.sort((a)=>'both' === a.supported || 'upload' === a.supported ? -1 : 1), !allProviders.length) throw new NoProvidersError();
  logger.info(`Deploying with providers: ${(swarmProviders.length ? swarmProviders : ipfsProviders).map((p)=>p.name).join(', ')}`);
- let { name, cid: ipfsCid, blob, size } = await packAction({
+ let { name, cid: ipfsCid, bytes, size } = await packAction({
  dir,
  options: {
  name: customName,
@@ -21272,7 +21281,7 @@ let ENS_DEPLOYER_ROLE = keccak256(Bytes_fromString('ENS_DEPLOYER')), execTransac
  let envVar = findEnvVarProviderName(provider.name);
  try {
  let result = await provider.upload({
- car: blob,
+ bytes,
  token: apiTokens.get(envVar),
  verbose,
  name: '',
@@ -21294,7 +21303,7 @@ let ENS_DEPLOYER_ROLE = keccak256(Bytes_fromString('ENS_DEPLOYER')), execTransac
  try {
  await provider.upload({
  name,
- car: blob,
+ bytes,
  token,
  bucketName,
  proof: apiTokens.get('STORACHA_PROOF'),
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "omnipin",
- "version": "2.2.0",
+ "version": "2.2.2",
  "author": "v1rtl <hi@v1rtl.site>",
  "repository": {
  "type": "git",
@@ -10,7 +10,7 @@
  "@biomejs/biome": "^2.4.7",
  "@ipld/car": "^5.4.2",
  "@ipld/dag-ucan": "^3.4.5",
- "@omnipin/foc": "npm:@jsr/omnipin__foc@0.0.10",
+ "@omnipin/foc": "npm:@jsr/omnipin__foc@0.0.11",
  "@rslib/core": "^0.20.0",
  "@size-limit/file": "^12.0.1",
  "@stauro/filebase-upload": "npm:@jsr/stauro__filebase-upload",