@taqueria/plugin-ipfs-pinata 0.56.15 → 0.57.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/chunk-6OD7MDAL.js ADDED
@@ -0,0 +1,15 @@
+ // src/ipfsHash.ts
+ import { createHash } from "crypto";
+ import { readFile } from "fs/promises";
+ async function getFileIPFSHash(filePath) {
+ const fileContent = await readFile(filePath);
+ const hash = createHash("sha256").update(fileContent).digest("hex");
+ return hash;
+ }
+ var ipfsHash_default = getFileIPFSHash;
+
+ export {
+ getFileIPFSHash,
+ ipfsHash_default
+ };
+ //# sourceMappingURL=chunk-6OD7MDAL.js.map
package/chunk-6OD7MDAL.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["src/ipfsHash.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(fileContent).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";AAAA,SAAS,kBAAkB;AAC3B,SAAS,gBAAgB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,MAAM,SAAS,QAAQ;AAG3C,QAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,WAAW,EAAE,OAAO,KAAK;AAGlE,SAAO;AACR;AAEA,IAAO,mBAAQ;","names":[]}
package/index.cjs CHANGED
@@ -36,18 +36,12 @@ module.exports = __toCommonJS(taqueria_plugin_ipfs_pinata_exports);
  var import_node_sdk2 = require("@taqueria/node-sdk");
 
  // src/ipfsHash.ts
- var import_unixfs = require("@helia/unixfs");
+ var import_crypto = require("crypto");
  var import_promises = require("fs/promises");
- var import_helia = require("helia");
  async function getFileIPFSHash(filePath) {
- const helia = await (0, import_helia.createHelia)();
- const fs3 = (0, import_unixfs.unixfs)(helia);
- const cid = fs3.addFile({
- path: filePath,
- content: await (0, import_promises.readFile)(filePath)
- });
- await helia.stop();
- return cid.toString();
+ const fileContent = await (0, import_promises.readFile)(filePath);
+ const hash = (0, import_crypto.createHash)("sha256").update(fileContent).digest("hex");
+ return hash;
  }
 
  // src/proxy.ts
@@ -269,8 +263,7 @@ var publishToIpfs = async (fileOrDirPath, auth) => {
  onProgress: ({ processedFilesCount, estimateFileCount }) => {
  if (estimateFileCount && processedFilesCount % 10) {
  let ratio = processedFilesCount / estimateFileCount;
- if (ratio > 1)
- ratio = 1;
+ if (ratio > 1) ratio = 1;
  }
  }
  });
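One pattern worth decoding in the CJS hunk above: the replacement line calls (0, import_crypto.createHash)("sha256"). This comma expression is the standard way bundlers and TypeScript lower a call to an ESM named import after converting it to require(): it extracts the function value first, so the call runs without `this` bound to the required namespace object, matching ESM call semantics. A small TypeScript sketch of the difference; the ns/whoAmI names are made up for illustration.

// The names here are illustrative; only the two call forms matter.
const ns = {
	whoAmI() {
		// In a module (strict mode), the indirect call leaves `this` undefined.
		return this === ns ? 'method call' : 'plain function call';
	},
};

console.log(ns.whoAmI());      // "method call"
console.log((0, ns.whoAmI)()); // "plain function call"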
package/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["index.ts","src/ipfsHash.ts","src/proxy.ts","src/file-processing.ts","src/pinata-api.ts","src/utils.ts"],"sourcesContent":["import { Option, Plugin, PositionalArg, Task } from '@taqueria/node-sdk';\nexport { getFileIPFSHash } from './src/ipfsHash';\nimport proxy from './src/proxy';\n\nPlugin.create(() => ({\n\tschema: '0.1',\n\tversion: '0.4.0',\n\talias: 'pinata',\n\ttasks: [\n\t\tTask.create({\n\t\t\ttask: 'publish',\n\t\t\tcommand: 'publish [path]',\n\t\t\tdescription: 'Upload and pin files using your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'path',\n\t\t\t\t\tdescription: 'Directory or file path to publish',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t\tencoding: 'json',\n\t\t}),\n\t\tTask.create({\n\t\t\ttask: 'pin',\n\t\t\tcommand: 'pin [hash]',\n\t\t\tdescription: 'Pin a file already on ipfs with your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'hash',\n\t\t\t\t\tdescription: 'Ipfs hash of the file or directory that is already on the ipfs network.',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t}),\n\t],\n\tproxy,\n}), process.argv);\n","import { unixfs } from '@helia/unixfs';\nimport { readFile } from 'fs/promises';\nimport { createHelia } from 'helia';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// create a Helia node\n\tconst helia = await createHelia();\n\n\t// create a filesystem on top of Helia, in this case it's UnixFS\n\tconst fs = unixfs(helia);\n\n\t// Create a text encoder and encode the contents of the file\n\t// into a Uint8Array.\n\tconst cid = fs.addFile({\n\t\tpath: filePath,\n\t\tcontent: await readFile(filePath),\n\t});\n\n\tawait helia.stop();\n\n\treturn cid.toString();\n}\n\nexport default getFileIPFSHash;\n","import { sendAsyncErr, sendAsyncRes, sendErr, sendJsonRes } from '@taqueria/node-sdk';\nimport { RequestArgs } from '@taqueria/node-sdk';\nimport { LoadedConfig, SanitizedAbsPath } from '@taqueria/node-sdk/types';\nimport path from 'path';\nimport { processFiles } from './file-processing';\nimport { PinataAuth, pinHash, publishFileToIpfs } from './pinata-api';\nimport { createProcessBackoffController } from './utils';\n\n// Load .env for jwt token\n// TODO: How should this be stored in a secure way?\nimport 'dotenv/config';\n\n// TODO: What should this be, it was removed from the sdk\ntype PluginResponse =\n\t| void\n\t| {\n\t\trender: 'table';\n\t\tdata: unknown[];\n\t};\n\ninterface Opts extends RequestArgs.t {\n\treadonly path?: string;\n\treadonly hash?: string;\n\treadonly task?: string;\n}\n\nconst publishToIpfs = async (fileOrDirPath: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!fileOrDirPath) {\n\t\tthrow new Error(`path was not provided`);\n\t}\n\n\t// Pinata is limited to 180 requests per minute\n\t// So for the first 180 requests they can go fast\n\n\tconst { processWithBackoff } = createProcessBackoffController({\n\t\tretryCount: 5,\n\t\ttargetRequestsPerMinute: 180,\n\t});\n\n\tconst result = await processFiles({\n\t\tfileOrDirPath,\n\t\tparallelCount: 10,\n\t\tprocessFile: async filePath => {\n\t\t\t// // TEMP: Debug\n\t\t\t// console.log(`publishing: ${filePath}`);\n\n\t\t\treturn processWithBackoff(() =>\n\t\t\t\tpublishFileToIpfs({\n\t\t\t\t\tauth,\n\t\t\t\t\titem: { filePath, name: path.basename(filePath) 
},\n\t\t\t\t})\n\t\t\t);\n\t\t},\n\t\tonProgress: ({ processedFilesCount, estimateFileCount }) => {\n\t\t\tif (estimateFileCount && processedFilesCount % 10) {\n\t\t\t\tlet ratio = processedFilesCount / estimateFileCount;\n\t\t\t\tif (ratio > 1) ratio = 1;\n\n\t\t\t\t// // TODO: Call task sdk progress\n\t\t\t\t// console.log(`Progress: ${(ratio * 100).toFixed(0)}%`);\n\t\t\t}\n\t\t},\n\t});\n\n\t// // TEMP: DEBUG: Show error\n\t// if (result.failures.length) {\n\t// \tconsole.log('❗ Failures:\\n' + result.failures.map(f => `${f.filePath}: ${f.error}`).join('\\n'));\n\t// }\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [\n\t\t\t...result.failures.map(x => ({\n\t\t\t\t'?': '❌',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: undefined,\n\t\t\t\terror: (x.error as { message?: string })?.message ?? JSON.stringify(x.error),\n\t\t\t})),\n\t\t\t...result.successes.map(x => ({\n\t\t\t\t'?': '✔',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: x.result.ipfsHash,\n\t\t\t\terror: undefined,\n\t\t\t})),\n\t\t],\n\t};\n};\n\nconst pinToIpfs = async (hash: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!hash) {\n\t\tthrow new Error(`ipfs hash was not provided`);\n\t}\n\n\tawait pinHash({ ipfsHash: hash, auth });\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [{ ipfsHash: hash }],\n\t};\n};\n\nconst execute = async (opts: Opts): Promise<PluginResponse> => {\n\tconst {\n\t\ttask,\n\t\tpath,\n\t\thash,\n\t} = opts;\n\n\tconst auth: PinataAuth = {\n\t\t// TODO: Where should this be stored?\n\t\t// pinataJwtToken: (config as Record<string, any>).credentials.pinataJwtToken,\n\t\tpinataJwtToken: process.env['pinataJwtToken'] as string,\n\t};\n\n\tif (!auth.pinataJwtToken) {\n\t\tthrow new Error(`The 'credentials.pinataJwtToken' was not found in config`);\n\t}\n\n\tswitch (task) {\n\t\tcase 'publish':\n\t\t\treturn publishToIpfs(path, auth);\n\t\tcase 'pin':\n\t\t\treturn pinToIpfs(hash, auth);\n\t\tdefault:\n\t\t\tthrow new Error(`${task} is not an understood task by the ipfs-pinata plugin`);\n\t}\n};\n\nexport default async (args: RequestArgs.t): Promise<PluginResponse> => {\n\tconst opts = args as Opts;\n\n\ttry {\n\t\tconst resultRaw = await execute(opts) as Record<string, unknown>;\n\t\t// TODO: Fix deno parsing\n\t\t// Without this, `data.reduce is not a function`\n\t\tconst result = ('data' in resultRaw) ? 
resultRaw.data : resultRaw;\n\t\treturn sendJsonRes(result);\n\t} catch (err) {\n\t\tconst error = err as Error;\n\t\tif (error.message) {\n\t\t\treturn sendAsyncErr(error.message);\n\t\t}\n\t}\n};\n","import fs from 'fs/promises';\nimport path from 'path';\n\n// Async generator\n// https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search\nasync function* getFiles(fileOrDirPath: string): AsyncGenerator<string, void, unknown> {\n\tconst dirInfo = await fs.stat(fileOrDirPath);\n\tif (dirInfo.isFile()) {\n\t\tyield fileOrDirPath;\n\t\treturn;\n\t}\n\n\tconst dirents = await fs.readdir(fileOrDirPath, { withFileTypes: true });\n\tfor (const dirent of dirents) {\n\t\tconst res = path.resolve(fileOrDirPath, dirent.name);\n\t\tif (dirent.isDirectory()) {\n\t\t\tyield* getFiles(res);\n\t\t} else {\n\t\t\tyield res;\n\t\t}\n\t}\n}\n\nconst createFileProvider = async ({\n\tfileOrDirPath,\n\tfilter,\n\tshouldEstimateFileCount,\n}: {\n\tfileOrDirPath: string;\n\tfilter?: (filePath: string) => boolean;\n\tshouldEstimateFileCount?: boolean;\n}) => {\n\tfileOrDirPath = path.resolve(fileOrDirPath);\n\tconst pathInfo = await fs.stat(fileOrDirPath);\n\tif (\n\t\t!pathInfo.isFile()\n\t\t&& !pathInfo.isDirectory()\n\t) {\n\t\tthrow new Error(`The path '${fileOrDirPath}' is not a file or directory`);\n\t}\n\n\tlet estimateFileCount = undefined as undefined | number;\n\tif (shouldEstimateFileCount) {\n\t\testimateFileCount = 0;\n\t\tfor await (const filePath of getFiles(fileOrDirPath)) {\n\t\t\tif (filter && !filter(filePath)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\testimateFileCount++;\n\t\t}\n\t}\n\n\tconst fileGenerator = getFiles(fileOrDirPath);\n\tconst getNextFile = async () => {\n\t\tlet nextFile = (await fileGenerator.next()).value;\n\t\tif (!filter) {\n\t\t\treturn nextFile;\n\t\t}\n\n\t\twhile (nextFile && !filter(nextFile)) {\n\t\t\tnextFile = await getNextFile();\n\t\t}\n\n\t\treturn nextFile;\n\t};\n\treturn {\n\t\tgetNextFile,\n\t\testimateFileCount,\n\t};\n};\n\ntype ProgressInfo = { processedFilesCount: number; estimateFileCount: undefined | number };\nexport const processFiles = async <TResult>({\n\tfileOrDirPath,\n\tprocessFile,\n\tfilter,\n\tparallelCount = 10,\n\tonProgress,\n}: {\n\tfileOrDirPath: string;\n\tprocessFile: (filePath: string, progress: ProgressInfo) => Promise<TResult>;\n\tfilter?: (filePath: string) => boolean;\n\tparallelCount?: number;\n\tonProgress?: (progress: ProgressInfo) => void;\n}) => {\n\tconst { getNextFile, estimateFileCount } = await createFileProvider({\n\t\tfileOrDirPath,\n\t\tfilter,\n\t\tshouldEstimateFileCount: true,\n\t});\n\n\tconst successes = [] as { filePath: string; result: TResult }[];\n\tconst failures = [] as { filePath: string; error: unknown }[];\n\n\tonProgress?.({\n\t\tprocessedFilesCount: 0,\n\t\testimateFileCount,\n\t});\n\n\tawait Promise.all([...new Array(parallelCount)].map(async x => {\n\t\tlet fileToProcess = await getNextFile();\n\t\twhile (fileToProcess) {\n\t\t\tconst progressInfo = {\n\t\t\t\tprocessedFilesCount: successes.length + failures.length,\n\t\t\t\testimateFileCount,\n\t\t\t};\n\t\t\tonProgress?.(progressInfo);\n\n\t\t\ttry {\n\t\t\t\tconst result = await processFile(fileToProcess, progressInfo);\n\t\t\t\tsuccesses.push({ filePath: fileToProcess, result });\n\t\t\t} catch (err) {\n\t\t\t\tfailures.push({ filePath: fileToProcess, error: err });\n\t\t\t}\n\n\t\t\tfileToProcess = await getNextFile();\n\t\t}\n\t}));\n\n\tonProgress?.({\n\t\tprocessedFilesCount: successes.length + 
failures.length,\n\t\testimateFileCount,\n\t});\n\n\treturn {\n\t\tsuccesses,\n\t\tfailures,\n\t};\n};\n","import FormData from 'form-data';\nimport fs from 'fs';\nimport { readFile } from 'fs/promises';\nimport fetch from 'node-fetch';\nimport { getFileIPFSHash } from './ipfsHash';\n\nexport type PinataAuth = {\n\tpinataJwtToken: string;\n};\n\nexport type PublishFileResult = {\n\tipfsHash: string;\n};\n\nexport const publishFileToIpfs = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}): Promise<PublishFileResult> => {\n\t// The data api to check for existing file is limited to 30 requests per minute\n\t// While uploading allows 180 requests per minute\n\t// i.e. it's faster to just upload again\n\n\t// // Skip if already pinned\n\t// const { isPinned, ipfsHash } = await checkIfFileIsPinned({ auth, item });\n\t// if (isPinned) {\n\t// \treturn {\n\t// \t\tipfsHash,\n\t// \t};\n\t// }\n\n\tconst data = new FormData();\n\tdata.append('file', fs.createReadStream(item.filePath));\n\tdata.append(\n\t\t'pinataMetadata',\n\t\tJSON.stringify({\n\t\t\tname: item.name,\n\t\t}),\n\t);\n\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinFileToIPFS`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': `multipart/form-data; boundary=${(data as unknown as { _boundary: string })._boundary}`,\n\t\t},\n\t\tbody: data,\n\t\tmethod: 'post',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to upload '${item.name}' to ipfs ${response.statusText}`);\n\t}\n\n\tconst uploadResult = await response.json() as {\n\t\tIpfsHash: string; // This is the IPFS multi-hash provided back for your content,\n\t\tPinSize: string; // This is how large (in bytes) the content you just pinned is,\n\t\tTimestamp: string; // This is the timestamp for your content pinning (represented in ISO 8601 format)\n\t};\n\n\treturn {\n\t\tipfsHash: uploadResult.IpfsHash,\n\t};\n};\n\nconst checkIfFileIsPinned = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}) => {\n\tconst contents = await readFile(item.filePath);\n\tconst ipfsHash = await getFileIPFSHash(item.filePath);\n\n\tconst response = await fetch(`https://api.pinata.cloud/data/pinList?status=pinned&hashContains=${ipfsHash}`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t},\n\t\tmethod: 'get',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to query '${item.name}' status from pinata ${response.statusText}`);\n\t}\n\n\tconst pinResult = await response.json() as {\n\t\tcount: number;\n\t\trows: {\n\t\t\tid: string;\n\t\t\tipfs_pin_hash: string;\n\t\t\tsize: number;\n\t\t\tuser_id: string;\n\t\t\tdate_pinned: null | string;\n\t\t\tdate_unpinned: null | string;\n\t\t\tmetadata: {\n\t\t\t\tname: string;\n\t\t\t\tkeyvalues: null | string;\n\t\t\t};\n\t\t\tregions: {\n\t\t\t\tregionId: string;\n\t\t\t\tcurrentReplicationCount: number;\n\t\t\t\tdesiredReplicationCount: number;\n\t\t\t}[];\n\t\t}[];\n\t};\n\n\tconst isPinned = pinResult.rows.some(x =>\n\t\tx.ipfs_pin_hash === ipfsHash\n\t\t&& x.date_pinned\n\t\t&& !x.date_unpinned\n\t);\n\n\treturn {\n\t\tisPinned,\n\t\tipfsHash,\n\t};\n};\n\nexport const pinHash = async ({\n\tauth,\n\tipfsHash,\n}: {\n\tauth: PinataAuth;\n\tipfsHash: string;\n}) => {\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinByHash`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer 
${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': 'application/json',\n\t\t},\n\t\tmethod: 'post',\n\t\tbody: JSON.stringify({\n\t\t\thashToPin: ipfsHash,\n\t\t}),\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to pin '${ipfsHash}' with pinata: ${response.statusText}`);\n\t}\n\n\t// Ok is the only response if successful\n\treturn;\n};\n","export async function delay(timeout: number): Promise<void> {\n\treturn await new Promise(resolve => {\n\t\tsetTimeout(resolve, timeout);\n\t});\n}\n\nexport const createProcessBackoffController = ({\n\tretryCount = 5,\n\ttargetRequestsPerMinute = 180,\n}: {\n\tretryCount?: number;\n\ttargetRequestsPerMinute?: number;\n}) => {\n\tlet averageTimePerRequest = 5000;\n\tlet targetTimePerRequest = 60000 / targetRequestsPerMinute;\n\tlet lastTime = Date.now();\n\n\tconst processWithBackoff = async <TResult>(process: () => Promise<TResult>) => {\n\t\tlet attempt = 0;\n\t\tlet lastError = undefined as unknown;\n\t\twhile (attempt < retryCount) {\n\t\t\ttry {\n\t\t\t\tlet delayTimeMs = Math.max(10, targetTimePerRequest - averageTimePerRequest);\n\n\t\t\t\t// Partially randomized delay to ensure parallel requests don't line up\n\t\t\t\tawait delay(Math.floor(delayTimeMs * (1 + 0.5 * Math.random())));\n\n\t\t\t\tconst result = await process();\n\n\t\t\t\tconst timeNow = Date.now();\n\t\t\t\tconst timeElapsed = timeNow - lastTime;\n\t\t\t\tlastTime = timeNow;\n\n\t\t\t\t// Running average\n\t\t\t\taverageTimePerRequest = averageTimePerRequest * 0.97 + timeElapsed * 0.03;\n\n\t\t\t\treturn result;\n\t\t\t} catch (err) {\n\t\t\t\tlastError = err;\n\t\t\t}\n\n\t\t\t// Quickly increase time to wait if failure (allow negatives to wait longer than target)\n\t\t\taverageTimePerRequest -= (attempt + 1) * 1000;\n\t\t\tattempt++;\n\t\t}\n\n\t\t// All attempts failed\n\t\tthrow lastError;\n\t};\n\n\treturn 
{\n\t\tprocessWithBackoff,\n\t};\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAA,mBAAoD;;;ACApD,oBAAuB;AACvB,sBAAyB;AACzB,mBAA4B;AAE5B,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,QAAQ,UAAM,0BAAY;AAGhC,QAAMC,UAAK,sBAAO,KAAK;AAIvB,QAAM,MAAMA,IAAG,QAAQ;AAAA,IACtB,MAAM;AAAA,IACN,SAAS,UAAM,0BAAS,QAAQ;AAAA,EACjC,CAAC;AAED,QAAM,MAAM,KAAK;AAEjB,SAAO,IAAI,SAAS;AACrB;;;ACrBA,sBAAiE;AAGjE,IAAAC,eAAiB;;;ACHjB,IAAAC,mBAAe;AACf,kBAAiB;AAIjB,gBAAgB,SAAS,eAA8D;AACtF,QAAM,UAAU,MAAM,iBAAAC,QAAG,KAAK,aAAa;AAC3C,MAAI,QAAQ,OAAO,GAAG;AACrB,UAAM;AACN;AAAA,EACD;AAEA,QAAM,UAAU,MAAM,iBAAAA,QAAG,QAAQ,eAAe,EAAE,eAAe,KAAK,CAAC;AACvE,aAAW,UAAU,SAAS;AAC7B,UAAM,MAAM,YAAAC,QAAK,QAAQ,eAAe,OAAO,IAAI;AACnD,QAAI,OAAO,YAAY,GAAG;AACzB,aAAO,SAAS,GAAG;AAAA,IACpB,OAAO;AACN,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEA,IAAM,qBAAqB,OAAO;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AACD,MAIM;AACL,kBAAgB,YAAAA,QAAK,QAAQ,aAAa;AAC1C,QAAM,WAAW,MAAM,iBAAAD,QAAG,KAAK,aAAa;AAC5C,MACC,CAAC,SAAS,OAAO,KACd,CAAC,SAAS,YAAY,GACxB;AACD,UAAM,IAAI,MAAM,aAAa,aAAa,8BAA8B;AAAA,EACzE;AAEA,MAAI,oBAAoB;AACxB,MAAI,yBAAyB;AAC5B,wBAAoB;AACpB,qBAAiB,YAAY,SAAS,aAAa,GAAG;AACrD,UAAI,UAAU,CAAC,OAAO,QAAQ,GAAG;AAChC;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAEA,QAAM,gBAAgB,SAAS,aAAa;AAC5C,QAAM,cAAc,YAAY;AAC/B,QAAI,YAAY,MAAM,cAAc,KAAK,GAAG;AAC5C,QAAI,CAAC,QAAQ;AACZ,aAAO;AAAA,IACR;AAEA,WAAO,YAAY,CAAC,OAAO,QAAQ,GAAG;AACrC,iBAAW,MAAM,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACR;AACA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AAGO,IAAM,eAAe,OAAgB;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAChB;AACD,MAMM;AACL,QAAM,EAAE,aAAa,kBAAkB,IAAI,MAAM,mBAAmB;AAAA,IACnE;AAAA,IACA;AAAA,IACA,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,YAAY,CAAC;AACnB,QAAM,WAAW,CAAC;AAElB,2CAAa;AAAA,IACZ,qBAAqB;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,QAAQ,IAAI,CAAC,GAAG,IAAI,MAAM,aAAa,CAAC,EAAE,IAAI,OAAM,MAAK;AAC9D,QAAI,gBAAgB,MAAM,YAAY;AACtC,WAAO,eAAe;AACrB,YAAM,eAAe;AAAA,QACpB,qBAAqB,UAAU,SAAS,SAAS;AAAA,QACjD;AAAA,MACD;AACA,+CAAa;AAEb,UAAI;AACH,cAAM,SAAS,MAAM,YAAY,eAAe,YAAY;AAC5D,kBAAU,KAAK,EAAE,UAAU,eAAe,OAAO,CAAC;AAAA,MACnD,SAAS,KAAK;AACb,iBAAS,KAAK,EAAE,UAAU,eAAe,OAAO,IAAI,CAAC;AAAA,MACtD;AAEA,sBAAgB,MAAM,YAAY;AAAA,IACnC;AAAA,EACD,CAAC,CAAC;AAEF,2CAAa;AAAA,IACZ,qBAAqB,UAAU,SAAS,SAAS;AAAA,IACjD;AAAA,EACD;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;;;AChIA,uBAAqB;AACrB,gBAAe;AAEf,wBAAkB;AAWX,IAAM,oBAAoB,OAAO;AAAA,EACvC;AAAA,EACA;AACD,MAMkC;AAajC,QAAM,OAAO,IAAI,iBAAAE,QAAS;AAC1B,OAAK,OAAO,QAAQ,UAAAC,QAAG,iBAAiB,KAAK,QAAQ,CAAC;AACtD,OAAK;AAAA,IACJ;AAAA,IACA,KAAK,UAAU;AAAA,MACd,MAAM,KAAK;AAAA,IACZ,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,kBAAAC,SAAM,kDAAkD;AAAA,IAC9E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB,iCAAkC,KAA0C,SAAS;AAAA,IACtG;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,EACT,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,qBAAqB,KAAK,IAAI,aAAa,SAAS,UAAU,EAAE;AAAA,EACjF;AAEA,QAAM,eAAe,MAAM,SAAS,KAAK;AAMzC,SAAO;AAAA,IACN,UAAU,aAAa;AAAA,EACxB;AACD;AA2DO,IAAM,UAAU,OAAO;AAAA,EAC7B;AAAA,EACA;AACD,MAGM;AACL,QAAM,WAAW,UAAM,kBAAAC,SAAM,8CAA8C;AAAA,IAC1E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,IACR,MAAM,KAAK,UAAU;AAAA,MACpB,WAAW;AAAA,IACZ,CAAC;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,kBAAkB,QAAQ,kBAAkB,SAAS,UAAU,EAAE;AAAA,EAClF;AAGA;AACD;;;ACtJA,eAAsB,MAAM,SAAgC;AAC3D,SAAO,MAAM,IAAI,QAAQ,aAAW;AACnC,eAAW,SAAS,OAAO;AAAA,EAC5B,CAAC;AACF;AAEO,IAAM,iCAAiC,CAAC;AAAA,EAC9C,aAAa;AAAA,EACb,0BAA0B;AAC3B,MAGM;AACL,MAAI,wBAAwB;AAC5B,MAAI,uBAAuB,MAAQ;AACnC,MAAI,WAAW,KAAK,IAAI;AAExB,QAAM,qBAAqB,OAAgBC,aAAoC;AAC9E,QAAI,UAAU;AACd,QAAI,YAAY;AAChB,WAAO,UAAU,YAAY;AAC5B,UAAI;AACH,YAAI,cA
Ac,KAAK,IAAI,IAAI,uBAAuB,qBAAqB;AAG3E,cAAM,MAAM,KAAK,MAAM,eAAe,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;AAE/D,cAAM,SAAS,MAAMA,SAAQ;AAE7B,cAAM,UAAU,KAAK,IAAI;AACzB,cAAM,cAAc,UAAU;AAC9B,mBAAW;AAGX,gCAAwB,wBAAwB,OAAO,cAAc;AAErE,eAAO;AAAA,MACR,SAAS,KAAK;AACb,oBAAY;AAAA,MACb;AAGA,gCAA0B,UAAU,KAAK;AACzC;AAAA,IACD;AAGA,UAAM;AAAA,EACP;AAEA,SAAO;AAAA,IACN;AAAA,EACD;AACD;;;AH3CA,oBAAO;AAgBP,IAAM,gBAAgB,OAAO,eAAmC,SAA8C;AAC7G,MAAI,CAAC,eAAe;AACnB,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACxC;AAKA,QAAM,EAAE,mBAAmB,IAAI,+BAA+B;AAAA,IAC7D,YAAY;AAAA,IACZ,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,SAAS,MAAM,aAAa;AAAA,IACjC;AAAA,IACA,eAAe;AAAA,IACf,aAAa,OAAM,aAAY;AAI9B,aAAO;AAAA,QAAmB,MACzB,kBAAkB;AAAA,UACjB;AAAA,UACA,MAAM,EAAE,UAAU,MAAM,aAAAC,QAAK,SAAS,QAAQ,EAAE;AAAA,QACjD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,IACA,YAAY,CAAC,EAAE,qBAAqB,kBAAkB,MAAM;AAC3D,UAAI,qBAAqB,sBAAsB,IAAI;AAClD,YAAI,QAAQ,sBAAsB;AAClC,YAAI,QAAQ;AAAG,kBAAQ;AAAA,MAIxB;AAAA,IACD;AAAA,EACD,CAAC;AAOD,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM;AAAA,MACL,GAAG,OAAO,SAAS,IAAI,OAAE;AAxE5B;AAwEgC;AAAA,UAC5B,KAAK;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,UAAU;AAAA,UACV,SAAQ,OAAE,UAAF,mBAAkC,YAAW,KAAK,UAAU,EAAE,KAAK;AAAA,QAC5E;AAAA,OAAE;AAAA,MACF,GAAG,OAAO,UAAU,IAAI,QAAM;AAAA,QAC7B,KAAK;AAAA,QACL,UAAU,EAAE;AAAA,QACZ,UAAU,EAAE,OAAO;AAAA,QACnB,OAAO;AAAA,MACR,EAAE;AAAA,IACH;AAAA,EACD;AACD;AAEA,IAAM,YAAY,OAAO,MAA0B,SAA8C;AAChG,MAAI,CAAC,MAAM;AACV,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC7C;AAEA,QAAM,QAAQ,EAAE,UAAU,MAAM,KAAK,CAAC;AAEtC,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,CAAC,EAAE,UAAU,KAAK,CAAC;AAAA,EAC1B;AACD;AAEA,IAAM,UAAU,OAAO,SAAwC;AAC9D,QAAM;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,IACA;AAAA,EACD,IAAI;AAEJ,QAAM,OAAmB;AAAA;AAAA;AAAA,IAGxB,gBAAgB,QAAQ,IAAI,gBAAgB;AAAA,EAC7C;AAEA,MAAI,CAAC,KAAK,gBAAgB;AACzB,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC3E;AAEA,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,cAAcA,OAAM,IAAI;AAAA,IAChC,KAAK;AACJ,aAAO,UAAU,MAAM,IAAI;AAAA,IAC5B;AACC,YAAM,IAAI,MAAM,GAAG,IAAI,sDAAsD;AAAA,EAC/E;AACD;AAEA,IAAO,gBAAQ,OAAO,SAAiD;AACtE,QAAM,OAAO;AAEb,MAAI;AACH,UAAM,YAAY,MAAM,QAAQ,IAAI;AAGpC,UAAM,SAAU,UAAU,YAAa,UAAU,OAAO;AACxD,eAAO,6BAAY,MAAM;AAAA,EAC1B,SAAS,KAAK;AACb,UAAM,QAAQ;AACd,QAAI,MAAM,SAAS;AAClB,iBAAO,8BAAa,MAAM,OAAO;AAAA,IAClC;AAAA,EACD;AACD;;;AF3IA,wBAAO,OAAO,OAAO;AAAA,EACpB,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,IACN,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA,IACD,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EACA;AACD,IAAI,QAAQ,IAAI;","names":["import_node_sdk","fs","import_path","import_promises","fs","path","FormData","fs","fetch","fetch","process","path"]}
+ {"version":3,"sources":["index.ts","src/ipfsHash.ts","src/proxy.ts","src/file-processing.ts","src/pinata-api.ts","src/utils.ts"],"sourcesContent":["import { Option, Plugin, PositionalArg, Task } from '@taqueria/node-sdk';\nexport { getFileIPFSHash } from './src/ipfsHash';\nimport proxy from './src/proxy';\n\nPlugin.create(() => ({\n\tschema: '0.1',\n\tversion: '0.4.0',\n\talias: 'pinata',\n\ttasks: [\n\t\tTask.create({\n\t\t\ttask: 'publish',\n\t\t\tcommand: 'publish [path]',\n\t\t\tdescription: 'Upload and pin files using your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'path',\n\t\t\t\t\tdescription: 'Directory or file path to publish',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t\tencoding: 'json',\n\t\t}),\n\t\tTask.create({\n\t\t\ttask: 'pin',\n\t\t\tcommand: 'pin [hash]',\n\t\t\tdescription: 'Pin a file already on ipfs with your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'hash',\n\t\t\t\t\tdescription: 'Ipfs hash of the file or directory that is already on the ipfs network.',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t}),\n\t],\n\tproxy,\n}), process.argv);\n","import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(fileContent).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n","import { sendAsyncErr, sendAsyncRes, sendErr, sendJsonRes } from '@taqueria/node-sdk';\nimport { RequestArgs } from '@taqueria/node-sdk';\nimport { LoadedConfig, SanitizedAbsPath } from '@taqueria/node-sdk/types';\nimport path from 'path';\nimport { processFiles } from './file-processing';\nimport { PinataAuth, pinHash, publishFileToIpfs } from './pinata-api';\nimport { createProcessBackoffController } from './utils';\n\n// Load .env for jwt token\n// TODO: How should this be stored in a secure way?\nimport 'dotenv/config';\n\n// TODO: What should this be, it was removed from the sdk\ntype PluginResponse =\n\t| void\n\t| {\n\t\trender: 'table';\n\t\tdata: unknown[];\n\t};\n\ninterface Opts extends RequestArgs.t {\n\treadonly path?: string;\n\treadonly hash?: string;\n\treadonly task?: string;\n}\n\nconst publishToIpfs = async (fileOrDirPath: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!fileOrDirPath) {\n\t\tthrow new Error(`path was not provided`);\n\t}\n\n\t// Pinata is limited to 180 requests per minute\n\t// So for the first 180 requests they can go fast\n\n\tconst { processWithBackoff } = createProcessBackoffController({\n\t\tretryCount: 5,\n\t\ttargetRequestsPerMinute: 180,\n\t});\n\n\tconst result = await processFiles({\n\t\tfileOrDirPath,\n\t\tparallelCount: 10,\n\t\tprocessFile: async filePath => {\n\t\t\t// // TEMP: Debug\n\t\t\t// console.log(`publishing: ${filePath}`);\n\n\t\t\treturn processWithBackoff(() =>\n\t\t\t\tpublishFileToIpfs({\n\t\t\t\t\tauth,\n\t\t\t\t\titem: { filePath, name: path.basename(filePath) },\n\t\t\t\t})\n\t\t\t);\n\t\t},\n\t\tonProgress: ({ processedFilesCount, estimateFileCount }) => {\n\t\t\tif (estimateFileCount && processedFilesCount % 10) {\n\t\t\t\tlet ratio = processedFilesCount / 
estimateFileCount;\n\t\t\t\tif (ratio > 1) ratio = 1;\n\n\t\t\t\t// // TODO: Call task sdk progress\n\t\t\t\t// console.log(`Progress: ${(ratio * 100).toFixed(0)}%`);\n\t\t\t}\n\t\t},\n\t});\n\n\t// // TEMP: DEBUG: Show error\n\t// if (result.failures.length) {\n\t// \tconsole.log('❗ Failures:\\n' + result.failures.map(f => `${f.filePath}: ${f.error}`).join('\\n'));\n\t// }\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [\n\t\t\t...result.failures.map(x => ({\n\t\t\t\t'?': '❌',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: undefined,\n\t\t\t\terror: (x.error as { message?: string })?.message ?? JSON.stringify(x.error),\n\t\t\t})),\n\t\t\t...result.successes.map(x => ({\n\t\t\t\t'?': '✔',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: x.result.ipfsHash,\n\t\t\t\terror: undefined,\n\t\t\t})),\n\t\t],\n\t};\n};\n\nconst pinToIpfs = async (hash: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!hash) {\n\t\tthrow new Error(`ipfs hash was not provided`);\n\t}\n\n\tawait pinHash({ ipfsHash: hash, auth });\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [{ ipfsHash: hash }],\n\t};\n};\n\nconst execute = async (opts: Opts): Promise<PluginResponse> => {\n\tconst {\n\t\ttask,\n\t\tpath,\n\t\thash,\n\t} = opts;\n\n\tconst auth: PinataAuth = {\n\t\t// TODO: Where should this be stored?\n\t\t// pinataJwtToken: (config as Record<string, any>).credentials.pinataJwtToken,\n\t\tpinataJwtToken: process.env['pinataJwtToken'] as string,\n\t};\n\n\tif (!auth.pinataJwtToken) {\n\t\tthrow new Error(`The 'credentials.pinataJwtToken' was not found in config`);\n\t}\n\n\tswitch (task) {\n\t\tcase 'publish':\n\t\t\treturn publishToIpfs(path, auth);\n\t\tcase 'pin':\n\t\t\treturn pinToIpfs(hash, auth);\n\t\tdefault:\n\t\t\tthrow new Error(`${task} is not an understood task by the ipfs-pinata plugin`);\n\t}\n};\n\nexport default async (args: RequestArgs.t): Promise<PluginResponse> => {\n\tconst opts = args as Opts;\n\n\ttry {\n\t\tconst resultRaw = await execute(opts) as Record<string, unknown>;\n\t\t// TODO: Fix deno parsing\n\t\t// Without this, `data.reduce is not a function`\n\t\tconst result = ('data' in resultRaw) ? 
resultRaw.data : resultRaw;\n\t\treturn sendJsonRes(result);\n\t} catch (err) {\n\t\tconst error = err as Error;\n\t\tif (error.message) {\n\t\t\treturn sendAsyncErr(error.message);\n\t\t}\n\t}\n};\n","import fs from 'fs/promises';\nimport path from 'path';\n\n// Async generator\n// https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search\nasync function* getFiles(fileOrDirPath: string): AsyncGenerator<string, void, unknown> {\n\tconst dirInfo = await fs.stat(fileOrDirPath);\n\tif (dirInfo.isFile()) {\n\t\tyield fileOrDirPath;\n\t\treturn;\n\t}\n\n\tconst dirents = await fs.readdir(fileOrDirPath, { withFileTypes: true });\n\tfor (const dirent of dirents) {\n\t\tconst res = path.resolve(fileOrDirPath, dirent.name);\n\t\tif (dirent.isDirectory()) {\n\t\t\tyield* getFiles(res);\n\t\t} else {\n\t\t\tyield res;\n\t\t}\n\t}\n}\n\nconst createFileProvider = async ({\n\tfileOrDirPath,\n\tfilter,\n\tshouldEstimateFileCount,\n}: {\n\tfileOrDirPath: string;\n\tfilter?: (filePath: string) => boolean;\n\tshouldEstimateFileCount?: boolean;\n}) => {\n\tfileOrDirPath = path.resolve(fileOrDirPath);\n\tconst pathInfo = await fs.stat(fileOrDirPath);\n\tif (\n\t\t!pathInfo.isFile()\n\t\t&& !pathInfo.isDirectory()\n\t) {\n\t\tthrow new Error(`The path '${fileOrDirPath}' is not a file or directory`);\n\t}\n\n\tlet estimateFileCount = undefined as undefined | number;\n\tif (shouldEstimateFileCount) {\n\t\testimateFileCount = 0;\n\t\tfor await (const filePath of getFiles(fileOrDirPath)) {\n\t\t\tif (filter && !filter(filePath)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\testimateFileCount++;\n\t\t}\n\t}\n\n\tconst fileGenerator = getFiles(fileOrDirPath);\n\tconst getNextFile = async () => {\n\t\tlet nextFile = (await fileGenerator.next()).value;\n\t\tif (!filter) {\n\t\t\treturn nextFile;\n\t\t}\n\n\t\twhile (nextFile && !filter(nextFile)) {\n\t\t\tnextFile = await getNextFile();\n\t\t}\n\n\t\treturn nextFile;\n\t};\n\treturn {\n\t\tgetNextFile,\n\t\testimateFileCount,\n\t};\n};\n\ntype ProgressInfo = { processedFilesCount: number; estimateFileCount: undefined | number };\nexport const processFiles = async <TResult>({\n\tfileOrDirPath,\n\tprocessFile,\n\tfilter,\n\tparallelCount = 10,\n\tonProgress,\n}: {\n\tfileOrDirPath: string;\n\tprocessFile: (filePath: string, progress: ProgressInfo) => Promise<TResult>;\n\tfilter?: (filePath: string) => boolean;\n\tparallelCount?: number;\n\tonProgress?: (progress: ProgressInfo) => void;\n}) => {\n\tconst { getNextFile, estimateFileCount } = await createFileProvider({\n\t\tfileOrDirPath,\n\t\tfilter,\n\t\tshouldEstimateFileCount: true,\n\t});\n\n\tconst successes = [] as { filePath: string; result: TResult }[];\n\tconst failures = [] as { filePath: string; error: unknown }[];\n\n\tonProgress?.({\n\t\tprocessedFilesCount: 0,\n\t\testimateFileCount,\n\t});\n\n\tawait Promise.all([...new Array(parallelCount)].map(async x => {\n\t\tlet fileToProcess = await getNextFile();\n\t\twhile (fileToProcess) {\n\t\t\tconst progressInfo = {\n\t\t\t\tprocessedFilesCount: successes.length + failures.length,\n\t\t\t\testimateFileCount,\n\t\t\t};\n\t\t\tonProgress?.(progressInfo);\n\n\t\t\ttry {\n\t\t\t\tconst result = await processFile(fileToProcess, progressInfo);\n\t\t\t\tsuccesses.push({ filePath: fileToProcess, result });\n\t\t\t} catch (err) {\n\t\t\t\tfailures.push({ filePath: fileToProcess, error: err });\n\t\t\t}\n\n\t\t\tfileToProcess = await getNextFile();\n\t\t}\n\t}));\n\n\tonProgress?.({\n\t\tprocessedFilesCount: successes.length + 
failures.length,\n\t\testimateFileCount,\n\t});\n\n\treturn {\n\t\tsuccesses,\n\t\tfailures,\n\t};\n};\n","import FormData from 'form-data';\nimport fs from 'fs';\nimport { readFile } from 'fs/promises';\nimport fetch from 'node-fetch';\nimport { getFileIPFSHash } from './ipfsHash';\n\nexport type PinataAuth = {\n\tpinataJwtToken: string;\n};\n\nexport type PublishFileResult = {\n\tipfsHash: string;\n};\n\nexport const publishFileToIpfs = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}): Promise<PublishFileResult> => {\n\t// The data api to check for existing file is limited to 30 requests per minute\n\t// While uploading allows 180 requests per minute\n\t// i.e. it's faster to just upload again\n\n\t// // Skip if already pinned\n\t// const { isPinned, ipfsHash } = await checkIfFileIsPinned({ auth, item });\n\t// if (isPinned) {\n\t// \treturn {\n\t// \t\tipfsHash,\n\t// \t};\n\t// }\n\n\tconst data = new FormData();\n\tdata.append('file', fs.createReadStream(item.filePath));\n\tdata.append(\n\t\t'pinataMetadata',\n\t\tJSON.stringify({\n\t\t\tname: item.name,\n\t\t}),\n\t);\n\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinFileToIPFS`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': `multipart/form-data; boundary=${(data as unknown as { _boundary: string })._boundary}`,\n\t\t},\n\t\tbody: data,\n\t\tmethod: 'post',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to upload '${item.name}' to ipfs ${response.statusText}`);\n\t}\n\n\tconst uploadResult = await response.json() as {\n\t\tIpfsHash: string; // This is the IPFS multi-hash provided back for your content,\n\t\tPinSize: string; // This is how large (in bytes) the content you just pinned is,\n\t\tTimestamp: string; // This is the timestamp for your content pinning (represented in ISO 8601 format)\n\t};\n\n\treturn {\n\t\tipfsHash: uploadResult.IpfsHash,\n\t};\n};\n\nconst checkIfFileIsPinned = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}) => {\n\tconst contents = await readFile(item.filePath);\n\tconst ipfsHash = await getFileIPFSHash(item.filePath);\n\n\tconst response = await fetch(`https://api.pinata.cloud/data/pinList?status=pinned&hashContains=${ipfsHash}`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t},\n\t\tmethod: 'get',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to query '${item.name}' status from pinata ${response.statusText}`);\n\t}\n\n\tconst pinResult = await response.json() as {\n\t\tcount: number;\n\t\trows: {\n\t\t\tid: string;\n\t\t\tipfs_pin_hash: string;\n\t\t\tsize: number;\n\t\t\tuser_id: string;\n\t\t\tdate_pinned: null | string;\n\t\t\tdate_unpinned: null | string;\n\t\t\tmetadata: {\n\t\t\t\tname: string;\n\t\t\t\tkeyvalues: null | string;\n\t\t\t};\n\t\t\tregions: {\n\t\t\t\tregionId: string;\n\t\t\t\tcurrentReplicationCount: number;\n\t\t\t\tdesiredReplicationCount: number;\n\t\t\t}[];\n\t\t}[];\n\t};\n\n\tconst isPinned = pinResult.rows.some(x =>\n\t\tx.ipfs_pin_hash === ipfsHash\n\t\t&& x.date_pinned\n\t\t&& !x.date_unpinned\n\t);\n\n\treturn {\n\t\tisPinned,\n\t\tipfsHash,\n\t};\n};\n\nexport const pinHash = async ({\n\tauth,\n\tipfsHash,\n}: {\n\tauth: PinataAuth;\n\tipfsHash: string;\n}) => {\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinByHash`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer 
${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': 'application/json',\n\t\t},\n\t\tmethod: 'post',\n\t\tbody: JSON.stringify({\n\t\t\thashToPin: ipfsHash,\n\t\t}),\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to pin '${ipfsHash}' with pinata: ${response.statusText}`);\n\t}\n\n\t// Ok is the only response if successful\n\treturn;\n};\n","export async function delay(timeout: number): Promise<void> {\n\treturn await new Promise(resolve => {\n\t\tsetTimeout(resolve, timeout);\n\t});\n}\n\nexport const createProcessBackoffController = ({\n\tretryCount = 5,\n\ttargetRequestsPerMinute = 180,\n}: {\n\tretryCount?: number;\n\ttargetRequestsPerMinute?: number;\n}) => {\n\tlet averageTimePerRequest = 5000;\n\tlet targetTimePerRequest = 60000 / targetRequestsPerMinute;\n\tlet lastTime = Date.now();\n\n\tconst processWithBackoff = async <TResult>(process: () => Promise<TResult>) => {\n\t\tlet attempt = 0;\n\t\tlet lastError = undefined as unknown;\n\t\twhile (attempt < retryCount) {\n\t\t\ttry {\n\t\t\t\tlet delayTimeMs = Math.max(10, targetTimePerRequest - averageTimePerRequest);\n\n\t\t\t\t// Partially randomized delay to ensure parallel requests don't line up\n\t\t\t\tawait delay(Math.floor(delayTimeMs * (1 + 0.5 * Math.random())));\n\n\t\t\t\tconst result = await process();\n\n\t\t\t\tconst timeNow = Date.now();\n\t\t\t\tconst timeElapsed = timeNow - lastTime;\n\t\t\t\tlastTime = timeNow;\n\n\t\t\t\t// Running average\n\t\t\t\taverageTimePerRequest = averageTimePerRequest * 0.97 + timeElapsed * 0.03;\n\n\t\t\t\treturn result;\n\t\t\t} catch (err) {\n\t\t\t\tlastError = err;\n\t\t\t}\n\n\t\t\t// Quickly increase time to wait if failure (allow negatives to wait longer than target)\n\t\t\taverageTimePerRequest -= (attempt + 1) * 1000;\n\t\t\tattempt++;\n\t\t}\n\n\t\t// All attempts failed\n\t\tthrow lastError;\n\t};\n\n\treturn 
{\n\t\tprocessWithBackoff,\n\t};\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAA,mBAAoD;;;ACApD,oBAA2B;AAC3B,sBAAyB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,UAAM,0BAAS,QAAQ;AAG3C,QAAM,WAAO,0BAAW,QAAQ,EAAE,OAAO,WAAW,EAAE,OAAO,KAAK;AAGlE,SAAO;AACR;;;ACZA,sBAAiE;AAGjE,IAAAC,eAAiB;;;ACHjB,IAAAC,mBAAe;AACf,kBAAiB;AAIjB,gBAAgB,SAAS,eAA8D;AACtF,QAAM,UAAU,MAAM,iBAAAC,QAAG,KAAK,aAAa;AAC3C,MAAI,QAAQ,OAAO,GAAG;AACrB,UAAM;AACN;AAAA,EACD;AAEA,QAAM,UAAU,MAAM,iBAAAA,QAAG,QAAQ,eAAe,EAAE,eAAe,KAAK,CAAC;AACvE,aAAW,UAAU,SAAS;AAC7B,UAAM,MAAM,YAAAC,QAAK,QAAQ,eAAe,OAAO,IAAI;AACnD,QAAI,OAAO,YAAY,GAAG;AACzB,aAAO,SAAS,GAAG;AAAA,IACpB,OAAO;AACN,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEA,IAAM,qBAAqB,OAAO;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AACD,MAIM;AACL,kBAAgB,YAAAA,QAAK,QAAQ,aAAa;AAC1C,QAAM,WAAW,MAAM,iBAAAD,QAAG,KAAK,aAAa;AAC5C,MACC,CAAC,SAAS,OAAO,KACd,CAAC,SAAS,YAAY,GACxB;AACD,UAAM,IAAI,MAAM,aAAa,aAAa,8BAA8B;AAAA,EACzE;AAEA,MAAI,oBAAoB;AACxB,MAAI,yBAAyB;AAC5B,wBAAoB;AACpB,qBAAiB,YAAY,SAAS,aAAa,GAAG;AACrD,UAAI,UAAU,CAAC,OAAO,QAAQ,GAAG;AAChC;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAEA,QAAM,gBAAgB,SAAS,aAAa;AAC5C,QAAM,cAAc,YAAY;AAC/B,QAAI,YAAY,MAAM,cAAc,KAAK,GAAG;AAC5C,QAAI,CAAC,QAAQ;AACZ,aAAO;AAAA,IACR;AAEA,WAAO,YAAY,CAAC,OAAO,QAAQ,GAAG;AACrC,iBAAW,MAAM,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACR;AACA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AAGO,IAAM,eAAe,OAAgB;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAChB;AACD,MAMM;AACL,QAAM,EAAE,aAAa,kBAAkB,IAAI,MAAM,mBAAmB;AAAA,IACnE;AAAA,IACA;AAAA,IACA,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,YAAY,CAAC;AACnB,QAAM,WAAW,CAAC;AAElB,2CAAa;AAAA,IACZ,qBAAqB;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,QAAQ,IAAI,CAAC,GAAG,IAAI,MAAM,aAAa,CAAC,EAAE,IAAI,OAAM,MAAK;AAC9D,QAAI,gBAAgB,MAAM,YAAY;AACtC,WAAO,eAAe;AACrB,YAAM,eAAe;AAAA,QACpB,qBAAqB,UAAU,SAAS,SAAS;AAAA,QACjD;AAAA,MACD;AACA,+CAAa;AAEb,UAAI;AACH,cAAM,SAAS,MAAM,YAAY,eAAe,YAAY;AAC5D,kBAAU,KAAK,EAAE,UAAU,eAAe,OAAO,CAAC;AAAA,MACnD,SAAS,KAAK;AACb,iBAAS,KAAK,EAAE,UAAU,eAAe,OAAO,IAAI,CAAC;AAAA,MACtD;AAEA,sBAAgB,MAAM,YAAY;AAAA,IACnC;AAAA,EACD,CAAC,CAAC;AAEF,2CAAa;AAAA,IACZ,qBAAqB,UAAU,SAAS,SAAS;AAAA,IACjD;AAAA,EACD;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;;;AChIA,uBAAqB;AACrB,gBAAe;AAEf,wBAAkB;AAWX,IAAM,oBAAoB,OAAO;AAAA,EACvC;AAAA,EACA;AACD,MAMkC;AAajC,QAAM,OAAO,IAAI,iBAAAE,QAAS;AAC1B,OAAK,OAAO,QAAQ,UAAAC,QAAG,iBAAiB,KAAK,QAAQ,CAAC;AACtD,OAAK;AAAA,IACJ;AAAA,IACA,KAAK,UAAU;AAAA,MACd,MAAM,KAAK;AAAA,IACZ,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,kBAAAC,SAAM,kDAAkD;AAAA,IAC9E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB,iCAAkC,KAA0C,SAAS;AAAA,IACtG;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,EACT,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,qBAAqB,KAAK,IAAI,aAAa,SAAS,UAAU,EAAE;AAAA,EACjF;AAEA,QAAM,eAAe,MAAM,SAAS,KAAK;AAMzC,SAAO;AAAA,IACN,UAAU,aAAa;AAAA,EACxB;AACD;AA2DO,IAAM,UAAU,OAAO;AAAA,EAC7B;AAAA,EACA;AACD,MAGM;AACL,QAAM,WAAW,UAAM,kBAAAC,SAAM,8CAA8C;AAAA,IAC1E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,IACR,MAAM,KAAK,UAAU;AAAA,MACpB,WAAW;AAAA,IACZ,CAAC;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,kBAAkB,QAAQ,kBAAkB,SAAS,UAAU,EAAE;AAAA,EAClF;AAGA;AACD;;;ACtJA,eAAsB,MAAM,SAAgC;AAC3D,SAAO,MAAM,IAAI,QAAQ,aAAW;AACnC,eAAW,SAAS,OAAO;AAAA,EAC5B,CAAC;AACF;AAEO,IAAM,iCAAiC,CAAC;AAAA,EAC9C,aAAa;AAAA,EACb,0BAA0B;AAC3B,MAGM;AACL,MAAI,wBAAwB;AAC5B,MAAI,uBAAuB,MAAQ;AACnC,MAAI,WAAW,KAAK,IAAI;AAExB,QAAM,qBAAqB,OAAgBC,aAAoC;AAC9E,QAAI,UAAU;AACd,QAAI,YAAY;AAChB,WAAO,UAAU,YAAY;AAC5B,UAAI;AACH,YAAI,cAAc,KAAK,IAAI,IAAI,uBAAuB,qBAAqB;AAG3E,cAAM,MAAM,KAAK,MAAM,eAAe,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;AAE/D,cA
AM,SAAS,MAAMA,SAAQ;AAE7B,cAAM,UAAU,KAAK,IAAI;AACzB,cAAM,cAAc,UAAU;AAC9B,mBAAW;AAGX,gCAAwB,wBAAwB,OAAO,cAAc;AAErE,eAAO;AAAA,MACR,SAAS,KAAK;AACb,oBAAY;AAAA,MACb;AAGA,gCAA0B,UAAU,KAAK;AACzC;AAAA,IACD;AAGA,UAAM;AAAA,EACP;AAEA,SAAO;AAAA,IACN;AAAA,EACD;AACD;;;AH3CA,oBAAO;AAgBP,IAAM,gBAAgB,OAAO,eAAmC,SAA8C;AAC7G,MAAI,CAAC,eAAe;AACnB,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACxC;AAKA,QAAM,EAAE,mBAAmB,IAAI,+BAA+B;AAAA,IAC7D,YAAY;AAAA,IACZ,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,SAAS,MAAM,aAAa;AAAA,IACjC;AAAA,IACA,eAAe;AAAA,IACf,aAAa,OAAM,aAAY;AAI9B,aAAO;AAAA,QAAmB,MACzB,kBAAkB;AAAA,UACjB;AAAA,UACA,MAAM,EAAE,UAAU,MAAM,aAAAC,QAAK,SAAS,QAAQ,EAAE;AAAA,QACjD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,IACA,YAAY,CAAC,EAAE,qBAAqB,kBAAkB,MAAM;AAC3D,UAAI,qBAAqB,sBAAsB,IAAI;AAClD,YAAI,QAAQ,sBAAsB;AAClC,YAAI,QAAQ,EAAG,SAAQ;AAAA,MAIxB;AAAA,IACD;AAAA,EACD,CAAC;AAOD,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM;AAAA,MACL,GAAG,OAAO,SAAS,IAAI,OAAE;AAxE5B;AAwEgC;AAAA,UAC5B,KAAK;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,UAAU;AAAA,UACV,SAAQ,OAAE,UAAF,mBAAkC,YAAW,KAAK,UAAU,EAAE,KAAK;AAAA,QAC5E;AAAA,OAAE;AAAA,MACF,GAAG,OAAO,UAAU,IAAI,QAAM;AAAA,QAC7B,KAAK;AAAA,QACL,UAAU,EAAE;AAAA,QACZ,UAAU,EAAE,OAAO;AAAA,QACnB,OAAO;AAAA,MACR,EAAE;AAAA,IACH;AAAA,EACD;AACD;AAEA,IAAM,YAAY,OAAO,MAA0B,SAA8C;AAChG,MAAI,CAAC,MAAM;AACV,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC7C;AAEA,QAAM,QAAQ,EAAE,UAAU,MAAM,KAAK,CAAC;AAEtC,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,CAAC,EAAE,UAAU,KAAK,CAAC;AAAA,EAC1B;AACD;AAEA,IAAM,UAAU,OAAO,SAAwC;AAC9D,QAAM;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,IACA;AAAA,EACD,IAAI;AAEJ,QAAM,OAAmB;AAAA;AAAA;AAAA,IAGxB,gBAAgB,QAAQ,IAAI,gBAAgB;AAAA,EAC7C;AAEA,MAAI,CAAC,KAAK,gBAAgB;AACzB,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC3E;AAEA,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,cAAcA,OAAM,IAAI;AAAA,IAChC,KAAK;AACJ,aAAO,UAAU,MAAM,IAAI;AAAA,IAC5B;AACC,YAAM,IAAI,MAAM,GAAG,IAAI,sDAAsD;AAAA,EAC/E;AACD;AAEA,IAAO,gBAAQ,OAAO,SAAiD;AACtE,QAAM,OAAO;AAEb,MAAI;AACH,UAAM,YAAY,MAAM,QAAQ,IAAI;AAGpC,UAAM,SAAU,UAAU,YAAa,UAAU,OAAO;AACxD,eAAO,6BAAY,MAAM;AAAA,EAC1B,SAAS,KAAK;AACb,UAAM,QAAQ;AACd,QAAI,MAAM,SAAS;AAClB,iBAAO,8BAAa,MAAM,OAAO;AAAA,IAClC;AAAA,EACD;AACD;;;AF3IA,wBAAO,OAAO,OAAO;AAAA,EACpB,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,IACN,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA,IACD,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EACA;AACD,IAAI,QAAQ,IAAI;","names":["import_node_sdk","import_path","import_promises","fs","path","FormData","fs","fetch","fetch","process","path"]}
package/index.d.cts ADDED
@@ -0,0 +1 @@
+ export { default as getFileIPFSHash } from './src/ipfsHash.cjs';
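The new index.d.cts gives the CommonJS build a typed entry point: it re-exports the default export of src/ipfsHash.cjs under the name getFileIPFSHash, matching the `export { getFileIPFSHash } from './src/ipfsHash'` line visible in index.ts in the source maps above. A hedged consumer-side sketch follows, assuming the package's "exports"/"types" fields (not shown in this diff) resolve to these entry points.

// Hypothetical consumer usage; assumes package.json (not in this diff)
// points consumers at the index.* entry points shown above.
import { getFileIPFSHash } from '@taqueria/plugin-ipfs-pinata';

async function main(): Promise<void> {
	// As of 0.57.8 this is the SHA-256 hex digest of the file contents.
	const hash = await getFileIPFSHash('./artifacts/example.tz');
	console.log(hash);
}

main().catch(console.error);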
package/index.js CHANGED
@@ -1,6 +1,6 @@
  import {
  getFileIPFSHash
- } from "./chunk-PF3FHOWY.js";
+ } from "./chunk-6OD7MDAL.js";
 
  // index.ts
  import { Plugin, PositionalArg, Task } from "@taqueria/node-sdk";
@@ -224,8 +224,7 @@ var publishToIpfs = async (fileOrDirPath, auth) => {
  onProgress: ({ processedFilesCount, estimateFileCount }) => {
  if (estimateFileCount && processedFilesCount % 10) {
  let ratio = processedFilesCount / estimateFileCount;
- if (ratio > 1)
- ratio = 1;
+ if (ratio > 1) ratio = 1;
  }
  }
  });
package/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["index.ts","src/proxy.ts","src/file-processing.ts","src/pinata-api.ts","src/utils.ts"],"sourcesContent":["import { Option, Plugin, PositionalArg, Task } from '@taqueria/node-sdk';\nexport { getFileIPFSHash } from './src/ipfsHash';\nimport proxy from './src/proxy';\n\nPlugin.create(() => ({\n\tschema: '0.1',\n\tversion: '0.4.0',\n\talias: 'pinata',\n\ttasks: [\n\t\tTask.create({\n\t\t\ttask: 'publish',\n\t\t\tcommand: 'publish [path]',\n\t\t\tdescription: 'Upload and pin files using your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'path',\n\t\t\t\t\tdescription: 'Directory or file path to publish',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t\tencoding: 'json',\n\t\t}),\n\t\tTask.create({\n\t\t\ttask: 'pin',\n\t\t\tcommand: 'pin [hash]',\n\t\t\tdescription: 'Pin a file already on ipfs with your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'hash',\n\t\t\t\t\tdescription: 'Ipfs hash of the file or directory that is already on the ipfs network.',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t}),\n\t],\n\tproxy,\n}), process.argv);\n","import { sendAsyncErr, sendAsyncRes, sendErr, sendJsonRes } from '@taqueria/node-sdk';\nimport { RequestArgs } from '@taqueria/node-sdk';\nimport { LoadedConfig, SanitizedAbsPath } from '@taqueria/node-sdk/types';\nimport path from 'path';\nimport { processFiles } from './file-processing';\nimport { PinataAuth, pinHash, publishFileToIpfs } from './pinata-api';\nimport { createProcessBackoffController } from './utils';\n\n// Load .env for jwt token\n// TODO: How should this be stored in a secure way?\nimport 'dotenv/config';\n\n// TODO: What should this be, it was removed from the sdk\ntype PluginResponse =\n\t| void\n\t| {\n\t\trender: 'table';\n\t\tdata: unknown[];\n\t};\n\ninterface Opts extends RequestArgs.t {\n\treadonly path?: string;\n\treadonly hash?: string;\n\treadonly task?: string;\n}\n\nconst publishToIpfs = async (fileOrDirPath: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!fileOrDirPath) {\n\t\tthrow new Error(`path was not provided`);\n\t}\n\n\t// Pinata is limited to 180 requests per minute\n\t// So for the first 180 requests they can go fast\n\n\tconst { processWithBackoff } = createProcessBackoffController({\n\t\tretryCount: 5,\n\t\ttargetRequestsPerMinute: 180,\n\t});\n\n\tconst result = await processFiles({\n\t\tfileOrDirPath,\n\t\tparallelCount: 10,\n\t\tprocessFile: async filePath => {\n\t\t\t// // TEMP: Debug\n\t\t\t// console.log(`publishing: ${filePath}`);\n\n\t\t\treturn processWithBackoff(() =>\n\t\t\t\tpublishFileToIpfs({\n\t\t\t\t\tauth,\n\t\t\t\t\titem: { filePath, name: path.basename(filePath) },\n\t\t\t\t})\n\t\t\t);\n\t\t},\n\t\tonProgress: ({ processedFilesCount, estimateFileCount }) => {\n\t\t\tif (estimateFileCount && processedFilesCount % 10) {\n\t\t\t\tlet ratio = processedFilesCount / estimateFileCount;\n\t\t\t\tif (ratio > 1) ratio = 1;\n\n\t\t\t\t// // TODO: Call task sdk progress\n\t\t\t\t// console.log(`Progress: ${(ratio * 100).toFixed(0)}%`);\n\t\t\t}\n\t\t},\n\t});\n\n\t// // TEMP: DEBUG: Show error\n\t// if (result.failures.length) {\n\t// \tconsole.log('❗ Failures:\\n' + result.failures.map(f => `${f.filePath}: ${f.error}`).join('\\n'));\n\t// }\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [\n\t\t\t...result.failures.map(x => ({\n\t\t\t\t'?': 
'❌',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: undefined,\n\t\t\t\terror: (x.error as { message?: string })?.message ?? JSON.stringify(x.error),\n\t\t\t})),\n\t\t\t...result.successes.map(x => ({\n\t\t\t\t'?': '✔',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: x.result.ipfsHash,\n\t\t\t\terror: undefined,\n\t\t\t})),\n\t\t],\n\t};\n};\n\nconst pinToIpfs = async (hash: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!hash) {\n\t\tthrow new Error(`ipfs hash was not provided`);\n\t}\n\n\tawait pinHash({ ipfsHash: hash, auth });\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [{ ipfsHash: hash }],\n\t};\n};\n\nconst execute = async (opts: Opts): Promise<PluginResponse> => {\n\tconst {\n\t\ttask,\n\t\tpath,\n\t\thash,\n\t} = opts;\n\n\tconst auth: PinataAuth = {\n\t\t// TODO: Where should this be stored?\n\t\t// pinataJwtToken: (config as Record<string, any>).credentials.pinataJwtToken,\n\t\tpinataJwtToken: process.env['pinataJwtToken'] as string,\n\t};\n\n\tif (!auth.pinataJwtToken) {\n\t\tthrow new Error(`The 'credentials.pinataJwtToken' was not found in config`);\n\t}\n\n\tswitch (task) {\n\t\tcase 'publish':\n\t\t\treturn publishToIpfs(path, auth);\n\t\tcase 'pin':\n\t\t\treturn pinToIpfs(hash, auth);\n\t\tdefault:\n\t\t\tthrow new Error(`${task} is not an understood task by the ipfs-pinata plugin`);\n\t}\n};\n\nexport default async (args: RequestArgs.t): Promise<PluginResponse> => {\n\tconst opts = args as Opts;\n\n\ttry {\n\t\tconst resultRaw = await execute(opts) as Record<string, unknown>;\n\t\t// TODO: Fix deno parsing\n\t\t// Without this, `data.reduce is not a function`\n\t\tconst result = ('data' in resultRaw) ? resultRaw.data : resultRaw;\n\t\treturn sendJsonRes(result);\n\t} catch (err) {\n\t\tconst error = err as Error;\n\t\tif (error.message) {\n\t\t\treturn sendAsyncErr(error.message);\n\t\t}\n\t}\n};\n","import fs from 'fs/promises';\nimport path from 'path';\n\n// Async generator\n// https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search\nasync function* getFiles(fileOrDirPath: string): AsyncGenerator<string, void, unknown> {\n\tconst dirInfo = await fs.stat(fileOrDirPath);\n\tif (dirInfo.isFile()) {\n\t\tyield fileOrDirPath;\n\t\treturn;\n\t}\n\n\tconst dirents = await fs.readdir(fileOrDirPath, { withFileTypes: true });\n\tfor (const dirent of dirents) {\n\t\tconst res = path.resolve(fileOrDirPath, dirent.name);\n\t\tif (dirent.isDirectory()) {\n\t\t\tyield* getFiles(res);\n\t\t} else {\n\t\t\tyield res;\n\t\t}\n\t}\n}\n\nconst createFileProvider = async ({\n\tfileOrDirPath,\n\tfilter,\n\tshouldEstimateFileCount,\n}: {\n\tfileOrDirPath: string;\n\tfilter?: (filePath: string) => boolean;\n\tshouldEstimateFileCount?: boolean;\n}) => {\n\tfileOrDirPath = path.resolve(fileOrDirPath);\n\tconst pathInfo = await fs.stat(fileOrDirPath);\n\tif (\n\t\t!pathInfo.isFile()\n\t\t&& !pathInfo.isDirectory()\n\t) {\n\t\tthrow new Error(`The path '${fileOrDirPath}' is not a file or directory`);\n\t}\n\n\tlet estimateFileCount = undefined as undefined | number;\n\tif (shouldEstimateFileCount) {\n\t\testimateFileCount = 0;\n\t\tfor await (const filePath of getFiles(fileOrDirPath)) {\n\t\t\tif (filter && !filter(filePath)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\testimateFileCount++;\n\t\t}\n\t}\n\n\tconst fileGenerator = getFiles(fileOrDirPath);\n\tconst getNextFile = async () => {\n\t\tlet nextFile = (await fileGenerator.next()).value;\n\t\tif (!filter) {\n\t\t\treturn nextFile;\n\t\t}\n\n\t\twhile (nextFile && 
!filter(nextFile)) {\n\t\t\tnextFile = await getNextFile();\n\t\t}\n\n\t\treturn nextFile;\n\t};\n\treturn {\n\t\tgetNextFile,\n\t\testimateFileCount,\n\t};\n};\n\ntype ProgressInfo = { processedFilesCount: number; estimateFileCount: undefined | number };\nexport const processFiles = async <TResult>({\n\tfileOrDirPath,\n\tprocessFile,\n\tfilter,\n\tparallelCount = 10,\n\tonProgress,\n}: {\n\tfileOrDirPath: string;\n\tprocessFile: (filePath: string, progress: ProgressInfo) => Promise<TResult>;\n\tfilter?: (filePath: string) => boolean;\n\tparallelCount?: number;\n\tonProgress?: (progress: ProgressInfo) => void;\n}) => {\n\tconst { getNextFile, estimateFileCount } = await createFileProvider({\n\t\tfileOrDirPath,\n\t\tfilter,\n\t\tshouldEstimateFileCount: true,\n\t});\n\n\tconst successes = [] as { filePath: string; result: TResult }[];\n\tconst failures = [] as { filePath: string; error: unknown }[];\n\n\tonProgress?.({\n\t\tprocessedFilesCount: 0,\n\t\testimateFileCount,\n\t});\n\n\tawait Promise.all([...new Array(parallelCount)].map(async x => {\n\t\tlet fileToProcess = await getNextFile();\n\t\twhile (fileToProcess) {\n\t\t\tconst progressInfo = {\n\t\t\t\tprocessedFilesCount: successes.length + failures.length,\n\t\t\t\testimateFileCount,\n\t\t\t};\n\t\t\tonProgress?.(progressInfo);\n\n\t\t\ttry {\n\t\t\t\tconst result = await processFile(fileToProcess, progressInfo);\n\t\t\t\tsuccesses.push({ filePath: fileToProcess, result });\n\t\t\t} catch (err) {\n\t\t\t\tfailures.push({ filePath: fileToProcess, error: err });\n\t\t\t}\n\n\t\t\tfileToProcess = await getNextFile();\n\t\t}\n\t}));\n\n\tonProgress?.({\n\t\tprocessedFilesCount: successes.length + failures.length,\n\t\testimateFileCount,\n\t});\n\n\treturn {\n\t\tsuccesses,\n\t\tfailures,\n\t};\n};\n","import FormData from 'form-data';\nimport fs from 'fs';\nimport { readFile } from 'fs/promises';\nimport fetch from 'node-fetch';\nimport { getFileIPFSHash } from './ipfsHash';\n\nexport type PinataAuth = {\n\tpinataJwtToken: string;\n};\n\nexport type PublishFileResult = {\n\tipfsHash: string;\n};\n\nexport const publishFileToIpfs = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}): Promise<PublishFileResult> => {\n\t// The data api to check for existing file is limited to 30 requests per minute\n\t// While uploading allows 180 requests per minute\n\t// i.e. 
it's faster to just upload again\n\n\t// // Skip if already pinned\n\t// const { isPinned, ipfsHash } = await checkIfFileIsPinned({ auth, item });\n\t// if (isPinned) {\n\t// \treturn {\n\t// \t\tipfsHash,\n\t// \t};\n\t// }\n\n\tconst data = new FormData();\n\tdata.append('file', fs.createReadStream(item.filePath));\n\tdata.append(\n\t\t'pinataMetadata',\n\t\tJSON.stringify({\n\t\t\tname: item.name,\n\t\t}),\n\t);\n\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinFileToIPFS`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': `multipart/form-data; boundary=${(data as unknown as { _boundary: string })._boundary}`,\n\t\t},\n\t\tbody: data,\n\t\tmethod: 'post',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to upload '${item.name}' to ipfs ${response.statusText}`);\n\t}\n\n\tconst uploadResult = await response.json() as {\n\t\tIpfsHash: string; // This is the IPFS multi-hash provided back for your content,\n\t\tPinSize: string; // This is how large (in bytes) the content you just pinned is,\n\t\tTimestamp: string; // This is the timestamp for your content pinning (represented in ISO 8601 format)\n\t};\n\n\treturn {\n\t\tipfsHash: uploadResult.IpfsHash,\n\t};\n};\n\nconst checkIfFileIsPinned = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}) => {\n\tconst contents = await readFile(item.filePath);\n\tconst ipfsHash = await getFileIPFSHash(item.filePath);\n\n\tconst response = await fetch(`https://api.pinata.cloud/data/pinList?status=pinned&hashContains=${ipfsHash}`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t},\n\t\tmethod: 'get',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to query '${item.name}' status from pinata ${response.statusText}`);\n\t}\n\n\tconst pinResult = await response.json() as {\n\t\tcount: number;\n\t\trows: {\n\t\t\tid: string;\n\t\t\tipfs_pin_hash: string;\n\t\t\tsize: number;\n\t\t\tuser_id: string;\n\t\t\tdate_pinned: null | string;\n\t\t\tdate_unpinned: null | string;\n\t\t\tmetadata: {\n\t\t\t\tname: string;\n\t\t\t\tkeyvalues: null | string;\n\t\t\t};\n\t\t\tregions: {\n\t\t\t\tregionId: string;\n\t\t\t\tcurrentReplicationCount: number;\n\t\t\t\tdesiredReplicationCount: number;\n\t\t\t}[];\n\t\t}[];\n\t};\n\n\tconst isPinned = pinResult.rows.some(x =>\n\t\tx.ipfs_pin_hash === ipfsHash\n\t\t&& x.date_pinned\n\t\t&& !x.date_unpinned\n\t);\n\n\treturn {\n\t\tisPinned,\n\t\tipfsHash,\n\t};\n};\n\nexport const pinHash = async ({\n\tauth,\n\tipfsHash,\n}: {\n\tauth: PinataAuth;\n\tipfsHash: string;\n}) => {\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinByHash`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': 'application/json',\n\t\t},\n\t\tmethod: 'post',\n\t\tbody: JSON.stringify({\n\t\t\thashToPin: ipfsHash,\n\t\t}),\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to pin '${ipfsHash}' with pinata: ${response.statusText}`);\n\t}\n\n\t// Ok is the only response if successful\n\treturn;\n};\n","export async function delay(timeout: number): Promise<void> {\n\treturn await new Promise(resolve => {\n\t\tsetTimeout(resolve, timeout);\n\t});\n}\n\nexport const createProcessBackoffController = ({\n\tretryCount = 5,\n\ttargetRequestsPerMinute = 180,\n}: {\n\tretryCount?: number;\n\ttargetRequestsPerMinute?: number;\n}) => {\n\tlet averageTimePerRequest = 5000;\n\tlet targetTimePerRequest = 60000 / 
targetRequestsPerMinute;\n\tlet lastTime = Date.now();\n\n\tconst processWithBackoff = async <TResult>(process: () => Promise<TResult>) => {\n\t\tlet attempt = 0;\n\t\tlet lastError = undefined as unknown;\n\t\twhile (attempt < retryCount) {\n\t\t\ttry {\n\t\t\t\tlet delayTimeMs = Math.max(10, targetTimePerRequest - averageTimePerRequest);\n\n\t\t\t\t// Partially randomized delay to ensure parallel requests don't line up\n\t\t\t\tawait delay(Math.floor(delayTimeMs * (1 + 0.5 * Math.random())));\n\n\t\t\t\tconst result = await process();\n\n\t\t\t\tconst timeNow = Date.now();\n\t\t\t\tconst timeElapsed = timeNow - lastTime;\n\t\t\t\tlastTime = timeNow;\n\n\t\t\t\t// Running average\n\t\t\t\taverageTimePerRequest = averageTimePerRequest * 0.97 + timeElapsed * 0.03;\n\n\t\t\t\treturn result;\n\t\t\t} catch (err) {\n\t\t\t\tlastError = err;\n\t\t\t}\n\n\t\t\t// Quickly increase time to wait if failure (allow negatives to wait longer than target)\n\t\t\taverageTimePerRequest -= (attempt + 1) * 1000;\n\t\t\tattempt++;\n\t\t}\n\n\t\t// All attempts failed\n\t\tthrow lastError;\n\t};\n\n\treturn {\n\t\tprocessWithBackoff,\n\t};\n};\n"],"mappings":";;;;;AAAA,SAAiB,QAAQ,eAAe,YAAY;;;ACApD,SAAS,cAAqC,mBAAmB;AAGjE,OAAOA,WAAU;;;ACHjB,OAAO,QAAQ;AACf,OAAO,UAAU;AAIjB,gBAAgB,SAAS,eAA8D;AACtF,QAAM,UAAU,MAAM,GAAG,KAAK,aAAa;AAC3C,MAAI,QAAQ,OAAO,GAAG;AACrB,UAAM;AACN;AAAA,EACD;AAEA,QAAM,UAAU,MAAM,GAAG,QAAQ,eAAe,EAAE,eAAe,KAAK,CAAC;AACvE,aAAW,UAAU,SAAS;AAC7B,UAAM,MAAM,KAAK,QAAQ,eAAe,OAAO,IAAI;AACnD,QAAI,OAAO,YAAY,GAAG;AACzB,aAAO,SAAS,GAAG;AAAA,IACpB,OAAO;AACN,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEA,IAAM,qBAAqB,OAAO;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AACD,MAIM;AACL,kBAAgB,KAAK,QAAQ,aAAa;AAC1C,QAAM,WAAW,MAAM,GAAG,KAAK,aAAa;AAC5C,MACC,CAAC,SAAS,OAAO,KACd,CAAC,SAAS,YAAY,GACxB;AACD,UAAM,IAAI,MAAM,aAAa,aAAa,8BAA8B;AAAA,EACzE;AAEA,MAAI,oBAAoB;AACxB,MAAI,yBAAyB;AAC5B,wBAAoB;AACpB,qBAAiB,YAAY,SAAS,aAAa,GAAG;AACrD,UAAI,UAAU,CAAC,OAAO,QAAQ,GAAG;AAChC;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAEA,QAAM,gBAAgB,SAAS,aAAa;AAC5C,QAAM,cAAc,YAAY;AAC/B,QAAI,YAAY,MAAM,cAAc,KAAK,GAAG;AAC5C,QAAI,CAAC,QAAQ;AACZ,aAAO;AAAA,IACR;AAEA,WAAO,YAAY,CAAC,OAAO,QAAQ,GAAG;AACrC,iBAAW,MAAM,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACR;AACA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AAGO,IAAM,eAAe,OAAgB;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAChB;AACD,MAMM;AACL,QAAM,EAAE,aAAa,kBAAkB,IAAI,MAAM,mBAAmB;AAAA,IACnE;AAAA,IACA;AAAA,IACA,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,YAAY,CAAC;AACnB,QAAM,WAAW,CAAC;AAElB,2CAAa;AAAA,IACZ,qBAAqB;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,QAAQ,IAAI,CAAC,GAAG,IAAI,MAAM,aAAa,CAAC,EAAE,IAAI,OAAM,MAAK;AAC9D,QAAI,gBAAgB,MAAM,YAAY;AACtC,WAAO,eAAe;AACrB,YAAM,eAAe;AAAA,QACpB,qBAAqB,UAAU,SAAS,SAAS;AAAA,QACjD;AAAA,MACD;AACA,+CAAa;AAEb,UAAI;AACH,cAAM,SAAS,MAAM,YAAY,eAAe,YAAY;AAC5D,kBAAU,KAAK,EAAE,UAAU,eAAe,OAAO,CAAC;AAAA,MACnD,SAAS,KAAK;AACb,iBAAS,KAAK,EAAE,UAAU,eAAe,OAAO,IAAI,CAAC;AAAA,MACtD;AAEA,sBAAgB,MAAM,YAAY;AAAA,IACnC;AAAA,EACD,CAAC,CAAC;AAEF,2CAAa;AAAA,IACZ,qBAAqB,UAAU,SAAS,SAAS;AAAA,IACjD;AAAA,EACD;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;;;AChIA,OAAO,cAAc;AACrB,OAAOC,SAAQ;AAEf,OAAO,WAAW;AAWX,IAAM,oBAAoB,OAAO;AAAA,EACvC;AAAA,EACA;AACD,MAMkC;AAajC,QAAM,OAAO,IAAI,SAAS;AAC1B,OAAK,OAAO,QAAQC,IAAG,iBAAiB,KAAK,QAAQ,CAAC;AACtD,OAAK;AAAA,IACJ;AAAA,IACA,KAAK,UAAU;AAAA,MACd,MAAM,KAAK;AAAA,IACZ,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,MAAM,kDAAkD;AAAA,IAC9E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB,iCAAkC,KAA0C,SAAS;AAAA,IACtG;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,EACT,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,qBAAqB,KAAK,IAAI,aAAa,SAAS,UAAU,EAAE;AA
AA,EACjF;AAEA,QAAM,eAAe,MAAM,SAAS,KAAK;AAMzC,SAAO;AAAA,IACN,UAAU,aAAa;AAAA,EACxB;AACD;AA2DO,IAAM,UAAU,OAAO;AAAA,EAC7B;AAAA,EACA;AACD,MAGM;AACL,QAAM,WAAW,MAAM,MAAM,8CAA8C;AAAA,IAC1E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,IACR,MAAM,KAAK,UAAU;AAAA,MACpB,WAAW;AAAA,IACZ,CAAC;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,kBAAkB,QAAQ,kBAAkB,SAAS,UAAU,EAAE;AAAA,EAClF;AAGA;AACD;;;ACtJA,eAAsB,MAAM,SAAgC;AAC3D,SAAO,MAAM,IAAI,QAAQ,aAAW;AACnC,eAAW,SAAS,OAAO;AAAA,EAC5B,CAAC;AACF;AAEO,IAAM,iCAAiC,CAAC;AAAA,EAC9C,aAAa;AAAA,EACb,0BAA0B;AAC3B,MAGM;AACL,MAAI,wBAAwB;AAC5B,MAAI,uBAAuB,MAAQ;AACnC,MAAI,WAAW,KAAK,IAAI;AAExB,QAAM,qBAAqB,OAAgBC,aAAoC;AAC9E,QAAI,UAAU;AACd,QAAI,YAAY;AAChB,WAAO,UAAU,YAAY;AAC5B,UAAI;AACH,YAAI,cAAc,KAAK,IAAI,IAAI,uBAAuB,qBAAqB;AAG3E,cAAM,MAAM,KAAK,MAAM,eAAe,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;AAE/D,cAAM,SAAS,MAAMA,SAAQ;AAE7B,cAAM,UAAU,KAAK,IAAI;AACzB,cAAM,cAAc,UAAU;AAC9B,mBAAW;AAGX,gCAAwB,wBAAwB,OAAO,cAAc;AAErE,eAAO;AAAA,MACR,SAAS,KAAK;AACb,oBAAY;AAAA,MACb;AAGA,gCAA0B,UAAU,KAAK;AACzC;AAAA,IACD;AAGA,UAAM;AAAA,EACP;AAEA,SAAO;AAAA,IACN;AAAA,EACD;AACD;;;AH3CA,OAAO;AAgBP,IAAM,gBAAgB,OAAO,eAAmC,SAA8C;AAC7G,MAAI,CAAC,eAAe;AACnB,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACxC;AAKA,QAAM,EAAE,mBAAmB,IAAI,+BAA+B;AAAA,IAC7D,YAAY;AAAA,IACZ,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,SAAS,MAAM,aAAa;AAAA,IACjC;AAAA,IACA,eAAe;AAAA,IACf,aAAa,OAAM,aAAY;AAI9B,aAAO;AAAA,QAAmB,MACzB,kBAAkB;AAAA,UACjB;AAAA,UACA,MAAM,EAAE,UAAU,MAAMC,MAAK,SAAS,QAAQ,EAAE;AAAA,QACjD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,IACA,YAAY,CAAC,EAAE,qBAAqB,kBAAkB,MAAM;AAC3D,UAAI,qBAAqB,sBAAsB,IAAI;AAClD,YAAI,QAAQ,sBAAsB;AAClC,YAAI,QAAQ;AAAG,kBAAQ;AAAA,MAIxB;AAAA,IACD;AAAA,EACD,CAAC;AAOD,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM;AAAA,MACL,GAAG,OAAO,SAAS,IAAI,OAAE;AAxE5B;AAwEgC;AAAA,UAC5B,KAAK;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,UAAU;AAAA,UACV,SAAQ,OAAE,UAAF,mBAAkC,YAAW,KAAK,UAAU,EAAE,KAAK;AAAA,QAC5E;AAAA,OAAE;AAAA,MACF,GAAG,OAAO,UAAU,IAAI,QAAM;AAAA,QAC7B,KAAK;AAAA,QACL,UAAU,EAAE;AAAA,QACZ,UAAU,EAAE,OAAO;AAAA,QACnB,OAAO;AAAA,MACR,EAAE;AAAA,IACH;AAAA,EACD;AACD;AAEA,IAAM,YAAY,OAAO,MAA0B,SAA8C;AAChG,MAAI,CAAC,MAAM;AACV,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC7C;AAEA,QAAM,QAAQ,EAAE,UAAU,MAAM,KAAK,CAAC;AAEtC,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,CAAC,EAAE,UAAU,KAAK,CAAC;AAAA,EAC1B;AACD;AAEA,IAAM,UAAU,OAAO,SAAwC;AAC9D,QAAM;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,IACA;AAAA,EACD,IAAI;AAEJ,QAAM,OAAmB;AAAA;AAAA;AAAA,IAGxB,gBAAgB,QAAQ,IAAI,gBAAgB;AAAA,EAC7C;AAEA,MAAI,CAAC,KAAK,gBAAgB;AACzB,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC3E;AAEA,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,cAAcA,OAAM,IAAI;AAAA,IAChC,KAAK;AACJ,aAAO,UAAU,MAAM,IAAI;AAAA,IAC5B;AACC,YAAM,IAAI,MAAM,GAAG,IAAI,sDAAsD;AAAA,EAC/E;AACD;AAEA,IAAO,gBAAQ,OAAO,SAAiD;AACtE,QAAM,OAAO;AAEb,MAAI;AACH,UAAM,YAAY,MAAM,QAAQ,IAAI;AAGpC,UAAM,SAAU,UAAU,YAAa,UAAU,OAAO;AACxD,WAAO,YAAY,MAAM;AAAA,EAC1B,SAAS,KAAK;AACb,UAAM,QAAQ;AACd,QAAI,MAAM,SAAS;AAClB,aAAO,aAAa,MAAM,OAAO;AAAA,IAClC;AAAA,EACD;AACD;;;AD3IA,OAAO,OAAO,OAAO;AAAA,EACpB,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,IACN,KAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,cAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA,IACD,KAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,cAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EACA;AACD,IAAI,QAAQ,IAAI;","names":["path","fs","fs","process","path"]}
1
+ {"version":3,"sources":["index.ts","src/proxy.ts","src/file-processing.ts","src/pinata-api.ts","src/utils.ts"],"sourcesContent":["import { Option, Plugin, PositionalArg, Task } from '@taqueria/node-sdk';\nexport { getFileIPFSHash } from './src/ipfsHash';\nimport proxy from './src/proxy';\n\nPlugin.create(() => ({\n\tschema: '0.1',\n\tversion: '0.4.0',\n\talias: 'pinata',\n\ttasks: [\n\t\tTask.create({\n\t\t\ttask: 'publish',\n\t\t\tcommand: 'publish [path]',\n\t\t\tdescription: 'Upload and pin files using your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'path',\n\t\t\t\t\tdescription: 'Directory or file path to publish',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t\tencoding: 'json',\n\t\t}),\n\t\tTask.create({\n\t\t\ttask: 'pin',\n\t\t\tcommand: 'pin [hash]',\n\t\t\tdescription: 'Pin a file already on ipfs with your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'hash',\n\t\t\t\t\tdescription: 'Ipfs hash of the file or directory that is already on the ipfs network.',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t}),\n\t],\n\tproxy,\n}), process.argv);\n","import { sendAsyncErr, sendAsyncRes, sendErr, sendJsonRes } from '@taqueria/node-sdk';\nimport { RequestArgs } from '@taqueria/node-sdk';\nimport { LoadedConfig, SanitizedAbsPath } from '@taqueria/node-sdk/types';\nimport path from 'path';\nimport { processFiles } from './file-processing';\nimport { PinataAuth, pinHash, publishFileToIpfs } from './pinata-api';\nimport { createProcessBackoffController } from './utils';\n\n// Load .env for jwt token\n// TODO: How should this be stored in a secure way?\nimport 'dotenv/config';\n\n// TODO: What should this be, it was removed from the sdk\ntype PluginResponse =\n\t| void\n\t| {\n\t\trender: 'table';\n\t\tdata: unknown[];\n\t};\n\ninterface Opts extends RequestArgs.t {\n\treadonly path?: string;\n\treadonly hash?: string;\n\treadonly task?: string;\n}\n\nconst publishToIpfs = async (fileOrDirPath: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!fileOrDirPath) {\n\t\tthrow new Error(`path was not provided`);\n\t}\n\n\t// Pinata is limited to 180 requests per minute\n\t// So for the first 180 requests they can go fast\n\n\tconst { processWithBackoff } = createProcessBackoffController({\n\t\tretryCount: 5,\n\t\ttargetRequestsPerMinute: 180,\n\t});\n\n\tconst result = await processFiles({\n\t\tfileOrDirPath,\n\t\tparallelCount: 10,\n\t\tprocessFile: async filePath => {\n\t\t\t// // TEMP: Debug\n\t\t\t// console.log(`publishing: ${filePath}`);\n\n\t\t\treturn processWithBackoff(() =>\n\t\t\t\tpublishFileToIpfs({\n\t\t\t\t\tauth,\n\t\t\t\t\titem: { filePath, name: path.basename(filePath) },\n\t\t\t\t})\n\t\t\t);\n\t\t},\n\t\tonProgress: ({ processedFilesCount, estimateFileCount }) => {\n\t\t\tif (estimateFileCount && processedFilesCount % 10) {\n\t\t\t\tlet ratio = processedFilesCount / estimateFileCount;\n\t\t\t\tif (ratio > 1) ratio = 1;\n\n\t\t\t\t// // TODO: Call task sdk progress\n\t\t\t\t// console.log(`Progress: ${(ratio * 100).toFixed(0)}%`);\n\t\t\t}\n\t\t},\n\t});\n\n\t// // TEMP: DEBUG: Show error\n\t// if (result.failures.length) {\n\t// \tconsole.log('❗ Failures:\\n' + result.failures.map(f => `${f.filePath}: ${f.error}`).join('\\n'));\n\t// }\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [\n\t\t\t...result.failures.map(x => ({\n\t\t\t\t'?': 
'❌',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: undefined,\n\t\t\t\terror: (x.error as { message?: string })?.message ?? JSON.stringify(x.error),\n\t\t\t})),\n\t\t\t...result.successes.map(x => ({\n\t\t\t\t'?': '✔',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: x.result.ipfsHash,\n\t\t\t\terror: undefined,\n\t\t\t})),\n\t\t],\n\t};\n};\n\nconst pinToIpfs = async (hash: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!hash) {\n\t\tthrow new Error(`ipfs hash was not provided`);\n\t}\n\n\tawait pinHash({ ipfsHash: hash, auth });\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [{ ipfsHash: hash }],\n\t};\n};\n\nconst execute = async (opts: Opts): Promise<PluginResponse> => {\n\tconst {\n\t\ttask,\n\t\tpath,\n\t\thash,\n\t} = opts;\n\n\tconst auth: PinataAuth = {\n\t\t// TODO: Where should this be stored?\n\t\t// pinataJwtToken: (config as Record<string, any>).credentials.pinataJwtToken,\n\t\tpinataJwtToken: process.env['pinataJwtToken'] as string,\n\t};\n\n\tif (!auth.pinataJwtToken) {\n\t\tthrow new Error(`The 'credentials.pinataJwtToken' was not found in config`);\n\t}\n\n\tswitch (task) {\n\t\tcase 'publish':\n\t\t\treturn publishToIpfs(path, auth);\n\t\tcase 'pin':\n\t\t\treturn pinToIpfs(hash, auth);\n\t\tdefault:\n\t\t\tthrow new Error(`${task} is not an understood task by the ipfs-pinata plugin`);\n\t}\n};\n\nexport default async (args: RequestArgs.t): Promise<PluginResponse> => {\n\tconst opts = args as Opts;\n\n\ttry {\n\t\tconst resultRaw = await execute(opts) as Record<string, unknown>;\n\t\t// TODO: Fix deno parsing\n\t\t// Without this, `data.reduce is not a function`\n\t\tconst result = ('data' in resultRaw) ? resultRaw.data : resultRaw;\n\t\treturn sendJsonRes(result);\n\t} catch (err) {\n\t\tconst error = err as Error;\n\t\tif (error.message) {\n\t\t\treturn sendAsyncErr(error.message);\n\t\t}\n\t}\n};\n","import fs from 'fs/promises';\nimport path from 'path';\n\n// Async generator\n// https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search\nasync function* getFiles(fileOrDirPath: string): AsyncGenerator<string, void, unknown> {\n\tconst dirInfo = await fs.stat(fileOrDirPath);\n\tif (dirInfo.isFile()) {\n\t\tyield fileOrDirPath;\n\t\treturn;\n\t}\n\n\tconst dirents = await fs.readdir(fileOrDirPath, { withFileTypes: true });\n\tfor (const dirent of dirents) {\n\t\tconst res = path.resolve(fileOrDirPath, dirent.name);\n\t\tif (dirent.isDirectory()) {\n\t\t\tyield* getFiles(res);\n\t\t} else {\n\t\t\tyield res;\n\t\t}\n\t}\n}\n\nconst createFileProvider = async ({\n\tfileOrDirPath,\n\tfilter,\n\tshouldEstimateFileCount,\n}: {\n\tfileOrDirPath: string;\n\tfilter?: (filePath: string) => boolean;\n\tshouldEstimateFileCount?: boolean;\n}) => {\n\tfileOrDirPath = path.resolve(fileOrDirPath);\n\tconst pathInfo = await fs.stat(fileOrDirPath);\n\tif (\n\t\t!pathInfo.isFile()\n\t\t&& !pathInfo.isDirectory()\n\t) {\n\t\tthrow new Error(`The path '${fileOrDirPath}' is not a file or directory`);\n\t}\n\n\tlet estimateFileCount = undefined as undefined | number;\n\tif (shouldEstimateFileCount) {\n\t\testimateFileCount = 0;\n\t\tfor await (const filePath of getFiles(fileOrDirPath)) {\n\t\t\tif (filter && !filter(filePath)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\testimateFileCount++;\n\t\t}\n\t}\n\n\tconst fileGenerator = getFiles(fileOrDirPath);\n\tconst getNextFile = async () => {\n\t\tlet nextFile = (await fileGenerator.next()).value;\n\t\tif (!filter) {\n\t\t\treturn nextFile;\n\t\t}\n\n\t\twhile (nextFile && 
!filter(nextFile)) {\n\t\t\tnextFile = await getNextFile();\n\t\t}\n\n\t\treturn nextFile;\n\t};\n\treturn {\n\t\tgetNextFile,\n\t\testimateFileCount,\n\t};\n};\n\ntype ProgressInfo = { processedFilesCount: number; estimateFileCount: undefined | number };\nexport const processFiles = async <TResult>({\n\tfileOrDirPath,\n\tprocessFile,\n\tfilter,\n\tparallelCount = 10,\n\tonProgress,\n}: {\n\tfileOrDirPath: string;\n\tprocessFile: (filePath: string, progress: ProgressInfo) => Promise<TResult>;\n\tfilter?: (filePath: string) => boolean;\n\tparallelCount?: number;\n\tonProgress?: (progress: ProgressInfo) => void;\n}) => {\n\tconst { getNextFile, estimateFileCount } = await createFileProvider({\n\t\tfileOrDirPath,\n\t\tfilter,\n\t\tshouldEstimateFileCount: true,\n\t});\n\n\tconst successes = [] as { filePath: string; result: TResult }[];\n\tconst failures = [] as { filePath: string; error: unknown }[];\n\n\tonProgress?.({\n\t\tprocessedFilesCount: 0,\n\t\testimateFileCount,\n\t});\n\n\tawait Promise.all([...new Array(parallelCount)].map(async x => {\n\t\tlet fileToProcess = await getNextFile();\n\t\twhile (fileToProcess) {\n\t\t\tconst progressInfo = {\n\t\t\t\tprocessedFilesCount: successes.length + failures.length,\n\t\t\t\testimateFileCount,\n\t\t\t};\n\t\t\tonProgress?.(progressInfo);\n\n\t\t\ttry {\n\t\t\t\tconst result = await processFile(fileToProcess, progressInfo);\n\t\t\t\tsuccesses.push({ filePath: fileToProcess, result });\n\t\t\t} catch (err) {\n\t\t\t\tfailures.push({ filePath: fileToProcess, error: err });\n\t\t\t}\n\n\t\t\tfileToProcess = await getNextFile();\n\t\t}\n\t}));\n\n\tonProgress?.({\n\t\tprocessedFilesCount: successes.length + failures.length,\n\t\testimateFileCount,\n\t});\n\n\treturn {\n\t\tsuccesses,\n\t\tfailures,\n\t};\n};\n","import FormData from 'form-data';\nimport fs from 'fs';\nimport { readFile } from 'fs/promises';\nimport fetch from 'node-fetch';\nimport { getFileIPFSHash } from './ipfsHash';\n\nexport type PinataAuth = {\n\tpinataJwtToken: string;\n};\n\nexport type PublishFileResult = {\n\tipfsHash: string;\n};\n\nexport const publishFileToIpfs = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}): Promise<PublishFileResult> => {\n\t// The data api to check for existing file is limited to 30 requests per minute\n\t// While uploading allows 180 requests per minute\n\t// i.e. 
it's faster to just upload again\n\n\t// // Skip if already pinned\n\t// const { isPinned, ipfsHash } = await checkIfFileIsPinned({ auth, item });\n\t// if (isPinned) {\n\t// \treturn {\n\t// \t\tipfsHash,\n\t// \t};\n\t// }\n\n\tconst data = new FormData();\n\tdata.append('file', fs.createReadStream(item.filePath));\n\tdata.append(\n\t\t'pinataMetadata',\n\t\tJSON.stringify({\n\t\t\tname: item.name,\n\t\t}),\n\t);\n\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinFileToIPFS`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': `multipart/form-data; boundary=${(data as unknown as { _boundary: string })._boundary}`,\n\t\t},\n\t\tbody: data,\n\t\tmethod: 'post',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to upload '${item.name}' to ipfs ${response.statusText}`);\n\t}\n\n\tconst uploadResult = await response.json() as {\n\t\tIpfsHash: string; // This is the IPFS multi-hash provided back for your content,\n\t\tPinSize: string; // This is how large (in bytes) the content you just pinned is,\n\t\tTimestamp: string; // This is the timestamp for your content pinning (represented in ISO 8601 format)\n\t};\n\n\treturn {\n\t\tipfsHash: uploadResult.IpfsHash,\n\t};\n};\n\nconst checkIfFileIsPinned = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}) => {\n\tconst contents = await readFile(item.filePath);\n\tconst ipfsHash = await getFileIPFSHash(item.filePath);\n\n\tconst response = await fetch(`https://api.pinata.cloud/data/pinList?status=pinned&hashContains=${ipfsHash}`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t},\n\t\tmethod: 'get',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to query '${item.name}' status from pinata ${response.statusText}`);\n\t}\n\n\tconst pinResult = await response.json() as {\n\t\tcount: number;\n\t\trows: {\n\t\t\tid: string;\n\t\t\tipfs_pin_hash: string;\n\t\t\tsize: number;\n\t\t\tuser_id: string;\n\t\t\tdate_pinned: null | string;\n\t\t\tdate_unpinned: null | string;\n\t\t\tmetadata: {\n\t\t\t\tname: string;\n\t\t\t\tkeyvalues: null | string;\n\t\t\t};\n\t\t\tregions: {\n\t\t\t\tregionId: string;\n\t\t\t\tcurrentReplicationCount: number;\n\t\t\t\tdesiredReplicationCount: number;\n\t\t\t}[];\n\t\t}[];\n\t};\n\n\tconst isPinned = pinResult.rows.some(x =>\n\t\tx.ipfs_pin_hash === ipfsHash\n\t\t&& x.date_pinned\n\t\t&& !x.date_unpinned\n\t);\n\n\treturn {\n\t\tisPinned,\n\t\tipfsHash,\n\t};\n};\n\nexport const pinHash = async ({\n\tauth,\n\tipfsHash,\n}: {\n\tauth: PinataAuth;\n\tipfsHash: string;\n}) => {\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinByHash`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': 'application/json',\n\t\t},\n\t\tmethod: 'post',\n\t\tbody: JSON.stringify({\n\t\t\thashToPin: ipfsHash,\n\t\t}),\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to pin '${ipfsHash}' with pinata: ${response.statusText}`);\n\t}\n\n\t// Ok is the only response if successful\n\treturn;\n};\n","export async function delay(timeout: number): Promise<void> {\n\treturn await new Promise(resolve => {\n\t\tsetTimeout(resolve, timeout);\n\t});\n}\n\nexport const createProcessBackoffController = ({\n\tretryCount = 5,\n\ttargetRequestsPerMinute = 180,\n}: {\n\tretryCount?: number;\n\ttargetRequestsPerMinute?: number;\n}) => {\n\tlet averageTimePerRequest = 5000;\n\tlet targetTimePerRequest = 60000 / 
targetRequestsPerMinute;\n\tlet lastTime = Date.now();\n\n\tconst processWithBackoff = async <TResult>(process: () => Promise<TResult>) => {\n\t\tlet attempt = 0;\n\t\tlet lastError = undefined as unknown;\n\t\twhile (attempt < retryCount) {\n\t\t\ttry {\n\t\t\t\tlet delayTimeMs = Math.max(10, targetTimePerRequest - averageTimePerRequest);\n\n\t\t\t\t// Partially randomized delay to ensure parallel requests don't line up\n\t\t\t\tawait delay(Math.floor(delayTimeMs * (1 + 0.5 * Math.random())));\n\n\t\t\t\tconst result = await process();\n\n\t\t\t\tconst timeNow = Date.now();\n\t\t\t\tconst timeElapsed = timeNow - lastTime;\n\t\t\t\tlastTime = timeNow;\n\n\t\t\t\t// Running average\n\t\t\t\taverageTimePerRequest = averageTimePerRequest * 0.97 + timeElapsed * 0.03;\n\n\t\t\t\treturn result;\n\t\t\t} catch (err) {\n\t\t\t\tlastError = err;\n\t\t\t}\n\n\t\t\t// Quickly increase time to wait if failure (allow negatives to wait longer than target)\n\t\t\taverageTimePerRequest -= (attempt + 1) * 1000;\n\t\t\tattempt++;\n\t\t}\n\n\t\t// All attempts failed\n\t\tthrow lastError;\n\t};\n\n\treturn {\n\t\tprocessWithBackoff,\n\t};\n};\n"],"mappings":";;;;;AAAA,SAAiB,QAAQ,eAAe,YAAY;;;ACApD,SAAS,cAAqC,mBAAmB;AAGjE,OAAOA,WAAU;;;ACHjB,OAAO,QAAQ;AACf,OAAO,UAAU;AAIjB,gBAAgB,SAAS,eAA8D;AACtF,QAAM,UAAU,MAAM,GAAG,KAAK,aAAa;AAC3C,MAAI,QAAQ,OAAO,GAAG;AACrB,UAAM;AACN;AAAA,EACD;AAEA,QAAM,UAAU,MAAM,GAAG,QAAQ,eAAe,EAAE,eAAe,KAAK,CAAC;AACvE,aAAW,UAAU,SAAS;AAC7B,UAAM,MAAM,KAAK,QAAQ,eAAe,OAAO,IAAI;AACnD,QAAI,OAAO,YAAY,GAAG;AACzB,aAAO,SAAS,GAAG;AAAA,IACpB,OAAO;AACN,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEA,IAAM,qBAAqB,OAAO;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AACD,MAIM;AACL,kBAAgB,KAAK,QAAQ,aAAa;AAC1C,QAAM,WAAW,MAAM,GAAG,KAAK,aAAa;AAC5C,MACC,CAAC,SAAS,OAAO,KACd,CAAC,SAAS,YAAY,GACxB;AACD,UAAM,IAAI,MAAM,aAAa,aAAa,8BAA8B;AAAA,EACzE;AAEA,MAAI,oBAAoB;AACxB,MAAI,yBAAyB;AAC5B,wBAAoB;AACpB,qBAAiB,YAAY,SAAS,aAAa,GAAG;AACrD,UAAI,UAAU,CAAC,OAAO,QAAQ,GAAG;AAChC;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAEA,QAAM,gBAAgB,SAAS,aAAa;AAC5C,QAAM,cAAc,YAAY;AAC/B,QAAI,YAAY,MAAM,cAAc,KAAK,GAAG;AAC5C,QAAI,CAAC,QAAQ;AACZ,aAAO;AAAA,IACR;AAEA,WAAO,YAAY,CAAC,OAAO,QAAQ,GAAG;AACrC,iBAAW,MAAM,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACR;AACA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AAGO,IAAM,eAAe,OAAgB;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAChB;AACD,MAMM;AACL,QAAM,EAAE,aAAa,kBAAkB,IAAI,MAAM,mBAAmB;AAAA,IACnE;AAAA,IACA;AAAA,IACA,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,YAAY,CAAC;AACnB,QAAM,WAAW,CAAC;AAElB,2CAAa;AAAA,IACZ,qBAAqB;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,QAAQ,IAAI,CAAC,GAAG,IAAI,MAAM,aAAa,CAAC,EAAE,IAAI,OAAM,MAAK;AAC9D,QAAI,gBAAgB,MAAM,YAAY;AACtC,WAAO,eAAe;AACrB,YAAM,eAAe;AAAA,QACpB,qBAAqB,UAAU,SAAS,SAAS;AAAA,QACjD;AAAA,MACD;AACA,+CAAa;AAEb,UAAI;AACH,cAAM,SAAS,MAAM,YAAY,eAAe,YAAY;AAC5D,kBAAU,KAAK,EAAE,UAAU,eAAe,OAAO,CAAC;AAAA,MACnD,SAAS,KAAK;AACb,iBAAS,KAAK,EAAE,UAAU,eAAe,OAAO,IAAI,CAAC;AAAA,MACtD;AAEA,sBAAgB,MAAM,YAAY;AAAA,IACnC;AAAA,EACD,CAAC,CAAC;AAEF,2CAAa;AAAA,IACZ,qBAAqB,UAAU,SAAS,SAAS;AAAA,IACjD;AAAA,EACD;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;;;AChIA,OAAO,cAAc;AACrB,OAAOC,SAAQ;AAEf,OAAO,WAAW;AAWX,IAAM,oBAAoB,OAAO;AAAA,EACvC;AAAA,EACA;AACD,MAMkC;AAajC,QAAM,OAAO,IAAI,SAAS;AAC1B,OAAK,OAAO,QAAQC,IAAG,iBAAiB,KAAK,QAAQ,CAAC;AACtD,OAAK;AAAA,IACJ;AAAA,IACA,KAAK,UAAU;AAAA,MACd,MAAM,KAAK;AAAA,IACZ,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,MAAM,kDAAkD;AAAA,IAC9E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB,iCAAkC,KAA0C,SAAS;AAAA,IACtG;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,EACT,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,qBAAqB,KAAK,IAAI,aAAa,SAAS,UAAU,EAAE;AA
AA,EACjF;AAEA,QAAM,eAAe,MAAM,SAAS,KAAK;AAMzC,SAAO;AAAA,IACN,UAAU,aAAa;AAAA,EACxB;AACD;AA2DO,IAAM,UAAU,OAAO;AAAA,EAC7B;AAAA,EACA;AACD,MAGM;AACL,QAAM,WAAW,MAAM,MAAM,8CAA8C;AAAA,IAC1E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,IACR,MAAM,KAAK,UAAU;AAAA,MACpB,WAAW;AAAA,IACZ,CAAC;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,kBAAkB,QAAQ,kBAAkB,SAAS,UAAU,EAAE;AAAA,EAClF;AAGA;AACD;;;ACtJA,eAAsB,MAAM,SAAgC;AAC3D,SAAO,MAAM,IAAI,QAAQ,aAAW;AACnC,eAAW,SAAS,OAAO;AAAA,EAC5B,CAAC;AACF;AAEO,IAAM,iCAAiC,CAAC;AAAA,EAC9C,aAAa;AAAA,EACb,0BAA0B;AAC3B,MAGM;AACL,MAAI,wBAAwB;AAC5B,MAAI,uBAAuB,MAAQ;AACnC,MAAI,WAAW,KAAK,IAAI;AAExB,QAAM,qBAAqB,OAAgBC,aAAoC;AAC9E,QAAI,UAAU;AACd,QAAI,YAAY;AAChB,WAAO,UAAU,YAAY;AAC5B,UAAI;AACH,YAAI,cAAc,KAAK,IAAI,IAAI,uBAAuB,qBAAqB;AAG3E,cAAM,MAAM,KAAK,MAAM,eAAe,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;AAE/D,cAAM,SAAS,MAAMA,SAAQ;AAE7B,cAAM,UAAU,KAAK,IAAI;AACzB,cAAM,cAAc,UAAU;AAC9B,mBAAW;AAGX,gCAAwB,wBAAwB,OAAO,cAAc;AAErE,eAAO;AAAA,MACR,SAAS,KAAK;AACb,oBAAY;AAAA,MACb;AAGA,gCAA0B,UAAU,KAAK;AACzC;AAAA,IACD;AAGA,UAAM;AAAA,EACP;AAEA,SAAO;AAAA,IACN;AAAA,EACD;AACD;;;AH3CA,OAAO;AAgBP,IAAM,gBAAgB,OAAO,eAAmC,SAA8C;AAC7G,MAAI,CAAC,eAAe;AACnB,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACxC;AAKA,QAAM,EAAE,mBAAmB,IAAI,+BAA+B;AAAA,IAC7D,YAAY;AAAA,IACZ,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,SAAS,MAAM,aAAa;AAAA,IACjC;AAAA,IACA,eAAe;AAAA,IACf,aAAa,OAAM,aAAY;AAI9B,aAAO;AAAA,QAAmB,MACzB,kBAAkB;AAAA,UACjB;AAAA,UACA,MAAM,EAAE,UAAU,MAAMC,MAAK,SAAS,QAAQ,EAAE;AAAA,QACjD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,IACA,YAAY,CAAC,EAAE,qBAAqB,kBAAkB,MAAM;AAC3D,UAAI,qBAAqB,sBAAsB,IAAI;AAClD,YAAI,QAAQ,sBAAsB;AAClC,YAAI,QAAQ,EAAG,SAAQ;AAAA,MAIxB;AAAA,IACD;AAAA,EACD,CAAC;AAOD,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM;AAAA,MACL,GAAG,OAAO,SAAS,IAAI,OAAE;AAxE5B;AAwEgC;AAAA,UAC5B,KAAK;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,UAAU;AAAA,UACV,SAAQ,OAAE,UAAF,mBAAkC,YAAW,KAAK,UAAU,EAAE,KAAK;AAAA,QAC5E;AAAA,OAAE;AAAA,MACF,GAAG,OAAO,UAAU,IAAI,QAAM;AAAA,QAC7B,KAAK;AAAA,QACL,UAAU,EAAE;AAAA,QACZ,UAAU,EAAE,OAAO;AAAA,QACnB,OAAO;AAAA,MACR,EAAE;AAAA,IACH;AAAA,EACD;AACD;AAEA,IAAM,YAAY,OAAO,MAA0B,SAA8C;AAChG,MAAI,CAAC,MAAM;AACV,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC7C;AAEA,QAAM,QAAQ,EAAE,UAAU,MAAM,KAAK,CAAC;AAEtC,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,CAAC,EAAE,UAAU,KAAK,CAAC;AAAA,EAC1B;AACD;AAEA,IAAM,UAAU,OAAO,SAAwC;AAC9D,QAAM;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,IACA;AAAA,EACD,IAAI;AAEJ,QAAM,OAAmB;AAAA;AAAA;AAAA,IAGxB,gBAAgB,QAAQ,IAAI,gBAAgB;AAAA,EAC7C;AAEA,MAAI,CAAC,KAAK,gBAAgB;AACzB,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC3E;AAEA,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,cAAcA,OAAM,IAAI;AAAA,IAChC,KAAK;AACJ,aAAO,UAAU,MAAM,IAAI;AAAA,IAC5B;AACC,YAAM,IAAI,MAAM,GAAG,IAAI,sDAAsD;AAAA,EAC/E;AACD;AAEA,IAAO,gBAAQ,OAAO,SAAiD;AACtE,QAAM,OAAO;AAEb,MAAI;AACH,UAAM,YAAY,MAAM,QAAQ,IAAI;AAGpC,UAAM,SAAU,UAAU,YAAa,UAAU,OAAO;AACxD,WAAO,YAAY,MAAM;AAAA,EAC1B,SAAS,KAAK;AACb,UAAM,QAAQ;AACd,QAAI,MAAM,SAAS;AAClB,aAAO,aAAa,MAAM,OAAO;AAAA,IAClC;AAAA,EACD;AACD;;;AD3IA,OAAO,OAAO,OAAO;AAAA,EACpB,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,IACN,KAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,cAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA,IACD,KAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,cAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EACA;AACD,IAAI,QAAQ,IAAI;","names":["path","fs","fs","process","path"]}
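The bundled sources embedded in the source maps above also show how the plugin paces its Pinata requests: createProcessBackoffController (src/utils.ts) targets 180 requests per minute, keeps a running average of time per request, adds partially randomized delays so parallel workers do not line up, and retries up to five times while backing off further after each failure. The following is an illustrative, self-contained sketch of that pattern; the names createBackoff, run, and work are placeholders and not part of the package's public API.

// Sketch of the rate-limit/backoff pattern used internally in src/utils.ts.
const delay = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

const createBackoff = ({ retryCount = 5, targetRequestsPerMinute = 180 } = {}) => {
	let averageTimePerRequest = 5000;
	const targetTimePerRequest = 60000 / targetRequestsPerMinute;
	let lastTime = Date.now();

	const run = async <T>(work: () => Promise<T>): Promise<T> => {
		let lastError: unknown;
		for (let attempt = 0; attempt < retryCount; attempt++) {
			try {
				// Wait long enough to stay near the target rate, with jitter so parallel workers spread out.
				const delayMs = Math.max(10, targetTimePerRequest - averageTimePerRequest);
				await delay(Math.floor(delayMs * (1 + 0.5 * Math.random())));

				const result = await work();

				// Keep a running average of the observed time between requests.
				const now = Date.now();
				averageTimePerRequest = averageTimePerRequest * 0.97 + (now - lastTime) * 0.03;
				lastTime = now;
				return result;
			} catch (err) {
				lastError = err;
				// Lower the average (which lengthens the next delay) after each failed attempt.
				averageTimePerRequest -= (attempt + 1) * 1000;
			}
		}
		throw lastError;
	};

	return { run };
};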
package/package.json CHANGED
@@ -1,80 +1,72 @@
1
1
  {
2
- "name": "@taqueria/plugin-ipfs-pinata",
3
- "version": "0.56.15",
4
- "description": "A plugin for Taqueria providing ipfs publishing and pinning using the Pinata service",
5
- "keywords": [
6
- "taqueria",
7
- "plugin",
8
- "jest",
9
- "testing",
10
- "tdd",
11
- "pinnaclelabs",
12
- "pinnacle-labs",
13
- "tezos"
14
- ],
15
- "targets": {
16
- "default": {
17
- "source": "./index.ts",
18
- "distDir": "./",
19
- "context": "node",
20
- "isLibrary": true,
21
- "outputFormat": "esmodule"
22
- }
23
- },
24
- "scripts": {
25
- "build": "npx tsc -noEmit -p ./tsconfig.json && npx tsup"
26
- },
27
- "author": "Taqueria",
28
- "license": "Apache-2.0",
29
- "type": "module",
30
- "repository": {
31
- "type": "git",
32
- "url": "https://github.com/tezostaqueria/taqueria.git",
33
- "directory": "taqueria-plugin-ipfs-pinata"
34
- },
35
- "dependencies": {
36
- "@helia/unixfs": "^3.0.0",
37
- "@taqueria/node-sdk": "^0.56.15",
38
- "dotenv": "^16.4.1",
39
- "form-data": "^4.0.0",
40
- "helia": "^4.0.0",
41
- "node-fetch": "^3.3.2"
42
- },
43
- "devDependencies": {
44
- "@types/node-fetch": "^2.6.11",
45
- "tsup": "^8.0.1",
46
- "typescript": "^5.3.3"
47
- },
48
- "exports": {
49
- ".": {
50
- "import": "./index.js",
51
- "require": "./index.cjs",
52
- "node": "./index.cjs",
53
- "default": "./index.js"
54
- },
55
- "./src/ipfsHash": {
56
- "import": "./src/ipfsHash.js",
57
- "require": "./src/ipfsHash.cjs",
58
- "node": "./src/ipfsHash.cjs",
59
- "default": "./src/ipfsHash.js"
60
- }
61
- },
62
- "tsup": {
63
- "entry": [
64
- "index.ts",
65
- "src/ipfsHash.ts"
66
- ],
67
- "sourcemap": true,
68
- "target": "node16",
69
- "outDir": "./",
70
- "dts": true,
71
- "clean": false,
72
- "skipNodeModulesBundle": false,
73
- "platform": "node",
74
- "format": [
75
- "esm",
76
- "cjs"
77
- ]
78
- },
79
- "gitHead": "ff58a2fc06ad233869ad6be574093c8b3b272e2e"
80
- }
2
+ "name": "@taqueria/plugin-ipfs-pinata",
3
+ "version": "0.57.8",
4
+ "description": "A plugin for Taqueria providing ipfs publishing and pinning using the Pinata service",
5
+ "keywords": [
6
+ "taqueria",
7
+ "plugin",
8
+ "jest",
9
+ "testing",
10
+ "tdd",
11
+ "pinnaclelabs",
12
+ "pinnacle-labs",
13
+ "tezos"
14
+ ],
15
+ "targets": {
16
+ "default": {
17
+ "source": "./index.ts",
18
+ "distDir": "./",
19
+ "context": "node",
20
+ "isLibrary": true,
21
+ "outputFormat": "esmodule"
22
+ }
23
+ },
24
+ "author": "Taqueria",
25
+ "license": "Apache-2.0",
26
+ "type": "module",
27
+ "repository": {
28
+ "type": "git",
29
+ "url": "https://github.com/tezostaqueria/taqueria.git",
30
+ "directory": "taqueria-plugin-ipfs-pinata"
31
+ },
32
+ "dependencies": {
33
+ "dotenv": "^16.4.5",
34
+ "form-data": "^4.0.0",
35
+ "node-fetch": "^3.3.2",
36
+ "@taqueria/node-sdk": "0.57.8"
37
+ },
38
+ "devDependencies": {
39
+ "@types/node-fetch": "^2.6.11",
40
+ "tsup": "^8.3.0",
41
+ "typescript": "^5.6.2"
42
+ },
43
+ "exports": {
44
+ ".": {
45
+ "default": "./index.js"
46
+ },
47
+ "./src/ipfsHash": {
48
+ "default": "./src/ipfsHash.js"
49
+ }
50
+ },
51
+ "tsup": {
52
+ "entry": [
53
+ "index.ts",
54
+ "src/ipfsHash.ts"
55
+ ],
56
+ "sourcemap": true,
57
+ "target": "node16",
58
+ "outDir": "./",
59
+ "dts": true,
60
+ "clean": false,
61
+ "skipNodeModulesBundle": false,
62
+ "platform": "node",
63
+ "format": [
64
+ "esm",
65
+ "cjs"
66
+ ]
67
+ },
68
+ "gitHead": "ff58a2fc06ad233869ad6be574093c8b3b272e2e",
69
+ "scripts": {
70
+ "build": "npx tsc -noEmit -p ./tsconfig.json && npx tsup"
71
+ }
72
+ }
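Two consumer-visible changes stand out in the manifest: the Helia packages (helia and @helia/unixfs) are no longer dependencies, and the conditional "import"/"require"/"node" export entries have been collapsed to a single "default" entry pointing at the ESM build, even though the .cjs builds are still shipped (see package/src/ipfsHash.cjs below). For an ESM consumer, using the subpath export would presumably look like the sketch below; the file path is a hypothetical example.

// ESM usage of the published subpath export (the path argument here is hypothetical).
import { getFileIPFSHash } from '@taqueria/plugin-ipfs-pinata/src/ipfsHash';

const digest = await getFileIPFSHash('./artifacts/contract.tz');
console.log(digest); // as of 0.57.8 this is a 64-character SHA-256 hex digest, not an IPFS CID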
package/src/ipfsHash.cjs CHANGED
@@ -24,18 +24,12 @@ __export(ipfsHash_exports, {
24
24
  getFileIPFSHash: () => getFileIPFSHash
25
25
  });
26
26
  module.exports = __toCommonJS(ipfsHash_exports);
27
- var import_unixfs = require("@helia/unixfs");
27
+ var import_crypto = require("crypto");
28
28
  var import_promises = require("fs/promises");
29
- var import_helia = require("helia");
30
29
  async function getFileIPFSHash(filePath) {
31
- const helia = await (0, import_helia.createHelia)();
32
- const fs = (0, import_unixfs.unixfs)(helia);
33
- const cid = fs.addFile({
34
- path: filePath,
35
- content: await (0, import_promises.readFile)(filePath)
36
- });
37
- await helia.stop();
38
- return cid.toString();
30
+ const fileContent = await (0, import_promises.readFile)(filePath);
31
+ const hash = (0, import_crypto.createHash)("sha256").update(fileContent).digest("hex");
32
+ return hash;
39
33
  }
40
34
  var ipfsHash_default = getFileIPFSHash;
41
35
  // Annotate the CommonJS export names for ESM import in node:
@@ -1 +1 @@
1
- {"version":3,"sources":["ipfsHash.ts"],"sourcesContent":["import { unixfs } from '@helia/unixfs';\nimport { readFile } from 'fs/promises';\nimport { createHelia } from 'helia';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// create a Helia node\n\tconst helia = await createHelia();\n\n\t// create a filesystem on top of Helia, in this case it's UnixFS\n\tconst fs = unixfs(helia);\n\n\t// Create a text encoder and encode the contents of the file\n\t// into a Uint8Array.\n\tconst cid = fs.addFile({\n\t\tpath: filePath,\n\t\tcontent: await readFile(filePath),\n\t});\n\n\tawait helia.stop();\n\n\treturn cid.toString();\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,sBAAyB;AACzB,mBAA4B;AAE5B,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,QAAQ,UAAM,0BAAY;AAGhC,QAAM,SAAK,sBAAO,KAAK;AAIvB,QAAM,MAAM,GAAG,QAAQ;AAAA,IACtB,MAAM;AAAA,IACN,SAAS,UAAM,0BAAS,QAAQ;AAAA,EACjC,CAAC;AAED,QAAM,MAAM,KAAK;AAEjB,SAAO,IAAI,SAAS;AACrB;AAEA,IAAO,mBAAQ;","names":[]}
1
+ {"version":3,"sources":["ipfsHash.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(fileContent).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA2B;AAC3B,sBAAyB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,UAAM,0BAAS,QAAQ;AAG3C,QAAM,WAAO,0BAAW,QAAQ,EAAE,OAAO,WAAW,EAAE,OAAO,KAAK;AAGlE,SAAO;AACR;AAEA,IAAO,mBAAQ;","names":[]}
@@ -0,0 +1,3 @@
1
+ declare function getFileIPFSHash(filePath: string): Promise<string>;
2
+
3
+ export { getFileIPFSHash as default, getFileIPFSHash };
package/src/ipfsHash.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import {
2
2
  getFileIPFSHash,
3
3
  ipfsHash_default
4
- } from "../chunk-PF3FHOWY.js";
4
+ } from "../chunk-6OD7MDAL.js";
5
5
  export {
6
6
  ipfsHash_default as default,
7
7
  getFileIPFSHash
package/src/ipfsHash.ts CHANGED
@@ -1,24 +1,15 @@
1
- import { unixfs } from '@helia/unixfs';
1
+ import { createHash } from 'crypto';
2
2
  import { readFile } from 'fs/promises';
3
- import { createHelia } from 'helia';
4
3
 
5
4
  export async function getFileIPFSHash(filePath: string): Promise<string> {
6
- // create a Helia node
7
- const helia = await createHelia();
5
+ // Read the file contents
6
+ const fileContent = await readFile(filePath);
8
7
 
9
- // create a filesystem on top of Helia, in this case it's UnixFS
10
- const fs = unixfs(helia);
8
+ // Create a SHA-256 hash of the file contents
9
+ const hash = createHash('sha256').update(fileContent).digest('hex');
11
10
 
12
- // Create a text encoder and encode the contents of the file
13
- // into a Uint8Array.
14
- const cid = fs.addFile({
15
- path: filePath,
16
- content: await readFile(filePath),
17
- });
18
-
19
- await helia.stop();
20
-
21
- return cid.toString();
11
+ // Return the hash as a string
12
+ return hash;
22
13
  }
23
14
 
24
15
  export default getFileIPFSHash;
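Note that the semantics of getFileIPFSHash change with this diff: the 0.56.15 implementation attempted to derive a UnixFS CID via a Helia node, while 0.57.8 returns a SHA-256 hex digest of the raw file bytes. A hex digest is not an IPFS CID (CIDs are multibase-encoded multihashes, typically starting with "Qm" or "b"), so the returned value will no longer match the IpfsHash that Pinata reports for the same content; the pinList lookup in checkIfFileIsPinned, which is currently commented out of the upload path, appears to depend on that matching. A minimal sketch of what the new function computes (sha256Hex is an illustrative name, not the package's export):

import { createHash } from 'crypto';
import { readFile } from 'fs/promises';

// Equivalent of the 0.57.8 getFileIPFSHash: a SHA-256 hex digest of the file contents.
async function sha256Hex(filePath: string): Promise<string> {
	const contents = await readFile(filePath);
	return createHash('sha256').update(contents).digest('hex');
}

// For an empty file this yields
// "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855".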
package/tsconfig.json CHANGED
@@ -35,7 +35,8 @@
35
35
  // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */
36
36
  // "types": [], /* Specify type package names to be included without being referenced in a source file. */
37
37
  // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
38
- // "resolveJsonModule": true, /* Enable importing .json files */
38
+ "resolveJsonModule": true,
39
+ "allowImportingTsExtensions": true,
39
40
  // "noResolve": true, /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */
40
41
 
41
42
  /* JavaScript Support */
@@ -99,5 +100,10 @@
99
100
  /* Completeness */
100
101
  // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
101
102
  "skipLibCheck": true /* Skip type checking all .d.ts files. */
102
- }
103
+ },
104
+ "references": [
105
+ {
106
+ "path": "../taqueria-sdk"
107
+ }
108
+ ]
103
109
  }
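The compiler options added here line up with the build flow in package.json: resolveJsonModule and allowImportingTsExtensions are switched on, and a project reference to ../taqueria-sdk is added. allowImportingTsExtensions only type-checks when emit is disabled, which fits the "tsc -noEmit ... && tsup" build script; it allows source files to reference each other with an explicit .ts extension, as in the illustrative import below (the package's own sources currently use extensionless specifiers).

// Hypothetical example of what allowImportingTsExtensions permits: tsc only type-checks
// (noEmit), and the bundler resolves the .ts specifier at build time.
import { getFileIPFSHash } from './src/ipfsHash.ts';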
package/chunk-PF3FHOWY.js DELETED
@@ -1,21 +0,0 @@
1
- // src/ipfsHash.ts
2
- import { unixfs } from "@helia/unixfs";
3
- import { readFile } from "fs/promises";
4
- import { createHelia } from "helia";
5
- async function getFileIPFSHash(filePath) {
6
- const helia = await createHelia();
7
- const fs = unixfs(helia);
8
- const cid = fs.addFile({
9
- path: filePath,
10
- content: await readFile(filePath)
11
- });
12
- await helia.stop();
13
- return cid.toString();
14
- }
15
- var ipfsHash_default = getFileIPFSHash;
16
-
17
- export {
18
- getFileIPFSHash,
19
- ipfsHash_default
20
- };
21
- //# sourceMappingURL=chunk-PF3FHOWY.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["src/ipfsHash.ts"],"sourcesContent":["import { unixfs } from '@helia/unixfs';\nimport { readFile } from 'fs/promises';\nimport { createHelia } from 'helia';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// create a Helia node\n\tconst helia = await createHelia();\n\n\t// create a filesystem on top of Helia, in this case it's UnixFS\n\tconst fs = unixfs(helia);\n\n\t// Create a text encoder and encode the contents of the file\n\t// into a Uint8Array.\n\tconst cid = fs.addFile({\n\t\tpath: filePath,\n\t\tcontent: await readFile(filePath),\n\t});\n\n\tawait helia.stop();\n\n\treturn cid.toString();\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";AAAA,SAAS,cAAc;AACvB,SAAS,gBAAgB;AACzB,SAAS,mBAAmB;AAE5B,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,QAAQ,MAAM,YAAY;AAGhC,QAAM,KAAK,OAAO,KAAK;AAIvB,QAAM,MAAM,GAAG,QAAQ;AAAA,IACtB,MAAM;AAAA,IACN,SAAS,MAAM,SAAS,QAAQ;AAAA,EACjC,CAAC;AAED,QAAM,MAAM,KAAK;AAEjB,SAAO,IAAI,SAAS;AACrB;AAEA,IAAO,mBAAQ;","names":[]}