@taqueria/plugin-ipfs-pinata 0.73.12 → 0.78.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/{chunk-6OD7MDAL.js → chunk-YUY6NFES.js} +2 -2
- package/{chunk-6OD7MDAL.js.map → chunk-YUY6NFES.js.map} +1 -1
- package/index.cjs +1 -1
- package/index.cjs.map +1 -1
- package/index.js +1 -1
- package/package.json +7 -7
- package/src/ipfsHash.cjs +1 -1
- package/src/ipfsHash.cjs.map +1 -1
- package/src/ipfsHash.js +1 -1
- package/src/ipfsHash.ts +1 -1
package/{chunk-6OD7MDAL.js → chunk-YUY6NFES.js}
@@ -3,7 +3,7 @@ import { createHash } from "crypto";
 import { readFile } from "fs/promises";
 async function getFileIPFSHash(filePath) {
   const fileContent = await readFile(filePath);
-  const hash = createHash("sha256").update(fileContent).digest("hex");
+  const hash = createHash("sha256").update(new Uint8Array(fileContent)).digest("hex");
   return hash;
 }
 var ipfsHash_default = getFileIPFSHash;
@@ -12,4 +12,4 @@ export {
   getFileIPFSHash,
   ipfsHash_default
 };
-//# sourceMappingURL=chunk-6OD7MDAL.js.map
+//# sourceMappingURL=chunk-YUY6NFES.js.map
package/{chunk-6OD7MDAL.js.map → chunk-YUY6NFES.js.map}
@@ -1 +1 @@
-
{"version":3,"sources":["src/ipfsHash.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(fileContent).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";AAAA,SAAS,kBAAkB;AAC3B,SAAS,gBAAgB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,MAAM,SAAS,QAAQ;AAG3C,QAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,WAAW,EAAE,OAAO,KAAK;
+
{"version":3,"sources":["src/ipfsHash.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(new Uint8Array(fileContent)).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";AAAA,SAAS,kBAAkB;AAC3B,SAAS,gBAAgB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,MAAM,SAAS,QAAQ;AAG3C,QAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,IAAI,WAAW,WAAW,CAAC,EAAE,OAAO,KAAK;AAGlF,SAAO;AACR;AAEA,IAAO,mBAAQ;","names":[]}
package/index.cjs CHANGED
@@ -40,7 +40,7 @@ var import_crypto = require("crypto");
 var import_promises = require("fs/promises");
 async function getFileIPFSHash(filePath) {
   const fileContent = await (0, import_promises.readFile)(filePath);
-  const hash = (0, import_crypto.createHash)("sha256").update(fileContent).digest("hex");
+  const hash = (0, import_crypto.createHash)("sha256").update(new Uint8Array(fileContent)).digest("hex");
   return hash;
 }
 
package/index.cjs.map CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["index.ts","src/ipfsHash.ts","src/proxy.ts","src/file-processing.ts","src/pinata-api.ts","src/utils.ts"],"sourcesContent":["import { Option, Plugin, PositionalArg, Task } from '@taqueria/node-sdk';\nexport { getFileIPFSHash } from './src/ipfsHash';\nimport proxy from './src/proxy';\n\nPlugin.create(() => ({\n\tschema: '0.1',\n\tversion: '0.4.0',\n\talias: 'pinata',\n\ttasks: [\n\t\tTask.create({\n\t\t\ttask: 'publish',\n\t\t\tcommand: 'publish [path]',\n\t\t\tdescription: 'Upload and pin files using your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'path',\n\t\t\t\t\tdescription: 'Directory or file path to publish',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t\tencoding: 'json',\n\t\t}),\n\t\tTask.create({\n\t\t\ttask: 'pin',\n\t\t\tcommand: 'pin [hash]',\n\t\t\tdescription: 'Pin a file already on ipfs with your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'hash',\n\t\t\t\t\tdescription: 'Ipfs hash of the file or directory that is already on the ipfs network.',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t}),\n\t],\n\tproxy,\n}), process.argv);\n","import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(fileContent).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n","import { sendAsyncErr, sendAsyncRes, sendErr, sendJsonRes } from '@taqueria/node-sdk';\nimport { RequestArgs } from '@taqueria/node-sdk';\nimport { LoadedConfig, SanitizedAbsPath } from '@taqueria/node-sdk/types';\nimport path from 'path';\nimport { processFiles } from './file-processing';\nimport { PinataAuth, pinHash, publishFileToIpfs } from './pinata-api';\nimport { createProcessBackoffController } from './utils';\n\n// Load .env for jwt token\n// TODO: How should this be stored in a secure way?\nimport 'dotenv/config';\n\n// TODO: What should this be, it was removed from the sdk\ntype PluginResponse =\n\t| void\n\t| {\n\t\trender: 'table';\n\t\tdata: unknown[];\n\t};\n\ninterface Opts extends RequestArgs.t {\n\treadonly path?: string;\n\treadonly hash?: string;\n\treadonly task?: string;\n}\n\nconst publishToIpfs = async (fileOrDirPath: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!fileOrDirPath) {\n\t\tthrow new Error(`path was not provided`);\n\t}\n\n\t// Pinata is limited to 180 requests per minute\n\t// So for the first 180 requests they can go fast\n\n\tconst { processWithBackoff } = createProcessBackoffController({\n\t\tretryCount: 5,\n\t\ttargetRequestsPerMinute: 180,\n\t});\n\n\tconst result = await processFiles({\n\t\tfileOrDirPath,\n\t\tparallelCount: 10,\n\t\tprocessFile: async filePath => {\n\t\t\t// // TEMP: Debug\n\t\t\t// console.log(`publishing: ${filePath}`);\n\n\t\t\treturn processWithBackoff(() =>\n\t\t\t\tpublishFileToIpfs({\n\t\t\t\t\tauth,\n\t\t\t\t\titem: { filePath, name: path.basename(filePath) },\n\t\t\t\t})\n\t\t\t);\n\t\t},\n\t\tonProgress: ({ processedFilesCount, estimateFileCount }) => {\n\t\t\tif (estimateFileCount && processedFilesCount % 10) {\n\t\t\t\tlet ratio = processedFilesCount / 
estimateFileCount;\n\t\t\t\tif (ratio > 1) ratio = 1;\n\n\t\t\t\t// // TODO: Call task sdk progress\n\t\t\t\t// console.log(`Progress: ${(ratio * 100).toFixed(0)}%`);\n\t\t\t}\n\t\t},\n\t});\n\n\t// // TEMP: DEBUG: Show error\n\t// if (result.failures.length) {\n\t// \tconsole.log('❗ Failures:\\n' + result.failures.map(f => `${f.filePath}: ${f.error}`).join('\\n'));\n\t// }\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [\n\t\t\t...result.failures.map(x => ({\n\t\t\t\t'?': '❌',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: undefined,\n\t\t\t\terror: (x.error as { message?: string })?.message ?? JSON.stringify(x.error),\n\t\t\t})),\n\t\t\t...result.successes.map(x => ({\n\t\t\t\t'?': '✔',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: x.result.ipfsHash,\n\t\t\t\terror: undefined,\n\t\t\t})),\n\t\t],\n\t};\n};\n\nconst pinToIpfs = async (hash: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!hash) {\n\t\tthrow new Error(`ipfs hash was not provided`);\n\t}\n\n\tawait pinHash({ ipfsHash: hash, auth });\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [{ ipfsHash: hash }],\n\t};\n};\n\nconst execute = async (opts: Opts): Promise<PluginResponse> => {\n\tconst {\n\t\ttask,\n\t\tpath,\n\t\thash,\n\t} = opts;\n\n\tconst auth: PinataAuth = {\n\t\t// TODO: Where should this be stored?\n\t\t// pinataJwtToken: (config as Record<string, any>).credentials.pinataJwtToken,\n\t\tpinataJwtToken: process.env['pinataJwtToken'] as string,\n\t};\n\n\tif (!auth.pinataJwtToken) {\n\t\tthrow new Error(`The 'credentials.pinataJwtToken' was not found in config`);\n\t}\n\n\tswitch (task) {\n\t\tcase 'publish':\n\t\t\treturn publishToIpfs(path, auth);\n\t\tcase 'pin':\n\t\t\treturn pinToIpfs(hash, auth);\n\t\tdefault:\n\t\t\tthrow new Error(`${task} is not an understood task by the ipfs-pinata plugin`);\n\t}\n};\n\nexport default async (args: RequestArgs.t): Promise<PluginResponse> => {\n\tconst opts = args as Opts;\n\n\ttry {\n\t\tconst resultRaw = await execute(opts) as Record<string, unknown>;\n\t\t// TODO: Fix deno parsing\n\t\t// Without this, `data.reduce is not a function`\n\t\tconst result = ('data' in resultRaw) ? 
resultRaw.data : resultRaw;\n\t\treturn sendJsonRes(result);\n\t} catch (err) {\n\t\tconst error = err as Error;\n\t\tif (error.message) {\n\t\t\treturn sendAsyncErr(error.message);\n\t\t}\n\t}\n};\n","import fs from 'fs/promises';\nimport path from 'path';\n\n// Async generator\n// https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search\nasync function* getFiles(fileOrDirPath: string): AsyncGenerator<string, void, unknown> {\n\tconst dirInfo = await fs.stat(fileOrDirPath);\n\tif (dirInfo.isFile()) {\n\t\tyield fileOrDirPath;\n\t\treturn;\n\t}\n\n\tconst dirents = await fs.readdir(fileOrDirPath, { withFileTypes: true });\n\tfor (const dirent of dirents) {\n\t\tconst res = path.resolve(fileOrDirPath, dirent.name);\n\t\tif (dirent.isDirectory()) {\n\t\t\tyield* getFiles(res);\n\t\t} else {\n\t\t\tyield res;\n\t\t}\n\t}\n}\n\nconst createFileProvider = async ({\n\tfileOrDirPath,\n\tfilter,\n\tshouldEstimateFileCount,\n}: {\n\tfileOrDirPath: string;\n\tfilter?: (filePath: string) => boolean;\n\tshouldEstimateFileCount?: boolean;\n}) => {\n\tfileOrDirPath = path.resolve(fileOrDirPath);\n\tconst pathInfo = await fs.stat(fileOrDirPath);\n\tif (\n\t\t!pathInfo.isFile()\n\t\t&& !pathInfo.isDirectory()\n\t) {\n\t\tthrow new Error(`The path '${fileOrDirPath}' is not a file or directory`);\n\t}\n\n\tlet estimateFileCount = undefined as undefined | number;\n\tif (shouldEstimateFileCount) {\n\t\testimateFileCount = 0;\n\t\tfor await (const filePath of getFiles(fileOrDirPath)) {\n\t\t\tif (filter && !filter(filePath)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\testimateFileCount++;\n\t\t}\n\t}\n\n\tconst fileGenerator = getFiles(fileOrDirPath);\n\tconst getNextFile = async () => {\n\t\tlet nextFile = (await fileGenerator.next()).value;\n\t\tif (!filter) {\n\t\t\treturn nextFile;\n\t\t}\n\n\t\twhile (nextFile && !filter(nextFile)) {\n\t\t\tnextFile = await getNextFile();\n\t\t}\n\n\t\treturn nextFile;\n\t};\n\treturn {\n\t\tgetNextFile,\n\t\testimateFileCount,\n\t};\n};\n\ntype ProgressInfo = { processedFilesCount: number; estimateFileCount: undefined | number };\nexport const processFiles = async <TResult>({\n\tfileOrDirPath,\n\tprocessFile,\n\tfilter,\n\tparallelCount = 10,\n\tonProgress,\n}: {\n\tfileOrDirPath: string;\n\tprocessFile: (filePath: string, progress: ProgressInfo) => Promise<TResult>;\n\tfilter?: (filePath: string) => boolean;\n\tparallelCount?: number;\n\tonProgress?: (progress: ProgressInfo) => void;\n}) => {\n\tconst { getNextFile, estimateFileCount } = await createFileProvider({\n\t\tfileOrDirPath,\n\t\tfilter,\n\t\tshouldEstimateFileCount: true,\n\t});\n\n\tconst successes = [] as { filePath: string; result: TResult }[];\n\tconst failures = [] as { filePath: string; error: unknown }[];\n\n\tonProgress?.({\n\t\tprocessedFilesCount: 0,\n\t\testimateFileCount,\n\t});\n\n\tawait Promise.all([...new Array(parallelCount)].map(async x => {\n\t\tlet fileToProcess = await getNextFile();\n\t\twhile (fileToProcess) {\n\t\t\tconst progressInfo = {\n\t\t\t\tprocessedFilesCount: successes.length + failures.length,\n\t\t\t\testimateFileCount,\n\t\t\t};\n\t\t\tonProgress?.(progressInfo);\n\n\t\t\ttry {\n\t\t\t\tconst result = await processFile(fileToProcess, progressInfo);\n\t\t\t\tsuccesses.push({ filePath: fileToProcess, result });\n\t\t\t} catch (err) {\n\t\t\t\tfailures.push({ filePath: fileToProcess, error: err });\n\t\t\t}\n\n\t\t\tfileToProcess = await getNextFile();\n\t\t}\n\t}));\n\n\tonProgress?.({\n\t\tprocessedFilesCount: successes.length + 
failures.length,\n\t\testimateFileCount,\n\t});\n\n\treturn {\n\t\tsuccesses,\n\t\tfailures,\n\t};\n};\n","import FormData from 'form-data';\nimport fs from 'fs';\nimport { readFile } from 'fs/promises';\nimport fetch from 'node-fetch';\nimport { getFileIPFSHash } from './ipfsHash';\n\nexport type PinataAuth = {\n\tpinataJwtToken: string;\n};\n\nexport type PublishFileResult = {\n\tipfsHash: string;\n};\n\nexport const publishFileToIpfs = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}): Promise<PublishFileResult> => {\n\t// The data api to check for existing file is limited to 30 requests per minute\n\t// While uploading allows 180 requests per minute\n\t// i.e. it's faster to just upload again\n\n\t// // Skip if already pinned\n\t// const { isPinned, ipfsHash } = await checkIfFileIsPinned({ auth, item });\n\t// if (isPinned) {\n\t// \treturn {\n\t// \t\tipfsHash,\n\t// \t};\n\t// }\n\n\tconst data = new FormData();\n\tdata.append('file', fs.createReadStream(item.filePath));\n\tdata.append(\n\t\t'pinataMetadata',\n\t\tJSON.stringify({\n\t\t\tname: item.name,\n\t\t}),\n\t);\n\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinFileToIPFS`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': `multipart/form-data; boundary=${(data as unknown as { _boundary: string })._boundary}`,\n\t\t},\n\t\tbody: data,\n\t\tmethod: 'post',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to upload '${item.name}' to ipfs ${response.statusText}`);\n\t}\n\n\tconst uploadResult = await response.json() as {\n\t\tIpfsHash: string; // This is the IPFS multi-hash provided back for your content,\n\t\tPinSize: string; // This is how large (in bytes) the content you just pinned is,\n\t\tTimestamp: string; // This is the timestamp for your content pinning (represented in ISO 8601 format)\n\t};\n\n\treturn {\n\t\tipfsHash: uploadResult.IpfsHash,\n\t};\n};\n\nconst checkIfFileIsPinned = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}) => {\n\tconst contents = await readFile(item.filePath);\n\tconst ipfsHash = await getFileIPFSHash(item.filePath);\n\n\tconst response = await fetch(`https://api.pinata.cloud/data/pinList?status=pinned&hashContains=${ipfsHash}`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t},\n\t\tmethod: 'get',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to query '${item.name}' status from pinata ${response.statusText}`);\n\t}\n\n\tconst pinResult = await response.json() as {\n\t\tcount: number;\n\t\trows: {\n\t\t\tid: string;\n\t\t\tipfs_pin_hash: string;\n\t\t\tsize: number;\n\t\t\tuser_id: string;\n\t\t\tdate_pinned: null | string;\n\t\t\tdate_unpinned: null | string;\n\t\t\tmetadata: {\n\t\t\t\tname: string;\n\t\t\t\tkeyvalues: null | string;\n\t\t\t};\n\t\t\tregions: {\n\t\t\t\tregionId: string;\n\t\t\t\tcurrentReplicationCount: number;\n\t\t\t\tdesiredReplicationCount: number;\n\t\t\t}[];\n\t\t}[];\n\t};\n\n\tconst isPinned = pinResult.rows.some(x =>\n\t\tx.ipfs_pin_hash === ipfsHash\n\t\t&& x.date_pinned\n\t\t&& !x.date_unpinned\n\t);\n\n\treturn {\n\t\tisPinned,\n\t\tipfsHash,\n\t};\n};\n\nexport const pinHash = async ({\n\tauth,\n\tipfsHash,\n}: {\n\tauth: PinataAuth;\n\tipfsHash: string;\n}) => {\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinByHash`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer 
${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': 'application/json',\n\t\t},\n\t\tmethod: 'post',\n\t\tbody: JSON.stringify({\n\t\t\thashToPin: ipfsHash,\n\t\t}),\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to pin '${ipfsHash}' with pinata: ${response.statusText}`);\n\t}\n\n\t// Ok is the only response if successful\n\treturn;\n};\n","export async function delay(timeout: number): Promise<void> {\n\treturn await new Promise(resolve => {\n\t\tsetTimeout(resolve, timeout);\n\t});\n}\n\nexport const createProcessBackoffController = ({\n\tretryCount = 5,\n\ttargetRequestsPerMinute = 180,\n}: {\n\tretryCount?: number;\n\ttargetRequestsPerMinute?: number;\n}) => {\n\tlet averageTimePerRequest = 5000;\n\tlet targetTimePerRequest = 60000 / targetRequestsPerMinute;\n\tlet lastTime = Date.now();\n\n\tconst processWithBackoff = async <TResult>(process: () => Promise<TResult>) => {\n\t\tlet attempt = 0;\n\t\tlet lastError = undefined as unknown;\n\t\twhile (attempt < retryCount) {\n\t\t\ttry {\n\t\t\t\tlet delayTimeMs = Math.max(10, targetTimePerRequest - averageTimePerRequest);\n\n\t\t\t\t// Partially randomized delay to ensure parallel requests don't line up\n\t\t\t\tawait delay(Math.floor(delayTimeMs * (1 + 0.5 * Math.random())));\n\n\t\t\t\tconst result = await process();\n\n\t\t\t\tconst timeNow = Date.now();\n\t\t\t\tconst timeElapsed = timeNow - lastTime;\n\t\t\t\tlastTime = timeNow;\n\n\t\t\t\t// Running average\n\t\t\t\taverageTimePerRequest = averageTimePerRequest * 0.97 + timeElapsed * 0.03;\n\n\t\t\t\treturn result;\n\t\t\t} catch (err) {\n\t\t\t\tlastError = err;\n\t\t\t}\n\n\t\t\t// Quickly increase time to wait if failure (allow negatives to wait longer than target)\n\t\t\taverageTimePerRequest -= (attempt + 1) * 1000;\n\t\t\tattempt++;\n\t\t}\n\n\t\t// All attempts failed\n\t\tthrow lastError;\n\t};\n\n\treturn 
{\n\t\tprocessWithBackoff,\n\t};\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAA,mBAAoD;;;ACApD,oBAA2B;AAC3B,sBAAyB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,UAAM,0BAAS,QAAQ;AAG3C,QAAM,WAAO,0BAAW,QAAQ,EAAE,OAAO,WAAW,EAAE,OAAO,KAAK;AAGlE,SAAO;AACR;;;ACZA,sBAAiE;AAGjE,IAAAC,eAAiB;;;ACHjB,IAAAC,mBAAe;AACf,kBAAiB;AAIjB,gBAAgB,SAAS,eAA8D;AACtF,QAAM,UAAU,MAAM,iBAAAC,QAAG,KAAK,aAAa;AAC3C,MAAI,QAAQ,OAAO,GAAG;AACrB,UAAM;AACN;AAAA,EACD;AAEA,QAAM,UAAU,MAAM,iBAAAA,QAAG,QAAQ,eAAe,EAAE,eAAe,KAAK,CAAC;AACvE,aAAW,UAAU,SAAS;AAC7B,UAAM,MAAM,YAAAC,QAAK,QAAQ,eAAe,OAAO,IAAI;AACnD,QAAI,OAAO,YAAY,GAAG;AACzB,aAAO,SAAS,GAAG;AAAA,IACpB,OAAO;AACN,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEA,IAAM,qBAAqB,OAAO;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AACD,MAIM;AACL,kBAAgB,YAAAA,QAAK,QAAQ,aAAa;AAC1C,QAAM,WAAW,MAAM,iBAAAD,QAAG,KAAK,aAAa;AAC5C,MACC,CAAC,SAAS,OAAO,KACd,CAAC,SAAS,YAAY,GACxB;AACD,UAAM,IAAI,MAAM,aAAa,aAAa,8BAA8B;AAAA,EACzE;AAEA,MAAI,oBAAoB;AACxB,MAAI,yBAAyB;AAC5B,wBAAoB;AACpB,qBAAiB,YAAY,SAAS,aAAa,GAAG;AACrD,UAAI,UAAU,CAAC,OAAO,QAAQ,GAAG;AAChC;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAEA,QAAM,gBAAgB,SAAS,aAAa;AAC5C,QAAM,cAAc,YAAY;AAC/B,QAAI,YAAY,MAAM,cAAc,KAAK,GAAG;AAC5C,QAAI,CAAC,QAAQ;AACZ,aAAO;AAAA,IACR;AAEA,WAAO,YAAY,CAAC,OAAO,QAAQ,GAAG;AACrC,iBAAW,MAAM,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACR;AACA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AAGO,IAAM,eAAe,OAAgB;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAChB;AACD,MAMM;AACL,QAAM,EAAE,aAAa,kBAAkB,IAAI,MAAM,mBAAmB;AAAA,IACnE;AAAA,IACA;AAAA,IACA,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,YAAY,CAAC;AACnB,QAAM,WAAW,CAAC;AAElB,2CAAa;AAAA,IACZ,qBAAqB;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,QAAQ,IAAI,CAAC,GAAG,IAAI,MAAM,aAAa,CAAC,EAAE,IAAI,OAAM,MAAK;AAC9D,QAAI,gBAAgB,MAAM,YAAY;AACtC,WAAO,eAAe;AACrB,YAAM,eAAe;AAAA,QACpB,qBAAqB,UAAU,SAAS,SAAS;AAAA,QACjD;AAAA,MACD;AACA,+CAAa;AAEb,UAAI;AACH,cAAM,SAAS,MAAM,YAAY,eAAe,YAAY;AAC5D,kBAAU,KAAK,EAAE,UAAU,eAAe,OAAO,CAAC;AAAA,MACnD,SAAS,KAAK;AACb,iBAAS,KAAK,EAAE,UAAU,eAAe,OAAO,IAAI,CAAC;AAAA,MACtD;AAEA,sBAAgB,MAAM,YAAY;AAAA,IACnC;AAAA,EACD,CAAC,CAAC;AAEF,2CAAa;AAAA,IACZ,qBAAqB,UAAU,SAAS,SAAS;AAAA,IACjD;AAAA,EACD;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;;;AChIA,uBAAqB;AACrB,gBAAe;AAEf,wBAAkB;AAWX,IAAM,oBAAoB,OAAO;AAAA,EACvC;AAAA,EACA;AACD,MAMkC;AAajC,QAAM,OAAO,IAAI,iBAAAE,QAAS;AAC1B,OAAK,OAAO,QAAQ,UAAAC,QAAG,iBAAiB,KAAK,QAAQ,CAAC;AACtD,OAAK;AAAA,IACJ;AAAA,IACA,KAAK,UAAU;AAAA,MACd,MAAM,KAAK;AAAA,IACZ,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,kBAAAC,SAAM,kDAAkD;AAAA,IAC9E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB,iCAAkC,KAA0C,SAAS;AAAA,IACtG;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,EACT,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,qBAAqB,KAAK,IAAI,aAAa,SAAS,UAAU,EAAE;AAAA,EACjF;AAEA,QAAM,eAAe,MAAM,SAAS,KAAK;AAMzC,SAAO;AAAA,IACN,UAAU,aAAa;AAAA,EACxB;AACD;AA2DO,IAAM,UAAU,OAAO;AAAA,EAC7B;AAAA,EACA;AACD,MAGM;AACL,QAAM,WAAW,UAAM,kBAAAC,SAAM,8CAA8C;AAAA,IAC1E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,IACR,MAAM,KAAK,UAAU;AAAA,MACpB,WAAW;AAAA,IACZ,CAAC;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,kBAAkB,QAAQ,kBAAkB,SAAS,UAAU,EAAE;AAAA,EAClF;AAGA;AACD;;;ACtJA,eAAsB,MAAM,SAAgC;AAC3D,SAAO,MAAM,IAAI,QAAQ,aAAW;AACnC,eAAW,SAAS,OAAO;AAAA,EAC5B,CAAC;AACF;AAEO,IAAM,iCAAiC,CAAC;AAAA,EAC9C,aAAa;AAAA,EACb,0BAA0B;AAC3B,MAGM;AACL,MAAI,wBAAwB;AAC5B,MAAI,uBAAuB,MAAQ;AACnC,MAAI,WAAW,KAAK,IAAI;AAExB,QAAM,qBAAqB,OAAgBC,aAAoC;AAC9E,QAAI,UAAU;AACd,QAAI,YAAY;AAChB,WAAO,UAAU,YAAY;AAC5B,UAAI;AACH,YAAI,cAAc,KAAK,IAAI,IAAI,uBAAuB,qBAAqB;AAG3E,cAAM,MAAM,KAAK,MAAM,eAAe,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;AAE/D,cA
AM,SAAS,MAAMA,SAAQ;AAE7B,cAAM,UAAU,KAAK,IAAI;AACzB,cAAM,cAAc,UAAU;AAC9B,mBAAW;AAGX,gCAAwB,wBAAwB,OAAO,cAAc;AAErE,eAAO;AAAA,MACR,SAAS,KAAK;AACb,oBAAY;AAAA,MACb;AAGA,gCAA0B,UAAU,KAAK;AACzC;AAAA,IACD;AAGA,UAAM;AAAA,EACP;AAEA,SAAO;AAAA,IACN;AAAA,EACD;AACD;;;AH3CA,oBAAO;AAgBP,IAAM,gBAAgB,OAAO,eAAmC,SAA8C;AAC7G,MAAI,CAAC,eAAe;AACnB,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACxC;AAKA,QAAM,EAAE,mBAAmB,IAAI,+BAA+B;AAAA,IAC7D,YAAY;AAAA,IACZ,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,SAAS,MAAM,aAAa;AAAA,IACjC;AAAA,IACA,eAAe;AAAA,IACf,aAAa,OAAM,aAAY;AAI9B,aAAO;AAAA,QAAmB,MACzB,kBAAkB;AAAA,UACjB;AAAA,UACA,MAAM,EAAE,UAAU,MAAM,aAAAC,QAAK,SAAS,QAAQ,EAAE;AAAA,QACjD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,IACA,YAAY,CAAC,EAAE,qBAAqB,kBAAkB,MAAM;AAC3D,UAAI,qBAAqB,sBAAsB,IAAI;AAClD,YAAI,QAAQ,sBAAsB;AAClC,YAAI,QAAQ,EAAG,SAAQ;AAAA,MAIxB;AAAA,IACD;AAAA,EACD,CAAC;AAOD,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM;AAAA,MACL,GAAG,OAAO,SAAS,IAAI,OAAE;AAxE5B;AAwEgC;AAAA,UAC5B,KAAK;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,UAAU;AAAA,UACV,SAAQ,OAAE,UAAF,mBAAkC,YAAW,KAAK,UAAU,EAAE,KAAK;AAAA,QAC5E;AAAA,OAAE;AAAA,MACF,GAAG,OAAO,UAAU,IAAI,QAAM;AAAA,QAC7B,KAAK;AAAA,QACL,UAAU,EAAE;AAAA,QACZ,UAAU,EAAE,OAAO;AAAA,QACnB,OAAO;AAAA,MACR,EAAE;AAAA,IACH;AAAA,EACD;AACD;AAEA,IAAM,YAAY,OAAO,MAA0B,SAA8C;AAChG,MAAI,CAAC,MAAM;AACV,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC7C;AAEA,QAAM,QAAQ,EAAE,UAAU,MAAM,KAAK,CAAC;AAEtC,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,CAAC,EAAE,UAAU,KAAK,CAAC;AAAA,EAC1B;AACD;AAEA,IAAM,UAAU,OAAO,SAAwC;AAC9D,QAAM;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,IACA;AAAA,EACD,IAAI;AAEJ,QAAM,OAAmB;AAAA;AAAA;AAAA,IAGxB,gBAAgB,QAAQ,IAAI,gBAAgB;AAAA,EAC7C;AAEA,MAAI,CAAC,KAAK,gBAAgB;AACzB,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC3E;AAEA,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,cAAcA,OAAM,IAAI;AAAA,IAChC,KAAK;AACJ,aAAO,UAAU,MAAM,IAAI;AAAA,IAC5B;AACC,YAAM,IAAI,MAAM,GAAG,IAAI,sDAAsD;AAAA,EAC/E;AACD;AAEA,IAAO,gBAAQ,OAAO,SAAiD;AACtE,QAAM,OAAO;AAEb,MAAI;AACH,UAAM,YAAY,MAAM,QAAQ,IAAI;AAGpC,UAAM,SAAU,UAAU,YAAa,UAAU,OAAO;AACxD,eAAO,6BAAY,MAAM;AAAA,EAC1B,SAAS,KAAK;AACb,UAAM,QAAQ;AACd,QAAI,MAAM,SAAS;AAClB,iBAAO,8BAAa,MAAM,OAAO;AAAA,IAClC;AAAA,EACD;AACD;;;AF3IA,wBAAO,OAAO,OAAO;AAAA,EACpB,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,IACN,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA,IACD,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EACA;AACD,IAAI,QAAQ,IAAI;","names":["import_node_sdk","import_path","import_promises","fs","path","FormData","fs","fetch","fetch","process","path"]}
+
{"version":3,"sources":["index.ts","src/ipfsHash.ts","src/proxy.ts","src/file-processing.ts","src/pinata-api.ts","src/utils.ts"],"sourcesContent":["import { Option, Plugin, PositionalArg, Task } from '@taqueria/node-sdk';\nexport { getFileIPFSHash } from './src/ipfsHash';\nimport proxy from './src/proxy';\n\nPlugin.create(() => ({\n\tschema: '0.1',\n\tversion: '0.4.0',\n\talias: 'pinata',\n\ttasks: [\n\t\tTask.create({\n\t\t\ttask: 'publish',\n\t\t\tcommand: 'publish [path]',\n\t\t\tdescription: 'Upload and pin files using your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'path',\n\t\t\t\t\tdescription: 'Directory or file path to publish',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t\tencoding: 'json',\n\t\t}),\n\t\tTask.create({\n\t\t\ttask: 'pin',\n\t\t\tcommand: 'pin [hash]',\n\t\t\tdescription: 'Pin a file already on ipfs with your pinata account.',\n\t\t\taliases: [],\n\t\t\thandler: 'proxy',\n\t\t\tpositionals: [\n\t\t\t\tPositionalArg.create({\n\t\t\t\t\tplaceholder: 'hash',\n\t\t\t\t\tdescription: 'Ipfs hash of the file or directory that is already on the ipfs network.',\n\t\t\t\t\ttype: 'string',\n\t\t\t\t}),\n\t\t\t],\n\t\t}),\n\t],\n\tproxy,\n}), process.argv);\n","import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(new Uint8Array(fileContent)).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n","import { sendAsyncErr, sendAsyncRes, sendErr, sendJsonRes } from '@taqueria/node-sdk';\nimport { RequestArgs } from '@taqueria/node-sdk';\nimport { LoadedConfig, SanitizedAbsPath } from '@taqueria/node-sdk/types';\nimport path from 'path';\nimport { processFiles } from './file-processing';\nimport { PinataAuth, pinHash, publishFileToIpfs } from './pinata-api';\nimport { createProcessBackoffController } from './utils';\n\n// Load .env for jwt token\n// TODO: How should this be stored in a secure way?\nimport 'dotenv/config';\n\n// TODO: What should this be, it was removed from the sdk\ntype PluginResponse =\n\t| void\n\t| {\n\t\trender: 'table';\n\t\tdata: unknown[];\n\t};\n\ninterface Opts extends RequestArgs.t {\n\treadonly path?: string;\n\treadonly hash?: string;\n\treadonly task?: string;\n}\n\nconst publishToIpfs = async (fileOrDirPath: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!fileOrDirPath) {\n\t\tthrow new Error(`path was not provided`);\n\t}\n\n\t// Pinata is limited to 180 requests per minute\n\t// So for the first 180 requests they can go fast\n\n\tconst { processWithBackoff } = createProcessBackoffController({\n\t\tretryCount: 5,\n\t\ttargetRequestsPerMinute: 180,\n\t});\n\n\tconst result = await processFiles({\n\t\tfileOrDirPath,\n\t\tparallelCount: 10,\n\t\tprocessFile: async filePath => {\n\t\t\t// // TEMP: Debug\n\t\t\t// console.log(`publishing: ${filePath}`);\n\n\t\t\treturn processWithBackoff(() =>\n\t\t\t\tpublishFileToIpfs({\n\t\t\t\t\tauth,\n\t\t\t\t\titem: { filePath, name: path.basename(filePath) },\n\t\t\t\t})\n\t\t\t);\n\t\t},\n\t\tonProgress: ({ processedFilesCount, estimateFileCount }) => {\n\t\t\tif (estimateFileCount && processedFilesCount % 10) {\n\t\t\t\tlet ratio = processedFilesCount / 
estimateFileCount;\n\t\t\t\tif (ratio > 1) ratio = 1;\n\n\t\t\t\t// // TODO: Call task sdk progress\n\t\t\t\t// console.log(`Progress: ${(ratio * 100).toFixed(0)}%`);\n\t\t\t}\n\t\t},\n\t});\n\n\t// // TEMP: DEBUG: Show error\n\t// if (result.failures.length) {\n\t// \tconsole.log('❗ Failures:\\n' + result.failures.map(f => `${f.filePath}: ${f.error}`).join('\\n'));\n\t// }\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [\n\t\t\t...result.failures.map(x => ({\n\t\t\t\t'?': '❌',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: undefined,\n\t\t\t\terror: (x.error as { message?: string })?.message ?? JSON.stringify(x.error),\n\t\t\t})),\n\t\t\t...result.successes.map(x => ({\n\t\t\t\t'?': '✔',\n\t\t\t\tfilePath: x.filePath,\n\t\t\t\tipfsHash: x.result.ipfsHash,\n\t\t\t\terror: undefined,\n\t\t\t})),\n\t\t],\n\t};\n};\n\nconst pinToIpfs = async (hash: undefined | string, auth: PinataAuth): Promise<PluginResponse> => {\n\tif (!hash) {\n\t\tthrow new Error(`ipfs hash was not provided`);\n\t}\n\n\tawait pinHash({ ipfsHash: hash, auth });\n\n\treturn {\n\t\trender: 'table',\n\t\tdata: [{ ipfsHash: hash }],\n\t};\n};\n\nconst execute = async (opts: Opts): Promise<PluginResponse> => {\n\tconst {\n\t\ttask,\n\t\tpath,\n\t\thash,\n\t} = opts;\n\n\tconst auth: PinataAuth = {\n\t\t// TODO: Where should this be stored?\n\t\t// pinataJwtToken: (config as Record<string, any>).credentials.pinataJwtToken,\n\t\tpinataJwtToken: process.env['pinataJwtToken'] as string,\n\t};\n\n\tif (!auth.pinataJwtToken) {\n\t\tthrow new Error(`The 'credentials.pinataJwtToken' was not found in config`);\n\t}\n\n\tswitch (task) {\n\t\tcase 'publish':\n\t\t\treturn publishToIpfs(path, auth);\n\t\tcase 'pin':\n\t\t\treturn pinToIpfs(hash, auth);\n\t\tdefault:\n\t\t\tthrow new Error(`${task} is not an understood task by the ipfs-pinata plugin`);\n\t}\n};\n\nexport default async (args: RequestArgs.t): Promise<PluginResponse> => {\n\tconst opts = args as Opts;\n\n\ttry {\n\t\tconst resultRaw = await execute(opts) as Record<string, unknown>;\n\t\t// TODO: Fix deno parsing\n\t\t// Without this, `data.reduce is not a function`\n\t\tconst result = ('data' in resultRaw) ? 
resultRaw.data : resultRaw;\n\t\treturn sendJsonRes(result);\n\t} catch (err) {\n\t\tconst error = err as Error;\n\t\tif (error.message) {\n\t\t\treturn sendAsyncErr(error.message);\n\t\t}\n\t}\n};\n","import fs from 'fs/promises';\nimport path from 'path';\n\n// Async generator\n// https://stackoverflow.com/questions/5827612/node-js-fs-readdir-recursive-directory-search\nasync function* getFiles(fileOrDirPath: string): AsyncGenerator<string, void, unknown> {\n\tconst dirInfo = await fs.stat(fileOrDirPath);\n\tif (dirInfo.isFile()) {\n\t\tyield fileOrDirPath;\n\t\treturn;\n\t}\n\n\tconst dirents = await fs.readdir(fileOrDirPath, { withFileTypes: true });\n\tfor (const dirent of dirents) {\n\t\tconst res = path.resolve(fileOrDirPath, dirent.name);\n\t\tif (dirent.isDirectory()) {\n\t\t\tyield* getFiles(res);\n\t\t} else {\n\t\t\tyield res;\n\t\t}\n\t}\n}\n\nconst createFileProvider = async ({\n\tfileOrDirPath,\n\tfilter,\n\tshouldEstimateFileCount,\n}: {\n\tfileOrDirPath: string;\n\tfilter?: (filePath: string) => boolean;\n\tshouldEstimateFileCount?: boolean;\n}) => {\n\tfileOrDirPath = path.resolve(fileOrDirPath);\n\tconst pathInfo = await fs.stat(fileOrDirPath);\n\tif (\n\t\t!pathInfo.isFile()\n\t\t&& !pathInfo.isDirectory()\n\t) {\n\t\tthrow new Error(`The path '${fileOrDirPath}' is not a file or directory`);\n\t}\n\n\tlet estimateFileCount = undefined as undefined | number;\n\tif (shouldEstimateFileCount) {\n\t\testimateFileCount = 0;\n\t\tfor await (const filePath of getFiles(fileOrDirPath)) {\n\t\t\tif (filter && !filter(filePath)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\testimateFileCount++;\n\t\t}\n\t}\n\n\tconst fileGenerator = getFiles(fileOrDirPath);\n\tconst getNextFile = async () => {\n\t\tlet nextFile = (await fileGenerator.next()).value;\n\t\tif (!filter) {\n\t\t\treturn nextFile;\n\t\t}\n\n\t\twhile (nextFile && !filter(nextFile)) {\n\t\t\tnextFile = await getNextFile();\n\t\t}\n\n\t\treturn nextFile;\n\t};\n\treturn {\n\t\tgetNextFile,\n\t\testimateFileCount,\n\t};\n};\n\ntype ProgressInfo = { processedFilesCount: number; estimateFileCount: undefined | number };\nexport const processFiles = async <TResult>({\n\tfileOrDirPath,\n\tprocessFile,\n\tfilter,\n\tparallelCount = 10,\n\tonProgress,\n}: {\n\tfileOrDirPath: string;\n\tprocessFile: (filePath: string, progress: ProgressInfo) => Promise<TResult>;\n\tfilter?: (filePath: string) => boolean;\n\tparallelCount?: number;\n\tonProgress?: (progress: ProgressInfo) => void;\n}) => {\n\tconst { getNextFile, estimateFileCount } = await createFileProvider({\n\t\tfileOrDirPath,\n\t\tfilter,\n\t\tshouldEstimateFileCount: true,\n\t});\n\n\tconst successes = [] as { filePath: string; result: TResult }[];\n\tconst failures = [] as { filePath: string; error: unknown }[];\n\n\tonProgress?.({\n\t\tprocessedFilesCount: 0,\n\t\testimateFileCount,\n\t});\n\n\tawait Promise.all([...new Array(parallelCount)].map(async x => {\n\t\tlet fileToProcess = await getNextFile();\n\t\twhile (fileToProcess) {\n\t\t\tconst progressInfo = {\n\t\t\t\tprocessedFilesCount: successes.length + failures.length,\n\t\t\t\testimateFileCount,\n\t\t\t};\n\t\t\tonProgress?.(progressInfo);\n\n\t\t\ttry {\n\t\t\t\tconst result = await processFile(fileToProcess, progressInfo);\n\t\t\t\tsuccesses.push({ filePath: fileToProcess, result });\n\t\t\t} catch (err) {\n\t\t\t\tfailures.push({ filePath: fileToProcess, error: err });\n\t\t\t}\n\n\t\t\tfileToProcess = await getNextFile();\n\t\t}\n\t}));\n\n\tonProgress?.({\n\t\tprocessedFilesCount: successes.length + 
failures.length,\n\t\testimateFileCount,\n\t});\n\n\treturn {\n\t\tsuccesses,\n\t\tfailures,\n\t};\n};\n","import FormData from 'form-data';\nimport fs from 'fs';\nimport { readFile } from 'fs/promises';\nimport fetch from 'node-fetch';\nimport { getFileIPFSHash } from './ipfsHash';\n\nexport type PinataAuth = {\n\tpinataJwtToken: string;\n};\n\nexport type PublishFileResult = {\n\tipfsHash: string;\n};\n\nexport const publishFileToIpfs = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}): Promise<PublishFileResult> => {\n\t// The data api to check for existing file is limited to 30 requests per minute\n\t// While uploading allows 180 requests per minute\n\t// i.e. it's faster to just upload again\n\n\t// // Skip if already pinned\n\t// const { isPinned, ipfsHash } = await checkIfFileIsPinned({ auth, item });\n\t// if (isPinned) {\n\t// \treturn {\n\t// \t\tipfsHash,\n\t// \t};\n\t// }\n\n\tconst data = new FormData();\n\tdata.append('file', fs.createReadStream(item.filePath));\n\tdata.append(\n\t\t'pinataMetadata',\n\t\tJSON.stringify({\n\t\t\tname: item.name,\n\t\t}),\n\t);\n\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinFileToIPFS`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': `multipart/form-data; boundary=${(data as unknown as { _boundary: string })._boundary}`,\n\t\t},\n\t\tbody: data,\n\t\tmethod: 'post',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to upload '${item.name}' to ipfs ${response.statusText}`);\n\t}\n\n\tconst uploadResult = await response.json() as {\n\t\tIpfsHash: string; // This is the IPFS multi-hash provided back for your content,\n\t\tPinSize: string; // This is how large (in bytes) the content you just pinned is,\n\t\tTimestamp: string; // This is the timestamp for your content pinning (represented in ISO 8601 format)\n\t};\n\n\treturn {\n\t\tipfsHash: uploadResult.IpfsHash,\n\t};\n};\n\nconst checkIfFileIsPinned = async ({\n\tauth,\n\titem,\n}: {\n\tauth: PinataAuth;\n\titem: {\n\t\tname: string;\n\t\tfilePath: string;\n\t};\n}) => {\n\tconst contents = await readFile(item.filePath);\n\tconst ipfsHash = await getFileIPFSHash(item.filePath);\n\n\tconst response = await fetch(`https://api.pinata.cloud/data/pinList?status=pinned&hashContains=${ipfsHash}`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer ${auth.pinataJwtToken}`,\n\t\t},\n\t\tmethod: 'get',\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to query '${item.name}' status from pinata ${response.statusText}`);\n\t}\n\n\tconst pinResult = await response.json() as {\n\t\tcount: number;\n\t\trows: {\n\t\t\tid: string;\n\t\t\tipfs_pin_hash: string;\n\t\t\tsize: number;\n\t\t\tuser_id: string;\n\t\t\tdate_pinned: null | string;\n\t\t\tdate_unpinned: null | string;\n\t\t\tmetadata: {\n\t\t\t\tname: string;\n\t\t\t\tkeyvalues: null | string;\n\t\t\t};\n\t\t\tregions: {\n\t\t\t\tregionId: string;\n\t\t\t\tcurrentReplicationCount: number;\n\t\t\t\tdesiredReplicationCount: number;\n\t\t\t}[];\n\t\t}[];\n\t};\n\n\tconst isPinned = pinResult.rows.some(x =>\n\t\tx.ipfs_pin_hash === ipfsHash\n\t\t&& x.date_pinned\n\t\t&& !x.date_unpinned\n\t);\n\n\treturn {\n\t\tisPinned,\n\t\tipfsHash,\n\t};\n};\n\nexport const pinHash = async ({\n\tauth,\n\tipfsHash,\n}: {\n\tauth: PinataAuth;\n\tipfsHash: string;\n}) => {\n\tconst response = await fetch(`https://api.pinata.cloud/pinning/pinByHash`, {\n\t\theaders: {\n\t\t\tAuthorization: `Bearer 
${auth.pinataJwtToken}`,\n\t\t\t'Content-Type': 'application/json',\n\t\t},\n\t\tmethod: 'post',\n\t\tbody: JSON.stringify({\n\t\t\thashToPin: ipfsHash,\n\t\t}),\n\t});\n\n\tif (!response.ok) {\n\t\tthrow new Error(`Failed to pin '${ipfsHash}' with pinata: ${response.statusText}`);\n\t}\n\n\t// Ok is the only response if successful\n\treturn;\n};\n","export async function delay(timeout: number): Promise<void> {\n\treturn await new Promise(resolve => {\n\t\tsetTimeout(resolve, timeout);\n\t});\n}\n\nexport const createProcessBackoffController = ({\n\tretryCount = 5,\n\ttargetRequestsPerMinute = 180,\n}: {\n\tretryCount?: number;\n\ttargetRequestsPerMinute?: number;\n}) => {\n\tlet averageTimePerRequest = 5000;\n\tlet targetTimePerRequest = 60000 / targetRequestsPerMinute;\n\tlet lastTime = Date.now();\n\n\tconst processWithBackoff = async <TResult>(process: () => Promise<TResult>) => {\n\t\tlet attempt = 0;\n\t\tlet lastError = undefined as unknown;\n\t\twhile (attempt < retryCount) {\n\t\t\ttry {\n\t\t\t\tlet delayTimeMs = Math.max(10, targetTimePerRequest - averageTimePerRequest);\n\n\t\t\t\t// Partially randomized delay to ensure parallel requests don't line up\n\t\t\t\tawait delay(Math.floor(delayTimeMs * (1 + 0.5 * Math.random())));\n\n\t\t\t\tconst result = await process();\n\n\t\t\t\tconst timeNow = Date.now();\n\t\t\t\tconst timeElapsed = timeNow - lastTime;\n\t\t\t\tlastTime = timeNow;\n\n\t\t\t\t// Running average\n\t\t\t\taverageTimePerRequest = averageTimePerRequest * 0.97 + timeElapsed * 0.03;\n\n\t\t\t\treturn result;\n\t\t\t} catch (err) {\n\t\t\t\tlastError = err;\n\t\t\t}\n\n\t\t\t// Quickly increase time to wait if failure (allow negatives to wait longer than target)\n\t\t\taverageTimePerRequest -= (attempt + 1) * 1000;\n\t\t\tattempt++;\n\t\t}\n\n\t\t// All attempts failed\n\t\tthrow lastError;\n\t};\n\n\treturn 
{\n\t\tprocessWithBackoff,\n\t};\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAA,mBAAoD;;;ACApD,oBAA2B;AAC3B,sBAAyB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,UAAM,0BAAS,QAAQ;AAG3C,QAAM,WAAO,0BAAW,QAAQ,EAAE,OAAO,IAAI,WAAW,WAAW,CAAC,EAAE,OAAO,KAAK;AAGlF,SAAO;AACR;;;ACZA,sBAAiE;AAGjE,IAAAC,eAAiB;;;ACHjB,IAAAC,mBAAe;AACf,kBAAiB;AAIjB,gBAAgB,SAAS,eAA8D;AACtF,QAAM,UAAU,MAAM,iBAAAC,QAAG,KAAK,aAAa;AAC3C,MAAI,QAAQ,OAAO,GAAG;AACrB,UAAM;AACN;AAAA,EACD;AAEA,QAAM,UAAU,MAAM,iBAAAA,QAAG,QAAQ,eAAe,EAAE,eAAe,KAAK,CAAC;AACvE,aAAW,UAAU,SAAS;AAC7B,UAAM,MAAM,YAAAC,QAAK,QAAQ,eAAe,OAAO,IAAI;AACnD,QAAI,OAAO,YAAY,GAAG;AACzB,aAAO,SAAS,GAAG;AAAA,IACpB,OAAO;AACN,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEA,IAAM,qBAAqB,OAAO;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AACD,MAIM;AACL,kBAAgB,YAAAA,QAAK,QAAQ,aAAa;AAC1C,QAAM,WAAW,MAAM,iBAAAD,QAAG,KAAK,aAAa;AAC5C,MACC,CAAC,SAAS,OAAO,KACd,CAAC,SAAS,YAAY,GACxB;AACD,UAAM,IAAI,MAAM,aAAa,aAAa,8BAA8B;AAAA,EACzE;AAEA,MAAI,oBAAoB;AACxB,MAAI,yBAAyB;AAC5B,wBAAoB;AACpB,qBAAiB,YAAY,SAAS,aAAa,GAAG;AACrD,UAAI,UAAU,CAAC,OAAO,QAAQ,GAAG;AAChC;AAAA,MACD;AACA;AAAA,IACD;AAAA,EACD;AAEA,QAAM,gBAAgB,SAAS,aAAa;AAC5C,QAAM,cAAc,YAAY;AAC/B,QAAI,YAAY,MAAM,cAAc,KAAK,GAAG;AAC5C,QAAI,CAAC,QAAQ;AACZ,aAAO;AAAA,IACR;AAEA,WAAO,YAAY,CAAC,OAAO,QAAQ,GAAG;AACrC,iBAAW,MAAM,YAAY;AAAA,IAC9B;AAEA,WAAO;AAAA,EACR;AACA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;AAGO,IAAM,eAAe,OAAgB;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAChB;AACD,MAMM;AACL,QAAM,EAAE,aAAa,kBAAkB,IAAI,MAAM,mBAAmB;AAAA,IACnE;AAAA,IACA;AAAA,IACA,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,YAAY,CAAC;AACnB,QAAM,WAAW,CAAC;AAElB,2CAAa;AAAA,IACZ,qBAAqB;AAAA,IACrB;AAAA,EACD;AAEA,QAAM,QAAQ,IAAI,CAAC,GAAG,IAAI,MAAM,aAAa,CAAC,EAAE,IAAI,OAAM,MAAK;AAC9D,QAAI,gBAAgB,MAAM,YAAY;AACtC,WAAO,eAAe;AACrB,YAAM,eAAe;AAAA,QACpB,qBAAqB,UAAU,SAAS,SAAS;AAAA,QACjD;AAAA,MACD;AACA,+CAAa;AAEb,UAAI;AACH,cAAM,SAAS,MAAM,YAAY,eAAe,YAAY;AAC5D,kBAAU,KAAK,EAAE,UAAU,eAAe,OAAO,CAAC;AAAA,MACnD,SAAS,KAAK;AACb,iBAAS,KAAK,EAAE,UAAU,eAAe,OAAO,IAAI,CAAC;AAAA,MACtD;AAEA,sBAAgB,MAAM,YAAY;AAAA,IACnC;AAAA,EACD,CAAC,CAAC;AAEF,2CAAa;AAAA,IACZ,qBAAqB,UAAU,SAAS,SAAS;AAAA,IACjD;AAAA,EACD;AAEA,SAAO;AAAA,IACN;AAAA,IACA;AAAA,EACD;AACD;;;AChIA,uBAAqB;AACrB,gBAAe;AAEf,wBAAkB;AAWX,IAAM,oBAAoB,OAAO;AAAA,EACvC;AAAA,EACA;AACD,MAMkC;AAajC,QAAM,OAAO,IAAI,iBAAAE,QAAS;AAC1B,OAAK,OAAO,QAAQ,UAAAC,QAAG,iBAAiB,KAAK,QAAQ,CAAC;AACtD,OAAK;AAAA,IACJ;AAAA,IACA,KAAK,UAAU;AAAA,MACd,MAAM,KAAK;AAAA,IACZ,CAAC;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,kBAAAC,SAAM,kDAAkD;AAAA,IAC9E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB,iCAAkC,KAA0C,SAAS;AAAA,IACtG;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,EACT,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,qBAAqB,KAAK,IAAI,aAAa,SAAS,UAAU,EAAE;AAAA,EACjF;AAEA,QAAM,eAAe,MAAM,SAAS,KAAK;AAMzC,SAAO;AAAA,IACN,UAAU,aAAa;AAAA,EACxB;AACD;AA2DO,IAAM,UAAU,OAAO;AAAA,EAC7B;AAAA,EACA;AACD,MAGM;AACL,QAAM,WAAW,UAAM,kBAAAC,SAAM,8CAA8C;AAAA,IAC1E,SAAS;AAAA,MACR,eAAe,UAAU,KAAK,cAAc;AAAA,MAC5C,gBAAgB;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,IACR,MAAM,KAAK,UAAU;AAAA,MACpB,WAAW;AAAA,IACZ,CAAC;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,UAAM,IAAI,MAAM,kBAAkB,QAAQ,kBAAkB,SAAS,UAAU,EAAE;AAAA,EAClF;AAGA;AACD;;;ACtJA,eAAsB,MAAM,SAAgC;AAC3D,SAAO,MAAM,IAAI,QAAQ,aAAW;AACnC,eAAW,SAAS,OAAO;AAAA,EAC5B,CAAC;AACF;AAEO,IAAM,iCAAiC,CAAC;AAAA,EAC9C,aAAa;AAAA,EACb,0BAA0B;AAC3B,MAGM;AACL,MAAI,wBAAwB;AAC5B,MAAI,uBAAuB,MAAQ;AACnC,MAAI,WAAW,KAAK,IAAI;AAExB,QAAM,qBAAqB,OAAgBC,aAAoC;AAC9E,QAAI,UAAU;AACd,QAAI,YAAY;AAChB,WAAO,UAAU,YAAY;AAC5B,UAAI;AACH,YAAI,cAAc,KAAK,IAAI,IAAI,uBAAuB,qBAAqB;AAG3E,cAAM,MAAM,KAAK,MAAM,eAAe,IAAI,MAAM,KAAK,OAAO,EAA
E,CAAC;AAE/D,cAAM,SAAS,MAAMA,SAAQ;AAE7B,cAAM,UAAU,KAAK,IAAI;AACzB,cAAM,cAAc,UAAU;AAC9B,mBAAW;AAGX,gCAAwB,wBAAwB,OAAO,cAAc;AAErE,eAAO;AAAA,MACR,SAAS,KAAK;AACb,oBAAY;AAAA,MACb;AAGA,gCAA0B,UAAU,KAAK;AACzC;AAAA,IACD;AAGA,UAAM;AAAA,EACP;AAEA,SAAO;AAAA,IACN;AAAA,EACD;AACD;;;AH3CA,oBAAO;AAgBP,IAAM,gBAAgB,OAAO,eAAmC,SAA8C;AAC7G,MAAI,CAAC,eAAe;AACnB,UAAM,IAAI,MAAM,uBAAuB;AAAA,EACxC;AAKA,QAAM,EAAE,mBAAmB,IAAI,+BAA+B;AAAA,IAC7D,YAAY;AAAA,IACZ,yBAAyB;AAAA,EAC1B,CAAC;AAED,QAAM,SAAS,MAAM,aAAa;AAAA,IACjC;AAAA,IACA,eAAe;AAAA,IACf,aAAa,OAAM,aAAY;AAI9B,aAAO;AAAA,QAAmB,MACzB,kBAAkB;AAAA,UACjB;AAAA,UACA,MAAM,EAAE,UAAU,MAAM,aAAAC,QAAK,SAAS,QAAQ,EAAE;AAAA,QACjD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,IACA,YAAY,CAAC,EAAE,qBAAqB,kBAAkB,MAAM;AAC3D,UAAI,qBAAqB,sBAAsB,IAAI;AAClD,YAAI,QAAQ,sBAAsB;AAClC,YAAI,QAAQ,EAAG,SAAQ;AAAA,MAIxB;AAAA,IACD;AAAA,EACD,CAAC;AAOD,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM;AAAA,MACL,GAAG,OAAO,SAAS,IAAI,OAAE;AAxE5B;AAwEgC;AAAA,UAC5B,KAAK;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,UAAU;AAAA,UACV,SAAQ,OAAE,UAAF,mBAAkC,YAAW,KAAK,UAAU,EAAE,KAAK;AAAA,QAC5E;AAAA,OAAE;AAAA,MACF,GAAG,OAAO,UAAU,IAAI,QAAM;AAAA,QAC7B,KAAK;AAAA,QACL,UAAU,EAAE;AAAA,QACZ,UAAU,EAAE,OAAO;AAAA,QACnB,OAAO;AAAA,MACR,EAAE;AAAA,IACH;AAAA,EACD;AACD;AAEA,IAAM,YAAY,OAAO,MAA0B,SAA8C;AAChG,MAAI,CAAC,MAAM;AACV,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC7C;AAEA,QAAM,QAAQ,EAAE,UAAU,MAAM,KAAK,CAAC;AAEtC,SAAO;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,CAAC,EAAE,UAAU,KAAK,CAAC;AAAA,EAC1B;AACD;AAEA,IAAM,UAAU,OAAO,SAAwC;AAC9D,QAAM;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,IACA;AAAA,EACD,IAAI;AAEJ,QAAM,OAAmB;AAAA;AAAA;AAAA,IAGxB,gBAAgB,QAAQ,IAAI,gBAAgB;AAAA,EAC7C;AAEA,MAAI,CAAC,KAAK,gBAAgB;AACzB,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC3E;AAEA,UAAQ,MAAM;AAAA,IACb,KAAK;AACJ,aAAO,cAAcA,OAAM,IAAI;AAAA,IAChC,KAAK;AACJ,aAAO,UAAU,MAAM,IAAI;AAAA,IAC5B;AACC,YAAM,IAAI,MAAM,GAAG,IAAI,sDAAsD;AAAA,EAC/E;AACD;AAEA,IAAO,gBAAQ,OAAO,SAAiD;AACtE,QAAM,OAAO;AAEb,MAAI;AACH,UAAM,YAAY,MAAM,QAAQ,IAAI;AAGpC,UAAM,SAAU,UAAU,YAAa,UAAU,OAAO;AACxD,eAAO,6BAAY,MAAM;AAAA,EAC1B,SAAS,KAAK;AACb,UAAM,QAAQ;AACd,QAAI,MAAM,SAAS;AAClB,iBAAO,8BAAa,MAAM,OAAO;AAAA,IAClC;AAAA,EACD;AACD;;;AF3IA,wBAAO,OAAO,OAAO;AAAA,EACpB,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,IACN,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,MACA,UAAU;AAAA,IACX,CAAC;AAAA,IACD,sBAAK,OAAO;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,aAAa;AAAA,QACZ,+BAAc,OAAO;AAAA,UACpB,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,QACP,CAAC;AAAA,MACF;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EACA;AACD,IAAI,QAAQ,IAAI;","names":["import_node_sdk","import_path","import_promises","fs","path","FormData","fs","fetch","fetch","process","path"]}
package/index.js CHANGED
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@taqueria/plugin-ipfs-pinata",
-  "version": "0.73.12",
+  "version": "0.78.10",
   "description": "A plugin for Taqueria providing ipfs publishing and pinning using the Pinata service",
   "keywords": [
     "taqueria",
@@ -30,15 +30,15 @@
     "directory": "taqueria-plugin-ipfs-pinata"
   },
   "dependencies": {
-    "dotenv": "^
-    "form-data": "^4.0.
+    "dotenv": "^17.2.3",
+    "form-data": "^4.0.5",
     "node-fetch": "^3.3.2",
-    "@taqueria/node-sdk": "0.
+    "@taqueria/node-sdk": "0.78.10"
   },
   "devDependencies": {
-    "@types/node-fetch": "^2.6.
-    "tsup": "^8.
-    "typescript": "^5.
+    "@types/node-fetch": "^2.6.13",
+    "tsup": "^8.5.1",
+    "typescript": "^5.9.3"
   },
   "exports": {
     ".": {
package/src/ipfsHash.cjs CHANGED
@@ -28,7 +28,7 @@ var import_crypto = require("crypto");
 var import_promises = require("fs/promises");
 async function getFileIPFSHash(filePath) {
   const fileContent = await (0, import_promises.readFile)(filePath);
-  const hash = (0, import_crypto.createHash)("sha256").update(fileContent).digest("hex");
+  const hash = (0, import_crypto.createHash)("sha256").update(new Uint8Array(fileContent)).digest("hex");
   return hash;
 }
 var ipfsHash_default = getFileIPFSHash;
package/src/ipfsHash.cjs.map CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["ipfsHash.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(fileContent).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA2B;AAC3B,sBAAyB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,UAAM,0BAAS,QAAQ;AAG3C,QAAM,WAAO,0BAAW,QAAQ,EAAE,OAAO,WAAW,EAAE,OAAO,KAAK;
+
{"version":3,"sources":["ipfsHash.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { readFile } from 'fs/promises';\n\nexport async function getFileIPFSHash(filePath: string): Promise<string> {\n\t// Read the file contents\n\tconst fileContent = await readFile(filePath);\n\n\t// Create a SHA-256 hash of the file contents\n\tconst hash = createHash('sha256').update(new Uint8Array(fileContent)).digest('hex');\n\n\t// Return the hash as a string\n\treturn hash;\n}\n\nexport default getFileIPFSHash;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA2B;AAC3B,sBAAyB;AAEzB,eAAsB,gBAAgB,UAAmC;AAExE,QAAM,cAAc,UAAM,0BAAS,QAAQ;AAG3C,QAAM,WAAO,0BAAW,QAAQ,EAAE,OAAO,IAAI,WAAW,WAAW,CAAC,EAAE,OAAO,KAAK;AAGlF,SAAO;AACR;AAEA,IAAO,mBAAQ;","names":[]}
package/src/ipfsHash.js CHANGED
package/src/ipfsHash.ts CHANGED
@@ -6,7 +6,7 @@ export async function getFileIPFSHash(filePath: string): Promise<string> {
 	const fileContent = await readFile(filePath);
 
 	// Create a SHA-256 hash of the file contents
-	const hash = createHash('sha256').update(fileContent).digest('hex');
+	const hash = createHash('sha256').update(new Uint8Array(fileContent)).digest('hex');
 
 	// Return the hash as a string
 	return hash;
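Apart from the version and dependency bumps in package.json, the only source-level change in this release is in getFileIPFSHash: the Buffer returned by readFile is now copied into a Uint8Array before being passed to createHash().update(). Because Node's Buffer is itself a Uint8Array subclass and the Uint8Array constructor copies its bytes, the resulting SHA-256 digest is unchanged; the wrapper most likely exists to satisfy the stricter Buffer/Uint8Array typings in newer TypeScript and @types/node releases rather than to change runtime behaviour. The sketch below is not part of the package (the file path is hypothetical); it only illustrates that both forms hash the same bytes.

```ts
// Minimal sketch: compare hashing a Buffer directly vs. hashing a Uint8Array copy.
import { strict as assert } from 'assert';
import { createHash } from 'crypto';
import { readFile } from 'fs/promises';

async function compareDigests(filePath: string): Promise<void> {
	const fileContent = await readFile(filePath); // Buffer (a Uint8Array subclass)

	// Pre-0.78.10 behaviour: hash the Buffer as-is.
	const fromBuffer = createHash('sha256').update(fileContent).digest('hex');

	// 0.78.10 behaviour: copy the bytes into a plain Uint8Array, then hash.
	const fromUint8Array = createHash('sha256')
		.update(new Uint8Array(fileContent))
		.digest('hex');

	assert.equal(fromBuffer, fromUint8Array); // same bytes in, same digest out
}

// Example usage (hypothetical path):
// await compareDigests('./artifacts/contract.tz');
```

The one trade-off is that the copy allocates a second buffer the size of the file, which only matters for very large inputs.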