pinme 1.0.3 → 1.0.4

This diff shows the content of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
Files changed (3)
  1. package/README.md +2 -2
  2. package/dist/index.js +189 -100
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -130,14 +130,14 @@ pinme help
 
  ## Upload Limits
 
- - Single file size limit: 100MB
+ - Single file size limit: 20MB
  - Total directory size limit: 500MB
 
  ## File Storage
 
  Uploaded files are stored on the IPFS network and accessible through the Glitter Protocol's IPFS gateway. After a successful upload, you will receive:
 
- 1. IPFS hash value
+ 1. IPFS content hash
  2. Accessible URL link
 
  ### Log Locations
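The lowered single-file limit in this README change corresponds to the FILE_SIZE_LIMIT constant edited in dist/index.js below. As a rough sketch of how that limit is applied, assuming the { exceeds, size, limit } return shape implied by the call sites later in this diff:

```js
// Illustrative sketch only; approximates the bundled logic shown in dist/index.js below.
const fs = require("fs");

const FILE_SIZE_LIMIT = 20 * 1024 * 1024;        // single file: 20MB (was 100MB in 1.0.3)
const DIRECTORY_SIZE_LIMIT = 500 * 1024 * 1024;  // directory total: 500MB (unchanged)

// Return shape ({ exceeds, size, limit }) is inferred from how sizeCheck is used below.
function checkFileSizeLimit(filePath) {
  const stats = fs.statSync(filePath);
  return {
    exceeds: stats.size > FILE_SIZE_LIMIT,
    size: stats.size,
    limit: FILE_SIZE_LIMIT
  };
}
```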
package/dist/index.js CHANGED
@@ -367,7 +367,7 @@ var import_chalk4 = __toESM(require("chalk"));
  var import_figlet2 = __toESM(require("figlet"));
 
  // package.json
- var version = "1.0.3";
+ var version = "1.0.4";
 
  // bin/upload.ts
  var import_path5 = __toESM(require("path"));
@@ -386,7 +386,7 @@ var import_chalk2 = __toESM(require("chalk"));
  // bin/utils/uploadLimits.ts
  var import_fs = __toESM(require("fs"));
  var import_path = __toESM(require("path"));
- var FILE_SIZE_LIMIT = parseInt("100", 10) * 1024 * 1024;
+ var FILE_SIZE_LIMIT = parseInt("20", 10) * 1024 * 1024;
  var DIRECTORY_SIZE_LIMIT = parseInt("500", 10) * 1024 * 1024;
  function checkFileSizeLimit(filePath) {
  const stats = import_fs.default.statSync(filePath);
@@ -454,7 +454,8 @@ var saveUploadHistory = (uploadData) => {
  previewHash: uploadData.previewHash,
  size: uploadData.size,
  fileCount: uploadData.fileCount || 1,
- type: uploadData.isDirectory ? "directory" : "file"
+ type: uploadData.isDirectory ? "directory" : "file",
+ shortUrl: (uploadData == null ? void 0 : uploadData.shortUrl) || null
  };
  history.uploads.unshift(newRecord);
  import_fs_extra.default.writeJsonSync(HISTORY_FILE, history, { spaces: 2 });
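With this hunk, every record written to HISTORY_FILE also carries a shortUrl. A hypothetical 1.0.4 record might look like the sketch below (all values invented for illustration; the timestamp field is assumed from the display code further down):

```js
// Hypothetical upload-history record after 1.0.4 (values are made up for illustration).
const exampleRecord = {
  path: "/home/user/my-site",
  filename: "my-site",
  contentHash: "bafy...",        // IPFS CID returned by the add endpoint (truncated here)
  previewHash: null,
  size: 1048576,                 // bytes
  fileCount: 12,
  type: "directory",
  shortUrl: "abc123",            // new in 1.0.4; null when the API returns no ShortUrl
  timestamp: 1700000000000       // assumed; displayUploadHistory prints it when present
};
```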
@@ -480,25 +481,23 @@ var displayUploadHistory = (limit = 10) => {
  console.log(import_chalk.default.yellow("No upload history found."));
  return;
  }
- console.log(import_chalk.default.bold("\n\u{1F4DC} Upload History:"));
- console.log(import_chalk.default.dim("\u2500".repeat(80)));
- history.forEach((record, index) => {
- console.log(import_chalk.default.bold(`#${index + 1} - ${record.date}`));
- console.log(import_chalk.default.cyan(`Name: ${record.filename}`));
- console.log(import_chalk.default.cyan(`Path: ${record.path}`));
- console.log(import_chalk.default.cyan(`Type: ${record.type}`));
- console.log(import_chalk.default.cyan(`Size: ${formatSize(record.size)}`));
- if (record.type === "directory") {
- console.log(import_chalk.default.cyan(`Files: ${record.fileCount}`));
+ console.log(import_chalk.default.cyan("Upload History:"));
+ console.log(import_chalk.default.cyan("-".repeat(80)));
+ const recentHistory = history.slice(-limit);
+ recentHistory.forEach((item, index) => {
+ console.log(import_chalk.default.green(`${index + 1}. ${item.filename}`));
+ console.log(import_chalk.default.white(` Path: ${item.path}`));
+ console.log(import_chalk.default.white(` Content Hash: ${item.contentHash}`));
+ if (item.shortUrl) {
+ console.log(import_chalk.default.white(` ENS URL: https://${item.shortUrl}.pinit.eth.limo`));
  }
- console.log(import_chalk.default.cyan(`Content Hash: ${record.contentHash}`));
- if (record.previewHash) {
- console.log(import_chalk.default.cyan(`Preview Hash: ${record.previewHash}`));
- console.log(import_chalk.default.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${record.previewHash}/#/?from=local`));
- } else {
- console.log(import_chalk.default.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${record.contentHash}`));
+ console.log(import_chalk.default.white(` Size: ${formatSize(item.size)}`));
+ console.log(import_chalk.default.white(` Files: ${item.fileCount}`));
+ console.log(import_chalk.default.white(` Type: ${item.type === "directory" ? "Directory" : "File"}`));
+ if (item.timestamp) {
+ console.log(import_chalk.default.white(` Date: ${new Date(item.timestamp).toLocaleString()}`));
  }
- console.log(import_chalk.default.dim("\u2500".repeat(80)));
+ console.log(import_chalk.default.cyan("-".repeat(80)));
  });
  const totalSize = history.reduce((sum, record) => sum + record.size, 0);
  const totalFiles = history.reduce((sum, record) => sum + record.fileCount, 0);
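One behavioral note on the rewritten history view: 1.0.3 printed an ipfs.glitterprotocol.dev gateway URL per record, while 1.0.4 drops that line and instead prints an ENS-style link whenever a shortUrl was saved. A small sketch of that mapping (sample value is hypothetical):

```js
// ENS-style gateway link printed by displayUploadHistory in 1.0.4.
function ensUrlFromShortUrl(shortUrl) {
  return `https://${shortUrl}.pinit.eth.limo`;
}

// ensUrlFromShortUrl("abc123") === "https://abc123.pinit.eth.limo"  (hypothetical shortUrl)
```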
@@ -539,6 +538,10 @@ function getDeviceId() {
 
  // bin/utils/uploadToIpfs.ts
  var ipfsApiUrl = "https://ipfs.glitterprotocol.dev/api/v2";
+ var ERROR_CODES = {
+ "30001": `File too large, single file max size: ${"20"}MB,single folder max size: ${"500"}MB`,
+ "30002": `Max storage quorum ${Number("1000") / 1e3} GB reached`
+ };
  var dirPath = null;
  function loadFilesToArrRecursively(directoryPath, dist) {
  const filesArr = [];
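The ERROR_CODES map added here is consulted by the catch blocks introduced further down. The ${"20"}, ${"500"} and ${"1000"} slots are literal strings in the bundle (presumably inlined from build-time config), so the messages resolve to fixed text at runtime; a sketch of the lookup, matching the error shape handled in this diff:

```js
// What the bundled entries evaluate to at runtime:
//   "30001" -> "File too large, single file max size: 20MB,single folder max size: 500MB"
//   "30002" -> "Max storage quorum 1 GB reached"      (Number("1000") / 1e3 === 1)
//
// Illustrative helper mirroring the catch blocks below (not part of the package).
function messageForApiError(error, ERROR_CODES) {
  const code = error && error.response && error.response.data && error.response.data.code;
  return (code && ERROR_CODES[code.toString()]) || error.message;
}
```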
@@ -551,7 +554,11 @@ function loadFilesToArrRecursively(directoryPath, dist) {
  if (import_fs_extra3.default.statSync(filePath).isFile()) {
  const sizeCheck = checkFileSizeLimit(filePath);
  if (sizeCheck.exceeds) {
- throw new Error(`File ${file} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
+ throw new Error(
+ `File ${file} exceeds size limit of ${formatSize(
+ sizeCheck.limit
+ )} (size: ${formatSize(sizeCheck.size)})`
+ );
  }
  const filePathWithNoEndSep = filePath.replace(dirPath, "");
  const filePathEncodeSep = filePathWithNoEndSep.replaceAll(sep, "%2F");
@@ -586,10 +593,15 @@ function countFilesInDirectory(directoryPath) {
  async function uploadDirectory(directoryPath, deviceId) {
  const sizeCheck = checkDirectorySizeLimit(directoryPath);
  if (sizeCheck.exceeds) {
- throw new Error(`Directory ${directoryPath} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
+ throw new Error(
+ `Directory ${directoryPath} exceeds size limit of ${formatSize(
+ sizeCheck.limit
+ )} (size: ${formatSize(sizeCheck.size)})`
+ );
  }
  const formData = new import_form_data.default();
- if (directoryPath.endsWith(import_path4.default.sep)) directoryPath = directoryPath.slice(0, -1);
+ if (directoryPath.endsWith(import_path4.default.sep))
+ directoryPath = directoryPath.slice(0, -1);
  const dist = directoryPath.split(import_path4.default.sep).pop() || "";
  const files = loadFilesToArrRecursively(directoryPath, dist);
  files.forEach((file) => {
@@ -598,94 +610,171 @@ async function uploadDirectory(directoryPath, deviceId) {
  });
  });
  const spinner = (0, import_ora.default)(`Uploading ${directoryPath} to glitter ipfs...`).start();
- const response = await import_axios.default.post(`${ipfsApiUrl}/add?uid=${deviceId}&cidV=1`, formData, {
- headers: {
- ...formData.getHeaders()
+ try {
+ const response = await import_axios.default.post(
+ `${ipfsApiUrl}/add?uid=${deviceId}&cidV=1`,
+ formData,
+ {
+ headers: {
+ ...formData.getHeaders()
+ }
+ }
+ );
+ const resData = response.data.data;
+ if (Array.isArray(resData) && resData.length > 0) {
+ const directoryItem = resData.find((item) => item.Name === dist);
+ if (directoryItem) {
+ spinner.succeed(
+ `Successfully uploaded ${directoryPath} to glitter ipfs`
+ );
+ const fileStats = import_fs_extra3.default.statSync(directoryPath);
+ const fileCount = countFilesInDirectory(directoryPath);
+ const uploadData = {
+ path: directoryPath,
+ filename: import_path4.default.basename(directoryPath),
+ contentHash: directoryItem.Hash,
+ previewHash: null,
+ size: sizeCheck.size,
+ fileCount,
+ isDirectory: true,
+ shortUrl: directoryItem.ShortUrl || null
+ };
+ saveUploadHistory(uploadData);
+ return {
+ hash: directoryItem.Hash,
+ shortUrl: directoryItem.ShortUrl
+ };
+ }
+ spinner.fail(`Directory hash not found in response`);
+ console.log(import_chalk2.default.red(`Directory hash not found in response`));
+ } else {
+ spinner.fail(`Invalid response format from IPFS`);
+ console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
  }
- });
- const resData = response.data.data;
- if (Array.isArray(resData) && resData.length > 0) {
- spinner.succeed();
- const directoryItem = resData.find((item) => item.Name === dist);
- if (directoryItem) {
- const fileStats = import_fs_extra3.default.statSync(directoryPath);
- const fileCount = countFilesInDirectory(directoryPath);
- const uploadData = {
- path: directoryPath,
- filename: import_path4.default.basename(directoryPath),
- contentHash: directoryItem.Hash,
- previewHash: null,
- size: sizeCheck.size,
- fileCount,
- isDirectory: true
- };
- saveUploadHistory(uploadData);
- return directoryItem.Hash;
+ return null;
+ } catch (error) {
+ if (error.response && error.response.data && error.response.data.code) {
+ const errorCode = error.response.data.code.toString();
+ if (ERROR_CODES[errorCode]) {
+ spinner.fail(
+ `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+ );
+ console.log(
+ import_chalk2.default.red(
+ `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+ )
+ );
+ return null;
+ }
  }
- spinner.fail();
- console.log(import_chalk2.default.red(`Directory hash not found in response`));
- } else {
- spinner.fail();
- console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
+ spinner.fail(`Error: ${error.message}`);
+ console.log(import_chalk2.default.red(`Error: ${error.message}`));
+ return null;
  }
- return null;
  }
  async function uploadFile(filePath, deviceId) {
  const sizeCheck = checkFileSizeLimit(filePath);
  if (sizeCheck.exceeds) {
- throw new Error(`File ${filePath} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
+ throw new Error(
+ `File ${filePath} exceeds size limit of ${formatSize(
+ sizeCheck.limit
+ )} (size: ${formatSize(sizeCheck.size)})`
+ );
  }
- const formData = new import_form_data.default();
- formData.append("file", import_fs_extra3.default.createReadStream(filePath), {
- filename: filePath.split(import_path4.default.sep).pop() || ""
- });
  const spinner = (0, import_ora.default)(`Uploading ${filePath} to glitter ipfs...`).start();
- const response = await import_axios.default.post(`${ipfsApiUrl}/add?uid=${deviceId}&cidV=1`, formData, {
- headers: {
- ...formData.getHeaders()
+ try {
+ const formData = new import_form_data.default();
+ formData.append("file", import_fs_extra3.default.createReadStream(filePath), {
+ filename: filePath.split(import_path4.default.sep).pop() || ""
+ });
+ const response = await import_axios.default.post(
+ `${ipfsApiUrl}/add?uid=${deviceId}&cidV=1`,
+ formData,
+ {
+ headers: {
+ ...formData.getHeaders()
+ }
+ }
+ );
+ console.log(import_chalk2.default.cyan(JSON.stringify(response)));
+ const resData = response.data.data;
+ if (Array.isArray(resData) && resData.length > 0) {
+ const fileItem = resData.find(
+ (item) => item.Name === filePath.split(import_path4.default.sep).pop() || ""
+ );
+ if (fileItem) {
+ spinner.succeed(`Successfully uploaded ${filePath} to glitter ipfs`);
+ const uploadData = {
+ path: filePath,
+ filename: filePath.split(import_path4.default.sep).pop() || "",
+ contentHash: fileItem.Hash,
+ previewHash: null,
+ size: sizeCheck.size,
+ fileCount: 1,
+ isDirectory: false,
+ shortUrl: fileItem.ShortUrl || null
+ };
+ saveUploadHistory(uploadData);
+ return {
+ hash: fileItem.Hash,
+ shortUrl: fileItem.ShortUrl
+ };
+ }
+ spinner.fail(`File hash not found in response`);
+ console.log(import_chalk2.default.red(`File hash not found in response`));
+ } else {
+ spinner.fail(`Invalid response format from IPFS`);
+ console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
  }
- });
- const resData = response.data.data;
- if (Array.isArray(resData) && resData.length > 0) {
- spinner.succeed();
- const fileItem = resData.find((item) => item.Name === filePath.split(import_path4.default.sep).pop() || "");
- if (fileItem) {
- const uploadData = {
- path: filePath,
- filename: filePath.split(import_path4.default.sep).pop() || "",
- contentHash: fileItem.Hash,
- previewHash: null,
- size: sizeCheck.size,
- fileCount: 1,
- isDirectory: false
- };
- saveUploadHistory(uploadData);
- return fileItem.Hash;
+ return null;
+ } catch (error) {
+ if (error.response && error.response.data && error.response.data.code) {
+ const errorCode = error.response.data.code.toString();
+ if (ERROR_CODES[errorCode]) {
+ spinner.fail(
+ `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+ );
+ console.log(
+ import_chalk2.default.red(
+ `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+ )
+ );
+ return null;
+ }
  }
- spinner.fail();
- console.log(import_chalk2.default.red(`File hash not found in response`));
- } else {
- spinner.fail();
- console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
+ spinner.fail(`Error: ${error.message}`);
+ console.log(import_chalk2.default.red(`Error: ${error.message}`));
+ return null;
  }
- return null;
  }
  async function uploadToIpfs_default(filePath) {
  const deviceId = getDeviceId();
  if (!deviceId) {
  throw new Error("Device ID not found");
  }
+ let contentHash = "";
+ let shortUrl = "";
  if (import_fs_extra3.default.statSync(filePath).isDirectory()) {
- return {
- contentHash: await uploadDirectory(filePath, deviceId) || "",
- previewHash: null
- };
+ const result = await uploadDirectory(filePath, deviceId);
+ if (result) {
+ contentHash = result.hash;
+ shortUrl = result.shortUrl || "";
+ }
  } else {
+ const result = await uploadFile(filePath, deviceId);
+ if (result) {
+ contentHash = result.hash;
+ shortUrl = result.shortUrl || "";
+ }
+ }
+ if (contentHash) {
  return {
- contentHash: await uploadFile(filePath, deviceId) || "",
- previewHash: null
+ contentHash,
+ previewHash: null,
+ shortUrl
  };
  }
+ return null;
  }
 
  // bin/upload.ts
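Net effect of this hunk: uploadToIpfs_default now resolves to { contentHash, previewHash, shortUrl } on success and null on failure, instead of always returning an object whose contentHash could be an empty string. A simplified sketch of how the caller in bin/upload.ts (see the hunks below) consumes that shape; encryptHash, secretKey and URL2 are taken from those hunks:

```js
// Simplified consumer of the new return shape; condensed from the upload.ts hunks below.
async function handleUpload(absolutePath) {
  const result = await uploadToIpfs_default(absolutePath);  // { contentHash, previewHash, shortUrl } or null
  if (!result) return;                                       // failures are already reported inside uploadFile/uploadDirectory
  const encryptedCID = encryptHash(result.contentHash, secretKey);
  console.log(`URL: ${URL2}${encryptedCID}`);
}
```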
@@ -741,15 +830,15 @@ var upload_default = async (options) => {
  const result = await uploadToIpfs_default(absolutePath);
  if (result) {
  const encryptedCID = encryptHash(result.contentHash, secretKey);
- console.log(import_chalk3.default.cyan(
- import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
- ));
+ console.log(
+ import_chalk3.default.cyan(
+ import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
+ )
+ );
  console.log(import_chalk3.default.cyan(`URL: ${URL2}${encryptedCID}`));
- } else {
- console.log(import_chalk3.default.red(`upload failed`));
  }
  } catch (error) {
- console.error(import_chalk3.default.red(`error uploading: ${error.message}`));
+ console.error(import_chalk3.default.red(`Error: ${error.message}`));
  }
  return;
  }
@@ -771,15 +860,15 @@ var upload_default = async (options) => {
  const result = await uploadToIpfs_default(absolutePath);
  if (result) {
  const encryptedCID = encryptHash(result.contentHash, secretKey);
- console.log(import_chalk3.default.cyan(
- import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
- ));
+ console.log(
+ import_chalk3.default.cyan(
+ import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
+ )
+ );
  console.log(import_chalk3.default.cyan(`URL: ${URL2}${encryptedCID}`));
- } else {
- console.log(import_chalk3.default.red(`upload failed`));
  }
  } catch (error) {
- console.error(import_chalk3.default.red(`error uploading: ${error.message}`));
+ console.error(import_chalk3.default.red(`Error: ${error.message}`));
  }
  }
  } catch (error) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "pinme",
- "version": "1.0.3",
+ "version": "1.0.4",
  "publishConfig": {
  "access": "public"
  },