pinme 1.0.2 → 1.0.4

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +18 -2
  2. package/dist/index.js +189 -102
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -130,14 +130,14 @@ pinme help

  ## Upload Limits

- - Single file size limit: 100MB
+ - Single file size limit: 20MB
  - Total directory size limit: 500MB

  ## File Storage

  Uploaded files are stored on the IPFS network and accessible through the Glitter Protocol's IPFS gateway. After a successful upload, you will receive:

- 1. IPFS hash value
+ 1. IPFS content hash
  2. Accessible URL link

  ### Log Locations
@@ -151,6 +151,22 @@ Logs and configuration files are stored in:

  MIT License - See the [LICENSE](LICENSE) file for details

+ ## Usage Tips
+
+ ### Uploading Vite Projects
+
+ When uploading projects built with Vite, please note:
+
+ 1. **Vite Configuration**: Add `base: "./"` to your Vite configuration file to ensure proper asset path resolution:
+
+ ```js
+ // vite.config.js
+ export default {
+   base: "./",
+   // other configurations...
+ }
+ ```
+
  ## Contact Us

  If you have questions or suggestions, please contact us through:
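A note on the Vite tip added above: the README snippet exports a plain configuration object. If a project already uses Vite's `defineConfig` helper (part of Vite's public API), the same relative-base setting can be expressed as in the sketch below; this is illustrative only and not part of the package diff.

```js
// vite.config.js — illustrative equivalent of the README snippet above.
// base: "./" makes built asset URLs relative, so the site resolves
// correctly when served from an IPFS gateway path.
import { defineConfig } from "vite";

export default defineConfig({
  base: "./",
  // other configurations...
});
```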
package/dist/index.js CHANGED
@@ -367,7 +367,7 @@ var import_chalk4 = __toESM(require("chalk"));
  var import_figlet2 = __toESM(require("figlet"));

  // package.json
- var version = "1.0.2";
+ var version = "1.0.4";

  // bin/upload.ts
  var import_path5 = __toESM(require("path"));
@@ -386,7 +386,7 @@ var import_chalk2 = __toESM(require("chalk"));
  // bin/utils/uploadLimits.ts
  var import_fs = __toESM(require("fs"));
  var import_path = __toESM(require("path"));
- var FILE_SIZE_LIMIT = parseInt("100", 10) * 1024 * 1024;
+ var FILE_SIZE_LIMIT = parseInt("20", 10) * 1024 * 1024;
  var DIRECTORY_SIZE_LIMIT = parseInt("500", 10) * 1024 * 1024;
  function checkFileSizeLimit(filePath) {
    const stats = import_fs.default.statSync(filePath);
@@ -454,7 +454,8 @@ var saveUploadHistory = (uploadData) => {
      previewHash: uploadData.previewHash,
      size: uploadData.size,
      fileCount: uploadData.fileCount || 1,
-     type: uploadData.isDirectory ? "directory" : "file"
+     type: uploadData.isDirectory ? "directory" : "file",
+     shortUrl: (uploadData == null ? void 0 : uploadData.shortUrl) || null
    };
    history.uploads.unshift(newRecord);
    import_fs_extra.default.writeJsonSync(HISTORY_FILE, history, { spaces: 2 });
@@ -480,25 +481,23 @@ var displayUploadHistory = (limit = 10) => {
      console.log(import_chalk.default.yellow("No upload history found."));
      return;
    }
-   console.log(import_chalk.default.bold("\n\u{1F4DC} Upload History:"));
-   console.log(import_chalk.default.dim("\u2500".repeat(80)));
-   history.forEach((record, index) => {
-     console.log(import_chalk.default.bold(`#${index + 1} - ${record.date}`));
-     console.log(import_chalk.default.cyan(`Name: ${record.filename}`));
-     console.log(import_chalk.default.cyan(`Path: ${record.path}`));
-     console.log(import_chalk.default.cyan(`Type: ${record.type}`));
-     console.log(import_chalk.default.cyan(`Size: ${formatSize(record.size)}`));
-     if (record.type === "directory") {
-       console.log(import_chalk.default.cyan(`Files: ${record.fileCount}`));
+   console.log(import_chalk.default.cyan("Upload History:"));
+   console.log(import_chalk.default.cyan("-".repeat(80)));
+   const recentHistory = history.slice(-limit);
+   recentHistory.forEach((item, index) => {
+     console.log(import_chalk.default.green(`${index + 1}. ${item.filename}`));
+     console.log(import_chalk.default.white(` Path: ${item.path}`));
+     console.log(import_chalk.default.white(` Content Hash: ${item.contentHash}`));
+     if (item.shortUrl) {
+       console.log(import_chalk.default.white(` ENS URL: https://${item.shortUrl}.pinit.eth.limo`));
      }
-     console.log(import_chalk.default.cyan(`Content Hash: ${record.contentHash}`));
-     if (record.previewHash) {
-       console.log(import_chalk.default.cyan(`Preview Hash: ${record.previewHash}`));
-       console.log(import_chalk.default.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${record.previewHash}/#/?from=local`));
-     } else {
-       console.log(import_chalk.default.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${record.contentHash}`));
+     console.log(import_chalk.default.white(` Size: ${formatSize(item.size)}`));
+     console.log(import_chalk.default.white(` Files: ${item.fileCount}`));
+     console.log(import_chalk.default.white(` Type: ${item.type === "directory" ? "Directory" : "File"}`));
+     if (item.timestamp) {
+       console.log(import_chalk.default.white(` Date: ${new Date(item.timestamp).toLocaleString()}`));
      }
-     console.log(import_chalk.default.dim("\u2500".repeat(80)));
+     console.log(import_chalk.default.cyan("-".repeat(80)));
    });
    const totalSize = history.reduce((sum, record) => sum + record.size, 0);
    const totalFiles = history.reduce((sum, record) => sum + record.fileCount, 0);
@@ -539,6 +538,10 @@ function getDeviceId() {

  // bin/utils/uploadToIpfs.ts
  var ipfsApiUrl = "https://ipfs.glitterprotocol.dev/api/v2";
+ var ERROR_CODES = {
+   "30001": `File too large, single file max size: ${"20"}MB,single folder max size: ${"500"}MB`,
+   "30002": `Max storage quorum ${Number("1000") / 1e3} GB reached`
+ };
  var dirPath = null;
  function loadFilesToArrRecursively(directoryPath, dist) {
    const filesArr = [];
@@ -551,7 +554,11 @@ function loadFilesToArrRecursively(directoryPath, dist) {
      if (import_fs_extra3.default.statSync(filePath).isFile()) {
        const sizeCheck = checkFileSizeLimit(filePath);
        if (sizeCheck.exceeds) {
-         throw new Error(`File ${file} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
+         throw new Error(
+           `File ${file} exceeds size limit of ${formatSize(
+             sizeCheck.limit
+           )} (size: ${formatSize(sizeCheck.size)})`
+         );
        }
        const filePathWithNoEndSep = filePath.replace(dirPath, "");
        const filePathEncodeSep = filePathWithNoEndSep.replaceAll(sep, "%2F");
@@ -586,10 +593,15 @@ function countFilesInDirectory(directoryPath) {
  async function uploadDirectory(directoryPath, deviceId) {
    const sizeCheck = checkDirectorySizeLimit(directoryPath);
    if (sizeCheck.exceeds) {
-     throw new Error(`Directory ${directoryPath} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
+     throw new Error(
+       `Directory ${directoryPath} exceeds size limit of ${formatSize(
+         sizeCheck.limit
+       )} (size: ${formatSize(sizeCheck.size)})`
+     );
    }
    const formData = new import_form_data.default();
-   if (directoryPath.endsWith(import_path4.default.sep)) directoryPath = directoryPath.slice(0, -1);
+   if (directoryPath.endsWith(import_path4.default.sep))
+     directoryPath = directoryPath.slice(0, -1);
    const dist = directoryPath.split(import_path4.default.sep).pop() || "";
    const files = loadFilesToArrRecursively(directoryPath, dist);
    files.forEach((file) => {
@@ -597,97 +609,172 @@ async function uploadDirectory(directoryPath, deviceId) {
        filename: file.name
      });
    });
-   formData.append("uid", deviceId);
    const spinner = (0, import_ora.default)(`Uploading ${directoryPath} to glitter ipfs...`).start();
-   const response = await import_axios.default.post(`${ipfsApiUrl}/add`, formData, {
-     headers: {
-       ...formData.getHeaders()
+   try {
+     const response = await import_axios.default.post(
+       `${ipfsApiUrl}/add?uid=${deviceId}&cidV=1`,
+       formData,
+       {
+         headers: {
+           ...formData.getHeaders()
+         }
+       }
+     );
+     const resData = response.data.data;
+     if (Array.isArray(resData) && resData.length > 0) {
+       const directoryItem = resData.find((item) => item.Name === dist);
+       if (directoryItem) {
+         spinner.succeed(
+           `Successfully uploaded ${directoryPath} to glitter ipfs`
+         );
+         const fileStats = import_fs_extra3.default.statSync(directoryPath);
+         const fileCount = countFilesInDirectory(directoryPath);
+         const uploadData = {
+           path: directoryPath,
+           filename: import_path4.default.basename(directoryPath),
+           contentHash: directoryItem.Hash,
+           previewHash: null,
+           size: sizeCheck.size,
+           fileCount,
+           isDirectory: true,
+           shortUrl: directoryItem.ShortUrl || null
+         };
+         saveUploadHistory(uploadData);
+         return {
+           hash: directoryItem.Hash,
+           shortUrl: directoryItem.ShortUrl
+         };
+       }
+       spinner.fail(`Directory hash not found in response`);
+       console.log(import_chalk2.default.red(`Directory hash not found in response`));
+     } else {
+       spinner.fail(`Invalid response format from IPFS`);
+       console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
      }
-   });
-   const resData = response.data.data;
-   if (Array.isArray(resData) && resData.length > 0) {
-     spinner.succeed();
-     const directoryItem = resData.find((item) => item.Name === dist);
-     if (directoryItem) {
-       const fileStats = import_fs_extra3.default.statSync(directoryPath);
-       const fileCount = countFilesInDirectory(directoryPath);
-       const uploadData = {
-         path: directoryPath,
-         filename: import_path4.default.basename(directoryPath),
-         contentHash: directoryItem.Hash,
-         previewHash: null,
-         size: sizeCheck.size,
-         fileCount,
-         isDirectory: true
-       };
-       saveUploadHistory(uploadData);
-       return directoryItem.Hash;
+     return null;
+   } catch (error) {
+     if (error.response && error.response.data && error.response.data.code) {
+       const errorCode = error.response.data.code.toString();
+       if (ERROR_CODES[errorCode]) {
+         spinner.fail(
+           `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+         );
+         console.log(
+           import_chalk2.default.red(
+             `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+           )
+         );
+         return null;
+       }
      }
-     spinner.fail();
-     console.log(import_chalk2.default.red(`Directory hash not found in response`));
-   } else {
-     spinner.fail();
-     console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
+     spinner.fail(`Error: ${error.message}`);
+     console.log(import_chalk2.default.red(`Error: ${error.message}`));
+     return null;
    }
-   return null;
  }
  async function uploadFile(filePath, deviceId) {
    const sizeCheck = checkFileSizeLimit(filePath);
    if (sizeCheck.exceeds) {
-     throw new Error(`File ${filePath} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
+     throw new Error(
+       `File ${filePath} exceeds size limit of ${formatSize(
+         sizeCheck.limit
+       )} (size: ${formatSize(sizeCheck.size)})`
+     );
    }
-   const formData = new import_form_data.default();
-   formData.append("file", import_fs_extra3.default.createReadStream(filePath), {
-     filename: filePath.split(import_path4.default.sep).pop() || ""
-   });
-   formData.append("uid", deviceId);
    const spinner = (0, import_ora.default)(`Uploading ${filePath} to glitter ipfs...`).start();
-   const response = await import_axios.default.post(`${ipfsApiUrl}/add`, formData, {
-     headers: {
-       ...formData.getHeaders()
+   try {
+     const formData = new import_form_data.default();
+     formData.append("file", import_fs_extra3.default.createReadStream(filePath), {
+       filename: filePath.split(import_path4.default.sep).pop() || ""
+     });
+     const response = await import_axios.default.post(
+       `${ipfsApiUrl}/add?uid=${deviceId}&cidV=1`,
+       formData,
+       {
+         headers: {
+           ...formData.getHeaders()
+         }
+       }
+     );
+     console.log(import_chalk2.default.cyan(JSON.stringify(response)));
+     const resData = response.data.data;
+     if (Array.isArray(resData) && resData.length > 0) {
+       const fileItem = resData.find(
+         (item) => item.Name === filePath.split(import_path4.default.sep).pop() || ""
+       );
+       if (fileItem) {
+         spinner.succeed(`Successfully uploaded ${filePath} to glitter ipfs`);
+         const uploadData = {
+           path: filePath,
+           filename: filePath.split(import_path4.default.sep).pop() || "",
+           contentHash: fileItem.Hash,
+           previewHash: null,
+           size: sizeCheck.size,
+           fileCount: 1,
+           isDirectory: false,
+           shortUrl: fileItem.ShortUrl || null
+         };
+         saveUploadHistory(uploadData);
+         return {
+           hash: fileItem.Hash,
+           shortUrl: fileItem.ShortUrl
+         };
+       }
+       spinner.fail(`File hash not found in response`);
+       console.log(import_chalk2.default.red(`File hash not found in response`));
+     } else {
+       spinner.fail(`Invalid response format from IPFS`);
+       console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
      }
-   });
-   const resData = response.data.data;
-   if (Array.isArray(resData) && resData.length > 0) {
-     spinner.succeed();
-     const fileItem = resData.find((item) => item.Name === filePath.split(import_path4.default.sep).pop() || "");
-     if (fileItem) {
-       const uploadData = {
-         path: filePath,
-         filename: filePath.split(import_path4.default.sep).pop() || "",
-         contentHash: fileItem.Hash,
-         previewHash: null,
-         size: sizeCheck.size,
-         fileCount: 1,
-         isDirectory: false
-       };
-       saveUploadHistory(uploadData);
-       return fileItem.Hash;
+     return null;
+   } catch (error) {
+     if (error.response && error.response.data && error.response.data.code) {
+       const errorCode = error.response.data.code.toString();
+       if (ERROR_CODES[errorCode]) {
+         spinner.fail(
+           `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+         );
+         console.log(
+           import_chalk2.default.red(
+             `Error: ${ERROR_CODES[errorCode]} (Code: ${errorCode})`
+           )
+         );
+         return null;
+       }
      }
-     spinner.fail();
-     console.log(import_chalk2.default.red(`File hash not found in response`));
-   } else {
-     spinner.fail();
-     console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
+     spinner.fail(`Error: ${error.message}`);
+     console.log(import_chalk2.default.red(`Error: ${error.message}`));
+     return null;
    }
-   return null;
  }
  async function uploadToIpfs_default(filePath) {
    const deviceId = getDeviceId();
    if (!deviceId) {
      throw new Error("Device ID not found");
    }
+   let contentHash = "";
+   let shortUrl = "";
    if (import_fs_extra3.default.statSync(filePath).isDirectory()) {
-     return {
-       contentHash: await uploadDirectory(filePath, deviceId) || "",
-       previewHash: null
-     };
+     const result = await uploadDirectory(filePath, deviceId);
+     if (result) {
+       contentHash = result.hash;
+       shortUrl = result.shortUrl || "";
+     }
    } else {
+     const result = await uploadFile(filePath, deviceId);
+     if (result) {
+       contentHash = result.hash;
+       shortUrl = result.shortUrl || "";
+     }
+   }
+   if (contentHash) {
      return {
-       contentHash: await uploadFile(filePath, deviceId) || "",
-       previewHash: null
+       contentHash,
+       previewHash: null,
+       shortUrl
      };
    }
+   return null;
  }

  // bin/upload.ts
@@ -743,15 +830,15 @@ var upload_default = async (options) => {
        const result = await uploadToIpfs_default(absolutePath);
        if (result) {
          const encryptedCID = encryptHash(result.contentHash, secretKey);
-         console.log(import_chalk3.default.cyan(
-           import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
-         ));
+         console.log(
+           import_chalk3.default.cyan(
+             import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
+           )
+         );
          console.log(import_chalk3.default.cyan(`URL: ${URL2}${encryptedCID}`));
-       } else {
-         console.log(import_chalk3.default.red(`upload failed`));
        }
      } catch (error) {
-       console.error(import_chalk3.default.red(`error uploading: ${error.message}`));
+       console.error(import_chalk3.default.red(`Error: ${error.message}`));
      }
      return;
    }
@@ -773,15 +860,15 @@ var upload_default = async (options) => {
          const result = await uploadToIpfs_default(absolutePath);
          if (result) {
            const encryptedCID = encryptHash(result.contentHash, secretKey);
-           console.log(import_chalk3.default.cyan(
-             import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
-           ));
+           console.log(
+             import_chalk3.default.cyan(
+               import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
+             )
+           );
            console.log(import_chalk3.default.cyan(`URL: ${URL2}${encryptedCID}`));
-         } else {
-           console.log(import_chalk3.default.red(`upload failed`));
          }
        } catch (error) {
-         console.error(import_chalk3.default.red(`error uploading: ${error.message}`));
+         console.error(import_chalk3.default.red(`Error: ${error.message}`));
        }
      }
    } catch (error) {
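To summarize the behavioral change in dist/index.js above: `uploadToIpfs_default` now resolves to an object with `contentHash`, `previewHash`, and `shortUrl` (or `null` when the upload fails), and the history view turns `shortUrl` into a `*.pinit.eth.limo` URL. The sketch below only restates that result shape from a caller's perspective; the field names and URL pattern come from the diff, while the example values are hypothetical.

```js
// Illustrative sketch only — not code from the published package.
// Shape of a successful uploadToIpfs_default result, per the diff above:
const result = {
  contentHash: "bafy...",   // hypothetical CID (the new endpoint requests cidV=1)
  previewHash: null,
  shortUrl: "example"       // may be "" when the gateway returns no ShortUrl
};

// The upload history view maps shortUrl to an ENS-style gateway URL:
if (result && result.shortUrl) {
  console.log(`ENS URL: https://${result.shortUrl}.pinit.eth.limo`);
}
```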
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "pinme",
-   "version": "1.0.2",
+   "version": "1.0.4",
    "publishConfig": {
      "access": "public"
    },
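Finally, both upload paths in the new dist/index.js wrap the gateway request in try/catch and translate known error codes (30001 for the per-file and per-folder size limits, 30002 for the storage quota) through the ERROR_CODES map before falling back to the raw error message. The sketch below condenses that pattern; `describeUploadError` is a hypothetical helper, not an export of the package.

```js
// Illustrative sketch of the error handling added in dist/index.js.
// ERROR_CODES and the error.response.data.code lookup mirror the diff;
// describeUploadError is hypothetical and not part of the package.
const ERROR_CODES = {
  "30001": "File too large, single file max size: 20MB, single folder max size: 500MB",
  "30002": "Max storage quorum 1 GB reached"
};

function describeUploadError(error) {
  const code = error && error.response && error.response.data && error.response.data.code
    ? error.response.data.code.toString()
    : null;
  return code && ERROR_CODES[code]
    ? `Error: ${ERROR_CODES[code]} (Code: ${code})`
    : `Error: ${error.message}`;
}
```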