pinme 1.0.0 → 1.0.1

This diff shows the changes between the two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
Files changed (3)
  1. package/README.md +7 -48
  2. package/dist/index.js +839 -1
  3. package/package.json +15 -54
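An equivalent comparison can be reproduced locally with npm's built-in diff command (available in npm 7 and later):

```bash
# Compare the two published versions of pinme straight from the registry
npm diff --diff=pinme@1.0.0 --diff=pinme@1.0.1
```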
package/README.md CHANGED
@@ -1,8 +1,6 @@
1
- # Pinme CLI
1
+ # PinMe
2
2
 
3
- A simple and easy-to-use command-line tool for uploading files and directories to the IPFS network.
4
-
5
- ![Pinme CLI](https://via.placeholder.com/800x200?text=Pinme+CLI)
3
+ [PinMe](https://pinme.eth.limo/) is a simple and easy-to-use command-line tool for uploading files and directories to the [IPFS](https://ipfs.tech/) network.
6
4
 
7
5
  ## Features
8
6
 
@@ -17,13 +15,13 @@ A simple and easy-to-use command-line tool for uploading files and directories t
17
15
  ### Using npm
18
16
 
19
17
  ```bash
20
- npm install -g @glitterprotocol/pinme
18
+ npm install -g pinme
21
19
  ```
22
20
 
23
21
  ### Using yarn
24
22
 
25
23
  ```bash
26
- yarn global add @glitterprotocol/pinme
24
+ yarn global add pinme
27
25
  ```
28
26
 
29
27
  ## Usage
@@ -59,9 +57,6 @@ pinme list -c
59
57
  ```bash
60
58
  # Display help information
61
59
  pinme help
62
-
63
- # Display help for a specific command
64
- pinme help upload
65
60
  ```
66
61
 
67
62
  ## Command Details
@@ -129,9 +124,6 @@ pinme help [command]
129
124
  ```bash
130
125
  # Display general help
131
126
  pinme help
132
-
133
- # Display help for the upload command
134
- pinme help upload
135
127
  ```
136
128
 
137
129
  ## Upload Limits
@@ -146,45 +138,12 @@ Uploaded files are stored on the IPFS network and accessible through the Glitter
146
138
  1. IPFS hash value
147
139
  2. Accessible URL link
148
140
 
149
- ## Data Privacy
150
-
151
- - Upload history is stored locally (`~/.pinme/upload-history.json`)
152
- - Device ID is stored locally (`~/.pinme/device-id.json`)
153
- - All content uploaded to IPFS is public, do not upload sensitive information
154
-
155
- ## Troubleshooting
156
-
157
- ### Common Issues
158
-
159
- 1. **Upload Failure**
160
- - Check network connection
161
- - Confirm the file/directory exists and has read permissions
162
- - Confirm the file size does not exceed limits
163
-
164
- 2. **Command Not Found**
165
- - Confirm global installation was successful
166
- - Check PATH environment variable
167
- - Try reinstalling
168
-
169
- 3. **History Not Displaying**
170
- - Check if the `~/.pinme` directory exists
171
- - Confirm read/write permissions
172
-
173
141
  ### Log Locations
174
142
 
175
143
  Logs and configuration files are stored in:
176
144
  - Linux/macOS: `~/.pinme/`
177
145
  - Windows: `%USERPROFILE%\.pinme\`
178
146
 
179
- ## Contribution Guidelines
180
-
181
- Contributions of code, issue reports, or improvement suggestions are welcome! Please follow these steps:
182
-
183
- 1. Fork the repository
184
- 2. Create a feature branch (`git checkout -b feature/amazing-feature`)
185
- 3. Commit your changes (`git commit -m 'Add some amazing feature'`)
186
- 4. Push to the branch (`git push origin feature/amazing-feature`)
187
- 5. Create a Pull Request
188
147
 
189
148
  ## License
190
149
 
@@ -194,9 +153,9 @@ MIT License - See the [LICENSE](LICENSE) file for details
194
153
 
195
154
  If you have questions or suggestions, please contact us through:
196
155
 
197
- - GitHub Issues: [https://github.com/glitterprotocol/pinme-cli/issues](https://github.com/glitterprotocol/pinme-cli/issues)
198
- - Email: [support@example.com](mailto:support@example.com)
156
+ - GitHub Issues: [https://github.com/glitternetwork/pinme/issues](https://github.com/glitternetwork/pinme/issues)
157
+ - Email: [pinme@glitterprotocol.io](mailto:pinme@glitterprotocol.io)
199
158
 
200
159
  ---
201
160
 
202
- Developed and maintained by the [Glitter Protocol](https://github.com/glitterprotocol) team
161
+ Developed and maintained by the [Glitter Protocol](https://glitterprotocol.io/) team
package/dist/index.js CHANGED
@@ -1,2 +1,840 @@
1
1
  #!/usr/bin/env node
2
- "use strict";var e=require("commander"),o=require("chalk"),t=require("figlet"),r=require("path"),i=require("inquirer");require("ethers"),require("bip39");var s=require("axios"),n=require("fs-extra"),a=require("form-data"),l=require("ora"),c=require("os"),d=require("uuid"),u=require("fs"),p=require("dayjs"),f=require("crypto-js");function h(e){return e&&"object"==typeof e&&"default"in e?e:{default:e}}var y=h(e),m=h(o),g=h(t),S=h(r),$=h(i),v=h(s),z=h(n),w=h(a),H=h(l),x=h(c),F=h(d),I=h(u),L=h(p),D=h(f);const E=z.default,U=S.default,b=x.default,{v4:q}=F.default,j=U.join(b.homedir(),".pinme"),k=U.join(j,"device-id.json");var T={getDeviceId:function(){try{if(E.existsSync(j)||E.mkdirSync(j,{recursive:!0}),E.existsSync(k)){const e=E.readJsonSync(k);if(e&&e.deviceId)return e.deviceId}const e=q();return E.writeJsonSync(k,{deviceId:e,createdAt:(new Date).toISOString()},{spaces:2}),e}catch(e){return console.error(`Error getting device ID: ${e.message}`),q()}}};const C=I.default,J=S.default,M=10485760,P=524288e3,R=e=>{let o=0;const t=C.readdirSync(e);for(const r of t){const t=J.join(e,r),i=C.statSync(t);i.isFile()?o+=i.size:i.isDirectory()&&(o+=R(t))}return o};var A={checkFileSizeLimit:e=>{const o=C.statSync(e).size;return{size:o,exceeds:o>M,limit:M}},checkDirectorySizeLimit:e=>{let o=0,t={path:"",size:0},r=!1;const i=e=>{const s=C.readdirSync(e);for(const n of s){const s=J.join(e,n),a=C.statSync(s);a.isFile()?(o+=a.size,a.size>t.size&&(t={path:s,size:a.size}),a.size>M&&(r=!0)):a.isDirectory()&&i(s)}};return i(e),{totalSize:o,hasExceeded:r,largestFile:t,exceedsTotalLimit:o>P,limit:M,totalLimit:P}},calculateDirectorySize:R,formatSize:e=>e<1024?e+" B":e<1048576?(e/1024).toFixed(2)+" KB":e<1073741824?(e/1048576).toFixed(2)+" MB":(e/1073741824).toFixed(2)+" GB",SINGLE_FILE_LIMIT:M,TOTAL_SIZE_LIMIT:P};const B=z.default,_=S.default,N=x.default,W=L.default,Y=m.default,{formatSize:O}=A,Q=_.join(N.homedir(),".pinme"),Z=_.join(Q,"upload-history.json"),G=()=>{B.existsSync(Q)||B.mkdirSync(Q,{recursive:!0}),B.existsSync(Z)||B.writeJsonSync(Z,{uploads:[]})},K=(e=10)=>{try{G();return B.readJsonSync(Z).uploads.slice(0,e)}catch(e){return console.error(Y.red(`Error reading upload history: ${e.message}`)),[]}};var V={saveUploadHistory:e=>{try{G();const o=B.readJsonSync(Z),t={timestamp:Date.now(),date:W().format("YYYY-MM-DD HH:mm:ss"),path:e.path,filename:e.filename||_.basename(e.path),contentHash:e.contentHash,previewHash:e.previewHash,size:e.size,fileCount:e.fileCount||1,type:e.isDirectory?"directory":"file"};return o.uploads.unshift(t),B.writeJsonSync(Z,o,{spaces:2}),!0}catch(e){return console.error(Y.red(`Error saving upload history: ${e.message}`)),!1}},getUploadHistory:K,displayUploadHistory:(e=10)=>{const o=K(e);if(0===o.length)return void console.log(Y.yellow("No upload history found."));console.log(Y.bold("\nšŸ“œ Upload History:")),console.log(Y.dim("─".repeat(80))),o.forEach(((e,o)=>{console.log(Y.bold(`#${o+1} - ${e.date}`)),console.log(Y.cyan(`Name: ${e.filename}`)),console.log(Y.cyan(`Path: ${e.path}`)),console.log(Y.cyan(`Type: ${e.type}`)),console.log(Y.cyan(`Size: ${O(e.size)}`)),"directory"===e.type&&console.log(Y.cyan(`Files: ${e.fileCount}`)),console.log(Y.cyan(`Content Hash: ${e.contentHash}`)),e.previewHash?(console.log(Y.cyan(`Preview Hash: ${e.previewHash}`)),console.log(Y.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${e.previewHash}/#/?from=local`))):console.log(Y.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${e.contentHash}`)),console.log(Y.dim("─".repeat(80)))}));const 
t=o.reduce(((e,o)=>e+o.size),0),r=o.reduce(((e,o)=>e+o.fileCount),0);console.log(Y.bold(`Total Uploads: ${o.length}`)),console.log(Y.bold(`Total Files: ${r}`)),console.log(Y.bold(`Total Size: ${O(t)}`))},clearUploadHistory:()=>{try{return G(),B.writeJsonSync(Z,{uploads:[]}),console.log(Y.green("Upload history cleared successfully.")),!0}catch(e){return console.error(Y.red(`Error clearing upload history: ${e.message}`)),!1}}};const X=v.default,ee=z.default,oe=S.default,te=w.default,re=H.default,ie=m.default,se=x.default,{getDeviceId:ne}=T,{checkFileSizeLimit:ae,checkDirectorySizeLimit:le,calculateDirectorySize:ce,formatSize:de,TOTAL_SIZE_LIMIT:ue}=A,{saveUploadHistory:pe}=V;oe.join(se.homedir(),".pinme","upload-history.json");const fe="https://ipfs.glitterprotocol.dev/api/v2";let he=null;function ye(e,o){const t=[],r=oe.sep;if(he??=e.replace(o,""),ee.statSync(e).isDirectory()){ee.readdirSync(e).forEach((i=>{const s=oe.join(e,i);if(ee.statSync(s).isFile()){const e=ae(s);if(e.exceeds)throw new Error(`File ${i} exceeds size limit of ${e.limit/1048576}MB (size: ${e.size/1048576}MB)`);const o=s.replace(he,"").replaceAll(r,"%2F");t.push({name:o,path:s})}else if(ee.statSync(s).isDirectory()){const e=ye(s,o);t.push(...e)}}))}else console.error("Error: path must be a directory");return t}const me=e=>{let o=0;const t=e=>{const r=ee.readdirSync(e);for(const i of r){const r=oe.join(e,i),s=ee.statSync(r);s.isFile()?o++:s.isDirectory()&&t(r)}};return t(e),o};async function ge(e,o){const t=le(e);if(t.hasExceeded)throw new Error(`File ${t.largestFile.path} exceeds size limit of ${de(t.limit)} (size: ${de(t.largestFile.size)})`);if(t.exceedsTotalLimit)throw new Error(`Total directory size ${de(t.totalSize)} exceeds the limit of ${de(t.totalLimit)}`);const r=new te;e.endsWith(oe.sep)&&(e=e.slice(0,-1));const i=e.split(oe.sep).pop();ye(e,i).forEach((e=>{r.append("file",ee.createReadStream(e.path),{filename:e.name})}));const s=re(`Uploading ${e} to glitter ipfs...`).start(),n=(await X.post(`${fe}/add`,r,{headers:{...r.getHeaders(),uid:o}})).data.data;if(Array.isArray(n)&&n.length>0){s.succeed();const o=n.find((e=>e.Name===i));if(o){ee.statSync(e);const r=me(e),i={path:e,filename:oe.basename(e),contentHash:o.Hash,previewHash:null,size:t.totalSize,fileCount:r,isDirectory:!0};return pe(i),o.Hash}s.fail(),console.log(ie.red("Directory hash not found in response"))}else s.fail(),console.log(ie.red("Invalid response format from IPFS"));return null}var Se=async function(e){try{const o=ne();let t;return t=ee.statSync(e).isDirectory()?await ge(e,o):await async function(e,o){const t=ae(e);if(t.exceeds)throw new Error(`File exceeds size limit of ${de(t.limit)} (size: ${de(t.size)})`);const r=new te;r.append("file",ee.createReadStream(e));const i=re(`Uploading ${e} to glitter ipfs...`).start(),s=(await X.post(`${fe}/add`,r,{headers:{...r.getHeaders(),uid:o}})).data.data;if(s){i.succeed();const o=ee.statSync(e),t=1,r={path:e,filename:oe.basename(e),contentHash:s[0].Hash,previewHash:null,size:o.size,fileCount:t,isDirectory:!1};return pe(r),s[0].Hash}return i.fail(),console.log(ie.red("Invalid response format from IPFS")),null}(e,o),{contentHash:t}}catch(e){return console.log(ie.red(`${e}`)),null}};const $e=S.default,ve=m.default,ze=$.default,we=g.default,He=Se,xe=I.default,Fe=D.default,Ie="https://ipfs.glitterprotocol.dev/ipfs/QmRumtELULDtHJeJVUF1nthQnWLQ81DWCFpXikZS3WeEtU/#/preview/",Le="pinme-secret-key";function De(e,o){try{const t=Fe.RC4.encrypt(e,o).toString();return 
t.replace(/\+/g,"-").replace(/\//g,"_").replace(/=+$/,"")}catch(o){return console.error(`Encryption error: ${o.message}`),e}}function Ee(e){try{const o=$e.resolve(e);return xe.existsSync(o)?o:null}catch(e){return console.error(ve.red(`error checking path: ${e.message}`)),null}}var Ue="1.0.0";const be=y.default,qe=m.default,je=g.default,ke=async e=>{try{console.log(we.textSync("PINME",{font:"Shadow",horizontalLayout:"default",verticalLayout:"default",width:180,whitespaceBreak:!0}));const e=process.argv[3];if(e&&!e.startsWith("-")){const o=Ee(e);if(!o)return void console.log(ve.red(`path ${e} does not exist`));console.log(ve.blue(`uploading ${o} to ipfs...`));try{const e=await He(o);if(e){const o=De(e.contentHash,Le);console.log(ve.cyan(we.textSync("Successful",{horizontalLayout:"full"}))),console.log(ve.cyan(`URL: ${Ie}${o}`))}else console.log(ve.red("upload failed"))}catch(e){console.error(ve.red(`error uploading: ${e.message}`)),console.error(e.stack)}return}const o=await ze.prompt([{type:"input",name:"path",message:"path to upload: "}]);if(o.path){const e=Ee(o.path);if(!e)return void console.log(ve.red(`path ${o.path} does not exist`));console.log(ve.blue(`uploading ${e} to ipfs...`));try{const o=await He(e);if(o){const e=De(o.contentHash,Le);console.log(ve.cyan(we.textSync("Successful",{horizontalLayout:"full"}))),console.log(ve.cyan(`IPFS Hash: ${o.contentHash}`)),console.log(ve.cyan(`URL: ${Ie}${e}`))}else console.log(ve.red("upload failed"))}catch(e){console.error(ve.red(`error uploading: ${e.message}`)),console.error(e.stack)}}}catch(e){console.error(ve.red(`error executing: ${e.message}`)),console.error(e.stack)}},{displayUploadHistory:Te,clearUploadHistory:Ce}=V;function Je(){console.log(qe.cyan(je.textSync("Pinme",{horizontalLayout:"full"}))),console.log(qe.cyan("A command-line tool for uploading files to IPFS\n"))}be.name("pinme").version(Ue).option("-v, --version","output the current version"),be.command("upload").description("upload a file or directory to IPFS").action((()=>ke())),be.command("list").description("show upload history").option("-l, --limit <number>","limit the number of records to show",parseInt).option("-c, --clear","clear all upload history").action((e=>{e.clear?Ce():Te(e.limit||10)})),be.command("ls").description("alias for 'list' command").option("-l, --limit <number>","limit the number of records to show",parseInt).option("-c, --clear","clear all upload history").action((e=>{e.clear?Ce():Te(e.limit||10)})),be.command("help").description("display help information").action((()=>{Je(),be.help()})),be.on("--help",(()=>{console.log(""),console.log("Examples:"),console.log(" $ pinme upload"),console.log(" $ pinme list -l 5"),console.log(" $ pinme ls"),console.log(" $ pinme help"),console.log(""),console.log("For more information, visit: https://github.com/glitterprotocol/pinme-cli")})),be.parse(process.argv),2===process.argv.length&&(Je(),be.help()),module.exports={};
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __commonJS = (cb, mod) => function __require() {
9
+ return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
10
+ };
11
+ var __copyProps = (to, from, except, desc) => {
12
+ if (from && typeof from === "object" || typeof from === "function") {
13
+ for (let key of __getOwnPropNames(from))
14
+ if (!__hasOwnProp.call(to, key) && key !== except)
15
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
+ }
17
+ return to;
18
+ };
19
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
20
+ // If the importer is in node compatibility mode or this is not an ESM
21
+ // file that has been converted to a CommonJS file using a Babel-
22
+ // compatible transform (i.e. "__esModule" has not been set), then set
23
+ // "default" to the CommonJS "module.exports" for node compatibility.
24
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
25
+ mod
26
+ ));
27
+
28
+ // node_modules/.pnpm/dotenv@16.5.0/node_modules/dotenv/package.json
29
+ var require_package = __commonJS({
30
+ "node_modules/.pnpm/dotenv@16.5.0/node_modules/dotenv/package.json"(exports2, module2) {
31
+ module2.exports = {
32
+ name: "dotenv",
33
+ version: "16.5.0",
34
+ description: "Loads environment variables from .env file",
35
+ main: "lib/main.js",
36
+ types: "lib/main.d.ts",
37
+ exports: {
38
+ ".": {
39
+ types: "./lib/main.d.ts",
40
+ require: "./lib/main.js",
41
+ default: "./lib/main.js"
42
+ },
43
+ "./config": "./config.js",
44
+ "./config.js": "./config.js",
45
+ "./lib/env-options": "./lib/env-options.js",
46
+ "./lib/env-options.js": "./lib/env-options.js",
47
+ "./lib/cli-options": "./lib/cli-options.js",
48
+ "./lib/cli-options.js": "./lib/cli-options.js",
49
+ "./package.json": "./package.json"
50
+ },
51
+ scripts: {
52
+ "dts-check": "tsc --project tests/types/tsconfig.json",
53
+ lint: "standard",
54
+ pretest: "npm run lint && npm run dts-check",
55
+ test: "tap run --allow-empty-coverage --disable-coverage --timeout=60000",
56
+ "test:coverage": "tap run --show-full-coverage --timeout=60000 --coverage-report=lcov",
57
+ prerelease: "npm test",
58
+ release: "standard-version"
59
+ },
60
+ repository: {
61
+ type: "git",
62
+ url: "git://github.com/motdotla/dotenv.git"
63
+ },
64
+ homepage: "https://github.com/motdotla/dotenv#readme",
65
+ funding: "https://dotenvx.com",
66
+ keywords: [
67
+ "dotenv",
68
+ "env",
69
+ ".env",
70
+ "environment",
71
+ "variables",
72
+ "config",
73
+ "settings"
74
+ ],
75
+ readmeFilename: "README.md",
76
+ license: "BSD-2-Clause",
77
+ devDependencies: {
78
+ "@types/node": "^18.11.3",
79
+ decache: "^4.6.2",
80
+ sinon: "^14.0.1",
81
+ standard: "^17.0.0",
82
+ "standard-version": "^9.5.0",
83
+ tap: "^19.2.0",
84
+ typescript: "^4.8.4"
85
+ },
86
+ engines: {
87
+ node: ">=12"
88
+ },
89
+ browser: {
90
+ fs: false
91
+ }
92
+ };
93
+ }
94
+ });
95
+
96
+ // node_modules/.pnpm/dotenv@16.5.0/node_modules/dotenv/lib/main.js
97
+ var require_main = __commonJS({
98
+ "node_modules/.pnpm/dotenv@16.5.0/node_modules/dotenv/lib/main.js"(exports2, module2) {
99
+ var fs6 = require("fs");
100
+ var path6 = require("path");
101
+ var os3 = require("os");
102
+ var crypto = require("crypto");
103
+ var packageJson = require_package();
104
+ var version2 = packageJson.version;
105
+ var LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg;
106
+ function parse(src) {
107
+ const obj = {};
108
+ let lines = src.toString();
109
+ lines = lines.replace(/\r\n?/mg, "\n");
110
+ let match;
111
+ while ((match = LINE.exec(lines)) != null) {
112
+ const key = match[1];
113
+ let value = match[2] || "";
114
+ value = value.trim();
115
+ const maybeQuote = value[0];
116
+ value = value.replace(/^(['"`])([\s\S]*)\1$/mg, "$2");
117
+ if (maybeQuote === '"') {
118
+ value = value.replace(/\\n/g, "\n");
119
+ value = value.replace(/\\r/g, "\r");
120
+ }
121
+ obj[key] = value;
122
+ }
123
+ return obj;
124
+ }
125
+ function _parseVault(options) {
126
+ const vaultPath = _vaultPath(options);
127
+ const result = DotenvModule.configDotenv({ path: vaultPath });
128
+ if (!result.parsed) {
129
+ const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`);
130
+ err.code = "MISSING_DATA";
131
+ throw err;
132
+ }
133
+ const keys = _dotenvKey(options).split(",");
134
+ const length = keys.length;
135
+ let decrypted;
136
+ for (let i = 0; i < length; i++) {
137
+ try {
138
+ const key = keys[i].trim();
139
+ const attrs = _instructions(result, key);
140
+ decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key);
141
+ break;
142
+ } catch (error) {
143
+ if (i + 1 >= length) {
144
+ throw error;
145
+ }
146
+ }
147
+ }
148
+ return DotenvModule.parse(decrypted);
149
+ }
150
+ function _warn(message) {
151
+ console.log(`[dotenv@${version2}][WARN] ${message}`);
152
+ }
153
+ function _debug(message) {
154
+ console.log(`[dotenv@${version2}][DEBUG] ${message}`);
155
+ }
156
+ function _dotenvKey(options) {
157
+ if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {
158
+ return options.DOTENV_KEY;
159
+ }
160
+ if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {
161
+ return process.env.DOTENV_KEY;
162
+ }
163
+ return "";
164
+ }
165
+ function _instructions(result, dotenvKey) {
166
+ let uri;
167
+ try {
168
+ uri = new URL(dotenvKey);
169
+ } catch (error) {
170
+ if (error.code === "ERR_INVALID_URL") {
171
+ const err = new Error("INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development");
172
+ err.code = "INVALID_DOTENV_KEY";
173
+ throw err;
174
+ }
175
+ throw error;
176
+ }
177
+ const key = uri.password;
178
+ if (!key) {
179
+ const err = new Error("INVALID_DOTENV_KEY: Missing key part");
180
+ err.code = "INVALID_DOTENV_KEY";
181
+ throw err;
182
+ }
183
+ const environment = uri.searchParams.get("environment");
184
+ if (!environment) {
185
+ const err = new Error("INVALID_DOTENV_KEY: Missing environment part");
186
+ err.code = "INVALID_DOTENV_KEY";
187
+ throw err;
188
+ }
189
+ const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`;
190
+ const ciphertext = result.parsed[environmentKey];
191
+ if (!ciphertext) {
192
+ const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`);
193
+ err.code = "NOT_FOUND_DOTENV_ENVIRONMENT";
194
+ throw err;
195
+ }
196
+ return { ciphertext, key };
197
+ }
198
+ function _vaultPath(options) {
199
+ let possibleVaultPath = null;
200
+ if (options && options.path && options.path.length > 0) {
201
+ if (Array.isArray(options.path)) {
202
+ for (const filepath of options.path) {
203
+ if (fs6.existsSync(filepath)) {
204
+ possibleVaultPath = filepath.endsWith(".vault") ? filepath : `${filepath}.vault`;
205
+ }
206
+ }
207
+ } else {
208
+ possibleVaultPath = options.path.endsWith(".vault") ? options.path : `${options.path}.vault`;
209
+ }
210
+ } else {
211
+ possibleVaultPath = path6.resolve(process.cwd(), ".env.vault");
212
+ }
213
+ if (fs6.existsSync(possibleVaultPath)) {
214
+ return possibleVaultPath;
215
+ }
216
+ return null;
217
+ }
218
+ function _resolveHome(envPath) {
219
+ return envPath[0] === "~" ? path6.join(os3.homedir(), envPath.slice(1)) : envPath;
220
+ }
221
+ function _configVault(options) {
222
+ const debug = Boolean(options && options.debug);
223
+ if (debug) {
224
+ _debug("Loading env from encrypted .env.vault");
225
+ }
226
+ const parsed = DotenvModule._parseVault(options);
227
+ let processEnv = process.env;
228
+ if (options && options.processEnv != null) {
229
+ processEnv = options.processEnv;
230
+ }
231
+ DotenvModule.populate(processEnv, parsed, options);
232
+ return { parsed };
233
+ }
234
+ function configDotenv(options) {
235
+ const dotenvPath = path6.resolve(process.cwd(), ".env");
236
+ let encoding = "utf8";
237
+ const debug = Boolean(options && options.debug);
238
+ if (options && options.encoding) {
239
+ encoding = options.encoding;
240
+ } else {
241
+ if (debug) {
242
+ _debug("No encoding is specified. UTF-8 is used by default");
243
+ }
244
+ }
245
+ let optionPaths = [dotenvPath];
246
+ if (options && options.path) {
247
+ if (!Array.isArray(options.path)) {
248
+ optionPaths = [_resolveHome(options.path)];
249
+ } else {
250
+ optionPaths = [];
251
+ for (const filepath of options.path) {
252
+ optionPaths.push(_resolveHome(filepath));
253
+ }
254
+ }
255
+ }
256
+ let lastError;
257
+ const parsedAll = {};
258
+ for (const path7 of optionPaths) {
259
+ try {
260
+ const parsed = DotenvModule.parse(fs6.readFileSync(path7, { encoding }));
261
+ DotenvModule.populate(parsedAll, parsed, options);
262
+ } catch (e) {
263
+ if (debug) {
264
+ _debug(`Failed to load ${path7} ${e.message}`);
265
+ }
266
+ lastError = e;
267
+ }
268
+ }
269
+ let processEnv = process.env;
270
+ if (options && options.processEnv != null) {
271
+ processEnv = options.processEnv;
272
+ }
273
+ DotenvModule.populate(processEnv, parsedAll, options);
274
+ if (lastError) {
275
+ return { parsed: parsedAll, error: lastError };
276
+ } else {
277
+ return { parsed: parsedAll };
278
+ }
279
+ }
280
+ function config(options) {
281
+ if (_dotenvKey(options).length === 0) {
282
+ return DotenvModule.configDotenv(options);
283
+ }
284
+ const vaultPath = _vaultPath(options);
285
+ if (!vaultPath) {
286
+ _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`);
287
+ return DotenvModule.configDotenv(options);
288
+ }
289
+ return DotenvModule._configVault(options);
290
+ }
291
+ function decrypt(encrypted, keyStr) {
292
+ const key = Buffer.from(keyStr.slice(-64), "hex");
293
+ let ciphertext = Buffer.from(encrypted, "base64");
294
+ const nonce = ciphertext.subarray(0, 12);
295
+ const authTag = ciphertext.subarray(-16);
296
+ ciphertext = ciphertext.subarray(12, -16);
297
+ try {
298
+ const aesgcm = crypto.createDecipheriv("aes-256-gcm", key, nonce);
299
+ aesgcm.setAuthTag(authTag);
300
+ return `${aesgcm.update(ciphertext)}${aesgcm.final()}`;
301
+ } catch (error) {
302
+ const isRange = error instanceof RangeError;
303
+ const invalidKeyLength = error.message === "Invalid key length";
304
+ const decryptionFailed = error.message === "Unsupported state or unable to authenticate data";
305
+ if (isRange || invalidKeyLength) {
306
+ const err = new Error("INVALID_DOTENV_KEY: It must be 64 characters long (or more)");
307
+ err.code = "INVALID_DOTENV_KEY";
308
+ throw err;
309
+ } else if (decryptionFailed) {
310
+ const err = new Error("DECRYPTION_FAILED: Please check your DOTENV_KEY");
311
+ err.code = "DECRYPTION_FAILED";
312
+ throw err;
313
+ } else {
314
+ throw error;
315
+ }
316
+ }
317
+ }
318
+ function populate(processEnv, parsed, options = {}) {
319
+ const debug = Boolean(options && options.debug);
320
+ const override = Boolean(options && options.override);
321
+ if (typeof parsed !== "object") {
322
+ const err = new Error("OBJECT_REQUIRED: Please check the processEnv argument being passed to populate");
323
+ err.code = "OBJECT_REQUIRED";
324
+ throw err;
325
+ }
326
+ for (const key of Object.keys(parsed)) {
327
+ if (Object.prototype.hasOwnProperty.call(processEnv, key)) {
328
+ if (override === true) {
329
+ processEnv[key] = parsed[key];
330
+ }
331
+ if (debug) {
332
+ if (override === true) {
333
+ _debug(`"${key}" is already defined and WAS overwritten`);
334
+ } else {
335
+ _debug(`"${key}" is already defined and was NOT overwritten`);
336
+ }
337
+ }
338
+ } else {
339
+ processEnv[key] = parsed[key];
340
+ }
341
+ }
342
+ }
343
+ var DotenvModule = {
344
+ configDotenv,
345
+ _configVault,
346
+ _parseVault,
347
+ config,
348
+ decrypt,
349
+ parse,
350
+ populate
351
+ };
352
+ module2.exports.configDotenv = DotenvModule.configDotenv;
353
+ module2.exports._configVault = DotenvModule._configVault;
354
+ module2.exports._parseVault = DotenvModule._parseVault;
355
+ module2.exports.config = DotenvModule.config;
356
+ module2.exports.decrypt = DotenvModule.decrypt;
357
+ module2.exports.parse = DotenvModule.parse;
358
+ module2.exports.populate = DotenvModule.populate;
359
+ module2.exports = DotenvModule;
360
+ }
361
+ });
362
+
363
+ // bin/index.ts
364
+ var import_dotenv = __toESM(require_main());
365
+ var import_commander = require("commander");
366
+ var import_chalk4 = __toESM(require("chalk"));
367
+ var import_figlet2 = __toESM(require("figlet"));
368
+
369
+ // package.json
370
+ var version = "1.0.1";
371
+
372
+ // bin/upload.ts
373
+ var import_path5 = __toESM(require("path"));
374
+ var import_chalk3 = __toESM(require("chalk"));
375
+ var import_inquirer = __toESM(require("inquirer"));
376
+ var import_figlet = __toESM(require("figlet"));
377
+
378
+ // bin/utils/uploadToIpfs.ts
379
+ var import_axios = __toESM(require("axios"));
380
+ var import_fs_extra3 = __toESM(require("fs-extra"));
381
+ var import_path4 = __toESM(require("path"));
382
+ var import_form_data = __toESM(require("form-data"));
383
+ var import_ora = __toESM(require("ora"));
384
+ var import_chalk2 = __toESM(require("chalk"));
385
+
386
+ // bin/utils/uploadLimits.ts
387
+ var import_fs = __toESM(require("fs"));
388
+ var import_path = __toESM(require("path"));
389
+ var FILE_SIZE_LIMIT = parseInt("100", 10) * 1024 * 1024;
390
+ var DIRECTORY_SIZE_LIMIT = parseInt("500", 10) * 1024 * 1024;
391
+ function checkFileSizeLimit(filePath) {
392
+ const stats = import_fs.default.statSync(filePath);
393
+ return {
394
+ size: stats.size,
395
+ limit: FILE_SIZE_LIMIT,
396
+ exceeds: stats.size > FILE_SIZE_LIMIT
397
+ };
398
+ }
399
+ function checkDirectorySizeLimit(directoryPath) {
400
+ const totalSize = calculateDirectorySize(directoryPath);
401
+ return {
402
+ size: totalSize,
403
+ limit: DIRECTORY_SIZE_LIMIT,
404
+ exceeds: totalSize > DIRECTORY_SIZE_LIMIT
405
+ };
406
+ }
407
+ function calculateDirectorySize(directoryPath) {
408
+ let totalSize = 0;
409
+ const files = import_fs.default.readdirSync(directoryPath);
410
+ for (const file of files) {
411
+ const filePath = import_path.default.join(directoryPath, file);
412
+ const stats = import_fs.default.statSync(filePath);
413
+ if (stats.isFile()) {
414
+ totalSize += stats.size;
415
+ } else if (stats.isDirectory()) {
416
+ totalSize += calculateDirectorySize(filePath);
417
+ }
418
+ }
419
+ return totalSize;
420
+ }
421
+ function formatSize(bytes) {
422
+ if (bytes < 1024) return bytes + " bytes";
423
+ else if (bytes < 1024 * 1024) return (bytes / 1024).toFixed(2) + " KB";
424
+ else if (bytes < 1024 * 1024 * 1024) return (bytes / (1024 * 1024)).toFixed(2) + " MB";
425
+ else return (bytes / (1024 * 1024 * 1024)).toFixed(2) + " GB";
426
+ }
427
+
428
+ // bin/utils/history.ts
429
+ var import_fs_extra = __toESM(require("fs-extra"));
430
+ var import_path2 = __toESM(require("path"));
431
+ var import_os = __toESM(require("os"));
432
+ var import_dayjs = __toESM(require("dayjs"));
433
+ var import_chalk = __toESM(require("chalk"));
434
+ var HISTORY_DIR = import_path2.default.join(import_os.default.homedir(), ".pinme");
435
+ var HISTORY_FILE = import_path2.default.join(HISTORY_DIR, "upload-history.json");
436
+ var ensureHistoryDir = () => {
437
+ if (!import_fs_extra.default.existsSync(HISTORY_DIR)) {
438
+ import_fs_extra.default.mkdirSync(HISTORY_DIR, { recursive: true });
439
+ }
440
+ if (!import_fs_extra.default.existsSync(HISTORY_FILE)) {
441
+ import_fs_extra.default.writeJsonSync(HISTORY_FILE, { uploads: [] });
442
+ }
443
+ };
444
+ var saveUploadHistory = (uploadData) => {
445
+ try {
446
+ ensureHistoryDir();
447
+ const history = import_fs_extra.default.readJsonSync(HISTORY_FILE);
448
+ const newRecord = {
449
+ timestamp: Date.now(),
450
+ date: (0, import_dayjs.default)().format("YYYY-MM-DD HH:mm:ss"),
451
+ path: uploadData.path,
452
+ filename: uploadData.filename || import_path2.default.basename(uploadData.path),
453
+ contentHash: uploadData.contentHash,
454
+ previewHash: uploadData.previewHash,
455
+ size: uploadData.size,
456
+ fileCount: uploadData.fileCount || 1,
457
+ type: uploadData.isDirectory ? "directory" : "file"
458
+ };
459
+ history.uploads.unshift(newRecord);
460
+ import_fs_extra.default.writeJsonSync(HISTORY_FILE, history, { spaces: 2 });
461
+ return true;
462
+ } catch (error) {
463
+ console.error(import_chalk.default.red(`Error saving upload history: ${error.message}`));
464
+ return false;
465
+ }
466
+ };
467
+ var getUploadHistory = (limit = 10) => {
468
+ try {
469
+ ensureHistoryDir();
470
+ const history = import_fs_extra.default.readJsonSync(HISTORY_FILE);
471
+ return history.uploads.slice(0, limit);
472
+ } catch (error) {
473
+ console.error(import_chalk.default.red(`Error reading upload history: ${error.message}`));
474
+ return [];
475
+ }
476
+ };
477
+ var displayUploadHistory = (limit = 10) => {
478
+ const history = getUploadHistory(limit);
479
+ if (history.length === 0) {
480
+ console.log(import_chalk.default.yellow("No upload history found."));
481
+ return;
482
+ }
483
+ console.log(import_chalk.default.bold("\n\u{1F4DC} Upload History:"));
484
+ console.log(import_chalk.default.dim("\u2500".repeat(80)));
485
+ history.forEach((record, index) => {
486
+ console.log(import_chalk.default.bold(`#${index + 1} - ${record.date}`));
487
+ console.log(import_chalk.default.cyan(`Name: ${record.filename}`));
488
+ console.log(import_chalk.default.cyan(`Path: ${record.path}`));
489
+ console.log(import_chalk.default.cyan(`Type: ${record.type}`));
490
+ console.log(import_chalk.default.cyan(`Size: ${formatSize(record.size)}`));
491
+ if (record.type === "directory") {
492
+ console.log(import_chalk.default.cyan(`Files: ${record.fileCount}`));
493
+ }
494
+ console.log(import_chalk.default.cyan(`Content Hash: ${record.contentHash}`));
495
+ if (record.previewHash) {
496
+ console.log(import_chalk.default.cyan(`Preview Hash: ${record.previewHash}`));
497
+ console.log(import_chalk.default.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${record.previewHash}/#/?from=local`));
498
+ } else {
499
+ console.log(import_chalk.default.cyan(`URL: https://ipfs.glitterprotocol.dev/ipfs/${record.contentHash}`));
500
+ }
501
+ console.log(import_chalk.default.dim("\u2500".repeat(80)));
502
+ });
503
+ const totalSize = history.reduce((sum, record) => sum + record.size, 0);
504
+ const totalFiles = history.reduce((sum, record) => sum + record.fileCount, 0);
505
+ console.log(import_chalk.default.bold(`Total Uploads: ${history.length}`));
506
+ console.log(import_chalk.default.bold(`Total Files: ${totalFiles}`));
507
+ console.log(import_chalk.default.bold(`Total Size: ${formatSize(totalSize)}`));
508
+ };
509
+ var clearUploadHistory = () => {
510
+ try {
511
+ ensureHistoryDir();
512
+ import_fs_extra.default.writeJsonSync(HISTORY_FILE, { uploads: [] });
513
+ console.log(import_chalk.default.green("Upload history cleared successfully."));
514
+ return true;
515
+ } catch (error) {
516
+ console.error(import_chalk.default.red(`Error clearing upload history: ${error.message}`));
517
+ return false;
518
+ }
519
+ };
520
+
521
+ // bin/utils/getDeviceId.ts
522
+ var import_fs_extra2 = __toESM(require("fs-extra"));
523
+ var import_path3 = __toESM(require("path"));
524
+ var import_os2 = __toESM(require("os"));
525
+ var import_uuid = require("uuid");
526
+ function getDeviceId() {
527
+ const configDir = import_path3.default.join(import_os2.default.homedir(), ".pinme");
528
+ const configFile = import_path3.default.join(configDir, "device-id");
529
+ if (!import_fs_extra2.default.existsSync(configDir)) {
530
+ import_fs_extra2.default.mkdirSync(configDir, { recursive: true });
531
+ }
532
+ if (import_fs_extra2.default.existsSync(configFile)) {
533
+ return import_fs_extra2.default.readFileSync(configFile, "utf8").trim();
534
+ }
535
+ const deviceId = (0, import_uuid.v4)();
536
+ import_fs_extra2.default.writeFileSync(configFile, deviceId);
537
+ return deviceId;
538
+ }
539
+
540
+ // bin/utils/uploadToIpfs.ts
541
+ var ipfsApiUrl = "https://ipfs.glitterprotocol.dev/api/v2";
542
+ var dirPath = null;
543
+ function loadFilesToArrRecursively(directoryPath, dist) {
544
+ const filesArr = [];
545
+ const sep = import_path4.default.sep;
546
+ dirPath ?? (dirPath = directoryPath.replace(dist, ""));
547
+ if (import_fs_extra3.default.statSync(directoryPath).isDirectory()) {
548
+ const files = import_fs_extra3.default.readdirSync(directoryPath);
549
+ files.forEach((file) => {
550
+ const filePath = import_path4.default.join(directoryPath, file);
551
+ if (import_fs_extra3.default.statSync(filePath).isFile()) {
552
+ const sizeCheck = checkFileSizeLimit(filePath);
553
+ if (sizeCheck.exceeds) {
554
+ throw new Error(`File ${file} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
555
+ }
556
+ const filePathWithNoEndSep = filePath.replace(dirPath, "");
557
+ const filePathEncodeSep = filePathWithNoEndSep.replaceAll(sep, "%2F");
558
+ filesArr.push({
559
+ name: filePathEncodeSep,
560
+ path: filePath
561
+ });
562
+ } else if (import_fs_extra3.default.statSync(filePath).isDirectory()) {
563
+ const recursiveFiles = loadFilesToArrRecursively(filePath, dist);
564
+ filesArr.push(...recursiveFiles);
565
+ }
566
+ });
567
+ } else {
568
+ console.error("Error: path must be a directory");
569
+ }
570
+ return filesArr;
571
+ }
572
+ function countFilesInDirectory(directoryPath) {
573
+ let count = 0;
574
+ const files = import_fs_extra3.default.readdirSync(directoryPath);
575
+ for (const file of files) {
576
+ const filePath = import_path4.default.join(directoryPath, file);
577
+ const stats = import_fs_extra3.default.statSync(filePath);
578
+ if (stats.isFile()) {
579
+ count++;
580
+ } else if (stats.isDirectory()) {
581
+ count += countFilesInDirectory(filePath);
582
+ }
583
+ }
584
+ return count;
585
+ }
586
+ async function uploadDirectory(directoryPath, deviceId) {
587
+ const sizeCheck = checkDirectorySizeLimit(directoryPath);
588
+ if (sizeCheck.exceeds) {
589
+ throw new Error(`Directory ${directoryPath} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
590
+ }
591
+ const formData = new import_form_data.default();
592
+ if (directoryPath.endsWith(import_path4.default.sep)) directoryPath = directoryPath.slice(0, -1);
593
+ const dist = directoryPath.split(import_path4.default.sep).pop() || "";
594
+ const files = loadFilesToArrRecursively(directoryPath, dist);
595
+ files.forEach((file) => {
596
+ formData.append("file", import_fs_extra3.default.createReadStream(file.path), {
597
+ filename: file.name
598
+ });
599
+ });
600
+ formData.append("uid", deviceId);
601
+ const spinner = (0, import_ora.default)(`Uploading ${directoryPath} to glitter ipfs...`).start();
602
+ const response = await import_axios.default.post(`${ipfsApiUrl}/add`, formData, {
603
+ headers: {
604
+ ...formData.getHeaders()
605
+ }
606
+ });
607
+ const resData = response.data.data;
608
+ if (Array.isArray(resData) && resData.length > 0) {
609
+ spinner.succeed();
610
+ const directoryItem = resData.find((item) => item.Name === dist);
611
+ if (directoryItem) {
612
+ const fileStats = import_fs_extra3.default.statSync(directoryPath);
613
+ const fileCount = countFilesInDirectory(directoryPath);
614
+ const uploadData = {
615
+ path: directoryPath,
616
+ filename: import_path4.default.basename(directoryPath),
617
+ contentHash: directoryItem.Hash,
618
+ previewHash: null,
619
+ size: sizeCheck.size,
620
+ fileCount,
621
+ isDirectory: true
622
+ };
623
+ saveUploadHistory(uploadData);
624
+ return directoryItem.Hash;
625
+ }
626
+ spinner.fail();
627
+ console.log(import_chalk2.default.red(`Directory hash not found in response`));
628
+ } else {
629
+ spinner.fail();
630
+ console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
631
+ }
632
+ return null;
633
+ }
634
+ async function uploadFile(filePath, deviceId) {
635
+ const sizeCheck = checkFileSizeLimit(filePath);
636
+ if (sizeCheck.exceeds) {
637
+ throw new Error(`File ${filePath} exceeds size limit of ${formatSize(sizeCheck.limit)} (size: ${formatSize(sizeCheck.size)})`);
638
+ }
639
+ const formData = new import_form_data.default();
640
+ formData.append("file", import_fs_extra3.default.createReadStream(filePath), {
641
+ filename: filePath.split(import_path4.default.sep).pop() || ""
642
+ });
643
+ formData.append("uid", deviceId);
644
+ const spinner = (0, import_ora.default)(`Uploading ${filePath} to glitter ipfs...`).start();
645
+ const response = await import_axios.default.post(`${ipfsApiUrl}/add`, formData, {
646
+ headers: {
647
+ ...formData.getHeaders()
648
+ }
649
+ });
650
+ const resData = response.data.data;
651
+ if (Array.isArray(resData) && resData.length > 0) {
652
+ spinner.succeed();
653
+ const fileItem = resData.find((item) => item.Name === filePath.split(import_path4.default.sep).pop() || "");
654
+ if (fileItem) {
655
+ const uploadData = {
656
+ path: filePath,
657
+ filename: filePath.split(import_path4.default.sep).pop() || "",
658
+ contentHash: fileItem.Hash,
659
+ previewHash: null,
660
+ size: sizeCheck.size,
661
+ fileCount: 1,
662
+ isDirectory: false
663
+ };
664
+ saveUploadHistory(uploadData);
665
+ return fileItem.Hash;
666
+ }
667
+ spinner.fail();
668
+ console.log(import_chalk2.default.red(`File hash not found in response`));
669
+ } else {
670
+ spinner.fail();
671
+ console.log(import_chalk2.default.red(`Invalid response format from IPFS`));
672
+ }
673
+ return null;
674
+ }
675
+ async function uploadToIpfs_default(filePath) {
676
+ const deviceId = getDeviceId();
677
+ if (!deviceId) {
678
+ throw new Error("Device ID not found");
679
+ }
680
+ if (import_fs_extra3.default.statSync(filePath).isDirectory()) {
681
+ return {
682
+ contentHash: await uploadDirectory(filePath, deviceId) || "",
683
+ previewHash: null
684
+ };
685
+ } else {
686
+ return {
687
+ contentHash: await uploadFile(filePath, deviceId) || "",
688
+ previewHash: null
689
+ };
690
+ }
691
+ }
692
+
693
+ // bin/upload.ts
694
+ var import_fs2 = __toESM(require("fs"));
695
+ var import_crypto_js = __toESM(require("crypto-js"));
696
+ var URL2 = "https://pinme.eth.limo/#/preview/";
697
+ var secretKey = "pinme-secret-key";
698
+ function encryptHash(hash, key) {
699
+ try {
700
+ if (!key) {
701
+ throw new Error("Secret key not found");
702
+ }
703
+ const encrypted = import_crypto_js.default.RC4.encrypt(hash, key).toString();
704
+ const urlSafe = encrypted.replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
705
+ return urlSafe;
706
+ } catch (error) {
707
+ console.error(`Encryption error: ${error.message}`);
708
+ return hash;
709
+ }
710
+ }
711
+ function checkPathSync(inputPath) {
712
+ try {
713
+ const absolutePath = import_path5.default.resolve(inputPath);
714
+ if (import_fs2.default.existsSync(absolutePath)) {
715
+ return absolutePath;
716
+ }
717
+ return null;
718
+ } catch (error) {
719
+ console.error(import_chalk3.default.red(`error checking path: ${error.message}`));
720
+ return null;
721
+ }
722
+ }
723
+ var upload_default = async (options) => {
724
+ try {
725
+ console.log(
726
+ import_figlet.default.textSync("PINME", {
727
+ font: "Shadow",
728
+ horizontalLayout: "default",
729
+ verticalLayout: "default",
730
+ width: 180,
731
+ whitespaceBreak: true
732
+ })
733
+ );
734
+ const argPath = process.argv[3];
735
+ if (argPath && !argPath.startsWith("-")) {
736
+ const absolutePath = checkPathSync(argPath);
737
+ if (!absolutePath) {
738
+ console.log(import_chalk3.default.red(`path ${argPath} does not exist`));
739
+ return;
740
+ }
741
+ console.log(import_chalk3.default.blue(`uploading ${absolutePath} to ipfs...`));
742
+ try {
743
+ const result = await uploadToIpfs_default(absolutePath);
744
+ if (result) {
745
+ const encryptedCID = encryptHash(result.contentHash, secretKey);
746
+ console.log(import_chalk3.default.cyan(
747
+ import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
748
+ ));
749
+ console.log(import_chalk3.default.cyan(`URL: ${URL2}${encryptedCID}`));
750
+ } else {
751
+ console.log(import_chalk3.default.red(`upload failed`));
752
+ }
753
+ } catch (error) {
754
+ console.error(import_chalk3.default.red(`error uploading: ${error.message}`));
755
+ console.error(error.stack);
756
+ }
757
+ return;
758
+ }
759
+ const answer = await import_inquirer.default.prompt([
760
+ {
761
+ type: "input",
762
+ name: "path",
763
+ message: "path to upload: "
764
+ }
765
+ ]);
766
+ if (answer.path) {
767
+ const absolutePath = checkPathSync(answer.path);
768
+ if (!absolutePath) {
769
+ console.log(import_chalk3.default.red(`path ${answer.path} does not exist`));
770
+ return;
771
+ }
772
+ console.log(import_chalk3.default.blue(`uploading ${absolutePath} to ipfs...`));
773
+ try {
774
+ const result = await uploadToIpfs_default(absolutePath);
775
+ if (result) {
776
+ const encryptedCID = encryptHash(result.contentHash, secretKey);
777
+ console.log(import_chalk3.default.cyan(
778
+ import_figlet.default.textSync("Successful", { horizontalLayout: "full" })
779
+ ));
780
+ console.log(import_chalk3.default.cyan(`URL: ${URL2}${encryptedCID}`));
781
+ } else {
782
+ console.log(import_chalk3.default.red(`upload failed`));
783
+ }
784
+ } catch (error) {
785
+ console.error(import_chalk3.default.red(`error uploading: ${error.message}`));
786
+ console.error(error.stack);
787
+ }
788
+ }
789
+ } catch (error) {
790
+ console.error(import_chalk3.default.red(`error executing: ${error.message}`));
791
+ console.error(error.stack);
792
+ }
793
+ };
794
+
795
+ // bin/index.ts
796
+ import_dotenv.default.config();
797
+ function showBanner() {
798
+ console.log(
799
+ import_chalk4.default.cyan(
800
+ import_figlet2.default.textSync("Pinme", { horizontalLayout: "full" })
801
+ )
802
+ );
803
+ console.log(import_chalk4.default.cyan("A command-line tool for uploading files to IPFS\n"));
804
+ }
805
+ var program = new import_commander.Command();
806
+ program.name("pinme").version(version).option("-v, --version", "output the current version");
807
+ program.command("upload").description("upload a file or directory to IPFS").action(() => upload_default());
808
+ program.command("list").description("show upload history").option("-l, --limit <number>", "limit the number of records to show", parseInt).option("-c, --clear", "clear all upload history").action((options) => {
809
+ if (options.clear) {
810
+ clearUploadHistory();
811
+ } else {
812
+ displayUploadHistory(options.limit || 10);
813
+ }
814
+ });
815
+ program.command("ls").description("alias for 'list' command").option("-l, --limit <number>", "limit the number of records to show", parseInt).option("-c, --clear", "clear all upload history").action((options) => {
816
+ if (options.clear) {
817
+ clearUploadHistory();
818
+ } else {
819
+ displayUploadHistory(options.limit || 10);
820
+ }
821
+ });
822
+ program.command("help").description("display help information").action(() => {
823
+ showBanner();
824
+ program.help();
825
+ });
826
+ program.on("--help", () => {
827
+ console.log("");
828
+ console.log("Examples:");
829
+ console.log(" $ pinme upload");
830
+ console.log(" $ pinme list -l 5");
831
+ console.log(" $ pinme ls");
832
+ console.log(" $ pinme help");
833
+ console.log("");
834
+ console.log("For more information, visit: https://github.com/glitternetwork/pinme");
835
+ });
836
+ program.parse(process.argv);
837
+ if (process.argv.length === 2) {
838
+ showBanner();
839
+ program.help();
840
+ }
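For quick reference, the preview URL printed after an upload in 1.0.1 is produced by RC4-encrypting the returned IPFS content hash with a fixed key and making the ciphertext URL-safe, as `encryptHash` in the bundle above does. A minimal standalone sketch of that derivation (the helper name is illustrative; crypto-js is a runtime dependency of the package):

```js
// Sketch mirroring encryptHash/URL2 from bin/upload.ts in the bundled output above.
const CryptoJS = require("crypto-js");

function previewUrl(contentHash) {
  // RC4-encrypt the hash with the CLI's fixed key, then make the Base64 output URL-safe.
  const encrypted = CryptoJS.RC4.encrypt(contentHash, "pinme-secret-key").toString();
  const urlSafe = encrypted.replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
  return `https://pinme.eth.limo/#/preview/${urlSafe}`;
}
```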
package/package.json CHANGED
@@ -1,18 +1,15 @@
1
1
  {
2
2
  "name": "pinme",
3
- "version": "1.0.0",
3
+ "version": "1.0.1",
4
4
  "publishConfig": {
5
5
  "access": "public"
6
6
  },
7
- "description": "Pinme",
7
+ "description": "Deploy Your Frontend In a Single Command",
8
8
  "main": "dist/index.js",
9
9
  "scripts": {
10
- "prebuild": "mkdir -p dist/utils",
11
- "build": "rollup -c",
12
- "dev": "rollup -c -w",
13
- "prepublishOnly": "npm run build",
14
- "test": "echo \"Error: no test specified\" && exit 1",
15
- "build:debug": "rollup -c --debug"
10
+ "build": "node build.js",
11
+ "dev": "NODE_ENV=development node build.js",
12
+ "prepublishOnly": "npm run build"
16
13
  },
17
14
  "bin": {
18
15
  "pinme": "./dist/index.js"
@@ -20,11 +17,15 @@
20
17
  "files": [
21
18
  "dist"
22
19
  ],
23
- "keywords": [],
24
- "author": "Ted",
20
+ "keywords": [
21
+ "ipfs",
22
+ "cli",
23
+ "deploy",
24
+ "frontend"
25
+ ],
26
+ "author": "Glitter Protocol",
25
27
  "license": "MIT",
26
28
  "dependencies": {
27
- "@pinata/sdk": "^2.1.0",
28
29
  "axios": "^1.3.2",
29
30
  "base-x": "^5.0.1",
30
31
  "bip39": "^3.1.0",
@@ -32,69 +33,29 @@
32
33
  "commander": "^11.1.0",
33
34
  "crypto-js": "^4.2.0",
34
35
  "dayjs": "^1.11.7",
35
- "dotenv": "^16.3.1",
36
- "download-git-repo": "^3.0.2",
37
36
  "ethers": "5.7.2",
38
37
  "figlet": "^1.7.0",
39
38
  "form-data": "^4.0.0",
40
39
  "fs-extra": "^11.2.0",
41
- "i18next": "^22.4.9",
42
- "i18next-browser-languagedetector": "^7.0.1",
43
40
  "inquirer": "^8.2.5",
44
- "js-file-downloader": "^1.1.24",
45
- "nanoid": "^4.0.1",
46
41
  "ora": "^3.2.0",
47
- "rollup-plugin-visualizer": "^5.9.2",
48
42
  "uuid": "^9.0.0"
49
43
  },
50
44
  "devDependencies": {
51
- "@babel/core": "^7.20.12",
52
- "@babel/plugin-syntax-flow": "^7.14.5",
53
- "@babel/plugin-transform-react-jsx": "^7.14.9",
54
- "@esbuild-plugins/esm-externals": "^0.1.2",
55
- "@esbuild-plugins/node-globals-polyfill": "^0.2.3",
56
- "@esbuild-plugins/node-modules-polyfill": "^0.2.2",
57
- "@rollup/plugin-babel": "^5.3.1",
58
45
  "@rollup/plugin-commonjs": "^22.0.2",
59
46
  "@rollup/plugin-json": "^4.1.0",
60
47
  "@rollup/plugin-node-resolve": "^14.1.0",
61
- "@types/lodash": "^4.14.191",
62
- "@types/node": "^16.11.7",
63
- "@types/react": "^18.0.27",
64
- "@types/react-dom": "^18.0.10",
65
- "@types/react-router-dom": "^5.3.3",
66
- "@vitejs/plugin-basic-ssl": "^1.0.1",
67
- "@vitejs/plugin-react": "^3.1.0",
68
- "autoprefixer": "^10.4.13",
69
- "buffer": "^6.0.3",
70
- "cssnano": "^5.1.14",
71
- "cssnano-preset-advanced": "^5.3.9",
48
+ "dotenv": "^16.5.0",
49
+ "esbuild": "^0.25.2",
72
50
  "eslint": "^8.33.0",
73
51
  "eslint-config-airbnb-base": "^15.0.0",
74
52
  "eslint-config-prettier": "^8.6.0",
75
- "eslint-config-react-app": "^7.0.1",
76
53
  "eslint-plugin-import": "^2.27.5",
77
54
  "eslint-plugin-prettier": "^4.2.1",
78
- "eslint-plugin-react": "^7.32.2",
79
- "less": "^4.1.3",
80
- "postcss": "^8.4.21",
81
- "postcss-import": "^15.1.0",
82
- "postcss-px-to-viewport": "^1.1.1",
83
- "postcss-url": "^10.1.3",
84
55
  "prettier": "^2.8.3",
85
56
  "rollup": "^2.79.1",
86
- "rollup-plugin-commonjs": "^10.1.0",
87
57
  "rollup-plugin-copy": "^3.5.0",
88
- "rollup-plugin-dotenv": "^0.5.1",
89
- "rollup-plugin-json": "^4.0.0",
90
- "rollup-plugin-node-polyfills": "^0.2.1",
91
- "rollup-plugin-node-resolve": "^5.2.0",
92
- "rollup-plugin-terser": "^7.0.2",
93
- "typescript": "^4.9.5",
94
- "vite": "^4.1.1",
95
- "vite-plugin-compression": "^0.5.1",
96
- "vite-plugin-rewrite-all": "^1.0.1",
97
- "vite-plugin-svg-icons": "^2.0.1"
58
+ "rollup-plugin-terser": "^7.0.2"
98
59
  },
99
60
  "engines": {
100
61
  "node": ">= 14.18.0"