codehub-ghx-cli 1.0.2 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/commands/add.js CHANGED
@@ -1,19 +1,83 @@
+
+
 const fs = require("fs").promises;
+const fssync = require("fs");
 const path = require("path");
 
+async function copyRecursive(src, dest) {
+  const stats = fssync.statSync(src);
+
+  if (stats.isDirectory()) {
+    await fs.mkdir(dest, { recursive: true });
+    const items = await fs.readdir(src);
+
+    for (const item of items) {
+      const srcItem = path.join(src, item);
+      const destItem = path.join(dest, item);
+      await copyRecursive(srcItem, destItem);
+    }
+  } else {
+    await fs.copyFile(src, dest);
+  }
+}
 
 async function addRepo(filepath) {
-  const repoPath = path.resolve(process.cwd(),".codehub");
+  const repoPath = path.resolve(process.cwd(), ".codehub");
   const stagingPath = path.join(repoPath, "staging");
+  const ROOT_IGNORE = [".env", "node_modules", ".codehub", ".git"];
+
 
   try {
-    await fs.mkdir(stagingPath, {recursive: true});
+    await fs.access(repoPath);
+    await fs.mkdir(stagingPath, { recursive: true });
+
+    if(filepath === "." || filepath === "./"){
+      const currentDir = process.cwd();
+      const items = await fs.readdir(currentDir);
+      for (const item of items) {
+        // if ([".codehub", "node_modules", ".git"].includes(item)) continue;
+        if (ROOT_IGNORE.includes(item)) {
+          console.log(`Skipped ignored item: ${item}`);
+          continue;
+        }
+        const src = path.join(currentDir, item);
+        const dest = path.join(stagingPath, item);
+
+        const stats = fssync.statSync(src);
+
+        if (stats.isDirectory()) {
+          await copyRecursive(src, dest);
+        } else {
+          await fs.copyFile(src, dest);
+        }
+
+        console.log(`Added: ${item}`);
+      }
+
+      return;
+    }
+
     const fileName = path.basename(filepath);
-    await fs.copyFile(filepath, path.join(stagingPath, fileName));
-    console.log(`File ${fileName} added to staging area`);
+    if (ROOT_IGNORE.includes(fileName)) {
+      console.log(`Cannot add "${fileName}" It is ignored for security reasons.`);
+      return;
+    }
+
+    const destination = path.join(stagingPath, fileName);
+
+
+    const stats = fssync.statSync(filepath);
+
+    if (stats.isDirectory()) {
+      await copyRecursive(filepath, destination);
+    } else {
+      await fs.copyFile(filepath, destination);
+    }
+
+    console.log(`✅ Added: ${fileName}`);
   } catch (err) {
-    console.log("error adding file:", err);
+    console.log("error adding file:", err);
   }
 }
 
-module.exports = {addRepo};
+module.exports = { addRepo };
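
Note on the new add behavior: with the ROOT_IGNORE list and the special-cased "." argument, the add command can now stage a whole working tree rather than a single file, and it refuses to run outside an initialized repo because of the new fs.access(repoPath) check. A minimal sketch of the behavior, not part of the package itself (the `ghx` binary name comes from the hint text in commit.js; the file names here are hypothetical):

    // run from a project root that already contains .codehub/
    const { addRepo } = require("./commands/add");

    (async () => {
      await addRepo(".");          // stages everything except .env, node_modules, .codehub, .git
      await addRepo("src");        // a directory: copied into staging via copyRecursive
      await addRepo("notes.txt");  // a single file: copied with fs.copyFile
    })();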
package/commands/commit.js CHANGED
@@ -1,25 +1,56 @@
 const fs = require("fs").promises;
 const path = require('path');
+const axios = require('axios');
 const {v4: uuidv4} = require("uuid");
+const fssync = require("fs");
+
+async function copyRecursive(src, dest) {
+  const stats = fssync.statSync(src);
+
+  if (stats.isDirectory()) {
+    await fs.mkdir(dest, { recursive: true });
+    const items = await fs.readdir(src);
+
+    for (const item of items) {
+      const srcItem = path.join(src, item);
+      const destItem = path.join(dest, item);
+      await copyRecursive(srcItem, destItem);
+    }
+  } else {
+    await fs.copyFile(src, dest);
+  }
+}
 
 
 async function commitRepo(message) {
   const repopath = path.resolve(process.cwd(), ".codehub");
   const stagedPath = path.join(repopath, "staging");
   const commitPath = path.join(repopath, "commits");
+  const configPath = path.join(repopath, "config.json");
 
   try {
+    const config = JSON.parse(await fs.readFile(configPath, "utf8"));
+    const repoId = config.repoId;
     const commitID = uuidv4();
     const commitDir = path.join(commitPath, commitID);
     await fs.mkdir(commitDir, {recursive: true});
 
     const files = await fs.readdir(stagedPath);
+    if (files.length === 0) {
+      console.log(" Nothing to commit. Run: ghx add <file>");
+      return;
+    }
     for(const file of files){
-      await fs.copyFile(
+      await copyRecursive(
        path.join(stagedPath, file),
        path.join(commitDir, file)
      );
    }
+
+    await axios.put(`http://localhost:3000/repo/update/${repoId}`, {
+      message:message,
+    });
+
    await fs.writeFile(path.join(commitDir, "commit.json"),JSON.stringify({message, date:new Date().toISOString()}));
    console.log(`Commit ${commitID} created with message: ${message}`);
  } catch (err) {
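
Note on the new commit flow: commitRepo now reads .codehub/config.json for a repoId and PUTs the commit message to the backend at http://localhost:3000, so committing requires both an initialized repo and a reachable backend. The metadata it writes per commit is just the message plus an ISO timestamp; a hedged sketch of reading it back (this helper and its values are illustrative, not part of the package):

    const fs = require("fs").promises;
    const path = require("path");

    // commit.json is written by commitRepo as JSON.stringify({ message, date: new Date().toISOString() })
    async function readCommitMeta(commitID) {
      const metaPath = path.join(process.cwd(), ".codehub", "commits", commitID, "commit.json");
      return JSON.parse(await fs.readFile(metaPath, "utf8")); // e.g. { message: "first commit", date: "2024-01-01T00:00:00.000Z" }
    }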
package/commands/init.js CHANGED
@@ -1,22 +1,46 @@
+
 const fs = require("fs").promises;
 const path = require("path");
 
 
-async function initRepo(){
+async function initRepo(repoLink) {
+
+  function extractRepoId(repoLink) {
+    const url = new URL(repoLink);
+    return url.searchParams.get("repoId");
+  }
+
   const repoPath = path.resolve(process.cwd(), ".codehub");
   const commitsPath = path.join(repoPath, "commits");
 
   try {
-    await fs.mkdir(repoPath, {recursive: true});
-    await fs.mkdir(commitsPath, {recursive: true});
+    await fs.mkdir(repoPath, { recursive: true });
+    await fs.mkdir(commitsPath, { recursive: true });
+
+    const repoId = extractRepoId(repoLink);
+
+    if (!repoId) {
+      console.error("repoId missing in clone URL!");
+      return;
+    }
+
     await fs.writeFile(
       path.join(repoPath, "config.json"),
-      JSON.stringify({ bucket: process.env.S3_BUCKET})
+      JSON.stringify({
+        bucket: process.env.S3_BUCKET,
+        repoLink,
+        repoId
+      })
     );
+
     console.log("Repository initialized!");
+    console.log("Repo ID saved:", repoId);
+
+    if (repoLink) console.log("Linked to:", repoLink);
+
   } catch (error) {
-    console.error("Error initialising repository", error);
+    console.error("Error initializing repository", error);
   }
 }
-
-module.exports = { initRepo};
+
+module.exports = { initRepo };
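
Note on the new init contract: the link passed to init must carry the repo id as a repoId query parameter, because extractRepoId reads it with URL.searchParams; the id and the link are then persisted in .codehub/config.json alongside the S3 bucket name. A small sketch of the extraction (the URL is hypothetical; only the mechanism comes from the code above):

    // extractRepoId relies on the standard WHATWG URL API:
    const url = new URL("https://codehub.example/repo/clone?repoId=6650f3abc123");
    url.searchParams.get("repoId"); // -> "6650f3abc123"
    // config.json then contains roughly: { bucket: <S3_BUCKET>, repoLink: <the link>, repoId: "6650f3abc123" }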
package/commands/push.js CHANGED
@@ -1,34 +1,202 @@
+
+
+// ghx-cli/commands/push.js
 const fs = require("fs").promises;
-const path = require('path');
-// const {s3, s3_BUCKET} = require('../config/aws-config');
-const {s3, s3_BUCKET} = require('../config/aws-config');
+const fssync = require("fs");
+const path = require("path");
+const axios = require("axios");
+const mongoose = require('mongoose');
+const { s3, s3_BUCKET } = require("../config/aws-config");
+const PushLog = require('../../models/pushModel');
+require('dotenv').config;
+
+
+async function connectDB(){
+  const mongoURI = process.env.MONGODB_URI;
+  mongoose.connect(mongoURI)
+    .then(()=>console.log("push date saved"))
+    .catch((err)=>console.error("Unable to connect",err));
+}
+
+
+
+// uploadRecursive:
+
+async function uploadRecursive(localPath, baseDir, commitId, collectedFiles) {
+
+  const relative = path.relative(baseDir, localPath).replace(/\\/g, "/");
+
+  if (relative !== "") {
+    collectedFiles.push({
+      filename: path.basename(localPath),
+      commit: commitId,
+      path: `repo/${commitId}/${relative}`,
+      folder: path.dirname(relative).replace(/\\/g, "/"),
+      fullS3Path: `repo/${commitId}/${relative}`,
+      fullPath: `repo/${commitId}/${relative}`,
+      isFolder: true
+    });
+  }
+
+
+  const stats = fssync.statSync(localPath);
 
+  if (stats.isDirectory()) {
+    const items = await fs.readdir(localPath);
+    for (const item of items) {
+      const itemLocal = path.join(localPath, item);
+      await uploadRecursive(itemLocal, baseDir, commitId, collectedFiles);
+    }
+  } else {
+
+    const relative = path.relative(baseDir, localPath).replace(/\\/g, "/");
+
+    const cleanPath = relative.replace(/\\/g, "/");
+    // const s3Key = `commits/${commitId}/${relative}`; // canonical S3 key and DB path
+    const s3Key = `repo/${commitId}/${cleanPath}`;
+    const fileIdPath = cleanPath;
+    const fileContent = await fs.readFile(localPath);
+
+    // upload to S3 under canonical key
+    await s3.upload({
+      Bucket: s3_BUCKET,
+      Key: s3Key,
+      Body: fileContent
+    }).promise();
+
+    const filename = path.basename(localPath);
+    const folder = path.dirname(relative).replace(/\\/g, "/"); // relative folder inside commit
+
+    collectedFiles.push({
+      // filename,
+      // commit: commitId,
+      // // path: s3Key, // canonical path saved to DB
+      // // folder: folder === "." ? "" : folder,
+      // // isFolder: false
+      // path: cleanPath, // <-- canonical
+      // folder: folder === "." ? "" : folder,
+      // fullS3Path: s3Key, // <-- actual file stored in S3
+      // fullPath: cleanPath,
+      // isFolder: false
+      filename,
+      commit: commitId,
+      path: s3Key, // <--- ALWAYS THIS
+      folder: folder === "." ? "" : folder,
+      fullS3Path: s3Key, // <--- ALWAYS THIS
+      fullPath: s3Key, // <--- ALWAYS THIS
+      isFolder: false
+    });
+  }
+}
 
 async function pushRepo() {
-  const repopath = path.resolve(process.cwd(), ".codehub");
-  const commitsPath = path.join(repopath, "commits");
+  const repoPath = path.resolve(process.cwd(), ".codehub");
+  const commitsPath = path.join(repoPath, "commits");
+  const configPath = path.join(repoPath, "config.json");
 
   try {
+    const config = JSON.parse(await fs.readFile(configPath, "utf8"));
+    const repoId = config.repoId;
+
+    if (!repoId) {
+      console.log("❌ repoId missing in .codehub/config.json");
+      return;
+    }
+
+
     const commitDirs = await fs.readdir(commitsPath);
-    for(const commitDir of commitDirs){
-      const commitPath = path.join(commitsPath, commitDir);
-      const files = await fs.readdir(commitPath);
-
-      for(const file of files){
-        const filePath = path.join(commitPath, file);
-        const fileContent = await fs.readFile(filePath);
-        const params = {
-          Bucket: s3_BUCKET,
-          Key: `commits/${commitDir}/${file}`,
-          Body: fileContent,
-        };
-        await s3.upload(params).promise();
-      }
+
+    if (commitDirs.length === 0) {
+      console.log("❌ No commits to push");
+      return;
+    }
+
+    const lastCommit = commitDirs[commitDirs.length - 1];
+    const metaFile = path.join(commitsPath, lastCommit, "commit.json");
+
+    let commitMessage = "No commit message";
+    if (fssync.existsSync(metaFile)) {
+      const metaData = JSON.parse(await fs.readFile(metaFile, "utf8"));
+      commitMessage = metaData.message;
+    }
+
+
+
+
+    // Fetch existing repo content so we can merge (and dedupe)
+    const dbRes = await axios.get(`http://localhost:3000/repo/id/${repoId}`);
+    // backend returns array; first element is repo object
+    const existing = (dbRes.data && dbRes.data[0] && dbRes.data[0].content) || [];
+
+    // map existing by path for fast overwrite
+    const contentMap = {};
+    existing.forEach(f => {
+      if (!f || !f.path) return;
+      contentMap[f.path] = f;
+    });
+
+    // const commitDirs = await fs.readdir(commitsPath);
+    let newFiles = [];
+
+    for (const commitId of commitDirs) {
+      const commitFolder = path.join(commitsPath, commitId);
+
+      // commitFolder may contain files/folders
+      await uploadRecursive(
+        commitFolder,
+        commitFolder, // baseDir
+        commitId,
+        newFiles
+      );
+    }
+
+    // merge/overwrite: newFiles wins
+    for (const f of newFiles) {
+
+      for (const existingPath in contentMap) {
+        const old = contentMap[existingPath];
+
+        // compare logical location (folder + filename)
+        if (old.filename === f.filename && old.folder === f.folder) {
+          delete contentMap[existingPath];
        }
-    console.log("All commits pushed to S3.")
-  } catch (error) {
-    console.error("Error pushing to s3:", error);
+      }
+
+
+      contentMap[f.path] = f;
+    }
+
+
+
+
+    const finalFiles = Object.values(contentMap);
+
+    // push finalFiles to backend (replace content)
+    await axios.put(`http://localhost:3000/repo/update/${repoId}`, {
+      content: finalFiles,
+      message: commitMessage,
+      description: ""
+    });
+
+
+    await fs.rm(commitsPath, { recursive: true, force: true });
+    await fs.mkdir(commitsPath);
+    await connectDB();
+
+
+    console.log("✅ Push complete!");
+    // console.log("Using repoId:", repoId);
+
+    await PushLog.create({
+      repoId: repoId,
+      pushedAt: new Date()
+    });
+
+    process.exit(0);
+  } catch (err) {
+    console.error("❌ Push error:", err);
+    process.exit(1);
   }
 }
 
-module.exports = {pushRepo};
+module.exports = { pushRepo };
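
Note on the new push flow: uploadRecursive uploads every file to S3 under repo/<commitId>/<relative> and collects one record per entry; pushRepo merges those records with the repository's existing content (matching on folder + filename), PUTs the result to the backend, clears the local commits folder, and logs the push to MongoDB. Besides AWS credentials, this path assumes MONGODB_URI is set and that ../../models/pushModel resolves at runtime. A sketch of one record as built by the code above (values are hypothetical; field names are from the push call):

    const exampleRecord = {
      filename: "app.js",
      commit: "0f3c2e9a-uuid",                 // commit folder name (a uuid)
      path: "repo/0f3c2e9a-uuid/src/app.js",   // also the S3 object key
      folder: "src",
      fullS3Path: "repo/0f3c2e9a-uuid/src/app.js",
      fullPath: "repo/0f3c2e9a-uuid/src/app.js",
      isFolder: false
    };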
package/commands/revert.js CHANGED
@@ -1,30 +1,78 @@
-const fs = require("fs");
-const path = require('path');
-const { promisify } = require("util");
+// const fs = require("fs");
+// const path = require('path');
+// const { promisify } = require("util");
+// // const {s3, s3_BUCKET} = require('../config/aws-config');
 // const {s3, s3_BUCKET} = require('../config/aws-config');
-const {s3, s3_BUCKET} = require('../config/aws-config');
 
-const readdir = promisify(fs.readdir);
-const copyFile = promisify(fs.copyFile);
+// const readdir = promisify(fs.readdir);
+// const copyFile = promisify(fs.copyFile);
 
 
+// async function revertRepo(commitID) {
+//   const repopath = path.resolve(process.cwd(), ".codehub");
+//   const commitsPath = path.join(repopath, "commits");
+
+
+//   try {
+//     const commitDir = path.join(commitsPath, commitID);
+//     const files = await readdir(commitDir);
+//     const parentDir = path.resolve(repopath, "..");
+
+//     for(const file of files){
+//       await copyFile(path.join(commitDir, file), path.join(parentDir, file));
+//     }
+//     console.log(`Commit ${commitID} reverted successfully!`);
+//   } catch (err) {
+//     console.error("Unable to revert:", err);
+//   }
+// }
+
+// module.exports = {revertRepo};
+
+const fs = require("fs").promises;
+const fssync = require("fs");
+const path = require("path");
+
+async function copyRecursive(src, dest) {
+  const stats = fssync.statSync(src);
+
+  if (stats.isDirectory()) {
+    await fs.mkdir(dest, { recursive: true });
+
+    const items = await fs.readdir(src);
+    for (const item of items) {
+      const srcItem = path.join(src, item);
+      const destItem = path.join(dest, item);
+      await copyRecursive(srcItem, destItem);
+    }
+  } else {
+    await fs.copyFile(src, dest);
+  }
+}
+
 async function revertRepo(commitID) {
-  const repopath = path.resolve(process.cwd(), ".codehub");
-  const commitsPath = path.join(repopath, "commits");
+  const repoPath = path.join(process.cwd(), ".codehub");
+  const commitsPath = path.join(repoPath, "commits");
+  const commitFolder = path.join(commitsPath, commitID);
 
+  // Ensure that commit exists
+  if (!fssync.existsSync(commitFolder)) {
+    console.log(`❌ Commit "${commitID}" not found.`);
+    return;
+  }
 
-  try {
-    const commitDir = path.join(commitsPath, commitID);
-    const files = await readdir(commitDir);
-    const parentDir = path.resolve(repopath, "..");
+  console.log(`🔄 Reverting to commit: ${commitID}`);
 
-  for(const file of files){
-    await copyFile(path.join(commitDir, file), path.join(parentDir, file));
+  const items = await fs.readdir(commitFolder);
+
+  for (const item of items) {
+    const src = path.join(commitFolder, item);
+    const dest = path.join(process.cwd(), item);
+
+    await copyRecursive(src, dest);
   }
-  console.log(`Commit ${commitID} reverted successfully!`);
-} catch (err) {
-  console.error("Unable to revert:", err);
-}
+
+  console.log("✅ Revert complete! Project reset to the selected commit.");
 }
 
-module.exports = {revertRepo};
+module.exports = { revertRepo };
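
Note on the rewritten revert: revertRepo now checks up front that the commit folder exists and copies its contents straight over the current working directory via copyRecursive; there is no longer a surrounding try/catch, so copy failures surface as rejected promises. A hedged usage sketch (the commit id is hypothetical):

    const { revertRepo } = require("./commands/revert");
    // the id is one of the folder names under .codehub/commits
    revertRepo("0f3c2e9a-uuid").catch(console.error);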
package/config/aws-config.js ADDED
@@ -0,0 +1,21 @@
+// const AWS = require("aws-sdk");
+
+// AWS.config.update({ region: "ap-south-1"});
+
+// const s3 = new AWS.S3();
+// const s3_BUCKET = "gitbuck";
+
+// module.exports = {s3, s3_BUCKET};
+const AWS = require("aws-sdk");
+require("dotenv").config();
+
+AWS.config.update({
+  accessKeyId: process.env.AWS_ACCESS_KEY,
+  secretAccessKey: process.env.AWS_SECRET_KEY,
+  region: "ap-south-1",
+});
+
+const s3 = new AWS.S3();
+const s3_BUCKET = process.env.S3_BUCKET;
+
+module.exports = { s3, s3_BUCKET };
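
Note on configuration: AWS credentials and the bucket are now read from the environment instead of being hard-coded, and index.js loads the package-local .env file. The variables the 1.0.3 code reads (names taken from aws-config.js, init.js, and push.js; values are placeholders):

    // .env next to index.js, loaded via require("dotenv").config({ path: path.join(__dirname, ".env") })
    // AWS_ACCESS_KEY=...   used by aws-config.js
    // AWS_SECRET_KEY=...   used by aws-config.js
    // S3_BUCKET=...        bucket for uploads; also written into .codehub/config.json by init
    // MONGODB_URI=...      used by connectDB() in push.js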
package/index.js CHANGED
@@ -1,6 +1,7 @@
 #!/usr/bin/env node
+process.env.DOTENV_SILENT = "true";
+process.env.DOTENVX_SILENT = "true";
 
-// require("dotenv").config();
 const path = require("path");
 require("dotenv").config({
   path: path.join(__dirname, ".env")
@@ -12,6 +13,7 @@ const { hideBin } = require("yargs/helpers");
 const { initRepo } = require("./commands/init");
 const { addRepo } = require("./commands/add");
 const { commitRepo } = require("./commands/commit");
+const mongoose = require('mongoose');
 const { pushRepo } = require("./commands/push");
 const { pullRepo } = require("./commands/pull");
 const { revertRepo } = require("./commands/revert");
@@ -20,11 +22,21 @@ yargs(hideBin(process.argv))
 
   // init
   .command(
-    "init",
+    "init <repoLink>",
     "Initialize a repository in the current folder",
-    () => {},
-    () => {
-      initRepo();
+    (yargs) => {
+      yargs
+        .positional("repoLink", {
+          type: "string",
+          describe: "Repository link from CodeHub"
+        })
+        // .positional("repoId",{
+        //   type: "string",
+        //   describe: "Repository ID from CodeHub backend"
+        // });
+    },
+    (argv) => {
+      initRepo(argv.repoLink);
     }
   )
 
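Note on the CLI surface: init now takes a required positional repoLink and forwards it to initRepo, so the bare `init` invocation from 1.0.2 no longer works. A hedged end-to-end sketch (the binary name comes from the hint text in commit.js; the URL and the other command names are assumptions based on the modules this file requires):

    // ghx init "https://codehub.example/repo/clone?repoId=6650f3abc123"
    // ghx add .
    // ghx push
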
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "codehub-ghx-cli",
-  "version": "1.0.2",
+  "version": "1.0.3",
   "description": "",
   "main": "index.js",
   "bin": {