@ridit/forge 0.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,237 @@
1
import crypto from "crypto";
import fs from "fs";
import path from "path";
import { updateBranch } from "./branch";
import type { Branch } from "../types/branch";
import type { Commit, CommitRef } from "../types/commit";
import type { FileBlob } from "../types/files";
import type { Repo } from "../types/repo";
8
+
9
+ export function commitInGlobal(
10
+ message: string,
11
+ repo_path: string,
12
+ branch_name: string,
13
+ commitId: string,
14
+ ): {
15
+ status: "ok" | "error";
16
+ error?: string;
17
+ } {
18
+ const forgeFolder = path.join(repo_path, ".forge");
19
+ const commitFolder = path.join(forgeFolder, "commits");
20
+
21
+ if (!fs.existsSync(commitFolder))
22
+ return {
23
+ status: "error",
24
+ error: "commits folder is missing, consider reinitialize repo.",
25
+ };
26
+
27
+ const newCommit: CommitRef = {
28
+ id: commitId,
29
+ message: message,
30
+ date: new Date().toISOString(),
31
+ branch: branch_name,
32
+ };
33
+
34
+ fs.writeFileSync(
35
+ path.join(commitFolder, `${commitId}.json`),
36
+ JSON.stringify(newCommit),
37
+ );
38
+
39
+ return { status: "ok" };
40
+ }
41
+
42
+ export function commitInBranch(
43
+ message: string,
44
+ repo_path: string,
45
+ branch_name: string,
46
+ ): {
47
+ status: "ok" | "error";
48
+ error?: string;
49
+ } {
50
+ const forgeFolder = path.join(repo_path, ".forge");
51
+ const branchesFolder = path.join(forgeFolder, "branches");
52
+ const branchFolder = path.join(branchesFolder, branch_name);
53
+ const commitFolder = path.join(branchFolder, "commits");
54
+ const tempAddedFiles = path.join(forgeFolder, "tempAddedFiles.json");
55
+
56
+ if (!fs.existsSync(tempAddedFiles))
57
+ return {
58
+ status: "error",
59
+ error: "tempAddedFiles.json is missing, consider reinitialize repo.",
60
+ };
61
+
62
+ if (!fs.existsSync(commitFolder))
63
+ return {
64
+ status: "error",
65
+ error: "commits folder is missing, consider reinitialize repo.",
66
+ };
67
+
68
+ const parsed = JSON.parse(fs.readFileSync(tempAddedFiles, "utf-8"));
69
+
70
+ const tempFiles: FileBlob[] = Array.isArray(parsed.files) ? parsed.files : [];
71
+
72
+ const fileBlobs = tempFiles;
73
+
74
+ let parentCommit;
75
+
76
+ const latestCommitId = getLatestCommitId(repo_path, branch_name);
77
+ if (latestCommitId.error || !latestCommitId.latestCommitId) {
78
+ } else {
79
+ parentCommit = latestCommitId.latestCommitId;
80
+ }
81
+
82
+ const newCommitId = crypto.randomUUID();
83
+ const newCommit: Commit = {
84
+ id: newCommitId,
85
+ message: message,
86
+ fileBlobs,
87
+ date: new Date().toISOString(),
88
+ parent: parentCommit,
89
+ };
90
+
91
+ fs.writeFileSync(
92
+ path.join(commitFolder, `${newCommitId}.json`),
93
+ JSON.stringify(newCommit),
94
+ );
95
+
96
+ fs.writeFileSync(tempAddedFiles, JSON.stringify({ files: [] }));
97
+
98
+ const res = updateBranch(repo_path, branch_name, newCommitId);
99
+ if (res.error) return { status: "error", error: res.error };
100
+
101
+ commitInGlobal(message, repo_path, branch_name, newCommitId);
102
+
103
+ return { status: "ok" };
104
+ }
105
+
106
+ export function logCommits(repo_path: string): {
107
+ status: "ok" | "error";
108
+ error?: string;
109
+ commits?: CommitRef[];
110
+ } {
111
+ const forgeFolder = path.join(repo_path, ".forge");
112
+ const commitFolder = path.join(forgeFolder, "commits");
113
+
114
+ if (!fs.existsSync(commitFolder))
115
+ return {
116
+ status: "error",
117
+ error: "commits folder is missing, consider reinitialize repo.",
118
+ };
119
+
120
+ const files = fs.readdirSync(commitFolder);
121
+ const commits = files.map((file) => {
122
+ const commit = JSON.parse(
123
+ fs.readFileSync(path.join(commitFolder, file.toString()), "utf-8"),
124
+ ) as CommitRef;
125
+ return commit;
126
+ });
127
+
128
+ return { status: "ok", commits };
129
+ }
130
+
131
+ export function logCommitsInBranch(
132
+ repo_path: string,
133
+ branch_name: string,
134
+ ): {
135
+ status: "ok" | "error";
136
+ error?: string;
137
+ commits?: Commit[];
138
+ } {
139
+ const forgeFolder = path.join(repo_path, ".forge");
140
+ const branchesFolder = path.join(forgeFolder, "branches");
141
+ const branchFolder = path.join(branchesFolder, branch_name);
142
+ const commitFolder = path.join(branchFolder, "commits");
143
+
144
+ if (!fs.existsSync(commitFolder))
145
+ return {
146
+ status: "error",
147
+ error: "commits folder is missing, consider reinitialize repo.",
148
+ };
149
+
150
+ const files = fs.readdirSync(commitFolder);
151
+ const commits = files.map((file) => {
152
+ const commit = JSON.parse(
153
+ fs.readFileSync(path.join(commitFolder, file.toString()), "utf-8"),
154
+ ) as Commit;
155
+ return commit;
156
+ });
157
+
158
+ return { status: "ok", commits };
159
+ }
160
+
161
+ export function getCommit(
162
+ repo_path: string,
163
+ commitId: string,
164
+ branch_name: string,
165
+ ): {
166
+ status: "ok" | "error";
167
+ error?: string;
168
+ commit?: Commit;
169
+ } {
170
+ const forgeFolder = path.join(repo_path, ".forge");
171
+ const commitFolder = path.join(
172
+ forgeFolder,
173
+ "branches",
174
+ branch_name,
175
+ "commits",
176
+ );
177
+
178
+ if (!fs.existsSync(commitFolder))
179
+ return {
180
+ status: "error",
181
+ error: "commits folder is missing, consider reinitialize repo.",
182
+ };
183
+
184
+ const commitFile = path.join(commitFolder, `${commitId}.json`);
185
+
186
+ if (!fs.existsSync(commitFile))
187
+ return {
188
+ status: "error",
189
+ error: `${commitId} doesn't exists.`,
190
+ };
191
+
192
+ const commitData = JSON.parse(fs.readFileSync(commitFile, "utf-8")) as Commit;
193
+
194
+ return { status: "ok", commit: commitData };
195
+ }
196
+
197
+ export function getLatestCommitId(
198
+ repo_path: string,
199
+ branch_name?: string,
200
+ ): {
201
+ status: "ok" | "error";
202
+ error?: string;
203
+ latestCommitId?: string;
204
+ } {
205
+ const forgeFolder = path.join(repo_path, ".forge");
206
+ const repoDataFile = path.join(forgeFolder, "repo.json");
207
+
208
+ if (!fs.existsSync(repoDataFile))
209
+ return {
210
+ status: "error",
211
+ error: `repo meta data is missing, consider reinitialize the branch.`,
212
+ };
213
+
214
+ const repoData = JSON.parse(fs.readFileSync(repoDataFile, "utf-8")) as Repo;
215
+ const branchName = branch_name ?? repoData.branch;
216
+ const branchFolder = path.join(forgeFolder, "branches", branchName);
217
+
218
+ if (!fs.existsSync(branchFolder))
219
+ return {
220
+ status: "error",
221
+ error: `${branchName} doesn't exists, consider reinitializing the repo.`,
222
+ };
223
+
224
+ const branchDataFile = path.join(branchFolder, "branch.json");
225
+
226
+ if (!fs.existsSync(branchDataFile))
227
+ return {
228
+ status: "error",
229
+ error: `${branchName} meta data is missing, consider reinitialize the branch.`,
230
+ };
231
+
232
+ const branchData = JSON.parse(
233
+ fs.readFileSync(branchDataFile, "utf-8"),
234
+ ) as Branch;
235
+
236
+ return { status: "ok", latestCommitId: branchData.latestCommitId };
237
+ }
@@ -0,0 +1,56 @@
1
+ import fs from "fs";
2
+ import ignore from "ignore";
3
+ import path from "path";
4
+
5
+ const ig = ignore({ allowRelativePaths: true });
6
+
7
+ export function matchPaths(files: string[]): {
8
+ status: "ok" | "error";
9
+ files?: string[];
10
+ error?: string;
11
+ } {
12
+ let ignoreFile;
13
+
14
+ const forgeignoreFile = path.join(".", ".forgeignore");
15
+ const gitignoreFile = path.join(".", ".gitignore");
16
+
17
+ if (!fs.existsSync(forgeignoreFile)) ignoreFile = gitignoreFile;
18
+ else if (fs.existsSync(forgeignoreFile)) ignoreFile = forgeignoreFile;
19
+
20
+ if (ignoreFile && fs.existsSync(ignoreFile)) {
21
+ const content = fs.readFileSync(ignoreFile, "utf-8");
22
+ ig.add(content);
23
+ }
24
+
25
+ ig.add(".git");
26
+ ig.add(".forge");
27
+ ig.add("node_modules");
28
+
29
+ const checkedFiles: string[] = [];
30
+
31
+ files.forEach((file) => {
32
+ if (!ig.ignores(file)) checkedFiles.push(file);
33
+ });
34
+
35
+ return { status: "ok", files: checkedFiles };
36
+ }
37
+
38
+ export function createDefaultForgeIgnore(repo_path: string): {
39
+ status: "ok" | "error";
40
+ error?: string;
41
+ } {
42
+ const forgeFolder = path.join(repo_path, ".forge");
43
+
44
+ if (!fs.existsSync(forgeFolder))
45
+ return { status: "error", error: "repo doesn't exists" };
46
+
47
+ const forgeignoreFile = path.join(repo_path, ".forgeignore");
48
+
49
+ if (!fs.existsSync(forgeignoreFile))
50
+ fs.writeFileSync(
51
+ forgeignoreFile,
52
+ "node_modules\n.git\nout\ndist\n.forge\.next\.cache\nbuild\n__pycache__\n.env\.DS_Store",
53
+ );
54
+
55
+ return { status: "ok" };
56
+ }
@@ -0,0 +1,28 @@
1
+ import crypto from "crypto";
2
+ import path from "path";
3
+ import fs from "fs";
4
+
5
+ export function hashContent(content: string): string {
6
+ return crypto.createHash("sha256").update(content).digest("hex");
7
+ }
8
+
9
+ export function writeObject(repo_path: string, content: string): string {
10
+ const hash = hashContent(content);
11
+ const folder = hash.slice(0, 2);
12
+ const file = hash.slice(2);
13
+
14
+ const objectDir = path.join(repo_path, ".forge", "objects", folder);
15
+ const objectPath = path.join(objectDir, file);
16
+
17
+ if (!fs.existsSync(objectDir)) fs.mkdirSync(objectDir, { recursive: true });
18
+ if (!fs.existsSync(objectPath)) fs.writeFileSync(objectPath, content);
19
+
20
+ return hash;
21
+ }
22
+
23
+ export function readObject(repo_path: string, hash: string): string {
24
+ const folder = hash.slice(0, 2);
25
+ const file = hash.slice(2);
26
+ const objectPath = path.join(repo_path, ".forge", "objects", folder, file);
27
+ return fs.readFileSync(objectPath, "utf-8");
28
+ }
@@ -0,0 +1,64 @@
1
+ import path from "path";
2
+ import fs from "fs";
3
+ import type { Repo } from "../types/repo";
4
+ import { createBranch } from "./branch";
5
+ import { createDefaultForgeIgnore } from "./forgeIgnore";
6
+
7
+ export function initRepo(folder_path: string): {
8
+ status: "ok" | "error";
9
+ error?: string;
10
+ } {
11
+ const cwd = folder_path;
12
+ const forgeFolder = path.join(cwd, ".forge");
13
+ const repoInfoFile = path.join(forgeFolder, "repo.json");
14
+ const tempAddedFiles = path.join(forgeFolder, "tempAddedFiles.json");
15
+ const commitsFolder = path.join(forgeFolder, "commits");
16
+ const objectsFolder = path.join(forgeFolder, "objects");
17
+
18
+ if (fs.existsSync(forgeFolder)) {
19
+ return { status: "error", error: "Repo already exists" };
20
+ }
21
+
22
+ fs.mkdirSync(forgeFolder, { recursive: true });
23
+
24
+ fs.writeFileSync(
25
+ repoInfoFile,
26
+ JSON.stringify({
27
+ name: path.basename(folder_path),
28
+ isFork: false,
29
+ branch: "main",
30
+ } as Repo),
31
+ );
32
+
33
+ fs.writeFileSync(tempAddedFiles, JSON.stringify({ files: [] }));
34
+
35
+ fs.mkdirSync(commitsFolder, { recursive: true });
36
+ fs.mkdirSync(objectsFolder, { recursive: true });
37
+
38
+ const res = createBranch(".", "main");
39
+ if (res.error) return { status: "error", error: res.error };
40
+
41
+ const res_2 = createDefaultForgeIgnore(folder_path);
42
+ if (res_2.error) return { status: "error", error: res_2.error };
43
+
44
+ return { status: "ok" };
45
+ }
46
+
47
+ export function updateRepo(
48
+ repo_path: string,
49
+ data: Partial<Repo>,
50
+ ): { status: "ok" | "error"; error?: string } {
51
+ const forgeFolder = path.join(repo_path, ".forge");
52
+ const repoInfoFile = path.join(forgeFolder, "repo.json");
53
+
54
+ if (!fs.existsSync(repoInfoFile))
55
+ return {
56
+ status: "error",
57
+ error: "repo.json is missing, consider reinitialize repo.",
58
+ };
59
+
60
+ const current = JSON.parse(fs.readFileSync(repoInfoFile, "utf-8")) as Repo;
61
+ fs.writeFileSync(repoInfoFile, JSON.stringify({ ...current, ...data }));
62
+
63
+ return { status: "ok" };
64
+ }
@@ -0,0 +1,46 @@
1
+ import { matchPaths } from "./forgeIgnore";
2
+ import path from "path";
3
+ import fs from "fs";
4
+ import type { FileStatus } from "../types/files";
5
+ import { checkPrime } from "crypto";
6
+ import { determineFileStatus } from "./add";
7
+
8
+ function processFiles(basePath: string, files: (string | Buffer)[]) {
9
+ const normalized = files
10
+ .map((f) => path.join(basePath, f.toString()).replace(/\\/g, "/"))
11
+ .filter((p) => {
12
+ try {
13
+ return fs.statSync(p).isFile();
14
+ } catch {
15
+ return false;
16
+ }
17
+ });
18
+
19
+ const checkedFiles = matchPaths(normalized);
20
+
21
+ return checkedFiles;
22
+ }
23
+
24
/** A repository file path paired with its computed working-tree status. */
export interface FileRef {
  // Path relative to the scan root, using "/" separators (see processFiles).
  path: string;
  // Status value from ../types/files; "unchanged" is one possible value
  // (it is filtered out by listAllFilesWithStatus).
  status: FileStatus;
}
28
+
29
+ export function listAllFilesWithStatus(repo_path: string): {
30
+ status: "error" | "ok";
31
+ error?: string;
32
+ files?: FileRef[];
33
+ } {
34
+ const list = fs.readdirSync(".", { recursive: true });
35
+ const checkedFiles = processFiles(".", list);
36
+
37
+ let files: { path: string; status: FileStatus }[] = [];
38
+
39
+ checkedFiles.files?.forEach((file) => {
40
+ const status =
41
+ determineFileStatus(repo_path, file).file_status ?? "unchanged";
42
+ files.push({ path: file, status });
43
+ });
44
+
45
+ return { status: "ok", files: files.filter((f) => f.status !== "unchanged") };
46
+ }
@@ -0,0 +1,10 @@
1
import { EventEmitter } from "events";
// Shared process-wide emitter for SwitchEvent payloads.
// NOTE(review): the producers/consumers live elsewhere in the package —
// confirm which operations emit each event type before relying on it.
export const switchEmitter = new EventEmitter();

/**
 * Discriminated union (on `type`) describing progress/result events around
 * branch switching and merging.
 */
export type SwitchEvent =
  | { type: "checkpoint_created"; branch: string }
  | { type: "files_deleted"; files: string[] }
  | { type: "files_restored"; files: string[]; source: "checkpoint" | "commit" }
  | { type: "switched"; from: string; to: string }
  | { type: "merge_conflict_detected"; files: string[] }
  | { type: "merge_complete"; from: string; to: string; filesChanged: number };
package/tsconfig.json ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ "compilerOptions": {
3
+ // Environment setup & latest features
4
+ "lib": ["ESNext"],
5
+ "target": "ESNext",
6
+ "module": "Preserve",
7
+ "moduleDetection": "force",
8
+ "jsx": "react-jsx",
9
+ "allowJs": true,
10
+
11
+ // Bundler mode
12
+ "moduleResolution": "bundler",
13
+ "allowImportingTsExtensions": true,
14
+ "verbatimModuleSyntax": true,
15
+ "noEmit": true,
16
+
17
+ // Best practices
18
+ "strict": true,
19
+ "skipLibCheck": true,
20
+ "noFallthroughCasesInSwitch": true,
21
+ "noUncheckedIndexedAccess": true,
22
+ "noImplicitOverride": true,
23
+
24
+ // Some stricter flags (disabled by default)
25
+ "noUnusedLocals": false,
26
+ "noUnusedParameters": false,
27
+ "noPropertyAccessFromIndexSignature": false
28
+ }
29
+ }