aethel 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,146 @@
1
+ /**
2
+ * .aethelignore support — gitignore-syntax pattern matching.
3
+ *
4
+ * Reads a `.aethelignore` file from the workspace root (and optionally
5
+ * nested directories) and exposes a filter that tests relative paths.
6
+ * Uses the `ignore` npm package which implements the full gitignore spec.
7
+ */
8
+
9
+ import fs from "node:fs";
10
+ import path from "node:path";
11
+ import ignore from "ignore";
12
+ import { AETHEL_DIR } from "./config.js";
13
+
14
// Name of the per-workspace ignore file read from the workspace root.
const IGNORE_FILE = ".aethelignore";

// Paths that are always ignored regardless of .aethelignore contents.
// Both the bare name and a `/**` variant are listed so the directory itself
// and everything beneath it are excluded.
const BUILTIN_PATTERNS = [
  AETHEL_DIR,
  `${AETHEL_DIR}/**`,
  ".git",
  ".git/**",
  "node_modules",
  "node_modules/**",
  ".DS_Store",
  "Thumbs.db",
];

// Module-level cache: avoids re-reading and re-parsing .aethelignore
// on every call within the same process.
// Invalidation is mtime-based (see loadIgnoreRules) or explicit via
// invalidateIgnoreCache().
const _cache = new Map(); // root → { mtime, rules }
31
+
32
/**
 * Load ignore rules from a workspace root (cached per-process).
 *
 * The cache key is the resolved root; a cached entry is reused only while
 * the .aethelignore mtime is unchanged (mtime 0 means "no file").
 *
 * Returns an object with:
 *  - `ignores(relativePath)` → boolean
 *  - `filter(paths)` → filtered array of non-ignored paths
 *  - `patterns` → raw pattern strings for inspection
 *  - `userPatterns` → only the patterns read from .aethelignore
 */
export function loadIgnoreRules(root) {
  const resolved = path.resolve(root);
  const ignoreFile = path.join(resolved, IGNORE_FILE);

  // Check if cached version is still valid (same mtime on .aethelignore)
  let fileMtime = 0;
  try {
    fileMtime = fs.statSync(ignoreFile).mtimeMs;
  } catch {
    // File doesn't exist — that's fine, we still cache the result
  }

  const cached = _cache.get(resolved);
  if (cached && cached.mtime === fileMtime) {
    return cached.rules;
  }

  const ig = ignore.default();

  // Always ignore builtins
  ig.add(BUILTIN_PATTERNS);

  // Load .aethelignore from root
  const userPatterns = [];

  if (fileMtime > 0) {
    let content = "";
    try {
      content = fs.readFileSync(ignoreFile, "utf-8");
    } catch {
      // TOCTOU guard: the file existed at stat time but vanished (or became
      // unreadable) before the read. Treat it as absent instead of crashing.
    }
    const lines = content
      .split(/\r?\n/)
      .filter((line) => line.trim() && !line.startsWith("#"));
    ig.add(lines);
    userPatterns.push(...lines);
  }

  const rules = {
    ignores(relativePath) {
      // The `ignore` package requires relative paths without a leading slash.
      const normalized = relativePath.replace(/^\/+/, "");
      return ig.ignores(normalized);
    },

    filter(paths) {
      return ig.filter(paths.map((p) => p.replace(/^\/+/, "")));
    },

    patterns: [...BUILTIN_PATTERNS, ...userPatterns],
    userPatterns,
  };

  _cache.set(resolved, { mtime: fileMtime, rules });
  return rules;
}
91
+
92
/** Invalidate the cached rules for a root (call after editing .aethelignore). */
export function invalidateIgnoreCache(root) {
  const cacheKey = path.resolve(root);
  _cache.delete(cacheKey);
}
96
+
97
/**
 * Create a default .aethelignore file with common patterns.
 *
 * @param {string} root - workspace root directory
 * @returns {boolean} true if the file was created, false if one already existed
 */
export function createDefaultIgnoreFile(root) {
  const ignoreFile = path.join(root, IGNORE_FILE);

  const content = `# Aethel ignore patterns (gitignore syntax)
# Lines starting with # are comments.
# See https://git-scm.com/docs/gitignore for pattern syntax.

# OS files
.DS_Store
Thumbs.db
desktop.ini

# Editor / IDE
.vscode/
.idea/
*.swp
*.swo
*~

# Dependencies
node_modules/
.venv/
__pycache__/

# Build output
dist/
build/
*.pyc
*.o
*.so

# Secrets and credentials
.env
*.pem
*.key
credentials.json
token.json
client_secret*.json
`;

  try {
    // "wx" fails with EEXIST when the file already exists, closing the
    // existsSync-then-write race the original had.
    fs.writeFileSync(ignoreFile, content, { flag: "wx" });
  } catch (err) {
    if (err.code === "EEXIST") {
      return false;
    }
    throw err;
  }
  return true;
}
@@ -0,0 +1,109 @@
1
+ import fs from "node:fs";
2
+ import os from "node:os";
3
+ import path from "node:path";
4
+ import { humanSize } from "./drive-api.js";
5
+ import { loadIgnoreRules } from "./ignore.js";
6
+ import { findRoot } from "./config.js";
7
+
8
/**
 * Best-effort starting directory: the current working directory, falling
 * back to the user's home directory.
 *
 * Note: `process.cwd()` never returns a falsy value — when the working
 * directory has been deleted it *throws* (ENOENT). The original
 * `process.cwd() || os.homedir()` fallback was therefore dead code; the
 * fallback must live in a catch.
 */
export function defaultLocalRoot() {
  try {
    return process.cwd();
  } catch {
    return os.homedir();
  }
}
11
+
12
/**
 * Resolve `targetPath` to an absolute path and verify it is an existing
 * directory.
 *
 * @param {string} targetPath
 * @returns {Promise<string>} the resolved absolute path
 * @throws if the path does not exist (stat rejects) or is not a directory
 */
export async function ensureLocalDirectory(targetPath) {
  const absolute = path.resolve(targetPath);
  const info = await fs.promises.stat(absolute);

  if (info.isDirectory()) {
    return absolute;
  }

  throw new Error(`Local path is not a directory: ${absolute}`);
}
22
+
23
/**
 * List the immediate children of a directory, enriched with size/mtime
 * metadata, sorted directories-first then by name.
 *
 * Dotfiles are always skipped. When `respectIgnore` is true and the
 * directory lives inside an Aethel workspace, `.aethelignore` rules are
 * applied as well.
 *
 * @param {string} targetPath - directory to list
 * @param {{respectIgnore?: boolean}} [options]
 * @returns {Promise<Array<object>>}
 */
export async function listLocalEntries(targetPath, { respectIgnore = true } = {}) {
  const resolvedPath = await ensureLocalDirectory(targetPath);

  // Resolve the workspace root exactly once (the original called findRoot
  // twice: once for the rules, once for relative-path computation).
  const workspaceRoot = respectIgnore ? findRoot(resolvedPath) : null;
  const ignoreRules = workspaceRoot ? loadIgnoreRules(workspaceRoot) : null;

  const directoryEntries = await fs.promises.readdir(resolvedPath, {
    withFileTypes: true,
  });

  const visible = directoryEntries.filter((entry) => {
    if (entry.name.startsWith(".")) return false;
    if (ignoreRules && workspaceRoot) {
      // Ignore rules match POSIX-style paths relative to the workspace root.
      const rel = path
        .relative(workspaceRoot, path.join(resolvedPath, entry.name))
        .split(path.sep)
        .join("/");
      if (ignoreRules.ignores(rel)) return false;
    }
    return true;
  });

  const items = await Promise.all(
    visible.map(async (entry) => {
      const absolutePath = path.join(resolvedPath, entry.name);

      // A broken symlink or a file deleted mid-listing would reject the
      // whole Promise.all; skip the entry instead.
      let stat;
      try {
        stat = await fs.promises.stat(absolutePath);
      } catch {
        return null;
      }

      return {
        id: absolutePath,
        name: entry.name,
        absolutePath,
        isDirectory: stat.isDirectory(),
        size: stat.isDirectory() ? null : stat.size,
        sizeLabel: stat.isDirectory() ? " DIR " : humanSize(stat.size),
        modifiedTime: new Date(stat.mtimeMs).toISOString(),
      };
    })
  );

  return items
    .filter(Boolean)
    .sort((left, right) => {
      if (left.isDirectory !== right.isDirectory) {
        return left.isDirectory ? -1 : 1;
      }
      return left.name.localeCompare(right.name);
    });
}
76
+
77
/**
 * Remove a file or directory (directories are removed recursively).
 * `force: false` means a missing path rejects, so callers are told about
 * bad targets instead of silently succeeding.
 *
 * @returns {Promise<string>} the resolved path that was removed
 */
export async function deleteLocalEntry(targetPath) {
  const absolute = path.resolve(targetPath);
  await fs.promises.rm(absolute, { recursive: true, force: false });
  return absolute;
}
82
+
83
/**
 * Rename a file or directory in place (same parent directory).
 *
 * @param {string} targetPath - existing entry
 * @param {string} nextName - new basename (no separators)
 * @returns {Promise<string>} the new absolute path (or the original path
 *   unchanged when the name is already `nextName`)
 * @throws if the name is empty, contains a path separator, or the target
 *   already exists
 */
export async function renameLocalEntry(targetPath, nextName) {
  const resolvedPath = path.resolve(targetPath);
  const trimmedName = nextName.trim();

  if (!trimmedName) {
    throw new Error("New name cannot be empty.");
  }

  // Reject BOTH separator styles. The original checked only `path.sep`,
  // which on Windows ("\\") let "/"-containing names through even though
  // "/" is a valid separator there, and on POSIX let "\\" through.
  if (trimmedName.includes("/") || trimmedName.includes("\\")) {
    throw new Error("New name cannot include path separators.");
  }

  const nextPath = path.join(path.dirname(resolvedPath), trimmedName);
  if (resolvedPath === nextPath) {
    return resolvedPath;
  }

  // Refuse to clobber an existing target. (Best-effort: a race between this
  // check and the rename is still possible.)
  let targetExists = true;
  try {
    await fs.promises.access(nextPath);
  } catch {
    targetExists = false;
  }
  if (targetExists) {
    throw new Error(`Target already exists: ${nextPath}`);
  }

  await fs.promises.rename(resolvedPath, nextPath);
  return nextPath;
}
@@ -0,0 +1,65 @@
1
+ /**
2
+ * Short-lived cache for remote file listings.
3
+ *
4
+ * Stores the last remote file list in .aethel/.remote-cache.json so that
5
+ * rapid successive commands (e.g. `status` then `add` then `commit`) don't
6
+ * each make a full Drive API round-trip.
7
+ *
8
+ * Default TTL: 60 seconds. Commands that mutate remote state (commit, push)
9
+ * should invalidate the cache.
10
+ */
11
+
12
+ import fs from "node:fs";
13
+ import path from "node:path";
14
+ import { AETHEL_DIR } from "./config.js";
15
+
16
// Cache file name, stored inside the workspace's .aethel directory.
const CACHE_FILE = ".remote-cache.json";
// How long a cached remote listing stays valid before readRemoteCache
// treats it as stale.
const DEFAULT_TTL_MS = 60_000; // 60 seconds
18
+
19
// Absolute path of the remote-cache file for a given workspace root.
function cachePath(root) {
  return path.join(root, AETHEL_DIR, CACHE_FILE);
}
22
+
23
/**
 * Read the cached remote listing for `root`.
 *
 * Returns `{ files, duplicateFolders }` when a usable cache exists, or null
 * when the cache file is missing, older than `ttlMs`, written for a
 * different `rootFolderId`, or structurally invalid/corrupt.
 */
export function readRemoteCache(root, rootFolderId = null, ttlMs = DEFAULT_TTL_MS) {
  const file = cachePath(root);
  if (!fs.existsSync(file)) return null;

  try {
    const snapshot = JSON.parse(fs.readFileSync(file, "utf-8"));

    const expired = Date.now() - (snapshot.timestamp || 0) > ttlMs;
    const wrongFolder = (snapshot.rootFolderId ?? null) !== (rootFolderId ?? null);
    const malformed =
      !Array.isArray(snapshot.files) || !Array.isArray(snapshot.duplicateFolders);

    if (expired || wrongFolder || malformed) {
      return null;
    }

    return {
      files: snapshot.files,
      duplicateFolders: snapshot.duplicateFolders,
    };
  } catch {
    // Unreadable or corrupt cache behaves exactly like no cache at all.
    return null;
  }
}
45
+
46
/**
 * Persist the remote listing to disk so closely-spaced commands can reuse
 * it instead of repeating the Drive API round-trip.
 */
export function writeRemoteCache(root, remoteState, rootFolderId = null) {
  const payload = {
    timestamp: Date.now(),
    rootFolderId: rootFolderId ?? null,
    count: remoteState.files.length,
    files: remoteState.files,
    duplicateFolders: remoteState.duplicateFolders,
  };
  fs.writeFileSync(cachePath(root), JSON.stringify(payload) + "\n");
}
59
+
60
/**
 * Delete the remote cache file (call after commands that mutate remote
 * state, e.g. commit/push).
 *
 * Uses `fs.rmSync(..., { force: true })`, which is a no-op when the file is
 * already gone — unlike the original existsSync/unlinkSync pair, which could
 * throw ENOENT if the file was removed between the check and the unlink.
 */
export function invalidateRemoteCache(root) {
  fs.rmSync(cachePath(root), { force: true });
}
@@ -0,0 +1,159 @@
1
+ import crypto from "node:crypto";
2
+ import fs from "node:fs";
3
+ import path from "node:path";
4
+ import { AETHEL_DIR } from "./config.js";
5
+ import { loadIgnoreRules } from "./ignore.js";
6
+
7
// Per-workspace md5 cache file, stored inside the .aethel directory.
const HASH_CACHE_FILE = ".hash-cache.json";
8
+
9
/**
 * Compute the md5 hex digest of a file by streaming it, so memory use stays
 * constant regardless of file size.
 *
 * @param {string} filePath
 * @returns {Promise<string>} 32-character lowercase hex digest
 */
export async function md5Local(filePath) {
  return new Promise((resolve, reject) => {
    const digest = crypto.createHash("md5");
    fs.createReadStream(filePath)
      .on("error", reject)
      .on("data", (chunk) => digest.update(chunk))
      .on("end", () => resolve(digest.digest("hex")));
  });
}
19
+
20
+ // ── Hash cache ───────────────────────────────────────────────────────
21
+
22
// Absolute path of the hash-cache file for a given workspace root.
function hashCachePath(root) {
  return path.join(root, AETHEL_DIR, HASH_CACHE_FILE);
}
25
+
26
// Load the persisted relativePath → "mtime:size:md5" map. Any read or parse
// failure yields an empty cache — the worst case is re-hashing everything.
function loadHashCache(root) {
  const file = hashCachePath(root);
  if (!fs.existsSync(file)) return new Map();

  try {
    const parsed = JSON.parse(fs.readFileSync(file, "utf-8"));
    return new Map(Object.entries(parsed));
  } catch {
    return new Map();
  }
}
36
+
37
// Persist the hash cache as plain JSON (Map entries → object).
function saveHashCache(root, cache) {
  const serialized = JSON.stringify(Object.fromEntries(cache));
  fs.writeFileSync(hashCachePath(root), serialized + "\n");
}
42
+
43
+ // ── Scanning ─────────────────────────────────────────────────────────
44
+
45
// Max files hashed concurrently per batch (bounds open file descriptors).
const PARALLEL_HASH_LIMIT = 32;
46
+
47
/**
 * Recursively scan a workspace and return a map of
 * relativePath → { localPath, size, md5, modifiedTime }.
 *
 * Two phases: (1) walk the tree collecting stats — cheap, no hashing;
 * (2) hash files in parallel batches of PARALLEL_HASH_LIMIT, reusing the
 * persisted hash cache when a file's mtime+size are unchanged.
 *
 * @param {string} root - workspace root
 * @param {{respectIgnore?: boolean}} [options]
 */
export async function scanLocal(root, { respectIgnore = true } = {}) {
  const resolvedRoot = path.resolve(root);
  const ignoreRules = respectIgnore ? loadIgnoreRules(resolvedRoot) : null;
  const hashCache = loadHashCache(resolvedRoot);
  const nextCache = new Map();

  // Phase 1: collect all file stats (fast — no hashing yet)
  const filesToHash = [];

  async function walk(currentPath) {
    let entries;
    try {
      entries = await fs.promises.readdir(currentPath, { withFileTypes: true });
    } catch {
      // Unreadable directory (permissions, vanished mid-scan): skip subtree.
      return;
    }

    for (const entry of entries) {
      const fullPath = path.join(currentPath, entry.name);
      // Ignore rules match POSIX-style paths relative to the root.
      const relativePath = path
        .relative(resolvedRoot, fullPath)
        .split(path.sep)
        .join("/");

      if (ignoreRules?.ignores(relativePath)) {
        continue;
      }

      if (entry.isDirectory()) {
        await walk(fullPath);
        continue;
      }

      if (!entry.isFile()) {
        continue;
      }

      // The file can disappear (or lose read permission) between readdir
      // and stat; skip it rather than rejecting the entire scan. The
      // original let this stat failure abort scanLocal.
      let stat;
      try {
        stat = await fs.promises.stat(fullPath);
      } catch {
        continue;
      }
      filesToHash.push({ fullPath, relativePath, stat });
    }
  }

  await walk(resolvedRoot);

  // Phase 2: hash files in parallel batches, using cache when possible
  const result = {};

  for (let i = 0; i < filesToHash.length; i += PARALLEL_HASH_LIMIT) {
    const batch = filesToHash.slice(i, i + PARALLEL_HASH_LIMIT);
    const hashes = await Promise.all(
      batch.map(async ({ fullPath, relativePath, stat }) => {
        const md5 = await getMd5Cached(hashCache, nextCache, fullPath, relativePath, stat);
        return { relativePath, stat, md5 };
      })
    );

    for (const { relativePath, stat, md5 } of hashes) {
      result[relativePath] = {
        localPath: relativePath,
        size: stat.size,
        md5,
        modifiedTime: new Date(stat.mtimeMs).toISOString(),
      };
    }
  }

  // Persist the updated cache so the next scan skips unchanged files.
  saveHashCache(resolvedRoot, nextCache);
  return result;
}
117
+
118
/**
 * Return the md5 for a file, reusing the previous scan's digest when the
 * file's mtime and size are unchanged. The (possibly fresh) entry is always
 * recorded in `newCache`.
 *
 * Cache entry format: "<mtimeMs>:<size>:<md5>".
 */
async function getMd5Cached(oldCache, newCache, fullPath, relativePath, stat) {
  const fingerprint = `${stat.mtimeMs}:${stat.size}`;
  const prior = oldCache.get(relativePath);
  const isHit = Boolean(prior) && prior.startsWith(`${fingerprint}:`);

  const md5 = isHit
    ? prior.slice(fingerprint.length + 1) // reuse: mtime and size match
    : await md5Local(fullPath); // recompute: changed or never seen

  newCache.set(relativePath, `${fingerprint}:${md5}`);
  return md5;
}
134
+
135
+ // ── Snapshot building ────────────────────────────────────────────────
136
+
137
/**
 * Build a commit snapshot pairing the remote file listing (keyed by Drive
 * file id) with the local scan results.
 *
 * @param {Array<object>} remoteFiles - remote listing entries
 * @param {object} localFiles - relativePath → local metadata map (shallow-copied)
 * @param {string} [message] - commit message
 * @returns {object} { timestamp, message, files, localFiles }
 */
export function buildSnapshot(remoteFiles, localFiles, message = "") {
  const byId = {};

  for (const remote of remoteFiles) {
    byId[remote.id] = {
      id: remote.id,
      name: remote.name,
      path: remote.path,
      md5Checksum: remote.md5Checksum ?? null,
      size: remote.size ?? null,
      mimeType: remote.mimeType || "",
      modifiedTime: remote.modifiedTime ?? null,
      localPath: remote.path, // mirror layout: local path follows remote path
    };
  }

  return {
    timestamp: new Date().toISOString(),
    message,
    files: byId,
    localFiles: { ...localFiles },
  };
}
@@ -0,0 +1,125 @@
1
+ import { readIndex, writeIndex } from "./config.js";
2
+
3
/** Return the currently staged entries for a workspace (empty array if none). */
export function stagedEntries(root) {
  const { staged } = readIndex(root);
  return staged || [];
}
6
+
7
/**
 * Convert a detected change into the compact entry we persist in the index.
 * Optional fields (fileId, remotePath) are only added when present on the
 * change, keeping the serialized index small.
 */
function changeToEntry(change) {
  const entry = {
    action: change.suggestedAction,
    path: change.path,
    localPath: change.localMeta?.localPath || change.path,
  };

  if (change.fileId) entry.fileId = change.fileId;
  if (change.remoteMeta?.path) entry.remotePath = change.remoteMeta.path;

  return entry;
}
24
+
25
/** Stage a single change, replacing any previously staged entry for the same path. */
export function stageChange(root, change) {
  const index = readIndex(root);
  const others = (index.staged || []).filter((entry) => entry.path !== change.path);
  index.staged = [...others, changeToEntry(change)];
  writeIndex(root, index);
}
34
+
35
/**
 * Stage many changes in one index write; per path, later changes win over
 * both existing staged entries and earlier changes in the batch.
 * @returns {number} the number of changes staged
 */
export function stageChanges(root, changes) {
  const index = readIndex(root);
  const byPath = new Map((index.staged || []).map((entry) => [entry.path, entry]));

  changes.forEach((change) => {
    byPath.set(change.path, changeToEntry(change));
  });

  index.staged = [...byPath.values()];
  writeIndex(root, index);
  return changes.length;
}
47
+
48
/**
 * Remove a single path from the staging area.
 * @returns {boolean} true if an entry was removed, false if nothing matched
 *   (in which case the index is not rewritten)
 */
export function unstagePath(root, targetPath) {
  const index = readIndex(root);
  const before = index.staged || [];
  const after = before.filter((entry) => entry.path !== targetPath);

  if (after.length === before.length) {
    return false;
  }

  index.staged = after;
  writeIndex(root, index);
  return true;
}
61
+
62
/**
 * Clear the staging area entirely.
 * @returns {number} how many entries were removed
 */
export function unstageAll(root) {
  const index = readIndex(root);
  const removed = (index.staged || []).length;
  index.staged = [];
  writeIndex(root, index);
  return removed;
}
69
+
70
/**
 * Stage a conflict with an explicit resolution strategy.
 *
 * - "theirs": keep the remote version (stage a download).
 * - "ours": keep the local version (stage an upload).
 * - "both": stage a download of the remote copy under a `<base>.remote<ext>`
 *   name, plus an upload of the unchanged local version.
 *
 * @param {"ours"|"theirs"|"both"} strategy - any other value is a no-op
 */
export function stageConflictResolution(root, change, strategy) {
  if (strategy === "theirs") {
    // Keep remote version → download
    return stageChange(root, {
      ...change,
      changeType: "remote_modified",
      suggestedAction: "download",
    });
  }

  if (strategy === "ours") {
    // Keep local version → upload
    return stageChange(root, {
      ...change,
      changeType: "local_modified",
      suggestedAction: "upload",
    });
  }

  if (strategy === "both") {
    // Keep both: download remote as .remote copy, keep local as-is, then upload local
    const index = readIndex(root);
    const staged = (index.staged || []).filter(
      (entry) => entry.path !== change.path
    );

    // Derive the extension from the FINAL path segment only. The original
    // `change.path.includes(".")` + `split(".").pop()` mis-handled dotted
    // directories ("dir.v2/file" → ext ".v2/file") and dotfiles
    // (".env" → base ""). A dot at position slash+1 is a dotfile, not an
    // extension, so it is excluded too.
    const slash = change.path.lastIndexOf("/");
    const dot = change.path.lastIndexOf(".");
    const ext = dot > slash + 1 ? change.path.slice(dot) : "";
    const base = ext ? change.path.slice(0, -ext.length) : change.path;
    const remoteCopyPath = `${base}.remote${ext}`;

    // Stage download of remote with the renamed path
    staged.push({
      action: "download",
      path: remoteCopyPath,
      localPath: remoteCopyPath,
      fileId: change.fileId,
      remotePath: change.remoteMeta?.path || change.path,
    });

    // Stage upload of the local version under its original path
    staged.push({
      action: "upload",
      path: change.path,
      localPath: change.localMeta?.localPath || change.path,
      fileId: change.fileId,
      remotePath: change.remoteMeta?.path || change.path,
    });

    index.staged = staged;
    writeIndex(root, index);
  }
}
+ }