aethel 0.3.7 → 0.3.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.3.8 (2026-04-06)
4
+
5
+ - Fix Drive upload checksum test stub
6
+
3
7
  ## 0.3.7 (2026-04-05)
4
8
 
5
9
  - Fix orphan checker not recognizing My Drive root — all files under synced folders were silently dropped
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
1
1
  MIT License
2
2
 
3
- Copyright (c) 2025 Aethel Contributors
3
+ Copyright (c) 2026 Aethel Contributors
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "aethel",
3
- "version": "0.3.7",
3
+ "version": "0.3.8",
4
4
  "description": "Git-style Google Drive sync CLI with interactive TUI",
5
5
  "type": "module",
6
6
  "license": "MIT",
package/src/cli.js CHANGED
@@ -97,6 +97,15 @@ async function loadStateWithProgress(repo, opts) {
97
97
  }
98
98
  }
99
99
 
100
+ function assertInsideRoot(root, targetPath) {
101
+ const abs = path.resolve(root, targetPath);
102
+ const resolvedRoot = path.resolve(root);
103
+ if (!abs.startsWith(resolvedRoot + path.sep) && abs !== resolvedRoot) {
104
+ throw new Error(`Path traversal blocked: '${targetPath}' resolves outside workspace`);
105
+ }
106
+ return abs;
107
+ }
108
+
100
109
  function matchesPattern(targetPath, pattern) {
101
110
  if (targetPath === pattern) {
102
111
  return true;
@@ -783,7 +792,7 @@ async function handleRestore(paths, options) {
783
792
  continue;
784
793
  }
785
794
 
786
- const localDest = path.join(root, entry.localPath || entry.path);
795
+ const localDest = assertInsideRoot(root, entry.localPath || entry.path);
787
796
  const spinner = createSpinner(`Restoring ${targetPath}...`);
788
797
 
789
798
  try {
@@ -807,7 +816,7 @@ async function handleRm(paths, options) {
807
816
  const root = repo.root;
808
817
 
809
818
  for (const targetPath of paths) {
810
- const localAbs = path.join(root, targetPath);
819
+ const localAbs = assertInsideRoot(root, targetPath);
811
820
  if (fs.existsSync(localAbs)) {
812
821
  await fs.promises.rm(localAbs, { recursive: true });
813
822
  console.log(` Deleted locally: ${targetPath}`);
@@ -833,8 +842,8 @@ async function handleRm(paths, options) {
833
842
  async function handleMv(source, dest, options) {
834
843
  const root = requireRoot();
835
844
 
836
- const srcAbs = path.join(root, source);
837
- const destAbs = path.join(root, dest);
845
+ const srcAbs = assertInsideRoot(root, source);
846
+ const destAbs = assertInsideRoot(root, dest);
838
847
 
839
848
  if (!fs.existsSync(srcAbs)) {
840
849
  console.log(`Source not found: ${source}`);
@@ -848,6 +857,77 @@ async function handleMv(source, dest, options) {
848
857
  console.log(" Run 'aethel status' to see the resulting changes (old path deleted, new path added).");
849
858
  }
850
859
 
860
+ async function handleVerify(options) {
861
+ const checkRemote = Boolean(options.remote);
862
+ const repo = checkRemote
863
+ ? await openRepo(options)
864
+ : (() => { const root = requireRoot(); return new Repository(root); })();
865
+
866
+ const snapshot = repo.getSnapshot();
867
+ if (!snapshot) {
868
+ console.log("No snapshot to verify. Run 'aethel commit' first.");
869
+ return;
870
+ }
871
+
872
+ const localCount = Object.keys(snapshot.localFiles || {}).filter(
873
+ (k) => !snapshot.localFiles[k].isFolder
874
+ ).length;
875
+ const remoteCount = checkRemote ? Object.keys(snapshot.files || {}).length : 0;
876
+ const total = localCount + remoteCount;
877
+
878
+ const bar = createProgressBar("Verifying", total);
879
+ const result = await repo.verify({
880
+ checkRemote,
881
+ onProgress(done) { bar.update(done); },
882
+ });
883
+
884
+ // Snapshot integrity
885
+ if (result.snapshot.valid) {
886
+ bar.done(`Verification complete`);
887
+ console.log(`\n Snapshot: ✔ ${result.snapshot.reason}`);
888
+ } else {
889
+ bar.done(`Verification found issues`);
890
+ console.log(`\n Snapshot: ✖ ${result.snapshot.reason}`);
891
+ }
892
+
893
+ // Local issues
894
+ if (result.local.length) {
895
+ console.log(`\n Local issues (${result.local.length}):`);
896
+ for (const e of result.local) {
897
+ if (e.status === "missing") {
898
+ console.log(` ✖ ${e.path} — file missing`);
899
+ } else if (e.status === "modified") {
900
+ console.log(` ✖ ${e.path} — md5 mismatch (expected ${e.expected.slice(0, 8)}, got ${e.actual.slice(0, 8)})`);
901
+ }
902
+ }
903
+ } else {
904
+ console.log(` Local files: ✔ ${localCount} file(s) verified`);
905
+ }
906
+
907
+ // Remote issues
908
+ if (checkRemote) {
909
+ if (result.remote.length) {
910
+ console.log(`\n Remote issues (${result.remote.length}):`);
911
+ for (const e of result.remote) {
912
+ if (e.status === "deleted_remote") {
913
+ console.log(` ✖ ${e.path} — deleted on Drive`);
914
+ } else if (e.status === "modified_remote") {
915
+ console.log(` ✖ ${e.path} — md5 mismatch (expected ${e.expected.slice(0, 8)}, got ${e.actual.slice(0, 8)})`);
916
+ }
917
+ }
918
+ } else {
919
+ console.log(` Remote files: ✔ ${remoteCount} file(s) verified`);
920
+ }
921
+ }
922
+
923
+ if (result.ok) {
924
+ console.log("\n✔ All integrity checks passed.");
925
+ } else {
926
+ console.log("\n✖ Integrity issues detected. Run 'aethel status' to review.");
927
+ process.exitCode = 1;
928
+ }
929
+ }
930
+
851
931
  async function handleTui(options) {
852
932
  const repo = await openRepo(options, { requireWorkspace: false, silent: true });
853
933
  const cliArgs = [];
@@ -1080,6 +1160,13 @@ async function main() {
1080
1160
  .argument("<dest>", "Destination path (relative to workspace)")
1081
1161
  .action((source, dest, options) => handleMv(source, dest, options));
1082
1162
 
1163
+ addAuthOptions(
1164
+ program
1165
+ .command("verify")
1166
+ .description("Verify file integrity against last snapshot")
1167
+ .option("--remote", "Also verify remote files on Drive (requires network)")
1168
+ ).action(handleVerify);
1169
+
1083
1170
  addAuthOptions(
1084
1171
  program
1085
1172
  .command("tui")
package/src/core/auth.js CHANGED
@@ -40,8 +40,9 @@ export async function persistCredentials(sourcePath) {
40
40
  const resolved = path.resolve(sourcePath);
41
41
  if (resolved === dest) return;
42
42
  if (fsSyncFallback.existsSync(dest)) return;
43
- await fs.mkdir(CONFIG_DIR, { recursive: true });
43
+ await fs.mkdir(CONFIG_DIR, { recursive: true, mode: 0o700 });
44
44
  await fs.copyFile(resolved, dest);
45
+ await fs.chmod(dest, 0o600);
45
46
  }
46
47
 
47
48
  export function resolveCredentialsPath(customPath) {
@@ -109,8 +110,8 @@ function createOAuthClient(config, redirectUri) {
109
110
  }
110
111
 
111
112
  async function persistToken(tokenPath, credentials) {
112
- await fs.mkdir(path.dirname(path.resolve(tokenPath)), { recursive: true });
113
- await fs.writeFile(tokenPath, JSON.stringify(credentials, null, 2) + "\n");
113
+ await fs.mkdir(path.dirname(path.resolve(tokenPath)), { recursive: true, mode: 0o700 });
114
+ await fs.writeFile(tokenPath, JSON.stringify(credentials, null, 2) + "\n", { mode: 0o600 });
114
115
  }
115
116
 
116
117
  function attachTokenPersistence(client, tokenPath) {
package/src/core/config.js CHANGED
@@ -2,6 +2,7 @@
2
2
  * .aethel/ directory management, configuration, and state persistence.
3
3
  */
4
4
 
5
+ import crypto from "node:crypto";
5
6
  import fs from "node:fs";
6
7
  import path from "node:path";
7
8
 
@@ -97,10 +98,28 @@ export function latestSnapshotPath(root) {
97
98
  return path.join(dot(root), SNAPSHOTS_DIR, LATEST_SNAPSHOT);
98
99
  }
99
100
 
100
- export function readLatestSnapshot(root) {
101
+ export function readLatestSnapshot(root, { verify = false } = {}) {
101
102
  const p = latestSnapshotPath(root);
102
103
  if (!fs.existsSync(p)) return null;
103
- return JSON.parse(fs.readFileSync(p, "utf-8"));
104
+ const snapshot = JSON.parse(fs.readFileSync(p, "utf-8"));
105
+
106
+ if (verify && snapshot._checksum) {
107
+ const canonical = JSON.stringify({
108
+ timestamp: snapshot.timestamp,
109
+ message: snapshot.message,
110
+ files: snapshot.files,
111
+ localFiles: snapshot.localFiles,
112
+ });
113
+ const actual = crypto.createHash("sha256").update(canonical).digest("hex");
114
+ if (actual !== snapshot._checksum) {
115
+ throw new Error(
116
+ `Snapshot integrity check failed: checksum mismatch. ` +
117
+ `The snapshot file may have been tampered with.`
118
+ );
119
+ }
120
+ }
121
+
122
+ return snapshot;
104
123
  }
105
124
 
106
125
  export function writeSnapshot(root, snapshot) {
package/src/core/drive-api.js CHANGED
@@ -111,7 +111,12 @@ export function isWorkspaceType(mime) {
111
111
  }
112
112
 
113
113
  function escapeDriveQueryValue(value) {
114
- return value.replace(/\\/g, "\\\\").replace(/'/g, "\\'");
114
+ // Google Drive API query strings use single-quoted values.
115
+ // Escape backslashes first, then single and double quotes.
116
+ return value
117
+ .replace(/\\/g, "\\\\")
118
+ .replace(/'/g, "\\'")
119
+ .replace(/"/g, '\\"');
115
120
  }
116
121
 
117
122
  export function iconForMime(mime) {
@@ -504,14 +509,36 @@ export async function downloadFile(drive, fileMeta, localPath) {
504
509
  { responseType: "stream" }
505
510
  );
506
511
  await pipeline(response.data, fs.createWriteStream(targetPath));
512
+ // Exported files have no md5Checksum from Drive — skip verification
507
513
  return;
508
514
  }
509
515
 
516
+ // Stream to disk while computing MD5 in parallel
517
+ const { createHash } = await import("node:crypto");
518
+ const md5 = createHash("md5");
519
+ const writeStream = fs.createWriteStream(localPath);
510
520
  const response = await drive.files.get(
511
521
  { fileId: fileMeta.id, alt: "media", supportsAllDrives: true },
512
522
  { responseType: "stream" }
513
523
  );
514
- await pipeline(response.data, fs.createWriteStream(localPath));
524
+
525
+ // Tee: pipe to both disk and hasher
526
+ response.data.on("data", (chunk) => md5.update(chunk));
527
+ await pipeline(response.data, writeStream);
528
+
529
+ // Verify integrity if Drive provided an md5
530
+ const expectedMd5 = fileMeta.md5Checksum;
531
+ if (expectedMd5) {
532
+ const actualMd5 = md5.digest("hex");
533
+ if (actualMd5 !== expectedMd5) {
534
+ // Remove corrupt file
535
+ fs.unlinkSync(localPath);
536
+ throw new Error(
537
+ `Integrity check failed for ${fileMeta.name}: ` +
538
+ `expected md5 ${expectedMd5}, got ${actualMd5}`
539
+ );
540
+ }
541
+ }
515
542
  }
516
543
 
517
544
  export async function uploadFile(
package/src/core/repository.js CHANGED
@@ -36,7 +36,13 @@ import {
36
36
  readRemoteCache,
37
37
  writeRemoteCache,
38
38
  } from "./remote-cache.js";
39
- import { buildSnapshot, scanLocal } from "./snapshot.js";
39
+ import {
40
+ buildSnapshot,
41
+ hashFile,
42
+ md5Local,
43
+ scanLocal,
44
+ verifySnapshotChecksum,
45
+ } from "./snapshot.js";
40
46
  import {
41
47
  stageChange,
42
48
  stageChanges,
@@ -329,6 +335,87 @@ export class Repository {
329
335
  return JSON.parse(fs.readFileSync(path.join(historyPath, match), "utf-8"));
330
336
  }
331
337
 
338
+ // ── Integrity verification ──────────────────────────────────────────
339
+
340
+ /**
341
+ * Full integrity verification of the workspace.
342
+ * Checks: snapshot checksum, local files vs snapshot md5, remote vs snapshot md5.
343
+ *
344
+ * @param {object} [options]
345
+ * @param {boolean} [options.checkRemote=false] Also verify remote checksums (requires connect)
346
+ * @param {function} [options.onProgress] (done, total, path, status) callback
347
+ * @returns {{ ok: boolean, snapshot: object, local: object[], remote: object[] }}
348
+ */
349
+ async verify({ checkRemote = false, onProgress } = {}) {
350
+ const snapshot = readLatestSnapshot(this._root, { verify: true });
351
+ const result = { ok: true, snapshot: { valid: true }, local: [], remote: [] };
352
+
353
+ if (!snapshot) {
354
+ return { ok: true, snapshot: { valid: true, reason: "no snapshot yet" }, local: [], remote: [] };
355
+ }
356
+
357
+ // 1. Snapshot integrity
358
+ const snapshotCheck = verifySnapshotChecksum(snapshot);
359
+ result.snapshot = snapshotCheck;
360
+ if (!snapshotCheck.valid) result.ok = false;
361
+
362
+ // 2. Local file integrity vs snapshot
363
+ const localFiles = snapshot.localFiles || {};
364
+ const entries = Object.entries(localFiles).filter(([, meta]) => !meta.isFolder);
365
+ const total = entries.length + (checkRemote ? Object.keys(snapshot.files || {}).length : 0);
366
+ let done = 0;
367
+
368
+ for (const [relativePath, meta] of entries) {
369
+ const absPath = path.join(this._root, ...relativePath.split("/"));
370
+ const entry = { path: relativePath, status: "ok" };
371
+
372
+ if (!fs.existsSync(absPath)) {
373
+ entry.status = "missing";
374
+ result.ok = false;
375
+ } else if (meta.md5) {
376
+ const actual = await md5Local(absPath);
377
+ if (actual !== meta.md5) {
378
+ entry.status = "modified";
379
+ entry.expected = meta.md5;
380
+ entry.actual = actual;
381
+ result.ok = false;
382
+ }
383
+ }
384
+
385
+ if (entry.status !== "ok") result.local.push(entry);
386
+ done++;
387
+ onProgress?.(done, total, relativePath, entry.status);
388
+ }
389
+
390
+ // 3. Remote integrity vs snapshot (optional, requires API call)
391
+ if (checkRemote) {
392
+ const remoteState = await this._loadRemoteState({ useCache: false });
393
+ const remoteById = new Map(remoteState.files.map((f) => [f.id, f]));
394
+
395
+ for (const [fileId, snapEntry] of Object.entries(snapshot.files || {})) {
396
+ if (snapEntry.isFolder) { done++; continue; }
397
+ const entry = { path: snapEntry.path || snapEntry.localPath, status: "ok" };
398
+ const remote = remoteById.get(fileId);
399
+
400
+ if (!remote) {
401
+ entry.status = "deleted_remote";
402
+ result.ok = false;
403
+ } else if (snapEntry.md5Checksum && remote.md5Checksum && snapEntry.md5Checksum !== remote.md5Checksum) {
404
+ entry.status = "modified_remote";
405
+ entry.expected = snapEntry.md5Checksum;
406
+ entry.actual = remote.md5Checksum;
407
+ result.ok = false;
408
+ }
409
+
410
+ if (entry.status !== "ok") result.remote.push(entry);
411
+ done++;
412
+ onProgress?.(done, total, entry.path, entry.status);
413
+ }
414
+ }
415
+
416
+ return result;
417
+ }
418
+
332
419
  // ── Private helpers ─────────────────────────────────────────────────
333
420
 
334
421
  async _loadRemoteState({ useCache = true } = {}) {
package/src/core/snapshot.js CHANGED
@@ -17,6 +17,48 @@ export async function md5Local(filePath) {
17
17
  });
18
18
  }
19
19
 
20
+ /**
21
+ * Stream-hash a file with the given algorithm (default sha256).
22
+ * Returns hex digest.
23
+ */
24
+ export async function hashFile(filePath, algorithm = "sha256") {
25
+ return new Promise((resolve, reject) => {
26
+ const hash = crypto.createHash(algorithm);
27
+ const stream = fs.createReadStream(filePath);
28
+
29
+ stream.on("data", (chunk) => hash.update(chunk));
30
+ stream.on("error", reject);
31
+ stream.on("end", () => resolve(hash.digest("hex")));
32
+ });
33
+ }
34
+
35
+ /**
36
+ * Compute a SHA-256 integrity checksum over the snapshot's data fields.
37
+ * The checksum covers files + localFiles + message + timestamp, but NOT
38
+ * the checksum field itself, so it can be verified after reading.
39
+ */
40
+ export function computeSnapshotChecksum(snapshot) {
41
+ const canonical = JSON.stringify({
42
+ timestamp: snapshot.timestamp,
43
+ message: snapshot.message,
44
+ files: snapshot.files,
45
+ localFiles: snapshot.localFiles,
46
+ });
47
+ return crypto.createHash("sha256").update(canonical).digest("hex");
48
+ }
49
+
50
+ /**
51
+ * Verify a snapshot's embedded checksum. Returns true if valid or
52
+ * if the snapshot has no checksum (pre-integrity snapshots).
53
+ */
54
+ export function verifySnapshotChecksum(snapshot) {
55
+ if (!snapshot?._checksum) return { valid: true, reason: "no checksum (legacy snapshot)" };
56
+ const expected = snapshot._checksum;
57
+ const actual = computeSnapshotChecksum(snapshot);
58
+ if (actual === expected) return { valid: true, reason: "checksum valid" };
59
+ return { valid: false, reason: `checksum mismatch: expected ${expected.slice(0, 12)}…, got ${actual.slice(0, 12)}…` };
60
+ }
61
+
20
62
  // ── Hash cache ───────────────────────────────────────────────────────
21
63
 
22
64
  function hashCachePath(root) {
@@ -227,10 +269,14 @@ export function buildSnapshot(remoteFiles, localFiles, message = "") {
227
269
  };
228
270
  }
229
271
 
230
- return {
272
+ const snapshot = {
231
273
  timestamp: new Date().toISOString(),
232
274
  message,
233
275
  files,
234
276
  localFiles: { ...localFiles },
235
277
  };
278
+
279
+ // Embed integrity checksum
280
+ snapshot._checksum = computeSnapshotChecksum(snapshot);
281
+ return snapshot;
236
282
  }
package/src/core/sync.js CHANGED
@@ -2,6 +2,7 @@ import fs from "node:fs";
2
2
  import path from "node:path";
3
3
  import { readConfig, readIndex, writeIndex } from "./config.js";
4
4
  import { downloadFile, ensureFolder, trashFile, uploadFile } from "./drive-api.js";
5
+ import { md5Local } from "./snapshot.js";
5
6
 
6
7
  function readPositiveIntEnv(name, fallback) {
7
8
  const rawValue = Number.parseInt(process.env[name] || "", 10);
@@ -11,7 +12,12 @@ function readPositiveIntEnv(name, fallback) {
11
12
  const CONCURRENCY = readPositiveIntEnv("AETHEL_DRIVE_CONCURRENCY", 10);
12
13
 
13
14
  function toLocalAbsolutePath(root, relativePath) {
14
- return path.join(root, ...relativePath.split("/"));
15
+ const abs = path.resolve(root, ...relativePath.split("/"));
16
+ const resolvedRoot = path.resolve(root);
17
+ if (!abs.startsWith(resolvedRoot + path.sep) && abs !== resolvedRoot) {
18
+ throw new Error(`Path traversal blocked: ${relativePath} resolves outside workspace`);
19
+ }
20
+ return abs;
15
21
  }
16
22
 
17
23
  export class CommitResult {
@@ -102,10 +108,22 @@ async function uploadStagedFile(drive, entry, root, driveFolderId) {
102
108
  parentId = await ensureFolder(drive, parentPath, driveFolderId);
103
109
  }
104
110
 
105
- await uploadFile(drive, localAbsolutePath, remotePath, {
111
+ const uploadResult = await uploadFile(drive, localAbsolutePath, remotePath, {
106
112
  parentId,
107
113
  existingId: entry.fileId || null,
108
114
  });
115
+
116
+ // Verify: Drive-returned md5 must match the local file we just uploaded.
117
+ // Google Workspace files (Docs, Sheets, etc.) don't have md5 — skip them.
118
+ if (uploadResult?.md5Checksum) {
119
+ const localMd5 = await md5Local(localAbsolutePath);
120
+ if (localMd5 !== uploadResult.md5Checksum) {
121
+ throw new Error(
122
+ `Upload integrity check failed for ${remotePath}: ` +
123
+ `local md5 ${localMd5}, Drive returned ${uploadResult.md5Checksum}`
124
+ );
125
+ }
126
+ }
109
127
  }
110
128
 
111
129
  async function deleteLocalFile(entry, root) {
@@ -194,6 +212,7 @@ export async function executeStaged(drive, root, progress) {
194
212
  // Remote operations (download, upload, delete_remote) share a concurrency pool.
195
213
  const localDeletes = [];
196
214
  const remoteOps = [];
215
+ const failedPaths = new Set();
197
216
 
198
217
  for (const [i, entry] of staged.entries()) {
199
218
  if (entry.action === "delete_local") {
@@ -210,6 +229,7 @@ export async function executeStaged(drive, root, progress) {
210
229
  await deleteLocalFile(entry, root);
211
230
  result.deletedLocal++;
212
231
  } catch (err) {
232
+ failedPaths.add(entry.path);
213
233
  result.errors.push(`delete_local ${entry.path}: ${err.message}`);
214
234
  }
215
235
  })
@@ -242,13 +262,20 @@ export async function executeStaged(drive, root, progress) {
242
262
  completed++;
243
263
  const op = remoteOps[idx];
244
264
  if (err) {
265
+ failedPaths.add(op.entry.path);
245
266
  result.errors.push(`${op.entry.action} ${op.entry.path}: ${err.message}`);
246
267
  }
247
268
  progress?.(completed - 1, staged.length, op.entry.action, path.posix.basename(op.entry.path || ""));
248
269
  });
249
270
 
250
271
  progress?.(staged.length, staged.length, "done", "");
251
- index.staged = [];
272
+
273
+ // Only clear succeeded entries — keep failed ones staged for retry
274
+ if (failedPaths.size > 0) {
275
+ index.staged = staged.filter((e) => failedPaths.has(e.path));
276
+ } else {
277
+ index.staged = [];
278
+ }
252
279
  writeIndex(root, index);
253
280
 
254
281
  return result;