aethel 0.3.7 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.4.0 (2026-04-06)
4
+
5
+ - Add pull --all for full remote download
6
+
7
+ ## 0.3.8 (2026-04-06)
8
+
9
+ - Fix Drive upload checksum test stub
10
+
3
11
  ## 0.3.7 (2026-04-05)
4
12
 
5
13
  - Fix orphan checker not recognizing My Drive root — all files under synced folders were silently dropped
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
1
1
  MIT License
2
2
 
3
- Copyright (c) 2025 Aethel Contributors
3
+ Copyright (c) 2026 Aethel Contributors
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
package/README.md CHANGED
@@ -78,6 +78,7 @@ aethel add --all # stage default suggested actions
78
78
  aethel commit -m "sync" # execute staged operations
79
79
 
80
80
  aethel pull -m "pull" # fetch remote changes and apply
81
+ aethel pull --all # download the full remote tree to local
81
82
  aethel push -m "push" # push local changes to Drive
82
83
  ```
83
84
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "aethel",
3
- "version": "0.3.7",
3
+ "version": "0.4.0",
4
4
  "description": "Git-style Google Drive sync CLI with interactive TUI",
5
5
  "type": "module",
6
6
  "license": "MIT",
package/src/cli.js CHANGED
@@ -97,6 +97,15 @@ async function loadStateWithProgress(repo, opts) {
97
97
  }
98
98
  }
99
99
 
100
+ function assertInsideRoot(root, targetPath) {
101
+ const abs = path.resolve(root, targetPath);
102
+ const resolvedRoot = path.resolve(root);
103
+ if (!abs.startsWith(resolvedRoot + path.sep) && abs !== resolvedRoot) {
104
+ throw new Error(`Path traversal blocked: '${targetPath}' resolves outside workspace`);
105
+ }
106
+ return abs;
107
+ }
108
+
100
109
  function matchesPattern(targetPath, pattern) {
101
110
  if (targetPath === pattern) {
102
111
  return true;
@@ -504,6 +513,37 @@ async function handlePull(paths, options) {
504
513
  const repo = await openRepo(options);
505
514
  const { diff, remoteState } = await loadStateWithProgress(repo, { useCache: false });
506
515
 
516
+ if (options.all) {
517
+ let remoteFiles = remoteState.files;
518
+
519
+ if (paths && paths.length > 0) {
520
+ remoteFiles = remoteFiles.filter((file) =>
521
+ paths.some((p) => matchesPattern(file.path, p))
522
+ );
523
+ }
524
+
525
+ if (!remoteFiles.length) {
526
+ console.log("No remote files matched.");
527
+ return;
528
+ }
529
+
530
+ if (options.dryRun) {
531
+ console.log(`Would pull ${remoteFiles.length} remote item(s):`);
532
+ for (const file of remoteFiles) {
533
+ console.log(` +R ${file.path} (full remote download)`);
534
+ }
535
+ return;
536
+ }
537
+
538
+ const count = repo.stageRemoteFilesForDownload(remoteFiles);
539
+ console.log(`Staged ${count} remote item(s). Committing...`);
540
+ await handleCommit({ ...options, message: options.message || "pull" }, {
541
+ repo,
542
+ snapshotHint: { remote: remoteState },
543
+ });
544
+ return;
545
+ }
546
+
507
547
  let remoteChanges = diff.changes.filter((change) =>
508
548
  [
509
549
  ChangeType.REMOTE_ADDED,
@@ -783,7 +823,7 @@ async function handleRestore(paths, options) {
783
823
  continue;
784
824
  }
785
825
 
786
- const localDest = path.join(root, entry.localPath || entry.path);
826
+ const localDest = assertInsideRoot(root, entry.localPath || entry.path);
787
827
  const spinner = createSpinner(`Restoring ${targetPath}...`);
788
828
 
789
829
  try {
@@ -807,7 +847,7 @@ async function handleRm(paths, options) {
807
847
  const root = repo.root;
808
848
 
809
849
  for (const targetPath of paths) {
810
- const localAbs = path.join(root, targetPath);
850
+ const localAbs = assertInsideRoot(root, targetPath);
811
851
  if (fs.existsSync(localAbs)) {
812
852
  await fs.promises.rm(localAbs, { recursive: true });
813
853
  console.log(` Deleted locally: ${targetPath}`);
@@ -833,8 +873,8 @@ async function handleRm(paths, options) {
833
873
  async function handleMv(source, dest, options) {
834
874
  const root = requireRoot();
835
875
 
836
- const srcAbs = path.join(root, source);
837
- const destAbs = path.join(root, dest);
876
+ const srcAbs = assertInsideRoot(root, source);
877
+ const destAbs = assertInsideRoot(root, dest);
838
878
 
839
879
  if (!fs.existsSync(srcAbs)) {
840
880
  console.log(`Source not found: ${source}`);
@@ -848,6 +888,77 @@ async function handleMv(source, dest, options) {
848
888
  console.log(" Run 'aethel status' to see the resulting changes (old path deleted, new path added).");
849
889
  }
850
890
 
891
+ async function handleVerify(options) {
892
+ const checkRemote = Boolean(options.remote);
893
+ const repo = checkRemote
894
+ ? await openRepo(options)
895
+ : (() => { const root = requireRoot(); return new Repository(root); })();
896
+
897
+ const snapshot = repo.getSnapshot();
898
+ if (!snapshot) {
899
+ console.log("No snapshot to verify. Run 'aethel commit' first.");
900
+ return;
901
+ }
902
+
903
+ const localCount = Object.keys(snapshot.localFiles || {}).filter(
904
+ (k) => !snapshot.localFiles[k].isFolder
905
+ ).length;
906
+ const remoteCount = checkRemote ? Object.keys(snapshot.files || {}).length : 0;
907
+ const total = localCount + remoteCount;
908
+
909
+ const bar = createProgressBar("Verifying", total);
910
+ const result = await repo.verify({
911
+ checkRemote,
912
+ onProgress(done) { bar.update(done); },
913
+ });
914
+
915
+ // Snapshot integrity
916
+ if (result.snapshot.valid) {
917
+ bar.done(`Verification complete`);
918
+ console.log(`\n Snapshot: ✔ ${result.snapshot.reason}`);
919
+ } else {
920
+ bar.done(`Verification found issues`);
921
+ console.log(`\n Snapshot: ✖ ${result.snapshot.reason}`);
922
+ }
923
+
924
+ // Local issues
925
+ if (result.local.length) {
926
+ console.log(`\n Local issues (${result.local.length}):`);
927
+ for (const e of result.local) {
928
+ if (e.status === "missing") {
929
+ console.log(` ✖ ${e.path} — file missing`);
930
+ } else if (e.status === "modified") {
931
+ console.log(` ✖ ${e.path} — md5 mismatch (expected ${e.expected.slice(0, 8)}, got ${e.actual.slice(0, 8)})`);
932
+ }
933
+ }
934
+ } else {
935
+ console.log(` Local files: ✔ ${localCount} file(s) verified`);
936
+ }
937
+
938
+ // Remote issues
939
+ if (checkRemote) {
940
+ if (result.remote.length) {
941
+ console.log(`\n Remote issues (${result.remote.length}):`);
942
+ for (const e of result.remote) {
943
+ if (e.status === "deleted_remote") {
944
+ console.log(` ✖ ${e.path} — deleted on Drive`);
945
+ } else if (e.status === "modified_remote") {
946
+ console.log(` ✖ ${e.path} — md5 mismatch (expected ${e.expected.slice(0, 8)}, got ${e.actual.slice(0, 8)})`);
947
+ }
948
+ }
949
+ } else {
950
+ console.log(` Remote files: ✔ ${remoteCount} file(s) verified`);
951
+ }
952
+ }
953
+
954
+ if (result.ok) {
955
+ console.log("\n✔ All integrity checks passed.");
956
+ } else {
957
+ console.log("\n✖ Integrity issues detected. Run 'aethel status' to review.");
958
+ process.exitCode = 1;
959
+ }
960
+ }
961
+
851
962
  async function handleTui(options) {
852
963
  const repo = await openRepo(options, { requireWorkspace: false, silent: true });
853
964
  const cliArgs = [];
@@ -1020,6 +1131,7 @@ async function main() {
1020
1131
  .command("pull")
1021
1132
  .description("Download remote changes")
1022
1133
  .argument("[paths...]", "Specific paths to pull (default: all)")
1134
+ .option("--all", "Download all remote files regardless of snapshot state")
1023
1135
  .option("-m, --message <message>", "Commit message")
1024
1136
  .option("--force", "Force-pull conflicts (remote wins)")
1025
1137
  .option("--dry-run", "Preview changes without applying")
@@ -1080,6 +1192,13 @@ async function main() {
1080
1192
  .argument("<dest>", "Destination path (relative to workspace)")
1081
1193
  .action((source, dest, options) => handleMv(source, dest, options));
1082
1194
 
1195
+ addAuthOptions(
1196
+ program
1197
+ .command("verify")
1198
+ .description("Verify file integrity against last snapshot")
1199
+ .option("--remote", "Also verify remote files on Drive (requires network)")
1200
+ ).action(handleVerify);
1201
+
1083
1202
  addAuthOptions(
1084
1203
  program
1085
1204
  .command("tui")
package/src/core/auth.js CHANGED
@@ -40,8 +40,9 @@ export async function persistCredentials(sourcePath) {
40
40
  const resolved = path.resolve(sourcePath);
41
41
  if (resolved === dest) return;
42
42
  if (fsSyncFallback.existsSync(dest)) return;
43
- await fs.mkdir(CONFIG_DIR, { recursive: true });
43
+ await fs.mkdir(CONFIG_DIR, { recursive: true, mode: 0o700 });
44
44
  await fs.copyFile(resolved, dest);
45
+ await fs.chmod(dest, 0o600);
45
46
  }
46
47
 
47
48
  export function resolveCredentialsPath(customPath) {
@@ -109,8 +110,8 @@ function createOAuthClient(config, redirectUri) {
109
110
  }
110
111
 
111
112
  async function persistToken(tokenPath, credentials) {
112
- await fs.mkdir(path.dirname(path.resolve(tokenPath)), { recursive: true });
113
- await fs.writeFile(tokenPath, JSON.stringify(credentials, null, 2) + "\n");
113
+ await fs.mkdir(path.dirname(path.resolve(tokenPath)), { recursive: true, mode: 0o700 });
114
+ await fs.writeFile(tokenPath, JSON.stringify(credentials, null, 2) + "\n", { mode: 0o600 });
114
115
  }
115
116
 
116
117
  function attachTokenPersistence(client, tokenPath) {
@@ -2,6 +2,7 @@
2
2
  * .aethel/ directory management, configuration, and state persistence.
3
3
  */
4
4
 
5
+ import crypto from "node:crypto";
5
6
  import fs from "node:fs";
6
7
  import path from "node:path";
7
8
 
@@ -97,10 +98,28 @@ export function latestSnapshotPath(root) {
97
98
  return path.join(dot(root), SNAPSHOTS_DIR, LATEST_SNAPSHOT);
98
99
  }
99
100
 
100
- export function readLatestSnapshot(root) {
101
+ export function readLatestSnapshot(root, { verify = false } = {}) {
101
102
  const p = latestSnapshotPath(root);
102
103
  if (!fs.existsSync(p)) return null;
103
- return JSON.parse(fs.readFileSync(p, "utf-8"));
104
+ const snapshot = JSON.parse(fs.readFileSync(p, "utf-8"));
105
+
106
+ if (verify && snapshot._checksum) {
107
+ const canonical = JSON.stringify({
108
+ timestamp: snapshot.timestamp,
109
+ message: snapshot.message,
110
+ files: snapshot.files,
111
+ localFiles: snapshot.localFiles,
112
+ });
113
+ const actual = crypto.createHash("sha256").update(canonical).digest("hex");
114
+ if (actual !== snapshot._checksum) {
115
+ throw new Error(
116
+ `Snapshot integrity check failed: checksum mismatch. ` +
117
+ `The snapshot file may have been tampered with.`
118
+ );
119
+ }
120
+ }
121
+
122
+ return snapshot;
104
123
  }
105
124
 
106
125
  export function writeSnapshot(root, snapshot) {
@@ -111,7 +111,12 @@ export function isWorkspaceType(mime) {
111
111
  }
112
112
 
113
113
  function escapeDriveQueryValue(value) {
114
- return value.replace(/\\/g, "\\\\").replace(/'/g, "\\'");
114
+ // Google Drive API query strings use single-quoted values.
115
+ // Escape backslashes first, then single and double quotes.
116
+ return value
117
+ .replace(/\\/g, "\\\\")
118
+ .replace(/'/g, "\\'")
119
+ .replace(/"/g, '\\"');
115
120
  }
116
121
 
117
122
  export function iconForMime(mime) {
@@ -504,14 +509,36 @@ export async function downloadFile(drive, fileMeta, localPath) {
504
509
  { responseType: "stream" }
505
510
  );
506
511
  await pipeline(response.data, fs.createWriteStream(targetPath));
512
+ // Exported files have no md5Checksum from Drive — skip verification
507
513
  return;
508
514
  }
509
515
 
516
+ // Stream to disk while computing MD5 in parallel
517
+ const { createHash } = await import("node:crypto");
518
+ const md5 = createHash("md5");
519
+ const writeStream = fs.createWriteStream(localPath);
510
520
  const response = await drive.files.get(
511
521
  { fileId: fileMeta.id, alt: "media", supportsAllDrives: true },
512
522
  { responseType: "stream" }
513
523
  );
514
- await pipeline(response.data, fs.createWriteStream(localPath));
524
+
525
+ // Tee: pipe to both disk and hasher
526
+ response.data.on("data", (chunk) => md5.update(chunk));
527
+ await pipeline(response.data, writeStream);
528
+
529
+ // Verify integrity if Drive provided an md5
530
+ const expectedMd5 = fileMeta.md5Checksum;
531
+ if (expectedMd5) {
532
+ const actualMd5 = md5.digest("hex");
533
+ if (actualMd5 !== expectedMd5) {
534
+ // Remove corrupt file
535
+ fs.unlinkSync(localPath);
536
+ throw new Error(
537
+ `Integrity check failed for ${fileMeta.name}: ` +
538
+ `expected md5 ${expectedMd5}, got ${actualMd5}`
539
+ );
540
+ }
541
+ }
515
542
  }
516
543
 
517
544
  export async function uploadFile(
@@ -36,11 +36,18 @@ import {
36
36
  readRemoteCache,
37
37
  writeRemoteCache,
38
38
  } from "./remote-cache.js";
39
- import { buildSnapshot, scanLocal } from "./snapshot.js";
39
+ import {
40
+ buildSnapshot,
41
+ hashFile,
42
+ md5Local,
43
+ scanLocal,
44
+ verifySnapshotChecksum,
45
+ } from "./snapshot.js";
40
46
  import {
41
47
  stageChange,
42
48
  stageChanges,
43
49
  stageConflictResolution,
50
+ stageRemoteFilesForDownload,
44
51
  stagedEntries,
45
52
  unstageAll,
46
53
  unstagePath,
@@ -184,6 +191,10 @@ export class Repository {
184
191
  return stageChanges(this._root, changes);
185
192
  }
186
193
 
194
+ stageRemoteFilesForDownload(remoteFiles) {
195
+ return stageRemoteFilesForDownload(this._root, remoteFiles);
196
+ }
197
+
187
198
  unstagePath(targetPath) {
188
199
  return unstagePath(this._root, targetPath);
189
200
  }
@@ -329,6 +340,87 @@ export class Repository {
329
340
  return JSON.parse(fs.readFileSync(path.join(historyPath, match), "utf-8"));
330
341
  }
331
342
 
343
+ // ── Integrity verification ──────────────────────────────────────────
344
+
345
+ /**
346
+ * Full integrity verification of the workspace.
347
+ * Checks: snapshot checksum, local files vs snapshot md5, remote vs snapshot md5.
348
+ *
349
+ * @param {object} [options]
350
+ * @param {boolean} [options.checkRemote=false] Also verify remote checksums (requires network)
351
+ * @param {function} [options.onProgress] (done, total, path, status) callback
352
+ * @returns {Promise<{ ok: boolean, snapshot: object, local: object[], remote: object[] }>}
353
+ */
354
+ async verify({ checkRemote = false, onProgress } = {}) {
355
+ const snapshot = readLatestSnapshot(this._root, { verify: true });
356
+ const result = { ok: true, snapshot: { valid: true }, local: [], remote: [] };
357
+
358
+ if (!snapshot) {
359
+ return { ok: true, snapshot: { valid: true, reason: "no snapshot yet" }, local: [], remote: [] };
360
+ }
361
+
362
+ // 1. Snapshot integrity
363
+ const snapshotCheck = verifySnapshotChecksum(snapshot);
364
+ result.snapshot = snapshotCheck;
365
+ if (!snapshotCheck.valid) result.ok = false;
366
+
367
+ // 2. Local file integrity vs snapshot
368
+ const localFiles = snapshot.localFiles || {};
369
+ const entries = Object.entries(localFiles).filter(([, meta]) => !meta.isFolder);
370
+ const total = entries.length + (checkRemote ? Object.keys(snapshot.files || {}).length : 0);
371
+ let done = 0;
372
+
373
+ for (const [relativePath, meta] of entries) {
374
+ const absPath = path.join(this._root, ...relativePath.split("/"));
375
+ const entry = { path: relativePath, status: "ok" };
376
+
377
+ if (!fs.existsSync(absPath)) {
378
+ entry.status = "missing";
379
+ result.ok = false;
380
+ } else if (meta.md5) {
381
+ const actual = await md5Local(absPath);
382
+ if (actual !== meta.md5) {
383
+ entry.status = "modified";
384
+ entry.expected = meta.md5;
385
+ entry.actual = actual;
386
+ result.ok = false;
387
+ }
388
+ }
389
+
390
+ if (entry.status !== "ok") result.local.push(entry);
391
+ done++;
392
+ onProgress?.(done, total, relativePath, entry.status);
393
+ }
394
+
395
+ // 3. Remote integrity vs snapshot (optional, requires API call)
396
+ if (checkRemote) {
397
+ const remoteState = await this._loadRemoteState({ useCache: false });
398
+ const remoteById = new Map(remoteState.files.map((f) => [f.id, f]));
399
+
400
+ for (const [fileId, snapEntry] of Object.entries(snapshot.files || {})) {
401
+ if (snapEntry.isFolder) { done++; continue; }
402
+ const entry = { path: snapEntry.path || snapEntry.localPath, status: "ok" };
403
+ const remote = remoteById.get(fileId);
404
+
405
+ if (!remote) {
406
+ entry.status = "deleted_remote";
407
+ result.ok = false;
408
+ } else if (snapEntry.md5Checksum && remote.md5Checksum && snapEntry.md5Checksum !== remote.md5Checksum) {
409
+ entry.status = "modified_remote";
410
+ entry.expected = snapEntry.md5Checksum;
411
+ entry.actual = remote.md5Checksum;
412
+ result.ok = false;
413
+ }
414
+
415
+ if (entry.status !== "ok") result.remote.push(entry);
416
+ done++;
417
+ onProgress?.(done, total, entry.path, entry.status);
418
+ }
419
+ }
420
+
421
+ return result;
422
+ }
423
+
332
424
  // ── Private helpers ─────────────────────────────────────────────────
333
425
 
334
426
  async _loadRemoteState({ useCache = true } = {}) {
@@ -17,6 +17,48 @@ export async function md5Local(filePath) {
17
17
  });
18
18
  }
19
19
 
20
+ /**
21
+ * Stream-hash a file with the given algorithm (default sha256).
22
+ * Returns hex digest.
23
+ */
24
+ export async function hashFile(filePath, algorithm = "sha256") {
25
+ return new Promise((resolve, reject) => {
26
+ const hash = crypto.createHash(algorithm);
27
+ const stream = fs.createReadStream(filePath);
28
+
29
+ stream.on("data", (chunk) => hash.update(chunk));
30
+ stream.on("error", reject);
31
+ stream.on("end", () => resolve(hash.digest("hex")));
32
+ });
33
+ }
34
+
35
+ /**
36
+ * Compute a SHA-256 integrity checksum over the snapshot's data fields.
37
+ * The checksum covers files + localFiles + message + timestamp, but NOT
38
+ * the checksum field itself, so it can be verified after reading.
39
+ */
40
+ export function computeSnapshotChecksum(snapshot) {
41
+ const canonical = JSON.stringify({
42
+ timestamp: snapshot.timestamp,
43
+ message: snapshot.message,
44
+ files: snapshot.files,
45
+ localFiles: snapshot.localFiles,
46
+ });
47
+ return crypto.createHash("sha256").update(canonical).digest("hex");
48
+ }
49
+
50
+ /**
51
+ * Verify a snapshot's embedded checksum. Returns { valid, reason }; valid is
52
+ * true when the checksum matches or the snapshot has no checksum (pre-integrity snapshots).
53
+ */
54
+ export function verifySnapshotChecksum(snapshot) {
55
+ if (!snapshot?._checksum) return { valid: true, reason: "no checksum (legacy snapshot)" };
56
+ const expected = snapshot._checksum;
57
+ const actual = computeSnapshotChecksum(snapshot);
58
+ if (actual === expected) return { valid: true, reason: "checksum valid" };
59
+ return { valid: false, reason: `checksum mismatch: expected ${expected.slice(0, 12)}…, got ${actual.slice(0, 12)}…` };
60
+ }
61
+
20
62
  // ── Hash cache ───────────────────────────────────────────────────────
21
63
 
22
64
  function hashCachePath(root) {
@@ -227,10 +269,14 @@ export function buildSnapshot(remoteFiles, localFiles, message = "") {
227
269
  };
228
270
  }
229
271
 
230
- return {
272
+ const snapshot = {
231
273
  timestamp: new Date().toISOString(),
232
274
  message,
233
275
  files,
234
276
  localFiles: { ...localFiles },
235
277
  };
278
+
279
+ // Embed integrity checksum
280
+ snapshot._checksum = computeSnapshotChecksum(snapshot);
281
+ return snapshot;
236
282
  }
@@ -50,6 +50,26 @@ export function stageChanges(root, changes) {
50
50
  return changes.length;
51
51
  }
52
52
 
53
+ export function stageRemoteFilesForDownload(root, remoteFiles) {
54
+ const index = readIndex(root);
55
+ const byPath = new Map((index.staged || []).map((entry) => [entry.path, entry]));
56
+
57
+ for (const remoteFile of remoteFiles) {
58
+ byPath.set(remoteFile.path, {
59
+ action: "download",
60
+ path: remoteFile.path,
61
+ localPath: remoteFile.path,
62
+ fileId: remoteFile.id,
63
+ remotePath: remoteFile.path,
64
+ ...(remoteFile.isFolder ? { isFolder: true } : {}),
65
+ });
66
+ }
67
+
68
+ index.staged = [...byPath.values()];
69
+ writeIndex(root, index);
70
+ return remoteFiles.length;
71
+ }
72
+
53
73
  export function unstagePath(root, targetPath) {
54
74
  const index = readIndex(root);
55
75
  const staged = index.staged || [];
package/src/core/sync.js CHANGED
@@ -2,6 +2,7 @@ import fs from "node:fs";
2
2
  import path from "node:path";
3
3
  import { readConfig, readIndex, writeIndex } from "./config.js";
4
4
  import { downloadFile, ensureFolder, trashFile, uploadFile } from "./drive-api.js";
5
+ import { md5Local } from "./snapshot.js";
5
6
 
6
7
  function readPositiveIntEnv(name, fallback) {
7
8
  const rawValue = Number.parseInt(process.env[name] || "", 10);
@@ -11,7 +12,12 @@ function readPositiveIntEnv(name, fallback) {
11
12
  const CONCURRENCY = readPositiveIntEnv("AETHEL_DRIVE_CONCURRENCY", 10);
12
13
 
13
14
  function toLocalAbsolutePath(root, relativePath) {
14
- return path.join(root, ...relativePath.split("/"));
15
+ const abs = path.resolve(root, ...relativePath.split("/"));
16
+ const resolvedRoot = path.resolve(root);
17
+ if (!abs.startsWith(resolvedRoot + path.sep) && abs !== resolvedRoot) {
18
+ throw new Error(`Path traversal blocked: ${relativePath} resolves outside workspace`);
19
+ }
20
+ return abs;
15
21
  }
16
22
 
17
23
  export class CommitResult {
@@ -102,10 +108,22 @@ async function uploadStagedFile(drive, entry, root, driveFolderId) {
102
108
  parentId = await ensureFolder(drive, parentPath, driveFolderId);
103
109
  }
104
110
 
105
- await uploadFile(drive, localAbsolutePath, remotePath, {
111
+ const uploadResult = await uploadFile(drive, localAbsolutePath, remotePath, {
106
112
  parentId,
107
113
  existingId: entry.fileId || null,
108
114
  });
115
+
116
+ // Verify: Drive-returned md5 must match the local file we just uploaded.
117
+ // Google Workspace files (Docs, Sheets, etc.) don't have md5 — skip them.
118
+ if (uploadResult?.md5Checksum) {
119
+ const localMd5 = await md5Local(localAbsolutePath);
120
+ if (localMd5 !== uploadResult.md5Checksum) {
121
+ throw new Error(
122
+ `Upload integrity check failed for ${remotePath}: ` +
123
+ `local md5 ${localMd5}, Drive returned ${uploadResult.md5Checksum}`
124
+ );
125
+ }
126
+ }
109
127
  }
110
128
 
111
129
  async function deleteLocalFile(entry, root) {
@@ -194,6 +212,7 @@ export async function executeStaged(drive, root, progress) {
194
212
  // Remote operations (download, upload, delete_remote) share a concurrency pool.
195
213
  const localDeletes = [];
196
214
  const remoteOps = [];
215
+ const failedPaths = new Set();
197
216
 
198
217
  for (const [i, entry] of staged.entries()) {
199
218
  if (entry.action === "delete_local") {
@@ -210,6 +229,7 @@ export async function executeStaged(drive, root, progress) {
210
229
  await deleteLocalFile(entry, root);
211
230
  result.deletedLocal++;
212
231
  } catch (err) {
232
+ failedPaths.add(entry.path);
213
233
  result.errors.push(`delete_local ${entry.path}: ${err.message}`);
214
234
  }
215
235
  })
@@ -242,13 +262,20 @@ export async function executeStaged(drive, root, progress) {
242
262
  completed++;
243
263
  const op = remoteOps[idx];
244
264
  if (err) {
265
+ failedPaths.add(op.entry.path);
245
266
  result.errors.push(`${op.entry.action} ${op.entry.path}: ${err.message}`);
246
267
  }
247
268
  progress?.(completed - 1, staged.length, op.entry.action, path.posix.basename(op.entry.path || ""));
248
269
  });
249
270
 
250
271
  progress?.(staged.length, staged.length, "done", "");
251
- index.staged = [];
272
+
273
+ // Only clear succeeded entries — keep failed ones staged for retry
274
+ if (failedPaths.size > 0) {
275
+ index.staged = staged.filter((e) => failedPaths.has(e.path));
276
+ } else {
277
+ index.staged = [];
278
+ }
252
279
  writeIndex(root, index);
253
280
 
254
281
  return result;