aethel 0.3.4 → 0.3.6

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.3.6 (2026-04-05)
4
+
5
+ - Optimize status and saveSnapshot performance: parallelize loadState, skip redundant fetches, increase hash concurrency
6
+
7
+ ## 0.3.5 (2026-04-05)
8
+
9
+ - Add progress bars and spinners for all time-consuming CLI operations
10
+
3
11
  ## 0.3.4 (2026-04-05)
4
12
 
5
13
  - Add empty folder sync support between local and Google Drive
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "aethel",
3
- "version": "0.3.4",
3
+ "version": "0.3.6",
4
4
  "description": "Git-style Google Drive sync CLI with interactive TUI",
5
5
  "type": "module",
6
6
  "license": "MIT",
package/src/cli.js CHANGED
@@ -31,6 +31,7 @@ import {
31
31
  DuplicateFoldersError,
32
32
  } from "./core/drive-api.js";
33
33
  import { createDefaultIgnoreFile, loadIgnoreRules } from "./core/ignore.js";
34
+ import { createProgressBar, createSpinner } from "./core/progress.js";
34
35
  import { Repository } from "./core/repository.js";
35
36
  import { runTui } from "./tui/index.js";
36
37
 
@@ -42,16 +43,36 @@ function addAuthOptions(command) {
42
43
  .option("--token <path>", "Path to cached OAuth token JSON");
43
44
  }
44
45
 
45
- async function openRepo(options, { requireWorkspace = true } = {}) {
46
+ async function openRepo(options, { requireWorkspace = true, silent = false } = {}) {
46
47
  const root = requireWorkspace ? requireRoot() : null;
47
48
  const repo = new Repository(root, {
48
49
  credentials: options.credentials,
49
50
  token: options.token,
50
51
  });
51
- await repo.connect();
52
+ const spinner = silent ? null : createSpinner("Connecting to Google Drive...");
53
+ try {
54
+ await repo.connect();
55
+ spinner?.succeed("Connected to Google Drive");
56
+ } catch (err) {
57
+ spinner?.fail("Connection failed");
58
+ throw err;
59
+ }
52
60
  return repo;
53
61
  }
54
62
 
63
+ async function loadStateWithProgress(repo, opts) {
64
+ const spinner = createSpinner("Loading workspace state...");
65
+ try {
66
+ const state = await repo.loadState(opts);
67
+ const n = state.diff.changes.length;
68
+ spinner.succeed(n ? `Loaded state — ${n} change(s) detected` : "Loaded state — everything up to date");
69
+ return state;
70
+ } catch (err) {
71
+ spinner.fail("Failed to load workspace state");
72
+ throw err;
73
+ }
74
+ }
75
+
55
76
  function matchesPattern(targetPath, pattern) {
56
77
  if (targetPath === pattern) {
57
78
  return true;
@@ -117,7 +138,9 @@ function requireConfirmation(options) {
117
138
 
118
139
  async function handleAuth(options) {
119
140
  const repo = await openRepo(options, { requireWorkspace: false });
141
+ const spinner = createSpinner("Fetching account info...");
120
142
  const account = await repo.getAccountInfo();
143
+ spinner.succeed(`Authenticated as ${account.email}`);
121
144
 
122
145
  const credentialsPath = resolveCredentialsPath(options.credentials);
123
146
  await persistCredentials(credentialsPath);
@@ -134,7 +157,9 @@ async function handleAuth(options) {
134
157
  async function handleClean(options) {
135
158
  requireConfirmation(options);
136
159
  const repo = await openRepo(options, { requireWorkspace: false });
160
+ const spinner = createSpinner("Listing remote files...");
137
161
  const files = await repo.listRemoteFiles({ includeSharedDrives: Boolean(options.sharedDrives) });
162
+ spinner.succeed(`Found ${files.length} file(s) on Drive`);
138
163
 
139
164
  printCleanerPlan(files, options);
140
165
 
@@ -148,13 +173,15 @@ async function handleClean(options) {
148
173
  return;
149
174
  }
150
175
 
176
+ const bar = createProgressBar(`Cleaning ${files.length} file(s)`, files.length);
151
177
  const result = await repo.batchOperateFiles(files, {
152
178
  permanent: Boolean(options.permanent),
153
179
  includeSharedDrives: Boolean(options.sharedDrives),
154
- onProgress: (done, total, verb, name) => {
155
- console.log(`[${done}/${total}] ${verb}: ${name}`);
180
+ onProgress: (done) => {
181
+ bar.update(done);
156
182
  },
157
183
  });
184
+ bar.done(`Cleaned ${files.length} file(s)`);
158
185
 
159
186
  if (result.errors) {
160
187
  console.log(`Completed with ${result.errors} error(s) out of ${files.length} file(s).`);
@@ -171,8 +198,9 @@ async function handleInit(options) {
171
198
  // Interactive folder selection when no --drive-folder is provided
172
199
  if (!driveFolderId) {
173
200
  const repo = await openRepo(options, { requireWorkspace: false });
174
- console.log("Fetching root-level Drive folders...");
201
+ const spinner = createSpinner("Fetching root-level Drive folders...");
175
202
  const folders = await repo.listRootFolders();
203
+ spinner.succeed(`Found ${folders.length} folder(s) in Drive root`);
176
204
 
177
205
  if (folders.length === 0) {
178
206
  console.log("No folders found in Drive root. Syncing entire My Drive.");
@@ -225,7 +253,7 @@ async function handleInit(options) {
225
253
 
226
254
  async function handleStatus(options) {
227
255
  const repo = await openRepo(options);
228
- const { diff } = await repo.loadState();
256
+ const { diff } = await loadStateWithProgress(repo);
229
257
  const staged = repo.getStagedEntries();
230
258
 
231
259
  if (diff.isClean && staged.length === 0) {
@@ -264,7 +292,7 @@ async function handleStatus(options) {
264
292
 
265
293
  async function handleDiff(options) {
266
294
  const repo = await openRepo(options);
267
- const { diff } = await repo.loadState();
295
+ const { diff } = await loadStateWithProgress(repo);
268
296
 
269
297
  if (diff.isClean) {
270
298
  console.log("No changes detected.");
@@ -298,7 +326,7 @@ async function handleDiff(options) {
298
326
 
299
327
  async function handleAdd(paths, options) {
300
328
  const repo = await openRepo(options);
301
- const { diff } = await repo.loadState();
329
+ const { diff } = await loadStateWithProgress(repo);
302
330
 
303
331
  if (options.all) {
304
332
  const toStage = diff.changes.filter(
@@ -357,7 +385,7 @@ function handleReset(paths, options) {
357
385
  }
358
386
  }
359
387
 
360
- async function handleCommit(options, { repo: existingRepo } = {}) {
388
+ async function handleCommit(options, { repo: existingRepo, snapshotHint } = {}) {
361
389
  const repo = existingRepo || await openRepo(options);
362
390
  const staged = repo.getStagedEntries();
363
391
 
@@ -367,25 +395,24 @@ async function handleCommit(options, { repo: existingRepo } = {}) {
367
395
  }
368
396
 
369
397
  const message = options.message || "sync";
398
+ const bar = createProgressBar(`Syncing ${staged.length} change(s)`, staged.length);
370
399
 
371
- console.log(`Committing ${staged.length} change(s)...`);
372
-
373
- const result = await repo.executeStaged((done, total, verb, name) => {
374
- if (done < total) {
375
- console.log(` [${done + 1}/${total}] ${verb}: ${name}`);
376
- }
400
+ const result = await repo.executeStaged((done) => {
401
+ bar.update(done + 1);
377
402
  });
378
403
 
379
- console.log(`\nCommit complete: ${result.summary}`);
404
+ bar.done(`Commit complete: ${result.summary}`);
380
405
  if (result.errors.length) {
381
406
  for (const error of result.errors) {
382
407
  console.log(` ERROR: ${error}`);
383
408
  }
384
409
  }
385
410
 
386
- console.log("Saving snapshot...");
387
- await repo.saveSnapshot(message);
388
- console.log(`Snapshot saved: "${message}"`);
411
+ const spinner = createSpinner("Saving snapshot...");
412
+ // snapshotHint lets callers (pull/push) pass pre-loaded state
413
+ // so saveSnapshot skips redundant API calls / fs scans.
414
+ await repo.saveSnapshot(message, snapshotHint);
415
+ spinner.succeed(`Snapshot saved: "${message}"`);
389
416
  }
390
417
 
391
418
  function handleLog(options) {
@@ -409,10 +436,10 @@ async function handleFetch(options) {
409
436
  const repo = await openRepo(options);
410
437
 
411
438
  repo.invalidateRemoteCache();
412
- console.log("Fetching remote file list...");
439
+ const spinner = createSpinner("Fetching remote file list...");
413
440
  const remoteState = await repo.getRemoteState({ useCache: false });
414
441
  const remote = remoteState.files;
415
- console.log(`Found ${remote.length} file(s) on Drive.`);
442
+ spinner.succeed(`Found ${remote.length} file(s) on Drive`);
416
443
 
417
444
  const snapshot = repo.getSnapshot();
418
445
  if (snapshot) {
@@ -445,7 +472,7 @@ async function handleFetch(options) {
445
472
 
446
473
  async function handlePull(paths, options) {
447
474
  const repo = await openRepo(options);
448
- const { diff } = await repo.loadState({ useCache: false });
475
+ const { diff, remoteState } = await loadStateWithProgress(repo, { useCache: false });
449
476
 
450
477
  let remoteChanges = diff.changes.filter((change) =>
451
478
  [
@@ -489,12 +516,16 @@ async function handlePull(paths, options) {
489
516
 
490
517
  const count = repo.stageChanges(remoteChanges);
491
518
  console.log(`Staged ${count} remote change(s). Committing...`);
492
- await handleCommit({ ...options, message: options.message || "pull" }, { repo });
519
+ // Pull downloads remote→local: remote state unchanged, only re-scan local
520
+ await handleCommit({ ...options, message: options.message || "pull" }, {
521
+ repo,
522
+ snapshotHint: { remote: remoteState },
523
+ });
493
524
  }
494
525
 
495
526
  async function handlePush(paths, options) {
496
527
  const repo = await openRepo(options);
497
- const { diff } = await repo.loadState({ useCache: false });
528
+ const { diff, local } = await loadStateWithProgress(repo, { useCache: false });
498
529
 
499
530
  let localChanges = diff.changes.filter((change) =>
500
531
  [
@@ -538,12 +569,16 @@ async function handlePush(paths, options) {
538
569
 
539
570
  const count = repo.stageChanges(localChanges);
540
571
  console.log(`Staged ${count} local change(s). Committing...`);
541
- await handleCommit({ ...options, message: options.message || "push" }, { repo });
572
+ // Push uploads local→remote: local state unchanged, only re-fetch remote
573
+ await handleCommit({ ...options, message: options.message || "push" }, {
574
+ repo,
575
+ snapshotHint: { local },
576
+ });
542
577
  }
543
578
 
544
579
  async function handleResolve(paths, options) {
545
580
  const repo = await openRepo(options);
546
- const { diff } = await repo.loadState();
581
+ const { diff } = await loadStateWithProgress(repo);
547
582
  const conflicts = diff.conflicts;
548
583
 
549
584
  if (conflicts.length === 0) {
@@ -719,7 +754,7 @@ async function handleRestore(paths, options) {
719
754
  }
720
755
 
721
756
  const localDest = path.join(root, entry.localPath || entry.path);
722
- console.log(` Restoring ${targetPath} from Drive...`);
757
+ const spinner = createSpinner(`Restoring ${targetPath}...`);
723
758
 
724
759
  try {
725
760
  const meta = await repo.drive.files.get({
@@ -729,16 +764,16 @@ async function handleRestore(paths, options) {
729
764
 
730
765
  const { downloadFile } = await import("./core/drive-api.js");
731
766
  await downloadFile(repo.drive, { ...meta.data, id: entry.id }, localDest);
732
- console.log(` Restored: ${targetPath}`);
767
+ spinner.succeed(`Restored: ${targetPath}`);
733
768
  } catch (err) {
734
- console.log(` Failed to restore ${targetPath}: ${err.message}`);
769
+ spinner.fail(`Failed to restore ${targetPath}: ${err.message}`);
735
770
  }
736
771
  }
737
772
  }
738
773
 
739
774
  async function handleRm(paths, options) {
740
775
  const repo = await openRepo(options);
741
- const { diff } = await repo.loadState();
776
+ const { diff } = await loadStateWithProgress(repo);
742
777
  const root = repo.root;
743
778
 
744
779
  for (const targetPath of paths) {
@@ -784,7 +819,7 @@ async function handleMv(source, dest, options) {
784
819
  }
785
820
 
786
821
  async function handleTui(options) {
787
- const repo = await openRepo(options, { requireWorkspace: false });
822
+ const repo = await openRepo(options, { requireWorkspace: false, silent: true });
788
823
  const cliArgs = [];
789
824
  if (options.credentials) {
790
825
  cliArgs.push("--credentials", options.credentials);
@@ -819,6 +854,7 @@ async function handleDedupeFolders(options) {
819
854
  const config = repo.getConfig();
820
855
  const rootFolderId = config.drive_folder_id || null;
821
856
  const ignoreRules = loadIgnoreRules(repo.root);
857
+ const dedupeSpinner = createSpinner("Scanning for duplicate folders...");
822
858
  const result = await dedupeDuplicateFolders(repo.drive, rootFolderId, {
823
859
  execute: Boolean(options.execute),
824
860
  ignoreRules,
@@ -848,8 +884,9 @@ async function handleDedupeFolders(options) {
848
884
  },
849
885
  });
850
886
 
887
+ dedupeSpinner.succeed(`Scan complete — ${result.duplicateFolders.length} duplicate group(s) found`);
888
+
851
889
  if (result.duplicateFolders.length === 0) {
852
- console.log("No duplicate folders detected.");
853
890
  return;
854
891
  }
855
892
 
@@ -113,5 +113,6 @@ export function writeSnapshot(root, snapshot) {
113
113
  fs.copyFileSync(latest, path.join(snapDir, HISTORY_DIR, `${ts}.json`));
114
114
  }
115
115
 
116
- fs.writeFileSync(latest, JSON.stringify(snapshot, null, 2) + "\n");
116
+ // Compact JSON — snapshots can be large, pretty-printing is slow + wastes disk
117
+ fs.writeFileSync(latest, JSON.stringify(snapshot) + "\n");
117
118
  }
@@ -0,0 +1,90 @@
1
+ /**
2
+ * Lightweight terminal progress indicators (spinner + bar).
3
+ * Writes to stderr so stdout stays clean for piped output.
4
+ */
5
+
6
+ const SPINNER_FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
7
+ const SPINNER_INTERVAL = 80;
8
+ const BAR_WIDTH = 25;
9
+
10
+ const isTTY = process.stderr.isTTY;
11
+
12
+ function clearLine() {
13
+ if (isTTY) process.stderr.write("\r\x1b[K");
14
+ }
15
+
16
+ export function createSpinner(message) {
17
+ if (!isTTY) {
18
+ process.stderr.write(`${message}\n`);
19
+ return {
20
+ update() {},
21
+ succeed(msg) { if (msg) process.stderr.write(`${msg}\n`); },
22
+ fail(msg) { if (msg) process.stderr.write(`${msg}\n`); },
23
+ stop() {},
24
+ };
25
+ }
26
+
27
+ let frame = 0;
28
+ let currentMessage = message;
29
+
30
+ const timer = setInterval(() => {
31
+ clearLine();
32
+ process.stderr.write(`${SPINNER_FRAMES[frame % SPINNER_FRAMES.length]} ${currentMessage}`);
33
+ frame++;
34
+ }, SPINNER_INTERVAL);
35
+
36
+ return {
37
+ update(msg) { currentMessage = msg; },
38
+ succeed(msg) {
39
+ clearInterval(timer);
40
+ clearLine();
41
+ process.stderr.write(`✔ ${msg || currentMessage}\n`);
42
+ },
43
+ fail(msg) {
44
+ clearInterval(timer);
45
+ clearLine();
46
+ process.stderr.write(`✖ ${msg || currentMessage}\n`);
47
+ },
48
+ stop() {
49
+ clearInterval(timer);
50
+ clearLine();
51
+ },
52
+ };
53
+ }
54
+
55
+ export function createProgressBar(label, total) {
56
+ let lastRendered = -1;
57
+
58
+ function render(current) {
59
+ if (current === lastRendered) return;
60
+ lastRendered = current;
61
+
62
+ const ratio = total > 0 ? Math.min(current / total, 1) : 0;
63
+ const filled = Math.round(BAR_WIDTH * ratio);
64
+ const empty = BAR_WIDTH - filled;
65
+ const pct = Math.round(ratio * 100);
66
+ const bar = "█".repeat(filled) + "░".repeat(empty);
67
+ const line = `${label} [${bar}] ${current}/${total} (${pct}%)`;
68
+
69
+ if (isTTY) {
70
+ clearLine();
71
+ process.stderr.write(line);
72
+ }
73
+ }
74
+
75
+ // Initial render
76
+ render(0);
77
+
78
+ return {
79
+ update(current) { render(current); },
80
+ done(msg) {
81
+ render(total);
82
+ if (isTTY) {
83
+ clearLine();
84
+ process.stderr.write(`✔ ${msg || label}\n`);
85
+ } else {
86
+ process.stderr.write(`${msg || label}\n`);
87
+ }
88
+ },
89
+ };
90
+ }
@@ -45,6 +45,7 @@ export function readRemoteCache(root, rootFolderId = null, ttlMs = DEFAULT_TTL_M
45
45
 
46
46
  export function writeRemoteCache(root, remoteState, rootFolderId = null) {
47
47
  const p = cachePath(root);
48
+ // Compact JSON — cache can be large with many files
48
49
  fs.writeFileSync(
49
50
  p,
50
51
  JSON.stringify({
@@ -112,17 +112,19 @@ export class Repository {
112
112
  async loadState({ useCache = true } = {}) {
113
113
  const config = this.getConfig();
114
114
 
115
- const [local, snapshot] = await Promise.all([
115
+ // Run all three in parallel — remote fetch is the slowest, overlap it
116
+ // with local scan and snapshot read.
117
+ const [local, snapshot, remoteState] = await Promise.all([
116
118
  scanLocal(this._root),
117
119
  Promise.resolve(readLatestSnapshot(this._root)),
120
+ this._loadRemoteState({ useCache }),
118
121
  ]);
119
-
120
- const remoteState = await this._loadRemoteState({ useCache });
121
122
  const remote = remoteState.files;
122
123
 
123
124
  return {
124
125
  config,
125
126
  remote,
127
+ remoteState,
126
128
  local,
127
129
  snapshot,
128
130
  diff: computeDiff(snapshot, remote, local, { root: this._root }),
@@ -178,20 +180,31 @@ export class Repository {
178
180
  }
179
181
 
180
182
  /**
181
- * Invalidate cache, re-fetch remote + re-scan local, write snapshot.
183
+ * Build and persist a new snapshot.
184
+ *
185
+ * @param {string} message
186
+ * @param {object} [preloaded]
187
+ * @param {object} [preloaded.remote] Reuse this remote state (skip API call)
188
+ * @param {object} [preloaded.local] Reuse this local scan (skip fs walk)
182
189
  */
183
- async saveSnapshot(message = "sync") {
190
+ async saveSnapshot(message = "sync", { remote, local } = {}) {
184
191
  const config = this.getConfig();
185
192
  const rootFolderId = config.drive_folder_id || null;
186
193
 
187
- invalidateRemoteCache(this._root);
188
- const [remoteState, local] = await Promise.all([
189
- getRemoteState(this.drive, rootFolderId),
190
- scanLocal(this._root),
194
+ // Only fetch what wasn't pre-loaded, in parallel.
195
+ const needRemote = !remote;
196
+ const needLocal = !local;
197
+
198
+ if (needRemote) invalidateRemoteCache(this._root);
199
+
200
+ const [remoteState, localFiles] = await Promise.all([
201
+ needRemote ? getRemoteState(this.drive, rootFolderId) : remote,
202
+ needLocal ? scanLocal(this._root) : local,
191
203
  ]);
204
+
192
205
  assertNoDuplicateFolders(remoteState.duplicateFolders);
193
206
  writeRemoteCache(this._root, remoteState, rootFolderId);
194
- writeSnapshot(this._root, buildSnapshot(remoteState.files, local, message));
207
+ writeSnapshot(this._root, buildSnapshot(remoteState.files, localFiles, message));
195
208
  }
196
209
 
197
210
  // ── Cache management ────────────────────────────────────────────────
@@ -42,7 +42,7 @@ function saveHashCache(root, cache) {
42
42
 
43
43
  // ── Scanning ─────────────────────────────────────────────────────────
44
44
 
45
- const PARALLEL_HASH_LIMIT = 32;
45
+ const PARALLEL_HASH_LIMIT = 128;
46
46
 
47
47
  export async function scanLocal(root, { respectIgnore = true } = {}) {
48
48
  const resolvedRoot = path.resolve(root);
@@ -54,7 +54,8 @@ export async function scanLocal(root, { respectIgnore = true } = {}) {
54
54
  const filesToHash = [];
55
55
  // Track directories and their child counts to detect empty folders
56
56
  const dirChildCount = new Map();
57
- const dirStats = new Map();
57
+ // Map relative dir path → absolute path (for deferred stat on empty dirs only)
58
+ const dirAbsPath = new Map();
58
59
 
59
60
  async function walk(currentPath) {
60
61
  let entries;
@@ -72,6 +73,7 @@ export async function scanLocal(root, { respectIgnore = true } = {}) {
72
73
  if (relativeDirPath !== null) {
73
74
  if (!dirChildCount.has(relativeDirPath)) {
74
75
  dirChildCount.set(relativeDirPath, 0);
76
+ dirAbsPath.set(relativeDirPath, currentPath);
75
77
  }
76
78
  }
77
79
 
@@ -110,12 +112,6 @@ export async function scanLocal(root, { respectIgnore = true } = {}) {
110
112
 
111
113
  if (relativeDirPath !== null) {
112
114
  dirChildCount.set(relativeDirPath, trackedChildren);
113
- try {
114
- const stat = await fs.promises.stat(currentPath);
115
- dirStats.set(relativeDirPath, stat);
116
- } catch {
117
- // ignore
118
- }
119
115
  }
120
116
 
121
117
  await Promise.all([
@@ -171,16 +167,24 @@ export async function scanLocal(root, { respectIgnore = true } = {}) {
171
167
  }
172
168
  }
173
169
 
174
- for (const dirPath of emptyDirs) {
175
- const stat = dirStats.get(dirPath);
170
+ // Only stat the empty directories (not all directories)
171
+ await Promise.all([...emptyDirs].map(async (dirPath) => {
172
+ let mtime = new Date().toISOString();
173
+ const absPath = dirAbsPath.get(dirPath);
174
+ if (absPath) {
175
+ try {
176
+ const stat = await fs.promises.stat(absPath);
177
+ mtime = new Date(stat.mtimeMs).toISOString();
178
+ } catch { /* ignore */ }
179
+ }
176
180
  result[dirPath] = {
177
181
  localPath: dirPath,
178
182
  isFolder: true,
179
183
  size: 0,
180
184
  md5: null,
181
- modifiedTime: stat ? new Date(stat.mtimeMs).toISOString() : new Date().toISOString(),
185
+ modifiedTime: mtime,
182
186
  };
183
- }
187
+ }));
184
188
 
185
189
  // Persist updated cache
186
190
  saveHashCache(resolvedRoot, nextCache);