gdrive-syncer 3.1.0 → 3.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/docs/PERFORMANCE_DECISIONS.md ADDED
@@ -0,0 +1,142 @@
# Performance Optimization Decisions

This document records design decisions made for performance optimizations, including the rationale and trade-offs considered.

## Overview

The diff/sync operations were slow with nested folders and many files (e.g., 30+ files across multiple folder levels). Root cause: all Google Drive API calls were sequential and blocking.

---

## Decision 1: Parallel API Calls (Implemented)

**Problem:** Sequential API calls caused O(n) latency, where n = number of files + folders.

**Solution:** Use async/parallel operations with batched concurrency.

**Implementation:**
- `listAsync()` - Non-blocking folder listing using `child_process.exec` with promises
- `downloadAsync()` - Non-blocking file downloads
- `downloadParallel()` - Batch downloads with configurable concurrency
- `listDriveFilesRecursiveOptimized()` - Breadth-first traversal querying multiple folders in parallel

**Concurrency Limit:** 5 concurrent requests per batch

**Rationale for batch size of 5:**
- Google Drive API limit: ~1,000 queries per 100 seconds per user (~10/sec sustained)
- A batch size of 5 is conservative, leaving headroom for other operations
- It can be increased to 8-10 if needed, but with diminishing returns due to network latency

**Expected Performance Improvement:**

| Scenario | Before (sequential) | After (parallel, batch=5) |
|----------|---------------------|---------------------------|
| 30 files in nested folders | ~10-15 seconds | ~3-5 seconds |
| Download 30 files | ~6-15 seconds | ~1.5-3 seconds |
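
The batching pattern itself is small. A minimal sketch of the approach, simplified from the `downloadParallel()`/`listDriveFilesRecursiveOptimized()` implementations (`tasks` here is a hypothetical array of async thunks standing in for Drive calls):

```javascript
// Run async tasks in batches: each batch executes in parallel,
// batches run sequentially, capping in-flight requests at batchSize.
const runBatched = async (tasks, batchSize = 5) => {
  const results = [];
  for (let i = 0; i < tasks.length; i += batchSize) {
    const batch = tasks.slice(i, i + batchSize);
    results.push(...(await Promise.all(batch.map((task) => task()))));
  }
  return results;
};
```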

---

## Decision 2: Session-Scoped Folder Caching (Implemented)

**Problem:** When uploading multiple files to the same nested folder structure (e.g., 10 files to `root/a/b/c/`), the `findOrCreateDriveFolder` function would query the same parent folders repeatedly.

**Solution:** A session-scoped folder cache that is cleared at the start of each sync operation.

**Implementation:**
- `folderCache` - Map structure: `parentId -> Map(folderName -> folderId)`
- `clearFolderCache()` - Called at the start of `runSyncOperation()` to ensure fresh data
- `getCachedFolderId()` / `cacheFolderId()` - Cache lookup and storage helpers
- The cache is used in `findOrCreateDriveFolder()` to avoid redundant API calls

**Why this is safe:**
1. **Fresh data on every operation** - The cache is cleared at the start, so we always query Drive for the current state
2. **Short-lived** - The cache only exists for the duration of a single sync (seconds)
3. **Consistent within an operation** - The folder structure shouldn't change during a 3-5 second sync

**Benefit:** If uploading 10 files to `root/a/b/c/`:
- Without the cache: query `a`, `b`, `c` 10 times each = 30 API calls
- With the cache: query `a`, `b`, `c` once each = 3 API calls

**Trade-off considered:** Risk of stale data if a folder is modified mid-sync.
- Acceptable risk given the short operation duration (seconds)
- The user can simply re-run the sync if a concurrent modification occurs
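
The cache shape and helpers are compact; this is essentially what ships in `src/envSync.js`:

```javascript
// Session-scoped cache: parentId -> Map(folderName -> folderId)
let folderCache = new Map();

// Look up a cached folder ID, or null on a cache miss
const getCachedFolderId = (parentId, folderName) => {
  const parentCache = folderCache.get(parentId);
  return parentCache ? parentCache.get(folderName) || null : null;
};

// Record a folder ID for later lookups within the same operation
const cacheFolderId = (parentId, folderName, folderId) => {
  if (!folderCache.has(parentId)) {
    folderCache.set(parentId, new Map());
  }
  folderCache.get(parentId).set(folderName, folderId);
};
```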

---

## Decision 3: Breadth-First vs Depth-First Listing

**Problem:** The original `listDriveFilesRecursive` used depth-first traversal, which creates a chain of sequential blocking calls.

**Solution:** Breadth-first traversal with level-by-level parallel queries.

**How it works:**
```
Level 0: Query root folder (1 API call)
Level 1: Query all subfolders in parallel (e.g., 5 folders = 1 batch)
Level 2: Query all sub-subfolders in parallel (e.g., 10 folders = 2 batches)
...
```

**Benefit:** Folders at the same depth are queried concurrently instead of sequentially.

**Trade-off:** Slightly more memory to hold the queue, but negligible for typical folder structures.
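
Condensed from `listDriveFilesRecursiveOptimized()`, the core loop looks roughly like this (`listFolder` is a hypothetical helper that resolves one folder's contents; the real code batches these calls in groups of 5):

```javascript
// Drain one level per iteration; every folder at that level is queried in parallel.
let queue = [{ id: rootFolderId, prefix: '' }];
const allItems = [];

while (queue.length > 0) {
  const level = queue;
  queue = [];
  // listFolder({ id, prefix }) -> Promise<{ items, folders }>
  const levelResults = await Promise.all(level.map(listFolder));
  for (const { items, folders } of levelResults) {
    allItems.push(...items); // files found at this level
    queue.push(...folders);  // subfolders become the next level
  }
}
```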

---

## Decision 4: Delta Sync (Implemented)

**Problem:** Previously, ALL files were downloaded from Drive to a temp directory for comparison, even if they hadn't changed. With 30 files, this meant 30 downloads on every sync.

**Solution:** Compare file metadata (size + modification time) before downloading. Only download files where the metadata indicates a potential change.

**Implementation:**
- `parseListOutput()` now extracts `sizeBytes` and `modifiedTime` from gdrive output
- `listLocalFilesRecursive()` now includes `sizeBytes` and `modifiedTime` from `fs.statSync()`
- Comparison logic in `runSyncOperation()`:
  1. Get local files with metadata
  2. For each Drive file, check if a local copy exists with the same size AND the local copy is not older
  3. Skip the download for unchanged files
  4. Only download and compare files where the metadata differs

**Unchanged file detection:**
```javascript
const sizeMatch = driveFile.sizeBytes === localFile.sizeBytes;
// Epoch milliseconds; a missing timestamp falls back to 0 (always re-download)
const driveTime = driveFile.modifiedTime ? driveFile.modifiedTime.getTime() : 0;
const localTime = localFile.modifiedTime ? localFile.modifiedTime.getTime() : 0;
const timeMatch = driveTime <= localTime + 1000; // 1 second tolerance

if (sizeMatch && timeMatch) {
  // Skip download - metadata indicates unchanged
}
```

**Performance improvement:**

| Scenario | Before | After |
|----------|--------|-------|
| 30 files, none changed | Download 30 files | Download 0 files |
| 30 files, 5 changed | Download 30 files | Download 5 files |
| 30 files, all changed | Download 30 files | Download 30 files |

**Trade-offs:**
- Relies on accurate timestamps from Google Drive
- 1-second tolerance to handle clock differences
- A file with the same size whose Drive copy is not newer than the local copy could be missed despite differing content (rare edge case)

**Why not use hashes?**
- Google Drive provides `md5Checksum`, but it requires an additional API call per file
- Size + time comparison is sufficient for most use cases and much faster
- Optional hash verification can be added in the future if needed

---

## Future Considerations

1. **Configurable concurrency** - Allow users to adjust the batch size via config if they have different rate-limit needs
2. **Progress callbacks** - Add progress reporting for large sync operations
3. **Retry with exponential backoff** - Handle transient API failures gracefully (sketched below)
4. **Optional hash verification** - Add a `--verify` flag to compare MD5 checksums for critical syncs
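
A minimal sketch of what item 3 could look like — nothing in this release implements it yet, and `isTransientError` is a hypothetical predicate for rate-limit/5xx failures:

```javascript
// Retry an async operation with exponential backoff: 500ms, 1s, 2s, ...
const withBackoff = async (operation, maxRetries = 3, baseDelayMs = 500) => {
  for (let attempt = 0; ; attempt++) {
    try {
      return await operation();
    } catch (err) {
      // Give up on permanent errors or when retries are exhausted
      if (attempt >= maxRetries || !isTransientError(err)) throw err;
      await new Promise((resolve) => setTimeout(resolve, baseDelayMs * 2 ** attempt));
    }
  }
};
```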

---

## References

- Google Drive API Quotas: https://developers.google.com/drive/api/guides/limits
- Rate limit: 1,000 queries per 100 seconds per user
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "gdrive-syncer",
-  "version": "3.1.0",
+  "version": "3.1.2",
   "description": "Google Drive Syncer",
   "main": "./index.js",
   "bin": "./run.js",
package/run.js CHANGED
@@ -6,6 +6,7 @@ const { cfgAdd, cfgRm, cfgShow } = require('./src/cfgManager');
 const { runSync } = require('./src/sync');
 const { runList, runSearch, runDelete, runMkdir, runListSync } = require('./src/list');
 const { envInit, envRun, envShow, envRemove, envRegister, envUnregister, envMigrate } = require('./src/envSync');
+const { checkForUpdates } = require('./src/versionCheck');
 
 const [, , ...args] = process.argv;
 const [firstArg] = args;
@@ -81,10 +82,17 @@ const showHelp = () => {
   console.clear();
   intro(color.bgCyan(color.black(' GDrive Syncer ')));
 
+  // Check for updates and show at start if available
+  const updateMessage = await checkForUpdates();
+  if (updateMessage) {
+    note(updateMessage, 'Update Available');
+  }
+
   try {
     // Check for help
     if (firstArg === 'help' || firstArg === '--help' || firstArg === '-h') {
       showHelp();
+      if (updateMessage) note(updateMessage, 'Update Available');
       outro(color.green('Done!'));
       return;
     }
@@ -92,6 +100,7 @@ const showHelp = () => {
     // Check for direct command
     if (firstArg && commands[firstArg]) {
       await commands[firstArg].handler();
+      if (updateMessage) note(updateMessage, 'Update Available');
       outro(color.green('Done!'));
       return;
     }
@@ -119,6 +128,7 @@ const showHelp = () => {
 
     if (category === 'help') {
       showHelp();
+      if (updateMessage) note(updateMessage, 'Update Available');
       outro(color.green('Done!'));
       return;
     }
@@ -176,6 +186,8 @@ const showHelp = () => {
       await commands[action].handler();
     }
 
+    // Show update message at exit
+    if (updateMessage) note(updateMessage, 'Update Available');
     outro(color.green('Done!'));
   } catch (e) {
     log.error(e.message);
package/src/envSync.js CHANGED
@@ -565,6 +565,129 @@ const envShow = async () => {
   }
 };
 
+// =============================================================================
+// Performance Optimization: Parallel API calls for faster sync operations
+// See docs/PERFORMANCE_DECISIONS.md for design rationale
+// =============================================================================
+
+/**
+ * Session-scoped folder cache to avoid repeated API calls within a single operation.
+ * Structure: parentId -> Map(folderName -> folderId)
+ * IMPORTANT: Must be cleared at the start of each sync/diff operation via clearFolderCache()
+ */
+let folderCache = new Map();
+
+/**
+ * Clear folder cache - MUST be called at start of each sync operation
+ * This ensures we always start with fresh data from Google Drive
+ */
+const clearFolderCache = () => {
+  folderCache = new Map();
+};
+
+/**
+ * Get cached folder ID or null if not cached
+ * @param {string} parentId - Parent folder ID
+ * @param {string} folderName - Folder name to look up
+ * @returns {string|null} - Folder ID if cached, null otherwise
+ */
+const getCachedFolderId = (parentId, folderName) => {
+  const parentCache = folderCache.get(parentId);
+  return parentCache ? parentCache.get(folderName) || null : null;
+};
+
+/**
+ * Cache a folder ID for later lookups within the same operation
+ * @param {string} parentId - Parent folder ID
+ * @param {string} folderName - Folder name
+ * @param {string} folderId - Folder ID to cache
+ */
+const cacheFolderId = (parentId, folderName, folderId) => {
+  if (!folderCache.has(parentId)) {
+    folderCache.set(parentId, new Map());
+  }
+  folderCache.get(parentId).set(folderName, folderId);
+};
+
+/**
+ * Optimized recursive listing using breadth-first traversal with parallel API calls
+ * This significantly improves performance for nested folder structures
+ * @param {string} folderId - Root folder ID
+ * @param {string} pattern - File pattern to match
+ * @param {string} [ignore] - Ignore pattern
+ * @param {boolean} [includeFolders=false] - Whether to include folders in results
+ * @returns {Promise<Array<{id: string, name: string, relativePath: string, isFolder: boolean, sizeBytes: number, modifiedTime: Date|null}>>}
+ */
+const listDriveFilesRecursiveOptimized = async (folderId, pattern, ignore, includeFolders = false) => {
+  const results = [];
+  // Queue of folders to process: { id, prefix }
+  let queue = [{ id: folderId, prefix: '' }];
+
+  while (queue.length > 0) {
+    // Process all folders at current level in parallel
+    const currentBatch = queue;
+    queue = [];
+
+    // Fetch all folder contents in parallel (limit concurrency to 5)
+    const batchSize = 5;
+    for (let i = 0; i < currentBatch.length; i += batchSize) {
+      const batch = currentBatch.slice(i, i + batchSize);
+      const listPromises = batch.map(async ({ id, prefix }) => {
+        const listResult = await gdrive.listAsync({
+          query: `'${id}' in parents and trashed = false`,
+          noHeader: true,
+          max: 1000,
+        });
+
+        if (listResult.code !== 0 || !listResult.stdout.trim()) {
+          return { items: [], folders: [] };
+        }
+
+        const parsed = gdrive.parseListOutput(listResult.stdout.trim());
+        const items = [];
+        const folders = [];
+
+        for (const file of parsed) {
+          if (!file.id || !file.name) continue;
+
+          const relativePath = prefix ? `${prefix}/${file.name}` : file.name;
+          const isFolder = file.type && file.type.toLowerCase().includes('folder');
+
+          if (isFolder) {
+            // Queue folder for next level processing
+            folders.push({ id: file.id, prefix: relativePath });
+            if (includeFolders) {
+              items.push({ id: file.id, name: file.name, relativePath, isFolder: true, sizeBytes: 0, modifiedTime: null });
+            }
+          } else {
+            // Only add files that match the pattern
+            if (matchPattern(file.name, pattern, ignore)) {
+              items.push({
+                id: file.id,
+                name: file.name,
+                relativePath,
+                isFolder: false,
+                sizeBytes: file.sizeBytes || 0,
+                modifiedTime: file.modifiedTime || null,
+              });
+            }
+          }
+        }
+
+        return { items, folders };
+      });
+
+      const batchResults = await Promise.all(listPromises);
+      for (const { items, folders } of batchResults) {
+        results.push(...items);
+        queue.push(...folders);
+      }
+    }
+  }
+
+  return results;
+};
+
 /**
  * Recursively list files and folders from Google Drive
  * @param {string} folderId - Root folder ID
@@ -634,7 +757,7 @@ const listDriveFilesRecursive = (folderId, pattern, ignore, prefix = '', include
  * @param {string} [ignore] - Ignore pattern
  * @param {string} [prefix=''] - Path prefix for nested files
  * @param {boolean} [includeFolders=false] - Whether to include folders in results
- * @returns {Array<{name: string, relativePath: string, isFolder: boolean}>}
+ * @returns {Array<{name: string, relativePath: string, isFolder: boolean, sizeBytes: number, modifiedTime: Date|null}>}
  */
 const listLocalFilesRecursive = (dir, pattern, ignore, prefix = '', includeFolders = false) => {
   const results = [];
@@ -647,6 +770,7 @@ const listLocalFilesRecursive = (dir, pattern, ignore, prefix = '', includeFolde
 
   for (const entry of entries) {
     const relativePath = prefix ? `${prefix}/${entry.name}` : entry.name;
+    const fullPath = path.join(dir, entry.name);
 
     if (entry.isDirectory()) {
       // Add folder to results if requested
@@ -655,19 +779,33 @@ const listLocalFilesRecursive = (dir, pattern, ignore, prefix = '', includeFolde
           name: entry.name,
           relativePath,
           isFolder: true,
+          sizeBytes: 0,
+          modifiedTime: null,
         });
       }
       // Recursively list subdirectory
-      const subDir = path.join(dir, entry.name);
-      const subFiles = listLocalFilesRecursive(subDir, pattern, ignore, relativePath, includeFolders);
+      const subFiles = listLocalFilesRecursive(fullPath, pattern, ignore, relativePath, includeFolders);
       results.push(...subFiles);
     } else if (entry.isFile()) {
       // Only add files that match the pattern
       if (matchPattern(entry.name, pattern, ignore)) {
+        // Get file stats for delta sync comparison
+        let sizeBytes = 0;
+        let modifiedTime = null;
+        try {
+          const stats = fs.statSync(fullPath);
+          sizeBytes = stats.size;
+          modifiedTime = stats.mtime;
+        } catch (e) {
+          // Ignore stat errors, use defaults
+        }
+
         results.push({
           name: entry.name,
           relativePath,
           isFolder: false,
+          sizeBytes,
+          modifiedTime,
         });
       }
     }
@@ -678,6 +816,7 @@ const listLocalFilesRecursive = (dir, pattern, ignore, prefix = '', includeFolde
 
 /**
  * Find or create a folder in Google Drive by path
+ * Uses session-scoped cache to avoid repeated API calls for same folder paths
  * @param {string} parentId - Parent folder ID
  * @param {string} folderPath - Folder path like "a/b/c"
  * @returns {string} - The folder ID of the deepest folder
@@ -687,38 +826,44 @@ const findOrCreateDriveFolder = (parentId, folderPath) => {
   let currentParentId = parentId;
 
   for (const folderName of parts) {
-    // Check if folder exists
-    const listResult = gdrive.list({
-      query: `'${currentParentId}' in parents and name = '${folderName}' and mimeType = 'application/vnd.google-apps.folder' and trashed = false`,
-      noHeader: true,
-      max: 1,
-    });
+    // Check cache first to avoid redundant API calls
+    let folderId = getCachedFolderId(currentParentId, folderName);
 
-    let folderId = null;
+    if (!folderId) {
+      // Not in cache, query the API
+      const listResult = gdrive.list({
+        query: `'${currentParentId}' in parents and name = '${folderName}' and mimeType = 'application/vnd.google-apps.folder' and trashed = false`,
+        noHeader: true,
+        max: 1,
+      });
 
-    if (listResult.code === 0 && listResult.stdout.trim()) {
-      const parsed = gdrive.parseListOutput(listResult.stdout.trim());
-      if (parsed.length > 0 && parsed[0].id) {
-        folderId = parsed[0].id;
+      if (listResult.code === 0 && listResult.stdout.trim()) {
+        const parsed = gdrive.parseListOutput(listResult.stdout.trim());
+        if (parsed.length > 0 && parsed[0].id) {
+          folderId = parsed[0].id;
+        }
       }
-    }
 
-    if (!folderId) {
-      // Create the folder
-      const mkdirResult = gdrive.mkdir(folderName, { parent: currentParentId });
-      if (mkdirResult.code === 0) {
-        // Parse the created folder ID from output
-        // gdrive@3 outputs: "Directory created: ID"
-        // gdrive@2 outputs: "Directory ID created"
-        const match = mkdirResult.stdout.match(/([a-zA-Z0-9_-]{20,})/);
-        if (match) {
-          folderId = match[1];
+      if (!folderId) {
+        // Create the folder
+        const mkdirResult = gdrive.mkdir(folderName, { parent: currentParentId });
+        if (mkdirResult.code === 0) {
+          // Parse the created folder ID from output
+          // gdrive@3 outputs: "Directory created: ID"
+          // gdrive@2 outputs: "Directory ID created"
+          const match = mkdirResult.stdout.match(/([a-zA-Z0-9_-]{20,})/);
+          if (match) {
+            folderId = match[1];
+          }
         }
       }
-    }
 
-    if (!folderId) {
-      throw new Error(`Failed to find or create folder: ${folderName}`);
+      if (!folderId) {
+        throw new Error(`Failed to find or create folder: ${folderName}`);
+      }
+
+      // Cache the result for subsequent lookups within this operation
+      cacheFolderId(currentParentId, folderName, folderId);
     }
 
     currentParentId = folderId;
@@ -1049,6 +1194,9 @@ const envRun = async (presetAction, presetConfigType) => {
  * Run a single sync operation
  */
 const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, configType) => {
+  // Clear folder cache at start of each operation to ensure fresh data
+  clearFolderCache();
+
   const { folderId, pattern, name, ignore } = syncConfig;
   // Strip quotes from paths (in case manually added)
   const localDir = syncConfig.localDir.replace(/^['"]|['"]$/g, '');
@@ -1071,10 +1219,11 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
 
   try {
     // Fetch file list from Drive (recursive, including folders for cleanup)
+    // Uses optimized breadth-first parallel listing for better performance
     const s = spinner();
     s.start('Fetching files from Google Drive (including nested folders)...');
 
-    const driveItems = listDriveFilesRecursive(folderId, pattern, ignore, '', true);
+    const driveItems = await listDriveFilesRecursiveOptimized(folderId, pattern, ignore, true);
     const driveFiles = driveItems.filter((f) => !f.isFolder);
     const driveFolders = driveItems.filter((f) => f.isFolder);
 
@@ -1093,21 +1242,77 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
       }
     }
 
-    // Download Drive files to temp for comparison (preserving folder structure)
-    s.message('Downloading Drive files for comparison...');
-    for (const file of driveFiles) {
-      const destPath = path.join(tempDir, path.dirname(file.relativePath));
-      fs.ensureDirSync(destPath);
-      gdrive.download(file.id, { destination: destPath, overwrite: true });
-    }
-    s.stop(color.green(`Found ${driveFiles.length} matching file(s) on Drive`));
-
-    // Get local files (recursive, including folders for cleanup)
+    // Get local files first for delta sync comparison
     fs.ensureDirSync(envDir);
     const localItems = listLocalFilesRecursive(envDir, pattern, ignore, '', true);
     const localFiles = localItems.filter((f) => !f.isFolder);
     const localFolders = localItems.filter((f) => f.isFolder);
 
+    // Build local file lookup map for delta sync
+    const localFileMap = new Map();
+    for (const file of localFiles) {
+      localFileMap.set(file.relativePath, file);
+    }
+
+    // Delta sync: Determine which files need to be downloaded for comparison
+    // Skip files where metadata indicates no change (same size AND local is not older)
+    s.message('Analyzing file changes (delta sync)...');
+    const filesToDownload = [];
+    const unchangedFiles = [];
+
+    for (const driveFile of driveFiles) {
+      const localFile = localFileMap.get(driveFile.relativePath);
+
+      if (!localFile) {
+        // File only exists on Drive - need to download for diff
+        filesToDownload.push(driveFile);
+      } else {
+        // File exists in both places - check if metadata indicates change
+        const sizeMatch = driveFile.sizeBytes === localFile.sizeBytes;
+        const driveTime = driveFile.modifiedTime ? driveFile.modifiedTime.getTime() : 0;
+        const localTime = localFile.modifiedTime ? localFile.modifiedTime.getTime() : 0;
+
+        // Consider unchanged if: same size AND Drive is not newer
+        // (allow 1 second tolerance for time comparison)
+        const timeMatch = driveTime <= localTime + 1000;
+
+        if (sizeMatch && timeMatch) {
+          // Metadata suggests unchanged - skip download
+          unchangedFiles.push(driveFile.relativePath);
+        } else {
+          // Metadata differs - need to download for accurate comparison
+          filesToDownload.push(driveFile);
+        }
+      }
+    }
+
+    // Download only changed files to temp for comparison
+    s.message(
+      `Downloading ${filesToDownload.length} file(s) for comparison (${unchangedFiles.length} unchanged)...`
+    );
+
+    // Prepare destination directories first
+    for (const file of filesToDownload) {
+      const destPath = path.join(tempDir, path.dirname(file.relativePath));
+      fs.ensureDirSync(destPath);
+    }
+
+    // Download files in parallel batches (5 concurrent downloads) with progress
+    const downloads = filesToDownload.map((file) => ({
+      fileId: file.id,
+      options: {
+        destination: path.join(tempDir, path.dirname(file.relativePath)),
+        overwrite: true,
+      },
+    }));
+    if (downloads.length > 0) {
+      await gdrive.downloadParallel(downloads, 5, (completed, total) => {
+        s.message(`Downloading files for comparison... (${completed}/${total})`);
+      });
+    }
+
+    s.stop(color.green(`Found ${driveFiles.length} file(s) on Drive (${unchangedFiles.length} skipped via delta sync)`));
+
     // Compare files using relativePath
     const changes = {
       modified: [],
@@ -1116,10 +1321,11 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
       orphanDriveFolders: [], // Folders on Drive with no local equivalent
     };
 
-    // Build a set of drive file paths for quick lookup
+    // Build sets for quick lookup
     const driveFilePaths = new Set(driveFiles.map((f) => f.relativePath));
     const localFilePaths = new Set(localFiles.map((f) => f.relativePath));
     const localFolderPaths = new Set(localFolders.map((f) => f.relativePath));
+    const unchangedSet = new Set(unchangedFiles);
 
     // Check local files
     for (const localFile of localFiles) {
@@ -1127,7 +1333,12 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
       const driveFilePath = path.join(tempDir, localFile.relativePath);
 
       if (driveFilePaths.has(localFile.relativePath)) {
-        // File exists on both - check for modifications
+        // File exists on both
+        if (unchangedSet.has(localFile.relativePath)) {
+          // Delta sync determined this file is unchanged - skip comparison
+          continue;
+        }
+        // Check for modifications (only for files we downloaded)
         if (fs.existsSync(driveFilePath)) {
           const localContent = fs.readFileSync(localFilePath, 'utf-8');
           const driveContent = fs.readFileSync(driveFilePath, 'utf-8');
@@ -1308,11 +1519,13 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
     }
 
     const downloadSpinner = spinner();
-    downloadSpinner.start('Downloading files...');
-
+    const totalToDownload = changes.modified.length + changes.driveOnly.length;
     let downloaded = 0;
 
+    downloadSpinner.start(`Downloading files... (0/${totalToDownload})`);
+
     for (const relativePath of changes.modified) {
+      downloadSpinner.message(`Downloading (${downloaded + 1}/${totalToDownload}): ${relativePath}`);
       const srcPath = path.join(tempDir, relativePath);
       const destPath = path.join(envDir, relativePath);
       fs.ensureDirSync(path.dirname(destPath));
@@ -1321,6 +1534,7 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
     }
 
     for (const relativePath of changes.driveOnly) {
+      downloadSpinner.message(`Downloading (${downloaded + 1}/${totalToDownload}): ${relativePath}`);
       const srcPath = path.join(tempDir, relativePath);
       const destPath = path.join(envDir, relativePath);
       fs.ensureDirSync(path.dirname(destPath));
@@ -1331,11 +1545,13 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
     downloadSpinner.stop(color.green(`Downloaded ${downloaded} file(s)`));
   } else if (action === 'upload') {
     const uploadSpinner = spinner();
-    uploadSpinner.start('Uploading files...');
-
+    const totalToUpload = changes.modified.length + changes.localOnly.length;
+    let completed = 0;
     let uploaded = 0;
     let replaced = 0;
 
+    uploadSpinner.start(`Uploading files... (0/${totalToUpload})`);
+
     // Build a map of relativePath -> driveFile for quick lookup
     const driveFileMap = new Map();
     for (const df of driveFiles) {
@@ -1343,14 +1559,17 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
     }
 
     for (const relativePath of changes.modified) {
+      uploadSpinner.message(`Replacing (${completed + 1}/${totalToUpload}): ${relativePath}`);
       const driveFile = driveFileMap.get(relativePath);
       if (driveFile) {
         gdrive.update(driveFile.id, path.join(envDir, relativePath));
         replaced++;
+        completed++;
       }
     }
 
     for (const relativePath of changes.localOnly) {
+      uploadSpinner.message(`Uploading (${completed + 1}/${totalToUpload}): ${relativePath}`);
       const localFilePath = path.join(envDir, relativePath);
       const folderPath = path.dirname(relativePath);
 
@@ -1362,6 +1581,7 @@ const runSyncOperation = async (syncConfig, action, projectRoot, backupPath, con
 
       gdrive.upload(localFilePath, { parent: parentId });
       uploaded++;
+      completed++;
     }
 
     uploadSpinner.stop(color.green(`Replaced: ${replaced}, New: ${uploaded}`));
package/src/gdriveCmd.js CHANGED
@@ -1,6 +1,9 @@
 #!/usr/bin/env node
 
 const shell = require('shelljs');
+const { exec } = require('child_process');
+const { promisify } = require('util');
+const execAsync = promisify(exec);
 
 let cachedVersion = null;
 
@@ -254,7 +257,7 @@ const syncList = () => {
  * Parse list output into structured data
  * Works with both v2 and v3 output formats
  * @param {string} stdout - Raw stdout from list command
- * @returns {Array<{id: string, name: string, type: string, size: string, date: string}>}
+ * @returns {Array<{id: string, name: string, type: string, size: string, sizeBytes: number, date: string, modifiedTime: Date|null}>}
  */
 const parseListOutput = (stdout) => {
   const lines = stdout.trim().split('\n').filter((line) => line.trim());
@@ -267,16 +270,64 @@ const parseListOutput = (stdout) => {
 
   return lines.map((line) => {
     const parts = line.trim().split(separator);
+    const size = parts[3] || '';
+    const date = parts[4] || '';
+
     return {
       id: parts[0] || '',
       name: parts[1] || '',
       type: parts[2] || '',
-      size: parts[3] || '',
-      date: parts[4] || '',
+      size,
+      sizeBytes: parseSizeToBytes(size),
+      date,
+      modifiedTime: parseGdriveDate(date),
     };
   });
 };
 
+/**
+ * Parse size string to bytes (e.g., "1.5 KB" -> 1536)
+ * @param {string} sizeStr - Size string from gdrive output
+ * @returns {number} - Size in bytes, or 0 if unparseable
+ */
+const parseSizeToBytes = (sizeStr) => {
+  if (!sizeStr) return 0;
+
+  const match = sizeStr.match(/^([\d.]+)\s*(B|KB|MB|GB|TB)?$/i);
+  if (!match) return 0;
+
+  const value = parseFloat(match[1]);
+  const unit = (match[2] || 'B').toUpperCase();
+
+  const multipliers = {
+    B: 1,
+    KB: 1024,
+    MB: 1024 * 1024,
+    GB: 1024 * 1024 * 1024,
+    TB: 1024 * 1024 * 1024 * 1024,
+  };
+
+  return Math.round(value * (multipliers[unit] || 1));
+};
+
+/**
+ * Parse gdrive date string to Date object
+ * gdrive outputs dates like "2024-01-15 10:30:00" or ISO format
+ * @param {string} dateStr - Date string from gdrive output
+ * @returns {Date|null} - Date object or null if unparseable
+ */
+const parseGdriveDate = (dateStr) => {
+  if (!dateStr) return null;
+
+  // Try parsing as-is (handles ISO format and common formats)
+  const date = new Date(dateStr);
+  if (!isNaN(date.getTime())) {
+    return date;
+  }
+
+  return null;
+};
+
 /**
  * Clear the cached version (useful for testing)
  */
@@ -284,12 +335,112 @@ const clearCache = () => {
   cachedVersion = null;
 };
 
+/**
+ * Async version of list - for parallel operations
+ * @param {Object} options - Same options as list()
+ * @returns {Promise<{code: number, stdout: string, stderr: string}>}
+ */
+const listAsync = async (options = {}) => {
+  const version = detectVersion();
+  const { query, max = 30, noHeader = false, absolute = false, parent } = options;
+
+  let cmd;
+  if (version === 2) {
+    cmd = 'gdrive list';
+    if (max) cmd += ` --max ${max}`;
+    if (query) cmd += ` --query "${query}"`;
+    if (noHeader) cmd += ' --no-header';
+    if (absolute) cmd += ' --absolute';
+  } else {
+    cmd = 'gdrive files list';
+    if (max) cmd += ` --max ${max}`;
+    if (parent) {
+      cmd += ` --parent ${parent}`;
+    } else if (query) {
+      cmd += ` --query "${query}"`;
+    }
+    if (noHeader) cmd += ' --skip-header';
+  }
+
+  try {
+    const { stdout, stderr } = await execAsync(cmd);
+    return { code: 0, stdout: stdout || '', stderr: stderr || '' };
+  } catch (error) {
+    return { code: error.code || 1, stdout: error.stdout || '', stderr: error.stderr || error.message };
+  }
+};
+
+/**
+ * Async version of download - for parallel downloads
+ * @param {string} fileId - File ID to download
+ * @param {Object} options - Same options as download()
+ * @returns {Promise<{code: number, stdout: string, stderr: string}>}
+ */
+const downloadAsync = async (fileId, options = {}) => {
+  const version = detectVersion();
+  const { destination, overwrite = false, recursive = false } = options;
+
+  let cmd;
+  if (version === 2) {
+    cmd = `gdrive download "${fileId}"`;
+    if (destination) cmd += ` --path "${destination}"`;
+    if (overwrite) cmd += ' --force';
+    if (recursive) cmd += ' -r';
+  } else {
+    cmd = `gdrive files download "${fileId}"`;
+    if (destination) cmd += ` --destination "${destination}"`;
+    if (overwrite) cmd += ' --overwrite';
+    if (recursive) cmd += ' --recursive';
+  }
+
+  try {
+    const { stdout, stderr } = await execAsync(cmd);
+    return { code: 0, stdout: stdout || '', stderr: stderr || '' };
+  } catch (error) {
+    return { code: error.code || 1, stdout: error.stdout || '', stderr: error.stderr || error.message };
+  }
+};
+
+/**
+ * Download multiple files in parallel with concurrency limit and progress tracking
+ * @param {Array<{fileId: string, options: Object}>} downloads - Array of download requests
+ * @param {number} concurrency - Maximum concurrent downloads (default: 5)
+ * @param {Function} [onProgress] - Progress callback: (completed, total) => void
+ * @returns {Promise<Array<{code: number, stdout: string, stderr: string}>>}
+ */
+const downloadParallel = async (downloads, concurrency = 5, onProgress = null) => {
+  const results = [];
+  const total = downloads.length;
+  let completed = 0;
+
+  // Process in batches to limit concurrency
+  for (let i = 0; i < downloads.length; i += concurrency) {
+    const batch = downloads.slice(i, i + concurrency);
+    const batchResults = await Promise.all(
+      batch.map(async ({ fileId, options }) => {
+        const result = await downloadAsync(fileId, options);
+        completed++;
+        if (onProgress) {
+          onProgress(completed, total);
+        }
+        return result;
+      })
+    );
+    results.push(...batchResults);
+  }
+
+  return results;
+};
+
 module.exports = {
   detectVersion,
   getVersion,
   hasSyncSupport,
   list,
+  listAsync,
   download,
+  downloadAsync,
+  downloadParallel,
   upload,
   update,
   mkdir,
@@ -297,5 +448,7 @@ module.exports = {
   syncUpload,
   syncList,
   parseListOutput,
+  parseSizeToBytes,
+  parseGdriveDate,
   clearCache,
 };
package/src/versionCheck.js ADDED
@@ -0,0 +1,100 @@
/**
 * Version check utility - warns user if a newer version is available
 */
const https = require('https');
const path = require('path');
const color = require('picocolors');

// Get current version from package.json
const getLocalVersion = () => {
  const pkg = require(path.join(__dirname, '..', 'package.json'));
  return pkg.version;
};

// Get package name from package.json
const getPackageName = () => {
  const pkg = require(path.join(__dirname, '..', 'package.json'));
  return pkg.name;
};

/**
 * Fetch latest version from npm registry
 * @param {string} packageName - npm package name
 * @returns {Promise<string|null>} - Latest version or null on error
 */
const fetchLatestVersion = (packageName) => {
  return new Promise((resolve) => {
    const url = `https://registry.npmjs.org/${packageName}/latest`;

    const req = https.get(url, { timeout: 3000 }, (res) => {
      let data = '';

      res.on('data', (chunk) => {
        data += chunk;
      });

      res.on('end', () => {
        try {
          const json = JSON.parse(data);
          resolve(json.version || null);
        } catch {
          resolve(null);
        }
      });
    });

    req.on('error', () => resolve(null));
    req.on('timeout', () => {
      req.destroy();
      resolve(null);
    });
  });
};

/**
 * Compare semver versions
 * @param {string} current - Current version (e.g., "1.2.3")
 * @param {string} latest - Latest version (e.g., "1.3.0")
 * @returns {boolean} - True if latest is newer than current
 */
const isNewerVersion = (current, latest) => {
  const currentParts = current.split('.').map(Number);
  const latestParts = latest.split('.').map(Number);

  for (let i = 0; i < 3; i++) {
    const c = currentParts[i] || 0;
    const l = latestParts[i] || 0;
    if (l > c) return true;
    if (l < c) return false;
  }
  return false;
};

/**
 * Check for updates and return message if newer version available
 * Non-blocking - doesn't throw errors, just silently fails
 * @returns {Promise<string|null>} - Update message or null if up to date/error
 */
const checkForUpdates = async () => {
  try {
    const packageName = getPackageName();
    const currentVersion = getLocalVersion();
    const latestVersion = await fetchLatestVersion(packageName);

    if (latestVersion && isNewerVersion(currentVersion, latestVersion)) {
      return `Update available: ${color.dim(currentVersion)} → ${color.green(latestVersion)} (${color.cyan(packageName)})`;
    }
    return null;
  } catch {
    // Silently ignore errors - don't disrupt user experience
    return null;
  }
};

module.exports = {
  getLocalVersion,
  getPackageName,
  fetchLatestVersion,
  isNewerVersion,
  checkForUpdates,
};