sftp-push-sync 3.0.1 → 3.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # Changelog
2
2
 
3
- ## [3.0.1] - 2026-03-05
3
+ ## [3.0.2] - 2026-03-05
4
4
 
5
5
  - stability improvements especially during large and long-running uploads, error handling, log with datetime.
6
6
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "sftp-push-sync",
3
- "version": "3.0.1",
3
+ "version": "3.0.2",
4
4
  "description": "SFTP sync tool for Hugo projects (local to remote, with hash cache)",
5
5
  "type": "module",
6
6
  "bin": {
@@ -200,7 +200,10 @@ export class SftpPushSyncApp {
200
200
  // Try a minimal operation to check connection
201
201
  await sftp.cwd();
202
202
  return true;
203
- } catch {
203
+ } catch (e) {
204
+ if (this.isVerbose) {
205
+ this.vlog(`${TAB_A}${pc.dim(`Connection check failed: ${e?.message || e}`)}`);
206
+ }
204
207
  return false;
205
208
  }
206
209
  }
@@ -213,8 +216,11 @@ export class SftpPushSyncApp {
213
216
  try {
214
217
  try {
215
218
  await sftp.end();
216
- } catch {
219
+ } catch (e) {
217
220
  // Ignore errors when closing dead connection
221
+ if (this.isVerbose) {
222
+ this.vlog(`${TAB_A}${pc.dim(`Closing old connection failed (expected): ${e?.message || e}`)}`);
223
+ }
218
224
  }
219
225
 
220
226
  // Wait before reconnecting (exponential backoff)
@@ -254,6 +260,45 @@ export class SftpPushSyncApp {
254
260
  }
255
261
  }
256
262
 
263
+ /**
264
+ * Upload a file with progress reporting for large files.
265
+ * Uses fastPut for files > threshold, with automatic fallback to put on failure.
266
+ */
267
+ async _uploadFile(sftp, localPath, remotePath, rel, size) {
268
+ const LARGE_FILE_THRESHOLD = 5 * 1024 * 1024; // 5MB
269
+ const sizeMB = (size / (1024 * 1024)).toFixed(1);
270
+
271
+ // For small files, just use put
272
+ if (size < LARGE_FILE_THRESHOLD) {
273
+ await sftp.put(localPath, remotePath);
274
+ return;
275
+ }
276
+
277
+ // For large files, try fastPut with progress
278
+ let lastReportedPercent = 0;
279
+ const shortRel = rel.length > 50 ? '...' + rel.slice(-47) : rel;
280
+
281
+ try {
282
+ await sftp.fastPut(localPath, remotePath, {
283
+ step: (transferred, chunk, total) => {
284
+ const percent = Math.floor((transferred / total) * 100);
285
+ // Only log at 25%, 50%, 75%, 100%
286
+ if (percent >= lastReportedPercent + 25) {
287
+ lastReportedPercent = Math.floor(percent / 25) * 25;
288
+ this.log(`${TAB_A}${pc.dim(` ↑ ${sizeMB}MB ${percent}%: ${shortRel}`)}`);
289
+ }
290
+ }
291
+ });
292
+ } catch (fastPutErr) {
293
+ // fastPut not supported by server, fall back to regular put
294
+ if (this.isVerbose) {
295
+ this.vlog(`${TAB_A}${pc.dim(` fastPut failed, using put: ${fastPutErr?.message}`)}`);
296
+ }
297
+ this.log(`${TAB_A}${pc.dim(` Uploading ${sizeMB}MB: ${shortRel}`)}`);
298
+ await sftp.put(localPath, remotePath);
299
+ }
300
+ }
301
+
257
302
  // ---------------------------------------------------------
258
303
  // Pattern-Helper
259
304
  // ---------------------------------------------------------
@@ -936,6 +981,7 @@ export class SftpPushSyncApp {
936
981
  this.hashCache = await createHashCacheNDJSON({
937
982
  cachePath: ndjsonCachePath,
938
983
  namespace: target,
984
+ vlog: this.isVerbose ? (...m) => console.log(...m) : null,
939
985
  });
940
986
 
941
987
  // Logger
@@ -1144,11 +1190,29 @@ export class SftpPushSyncApp {
1144
1190
  analyzeChunk: this.analyzeChunk,
1145
1191
  updateProgress: (prefix, current, total, rel) =>
1146
1192
  this.updateProgress2(prefix, current, total, rel, "Files"),
1193
+ log: this.isVerbose ? (...m) => this.log(...m) : null,
1147
1194
  });
1148
1195
 
1149
1196
  toAdd = diffResult.toAdd;
1150
1197
  toUpdate = diffResult.toUpdate;
1151
1198
 
1199
+ // Report large files that skipped hash comparison
1200
+ if (diffResult.largeFilesSkipped && diffResult.largeFilesSkipped.length > 0 && this.isVerbose) {
1201
+ const totalSizeMB = diffResult.largeFilesSkipped.reduce((sum, f) => sum + f.size, 0) / (1024 * 1024);
1202
+ this.log(` ℹ ${diffResult.largeFilesSkipped.length} large files (${totalSizeMB.toFixed(0)}MB total) skipped hash compare (same size/date)`);
1203
+ }
1204
+
1205
+ // Report compare errors if any
1206
+ if (diffResult.compareErrors && diffResult.compareErrors.length > 0) {
1207
+ this.log("");
1208
+ this.wlog(pc.yellow(`⚠ ${diffResult.compareErrors.length} files had compare errors (will be re-uploaded):`));
1209
+ if (this.isVerbose) {
1210
+ for (const { rel, error } of diffResult.compareErrors) {
1211
+ this.wlog(pc.yellow(` - ${rel}: ${error}`));
1212
+ }
1213
+ }
1214
+ }
1215
+
1152
1216
  if (toAdd.length === 0 && toUpdate.length === 0) {
1153
1217
  this.log("");
1154
1218
  this.log(`${TAB_A}No differences found. Everything is up to date.`);
@@ -1217,14 +1281,14 @@ export class SftpPushSyncApp {
1217
1281
  await this.runTasks(
1218
1282
  toAdd,
1219
1283
  this.connection.workers,
1220
- async ({ local: l, remotePath }) => {
1284
+ async ({ local: l, remotePath, rel }) => {
1221
1285
  const remoteDir = path.posix.dirname(remotePath);
1222
1286
  try {
1223
1287
  await sftp.mkdir(remoteDir, true);
1224
1288
  } catch {
1225
1289
  // Directory may already exist
1226
1290
  }
1227
- await sftp.put(l.localPath, remotePath);
1291
+ await this._uploadFile(sftp, l.localPath, remotePath, rel, l.size);
1228
1292
  },
1229
1293
  "Uploads (new)",
1230
1294
  sftp
@@ -1234,14 +1298,14 @@ export class SftpPushSyncApp {
1234
1298
  await this.runTasks(
1235
1299
  toUpdate,
1236
1300
  this.connection.workers,
1237
- async ({ local: l, remotePath }) => {
1301
+ async ({ local: l, remotePath, rel }) => {
1238
1302
  const remoteDir = path.posix.dirname(remotePath);
1239
1303
  try {
1240
1304
  await sftp.mkdir(remoteDir, true);
1241
1305
  } catch {
1242
1306
  // Directory may already exist
1243
1307
  }
1244
- await sftp.put(l.localPath, remotePath);
1308
+ await this._uploadFile(sftp, l.localPath, remotePath, rel, l.size);
1245
1309
  },
1246
1310
  "Uploads (update)",
1247
1311
  sftp
@@ -1356,8 +1420,11 @@ export class SftpPushSyncApp {
1356
1420
  if (this.hashCache?.close) {
1357
1421
  await this.hashCache.close();
1358
1422
  }
1359
- } catch {
1360
- // ignore
1423
+ } catch (e) {
1424
+ // Cache close failed during error cleanup
1425
+ if (this.isVerbose) {
1426
+ this.vlog(`${TAB_A}${pc.dim(`Cache close during cleanup failed: ${e?.message || e}`)}`)
1427
+ }
1361
1428
  }
1362
1429
  } finally {
1363
1430
  try {
@@ -23,6 +23,8 @@ import path from "path";
23
23
  * - analyzeChunk: Progress-Schrittgröße
24
24
  * - updateProgress(prefix, current, total, rel): optional
25
25
  * - concurrency: Max parallele Vergleiche (default: 5)
26
+ * - log: optional logging function for errors/warnings
27
+ * - maxSizeForHash: Files larger than this skip hash comparison (default: 50MB)
26
28
  */
27
29
  export async function analyseDifferences({
28
30
  local,
@@ -34,7 +36,13 @@ export async function analyseDifferences({
34
36
  analyzeChunk = 10,
35
37
  updateProgress,
36
38
  concurrency = 10,
39
+ log,
40
+ maxSizeForHash = 50 * 1024 * 1024, // 50MB default
37
41
  }) {
42
+ // Track errors for summary
43
+ const compareErrors = [];
44
+ // Track large files skipped
45
+ const largeFilesSkipped = [];
38
46
  const toAdd = [];
39
47
  const toUpdate = [];
40
48
 
@@ -45,6 +53,7 @@ export async function analyseDifferences({
45
53
  // Phase 1: Schneller Vorab-Check ohne SFTP
46
54
  // - Dateien nur lokal → direkt zu toAdd
47
55
  // - Size-Vergleich für existierende Dateien
56
+ // - Große Dateien: nur MTime-Vergleich (kein Hash-Download)
48
57
  const keysNeedContentCompare = [];
49
58
 
50
59
  for (const rel of localKeys) {
@@ -58,21 +67,46 @@ export async function analyseDifferences({
58
67
  } else if (l.size !== r.size) {
59
68
  // Size unterschiedlich → Changed (kein SFTP-Call nötig)
60
69
  toUpdate.push({ rel, local: l, remote: r, remotePath });
70
+ // } else if (l.size > maxSizeForHash) {
71
+ // // Große Datei mit gleicher Size: nur MTime vergleichen
72
+ // // Remote modifyTime ist String wie "2026-03-05", local mtimeMs ist Timestamp
73
+ // const localDate = new Date(l.mtimeMs).toISOString().split('T')[0];
74
+ // const remoteDate = r.modifyTime ? r.modifyTime.split('T')[0] : '';
75
+ //
76
+ // if (localDate > remoteDate) {
77
+ // // Local ist neuer → Changed
78
+ // toUpdate.push({ rel, local: l, remote: r, remotePath });
79
+ // if (log) {
80
+ // const sizeMB = (l.size / (1024 * 1024)).toFixed(1);
81
+ // log(` ℹ Large file (${sizeMB}MB) newer locally: ${rel}`);
82
+ // }
83
+ // } else {
84
+ // largeFilesSkipped.push({ rel, size: l.size });
85
+ // }
61
86
  } else {
62
- // Size gleich → Content-Vergleich nötig
87
+ // Size gleich, normale Größe → Content-Vergleich nötig
63
88
  keysNeedContentCompare.push(rel);
64
89
  }
65
90
 
66
91
  checked++;
67
92
  if (updateProgress && checked % analyzeChunk === 0) {
68
- updateProgress("Analyse (Size): ", checked, totalToCheck, rel);
93
+ updateProgress("Analyse (quick): ", checked, totalToCheck, rel);
69
94
  }
70
95
  }
71
96
 
97
+ // Final progress update for Phase 1
98
+ if (updateProgress) {
99
+ updateProgress("Analyse (quick): ", totalToCheck, totalToCheck, "done");
100
+ }
101
+
72
102
  // Phase 2: Content-Vergleich in echten Batches
73
- // Nur für Dateien mit gleicher Size
103
+ // Nur für Dateien mit gleicher Size (und unter maxSizeForHash)
74
104
  const totalContentCompare = keysNeedContentCompare.length;
75
105
 
106
+ if (totalContentCompare > 0 && log) {
107
+ log(` → ${totalContentCompare} files need content comparison`);
108
+ }
109
+
76
110
  for (let i = 0; i < totalContentCompare; i += concurrency) {
77
111
  const batch = keysNeedContentCompare.slice(i, i + concurrency);
78
112
 
@@ -114,8 +148,14 @@ export async function analyseDifferences({
114
148
  : null;
115
149
  }
116
150
  } catch (err) {
117
- // Bei Fehler als changed markieren (sicherer)
118
- return { rel, local: l, remote: r, remotePath, changed: true };
151
+ // Log the error so user can see what's happening
152
+ const errMsg = err?.message || String(err);
153
+ compareErrors.push({ rel, error: errMsg });
154
+ if (log) {
155
+ log(` ⚠ Compare error for ${rel}: ${errMsg}`);
156
+ }
157
+ // Mark as changed (sicherer) - file will be re-uploaded
158
+ return { rel, local: l, remote: r, remotePath, changed: true, hadError: true };
119
159
  }
120
160
  })
121
161
  );
@@ -127,14 +167,14 @@ export async function analyseDifferences({
127
167
  }
128
168
  }
129
169
 
130
- // Progress update
170
+ // Progress update - show as separate progress (doesn't jump back)
131
171
  const progressCount = Math.min(i + batch.length, totalContentCompare);
132
172
  if (updateProgress) {
133
- updateProgress("Analyse (Content): ", checked + progressCount, totalToCheck + totalContentCompare, batch[batch.length - 1]);
173
+ updateProgress("Analyse (hash): ", progressCount, totalContentCompare, batch[batch.length - 1]);
134
174
  }
135
175
  }
136
176
 
137
- return { toAdd, toUpdate };
177
+ return { toAdd, toUpdate, compareErrors, largeFilesSkipped };
138
178
  }
139
179
 
140
180
  /**
@@ -35,28 +35,61 @@ export function hashLocalFile(filePath) {
35
35
 
36
36
  /**
37
37
  * Streaming-SHA256 für Remote-Datei via ssh2-sftp-client
38
- * Mit Timeout, um hängende Verbindungen zu erkennen.
38
+ * Mit IDLE-Timeout: nur wenn keine Daten mehr fließen für X Sekunden.
39
+ * Große Dateien werden korrekt behandelt - solange Daten ankommen, kein Timeout.
40
+ *
41
+ * @param {Object} sftp - SFTP client
42
+ * @param {string} remotePath - Remote file path
43
+ * @param {number} idleTimeoutMs - Timeout in ms when NO data is received (default: 60000)
44
+ * @param {number} fileSizeBytes - File size (for logging)
39
45
  */
40
- export async function hashRemoteFile(sftp, remotePath, timeoutMs = 60000) {
46
+ export async function hashRemoteFile(sftp, remotePath, idleTimeoutMs = 60000, fileSizeBytes = 0) {
41
47
  const hash = createHash("sha256");
48
+ let lastDataTime = Date.now();
49
+ let totalReceived = 0;
50
+ let timeoutId = null;
51
+ let rejectFn = null;
52
+
53
+ // Promise that rejects on idle timeout
54
+ const idleTimeoutPromise = new Promise((_, reject) => {
55
+ rejectFn = reject;
56
+
57
+ const checkIdle = () => {
58
+ const idleTime = Date.now() - lastDataTime;
59
+ if (idleTime >= idleTimeoutMs) {
60
+ const receivedMB = (totalReceived / (1024 * 1024)).toFixed(1);
61
+ reject(new Error(`Idle timeout (${Math.round(idleTimeoutMs/1000)}s no data) at ${receivedMB}MB for ${remotePath}`));
62
+ } else {
63
+ // Check again in 5 seconds
64
+ timeoutId = setTimeout(checkIdle, 5000);
65
+ }
66
+ };
67
+
68
+ // Start checking after initial timeout
69
+ timeoutId = setTimeout(checkIdle, idleTimeoutMs);
70
+ });
42
71
 
43
72
  const writable = new Writable({
44
73
  write(chunk, enc, cb) {
74
+ lastDataTime = Date.now(); // Reset idle timer on each chunk
75
+ totalReceived += chunk.length;
45
76
  hash.update(chunk);
46
77
  cb();
47
78
  },
48
79
  });
49
80
 
50
- // Timeout-Promise
51
- const timeoutPromise = new Promise((_, reject) => {
52
- setTimeout(() => reject(new Error(`Timeout downloading ${remotePath}`)), timeoutMs);
53
- });
54
-
55
- // Race between download and timeout
56
- await Promise.race([
57
- sftp.get(remotePath, writable),
58
- timeoutPromise,
59
- ]);
81
+ try {
82
+ // Race between download and idle timeout
83
+ await Promise.race([
84
+ sftp.get(remotePath, writable),
85
+ idleTimeoutPromise,
86
+ ]);
87
+ } finally {
88
+ // Clean up timeout
89
+ if (timeoutId) {
90
+ clearTimeout(timeoutId);
91
+ }
92
+ }
60
93
 
61
94
  return hash.digest("hex");
62
95
  }
@@ -71,8 +104,9 @@ export async function hashRemoteFile(sftp, remotePath, timeoutMs = 60000) {
71
104
  * @param {string} options.cachePath - Path to the NDJSON file (e.g., ".sync-cache.prod.ndjson")
72
105
  * @param {string} options.namespace - Namespace for keys (e.g., "prod")
73
106
  * @param {number} options.autoSaveInterval - Save after this many changes (default: 1000)
107
+ * @param {Function} options.vlog - Optional verbose logging function
74
108
  */
75
- export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInterval = 1000 }) {
109
+ export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInterval = 1000, vlog }) {
76
110
  const ns = namespace || "default";
77
111
 
78
112
  // In-memory storage
@@ -94,6 +128,7 @@ export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInte
94
128
  await fsp.access(cachePath);
95
129
  } catch {
96
130
  // File doesn't exist - start fresh
131
+ if (vlog) vlog(` Cache file not found, starting fresh: ${cachePath}`);
97
132
  return;
98
133
  }
99
134
 
@@ -203,7 +238,8 @@ export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInte
203
238
 
204
239
  // Cache miss or stale: compute new hash
205
240
  const filePath = meta.fullPath || meta.remotePath;
206
- const hash = await hashRemoteFile(sftp, filePath);
241
+ // Pass file size for dynamic timeout calculation
242
+ const hash = await hashRemoteFile(sftp, filePath, 60000, meta.size || 0);
207
243
 
208
244
  remoteCache.set(key, {
209
245
  size: meta.size,