sftp-push-sync 3.0.0 → 3.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,12 +1,16 @@
1
1
  # Changelog
2
2
 
3
+ ## [3.0.2] - 2026-03-05
4
+
5
+ - stability improvements, especially during large and long-running uploads; improved error handling; optional datetime prefix in log output.
6
+
3
7
  ## [3.0.0] - 2026-03-04
4
8
 
5
9
  - Switched from JSON-file based hash cache to NDJSON-based Cache-implementation.
6
10
  - Disk-based, only active entries in RAM
7
11
  - Scales to 100,000+ files without memory issues
8
12
  - Auto-persist (no explicit saving required)
9
- - Auto-migration - Existing JSON cache (.sync-cache.prod.json) is automatically migrated to LevelDB (.sync-cache-prod/)
13
+ - Auto-migration - Existing JSON cache is automatically migrated
10
14
 
11
15
  ## [2.5.0] - 2026-03-04
12
16
 
package/README.md CHANGED
@@ -19,7 +19,7 @@ Features:
19
19
  - adds, updates, deletes files
20
20
  - text diff detection
21
21
  - Binary files (images, video, audio, PDF, etc.): SHA-256 hash comparison
22
- - Hashes are cached in .sync-cache.*.json
22
+ - Hashes are cached in `.sync-cache.*.ndjson`
23
23
  - Parallel uploads/deletions via worker pool
24
24
  - include/exclude patterns
25
25
  - Sidecar uploads / downloads - Bypassing the sync process
@@ -104,6 +104,7 @@ Create a `sync.config.json` in the root folder of your project:
104
104
  "analyzeChunk": 1
105
105
  },
106
106
  "logLevel": "normal",
107
+ "logTimestamps": false,
107
108
  "logFile": ".sftp-push-sync.{target}.log"
108
109
  }
109
110
  ```
@@ -204,6 +205,7 @@ sftp-push-sync prod --sidecar-download --skip-sync
204
205
  Logging can also be configured.
205
206
 
206
207
  - `logLevel` - normal, verbose, laconic.
208
+ - `logTimestamps` - true/false. When enabled, each log line is prefixed with a timestamp `[YYYY-MM-DD HH:mm:ss.SSS]`.
207
209
  - `logFile` - an optional logFile.
208
210
  - `scanChunk` - After how many elements should a log output be generated during scanning?
209
211
  - `analyzeChunk` - After how many elements should a log output be generated during analysis?
@@ -275,4 +277,4 @@ Note 2: Reliability and accuracy are more important to me than speed.
275
277
  - <https://www.npmjs.com/package/sftp-push-sync>
276
278
  - <https://github.com/cnichte/sftp-push-sync>
277
279
  - <https://www.npmjs.com/package/hugo-toolbox>
278
- - <https://carsten-nichte.de>
280
+ - <https://carsten-nichte.de>
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "sftp-push-sync",
3
- "version": "3.0.0",
3
+ "version": "3.0.2",
4
4
  "description": "SFTP sync tool for Hugo projects (local to remote, with hash cache)",
5
5
  "type": "module",
6
6
  "bin": {
@@ -200,36 +200,103 @@ export class SftpPushSyncApp {
200
200
  // Try a minimal operation to check connection
201
201
  await sftp.cwd();
202
202
  return true;
203
- } catch {
203
+ } catch (e) {
204
+ if (this.isVerbose) {
205
+ this.vlog(`${TAB_A}${pc.dim(`Connection check failed: ${e?.message || e}`)}`);
206
+ }
204
207
  return false;
205
208
  }
206
209
  }
207
210
 
208
211
  /**
209
- * Reconnect to SFTP server
212
+ * Reconnect to SFTP server with retry logic
210
213
  */
211
- async _reconnect(sftp) {
212
- try {
213
- await sftp.end();
214
- } catch {
215
- // Ignore errors when closing dead connection
216
- }
214
+ async _reconnect(sftp, maxRetries = 3) {
215
+ for (let attempt = 1; attempt <= maxRetries; attempt++) {
216
+ try {
217
+ try {
218
+ await sftp.end();
219
+ } catch (e) {
220
+ // Ignore errors when closing dead connection
221
+ if (this.isVerbose) {
222
+ this.vlog(`${TAB_A}${pc.dim(`Closing old connection failed (expected): ${e?.message || e}`)}`);
223
+ }
224
+ }
217
225
 
218
- await sftp.connect({
219
- host: this.connection.host,
220
- port: this.connection.port,
221
- username: this.connection.user,
222
- password: this.connection.password,
223
- keepaliveInterval: 10000,
224
- keepaliveCountMax: 10,
225
- readyTimeout: 30000,
226
- });
226
+ // Wait before reconnecting (exponential backoff)
227
+ if (attempt > 1) {
228
+ const waitTime = 1000 * Math.pow(2, attempt - 1); // 2s, 4s, 8s
229
+ this.log(`${TAB_A}${pc.yellow(`⏳ Waiting ${waitTime/1000}s before retry ${attempt}/${maxRetries}…`)}`);
230
+ await new Promise(r => setTimeout(r, waitTime));
231
+ }
232
+
233
+ await sftp.connect({
234
+ host: this.connection.host,
235
+ port: this.connection.port,
236
+ username: this.connection.user,
237
+ password: this.connection.password,
238
+ keepaliveInterval: 5000, // More frequent keepalive (5s instead of 10s)
239
+ keepaliveCountMax: 6, // Disconnect after 30s of no response
240
+ readyTimeout: 60000, // 60s timeout for initial connection
241
+ retries: 2, // Internal retries
242
+ retry_factor: 2,
243
+ retry_minTimeout: 2000,
244
+ });
245
+
246
+ if (sftp.client) {
247
+ sftp.client.setMaxListeners(50);
248
+ }
227
249
 
228
- if (sftp.client) {
229
- sftp.client.setMaxListeners(50);
250
+ this.log(`${TAB_A}${pc.green("✔ Reconnected to SFTP.")}`);
251
+ return; // Success
252
+ } catch (err) {
253
+ const msg = err?.message || String(err);
254
+ if (attempt === maxRetries) {
255
+ this.elog(pc.red(`❌ Failed to reconnect after ${maxRetries} attempts: ${msg}`));
256
+ throw err;
257
+ }
258
+ this.wlog(pc.yellow(`⚠ Reconnect attempt ${attempt} failed: ${msg}`));
259
+ }
260
+ }
261
+ }
262
+
263
+ /**
264
+ * Upload a file with progress reporting for large files.
265
+ * Uses fastPut for files > threshold, with automatic fallback to put on failure.
266
+ */
267
+ async _uploadFile(sftp, localPath, remotePath, rel, size) {
268
+ const LARGE_FILE_THRESHOLD = 5 * 1024 * 1024; // 5MB
269
+ const sizeMB = (size / (1024 * 1024)).toFixed(1);
270
+
271
+ // For small files, just use put
272
+ if (size < LARGE_FILE_THRESHOLD) {
273
+ await sftp.put(localPath, remotePath);
274
+ return;
230
275
  }
231
276
 
232
- this.log(`${TAB_A}${pc.green("✔ Reconnected to SFTP.")}`);
277
+ // For large files, try fastPut with progress
278
+ let lastReportedPercent = 0;
279
+ const shortRel = rel.length > 50 ? '...' + rel.slice(-47) : rel;
280
+
281
+ try {
282
+ await sftp.fastPut(localPath, remotePath, {
283
+ step: (transferred, chunk, total) => {
284
+ const percent = Math.floor((transferred / total) * 100);
285
+ // Only log at 25%, 50%, 75%, 100%
286
+ if (percent >= lastReportedPercent + 25) {
287
+ lastReportedPercent = Math.floor(percent / 25) * 25;
288
+ this.log(`${TAB_A}${pc.dim(` ↑ ${sizeMB}MB ${percent}%: ${shortRel}`)}`);
289
+ }
290
+ }
291
+ });
292
+ } catch (fastPutErr) {
293
+ // fastPut not supported by server, fall back to regular put
294
+ if (this.isVerbose) {
295
+ this.vlog(`${TAB_A}${pc.dim(` fastPut failed, using put: ${fastPutErr?.message}`)}`);
296
+ }
297
+ this.log(`${TAB_A}${pc.dim(` Uploading ${sizeMB}MB: ${shortRel}`)}`);
298
+ await sftp.put(localPath, remotePath);
299
+ }
233
300
  }
234
301
 
235
302
  // ---------------------------------------------------------
@@ -333,18 +400,23 @@ export class SftpPushSyncApp {
333
400
  }
334
401
 
335
402
  // ---------------------------------------------------------
336
- // Worker-Pool
403
+ // Worker-Pool with auto-reconnect
337
404
  // ---------------------------------------------------------
338
405
 
339
- async runTasks(items, workerCount, handler, label = "Tasks") {
406
+ async runTasks(items, workerCount, handler, label = "Tasks", sftp = null) {
340
407
  if (!items || items.length === 0) return;
341
408
 
342
409
  const total = items.length;
343
410
  let done = 0;
344
411
  let index = 0;
412
+ let failedCount = 0;
345
413
  const workers = [];
346
414
  const actualWorkers = Math.max(1, Math.min(workerCount, total));
347
415
 
416
+ // Mutex for reconnection (only one worker reconnects at a time)
417
+ let reconnecting = false;
418
+ let reconnectWaiters = 0;
419
+
348
420
  const worker = async () => {
349
421
  // eslint-disable-next-line no-constant-condition
350
422
  while (true) {
@@ -353,13 +425,82 @@ export class SftpPushSyncApp {
353
425
  index += 1;
354
426
  const item = items[i];
355
427
 
356
- try {
357
- await handler(item);
358
- } catch (err) {
359
- this.elog(
360
- pc.red(`${TAB_A}⚠️ Error in ${label}:`),
361
- err?.message || err
362
- );
428
+ let retries = 0;
429
+ const maxRetries = 5; // Increased from 2 to 5 for unstable servers
430
+
431
+ while (retries <= maxRetries) {
432
+ try {
433
+ await handler(item);
434
+ break; // Success, exit retry loop
435
+ } catch (err) {
436
+ const msg = err?.message || String(err);
437
+ const isConnectionError =
438
+ msg.includes("No SFTP connection") ||
439
+ msg.includes("ECONNRESET") ||
440
+ msg.includes("ETIMEDOUT") ||
441
+ msg.includes("ECONNREFUSED") ||
442
+ msg.includes("connection") ||
443
+ msg.includes("Channel open failure") ||
444
+ msg.includes("socket") ||
445
+ msg.includes("SSH");
446
+
447
+ if (isConnectionError && sftp && retries < maxRetries) {
448
+ // Wait if another worker is already reconnecting
449
+ let waitCount = 0;
450
+ reconnectWaiters++;
451
+ if (reconnecting && this.isVerbose) {
452
+ this.log(`${TAB_A}${pc.dim(`Worker waiting for reconnect (${reconnectWaiters} waiting)…`)}`);
453
+ }
454
+ while (reconnecting && waitCount < 120) { // Max 60 seconds wait
455
+ await new Promise(r => setTimeout(r, 500));
456
+ waitCount++;
457
+ // Log every 10 seconds while waiting
458
+ if (waitCount % 20 === 0 && this.isVerbose) {
459
+ this.log(`${TAB_A}${pc.dim(`Still waiting for reconnect… (${waitCount / 2}s)`)}`);
460
+ }
461
+ }
462
+ reconnectWaiters--;
463
+
464
+ // Check if reconnection is still needed
465
+ if (!await this._isConnected(sftp)) {
466
+ reconnecting = true;
467
+ this.log(`${TAB_A}${pc.yellow("⚠ Connection lost during " + label + ", reconnecting…")}`);
468
+ try {
469
+ await this._reconnect(sftp);
470
+ this.log(`${TAB_A}${pc.green("✔ Reconnected, resuming " + label + "…")}`);
471
+ } catch (reconnectErr) {
472
+ this.elog(pc.red(`${TAB_A}❌ Reconnect failed: ${reconnectErr?.message || reconnectErr}`));
473
+ reconnecting = false;
474
+ // Re-throw to trigger retry
475
+ throw reconnectErr;
476
+ } finally {
477
+ reconnecting = false;
478
+ }
479
+ }
480
+
481
+ retries++;
482
+ const retryDelay = 500 * retries;
483
+ if (this.isVerbose) {
484
+ this.log(`${TAB_A}${pc.dim(`Retry ${retries}/${maxRetries} for: ${item.rel || ''} (waiting ${retryDelay}ms)`)}`);
485
+ }
486
+ // Brief pause before retry
487
+ await new Promise(r => setTimeout(r, retryDelay));
488
+ // Retry the same item
489
+ continue;
490
+ }
491
+
492
+ // Log error and move on
493
+ this.elog(
494
+ pc.red(`${TAB_A}⚠️ Error in ${label} (attempt ${retries + 1}/${maxRetries + 1}):`),
495
+ msg
496
+ );
497
+
498
+ if (retries >= maxRetries) {
499
+ failedCount++;
500
+ this.elog(pc.red(`${TAB_A}❌ Failed after ${maxRetries + 1} attempts: ${item.rel || item.remotePath || ''}`));
501
+ }
502
+ break; // Exit retry loop
503
+ }
363
504
  }
364
505
 
365
506
  done += 1;
@@ -373,6 +514,9 @@ export class SftpPushSyncApp {
373
514
  workers.push(worker());
374
515
  }
375
516
  await Promise.all(workers);
517
+
518
+ // Return statistics
519
+ return { total, done, failed: failedCount };
376
520
  }
377
521
 
378
522
  // ---------------------------------------------------------
@@ -409,6 +553,7 @@ export class SftpPushSyncApp {
409
553
  if (total === 0) return;
410
554
 
411
555
  let current = 0;
556
+ let failedDirs = 0;
412
557
 
413
558
  for (const relDir of dirs) {
414
559
  current += 1;
@@ -422,24 +567,59 @@ export class SftpPushSyncApp {
422
567
  "Folders"
423
568
  );
424
569
 
425
- try {
426
- const exists = await sftp.exists(remoteDir);
427
- if (!exists) {
428
- await sftp.mkdir(remoteDir, true);
429
- this.dirStats.createdDirs += 1;
430
- this.vlog(`${TAB_A}${pc.dim("dir created:")} ${remoteDir}`);
431
- } else {
432
- this.vlog(`${TAB_A}${pc.dim("dir ok:")} ${remoteDir}`);
570
+ let retries = 0;
571
+ const maxRetries = 3;
572
+ let success = false;
573
+
574
+ while (retries <= maxRetries && !success) {
575
+ try {
576
+ const exists = await sftp.exists(remoteDir);
577
+ if (!exists) {
578
+ await sftp.mkdir(remoteDir, true);
579
+ this.dirStats.createdDirs += 1;
580
+ this.vlog(`${TAB_A}${pc.dim("dir created:")} ${remoteDir}`);
581
+ } else {
582
+ this.vlog(`${TAB_A}${pc.dim("dir ok:")} ${remoteDir}`);
583
+ }
584
+ success = true;
585
+ } catch (e) {
586
+ const msg = e?.message || String(e);
587
+ const isConnectionError =
588
+ msg.includes("No SFTP connection") ||
589
+ msg.includes("ECONNRESET") ||
590
+ msg.includes("ETIMEDOUT") ||
591
+ msg.includes("connection") ||
592
+ msg.includes("Channel open failure") ||
593
+ msg.includes("socket") ||
594
+ msg.includes("SSH");
595
+
596
+ if (isConnectionError && retries < maxRetries) {
597
+ this.log(`${TAB_A}${pc.yellow("⚠ Connection lost during directory preparation, reconnecting…")}`);
598
+ try {
599
+ await this._reconnect(sftp);
600
+ retries++;
601
+ await new Promise(r => setTimeout(r, 500 * retries));
602
+ continue; // Retry this directory
603
+ } catch (reconnectErr) {
604
+ this.elog(pc.red(`${TAB_A}❌ Reconnect failed: ${reconnectErr?.message || reconnectErr}`));
605
+ }
606
+ }
607
+
608
+ this.wlog(
609
+ pc.yellow("⚠️ Could not ensure directory:"),
610
+ remoteDir,
611
+ msg
612
+ );
613
+ failedDirs++;
614
+ break; // Move to next directory
433
615
  }
434
- } catch (e) {
435
- this.wlog(
436
- pc.yellow("⚠️ Could not ensure directory:"),
437
- remoteDir,
438
- e?.message || e
439
- );
440
616
  }
441
617
  }
442
618
 
619
+ if (failedDirs > 0) {
620
+ this.wlog(pc.yellow(`⚠️ ${failedDirs} directories could not be created`));
621
+ }
622
+
443
623
  this.updateProgress2("Prepare dirs: ", total, total, "done", "Folders");
444
624
  process.stdout.write("\n");
445
625
  this.progressActive = false;
@@ -450,7 +630,24 @@ export class SftpPushSyncApp {
450
630
  // ---------------------------------------------------------
451
631
 
452
632
  async cleanupEmptyDirs(sftp, rootDir, dryRun) {
453
- const recurse = async (dir) => {
633
+ // Track reconnect state at cleanup level
634
+ let reconnectNeeded = false;
635
+
636
+ const attemptReconnect = async () => {
637
+ if (reconnectNeeded) return false; // Already tried
638
+ reconnectNeeded = true;
639
+ this.log(`${TAB_A}${pc.yellow("⚠ Connection lost during cleanup, reconnecting…")}`);
640
+ try {
641
+ await this._reconnect(sftp);
642
+ reconnectNeeded = false;
643
+ return true;
644
+ } catch (err) {
645
+ this.elog(pc.red(`${TAB_A}❌ Reconnect during cleanup failed: ${err?.message || err}`));
646
+ return false;
647
+ }
648
+ };
649
+
650
+ const recurse = async (dir, depth = 0) => {
454
651
  this.dirStats.cleanupVisited += 1;
455
652
 
456
653
  const relForProgress = toPosix(path.relative(rootDir, dir)) || ".";
@@ -467,17 +664,37 @@ export class SftpPushSyncApp {
467
664
  const subdirs = [];
468
665
  let items;
469
666
 
470
- try {
471
- items = await sftp.list(dir);
472
- } catch (e) {
473
- this.wlog(
474
- pc.yellow("⚠️ Could not list directory during cleanup:"),
475
- dir,
476
- e?.message || e
477
- );
478
- return false;
667
+ // Try to list directory with reconnect on failure
668
+ let retries = 0;
669
+ while (retries <= 2) {
670
+ try {
671
+ items = await sftp.list(dir);
672
+ break;
673
+ } catch (e) {
674
+ const msg = e?.message || String(e);
675
+ const isConnectionError = msg.includes("No SFTP connection") ||
676
+ msg.includes("ECONNRESET") || msg.includes("connection");
677
+
678
+ if (isConnectionError && retries < 2) {
679
+ const reconnected = await attemptReconnect();
680
+ if (reconnected) {
681
+ retries++;
682
+ await new Promise(r => setTimeout(r, 500));
683
+ continue;
684
+ }
685
+ }
686
+
687
+ this.wlog(
688
+ pc.yellow("⚠️ Could not list directory during cleanup:"),
689
+ dir,
690
+ msg
691
+ );
692
+ return false;
693
+ }
479
694
  }
480
695
 
696
+ if (!items) return false;
697
+
481
698
  for (const item of items) {
482
699
  if (!item.name || item.name === "." || item.name === "..") continue;
483
700
  if (item.type === "d") {
@@ -490,7 +707,7 @@ export class SftpPushSyncApp {
490
707
  let allSubdirsEmpty = true;
491
708
  for (const sub of subdirs) {
492
709
  const full = path.posix.join(dir, sub.name);
493
- const subEmpty = await recurse(full);
710
+ const subEmpty = await recurse(full, depth + 1);
494
711
  if (!subEmpty) {
495
712
  allSubdirsEmpty = false;
496
713
  }
@@ -507,17 +724,34 @@ export class SftpPushSyncApp {
507
724
  );
508
725
  this.dirStats.cleanupDeleted += 1;
509
726
  } else {
510
- try {
511
- await sftp.rmdir(dir, false);
512
- this.log(`${TAB_A}${DEL} Removed empty directory: ${rel}`);
513
- this.dirStats.cleanupDeleted += 1;
514
- } catch (e) {
515
- this.wlog(
516
- pc.yellow("⚠️ Could not remove directory:"),
517
- dir,
518
- e?.message || e
519
- );
520
- return false;
727
+ let deleteRetries = 0;
728
+ while (deleteRetries <= 2) {
729
+ try {
730
+ await sftp.rmdir(dir, false);
731
+ this.log(`${TAB_A}${DEL} Removed empty directory: ${rel}`);
732
+ this.dirStats.cleanupDeleted += 1;
733
+ break;
734
+ } catch (e) {
735
+ const msg = e?.message || String(e);
736
+ const isConnectionError = msg.includes("No SFTP connection") ||
737
+ msg.includes("ECONNRESET") || msg.includes("connection");
738
+
739
+ if (isConnectionError && deleteRetries < 2) {
740
+ const reconnected = await attemptReconnect();
741
+ if (reconnected) {
742
+ deleteRetries++;
743
+ await new Promise(r => setTimeout(r, 500));
744
+ continue;
745
+ }
746
+ }
747
+
748
+ this.wlog(
749
+ pc.yellow("⚠️ Could not remove directory:"),
750
+ dir,
751
+ msg
752
+ );
753
+ return false;
754
+ }
521
755
  }
522
756
  }
523
757
  }
@@ -544,8 +778,44 @@ export class SftpPushSyncApp {
544
778
  // Hauptlauf
545
779
  // ---------------------------------------------------------
546
780
 
781
+ /**
782
+ * Format duration in human-readable format (mm:ss or hh:mm:ss)
783
+ */
784
+ _formatDuration(seconds) {
785
+ const totalSec = Math.floor(seconds);
786
+ const hours = Math.floor(totalSec / 3600);
787
+ const minutes = Math.floor((totalSec % 3600) / 60);
788
+ const secs = totalSec % 60;
789
+
790
+ if (hours > 0) {
791
+ return `${hours}:${String(minutes).padStart(2, '0')}:${String(secs).padStart(2, '0')}`;
792
+ }
793
+ return `${minutes}:${String(secs).padStart(2, '0')}`;
794
+ }
795
+
547
796
  async run() {
548
797
  const start = Date.now();
798
+
799
+ // Global error handlers to catch unexpected errors
800
+ const handleFatalError = (type, error) => {
801
+ const msg = error?.message || String(error);
802
+ const logMsg = `❌ FATAL ${type}: ${msg}`;
803
+ console.error(pc.red(logMsg));
804
+ if (this.logger) {
805
+ this.logger.writeLine(logMsg);
806
+ this.logger.writeLine(error?.stack || "No stack trace available");
807
+ this.logger.close();
808
+ }
809
+ process.exitCode = 1;
810
+ };
811
+
812
+ process.on('unhandledRejection', (reason) => {
813
+ handleFatalError('Unhandled Promise Rejection', reason);
814
+ });
815
+ process.on('uncaughtException', (error) => {
816
+ handleFatalError('Uncaught Exception', error);
817
+ });
818
+
549
819
  const {
550
820
  target,
551
821
  dryRun = false,
@@ -627,6 +897,9 @@ export class SftpPushSyncApp {
627
897
  this.isVerbose = logLevel === "verbose";
628
898
  this.isLaconic = logLevel === "laconic";
629
899
 
900
+ // Timestamps in Logfile
901
+ this.logTimestamps = configRaw.logTimestamps ?? false;
902
+
630
903
  // Progress-Konfig
631
904
  const PROGRESS = configRaw.progress ?? {};
632
905
  this.scanChunk = PROGRESS.scanChunk ?? (this.isVerbose ? 1 : 100);
@@ -708,6 +981,7 @@ export class SftpPushSyncApp {
708
981
  this.hashCache = await createHashCacheNDJSON({
709
982
  cachePath: ndjsonCachePath,
710
983
  namespace: target,
984
+ vlog: this.isVerbose ? (...m) => console.log(...m) : null,
711
985
  });
712
986
 
713
987
  // Logger
@@ -716,7 +990,7 @@ export class SftpPushSyncApp {
716
990
  const logFile = path.resolve(
717
991
  rawLogFilePattern.replace("{target}", target)
718
992
  );
719
- this.logger = new SyncLogger(logFile);
993
+ this.logger = new SyncLogger(logFile, { enableTimestamps: this.logTimestamps });
720
994
  await this.logger.init();
721
995
 
722
996
  // Header
@@ -726,7 +1000,7 @@ export class SftpPushSyncApp {
726
1000
  `🔐 SFTP Push-Synchronisation: sftp-push-sync v${pkg.version}`
727
1001
  )
728
1002
  );
729
- this.log(`${TAB_A}LogLevel: ${this.logLevel}`);
1003
+ this.log(`${TAB_A}LogLevel: ${this.logLevel}${this.logTimestamps ? " (timestamps enabled)" : ""}`);
730
1004
  this.log(`${TAB_A}Connection: ${pc.cyan(target)}`);
731
1005
  this.log(`${TAB_A}Worker: ${this.connection.workers}`);
732
1006
  this.log(
@@ -781,9 +1055,12 @@ export class SftpPushSyncApp {
781
1055
  username: this.connection.user,
782
1056
  password: this.connection.password,
783
1057
  // Keep-Alive to prevent server disconnection during long operations
784
- keepaliveInterval: 10000, // Send keepalive every 10 seconds
785
- keepaliveCountMax: 10, // Allow up to 10 missed keepalives before disconnect
786
- readyTimeout: 30000, // 30s timeout for initial connection
1058
+ keepaliveInterval: 5000, // Send keepalive every 5 seconds (more frequent for unstable servers)
1059
+ keepaliveCountMax: 6, // Allow up to 6 missed keepalives (30s total) before disconnect
1060
+ readyTimeout: 60000, // 60s timeout for initial connection
1061
+ retries: 2, // Internal retries
1062
+ retry_factor: 2,
1063
+ retry_minTimeout: 2000,
787
1064
  });
788
1065
  connected = true;
789
1066
 
@@ -818,10 +1095,11 @@ export class SftpPushSyncApp {
818
1095
  symbols: { ADD, CHA, tab_a: TAB_A },
819
1096
  });
820
1097
 
821
- const duration = ((Date.now() - start) / 1000).toFixed(2);
1098
+ const durationSec = (Date.now() - start) / 1000;
1099
+ const durationFormatted = this._formatDuration(durationSec);
822
1100
  this.log("");
823
1101
  this.log(pc.bold(pc.cyan("📊 Summary (bypass only):")));
824
- this.log(`${TAB_A}Duration: ${pc.green(duration + " s")}`);
1102
+ this.log(`${TAB_A}Duration: ${pc.green(durationFormatted)} (${durationSec.toFixed(1)}s)`);
825
1103
  return;
826
1104
  }
827
1105
 
@@ -912,11 +1190,29 @@ export class SftpPushSyncApp {
912
1190
  analyzeChunk: this.analyzeChunk,
913
1191
  updateProgress: (prefix, current, total, rel) =>
914
1192
  this.updateProgress2(prefix, current, total, rel, "Files"),
1193
+ log: this.isVerbose ? (...m) => this.log(...m) : null,
915
1194
  });
916
1195
 
917
1196
  toAdd = diffResult.toAdd;
918
1197
  toUpdate = diffResult.toUpdate;
919
1198
 
1199
+ // Report large files that skipped hash comparison
1200
+ if (diffResult.largeFilesSkipped && diffResult.largeFilesSkipped.length > 0 && this.isVerbose) {
1201
+ const totalSizeMB = diffResult.largeFilesSkipped.reduce((sum, f) => sum + f.size, 0) / (1024 * 1024);
1202
+ this.log(` ℹ ${diffResult.largeFilesSkipped.length} large files (${totalSizeMB.toFixed(0)}MB total) skipped hash compare (same size/date)`);
1203
+ }
1204
+
1205
+ // Report compare errors if any
1206
+ if (diffResult.compareErrors && diffResult.compareErrors.length > 0) {
1207
+ this.log("");
1208
+ this.wlog(pc.yellow(`⚠ ${diffResult.compareErrors.length} files had compare errors (will be re-uploaded):`));
1209
+ if (this.isVerbose) {
1210
+ for (const { rel, error } of diffResult.compareErrors) {
1211
+ this.wlog(pc.yellow(` - ${rel}: ${error}`));
1212
+ }
1213
+ }
1214
+ }
1215
+
920
1216
  if (toAdd.length === 0 && toUpdate.length === 0) {
921
1217
  this.log("");
922
1218
  this.log(`${TAB_A}No differences found. Everything is up to date.`);
@@ -985,32 +1281,34 @@ export class SftpPushSyncApp {
985
1281
  await this.runTasks(
986
1282
  toAdd,
987
1283
  this.connection.workers,
988
- async ({ local: l, remotePath }) => {
1284
+ async ({ local: l, remotePath, rel }) => {
989
1285
  const remoteDir = path.posix.dirname(remotePath);
990
1286
  try {
991
1287
  await sftp.mkdir(remoteDir, true);
992
1288
  } catch {
993
1289
  // Directory may already exist
994
1290
  }
995
- await sftp.put(l.localPath, remotePath);
1291
+ await this._uploadFile(sftp, l.localPath, remotePath, rel, l.size);
996
1292
  },
997
- "Uploads (new)"
1293
+ "Uploads (new)",
1294
+ sftp
998
1295
  );
999
1296
 
1000
1297
  // Updates
1001
1298
  await this.runTasks(
1002
1299
  toUpdate,
1003
1300
  this.connection.workers,
1004
- async ({ local: l, remotePath }) => {
1301
+ async ({ local: l, remotePath, rel }) => {
1005
1302
  const remoteDir = path.posix.dirname(remotePath);
1006
1303
  try {
1007
1304
  await sftp.mkdir(remoteDir, true);
1008
1305
  } catch {
1009
1306
  // Directory may already exist
1010
1307
  }
1011
- await sftp.put(l.localPath, remotePath);
1308
+ await this._uploadFile(sftp, l.localPath, remotePath, rel, l.size);
1012
1309
  },
1013
- "Uploads (update)"
1310
+ "Uploads (update)",
1311
+ sftp
1014
1312
  );
1015
1313
 
1016
1314
  // Deletes
@@ -1028,7 +1326,8 @@ export class SftpPushSyncApp {
1028
1326
  );
1029
1327
  }
1030
1328
  },
1031
- "Deletes"
1329
+ "Deletes",
1330
+ sftp
1032
1331
  );
1033
1332
  } else {
1034
1333
  this.log("");
@@ -1055,7 +1354,8 @@ export class SftpPushSyncApp {
1055
1354
  await this.cleanupEmptyDirs(sftp, this.connection.remoteRoot, dryRun);
1056
1355
  }
1057
1356
 
1058
- const duration = ((Date.now() - start) / 1000).toFixed(2);
1357
+ const durationSec = (Date.now() - start) / 1000;
1358
+ const durationFormatted = this._formatDuration(durationSec);
1059
1359
 
1060
1360
  // Save cache and close
1061
1361
  await this.hashCache.save();
@@ -1065,7 +1365,7 @@ export class SftpPushSyncApp {
1065
1365
  this.log(hr1());
1066
1366
  this.log("");
1067
1367
  this.log(pc.bold(pc.cyan("📊 Summary:")));
1068
- this.log(`${TAB_A}Duration: ${pc.green(duration + " s")}`);
1368
+ this.log(`${TAB_A}Duration: ${pc.green(durationFormatted)} (${durationSec.toFixed(1)}s)`);
1069
1369
  this.log(`${TAB_A}${ADD} Added : ${toAdd.length}`);
1070
1370
  this.log(`${TAB_A}${CHA} Changed: ${toUpdate.length}`);
1071
1371
  this.log(`${TAB_A}${DEL} Deleted: ${toDelete.length}`);
@@ -1120,8 +1420,11 @@ export class SftpPushSyncApp {
1120
1420
  if (this.hashCache?.close) {
1121
1421
  await this.hashCache.close();
1122
1422
  }
1123
- } catch {
1124
- // ignore
1423
+ } catch (e) {
1424
+ // Cache close failed during error cleanup
1425
+ if (this.isVerbose) {
1426
+ this.vlog(`${TAB_A}${pc.dim(`Cache close during cleanup failed: ${e?.message || e}`)}`)
1427
+ }
1125
1428
  }
1126
1429
  } finally {
1127
1430
  try {
@@ -1,9 +1,9 @@
1
1
  /**
2
2
  * SyncLogger.mjs
3
- *
3
+ *
4
4
  * @author Carsten Nichte, 2025 / https://carsten-nichte.de/
5
- *
6
- */
5
+ *
6
+ */
7
7
  // src/core/SyncLogger.mjs
8
8
  import fs from "fs";
9
9
  import fsp from "fs/promises";
@@ -14,9 +14,10 @@ import path from "path";
14
14
  * und entfernt ANSI-Farbcodes.
15
15
  */
16
16
  export class SyncLogger {
17
- constructor(filePath) {
17
+ constructor(filePath, options = {}) {
18
18
  this.filePath = filePath;
19
19
  this.stream = null;
20
+ this.enableTimestamps = options.enableTimestamps ?? false;
20
21
  }
21
22
 
22
23
  async init() {
@@ -31,13 +32,24 @@ export class SyncLogger {
31
32
  });
32
33
  }
33
34
 
35
+ /**
36
+ * Returns current timestamp in ISO format: [YYYY-MM-DD HH:mm:ss.SSS]
37
+ */
38
+ _getTimestamp() {
39
+ const now = new Date();
40
+ const pad = (n, len = 2) => String(n).padStart(len, '0');
41
+ return `[${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())} ${pad(now.getHours())}:${pad(now.getMinutes())}:${pad(now.getSeconds())}.${pad(now.getMilliseconds(), 3)}]`;
42
+ }
43
+
34
44
  writeLine(line) {
35
45
  if (!this.stream) return;
36
46
  const text = typeof line === "string" ? line : String(line);
37
47
  const clean = text.replace(/\x1b\[[0-9;]*m/g, "");
38
48
 
49
+ const prefix = this.enableTimestamps ? this._getTimestamp() + " " : "";
50
+
39
51
  try {
40
- this.stream.write(clean + "\n");
52
+ this.stream.write(prefix + clean + "\n");
41
53
  } catch {
42
54
  // Stream schon zu → ignorieren
43
55
  }
@@ -49,4 +61,4 @@ export class SyncLogger {
49
61
  this.stream = null;
50
62
  }
51
63
  }
52
- }
64
+ }
@@ -23,6 +23,8 @@ import path from "path";
23
23
  * - analyzeChunk: Progress-Schrittgröße
24
24
  * - updateProgress(prefix, current, total, rel): optional
25
25
  * - concurrency: Max parallele Vergleiche (default: 5)
26
+ * - log: optional logging function for errors/warnings
27
+ * - maxSizeForHash: Files larger than this skip hash comparison (default: 50MB)
26
28
  */
27
29
  export async function analyseDifferences({
28
30
  local,
@@ -34,7 +36,13 @@ export async function analyseDifferences({
34
36
  analyzeChunk = 10,
35
37
  updateProgress,
36
38
  concurrency = 10,
39
+ log,
40
+ maxSizeForHash = 50 * 1024 * 1024, // 50MB default
37
41
  }) {
42
+ // Track errors for summary
43
+ const compareErrors = [];
44
+ // Track large files skipped
45
+ const largeFilesSkipped = [];
38
46
  const toAdd = [];
39
47
  const toUpdate = [];
40
48
 
@@ -45,6 +53,7 @@ export async function analyseDifferences({
45
53
  // Phase 1: Schneller Vorab-Check ohne SFTP
46
54
  // - Dateien nur lokal → direkt zu toAdd
47
55
  // - Size-Vergleich für existierende Dateien
56
+ // - Große Dateien: nur MTime-Vergleich (kein Hash-Download)
48
57
  const keysNeedContentCompare = [];
49
58
 
50
59
  for (const rel of localKeys) {
@@ -58,21 +67,46 @@ export async function analyseDifferences({
58
67
  } else if (l.size !== r.size) {
59
68
  // Size unterschiedlich → Changed (kein SFTP-Call nötig)
60
69
  toUpdate.push({ rel, local: l, remote: r, remotePath });
70
+ // } else if (l.size > maxSizeForHash) {
71
+ // // Große Datei mit gleicher Size: nur MTime vergleichen
72
+ // // Remote modifyTime ist String wie "2026-03-05", local mtimeMs ist Timestamp
73
+ // const localDate = new Date(l.mtimeMs).toISOString().split('T')[0];
74
+ // const remoteDate = r.modifyTime ? r.modifyTime.split('T')[0] : '';
75
+ //
76
+ // if (localDate > remoteDate) {
77
+ // // Local ist neuer → Changed
78
+ // toUpdate.push({ rel, local: l, remote: r, remotePath });
79
+ // if (log) {
80
+ // const sizeMB = (l.size / (1024 * 1024)).toFixed(1);
81
+ // log(` ℹ Large file (${sizeMB}MB) newer locally: ${rel}`);
82
+ // }
83
+ // } else {
84
+ // largeFilesSkipped.push({ rel, size: l.size });
85
+ // }
61
86
  } else {
62
- // Size gleich → Content-Vergleich nötig
87
+ // Size gleich, normale Größe → Content-Vergleich nötig
63
88
  keysNeedContentCompare.push(rel);
64
89
  }
65
90
 
66
91
  checked++;
67
92
  if (updateProgress && checked % analyzeChunk === 0) {
68
- updateProgress("Analyse (Size): ", checked, totalToCheck, rel);
93
+ updateProgress("Analyse (quick): ", checked, totalToCheck, rel);
69
94
  }
70
95
  }
71
96
 
97
+ // Final progress update for Phase 1
98
+ if (updateProgress) {
99
+ updateProgress("Analyse (quick): ", totalToCheck, totalToCheck, "done");
100
+ }
101
+
72
102
  // Phase 2: Content-Vergleich in echten Batches
73
- // Nur für Dateien mit gleicher Size
103
+ // Nur für Dateien mit gleicher Size (und unter maxSizeForHash)
74
104
  const totalContentCompare = keysNeedContentCompare.length;
75
105
 
106
+ if (totalContentCompare > 0 && log) {
107
+ log(` → ${totalContentCompare} files need content comparison`);
108
+ }
109
+
76
110
  for (let i = 0; i < totalContentCompare; i += concurrency) {
77
111
  const batch = keysNeedContentCompare.slice(i, i + concurrency);
78
112
 
@@ -114,8 +148,14 @@ export async function analyseDifferences({
114
148
  : null;
115
149
  }
116
150
  } catch (err) {
117
- // Bei Fehler als changed markieren (sicherer)
118
- return { rel, local: l, remote: r, remotePath, changed: true };
151
+ // Log the error so user can see what's happening
152
+ const errMsg = err?.message || String(err);
153
+ compareErrors.push({ rel, error: errMsg });
154
+ if (log) {
155
+ log(` ⚠ Compare error for ${rel}: ${errMsg}`);
156
+ }
157
+ // Mark as changed (sicherer) - file will be re-uploaded
158
+ return { rel, local: l, remote: r, remotePath, changed: true, hadError: true };
119
159
  }
120
160
  })
121
161
  );
@@ -127,14 +167,14 @@ export async function analyseDifferences({
127
167
  }
128
168
  }
129
169
 
130
- // Progress update
170
+ // Progress update - show as separate progress (doesn't jump back)
131
171
  const progressCount = Math.min(i + batch.length, totalContentCompare);
132
172
  if (updateProgress) {
133
- updateProgress("Analyse (Content): ", checked + progressCount, totalToCheck + totalContentCompare, batch[batch.length - 1]);
173
+ updateProgress("Analyse (hash): ", progressCount, totalContentCompare, batch[batch.length - 1]);
134
174
  }
135
175
  }
136
176
 
137
- return { toAdd, toUpdate };
177
+ return { toAdd, toUpdate, compareErrors, largeFilesSkipped };
138
178
  }
139
179
 
140
180
  /**
@@ -35,18 +35,62 @@ export function hashLocalFile(filePath) {
35
35
 
36
36
  /**
37
37
  * Streaming-SHA256 für Remote-Datei via ssh2-sftp-client
38
+ * Mit IDLE-Timeout: nur wenn keine Daten mehr fließen für X Sekunden.
39
+ * Große Dateien werden korrekt behandelt - solange Daten ankommen, kein Timeout.
40
+ *
41
+ * @param {Object} sftp - SFTP client
42
+ * @param {string} remotePath - Remote file path
43
+ * @param {number} idleTimeoutMs - Timeout in ms when NO data is received (default: 60000)
44
+ * @param {number} fileSizeBytes - File size (for logging)
38
45
  */
39
- export async function hashRemoteFile(sftp, remotePath) {
46
+ export async function hashRemoteFile(sftp, remotePath, idleTimeoutMs = 60000, fileSizeBytes = 0) {
40
47
  const hash = createHash("sha256");
48
+ let lastDataTime = Date.now();
49
+ let totalReceived = 0;
50
+ let timeoutId = null;
51
+ let rejectFn = null;
52
+
53
+ // Promise that rejects on idle timeout
54
+ const idleTimeoutPromise = new Promise((_, reject) => {
55
+ rejectFn = reject;
56
+
57
+ const checkIdle = () => {
58
+ const idleTime = Date.now() - lastDataTime;
59
+ if (idleTime >= idleTimeoutMs) {
60
+ const receivedMB = (totalReceived / (1024 * 1024)).toFixed(1);
61
+ reject(new Error(`Idle timeout (${Math.round(idleTimeoutMs/1000)}s no data) at ${receivedMB}MB for ${remotePath}`));
62
+ } else {
63
+ // Check again in 5 seconds
64
+ timeoutId = setTimeout(checkIdle, 5000);
65
+ }
66
+ };
67
+
68
+ // Start checking after initial timeout
69
+ timeoutId = setTimeout(checkIdle, idleTimeoutMs);
70
+ });
41
71
 
42
72
  const writable = new Writable({
43
73
  write(chunk, enc, cb) {
74
+ lastDataTime = Date.now(); // Reset idle timer on each chunk
75
+ totalReceived += chunk.length;
44
76
  hash.update(chunk);
45
77
  cb();
46
78
  },
47
79
  });
48
80
 
49
- await sftp.get(remotePath, writable);
81
+ try {
82
+ // Race between download and idle timeout
83
+ await Promise.race([
84
+ sftp.get(remotePath, writable),
85
+ idleTimeoutPromise,
86
+ ]);
87
+ } finally {
88
+ // Clean up timeout
89
+ if (timeoutId) {
90
+ clearTimeout(timeoutId);
91
+ }
92
+ }
93
+
50
94
  return hash.digest("hex");
51
95
  }
52
96
 
@@ -60,8 +104,9 @@ export async function hashRemoteFile(sftp, remotePath) {
60
104
  * @param {string} options.cachePath - Path to the NDJSON file (e.g., ".sync-cache.prod.ndjson")
61
105
  * @param {string} options.namespace - Namespace for keys (e.g., "prod")
62
106
  * @param {number} options.autoSaveInterval - Save after this many changes (default: 1000)
107
+ * @param {Function} options.vlog - Optional verbose logging function
63
108
  */
64
- export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInterval = 1000 }) {
109
+ export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInterval = 1000, vlog }) {
65
110
  const ns = namespace || "default";
66
111
 
67
112
  // In-memory storage
@@ -83,6 +128,7 @@ export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInte
83
128
  await fsp.access(cachePath);
84
129
  } catch {
85
130
  // File doesn't exist - start fresh
131
+ if (vlog) vlog(` Cache file not found, starting fresh: ${cachePath}`);
86
132
  return;
87
133
  }
88
134
 
@@ -192,7 +238,8 @@ export async function createHashCacheNDJSON({ cachePath, namespace, autoSaveInte
192
238
 
193
239
  // Cache miss or stale: compute new hash
194
240
  const filePath = meta.fullPath || meta.remotePath;
195
- const hash = await hashRemoteFile(sftp, filePath);
241
+ // Pass file size for dynamic timeout calculation
242
+ const hash = await hashRemoteFile(sftp, filePath, 60000, meta.size || 0);
196
243
 
197
244
  remoteCache.set(key, {
198
245
  size: meta.size,