sftp-push-sync 1.0.16 → 1.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -65,26 +65,65 @@ Create a `sync.config.json` in the root folder of your project:
65
65
  },
66
66
  "include": [],
67
67
  "exclude": ["**/.DS_Store", "**/.git/**", "**/node_modules/**"],
68
- "textExtensions": [
69
- ".html",
70
- ".xml",
71
- ".txt",
72
- ".json",
73
- ".js",
74
- ".css",
75
- ".md",
76
- ".svg"
77
- ],
68
+ "textExtensions": [".html",".xml",".txt",".json",".js",".css",".md",".svg"],
69
+ "mediaExtensions": [".jpg",".jpeg",".png",".webp",".gif",".avif",".tif",".tiff",".mp4",".mov",".m4v",".mp3",".wav",".flac"],
78
70
  "progress": {
79
71
  "scanChunk": 10,
80
72
  "analyzeChunk": 1
81
73
  },
82
74
  "logLevel": "normal",
75
+ "logFile": ".sftp-push-sync.{target}.log",
83
76
  "uploadList": [],
84
77
  "downloadList": ["download-counter.json"]
85
78
  }
86
79
  ```
87
80
 
81
+ ### CLI Usage
82
+
83
+ ```bash
84
+ # Normal synchronisation
85
+ node bin/sftp-push-sync.mjs staging
86
+
87
+ # Consider normal synchronisation + upload list
88
+ node bin/sftp-push-sync.mjs staging --upload-list
89
+
90
+ # Only lists, no standard synchronisation
91
+ node bin/sftp-push-sync.mjs staging --skip-sync --upload-list
92
+ node bin/sftp-push-sync.mjs staging --skip-sync --download-list
93
+ node bin/sftp-push-sync.mjs staging --skip-sync --upload-list --download-list
94
+
95
+ # (optional) only run lists dry
96
+ node bin/sftp-push-sync.mjs staging --skip-sync --upload-list --dry-run
97
+ ```
98
+
99
+ - Can be conveniently started via the scripts in `package.json`:
100
+
101
+ ```bash
102
+ # For example
103
+ npm run sync:staging
104
+ # or short
105
+ npm run ss
106
+ ```
107
+
108
+ If you have stored the scripts in `package.json` as follows:
109
+
110
+ ```json
111
+
112
+ "scripts": {
113
+ "sync:staging": "sftp-push-sync staging",
114
+ "sync:staging:dry": "sftp-push-sync staging --dry-run",
115
+ "ss": "npm run sync:staging",
116
+ "ssd": "npm run sync:staging:dry",
117
+
118
+ "sync:prod": "sftp-push-sync prod",
119
+ "sync:prod:dry": "sftp-push-sync prod --dry-run",
120
+ "sp": "npm run sync:prod",
121
+ "spd": "npm run sync:prod:dry",
122
+ },
123
+ ```
124
+
125
+ The dry run is a great way to compare files and fill the cache.
126
+
88
127
  ### special uploads / downloads
89
128
 
90
129
  A list of files that are excluded from the sync comparison and can be downloaded or uploaded separately.
@@ -113,41 +152,45 @@ sftp-push-sync prod --download-list # then do
113
152
  Logging can also be configured.
114
153
 
115
154
  - `logLevel` - normal, verbose, laconic.
155
+ - `logFile` - an optional log file path; the `{target}` placeholder is replaced by the target name.
116
156
  - `scanChunk` - After how many elements should a log output be generated during scanning?
117
157
  - `analyzeChunk` - After how many elements should a log output be generated during analysis?
118
158
 
119
159
  For >100k files, use analyzeChunk = 10 or 50, otherwise the TTY output itself is a relevant factor.
120
160
 
121
- ## NPM Scripts
122
-
123
- - Can be conveniently started via the scripts in `package.json`:
124
-
125
- ```bash
126
- # For example
127
- npm run sync:staging
128
- # or short
129
- npm run ss
161
+ ### Wildcards
162
+
163
+ Examples of wildcards for `include`, `exclude`, `uploadList` and `downloadList`:
164
+
165
+ - `"content/**"` - everything below `content/`
166
+ - `".html", ".htm", ".md", ".txt", ".json"` - only certain file extensions
167
+ - `"**/*.html"` - all HTML files
168
+ - `"**/*.md"` - all Markdown files
169
+ - `"content/**/*.md"` - only Markdown in `content/`
170
+ - `"static/images/**/*.jpg"`
171
+ - `"**/thumb-*.*"` - thumb images everywhere
172
+ - `"**/*-draft.*"` - files with `-draft` before the extension
173
+ - `"content/**/*.md"` - all Markdown files
174
+ - `"config/**"` - complete configuration
175
+ - `"static/images/covers/**"` - cover images only
176
+ - `"logs/**/*.log"` - all logs from logs/
177
+ - `"reports/**/*.xlsx"`
178
+
179
+ Practical excludes:
180
+
181
+ ```txt
182
+ "exclude": [
183
+ ".git/**", // complete .git folder
184
+ ".idea/**", // JetBrains
185
+ "node_modules/**", // Node dependencies
186
+ "dist/**", // Build Output
187
+ "**/*.map", // Source Maps
188
+ "**/~*", // Emacs/editor backups (~ files)
189
+ "**/#*#", // more editor backups
190
+ "**/.DS_Store" // macOS Finder metadata
191
+ ]
130
192
  ```
131
193
 
132
- If you have stored the scripts in `package.json` as follows:
133
-
134
- ```json
135
-
136
- "scripts": {
137
- "sync:staging": "sftp-push-sync staging",
138
- "sync:staging:dry": "sftp-push-sync staging --dry-run",
139
- "ss": "npm run sync:staging",
140
- "ssd": "npm run sync:staging:dry",
141
-
142
- "sync:prod": "sftp-push-sync prod",
143
- "sync:prod:dry": "sftp-push-sync prod --dry-run",
144
- "sp": "npm run sync:prod",
145
- "spd": "npm run sync:prod:dry",
146
- },
147
- ```
148
-
149
- The dry run is a great way to compare files and fill the cache.
150
-
151
194
  ## Which files are needed?
152
195
 
153
196
  - `sync.config.json` - The configuration file (with passwords in plain text, so please leave it out of the git repository)
@@ -155,10 +198,11 @@ The dry run is a great way to compare files and fill the cache.
155
198
  ## Which files are created?
156
199
 
157
200
  - The cache files: `.sync-cache.*.json`
201
+ - The log file: `.sftp-push-sync.{target}.log` (Optional, overwritten with each run)
158
202
 
159
- You can safely delete the local cache at any time. The first analysis will then take longer again (because remote hashes will be streamed again). After that, everything will run fast.
203
+ You can safely delete the local cache at any time. The first analysis will then take longer again, because remote hashes will be streamed again. After that, everything will run fast.
160
204
 
161
- The first run always takes a while, especially with lots of images – so be patient! Once the cache is full, it will be faster.
205
+ Note: The first run always takes a while, especially with lots of images – so be patient! Once the cache is full, it will be faster.
162
206
 
163
207
  ## Example Output
164
208
 
@@ -48,7 +48,7 @@ const pkg = require("../package.json");
48
48
  // Colors for the State (works on dark + light background)
49
49
  const ADD = pc.green("+"); // Added
50
50
  const CHA = pc.yellow("~"); // Changed
51
- const DEL = pc.red("-"); // Deleted
51
+ const DEL = pc.red("-"); // Deleted
52
52
  const EXC = pc.redBright("-"); // Excluded
53
53
 
54
54
  const hr1 = () => "─".repeat(65); // horizontal line -
@@ -65,6 +65,7 @@ const TARGET = args[0];
65
65
  const DRY_RUN = args.includes("--dry-run");
66
66
  const RUN_UPLOAD_LIST = args.includes("--upload-list");
67
67
  const RUN_DOWNLOAD_LIST = args.includes("--download-list");
68
+ const SKIP_SYNC = args.includes("--skip-sync");
68
69
 
69
70
  // logLevel override via CLI (optional)
70
71
  let cliLogLevel = null;
@@ -77,6 +78,14 @@ if (!TARGET) {
77
78
  process.exit(1);
78
79
  }
79
80
 
81
+ // Wenn jemand --skip-sync ohne Listen benutzt → sinnlos, also abbrechen
82
+ if (SKIP_SYNC && !RUN_UPLOAD_LIST && !RUN_DOWNLOAD_LIST) {
83
+ console.error(
84
+ pc.red("❌ --skip-sync requires at least --upload-list or --download-list.")
85
+ );
86
+ process.exit(1);
87
+ }
88
+
80
89
  // ---------------------------------------------------------------------------
81
90
  // Load config file
82
91
  // ---------------------------------------------------------------------------
@@ -101,6 +110,93 @@ if (!CONFIG_RAW.connections || typeof CONFIG_RAW.connections !== "object") {
101
110
  process.exit(1);
102
111
  }
103
112
 
113
+ // ---------------------------------------------------------------------------
114
+ // Logging helpers (Terminal + optional Logfile)
115
+ // ---------------------------------------------------------------------------
116
+ // Default: .sync.{TARGET}.log, kann via config.logFile überschrieben werden
117
+ const DEFAULT_LOG_FILE = `.sync.${TARGET}.log`;
118
+ const rawLogFilePattern = CONFIG_RAW.logFile || DEFAULT_LOG_FILE;
119
+ const LOG_FILE = path.resolve(
120
+ rawLogFilePattern.replace("{target}", TARGET)
121
+ );
122
+ let LOG_STREAM = null;
123
+
124
+ /** einmalig Logfile-Stream öffnen */
125
+ function openLogFile() {
126
+ if (!LOG_FILE) return;
127
+ if (!LOG_STREAM) {
128
+ LOG_STREAM = fs.createWriteStream(LOG_FILE, {
129
+ flags: "w", // pro Lauf überschreiben
130
+ encoding: "utf8",
131
+ });
132
+ }
133
+ }
134
+
135
+ /** eine fertige Zeile ins Logfile schreiben (ohne Einfluss auf Terminal) */
136
+ function writeLogLine(line) {
137
+ if (!LOG_STREAM) return;
138
+ // ANSI-Farbsequenzen aus der Log-Zeile entfernen
139
+ const clean =
140
+ typeof line === "string"
141
+ ? line.replace(/\x1b\[[0-9;]*m/g, "")
142
+ : String(line).replace(/\x1b\[[0-9;]*m/g, "");
143
+ try {
144
+ LOG_STREAM.write(clean + "\n");
145
+ } catch {
146
+ // falls Stream schon zu ist, einfach ignorieren – verhindert ERR_STREAM_WRITE_AFTER_END
147
+ }
148
+ }
149
+
150
+ /** Konsole + Logfile (normal) */
151
+ function rawConsoleLog(...msg) {
152
+ clearProgressLine();
153
+ console.log(...msg);
154
+ const line = msg
155
+ .map((m) => (typeof m === "string" ? m : String(m)))
156
+ .join(" ");
157
+ writeLogLine(line);
158
+ }
159
+
160
+ function rawConsoleError(...msg) {
161
+ clearProgressLine();
162
+ console.error(...msg);
163
+ const line = msg
164
+ .map((m) => (typeof m === "string" ? m : String(m)))
165
+ .join(" ");
166
+ writeLogLine("[ERROR] " + line);
167
+ }
168
+
169
+ function rawConsoleWarn(...msg) {
170
+ clearProgressLine();
171
+ console.warn(...msg);
172
+ const line = msg
173
+ .map((m) => (typeof m === "string" ? m : String(m)))
174
+ .join(" ");
175
+ writeLogLine("[WARN] " + line);
176
+ }
177
+
178
+ // High-level Helfer, die du überall im Script schon verwendest:
179
+ function log(...msg) {
180
+ rawConsoleLog(...msg);
181
+ }
182
+
183
+ function vlog(...msg) {
184
+ if (!IS_VERBOSE) return;
185
+ rawConsoleLog(...msg);
186
+ }
187
+
188
+ function elog(...msg) {
189
+ rawConsoleError(...msg);
190
+ }
191
+
192
+ function wlog(...msg) {
193
+ rawConsoleWarn(...msg);
194
+ }
195
+
196
+ // ---------------------------------------------------------------------------
197
+ // Connection
198
+ // ---------------------------------------------------------------------------
199
+
104
200
  const TARGET_CONFIG = CONFIG_RAW.connections[TARGET];
105
201
  if (!TARGET_CONFIG) {
106
202
  console.error(
@@ -146,6 +242,38 @@ const ANALYZE_CHUNK = PROGRESS.analyzeChunk ?? (IS_VERBOSE ? 1 : 10);
146
242
  const INCLUDE = CONFIG_RAW.include ?? [];
147
243
  const BASE_EXCLUDE = CONFIG_RAW.exclude ?? [];
148
244
 
245
+ // textExtensions
246
+ const TEXT_EXT = CONFIG_RAW.textExtensions ?? [
247
+ ".html",
248
+ ".htm",
249
+ ".xml",
250
+ ".txt",
251
+ ".json",
252
+ ".js",
253
+ ".mjs",
254
+ ".cjs",
255
+ ".css",
256
+ ".md",
257
+ ".svg",
258
+ ];
259
+
260
+ // mediaExtensions – aktuell nur Meta, aber schon konfigurierbar
261
+ const MEDIA_EXT = CONFIG_RAW.mediaExtensions ?? [
262
+ ".jpg",
263
+ ".jpeg",
264
+ ".png",
265
+ ".gif",
266
+ ".webp",
267
+ ".avif",
268
+ ".mp4",
269
+ ".mov",
270
+ ".mp3",
271
+ ".wav",
272
+ ".ogg",
273
+ ".flac",
274
+ ".pdf",
275
+ ];
276
+
149
277
  // Special: Lists for targeted uploads/downloads
150
278
  function normalizeList(list) {
151
279
  if (!Array.isArray(list)) return [];
@@ -163,25 +291,13 @@ const UPLOAD_LIST = normalizeList(CONFIG_RAW.uploadList ?? []);
163
291
  const DOWNLOAD_LIST = normalizeList(CONFIG_RAW.downloadList ?? []);
164
292
 
165
293
  // Effektive Exclude-Liste: explizites exclude + Upload/Download-Listen
294
+ // → diese Dateien werden im „normalen“ Sync nicht angerührt,
295
+ // sondern nur über die Bypass-Mechanik behandelt.
166
296
  const EXCLUDE = [...BASE_EXCLUDE, ...UPLOAD_LIST, ...DOWNLOAD_LIST];
167
297
 
168
298
  // List of ALL files that were excluded due to uploadList/downloadList
169
299
  const AUTO_EXCLUDED = new Set();
170
300
 
171
- const TEXT_EXT = CONFIG_RAW.textExtensions ?? [
172
- ".html",
173
- ".htm",
174
- ".xml",
175
- ".txt",
176
- ".json",
177
- ".js",
178
- ".mjs",
179
- ".cjs",
180
- ".css",
181
- ".md",
182
- ".svg",
183
- ];
184
-
185
301
  // Cache file name per connection
186
302
  const syncCacheName = TARGET_CONFIG.syncCache || `.sync-cache.${TARGET}.json`;
187
303
  const CACHE_PATH = path.resolve(syncCacheName);
@@ -192,7 +308,7 @@ const CACHE_PATH = path.resolve(syncCacheName);
192
308
 
193
309
  let CACHE = {
194
310
  version: 1,
195
- local: {}, // key: "<TARGET>:<relPath>" -> { size, mtimeMs, hash }
311
+ local: {}, // key: "<TARGET>:<relPath>" -> { size, mtimeMs, hash }
196
312
  remote: {}, // key: "<TARGET>:<relPath>" -> { size, modifyTime, hash }
197
313
  };
198
314
 
@@ -245,7 +361,7 @@ function clearProgressLine() {
245
361
 
246
362
  // Zwei Progress-Zeilen ohne zusätzliche Newlines leeren:
247
363
  // Cursor steht nach updateProgress2() auf der ersten Zeile.
248
- process.stdout.write("\r"); // an Zeilenanfang
364
+ process.stdout.write("\r"); // an Zeilenanfang
249
365
  process.stdout.write("\x1b[2K"); // erste Zeile löschen
250
366
  process.stdout.write("\x1b[1B"); // eine Zeile nach unten
251
367
  process.stdout.write("\x1b[2K"); // zweite Zeile löschen
@@ -258,27 +374,6 @@ function toPosix(p) {
258
374
  return p.split(path.sep).join("/");
259
375
  }
260
376
 
261
- function log(...msg) {
262
- clearProgressLine();
263
- console.log(...msg);
264
- }
265
-
266
- function vlog(...msg) {
267
- if (!IS_VERBOSE) return;
268
- clearProgressLine();
269
- console.log(...msg);
270
- }
271
-
272
- function elog(...msg) {
273
- clearProgressLine();
274
- console.error(...msg);
275
- }
276
-
277
- function wlog(...msg) {
278
- clearProgressLine();
279
- console.warn(...msg);
280
- }
281
-
282
377
  function matchesAny(patterns, relPath) {
283
378
  if (!patterns || patterns.length === 0) return false;
284
379
  return patterns.some((pattern) => minimatch(relPath, pattern, { dot: true }));
@@ -303,6 +398,11 @@ function isTextFile(relPath) {
303
398
  return TEXT_EXT.includes(ext);
304
399
  }
305
400
 
401
+ function isMediaFile(relPath) {
402
+ const ext = path.extname(relPath).toLowerCase();
403
+ return MEDIA_EXT.includes(ext);
404
+ }
405
+
306
406
  function shortenPathForProgress(rel) {
307
407
  if (!rel) return "";
308
408
  const parts = rel.split("/");
@@ -320,17 +420,28 @@ function shortenPathForProgress(rel) {
320
420
  return `…/${prev}/${last}`;
321
421
  }
322
422
 
323
- // Two-line progress bar
423
+ // Two-line progress bar (for terminal) + 1-line log entry
324
424
  function updateProgress2(prefix, current, total, rel = "") {
425
+ const short = rel ? shortenPathForProgress(rel) : "";
426
+
427
+ //Log file: always as a single line with **full** rel path
428
+ const base =
429
+ total && total > 0
430
+ ? `${prefix}${current}/${total} Files`
431
+ : `${prefix}${current} Files`;
432
+ writeLogLine(
433
+ `[progress] ${base}${rel ? " – " + rel : ""}`
434
+ );
435
+
325
436
  if (!process.stdout.isTTY) {
326
- // Fallback für Pipes / Logs
437
+ // Fallback-Terminal
327
438
  if (total && total > 0) {
328
439
  const percent = ((current / total) * 100).toFixed(1);
329
440
  console.log(
330
- `${tab_a()}${prefix}${current}/${total} Files (${percent}%) – ${rel}`
441
+ `${tab_a()}${prefix}${current}/${total} Files (${percent}%) – ${short}`
331
442
  );
332
443
  } else {
333
- console.log(`${tab_a()}${prefix}${current} Files – ${rel}`);
444
+ console.log(`${tab_a()}${prefix}${current} Files – ${short}`);
334
445
  }
335
446
  return;
336
447
  }
@@ -346,7 +457,6 @@ function updateProgress2(prefix, current, total, rel = "") {
346
457
  line1 = `${tab_a()}${prefix}${current} Files`;
347
458
  }
348
459
 
349
- const short = rel ? shortenPathForProgress(rel) : "";
350
460
  let line2 = short;
351
461
 
352
462
  if (line1.length > width) line1 = line1.slice(0, width - 1);
@@ -383,8 +493,8 @@ async function runTasks(items, workerCount, handler, label = "Tasks") {
383
493
  elog(pc.red(`${tab_a()}⚠️ Error in ${label}:`), err.message || err);
384
494
  }
385
495
  done += 1;
386
- if (done % 10 === 0 || done === total) {
387
- updateProgress2(`${label}: `, done, total);
496
+ if (done === 1 || done % 10 === 0 || done === total) {
497
+ updateProgress2(`${label}: `, done, total, item.rel ?? "");
388
498
  }
389
499
  }
390
500
  }
@@ -423,12 +533,12 @@ async function walkLocal(root) {
423
533
  size: stat.size,
424
534
  mtimeMs: stat.mtimeMs,
425
535
  isText: isTextFile(rel),
536
+ isMedia: isMediaFile(rel),
426
537
  });
427
538
 
428
539
  scanned += 1;
429
540
  const chunk = IS_VERBOSE ? 1 : SCAN_CHUNK;
430
541
  if (scanned === 1 || scanned % chunk === 0) {
431
- // totally unknown → total = 0 → no automatic \n
432
542
  updateProgress2("Scan local: ", scanned, 0, rel);
433
543
  }
434
544
  }
@@ -438,7 +548,6 @@ async function walkLocal(root) {
438
548
  await recurse(root);
439
549
 
440
550
  if (scanned > 0) {
441
- // last line + neat finish
442
551
  updateProgress2("Scan local: ", scanned, 0, "fertig");
443
552
  process.stdout.write("\n");
444
553
  progressActive = false;
@@ -447,6 +556,30 @@ async function walkLocal(root) {
447
556
  return result;
448
557
  }
449
558
 
559
+ // Plain walker für Bypass (ignoriert INCLUDE/EXCLUDE)
560
+ async function walkLocalPlain(root) {
561
+ const result = new Map();
562
+
563
+ async function recurse(current) {
564
+ const entries = await fsp.readdir(current, { withFileTypes: true });
565
+ for (const entry of entries) {
566
+ const full = path.join(current, entry.name);
567
+ if (entry.isDirectory()) {
568
+ await recurse(full);
569
+ } else if (entry.isFile()) {
570
+ const rel = toPosix(path.relative(root, full));
571
+ result.set(rel, {
572
+ rel,
573
+ localPath: full,
574
+ });
575
+ }
576
+ }
577
+ }
578
+
579
+ await recurse(root);
580
+ return result;
581
+ }
582
+
450
583
  // ---------------------------------------------------------------------------
451
584
  // Remote walker (recursive, all subdirectories) – respects INCLUDE/EXCLUDE
452
585
  // ---------------------------------------------------------------------------
@@ -497,6 +630,34 @@ async function walkRemote(sftp, remoteRoot) {
497
630
  return result;
498
631
  }
499
632
 
633
+ // Plain walker für Bypass (ignoriert INCLUDE/EXCLUDE)
634
+ async function walkRemotePlain(sftp, remoteRoot) {
635
+ const result = new Map();
636
+
637
+ async function recurse(remoteDir, prefix) {
638
+ const items = await sftp.list(remoteDir);
639
+
640
+ for (const item of items) {
641
+ if (!item.name || item.name === "." || item.name === "..") continue;
642
+
643
+ const full = path.posix.join(remoteDir, item.name);
644
+ const rel = prefix ? `${prefix}/${item.name}` : item.name;
645
+
646
+ if (item.type === "d") {
647
+ await recurse(full, rel);
648
+ } else {
649
+ result.set(rel, {
650
+ rel,
651
+ remotePath: full,
652
+ });
653
+ }
654
+ }
655
+ }
656
+
657
+ await recurse(remoteRoot, "");
658
+ return result;
659
+ }
660
+
500
661
  // ---------------------------------------------------------------------------
501
662
  // Hash helper for binaries (streaming, memory-efficient)
502
663
  // ---------------------------------------------------------------------------
@@ -609,26 +770,140 @@ function describeSftpError(err) {
609
770
  return "";
610
771
  }
611
772
 
773
+ // ---------------------------------------------------------------------------
774
+ // Bypass-only Mode (uploadList / downloadList ohne normalen Sync)
775
+ // ---------------------------------------------------------------------------
776
+
777
+ async function collectUploadTargets() {
778
+ const all = await walkLocalPlain(CONNECTION.localRoot);
779
+ const results = [];
780
+
781
+ for (const [rel, meta] of all.entries()) {
782
+ if (matchesAny(UPLOAD_LIST, rel)) {
783
+ const remotePath = path.posix.join(CONNECTION.remoteRoot, rel);
784
+ results.push({
785
+ rel,
786
+ localPath: meta.localPath,
787
+ remotePath,
788
+ });
789
+ }
790
+ }
791
+
792
+ return results;
793
+ }
794
+
795
+ async function collectDownloadTargets(sftp) {
796
+ const all = await walkRemotePlain(sftp, CONNECTION.remoteRoot);
797
+ const results = [];
798
+
799
+ for (const [rel, meta] of all.entries()) {
800
+ if (matchesAny(DOWNLOAD_LIST, rel)) {
801
+ const localPath = path.join(CONNECTION.localRoot, rel);
802
+ results.push({
803
+ rel,
804
+ remotePath: meta.remotePath,
805
+ localPath,
806
+ });
807
+ }
808
+ }
809
+
810
+ return results;
811
+ }
812
+
813
+ async function performBypassOnly(sftp) {
814
+ log("");
815
+ log(pc.bold(pc.cyan("🚀 Bypass-Only Mode (skip-sync)")));
816
+
817
+ if (RUN_UPLOAD_LIST) {
818
+ log("");
819
+ log(pc.bold(pc.cyan("⬆️ Upload-Bypass (uploadList) …")));
820
+ const targets = await collectUploadTargets();
821
+ log(`${tab_a()}→ ${targets.length} files from uploadList`);
822
+
823
+ if (!DRY_RUN) {
824
+ await runTasks(
825
+ targets,
826
+ CONNECTION.workers,
827
+ async ({ localPath, remotePath, rel }) => {
828
+ const remoteDir = path.posix.dirname(remotePath);
829
+ try {
830
+ await sftp.mkdir(remoteDir, true);
831
+ } catch {
832
+ // Directory may already exist
833
+ }
834
+ await sftp.put(localPath, remotePath);
835
+ vlog(`${tab_a()}${ADD} Uploaded (bypass): ${rel}`);
836
+ },
837
+ "Bypass Uploads"
838
+ );
839
+ } else {
840
+ for (const t of targets) {
841
+ log(`${tab_a()}${ADD} (DRY-RUN) Upload: ${t.rel}`);
842
+ }
843
+ }
844
+ }
845
+
846
+ if (RUN_DOWNLOAD_LIST) {
847
+ log("");
848
+ log(pc.bold(pc.cyan("⬇️ Download-Bypass (downloadList) …")));
849
+ const targets = await collectDownloadTargets(sftp);
850
+ log(`${tab_a()}→ ${targets.length} files from downloadList`);
851
+
852
+ if (!DRY_RUN) {
853
+ await runTasks(
854
+ targets,
855
+ CONNECTION.workers,
856
+ async ({ remotePath, localPath, rel }) => {
857
+ const localDir = path.dirname(localPath);
858
+ await fsp.mkdir(localDir, { recursive: true });
859
+ await sftp.get(remotePath, localPath);
860
+ vlog(`${tab_a()}${CHA} Downloaded (bypass): ${rel}`);
861
+ },
862
+ "Bypass Downloads"
863
+ );
864
+ } else {
865
+ for (const t of targets) {
866
+ log(`${tab_a()}${CHA} (DRY-RUN) Download: ${t.rel}`);
867
+ }
868
+ }
869
+ }
870
+
871
+ log("");
872
+ log(pc.bold(pc.green("✅ Bypass-only run finished.")));
873
+ }
874
+
612
875
  // ---------------------------------------------------------------------------
613
876
  // MAIN
614
877
  // ---------------------------------------------------------------------------
615
878
 
879
+ async function initLogFile() {
880
+ if (!LOG_FILE) return;
881
+ const dir = path.dirname(LOG_FILE);
882
+ await fsp.mkdir(dir, { recursive: true });
883
+ LOG_STREAM = fs.createWriteStream(LOG_FILE, {
884
+ flags: "w",
885
+ encoding: "utf8",
886
+ });
887
+ }
888
+
616
889
  async function main() {
617
890
  const start = Date.now();
618
891
 
892
+ await initLogFile();
893
+
619
894
  // Header-Abstand wie gehabt: zwei Leerzeilen davor
620
- log("\n\n" + hr2());
621
- log(
622
- pc.bold(
623
- `🔐 SFTP Push-Synchronisation: sftp-push-sync v${pkg.version} [logLevel=${LOG_LEVEL}]`
624
- )
625
- );
895
+ log("\n" + hr2());
896
+ log(pc.bold(`🔐 SFTP Push-Synchronisation: sftp-push-sync v${pkg.version}`));
897
+ log(`${tab_a()}LogLevel: ${LOG_LEVEL}`);
626
898
  log(`${tab_a()}Connection: ${pc.cyan(TARGET)}`);
627
899
  log(`${tab_a()}Worker: ${CONNECTION.workers}`);
628
- log(`${tab_a()}Host: ${pc.green(CONNECTION.host)}:${pc.green(CONNECTION.port)}`);
900
+ log(
901
+ `${tab_a()}Host: ${pc.green(CONNECTION.host)}:${pc.green(CONNECTION.port)}`
902
+ );
629
903
  log(`${tab_a()}Local: ${pc.green(CONNECTION.localRoot)}`);
630
904
  log(`${tab_a()}Remote: ${pc.green(CONNECTION.remoteRoot)}`);
631
905
  if (DRY_RUN) log(pc.yellow(`${tab_a()}Mode: DRY-RUN (no changes)`));
906
+ if (SKIP_SYNC) log(pc.yellow(`${tab_a()}Mode: SKIP-SYNC (bypass only)`));
632
907
  if (RUN_UPLOAD_LIST || RUN_DOWNLOAD_LIST) {
633
908
  log(
634
909
  pc.blue(
@@ -638,6 +913,9 @@ async function main() {
638
913
  )
639
914
  );
640
915
  }
916
+ if (LOG_FILE) {
917
+ log(`${tab_a()}LogFile: ${pc.cyan(LOG_FILE)}`);
918
+ }
641
919
  log(hr1());
642
920
 
643
921
  const sftp = new SftpClient();
@@ -667,6 +945,22 @@ async function main() {
667
945
  process.exit(1);
668
946
  }
669
947
 
948
+ // -------------------------------------------------------------
949
+ // SKIP-SYNC-Modus → nur Bypass mit Listen
950
+ // -------------------------------------------------------------
951
+ if (SKIP_SYNC) {
952
+ await performBypassOnly(sftp);
953
+ const duration = ((Date.now() - start) / 1000).toFixed(2);
954
+ log("");
955
+ log(pc.bold(pc.cyan("📊 Summary (bypass only):")));
956
+ log(`${tab_a()}Duration: ${pc.green(duration + " s")}`);
957
+ return;
958
+ }
959
+
960
+ // -------------------------------------------------------------
961
+ // Normaler Sync (inkl. evtl. paralleler Listen-Excludes)
962
+ // -------------------------------------------------------------
963
+
670
964
  // Phase 1 – mit exakt einer Leerzeile davor
671
965
  log("");
672
966
  log(pc.bold(pc.cyan("📥 Phase 1: Scan local files …")));
@@ -756,7 +1050,9 @@ async function main() {
756
1050
 
757
1051
  toUpdate.push({ rel, local: l, remote: r, remotePath });
758
1052
  if (!IS_LACONIC) {
759
- log(`${tab_a()}${CHA} ${pc.yellow("Content changed (Text):")} ${rel}`);
1053
+ log(
1054
+ `${tab_a()}${CHA} ${pc.yellow("Content changed (Text):")} ${rel}`
1055
+ );
760
1056
  }
761
1057
  } else {
762
1058
  // Binary: Hash comparison with cache
@@ -890,7 +1186,9 @@ async function main() {
890
1186
  log(`${tab_a()}${DEL} Deleted: ${toDelete.length}`);
891
1187
  if (AUTO_EXCLUDED.size > 0) {
892
1188
  log(
893
- `${tab_a()}${EXC} Excluded via uploadList | downloadList: ${AUTO_EXCLUDED.size}`
1189
+ `${tab_a()}${EXC} Excluded via uploadList | downloadList: ${
1190
+ AUTO_EXCLUDED.size
1191
+ }`
894
1192
  );
895
1193
  }
896
1194
  if (toAdd.length || toUpdate.length || toDelete.length) {
@@ -940,11 +1238,15 @@ async function main() {
940
1238
  e.message || e
941
1239
  );
942
1240
  }
943
- }
944
1241
 
945
- // Abschlusslinie + eine Leerzeile dahinter
946
- log(hr2());
947
- log("");
1242
+ // Abschlusslinie + Leerzeile **vor** dem Schließen des Logfiles
1243
+ log(hr2());
1244
+ log("");
1245
+
1246
+ if (LOG_STREAM) {
1247
+ LOG_STREAM.end();
1248
+ }
1249
+ }
948
1250
  }
949
1251
 
950
- main();
1252
+ main();
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "sftp-push-sync",
3
- "version": "1.0.16",
3
+ "version": "1.0.18",
4
4
  "description": "SFTP sync tool for Hugo projects (local to remote, with hash cache)",
5
5
  "type": "module",
6
6
  "bin": {