sftp-push-sync 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.vscode/settings.json +3 -0
- package/LICENSE +674 -0
- package/README.md +104 -0
- package/bin/sync-sftp.mjs +651 -0
- package/package.json +23 -0
|
@@ -0,0 +1,651 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
** sync-sftp.mjs - SFTP Synchronisation Tool
|
|
4
|
+
*
|
|
5
|
+
* SFTP push sync with dry run
|
|
6
|
+
* 1. Upload new files
|
|
7
|
+
* 2. Delete remote files that no longer exist locally
|
|
8
|
+
* 3. Detect changes based on size or modified content and upload them
|
|
9
|
+
*
|
|
10
|
+
* Features:
|
|
11
|
+
* - multiple connections in sync.config.json
|
|
12
|
+
* - dry-run mode
|
|
13
|
+
* - mirrors local → remote
|
|
14
|
+
* - adds, updates, deletes files
|
|
15
|
+
* - text diff detection
|
|
16
|
+
* - Binary files (images, video, audio, PDF, etc.): SHA-256 hash comparison
|
|
17
|
+
* - Hashes are cached in .sync-cache.json to save space.
|
|
18
|
+
* - Parallel uploads/deletes via worker pool
|
|
19
|
+
* - include/exclude patterns
|
|
20
|
+
*
|
|
21
|
+
* The file shell-scripts/sync-sftp.mjs is pure JavaScript (ESM), not TypeScript.
|
|
22
|
+
* Node.js can execute it directly as long as "type": "module" is specified in package.json
|
|
23
|
+
* or the file has the extension .mjs.
|
|
24
|
+
*
|
|
25
|
+
* @author Carsten Nichte, 2025 / https://carsten-nichte.de
|
|
26
|
+
*/
|
|
27
|
+
import fs from "fs";
|
|
28
|
+
import fsp from "fs/promises";
|
|
29
|
+
import path from "path";
|
|
30
|
+
import SftpClient from "ssh2-sftp-client";
|
|
31
|
+
import { minimatch } from "minimatch";
|
|
32
|
+
import { diffWords } from "diff";
|
|
33
|
+
import { createHash } from "crypto";
|
|
34
|
+
import { Writable } from "stream";
|
|
35
|
+
import pc from "picocolors";
|
|
36
|
+
|
|
37
|
+
// ---------------------------------------------------------------------------
|
|
38
|
+
// CLI arguments
|
|
39
|
+
// ---------------------------------------------------------------------------
|
|
40
|
+
|
|
41
|
+
// Raw CLI arguments: first positional argument is the connection profile
// name; the remaining flags are detected by value, not by position.
const args = process.argv.slice(2);
const TARGET = args[0];
const DRY_RUN = args.includes("--dry-run");
const VERBOSE = args.includes("--verbose") || args.includes("-v");

// A profile name is mandatory; fail fast with a usage hint.
if (!TARGET) {
  console.error(pc.red("❌ Please specify a connection profile:"));
  console.error(pc.yellow(" node sync-sftp.mjs staging --dry-run"));
  process.exit(1);
}

// ---------------------------------------------------------------------------
// Load config file
// ---------------------------------------------------------------------------

// The config is resolved against the current working directory,
// not against the script's own location.
const CONFIG_PATH = path.resolve("sync.config.json");

if (!fs.existsSync(CONFIG_PATH)) {
  console.error(pc.red(`❌ Configuration file missing: ${CONFIG_PATH}`));
  process.exit(1);
}

let CONFIG_RAW;
try {
  CONFIG_RAW = JSON.parse(await fsp.readFile(CONFIG_PATH, "utf8"));
} catch (err) {
  // elog is declared further down; function-declaration hoisting makes it
  // available during this top-level module evaluation.
  elog(pc.red("❌ Error reading sync.config.json:"), err.message);
  process.exit(1);
}

if (!CONFIG_RAW.connections || typeof CONFIG_RAW.connections !== "object") {
  elog(pc.red("❌ sync.config.json must have a 'connections' field."));
  process.exit(1);
}

const TARGET_CONFIG = CONFIG_RAW.connections[TARGET];
if (!TARGET_CONFIG) {
  elog(pc.red(`❌ Connection '${TARGET}' not found in sync.config.json.`));
  process.exit(1);
}

// Normalised connection settings with defaults applied.
// NOTE(review): the JSON key read here is 'worker' (singular) while the
// normalised field is 'workers' — confirm the documented config schema
// actually uses 'worker', otherwise the worker count silently falls back to 2.
const CONNECTION = {
  host: TARGET_CONFIG.host,
  port: TARGET_CONFIG.port ?? 22,
  user: TARGET_CONFIG.user,
  password: TARGET_CONFIG.password,
  localRoot: path.resolve(TARGET_CONFIG.localRoot),
  remoteRoot: TARGET_CONFIG.remoteRoot,
  workers: TARGET_CONFIG.worker ?? 2,
};

// Shared config from JSON
const INCLUDE = CONFIG_RAW.include ?? [];
const EXCLUDE = CONFIG_RAW.exclude ?? [];
// Extensions treated as text (content-diffed in full); every other
// extension is treated as binary and compared via SHA-256 hash.
const TEXT_EXT = CONFIG_RAW.textExtensions ?? [
  ".html",
  ".htm",
  ".xml",
  ".txt",
  ".json",
  ".js",
  ".mjs",
  ".cjs",
  ".css",
  ".md",
  ".svg",
];

// Cache file name per connection
const syncCacheName =
  TARGET_CONFIG.syncCache || `.sync-cache.${TARGET}.json`;
const CACHE_PATH = path.resolve(syncCacheName);

// ---------------------------------------------------------------------------
// Load/initialise hash cache
// ---------------------------------------------------------------------------

// In-memory hash cache, persisted to CACHE_PATH between runs.
let CACHE = {
  version: 1,
  local: {}, // key: "<TARGET>:<relPath>" -> { size, mtimeMs, hash }
  remote: {}, // key: "<TARGET>:<relPath>" -> { size, modifyTime, hash }
};

// A missing or corrupt cache file is not fatal — the sync simply
// re-hashes everything on this run and rewrites the cache at the end.
try {
  if (fs.existsSync(CACHE_PATH)) {
    const raw = JSON.parse(await fsp.readFile(CACHE_PATH, "utf8"));
    CACHE.version = raw.version ?? 1;
    CACHE.local = raw.local ?? {};
    CACHE.remote = raw.remote ?? {};
  }
} catch (err) {
  wlog(pc.yellow("⚠️ Konnte Cache nicht laden, starte ohne Cache:"), err.message);
}
|
|
134
|
+
|
|
135
|
+
function cacheKey(relPath) {
  // Namespace cache entries by connection profile so one cache file could
  // hold entries for several targets without key collisions.
  return [TARGET, relPath].join(":");
}
|
|
138
|
+
|
|
139
|
+
// Dirty-flag bookkeeping for the hash cache: instead of writing the cache
// file after every single new hash, writes are batched (see markCacheDirty).
let cacheDirty = false;
let cacheDirtyCount = 0;
const CACHE_FLUSH_INTERVAL = 50; // Write cache to disk after 50 new hashes
|
|
142
|
+
|
|
143
|
+
async function saveCache(force = false) {
  // Persist the in-memory hash cache to disk. Skipped when nothing has
  // changed, unless `force` is set (used for the final write at shutdown).
  if (!force && !cacheDirty) return;
  const serialized = JSON.stringify(CACHE, null, 2);
  await fsp.writeFile(CACHE_PATH, serialized, "utf8");
  cacheDirty = false;
  cacheDirtyCount = 0;
}
|
|
150
|
+
|
|
151
|
+
async function markCacheDirty() {
  // Record one new/changed cache entry; flush to disk in batches so a long
  // run does not rewrite the cache file after every single hash.
  cacheDirty = true;
  cacheDirtyCount += 1;
  const shouldFlush = cacheDirtyCount >= CACHE_FLUSH_INTERVAL;
  if (shouldFlush) {
    await saveCache();
  }
}
|
|
158
|
+
|
|
159
|
+
// ---------------------------------------------------------------------------
|
|
160
|
+
// Helpers
|
|
161
|
+
// ---------------------------------------------------------------------------
|
|
162
|
+
|
|
163
|
+
// Whether a single-line progress bar is currently occupying the terminal row.
let progressActive = false;

function clearProgressLine() {
  // Erase the in-place progress line (TTY only) so regular log output does
  // not get appended onto it.
  if (!progressActive || !process.stdout.isTTY) return;
  const cols = process.stdout.columns || 80;
  process.stdout.write("\r" + " ".repeat(cols - 1) + "\r");
  progressActive = false;
}
|
|
172
|
+
|
|
173
|
+
function toPosix(p) {
  // Normalise an OS-specific path to forward slashes, as used for glob
  // matching and for remote (SFTP) paths.
  return p.replaceAll(path.sep, "/");
}
|
|
176
|
+
|
|
177
|
+
function log(...parts) {
  // Standard logging; clears any active progress line first so output does
  // not mix with the progress bar.
  clearProgressLine();
  console.log(...parts);
}
|
|
181
|
+
|
|
182
|
+
function vlog(...parts) {
  // Verbose-only logging: silent unless --verbose/-v was passed.
  if (VERBOSE) {
    clearProgressLine();
    console.log(...parts);
  }
}
|
|
187
|
+
|
|
188
|
+
function elog(...parts) {
  // Error logging to stderr. Must stay a function declaration: it is called
  // during top-level module evaluation before this line via hoisting.
  clearProgressLine();
  console.error(...parts);
}
|
|
192
|
+
|
|
193
|
+
function wlog(...parts) {
  // Warning logging to stderr. Must stay a function declaration: it is
  // called during top-level module evaluation before this line via hoisting.
  clearProgressLine();
  console.warn(...parts);
}
|
|
197
|
+
|
|
198
|
+
function matchesAny(patterns, relPath) {
  // True when relPath matches at least one glob pattern.
  // A missing or empty pattern list matches nothing.
  if (!patterns?.length) return false;
  for (const pattern of patterns) {
    if (minimatch(relPath, pattern, { dot: true })) return true;
  }
  return false;
}
|
|
204
|
+
|
|
205
|
+
function isIncluded(relPath) {
  // The include list (when non-empty) acts as a whitelist;
  // the exclude list always wins over an include match.
  const included = INCLUDE.length === 0 || matchesAny(INCLUDE, relPath);
  const excluded = EXCLUDE.length > 0 && matchesAny(EXCLUDE, relPath);
  return included && !excluded;
}
|
|
210
|
+
|
|
211
|
+
function isTextFile(relPath) {
  // Text files are compared by full content; everything else is treated as
  // binary and compared by SHA-256 hash.
  return TEXT_EXT.includes(path.extname(relPath).toLowerCase());
}
|
|
215
|
+
|
|
216
|
+
// Single-line progress bar
|
|
217
|
+
function updateProgress(prefix, current, total) {
  // Render a single-line, in-place progress indicator. When stdout is not a
  // TTY (e.g. piped to a file), fall back to one plain log line per call.
  const percent = total > 0 ? ((current / total) * 100).toFixed(1) : "0.0";
  const msg = `${prefix}${current}/${total} Dateien (${percent}%)`;

  if (!process.stdout.isTTY) {
    console.log(" " + msg);
    return;
  }

  const cols = process.stdout.columns || 80;
  progressActive = true;
  process.stdout.write("\r" + msg.padEnd(cols - 1));

  if (current === total) {
    // Finish the line so subsequent output starts on a fresh row.
    process.stdout.write("\n");
    progressActive = false;
  }
}
|
|
238
|
+
|
|
239
|
+
// Simple worker pool for parallel tasks
|
|
240
|
+
// Simple worker pool: run `handler` over `items` with at most `workerCount`
// concurrent workers. Per-item errors are logged and do not abort the batch.
async function runTasks(items, workerCount, handler, label = "Tasks") {
  const total = items.length;
  if (total === 0) return;

  let next = 0; // next index to claim (single-threaded JS: claim is atomic)
  let done = 0;

  const worker = async () => {
    while (next < total) {
      const item = items[next];
      next += 1;
      try {
        await handler(item);
      } catch (err) {
        elog(pc.red(` ⚠️ Fehler in ${label}:`), err.message || err);
      }
      done += 1;
      // Throttle progress rendering; always report the final item.
      if (done === total || done % 10 === 0) {
        updateProgress(` ${label}: `, done, total);
      }
    }
  };

  const poolSize = Math.max(1, Math.min(workerCount, total));
  const pool = Array.from({ length: poolSize }, () => worker());
  await Promise.all(pool);
}
|
|
273
|
+
|
|
274
|
+
// ---------------------------------------------------------------------------
|
|
275
|
+
// Local file walker (recursive, all subdirectories)
|
|
276
|
+
// ---------------------------------------------------------------------------
|
|
277
|
+
|
|
278
|
+
// Recursively index all local files under `root` that pass the
// include/exclude filters.
// Returns Map<relPath, { rel, localPath, size, mtimeMs, isText }> in
// depth-first directory order.
async function walkLocal(root) {
  const result = new Map();

  const recurse = async (dir) => {
    for (const entry of await fsp.readdir(dir, { withFileTypes: true })) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        await recurse(full);
        continue;
      }
      // Skip anything that is neither a regular file nor a directory
      // (sockets, FIFOs, symlinks are ignored).
      if (!entry.isFile()) continue;

      const rel = toPosix(path.relative(root, full));
      if (!isIncluded(rel)) continue;

      const stat = await fsp.stat(full);
      result.set(rel, {
        rel,
        localPath: full,
        size: stat.size,
        mtimeMs: stat.mtimeMs,
        isText: isTextFile(rel),
      });
    }
  };

  await recurse(root);
  return result;
}
|
|
305
|
+
|
|
306
|
+
// ---------------------------------------------------------------------------
|
|
307
|
+
// Remote walker (recursive, all subdirectories)
|
|
308
|
+
// ---------------------------------------------------------------------------
|
|
309
|
+
|
|
310
|
+
// Recursively index every remote entry below `remoteRoot` via the SFTP
// client's list(). Entries of type "d" are descended into; every other type
// is recorded as a file.
// Returns Map<relPath, { rel, remotePath, size, modifyTime }>.
async function walkRemote(sftp, remoteRoot) {
  const result = new Map();

  const recurse = async (dir, prefix) => {
    for (const item of await sftp.list(dir)) {
      const { name } = item;
      if (!name || name === "." || name === "..") continue;

      const full = path.posix.join(dir, name);
      const rel = prefix ? `${prefix}/${name}` : name;

      if (item.type === "d") {
        await recurse(full, rel);
        continue;
      }
      result.set(rel, {
        rel,
        remotePath: full,
        size: Number(item.size),
        modifyTime: item.modifyTime ?? 0,
      });
    }
  };

  await recurse(remoteRoot, "");
  return result;
}
|
|
338
|
+
|
|
339
|
+
// ---------------------------------------------------------------------------
|
|
340
|
+
// Hash helper for binaries (streaming, memory-efficient)
|
|
341
|
+
// ---------------------------------------------------------------------------
|
|
342
|
+
|
|
343
|
+
async function hashLocalFile(filePath) {
  // Stream the file through SHA-256 so large binaries never have to fit in
  // memory. Stream errors reject the returned promise.
  const hash = createHash("sha256");
  for await (const chunk of fs.createReadStream(filePath)) {
    hash.update(chunk);
  }
  return hash.digest("hex");
}
|
|
352
|
+
|
|
353
|
+
async function hashRemoteFile(sftp, remotePath) {
  // Hash a remote file by streaming its download straight into SHA-256;
  // nothing is written to local disk.
  const hash = createHash("sha256");
  const sink = new Writable({
    write(chunk, _encoding, callback) {
      hash.update(chunk);
      callback();
    },
  });
  await sftp.get(remotePath, sink);
  return hash.digest("hex");
}
|
|
366
|
+
|
|
367
|
+
// Cache-aware Helpers
|
|
368
|
+
// Cache-aware SHA-256 for a local file: the cached hash is reused while the
// file's size and mtime are unchanged; otherwise the file is re-hashed and
// the cache entry refreshed.
async function getLocalHash(rel, meta) {
  const key = cacheKey(rel);
  const entry = CACHE.local[key];
  const fresh =
    entry?.hash && entry.size === meta.size && entry.mtimeMs === meta.mtimeMs;
  if (fresh) return entry.hash;

  const hash = await hashLocalFile(meta.localPath);
  CACHE.local[key] = { size: meta.size, mtimeMs: meta.mtimeMs, hash };
  await markCacheDirty();
  return hash;
}
|
|
389
|
+
|
|
390
|
+
// Cache-aware SHA-256 for a remote file: the cached hash is reused while the
// remote size and modifyTime are unchanged; otherwise the file is downloaded,
// hashed, and the cache entry refreshed.
async function getRemoteHash(rel, meta, sftp) {
  const key = cacheKey(rel);
  const entry = CACHE.remote[key];
  const fresh =
    entry?.hash &&
    entry.size === meta.size &&
    entry.modifyTime === meta.modifyTime;
  if (fresh) return entry.hash;

  const hash = await hashRemoteFile(sftp, meta.remotePath);
  CACHE.remote[key] = { size: meta.size, modifyTime: meta.modifyTime, hash };
  await markCacheDirty();
  return hash;
}
|
|
411
|
+
|
|
412
|
+
// ---------------------------------------------------------------------------
|
|
413
|
+
// MAIN
|
|
414
|
+
// ---------------------------------------------------------------------------
|
|
415
|
+
|
|
416
|
+
// Entry point: connect, scan local + remote trees, decide adds/updates/
// deletes, execute them (unless --dry-run), persist the hash cache, and
// print a summary. The SFTP connection is always closed in the finally block.
async function main() {
  const start = Date.now();

  log("\n\n==================================================================");
  log(pc.bold("🔐 SFTP-Synchronisation - shell-scripts/sync-sftp.mjs"));
  log(` Connection: ${pc.cyan(TARGET)} (Worker: ${CONNECTION.workers})`);
  log(` Host:Port: ${pc.green(CONNECTION.host)}:${pc.green(CONNECTION.port)}`);
  log(` Local: ${pc.green(CONNECTION.localRoot)}`);
  log(` Remote: ${pc.green(CONNECTION.remoteRoot)}`);
  if (DRY_RUN) log(pc.yellow(" Modus: DRY-RUN (no changes)"));
  log("-----------------------------------------------------------------\n");

  const sftp = new SftpClient();

  // Work queues filled during analysis (phases 3/4), executed in phase 5.
  const toAdd = [];
  const toUpdate = [];
  const toDelete = [];

  try {
    await sftp.connect({
      host: CONNECTION.host,
      port: CONNECTION.port,
      username: CONNECTION.user,
      password: CONNECTION.password,
    });

    vlog(pc.dim(" Connection established."));

    // NOTE(review): process.exit() here bypasses the finally block, so the
    // already-open SFTP connection is not closed via sftp.end() — the process
    // terminates anyway, but throwing would be cleaner; confirm intent.
    if (!fs.existsSync(CONNECTION.localRoot)) {
      console.error(pc.red("❌ Local root does not exist:"), CONNECTION.localRoot);
      process.exit(1);
    }

    log(pc.bold(pc.cyan("📥 Phase 1: Scan local files …")));
    const local = await walkLocal(CONNECTION.localRoot);
    log(` → ${local.size} lokale Dateien`);

    log(pc.bold(pc.cyan("📤 Phase 2: Scan remote files …")));
    const remote = await walkRemote(sftp, CONNECTION.remoteRoot);
    log(` → ${remote.size} remote files`);

    const localKeys = new Set(local.keys());
    const remoteKeys = new Set(remote.keys());

    log(pc.bold(pc.cyan("🔎 Phase 3: Compare & decide …")));
    const totalToCheck = localKeys.size;
    let checkedCount = 0;

    // Analysis: just decide, don't upload/delete anything yet
    for (const rel of localKeys) {
      checkedCount += 1;
      // Progress is throttled to every 500 files; the final file always reports.
      if (checkedCount % 500 === 0 || checkedCount === totalToCheck) {
        updateProgress(" Analyse: ", checkedCount, totalToCheck);
      }

      const l = local.get(rel);
      const r = remote.get(rel);

      const remotePath = path.posix.join(CONNECTION.remoteRoot, rel);

      // File exists locally but not remotely -> new upload.
      if (!r) {
        toAdd.push({ rel, local: l, remotePath });
        log(`${pc.green("➕ New:")} ${rel}`);
        continue;
      }

      // 1. size comparison (cheap; skips content comparison entirely)
      if (l.size !== r.size) {
        toUpdate.push({ rel, local: l, remote: r, remotePath });
        log(`${pc.yellow("🔁 Size changed:")} ${rel}`);
        continue;
      }

      // 2. content comparison
      if (l.isText) {
        // Text file: Read & compare in full
        const [localBuf, remoteBuf] = await Promise.all([
          fsp.readFile(l.localPath),
          sftp.get(r.remotePath),
        ]);

        const localStr = localBuf.toString("utf8");
        const remoteStr = (Buffer.isBuffer(remoteBuf)
          ? remoteBuf
          : Buffer.from(remoteBuf)
        ).toString("utf8");

        if (localStr === remoteStr) {
          vlog(` ${pc.dim("✓ Unchanged (Text):")} ${rel}`);
          continue;
        }

        // diffWords is only used for the verbose block count; the decision
        // to upload was already made by the string inequality above.
        if (VERBOSE) {
          const diff = diffWords(remoteStr, localStr);
          const blocks = diff.filter((d) => d.added || d.removed).length;
          vlog(` ↪️ text difference (${blocks} Blocks) in ${rel}`);
        }

        toUpdate.push({ rel, local: l, remote: r, remotePath });
        log(`${pc.yellow(" 🔁 Content changed(Text):")} ${rel}`);
      } else {
        // Binary: Hash comparison with cache
        const localMeta = l;
        const remoteMeta = r;

        const [localHash, remoteHash] = await Promise.all([
          getLocalHash(rel, localMeta),
          getRemoteHash(rel, remoteMeta, sftp),
        ]);

        if (localHash === remoteHash) {
          vlog(` ${pc.dim("✓ Unchanged (binary, hash):")} ${rel}`);
          continue;
        }

        if (VERBOSE) {
          vlog(` ↪️ Hash different(binary): ${rel}`);
          vlog(` local: ${localHash}`);
          vlog(` remote: ${remoteHash}`);
        }

        toUpdate.push({ rel, local: l, remote: r, remotePath });
        log(`${pc.yellow(" 🔁 Content changed (Binary):")} ${rel}`);
      }
    }

    // Remote files with no local counterpart are queued for deletion.
    // Note: only files are deleted; emptied remote directories remain.
    log("\n" + pc.bold(pc.cyan("🧹 Phase 4: Removing orphaned remote files …")));
    for (const rel of remoteKeys) {
      if (!localKeys.has(rel)) {
        const r = remote.get(rel);
        toDelete.push({ rel, remotePath: r.remotePath });
        log(`${pc.red(" 🗑 Remove:")} ${rel}`);
      }
    }

    // -------------------------------------------------------------------
    // Phase 5: Execute changes (parallel, worker-based)
    // -------------------------------------------------------------------

    if (!DRY_RUN) {
      log("\n" + pc.bold(pc.cyan("🚚 Phase 5: Implement changes …")));

      // Upload new files
      await runTasks(
        toAdd,
        CONNECTION.workers,
        async ({ rel, local: l, remotePath }) => {
          const remoteDir = path.posix.dirname(remotePath);
          try {
            // Recursive mkdir so nested new paths are created in one call.
            await sftp.mkdir(remoteDir, true);
          } catch {
            // Directory may already exist.
          }
          await sftp.put(l.localPath, remotePath);
        },
        "Uploads (new)"
      );

      // Updates
      await runTasks(
        toUpdate,
        CONNECTION.workers,
        async ({ rel, local: l, remotePath }) => {
          const remoteDir = path.posix.dirname(remotePath);
          try {
            await sftp.mkdir(remoteDir, true);
          } catch {
            // Directory may already exist.
          }
          await sftp.put(l.localPath, remotePath);
        },
        "Uploads (update)"
      );

      // Deletes
      await runTasks(
        toDelete,
        CONNECTION.workers,
        async ({ remotePath }) => {
          try {
            await sftp.delete(remotePath);
          } catch (e) {
            console.error(
              pc.red(" ⚠️ Error during deletion:"),
              remotePath,
              e.message || e
            );
          }
        },
        "Deletes"
      );
    } else {
      log(pc.yellow("\n💡 DRY-RUN: No files transferred or deleted."));
    }

    const duration = ((Date.now() - start) / 1000).toFixed(2);

    // Write cache securely at the end (forced, even if below flush threshold)
    await saveCache(true);

    // Summary
    log("\n" + pc.bold(pc.cyan("📊 Summary:")));
    log(` Dauer: ${pc.green(duration + " s")}`);
    log(` ➕ Added : ${toAdd.length}`);
    log(` 🔁 Changed: ${toUpdate.length}`);
    log(` 🗑 Deleted: ${toDelete.length}`);

    if (toAdd.length || toUpdate.length || toDelete.length) {
      log("\n📄 Changes:");
      [...toAdd.map((t) => t.rel)].sort().forEach((f) => console.log(` ${pc.green("➕")} ${f}`));
      [...toUpdate.map((t) => t.rel)].sort().forEach((f) => console.log(` ${pc.yellow("🔁")} ${f}`));
      [...toDelete.map((t) => t.rel)].sort().forEach((f) => console.log(` ${pc.red("🗑")} ${f}`));
    } else {
      log("\nNo changes.");
    }

    log("\n" + pc.bold(pc.green("✅ Sync complete.")));
    log("==================================================================\n\n");
  } catch (err) {
    elog(pc.red("❌ Synchronisation error:"), err);
    process.exitCode = 1;
    // Best-effort: keep the hashes computed so far, even on failure.
    try {
      await saveCache(true);
    } catch {
      // ignore
    }
  } finally {
    try {
      await sftp.end();
    } catch {
      // ignore
    }
  }
}
|
|
650
|
+
|
|
651
|
+
main();
|
package/package.json
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "sftp-push-sync",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "SFTP Sync Tool für Hugo-Projekte (local -> remote, mit Hash-Cache)",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"hugo-sftp-sync": "./bin/sftp-sync.mjs"
|
|
8
|
+
},
|
|
9
|
+
"keywords": [
|
|
10
|
+
"hugo",
|
|
11
|
+
"sftp",
|
|
12
|
+
"deploy",
|
|
13
|
+
"sync"
|
|
14
|
+
],
|
|
15
|
+
"author": "Carsten Nichte",
|
|
16
|
+
"license": "MIT",
|
|
17
|
+
"dependencies": {
|
|
18
|
+
"ssh2-sftp-client": "^10.0.0",
|
|
19
|
+
"minimatch": "^9.0.3",
|
|
20
|
+
"diff": "^5.2.0",
|
|
21
|
+
"picocolors": "^1.0.0"
|
|
22
|
+
}
|
|
23
|
+
}
|