sftp-push-sync 2.5.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -6
- package/README.md +11 -4
- package/bin/sftp-push-sync.mjs +5 -1
- package/package.json +1 -1
- package/src/core/SftpPushSyncApp.mjs +387 -74
- package/src/core/SyncLogger.mjs +18 -6
- package/src/helpers/compare.mjs +82 -89
- package/src/helpers/hash-cache-leveldb.mjs +299 -0
- package/src/helpers/hash-cache-ndjson.mjs +423 -0
- package/src/helpers/hashing.mjs +39 -7
- package/src/helpers/walkers.mjs +4 -4
package/src/core/SyncLogger.mjs
CHANGED
@@ -1,9 +1,9 @@
 /**
  * SyncLogger.mjs
- *
+ *
  * @author Carsten Nichte, 2025 / https://carsten-nichte.de/
- *
- */
+ *
+ */
 // src/core/SyncLogger.mjs
 import fs from "fs";
 import fsp from "fs/promises";
@@ -14,9 +14,10 @@ import path from "path";
  * and removes ANSI color codes.
  */
 export class SyncLogger {
-  constructor(filePath) {
+  constructor(filePath, options = {}) {
     this.filePath = filePath;
     this.stream = null;
+    this.enableTimestamps = options.enableTimestamps ?? false;
   }

   async init() {
@@ -31,13 +32,24 @@ export class SyncLogger {
     });
   }

+  /**
+   * Returns current timestamp in ISO format: [YYYY-MM-DD HH:mm:ss.SSS]
+   */
+  _getTimestamp() {
+    const now = new Date();
+    const pad = (n, len = 2) => String(n).padStart(len, '0');
+    return `[${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())} ${pad(now.getHours())}:${pad(now.getMinutes())}:${pad(now.getSeconds())}.${pad(now.getMilliseconds(), 3)}]`;
+  }
+
   writeLine(line) {
     if (!this.stream) return;
     const text = typeof line === "string" ? line : String(line);
     const clean = text.replace(/\x1b\[[0-9;]*m/g, "");

+    const prefix = this.enableTimestamps ? this._getTimestamp() + " " : "";
+
     try {
-      this.stream.write(clean + "\n");
+      this.stream.write(prefix + clean + "\n");
     } catch {
       // Stream already closed → ignore
     }
@@ -49,4 +61,4 @@ export class SyncLogger {
     this.stream = null;
   }
 }
-}
+}
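
The new timestamp option is opt-in and defaults to false, so existing callers keep their current log format. A minimal usage sketch based on the constructor, init() and writeLine() shown above; the import path and log-file name are illustrative, not taken from the package:

    // Hypothetical wiring; only enableTimestamps is new in 3.x.
    import { SyncLogger } from "./src/core/SyncLogger.mjs";

    const logger = new SyncLogger("./sync.log", { enableTimestamps: true });
    await logger.init();
    logger.writeLine("Upload started");
    // with enableTimestamps: true each line gets a prefix such as
    // "[2025-01-01 12:00:00.000] Upload started"
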
package/src/helpers/compare.mjs
CHANGED
@@ -1,16 +1,16 @@
 /**
  * compare.mjs
- *
+ *
  * @author Carsten Nichte, 2025 / https://carsten-nichte.de/
- *
- */
+ *
+ */
 // src/helpers/compare.mjs
 import fsp from "fs/promises";
 import path from "path";

 /**
  * Analyzes differences between local and remote maps.
- * Optimized:
+ * Optimized: true batch processing with a concurrency limit.
  *
  * Expected structure:
  * local: Map<rel, { rel, localPath, size, mtimeMs, isText? }>
@@ -22,7 +22,7 @@ import path from "path";
  * - getLocalHash / getRemoteHash: from createHashCache
  * - analyzeChunk: progress step size
  * - updateProgress(prefix, current, total, rel): optional
- * - concurrency: max. parallel comparisons (default:
+ * - concurrency: max. parallel comparisons (default: 5)
  */
 export async function analyseDifferences({
   local,
@@ -33,7 +33,7 @@ export async function analyseDifferences({
   getRemoteHash,
   analyzeChunk = 10,
   updateProgress,
-  concurrency =
+  concurrency = 10,
 }) {
   const toAdd = [];
   const toUpdate = [];
@@ -42,104 +42,97 @@ export async function analyseDifferences({
   const totalToCheck = localKeys.length;
   let checked = 0;

-  // Quick pre-check
-
+  // Phase 1: quick pre-check without SFTP
+  // - files that only exist locally → straight to toAdd
+  // - size comparison for files that exist on both sides
+  const keysNeedContentCompare = [];
+
   for (const rel of localKeys) {
+    const l = local.get(rel);
     const r = remote.get(rel);
     const remotePath = path.posix.join(remoteRoot, rel);
-
+
     if (!r) {
       // file only exists locally → New (no SFTP call needed)
-      toAdd.push({ rel, local:
-      …
-    }
+      toAdd.push({ rel, local: l, remotePath });
+    } else if (l.size !== r.size) {
+      // size differs → Changed (no SFTP call needed)
+      toUpdate.push({ rel, local: l, remote: r, remotePath });
     } else {
-
+      // same size → content comparison needed
+      keysNeedContentCompare.push(rel);
     }
-  }
-
-  // Parallel processing with a semaphore
-  let activeCount = 0;
-  const waiting = [];
-
-  async function acquireSemaphore() {
-    if (activeCount < concurrency) {
-      activeCount++;
-      return;
-    }
-    await new Promise((resolve) => waiting.push(resolve));
-    activeCount++;
-  }

-    …
-    const next = waiting.shift();
-    next();
+    checked++;
+    if (updateProgress && checked % analyzeChunk === 0) {
+      updateProgress("Analyse (Size): ", checked, totalToCheck, rel);
     }
   }

-  … (further removed lines are not rendered in this diff view)
+  // Phase 2: content comparison in real batches
+  // only for files with identical size
+  const totalContentCompare = keysNeedContentCompare.length;
+
+  for (let i = 0; i < totalContentCompare; i += concurrency) {
+    const batch = keysNeedContentCompare.slice(i, i + concurrency);
+
+    const batchResults = await Promise.all(
+      batch.map(async (rel) => {
+        const l = local.get(rel);
+        const r = remote.get(rel);
+        const remotePath = path.posix.join(remoteRoot, rel);
+
+        try {
+          if (l.isText) {
+            // text file: full content comparison
+            const [localBuf, remoteBuf] = await Promise.all([
+              fsp.readFile(l.localPath),
+              sftp.get(r.remotePath),
+            ]);
+
+            const localStr = localBuf.toString("utf8");
+            const remoteStr = (
+              Buffer.isBuffer(remoteBuf) ? remoteBuf : Buffer.from(remoteBuf)
+            ).toString("utf8");
+
+            return localStr !== remoteStr
+              ? { rel, local: l, remote: r, remotePath, changed: true }
+              : null;
+          } else {
+            // binary: hash comparison with cache
+            if (!getLocalHash || !getRemoteHash) {
+              return { rel, local: l, remote: r, remotePath, changed: true };
+            }
+
+            const [localHash, remoteHash] = await Promise.all([
+              getLocalHash(rel, l),
+              getRemoteHash(rel, r, sftp),
+            ]);
+
+            return localHash !== remoteHash
+              ? { rel, local: l, remote: r, remotePath, changed: true }
+              : null;
+          }
+        } catch (err) {
+          // on error, mark the file as changed (the safer choice)
+          return { rel, local: l, remote: r, remotePath, changed: true };
         }
+      })
+    );

-  …
-        if (localHash !== remoteHash) {
-          toUpdate.push({ rel, local: l, remote: r, remotePath });
-        }
-      }
-    } finally {
-      releaseSemaphore();
-      checked++;
-      if (
-        updateProgress &&
-        (checked === 1 || checked % analyzeChunk === 0 || checked === totalToCheck)
-      ) {
-        updateProgress("Analyse: ", checked, totalToCheck, rel);
+    // collect results
+    for (const result of batchResults) {
+      if (result && result.changed) {
+        toUpdate.push({ rel: result.rel, local: result.local, remote: result.remote, remotePath: result.remotePath });
       }
     }
-  }
 
-
-
+    // Progress update
+    const progressCount = Math.min(i + batch.length, totalContentCompare);
+    if (updateProgress) {
+      updateProgress("Analyse (Content): ", checked + progressCount, totalToCheck + totalContentCompare, batch[batch.length - 1]);
+    }
+  }

   return { toAdd, toUpdate };
 }
@@ -160,4 +153,4 @@ export function computeRemoteDeletes({ local, remote }) {
   }

   return toDelete;
-}
+}
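
For orientation, a hedged sketch of how the reworked analyseDifferences might be driven; the local and remote maps, the connected sftp client and the hash-cache functions are assumed to be built elsewhere in the app, and the remote root shown is illustrative:

    // Sketch only; parameter names mirror the options documented in the diff above.
    const { toAdd, toUpdate } = await analyseDifferences({
      local,                        // Map<rel, { rel, localPath, size, mtimeMs, isText? }>
      remote,                       // remote file map built from the SFTP listing
      remoteRoot: "/var/www/site",  // illustrative remote root
      sftp,                         // connected ssh2-sftp-client instance
      getLocalHash: cache.getLocalHash,
      getRemoteHash: cache.getRemoteHash,
      analyzeChunk: 10,
      updateProgress: (prefix, current, total, rel) =>
        console.log(`${prefix}${current}/${total} ${rel}`),
      concurrency: 10,              // batch size for the content-compare phase
    });

With this split, files that differ in size never trigger an SFTP read; only same-size files go through the batched content comparison.
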
package/src/helpers/hash-cache-leveldb.mjs
ADDED
@@ -0,0 +1,299 @@
+/**
+ * hash-cache-leveldb.mjs
+ *
+ * LevelDB-based hash cache for efficient storage of large file sets.
+ * Unlike the JSON-based cache, this scales to 100k+ files without memory issues.
+ *
+ * @author Carsten Nichte, 2025 / https://carsten-nichte.de/
+ */
+import { Level } from "level";
+import fs from "fs";
+import fsp from "fs/promises";
+import { createHash } from "crypto";
+import { Writable } from "stream";
+
+/**
+ * Streaming SHA-256 for a local file
+ */
+export function hashLocalFile(filePath) {
+  return new Promise((resolve, reject) => {
+    const hash = createHash("sha256");
+    const stream = fs.createReadStream(filePath);
+    stream.on("error", reject);
+    stream.on("data", (chunk) => hash.update(chunk));
+    stream.on("end", () => resolve(hash.digest("hex")));
+  });
+}
+
+/**
+ * Streaming SHA-256 for a remote file via ssh2-sftp-client
+ */
+export async function hashRemoteFile(sftp, remotePath) {
+  const hash = createHash("sha256");
+
+  const writable = new Writable({
+    write(chunk, enc, cb) {
+      hash.update(chunk);
+      cb();
+    },
+  });
+
+  await sftp.get(remotePath, writable);
+  return hash.digest("hex");
+}
+
+/**
+ * Creates a LevelDB-based hash cache.
+ *
+ * Database structure:
+ * local:<namespace>:<relPath> → JSON { size, mtimeMs, hash }
+ * remote:<namespace>:<relPath> → JSON { size, modifyTime, hash }
+ *
+ * @param {Object} options
+ * @param {string} options.cachePath - Path to the LevelDB directory (e.g., ".sync-cache-prod")
+ * @param {string} options.namespace - Namespace for keys (e.g., "prod")
+ */
+export async function createHashCacheLevelDB({ cachePath, namespace }) {
+  const ns = namespace || "default";
+
+  // Open or create the LevelDB database
+  const db = new Level(cachePath, { valueEncoding: "json" });
+  await db.open();
+
+  function localKey(relPath) {
+    return `local:${ns}:${relPath}`;
+  }
+
+  function remoteKey(relPath) {
+    return `remote:${ns}:${relPath}`;
+  }
+
+  /**
+   * Get cached local hash or compute and store it
+   */
+  async function getLocalHash(rel, meta) {
+    const key = localKey(rel);
+
+    try {
+      const cached = await db.get(key);
+
+      // Cache hit: check if still valid (same size + mtime)
+      if (
+        cached &&
+        cached.size === meta.size &&
+        cached.mtimeMs === meta.mtimeMs &&
+        cached.hash
+      ) {
+        return cached.hash;
+      }
+    } catch (err) {
+      // Key not found - that's fine, we'll compute the hash
+      if (err.code !== "LEVEL_NOT_FOUND") {
+        throw err;
+      }
+    }
+
+    // Compute hash and store
+    const hash = await hashLocalFile(meta.localPath);
+    await db.put(key, {
+      size: meta.size,
+      mtimeMs: meta.mtimeMs,
+      hash,
+    });
+
+    return hash;
+  }
+
+  /**
+   * Get cached remote hash or compute and store it
+   */
+  async function getRemoteHash(rel, meta, sftp) {
+    const key = remoteKey(rel);
+
+    try {
+      const cached = await db.get(key);
+
+      // Cache hit: check if still valid (same size + modifyTime)
+      if (
+        cached &&
+        cached.size === meta.size &&
+        cached.modifyTime === meta.modifyTime &&
+        cached.hash
+      ) {
+        return cached.hash;
+      }
+    } catch (err) {
+      // Key not found - compute the hash
+      if (err.code !== "LEVEL_NOT_FOUND") {
+        throw err;
+      }
+    }
+
+    // Compute hash (downloads file content for hashing)
+    const hash = await hashRemoteFile(sftp, meta.remotePath);
+    await db.put(key, {
+      size: meta.size,
+      modifyTime: meta.modifyTime,
+      hash,
+    });
+
+    return hash;
+  }
+
+  /**
+   * Explicitly save (flush) - LevelDB auto-persists, but this ensures sync
+   */
+  async function save() {
+    // LevelDB auto-persists, nothing to do
+  }
+
+  /**
+   * Close the database connection
+   */
+  async function close() {
+    await db.close();
+  }
+
+  /**
+   * Get statistics about cache contents
+   */
+  async function getStats() {
+    let localCount = 0;
+    let remoteCount = 0;
+
+    for await (const key of db.keys()) {
+      if (key.startsWith(`local:${ns}:`)) {
+        localCount++;
+      } else if (key.startsWith(`remote:${ns}:`)) {
+        remoteCount++;
+      }
+    }
+
+    return { localCount, remoteCount };
+  }
+
+  /**
+   * Clean up stale entries (entries for files that no longer exist)
+   * @param {Set<string>} validLocalPaths - Set of currently existing local file paths
+   * @param {Set<string>} validRemotePaths - Set of currently existing remote file paths
+   */
+  async function cleanup(validLocalPaths, validRemotePaths) {
+    const batch = db.batch();
+    let deletedCount = 0;
+
+    for await (const key of db.keys()) {
+      if (key.startsWith(`local:${ns}:`)) {
+        const relPath = key.slice(`local:${ns}:`.length);
+        if (!validLocalPaths.has(relPath)) {
+          batch.del(key);
+          deletedCount++;
+        }
+      } else if (key.startsWith(`remote:${ns}:`)) {
+        const relPath = key.slice(`remote:${ns}:`.length);
+        if (!validRemotePaths.has(relPath)) {
+          batch.del(key);
+          deletedCount++;
+        }
+      }
+    }
+
+    await batch.write();
+    return deletedCount;
+  }
+
+  return {
+    getLocalHash,
+    getRemoteHash,
+    save,
+    close,
+    getStats,
+    cleanup,
+    db, // Expose for advanced usage
+  };
+}
+
+/**
+ * Migrate from old JSON cache to LevelDB
+ * @param {string} jsonCachePath - Path to old .sync-cache.json file
+ * @param {string} levelDbPath - Path to new LevelDB directory
+ * @param {string} namespace - Namespace for keys
+ */
+export async function migrateFromJsonCache(jsonCachePath, levelDbPath, namespace) {
+  const ns = namespace || "default";
+
+  // Check if JSON cache exists
+  try {
+    await fsp.access(jsonCachePath);
+  } catch {
+    return { migrated: false, reason: "No JSON cache found" };
+  }
+
+  // Check if LevelDB already has data (don't re-migrate)
+  const db = new Level(levelDbPath, { valueEncoding: "json" });
+  await db.open();
+
+  let hasData = false;
+  for await (const _ of db.keys({ limit: 1 })) {
+    hasData = true;
+    break;
+  }
+
+  if (hasData) {
+    await db.close();
+    return { migrated: false, reason: "LevelDB already has data" };
+  }
+
+  // Read and parse JSON cache
+  let jsonCache;
+  try {
+    const raw = await fsp.readFile(jsonCachePath, "utf8");
+    jsonCache = JSON.parse(raw);
+  } catch (parseErr) {
+    await db.close();
+    // Rename corrupt file so it doesn't block future runs
+    try {
+      await fsp.rename(jsonCachePath, jsonCachePath + ".corrupt");
+    } catch {
+      // Ignore rename errors
+    }
+    return { migrated: false, reason: `JSON cache corrupt: ${parseErr.message}` };
+  }
+
+  // Migrate entries
+  const batch = db.batch();
+  let localCount = 0;
+  let remoteCount = 0;
+
+  if (jsonCache.local) {
+    for (const [key, value] of Object.entries(jsonCache.local)) {
+      // Keys in JSON were like "namespace:relPath"
+      const relPath = key.startsWith(`${ns}:`) ? key.slice(ns.length + 1) : key;
+      batch.put(`local:${ns}:${relPath}`, value);
+      localCount++;
+    }
+  }
+
+  if (jsonCache.remote) {
+    for (const [key, value] of Object.entries(jsonCache.remote)) {
+      const relPath = key.startsWith(`${ns}:`) ? key.slice(ns.length + 1) : key;
+      batch.put(`remote:${ns}:${relPath}`, value);
+      remoteCount++;
+    }
+  }
+
+  await batch.write();
+  await db.close();
+
+  // Optionally rename old cache to .bak
+  try {
+    await fsp.rename(jsonCachePath, jsonCachePath + ".bak");
+  } catch {
+    // Ignore rename errors
+  }
+
+  return {
+    migrated: true,
+    localCount,
+    remoteCount,
+  };
+}
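
The LevelDB cache exposes the same getLocalHash/getRemoteHash interface that analyseDifferences expects, plus a one-time migration from the old JSON cache. A small sketch, assuming the cache directory and namespace from the JSDoc examples above and an already-connected sftp client; the import path is illustrative:

    // Sketch only; ".sync-cache.json", ".sync-cache-prod" and "prod" are the example values from the JSDoc.
    import { createHashCacheLevelDB, migrateFromJsonCache } from "./src/helpers/hash-cache-leveldb.mjs";

    // One-time migration; returns { migrated: false, reason } if there is nothing to do
    await migrateFromJsonCache(".sync-cache.json", ".sync-cache-prod", "prod");

    const cache = await createHashCacheLevelDB({
      cachePath: ".sync-cache-prod",
      namespace: "prod",
    });

    const { toAdd, toUpdate } = await analyseDifferences({
      local, remote, remoteRoot, sftp,
      getLocalHash: cache.getLocalHash,
      getRemoteHash: cache.getRemoteHash,
    });

    console.log(await cache.getStats()); // { localCount, remoteCount }
    await cache.close();
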