aethel 0.4.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +23 -0
- package/README.md +95 -45
- package/docs/ARCHITECTURE.md +24 -0
- package/package.json +16 -3
- package/scripts/demo.js +416 -0
- package/scripts/render-demo-gif.py +90 -0
- package/scripts/render-demo-screenshot.js +65 -0
- package/src/cli.js +47 -13
- package/src/core/compress.js +285 -0
- package/src/core/config.js +119 -0
- package/src/core/diff.js +146 -7
- package/src/core/pack-manifest.js +163 -0
- package/src/core/pack.js +355 -0
- package/src/core/snapshot.js +55 -9
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Multi-algorithm compression/decompression abstraction.
|
|
3
|
+
* Supports gzip, brotli (built-in), and optionally zstd, xz.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import zlib from "node:zlib";
|
|
7
|
+
import { pipeline } from "node:stream/promises";
|
|
8
|
+
import fs from "node:fs";
|
|
9
|
+
import path from "node:path";
|
|
10
|
+
|
|
11
|
+
// Algorithm enumeration — canonical names for every supported codec.
// Frozen so callers cannot add or mutate entries at runtime.
export const Algorithm = Object.freeze({
  NONE: "none",
  GZIP: "gzip",
  ZSTD: "zstd",
  BROTLI: "brotli",
  XZ: "xz",
});

// File extension mapping — archive suffix produced for each algorithm.
// Keep in sync with detectAlgorithm(), which parses these suffixes back.
export const EXTENSIONS = {
  [Algorithm.NONE]: ".tar",
  [Algorithm.GZIP]: ".tar.gz",
  [Algorithm.ZSTD]: ".tar.zst",
  [Algorithm.BROTLI]: ".tar.br",
  [Algorithm.XZ]: ".tar.xz",
};

// Compression profiles for easy configuration — named speed/ratio presets.
// zstd/xz require optional dependencies; see isAlgorithmAvailable().
export const PROFILES = {
  fast: { algorithm: Algorithm.ZSTD, level: 1 },
  balanced: { algorithm: Algorithm.ZSTD, level: 6 },
  maximum: { algorithm: Algorithm.ZSTD, level: 19 },
  extreme: { algorithm: Algorithm.XZ, level: 6 },
};

// Cache for optional dependency availability.
// Maps module name -> loaded module (or null when the import failed), so
// each optional dependency is probed at most once per process.
const availabilityCache = new Map();
|
|
39
|
+
|
|
40
|
+
/**
 * Try to load an optional dependency, memoizing the outcome.
 * A failed import is cached as null so we never re-probe a missing package.
 * @param {string} moduleName
 * @returns {Promise<any|null>} The module (default export preferred) or null
 */
async function tryLoadModule(moduleName) {
  if (!availabilityCache.has(moduleName)) {
    let resolved = null;
    try {
      const mod = await import(moduleName);
      resolved = mod.default || mod;
    } catch {
      // Package not installed (or failed to load) — remember that.
      resolved = null;
    }
    availabilityCache.set(moduleName, resolved);
  }
  return availabilityCache.get(moduleName);
}
|
|
58
|
+
|
|
59
|
+
/**
 * Check if an algorithm is available in the current environment.
 * none/gzip/brotli are always usable (Node built-ins); zstd and xz depend
 * on optional packages being installed.
 * @param {string} algorithm - Algorithm name from Algorithm enum
 * @returns {Promise<boolean>}
 */
export async function isAlgorithmAvailable(algorithm) {
  const builtIn =
    algorithm === Algorithm.NONE ||
    algorithm === Algorithm.GZIP ||
    algorithm === Algorithm.BROTLI;
  if (builtIn) {
    return true;
  }
  if (algorithm === Algorithm.ZSTD) {
    return (await tryLoadModule("@bokuweb/zstd-wasm")) !== null;
  }
  if (algorithm === Algorithm.XZ) {
    return (await tryLoadModule("lzma-native")) !== null;
  }
  // Unknown name — treat as unavailable rather than throwing.
  return false;
}
|
|
78
|
+
|
|
79
|
+
/**
 * Get the best available algorithm for compression.
 * Resolution order: the preferred algorithm if usable, otherwise zstd
 * (when preferred wasn't already zstd), otherwise gzip (always available).
 * @param {string} preferred - Preferred algorithm
 * @returns {Promise<string>} Available algorithm
 */
export async function resolveAlgorithm(preferred) {
  if (await isAlgorithmAvailable(preferred)) {
    return preferred;
  }
  const zstdUsable =
    preferred !== Algorithm.ZSTD && (await isAlgorithmAvailable(Algorithm.ZSTD));
  return zstdUsable ? Algorithm.ZSTD : Algorithm.GZIP;
}
|
|
95
|
+
|
|
96
|
+
/**
 * Create a compression stream for the given algorithm.
 * @param {string} algorithm - Algorithm name from the Algorithm enum
 * @param {{ level?: number }} options - Compression options (level defaults to 6)
 * @returns {Promise<import("node:stream").Transform>} Compression stream
 * @throws {Error} If the algorithm is unknown or its optional dependency is missing
 */
export async function createCompressStream(algorithm, options = {}) {
  const level = options.level ?? 6;

  switch (algorithm) {
    case Algorithm.NONE: {
      // Pass-through stream. Braces scope the const to this case — without
      // them the binding is switch-wide (no-case-declarations hazard).
      const { PassThrough } = await import("node:stream");
      return new PassThrough();
    }

    case Algorithm.GZIP:
      return zlib.createGzip({ level });

    case Algorithm.BROTLI:
      // Brotli quality tops out at 11, so clamp the requested level.
      return zlib.createBrotliCompress({
        params: {
          [zlib.constants.BROTLI_PARAM_QUALITY]: Math.min(level, 11),
        },
      });

    case Algorithm.ZSTD: {
      const zstd = await tryLoadModule("@bokuweb/zstd-wasm");
      if (!zstd) {
        throw new Error("zstd not available. Install @bokuweb/zstd-wasm");
      }
      // zstd-wasm provides buffer-oriented compress/decompress, not streams,
      // so collect all input and compress once on flush. NOTE(review): this
      // buffers the entire payload in memory, and @bokuweb/zstd-wasm normally
      // requires init() before compress — confirm initialization happens
      // before this stream is used.
      const { Transform } = await import("node:stream");
      const chunks = [];
      return new Transform({
        transform(chunk, encoding, callback) {
          chunks.push(chunk);
          callback();
        },
        async flush(callback) {
          try {
            const input = Buffer.concat(chunks);
            const compressed = await zstd.compress(input, level);
            this.push(Buffer.from(compressed));
            callback();
          } catch (err) {
            callback(err);
          }
        },
      });
    }

    case Algorithm.XZ: {
      const lzma = await tryLoadModule("lzma-native");
      if (!lzma) {
        throw new Error("xz not available. Install lzma-native");
      }
      return lzma.createCompressor({ preset: level });
    }

    default:
      throw new Error(`Unknown compression algorithm: ${algorithm}`);
  }
}
|
|
160
|
+
|
|
161
|
+
/**
 * Create a decompression stream for the given algorithm.
 * @param {string} algorithm - Algorithm name
 * @returns {Promise<import("node:stream").Transform>} Decompression stream
 * @throws {Error} If the algorithm is unknown or its optional dependency is missing
 */
export async function createDecompressStream(algorithm) {
  if (algorithm === Algorithm.NONE) {
    const { PassThrough } = await import("node:stream");
    return new PassThrough();
  }

  if (algorithm === Algorithm.GZIP) {
    return zlib.createGunzip();
  }

  if (algorithm === Algorithm.BROTLI) {
    return zlib.createBrotliDecompress();
  }

  if (algorithm === Algorithm.ZSTD) {
    const zstd = await tryLoadModule("@bokuweb/zstd-wasm");
    if (!zstd) {
      throw new Error("zstd not available. Install @bokuweb/zstd-wasm");
    }
    // zstd-wasm is buffer-oriented, so collect the whole payload and
    // decompress it once on flush.
    const { Transform } = await import("node:stream");
    const buffered = [];
    return new Transform({
      transform(chunk, encoding, callback) {
        buffered.push(chunk);
        callback();
      },
      async flush(callback) {
        try {
          const decompressed = await zstd.decompress(Buffer.concat(buffered));
          this.push(Buffer.from(decompressed));
          callback();
        } catch (err) {
          callback(err);
        }
      },
    });
  }

  if (algorithm === Algorithm.XZ) {
    const lzma = await tryLoadModule("lzma-native");
    if (!lzma) {
      throw new Error("xz not available. Install lzma-native");
    }
    return lzma.createDecompressor();
  }

  throw new Error(`Unknown decompression algorithm: ${algorithm}`);
}
|
|
216
|
+
|
|
217
|
+
/**
 * Compress a file to destination and report size statistics.
 * @param {string} inputPath - Source file path
 * @param {string} outputPath - Destination file path
 * @param {{ algorithm?: string, level?: number }} options
 * @returns {Promise<{ originalSize: number, compressedSize: number, ratio: number }>}
 *   ratio is the fraction saved (0 when the input is empty)
 */
export async function compressFile(inputPath, outputPath, options = {}) {
  const algorithm = options.algorithm ?? Algorithm.GZIP;
  const level = options.level ?? 6;

  const originalSize = fs.statSync(inputPath).size;

  const source = fs.createReadStream(inputPath);
  const sink = fs.createWriteStream(outputPath);
  const compressor = await createCompressStream(algorithm, { level });

  await pipeline(source, compressor, sink);

  const compressedSize = fs.statSync(outputPath).size;
  // Guard against divide-by-zero on empty inputs.
  const ratio = originalSize > 0 ? 1 - compressedSize / originalSize : 0;

  return { originalSize, compressedSize, ratio };
}
|
|
243
|
+
|
|
244
|
+
/**
 * Decompress a file to destination.
 * @param {string} inputPath - Compressed file path
 * @param {string} outputPath - Destination file path
 * @param {string} algorithm - Algorithm used for compression
 * @returns {Promise<void>}
 */
export async function decompressFile(inputPath, outputPath, algorithm) {
  const decompressor = await createDecompressStream(algorithm);
  await pipeline(
    fs.createReadStream(inputPath),
    decompressor,
    fs.createWriteStream(outputPath)
  );
}
|
|
258
|
+
|
|
259
|
+
/**
 * Detect algorithm from file extension.
 * Matches the suffixes produced in EXTENSIONS (plus the common ".tgz"
 * alias for gzip); bare ".tar" means no compression.
 * @param {string} filePath - File path with extension
 * @returns {string|null} Algorithm name or null if unknown
 */
export function detectAlgorithm(filePath) {
  const lower = filePath.toLowerCase();

  // Ordered suffix table: first match wins.
  const suffixTable = [
    [[".tar.gz", ".tgz"], Algorithm.GZIP],
    [[".tar.zst"], Algorithm.ZSTD],
    [[".tar.br"], Algorithm.BROTLI],
    [[".tar.xz"], Algorithm.XZ],
  ];
  for (const [suffixes, algo] of suffixTable) {
    if (suffixes.some((suffix) => lower.endsWith(suffix))) {
      return algo;
    }
  }

  if (path.extname(filePath).toLowerCase() === ".tar") {
    return Algorithm.NONE;
  }

  return null;
}
|
package/src/core/config.js
CHANGED
|
@@ -5,6 +5,8 @@
|
|
|
5
5
|
import crypto from "node:crypto";
|
|
6
6
|
import fs from "node:fs";
|
|
7
7
|
import path from "node:path";
|
|
8
|
+
import YAML from "yaml";
|
|
9
|
+
import { createManifest } from "./pack-manifest.js";
|
|
8
10
|
|
|
9
11
|
export const AETHEL_DIR = ".aethel";
|
|
10
12
|
export const CONFIG_FILE = "config.json";
|
|
@@ -12,6 +14,8 @@ export const INDEX_FILE = "index.json";
|
|
|
12
14
|
export const SNAPSHOTS_DIR = "snapshots";
|
|
13
15
|
export const HISTORY_DIR = "history";
|
|
14
16
|
export const LATEST_SNAPSHOT = "latest.json";
|
|
17
|
+
export const PACK_MANIFEST_FILE = "pack-manifest.json";
|
|
18
|
+
export const PACK_CONFIG_FILE = ".aethelconfig";
|
|
15
19
|
|
|
16
20
|
/** Walk up from `start` looking for a .aethel/ directory. */
|
|
17
21
|
export function findRoot(start = process.cwd()) {
|
|
@@ -135,3 +139,118 @@ export function writeSnapshot(root, snapshot) {
|
|
|
135
139
|
// Compact JSON — snapshots can be large, pretty-printing is slow + wastes disk
|
|
136
140
|
fs.writeFileSync(latest, JSON.stringify(snapshot) + "\n");
|
|
137
141
|
}
|
|
142
|
+
|
|
143
|
+
// ── pack config helpers ───────────────────────────────────────────────

// Fallback configuration used when .aethelconfig is missing or unreadable.
// Packing is opt-in (enabled: false); zstd level 6 matches the "balanced"
// compression profile. Callers receive a structuredClone of this object
// (see loadPackConfig) so the shared template is never mutated.
const DEFAULT_PACK_CONFIG = {
  packing: {
    enabled: false,
    compression: {
      default: {
        algorithm: "zstd",
        level: 6,
      },
      overrides: [],
    },
    rules: [],
  },
};
|
|
158
|
+
|
|
159
|
+
/**
 * Load pack configuration from .aethelconfig (YAML).
 * Returns default config if file doesn't exist or cannot be parsed.
 * @param {string} root - Workspace root
 * @returns {object} Pack configuration (always a fresh object; safe to mutate)
 */
export function loadPackConfig(root) {
  const p = path.join(root, PACK_CONFIG_FILE);
  if (!fs.existsSync(p)) {
    // Clone so callers cannot mutate the shared default template.
    return structuredClone(DEFAULT_PACK_CONFIG);
  }
  try {
    const content = fs.readFileSync(p, "utf-8");
    const parsed = YAML.parse(content);
    // Merge with defaults for missing keys.
    // NOTE(review): the merge is shallow below `compression` — a user file
    // that sets `compression.default` partially replaces the whole default
    // object (e.g. the default `level` is lost). Confirm whether a deeper
    // merge is intended here.
    return {
      packing: {
        ...DEFAULT_PACK_CONFIG.packing,
        ...parsed?.packing,
        compression: {
          ...DEFAULT_PACK_CONFIG.packing.compression,
          ...parsed?.packing?.compression,
        },
      },
    };
  } catch {
    // Unreadable or malformed YAML falls back to defaults rather than
    // failing the whole command (deliberate best-effort loading).
    return structuredClone(DEFAULT_PACK_CONFIG);
  }
}
|
|
188
|
+
|
|
189
|
+
/**
 * Save pack configuration to .aethelconfig as YAML.
 * @param {string} root - Workspace root
 * @param {object} config - Configuration to save
 */
export function savePackConfig(root, config) {
  const target = path.join(root, PACK_CONFIG_FILE);
  fs.writeFileSync(target, YAML.stringify(config));
}
|
|
199
|
+
|
|
200
|
+
/**
 * Read pack manifest from .aethel/pack-manifest.json.
 * Returns a fresh empty manifest when the file is absent or unparsable.
 * @param {string} root - Workspace root
 * @returns {object} Pack manifest
 */
export function loadPackManifest(root) {
  const manifestPath = path.join(dot(root), PACK_MANIFEST_FILE);
  if (!fs.existsSync(manifestPath)) {
    return createManifest();
  }
  try {
    const raw = fs.readFileSync(manifestPath, "utf-8");
    return JSON.parse(raw);
  } catch {
    // Corrupt manifest — start over rather than crashing.
    return createManifest();
  }
}
|
|
217
|
+
|
|
218
|
+
/**
 * Save pack manifest to .aethel/pack-manifest.json (pretty-printed,
 * newline-terminated).
 * @param {string} root - Workspace root
 * @param {object} manifest - Manifest to save
 */
export function savePackManifest(root, manifest) {
  const target = path.join(dot(root), PACK_MANIFEST_FILE);
  const body = `${JSON.stringify(manifest, null, 2)}\n`;
  fs.writeFileSync(target, body);
}
|
|
227
|
+
|
|
228
|
+
/**
 * Check if packing feature is enabled in the workspace config.
 * Strict comparison: only an explicit `enabled: true` counts.
 * @param {string} root - Workspace root
 * @returns {boolean}
 */
export function isPackingEnabled(root) {
  return loadPackConfig(root).packing?.enabled === true;
}
|
|
237
|
+
|
|
238
|
+
/**
 * Get packing rule for a specific path.
 * A rule matches when its `path` equals the given path or is an ancestor
 * directory of it (after normalizing separators and trimming slashes).
 * @param {object} packConfig - Pack configuration (see loadPackConfig)
 * @param {string} relativePath - Workspace-relative path to check
 * @returns {object|null} First matching rule or null
 */
export function getPackRule(packConfig, relativePath) {
  // Normalize to forward slashes with no leading/trailing separators so
  // Windows-style paths and stray slashes compare equal.
  const normalize = (p) =>
    p.replace(/\\/g, "/").replace(/^\/+/, "").replace(/\/+$/, "");
  const normalized = normalize(relativePath);

  for (const rule of packConfig.packing?.rules ?? []) {
    // Defensive: .aethelconfig is user-authored, so a rule may be missing
    // its path entirely — skip it instead of throwing.
    if (typeof rule?.path !== "string") {
      continue;
    }
    const rulePath = normalize(rule.path);
    if (normalized === rulePath || normalized.startsWith(`${rulePath}/`)) {
      return rule;
    }
  }

  return null;
}
|
package/src/core/diff.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { isWorkspaceType } from "./drive-api.js";
|
|
2
2
|
import { loadIgnoreRules } from "./ignore.js";
|
|
3
|
+
import { loadPackManifest } from "./config.js";
|
|
3
4
|
|
|
4
5
|
export const ChangeType = Object.freeze({
|
|
5
6
|
REMOTE_ADDED: "remote_added",
|
|
@@ -9,6 +10,12 @@ export const ChangeType = Object.freeze({
|
|
|
9
10
|
LOCAL_MODIFIED: "local_modified",
|
|
10
11
|
LOCAL_DELETED: "local_deleted",
|
|
11
12
|
CONFLICT: "conflict",
|
|
13
|
+
// Pack-specific change types
|
|
14
|
+
PACK_LOCAL_MODIFIED: "pack_local_modified",
|
|
15
|
+
PACK_REMOTE_MODIFIED: "pack_remote_modified",
|
|
16
|
+
PACK_SYNCED: "pack_synced",
|
|
17
|
+
PACK_CONFLICT: "pack_conflict",
|
|
18
|
+
PACK_NEW: "pack_new",
|
|
12
19
|
});
|
|
13
20
|
|
|
14
21
|
const SHORT_STATUS = {
|
|
@@ -19,6 +26,11 @@ const SHORT_STATUS = {
|
|
|
19
26
|
[ChangeType.LOCAL_MODIFIED]: "ML",
|
|
20
27
|
[ChangeType.LOCAL_DELETED]: "-L",
|
|
21
28
|
[ChangeType.CONFLICT]: "!!",
|
|
29
|
+
[ChangeType.PACK_LOCAL_MODIFIED]: "PL",
|
|
30
|
+
[ChangeType.PACK_REMOTE_MODIFIED]: "PR",
|
|
31
|
+
[ChangeType.PACK_SYNCED]: "P=",
|
|
32
|
+
[ChangeType.PACK_CONFLICT]: "P!",
|
|
33
|
+
[ChangeType.PACK_NEW]: "P+",
|
|
22
34
|
};
|
|
23
35
|
|
|
24
36
|
const DESCRIPTION = {
|
|
@@ -29,6 +41,11 @@ const DESCRIPTION = {
|
|
|
29
41
|
[ChangeType.LOCAL_MODIFIED]: "modified locally",
|
|
30
42
|
[ChangeType.LOCAL_DELETED]: "deleted locally",
|
|
31
43
|
[ChangeType.CONFLICT]: "both sides changed",
|
|
44
|
+
[ChangeType.PACK_LOCAL_MODIFIED]: "pack changed locally",
|
|
45
|
+
[ChangeType.PACK_REMOTE_MODIFIED]: "pack changed on Drive",
|
|
46
|
+
[ChangeType.PACK_SYNCED]: "pack up to date",
|
|
47
|
+
[ChangeType.PACK_CONFLICT]: "pack conflict",
|
|
48
|
+
[ChangeType.PACK_NEW]: "new pack",
|
|
32
49
|
};
|
|
33
50
|
|
|
34
51
|
const SUGGESTED_ACTION = {
|
|
@@ -39,6 +56,11 @@ const SUGGESTED_ACTION = {
|
|
|
39
56
|
[ChangeType.LOCAL_MODIFIED]: "upload",
|
|
40
57
|
[ChangeType.LOCAL_DELETED]: "delete_remote",
|
|
41
58
|
[ChangeType.CONFLICT]: "conflict",
|
|
59
|
+
[ChangeType.PACK_LOCAL_MODIFIED]: "push_pack",
|
|
60
|
+
[ChangeType.PACK_REMOTE_MODIFIED]: "pull_pack",
|
|
61
|
+
[ChangeType.PACK_SYNCED]: "none",
|
|
62
|
+
[ChangeType.PACK_CONFLICT]: "resolve_pack",
|
|
63
|
+
[ChangeType.PACK_NEW]: "push_pack",
|
|
42
64
|
};
|
|
43
65
|
|
|
44
66
|
function createChange({
|
|
@@ -62,9 +84,10 @@ function createChange({
|
|
|
62
84
|
};
|
|
63
85
|
}
|
|
64
86
|
|
|
65
|
-
function buildDiffResult(changes) {
|
|
87
|
+
function buildDiffResult(changes, packChanges = []) {
|
|
66
88
|
return {
|
|
67
89
|
changes,
|
|
90
|
+
packChanges,
|
|
68
91
|
get remoteChanges() {
|
|
69
92
|
return this.changes.filter((change) =>
|
|
70
93
|
change.changeType.startsWith("remote")
|
|
@@ -80,8 +103,29 @@ function buildDiffResult(changes) {
|
|
|
80
103
|
(change) => change.changeType === ChangeType.CONFLICT
|
|
81
104
|
);
|
|
82
105
|
},
|
|
106
|
+
get packConflicts() {
|
|
107
|
+
return this.packChanges.filter(
|
|
108
|
+
(change) => change.changeType === ChangeType.PACK_CONFLICT
|
|
109
|
+
);
|
|
110
|
+
},
|
|
111
|
+
get pendingPackChanges() {
|
|
112
|
+
return this.packChanges.filter(
|
|
113
|
+
(change) =>
|
|
114
|
+
change.changeType === ChangeType.PACK_LOCAL_MODIFIED ||
|
|
115
|
+
change.changeType === ChangeType.PACK_REMOTE_MODIFIED ||
|
|
116
|
+
change.changeType === ChangeType.PACK_NEW
|
|
117
|
+
);
|
|
118
|
+
},
|
|
119
|
+
get syncedPacks() {
|
|
120
|
+
return this.packChanges.filter(
|
|
121
|
+
(change) => change.changeType === ChangeType.PACK_SYNCED
|
|
122
|
+
);
|
|
123
|
+
},
|
|
124
|
+
get hasPackChanges() {
|
|
125
|
+
return this.pendingPackChanges.length > 0 || this.packConflicts.length > 0;
|
|
126
|
+
},
|
|
83
127
|
get isClean() {
|
|
84
|
-
return this.changes.length === 0;
|
|
128
|
+
return this.changes.length === 0 && this.pendingPackChanges.length === 0;
|
|
85
129
|
},
|
|
86
130
|
};
|
|
87
131
|
}
|
|
@@ -154,6 +198,93 @@ function promoteConflicts(changes) {
|
|
|
154
198
|
return filtered;
|
|
155
199
|
}
|
|
156
200
|
|
|
201
|
+
/**
 * Compute pack-level changes by comparing local packedDirs against manifest.
 * For each local pack: not in manifest → PACK_NEW; local tree hash differs
 * from the manifest's local hash → local change; manifest's remote hash
 * differs from its local hash → remote change; both → conflict; neither →
 * synced.
 * @param {string|null} root - Workspace root for loading manifest
 * @param {object} packedDirs - Local packed directories from scanLocal
 * @param {object|null} snapshot - Previous snapshot (currently unused here;
 *   kept for interface stability)
 * @returns {object[]} Array of pack change objects
 */
function computePackChanges(root, packedDirs, snapshot) {
  // No root or no local packs → nothing to compare.
  if (!root || !packedDirs || Object.keys(packedDirs).length === 0) {
    return [];
  }

  const manifest = loadPackManifest(root);
  const changes = [];

  for (const [packPath, packInfo] of Object.entries(packedDirs)) {
    const manifestEntry = manifest.packs?.[packPath];
    const localTreeHash = packInfo.treeHash;

    if (!manifestEntry) {
      // Pack not in manifest = new pack, never synced before.
      changes.push(
        createChange({
          changeType: ChangeType.PACK_NEW,
          path: packPath,
          localMeta: packInfo,
        })
      );
      continue;
    }

    const { localTreeHash: manifestLocalHash, remoteTreeHash: manifestRemoteHash } = manifestEntry;

    // Local modification: working tree differs from the last-synced local hash.
    const localChanged = localTreeHash !== manifestLocalHash;
    // Remote modification: the recorded remote hash has diverged from the
    // recorded local hash (remote advanced since our last sync).
    const remoteChanged = manifestRemoteHash && manifestRemoteHash !== manifestLocalHash;

    if (localChanged && remoteChanged) {
      // Both sides changed = conflict.
      changes.push(
        createChange({
          changeType: ChangeType.PACK_CONFLICT,
          path: packPath,
          localMeta: { ...packInfo, treeHash: localTreeHash },
          snapshotMeta: { treeHash: manifestLocalHash },
          remoteMeta: { treeHash: manifestRemoteHash },
        })
      );
    } else if (localChanged) {
      // Only local changed.
      changes.push(
        createChange({
          changeType: ChangeType.PACK_LOCAL_MODIFIED,
          path: packPath,
          localMeta: { ...packInfo, treeHash: localTreeHash },
          snapshotMeta: { treeHash: manifestLocalHash },
        })
      );
    } else if (remoteChanged) {
      // Only remote changed.
      changes.push(
        createChange({
          changeType: ChangeType.PACK_REMOTE_MODIFIED,
          path: packPath,
          localMeta: packInfo,
          remoteMeta: { treeHash: manifestRemoteHash },
          snapshotMeta: { treeHash: manifestLocalHash },
        })
      );
    } else {
      // No changes = synced.
      changes.push(
        createChange({
          changeType: ChangeType.PACK_SYNCED,
          path: packPath,
          localMeta: packInfo,
        })
      );
    }
  }

  return changes;
}
|
|
287
|
+
|
|
157
288
|
/**
|
|
158
289
|
* @param {object|null} snapshot
|
|
159
290
|
* @param {object[]} remoteFiles
|
|
@@ -182,6 +313,11 @@ export function computeDiff(snapshot, remoteFiles, localFiles, { root, respectIg
|
|
|
182
313
|
if (ignoreRules) {
|
|
183
314
|
remoteFiles = remoteFiles.filter((f) => !ignoreRules.ignores(f.path));
|
|
184
315
|
}
|
|
316
|
+
|
|
317
|
+
// Handle new localFiles format with .files and .packedDirs
|
|
318
|
+
const localFilesData = localFiles?.files ?? localFiles;
|
|
319
|
+
const packedDirs = localFiles?.packedDirs ?? {};
|
|
320
|
+
|
|
185
321
|
const changes = [];
|
|
186
322
|
const snapshotFiles = snapshot?.files || {};
|
|
187
323
|
const snapshotLocalFiles = snapshot?.localFiles || {};
|
|
@@ -189,13 +325,13 @@ export function computeDiff(snapshot, remoteFiles, localFiles, { root, respectIg
|
|
|
189
325
|
// Build sets of all folder paths that implicitly exist on each side
|
|
190
326
|
// (from parent directories of files), so we can skip redundant folder additions.
|
|
191
327
|
const remoteFolderPaths = collectFolderPaths(remoteFiles.map((f) => f.path));
|
|
192
|
-
const localFolderPaths = collectFolderPaths(Object.keys(
|
|
328
|
+
const localFolderPaths = collectFolderPaths(Object.keys(localFilesData));
|
|
193
329
|
|
|
194
330
|
// Also include explicit folder entries
|
|
195
331
|
for (const f of remoteFiles) {
|
|
196
332
|
if (f.isFolder) remoteFolderPaths.add(f.path);
|
|
197
333
|
}
|
|
198
|
-
for (const [p, meta] of Object.entries(
|
|
334
|
+
for (const [p, meta] of Object.entries(localFilesData)) {
|
|
199
335
|
if (meta.isFolder) localFolderPaths.add(p);
|
|
200
336
|
}
|
|
201
337
|
|
|
@@ -254,7 +390,7 @@ export function computeDiff(snapshot, remoteFiles, localFiles, { root, respectIg
|
|
|
254
390
|
}
|
|
255
391
|
}
|
|
256
392
|
|
|
257
|
-
for (const [relativePath, localMeta] of Object.entries(
|
|
393
|
+
for (const [relativePath, localMeta] of Object.entries(localFilesData)) {
|
|
258
394
|
const snapshotEntry = snapshotLocalFiles[relativePath];
|
|
259
395
|
|
|
260
396
|
if (!snapshotEntry) {
|
|
@@ -285,7 +421,7 @@ export function computeDiff(snapshot, remoteFiles, localFiles, { root, respectIg
|
|
|
285
421
|
}
|
|
286
422
|
|
|
287
423
|
for (const [relativePath, snapshotEntry] of Object.entries(snapshotLocalFiles)) {
|
|
288
|
-
if (!(relativePath in
|
|
424
|
+
if (!(relativePath in localFilesData)) {
|
|
289
425
|
// Skip folder deletion if the folder still implicitly exists locally
|
|
290
426
|
if (snapshotEntry.isFolder && localFolderPaths.has(relativePath)) {
|
|
291
427
|
continue;
|
|
@@ -300,5 +436,8 @@ export function computeDiff(snapshot, remoteFiles, localFiles, { root, respectIg
|
|
|
300
436
|
}
|
|
301
437
|
}
|
|
302
438
|
|
|
303
|
-
|
|
439
|
+
// Compute pack changes
|
|
440
|
+
const packChanges = computePackChanges(root, packedDirs, snapshot);
|
|
441
|
+
|
|
442
|
+
return buildDiffResult(promoteConflicts(changes), packChanges);
|
|
304
443
|
}
|