aethel 0.4.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +23 -0
- package/README.md +95 -45
- package/docs/ARCHITECTURE.md +24 -0
- package/package.json +16 -3
- package/scripts/demo.js +416 -0
- package/scripts/render-demo-gif.py +90 -0
- package/scripts/render-demo-screenshot.js +65 -0
- package/src/cli.js +47 -13
- package/src/core/compress.js +285 -0
- package/src/core/config.js +119 -0
- package/src/core/diff.js +146 -7
- package/src/core/pack-manifest.js +163 -0
- package/src/core/pack.js +355 -0
- package/src/core/snapshot.js +55 -9
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pack manifest CRUD operations.
|
|
3
|
+
* Manages the pack-manifest.json data structure.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import crypto from "node:crypto";
|
|
7
|
+
|
|
8
|
+
const MANIFEST_VERSION = 1;
|
|
9
|
+
|
|
10
|
+
/**
 * Build a fresh, empty manifest at the current schema version.
 * @returns {{ version: number, packs: {} }} Manifest with no packs registered.
 */
export function createManifest() {
  const manifest = {
    version: MANIFEST_VERSION,
    packs: {},
  };
  return manifest;
}
|
|
20
|
+
|
|
21
|
+
/**
 * Look up the pack entry registered under a directory path.
 * @param {object} manifest - Manifest object
 * @param {string} packPath - Directory path (e.g., "node_modules")
 * @returns {object|null} Stored pack info, or null when no entry exists
 */
export function getPack(manifest, packPath) {
  const key = normalizePath(packPath);
  const entry = manifest.packs[key];
  return entry ?? null;
}
|
|
31
|
+
|
|
32
|
+
/**
 * Insert or merge pack information for a directory path.
 * Fields already present are kept unless overridden by `data`, and
 * `lastModified` is always refreshed to the current time.
 * @param {object} manifest - Manifest object (mutated in place)
 * @param {string} packPath - Directory path
 * @param {object} data - Pack data to set/merge
 * @returns {object} The same manifest, for chaining
 */
export function setPack(manifest, packPath, data) {
  const key = normalizePath(packPath);
  const previous = manifest.packs[key] ?? {};

  const merged = Object.assign({}, previous, data, {
    // Every write bumps the modification timestamp.
    lastModified: new Date().toISOString(),
  });
  manifest.packs[key] = merged;

  return manifest;
}
|
|
52
|
+
|
|
53
|
+
/**
 * Delete a pack entry from the manifest.
 * Removing a path that is not registered is a no-op.
 * @param {object} manifest - Manifest object (mutated in place)
 * @param {string} packPath - Directory path
 * @returns {object} The same manifest, for chaining
 */
export function removePack(manifest, packPath) {
  const key = normalizePath(packPath);
  // `delete` (rather than assigning undefined) so the key disappears
  // from Object.keys/entries enumeration used elsewhere.
  delete manifest.packs[key];
  return manifest;
}
|
|
64
|
+
|
|
65
|
+
/**
 * Enumerate every pack registered in the manifest.
 * @param {object} manifest - Manifest object
 * @returns {Array<{ path: string, info: object }>} One entry per pack,
 *   in object-key insertion order.
 */
export function listPacks(manifest) {
  const result = [];
  for (const [path, info] of Object.entries(manifest.packs)) {
    result.push({ path, info });
  }
  return result;
}
|
|
76
|
+
|
|
77
|
+
/**
 * Determine whether a file path falls inside any packed directory.
 * A path matches a pack when it equals the pack path or lives beneath it.
 * @param {object} manifest - Manifest object
 * @param {string} filePath - File path to check
 * @returns {{ isPacked: boolean, packPath: string|null }} The first
 *   matching pack path (in key order), or null when none match.
 */
export function isPathPacked(manifest, filePath) {
  const target = normalizePath(filePath);

  const match = Object.keys(manifest.packs).find(
    (packPath) => target === packPath || target.startsWith(`${packPath}/`)
  );

  if (match !== undefined) {
    return { isPacked: true, packPath: match };
  }
  return { isPacked: false, packPath: null };
}
|
|
95
|
+
|
|
96
|
+
/**
 * Validate manifest structure.
 * Checks the top-level shape, the schema version, and each pack entry.
 * @param {object} manifest - Manifest to validate
 * @returns {{ valid: boolean, errors: string[] }} `valid` is true only
 *   when `errors` is empty.
 */
export function validateManifest(manifest) {
  const errors = [];

  if (!manifest || typeof manifest !== "object") {
    errors.push("Manifest must be an object");
    // Nothing else can be checked on a non-object; bail out early.
    return { valid: false, errors };
  }

  if (typeof manifest.version !== "number") {
    errors.push("Manifest version must be a number");
  } else if (manifest.version !== MANIFEST_VERSION) {
    // Only report a mismatch when the version is actually numeric;
    // previously a non-number version produced a second, confusing
    // "Unsupported manifest version: undefined" error.
    errors.push(`Unsupported manifest version: ${manifest.version} (expected ${MANIFEST_VERSION})`);
  }

  if (!manifest.packs || typeof manifest.packs !== "object") {
    errors.push("Manifest packs must be an object");
  } else {
    for (const [path, info] of Object.entries(manifest.packs)) {
      // Object keys are always strings; this guards against empty keys.
      if (typeof path !== "string" || path.length === 0) {
        errors.push(`Invalid pack path: ${path}`);
      }
      if (!info || typeof info !== "object") {
        errors.push(`Pack info for "${path}" must be an object`);
      } else if (!info.packId || typeof info.packId !== "string") {
        errors.push(`Pack "${path}" missing valid packId`);
      }
    }
  }

  return { valid: errors.length === 0, errors };
}
|
|
136
|
+
|
|
137
|
+
/**
 * Generate a unique pack ID for a directory path.
 * @param {string} packPath - Directory path
 * @returns {string} Pack ID like "pack-node_modules-a1b2c3d4"
 */
export function generatePackId(packPath) {
  // Make the path ID-safe: slashes become underscores, anything outside
  // [a-zA-Z0-9_-] is dropped.
  const idBase = normalizePath(packPath)
    .replace(/\//g, "_")
    .replace(/[^a-zA-Z0-9_-]/g, "");
  const suffix = crypto.randomBytes(4).toString("hex");
  return `pack-${idBase}-${suffix}`;
}
|
|
151
|
+
|
|
152
|
+
/**
 * Normalize a path for use as a consistent manifest key:
 * backslashes become forward slashes, and leading/trailing slashes
 * are stripped.
 * @param {string} p - Path to normalize
 * @returns {string}
 */
function normalizePath(p) {
  const forwardSlashed = p.replace(/\\/g, "/");
  const noLeading = forwardSlashed.replace(/^\/+/, "");
  return noLeading.replace(/\/+$/, "");
}
|
package/src/core/pack.js
ADDED
|
@@ -0,0 +1,355 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Directory packing operations: tar archive creation/extraction and tree hash.
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import * as tar from "tar";
|
|
6
|
+
import crypto from "node:crypto";
|
|
7
|
+
import fs from "node:fs";
|
|
8
|
+
import fsp from "node:fs/promises";
|
|
9
|
+
import path from "node:path";
|
|
10
|
+
import os from "node:os";
|
|
11
|
+
import { pipeline } from "node:stream/promises";
|
|
12
|
+
import {
|
|
13
|
+
Algorithm,
|
|
14
|
+
EXTENSIONS,
|
|
15
|
+
createCompressStream,
|
|
16
|
+
createDecompressStream,
|
|
17
|
+
detectAlgorithm,
|
|
18
|
+
resolveAlgorithm,
|
|
19
|
+
} from "./compress.js";
|
|
20
|
+
|
|
21
|
+
/**
 * Calculate a tree hash for a directory from file mtimes + sizes.
 * This avoids reading file contents entirely (the key optimization:
 * ~30x faster than MD5-hashing every file). Unreadable directories and
 * unstat-able files contribute nothing; non-file entries (symlinks,
 * sockets, ...) are skipped.
 * @param {string} dirPath - Directory to hash
 * @returns {Promise<string>} Hash in format "sha256:..."
 */
export async function getTreeHash(dirPath) {
  const lines = [];

  const visit = async (dir) => {
    let dirents;
    try {
      dirents = await fsp.readdir(dir, { withFileTypes: true });
    } catch {
      // Unreadable directory: contribute nothing to the fingerprint.
      return;
    }

    for (const dirent of dirents) {
      const absolute = path.join(dir, dirent.name);
      if (dirent.isDirectory()) {
        await visit(absolute);
        continue;
      }
      if (!dirent.isFile()) {
        continue;
      }
      try {
        const stat = await fsp.stat(absolute);
        // Forward slashes keep the fingerprint stable across platforms.
        const rel = path.relative(dirPath, absolute).replace(/\\/g, "/");
        lines.push(`${rel}:${stat.mtimeMs}:${stat.size}`);
      } catch {
        // File vanished or is unstat-able: skip it.
      }
    }
  };

  await visit(dirPath);

  // Sort so directory-listing order cannot change the hash.
  lines.sort();

  const digest = crypto.createHash("sha256").update(lines.join("\n")).digest("hex");
  return `sha256:${digest}`;
}
|
|
66
|
+
|
|
67
|
+
/**
 * Create a pack (tar archive) from a directory.
 * The archive is tarred into a temp file first, then compressed (or
 * renamed, for Algorithm.NONE) into its final location. On any failure
 * both the temp tar and the partial pack file are cleaned up.
 * @param {string} sourcePath - Source directory path
 * @param {string} destPath - Destination file path (without extension);
 *   the algorithm's extension from EXTENSIONS is appended.
 * @param {{ algorithm?: string, level?: number }} options - Preferred
 *   compression algorithm (may be downgraded by resolveAlgorithm) and level.
 * @returns {Promise<{
 *   packPath: string,
 *   fileCount: number,
 *   originalSize: number,
 *   packedSize: number,
 *   treeHash: string,
 *   compression: { algorithm: string, level: number }
 * }>}
 */
export async function createPack(sourcePath, destPath, options = {}) {
  const preferredAlgorithm = options.algorithm ?? Algorithm.GZIP;
  const level = options.level ?? 6;

  // Resolve to an algorithm actually available on this system.
  const algorithm = await resolveAlgorithm(preferredAlgorithm);
  const extension = EXTENSIONS[algorithm];
  const packPath = destPath + extension;

  // Calculate tree hash before packing.
  // NOTE(review): if the directory changes between this hash and the tar
  // below, the recorded hash will not match the archived content — confirm
  // callers don't mutate the tree concurrently.
  const treeHash = await getTreeHash(sourcePath);

  // Count files and total (uncompressed) size for the returned stats.
  let fileCount = 0;
  let originalSize = 0;

  async function countFiles(dir) {
    const items = await fsp.readdir(dir, { withFileTypes: true });
    for (const item of items) {
      const fullPath = path.join(dir, item.name);
      if (item.isDirectory()) {
        await countFiles(fullPath);
      } else if (item.isFile()) {
        fileCount++;
        const stat = await fsp.stat(fullPath);
        originalSize += stat.size;
      }
    }
  }
  await countFiles(sourcePath);

  // Tar into a temp file first, then compress into the final pack path.
  // NOTE(review): Date.now() in the temp name could collide under
  // concurrent packing of the same process millisecond — confirm acceptable.
  const tempTarPath = path.join(os.tmpdir(), `aethel-tar-${Date.now()}.tar`);

  try {
    // Create uncompressed tar archive. cwd is the PARENT of sourcePath,
    // so the archive contains one top-level directory entry (the base
    // name) — extractPack relies on this and strips it with `strip: 1`.
    await tar.create(
      {
        file: tempTarPath,
        cwd: path.dirname(sourcePath),
        gzip: false,
      },
      [path.basename(sourcePath)]
    );

    // Apply compression.
    if (algorithm === Algorithm.NONE) {
      // No compression: the temp tar simply becomes the pack.
      await fsp.rename(tempTarPath, packPath);
    } else {
      const readStream = fs.createReadStream(tempTarPath);
      const writeStream = fs.createWriteStream(packPath);
      const compressStream = await createCompressStream(algorithm, { level });
      await pipeline(readStream, compressStream, writeStream);
      await fsp.unlink(tempTarPath);
    }

    const packStat = await fsp.stat(packPath);

    return {
      packPath,
      fileCount,
      originalSize,
      packedSize: packStat.size,
      treeHash,
      compression: { algorithm, level },
    };
  } catch (err) {
    // Best-effort cleanup of both intermediate files; the original
    // error is always rethrown.
    try {
      await fsp.unlink(tempTarPath);
    } catch {}
    try {
      await fsp.unlink(packPath);
    } catch {}
    throw err;
  }
}
|
|
159
|
+
|
|
160
|
+
/**
 * Extract a pack to a destination directory.
 * Compressed packs are decompressed to a temp tar first, then extracted;
 * uncompressed packs are extracted directly. In both cases the archive's
 * single top-level directory (added by createPack) is stripped.
 * @param {string} packPath - Pack file path
 * @param {string} destPath - Destination directory path (created if missing)
 * @param {{ algorithm?: string }} options - Algorithm override
 *   (auto-detected from the file name if not provided; defaults to gzip)
 * @returns {Promise<{ fileCount: number, extractedSize: number }>} Counts
 *   computed by re-walking destPath after extraction.
 */
export async function extractPack(packPath, destPath, options = {}) {
  const algorithm = options.algorithm ?? detectAlgorithm(packPath) ?? Algorithm.GZIP;

  // Ensure destination exists before tar writes into it.
  await fsp.mkdir(destPath, { recursive: true });

  // Scratch file for the decompressed tar (unused for Algorithm.NONE).
  const tempTarPath = path.join(os.tmpdir(), `aethel-extract-${Date.now()}.tar`);

  try {
    if (algorithm === Algorithm.NONE) {
      // No decompression needed — extract straight from the pack file.
      await tar.extract({
        file: packPath,
        cwd: destPath,
        strip: 1, // Remove the top-level directory createPack added
      });
    } else {
      // Decompress to a temp tar first...
      const readStream = fs.createReadStream(packPath);
      const writeStream = fs.createWriteStream(tempTarPath);
      const decompressStream = await createDecompressStream(algorithm);
      await pipeline(readStream, decompressStream, writeStream);

      // ...then extract it.
      await tar.extract({
        file: tempTarPath,
        cwd: destPath,
        strip: 1,
      });

      await fsp.unlink(tempTarPath);
    }

    // Walk the destination to report how much was extracted.
    // NOTE(review): this counts everything under destPath, including any
    // files that existed there before extraction — confirm destPath is
    // expected to be empty.
    let fileCount = 0;
    let extractedSize = 0;

    async function countFiles(dir) {
      const items = await fsp.readdir(dir, { withFileTypes: true });
      for (const item of items) {
        const fullPath = path.join(dir, item.name);
        if (item.isDirectory()) {
          await countFiles(fullPath);
        } else if (item.isFile()) {
          fileCount++;
          const stat = await fsp.stat(fullPath);
          extractedSize += stat.size;
        }
      }
    }
    await countFiles(destPath);

    return { fileCount, extractedSize };
  } catch (err) {
    // Best-effort temp cleanup (harmless no-op in the NONE branch,
    // where the temp tar was never created); rethrow the original error.
    try {
      await fsp.unlink(tempTarPath);
    } catch {}
    throw err;
  }
}
|
|
228
|
+
|
|
229
|
+
/**
 * Check if a pack is stale (local directory has changed).
 * A missing manifest hash always counts as stale.
 * @param {string} currentHash - Current tree hash of directory
 * @param {string|null} manifestHash - Hash stored in manifest
 * @returns {boolean} True if pack needs to be recreated
 */
export function isPackStale(currentHash, manifestHash) {
  return !manifestHash || currentHash !== manifestHash;
}
|
|
239
|
+
|
|
240
|
+
/**
 * List contents of a pack without extracting it to disk.
 * Uncompressed packs are listed directly; compressed packs are first
 * decompressed to a temp tar (there is no streaming list path here),
 * listed, and the temp file removed.
 * @param {string} packPath - Pack file path
 * @returns {Promise<Array<{ path: string, size: number, mtime: Date }>>}
 *   One entry per tar member, in archive order. Paths include the
 *   top-level directory createPack added.
 */
export async function listPackContents(packPath) {
  // Algorithm is inferred from the file extension; gzip is the fallback.
  const algorithm = detectAlgorithm(packPath) ?? Algorithm.GZIP;
  const entries = [];

  if (algorithm === Algorithm.NONE) {
    // Direct tar list — no temp file needed.
    await tar.list({
      file: packPath,
      onentry: (entry) => {
        entries.push({
          path: entry.path,
          size: entry.size,
          mtime: entry.mtime,
        });
      },
    });
  } else {
    // Need to decompress to a temp tar before tar.list can read it.
    const tempTarPath = path.join(os.tmpdir(), `aethel-list-${Date.now()}.tar`);
    try {
      const readStream = fs.createReadStream(packPath);
      const writeStream = fs.createWriteStream(tempTarPath);
      const decompressStream = await createDecompressStream(algorithm);
      await pipeline(readStream, decompressStream, writeStream);

      await tar.list({
        file: tempTarPath,
        onentry: (entry) => {
          entries.push({
            path: entry.path,
            size: entry.size,
            mtime: entry.mtime,
          });
        },
      });

      await fsp.unlink(tempTarPath);
    } catch (err) {
      // Best-effort temp cleanup; rethrow the original error.
      try {
        await fsp.unlink(tempTarPath);
      } catch {}
      throw err;
    }
  }

  return entries;
}
|
|
292
|
+
|
|
293
|
+
/**
 * Extract a single file from a pack.
 * The pack is (decompressed and) extracted into a temp directory with a
 * tar filter matching the requested path, then the file is located by
 * basename and copied to destPath. The temp directory is always removed.
 * @param {string} packPath - Pack file path
 * @param {string} filePath - Relative path within the pack
 * @param {string} destPath - Destination path for extracted file
 * @returns {Promise<void>}
 * @throws {Error} If no matching file is found inside the pack.
 */
export async function extractSingleFile(packPath, filePath, destPath) {
  const algorithm = detectAlgorithm(packPath) ?? Algorithm.GZIP;

  // Ensure destination directory exists.
  await fsp.mkdir(path.dirname(destPath), { recursive: true });

  // Scratch area for the filtered extraction.
  const tempDir = path.join(os.tmpdir(), `aethel-single-${Date.now()}`);
  await fsp.mkdir(tempDir, { recursive: true });

  try {
    if (algorithm === Algorithm.NONE) {
      // Filter keeps only the requested path; the suffix match accounts
      // for the top-level directory createPack prepends to every entry.
      await tar.extract({
        file: packPath,
        cwd: tempDir,
        filter: (p) => p === filePath || p.endsWith("/" + filePath),
      });
    } else {
      // Decompress to a temp tar first, then extract with the same filter.
      const tempTarPath = path.join(tempDir, "archive.tar");
      const readStream = fs.createReadStream(packPath);
      const writeStream = fs.createWriteStream(tempTarPath);
      const decompressStream = await createDecompressStream(algorithm);
      await pipeline(readStream, decompressStream, writeStream);

      await tar.extract({
        file: tempTarPath,
        cwd: tempDir,
        filter: (p) => p === filePath || p.endsWith("/" + filePath),
      });
    }

    // Walk the temp dir for the extracted file, matching by basename.
    // NOTE(review): if the tar filter admitted several entries with the
    // same basename, the first one found wins — confirm packs cannot
    // contain ambiguous matches for a given filePath.
    async function findFile(dir) {
      const items = await fsp.readdir(dir, { withFileTypes: true });
      for (const item of items) {
        const fullPath = path.join(dir, item.name);
        if (item.isDirectory()) {
          const found = await findFile(fullPath);
          if (found) return found;
        } else if (item.name === path.basename(filePath)) {
          return fullPath;
        }
      }
      return null;
    }

    const extractedPath = await findFile(tempDir);
    if (!extractedPath) {
      throw new Error(`File not found in pack: ${filePath}`);
    }

    await fsp.copyFile(extractedPath, destPath);
  } finally {
    // Always remove the scratch directory, success or failure.
    await fsp.rm(tempDir, { recursive: true, force: true });
  }
}
|
package/src/core/snapshot.js
CHANGED
|
@@ -1,8 +1,9 @@
|
|
|
1
1
|
import crypto from "node:crypto";
|
|
2
2
|
import fs from "node:fs";
|
|
3
3
|
import path from "node:path";
|
|
4
|
-
import { AETHEL_DIR } from "./config.js";
|
|
4
|
+
import { AETHEL_DIR, loadPackConfig, getPackRule } from "./config.js";
|
|
5
5
|
import { loadIgnoreRules } from "./ignore.js";
|
|
6
|
+
import { getTreeHash } from "./pack.js";
|
|
6
7
|
|
|
7
8
|
const HASH_CACHE_FILE = ".hash-cache.json";
|
|
8
9
|
|
|
@@ -86,14 +87,17 @@ function saveHashCache(root, cache) {
|
|
|
86
87
|
|
|
87
88
|
const PARALLEL_HASH_LIMIT = 128;
|
|
88
89
|
|
|
89
|
-
export async function scanLocal(root, { respectIgnore = true } = {}) {
|
|
90
|
+
export async function scanLocal(root, { respectIgnore = true, respectPacking = true } = {}) {
|
|
90
91
|
const resolvedRoot = path.resolve(root);
|
|
91
92
|
const ignoreRules = respectIgnore ? loadIgnoreRules(resolvedRoot) : null;
|
|
93
|
+
const packConfig = respectPacking ? loadPackConfig(resolvedRoot) : null;
|
|
94
|
+
const packingEnabled = packConfig?.packing?.enabled === true;
|
|
92
95
|
const hashCache = loadHashCache(resolvedRoot);
|
|
93
96
|
const nextCache = new Map();
|
|
94
97
|
|
|
95
98
|
// Phase 1: collect all file stats (fast — no hashing yet)
|
|
96
99
|
const filesToHash = [];
|
|
100
|
+
const packedDirs = {};
|
|
97
101
|
// Track directories and their child counts to detect empty folders
|
|
98
102
|
const dirChildCount = new Map();
|
|
99
103
|
// Map relative dir path → absolute path (for deferred stat on empty dirs only)
|
|
@@ -130,16 +134,46 @@ export async function scanLocal(root, { respectIgnore = true } = {}) {
|
|
|
130
134
|
.split(path.sep)
|
|
131
135
|
.join("/");
|
|
132
136
|
|
|
133
|
-
if (ignoreRules?.ignores(relativePath)) {
|
|
134
|
-
continue;
|
|
135
|
-
}
|
|
136
|
-
|
|
137
137
|
if (entry.isDirectory()) {
|
|
138
|
+
// Check if this directory is a pack target BEFORE checking ignore rules
|
|
139
|
+
// Pack targets should be processed even if they match ignore patterns
|
|
140
|
+
if (packingEnabled) {
|
|
141
|
+
const packRule = getPackRule(packConfig, relativePath);
|
|
142
|
+
if (packRule && packRule.path === relativePath) {
|
|
143
|
+
// This directory should be packed - compute tree hash instead of scanning
|
|
144
|
+
try {
|
|
145
|
+
const treeHash = await getTreeHash(fullPath);
|
|
146
|
+
packedDirs[relativePath] = {
|
|
147
|
+
path: relativePath,
|
|
148
|
+
isPacked: true,
|
|
149
|
+
treeHash,
|
|
150
|
+
packRule: packRule.strategy || "full",
|
|
151
|
+
};
|
|
152
|
+
} catch {
|
|
153
|
+
// If we can't compute tree hash, fall back to normal scanning
|
|
154
|
+
if (!ignoreRules?.ignores(relativePath)) {
|
|
155
|
+
trackedChildren++;
|
|
156
|
+
subdirs.push(fullPath);
|
|
157
|
+
}
|
|
158
|
+
}
|
|
159
|
+
continue; // Don't descend into packed directory
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
// Apply ignore rules for non-pack directories
|
|
164
|
+
if (ignoreRules?.ignores(relativePath)) {
|
|
165
|
+
continue;
|
|
166
|
+
}
|
|
138
167
|
trackedChildren++;
|
|
139
168
|
subdirs.push(fullPath);
|
|
140
169
|
continue;
|
|
141
170
|
}
|
|
142
171
|
|
|
172
|
+
// Apply ignore rules for files
|
|
173
|
+
if (ignoreRules?.ignores(relativePath)) {
|
|
174
|
+
continue;
|
|
175
|
+
}
|
|
176
|
+
|
|
143
177
|
if (!entry.isFile()) {
|
|
144
178
|
continue;
|
|
145
179
|
}
|
|
@@ -230,7 +264,14 @@ export async function scanLocal(root, { respectIgnore = true } = {}) {
|
|
|
230
264
|
|
|
231
265
|
// Persist updated cache
|
|
232
266
|
saveHashCache(resolvedRoot, nextCache);
|
|
233
|
-
|
|
267
|
+
|
|
268
|
+
// Return both files and packed directories
|
|
269
|
+
return {
|
|
270
|
+
files: result,
|
|
271
|
+
packedDirs,
|
|
272
|
+
// For backward compatibility, also expose files at top level
|
|
273
|
+
...result,
|
|
274
|
+
};
|
|
234
275
|
}
|
|
235
276
|
|
|
236
277
|
async function getMd5Cached(oldCache, newCache, fullPath, relativePath, stat) {
|
|
@@ -252,7 +293,7 @@ async function getMd5Cached(oldCache, newCache, fullPath, relativePath, stat) {
|
|
|
252
293
|
|
|
253
294
|
// ── Snapshot building ────────────────────────────────────────────────
|
|
254
295
|
|
|
255
|
-
export function buildSnapshot(remoteFiles, localFiles, message = "") {
|
|
296
|
+
export function buildSnapshot(remoteFiles, localFiles, message = "", packedDirs = {}) {
|
|
256
297
|
const files = {};
|
|
257
298
|
|
|
258
299
|
for (const file of remoteFiles) {
|
|
@@ -269,11 +310,16 @@ export function buildSnapshot(remoteFiles, localFiles, message = "") {
|
|
|
269
310
|
};
|
|
270
311
|
}
|
|
271
312
|
|
|
313
|
+
// Handle localFiles which may be the new format with .files property
|
|
314
|
+
const localFilesData = localFiles?.files ?? localFiles;
|
|
315
|
+
const localPackedDirs = localFiles?.packedDirs ?? packedDirs;
|
|
316
|
+
|
|
272
317
|
const snapshot = {
|
|
273
318
|
timestamp: new Date().toISOString(),
|
|
274
319
|
message,
|
|
275
320
|
files,
|
|
276
|
-
localFiles: { ...
|
|
321
|
+
localFiles: { ...localFilesData },
|
|
322
|
+
packedDirs: { ...localPackedDirs },
|
|
277
323
|
};
|
|
278
324
|
|
|
279
325
|
// Embed integrity checksum
|