aethel 1.0.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,31 @@
1
1
  # Changelog
2
2
 
3
+ ## 1.2.1 (2026-04-26)
4
+
5
+ - Document the `verify` integrity-check command in the README help guide.
6
+ - Add `verify` to the TUI command catalog with local and remote verification actions.
7
+ - Fix legacy local-delete staging so remote deletions can resolve Drive file IDs from the latest snapshot.
8
+ - Add a debug installer command that symlinks the working-copy CLI as `debug_aethel`.
9
+
10
+ ## 1.2.0 (2026-04-15)
11
+
12
+ - Add directory packing modules (compression, tar packing, pack manifest)
13
+
14
+ ## 1.1.0 (2026-04-15)
15
+
16
+ ### Added
17
+ - **Directory Packing**: Pack large directories (e.g., `node_modules`) into compressed archives for faster sync
18
+ - Multi-algorithm compression support: gzip, brotli (built-in), zstd, xz (optional)
19
+ - Tree hash algorithm for fast directory fingerprinting (~30x faster than MD5)
20
+ - Pack-aware scanning that skips packed directories
21
+ - Pack change detection: PACK_NEW, PACK_LOCAL_MODIFIED, PACK_REMOTE_MODIFIED, PACK_SYNCED, PACK_CONFLICT
22
+ - `aethel status --verbose` shows synced packs
23
+ - `.aethelconfig` YAML file for packing configuration
24
+
25
+ ### Changed
26
+ - Upgraded ink from 6.8.0 to 7.0.0
27
+ - Upgraded react from 19.2.4 to 19.2.5
28
+
3
29
  ## 1.0.0 (2026-04-06)
4
30
 
5
31
  - release: 1.0.0
package/README.md CHANGED
@@ -23,6 +23,7 @@ git clone https://github.com/CCJ-0617/Aethel.git
23
23
  cd Aethel
24
24
  npm install
25
25
  npm run install:cli # symlinks `aethel` into ~/.local/bin
26
+ npm run install:debug # symlinks `debug_aethel` without replacing `aethel`
26
27
  ```
27
28
 
28
29
  </details>
@@ -85,6 +86,7 @@ aethel commit -m "sync" # execute staged operations
85
86
  aethel pull -m "pull" # fetch remote changes and apply
86
87
  aethel pull --all # download the full remote tree to local
87
88
  aethel push -m "push" # push local changes to Drive
89
+ aethel verify # verify local files against the last snapshot
88
90
  ```
89
91
 
90
92
  `pull` applies remote changes relative to the latest snapshot. Use `pull --all` for the first full download or to rehydrate a local workspace from the current remote tree.
@@ -131,10 +133,20 @@ Processes deepest-first for single-pass convergence, caches child state to minim
131
133
  | `restore` | Restore files from the last snapshot |
132
134
  | `rm` | Remove local files and stage remote deletion |
133
135
  | `mv` | Move or rename local files |
136
+ | `verify` | Verify local and optional remote integrity against the last snapshot |
134
137
  | `clean` | List and optionally trash/delete Drive files |
135
138
  | `dedupe-folders` | Detect and merge duplicate remote folders |
136
139
  | `tui` | Launch interactive terminal UI |
137
140
 
141
+ ### Integrity Verification
142
+
143
+ ```bash
144
+ aethel verify # check snapshot checksum and local file hashes
145
+ aethel verify --remote # also compare Drive file hashes
146
+ ```
147
+
148
+ `verify` compares the latest snapshot with the workspace on disk and exits non-zero when files are missing or modified. Add `--remote` when you also want to verify Drive state before a release, migration, or restore.
149
+
138
150
  ## TUI
139
151
 
140
152
  ```bash
@@ -158,6 +170,46 @@ Dual-pane file browser — local filesystem on the left, Google Drive on the rig
158
170
  | `f` | Open the commands page and choose a TUI action |
159
171
  | `:` | Run any Aethel CLI command inside the TUI |
160
172
 
173
+ ## Directory Packing
174
+
175
+ Large directories with many small files (e.g., `node_modules`, `vendor`) can be slow to sync. Aethel can pack these into compressed archives for faster transfers.
176
+
177
+ ### Enable Packing
178
+
179
+ Create `.aethelconfig` in your workspace root:
180
+
181
+ ```yaml
182
+ packing:
183
+ enabled: true
184
+ compression:
185
+ default:
186
+ algorithm: gzip # gzip, brotli, zstd, xz, or none
187
+ level: 6
188
+ rules:
189
+ - path: node_modules
190
+ strategy: full
191
+ - path: vendor
192
+ strategy: full
193
+ ```
194
+
195
+ ### How It Works
196
+
197
+ 1. **Tree Hash**: Directories are fingerprinted using mtime+size (~30x faster than MD5)
198
+ 2. **Pack Detection**: `aethel status` shows pack states (P+, PL, PR, P=, P!)
199
+ 3. **Compression**: Archives use gzip/brotli (built-in) or zstd/xz (if installed)
200
+
201
+ ### Pack Status Codes
202
+
203
+ | Code | Meaning |
204
+ |------|---------|
205
+ | `P+` | New pack (not yet synced) |
206
+ | `PL` | Pack changed locally |
207
+ | `PR` | Pack changed on Drive |
208
+ | `P=` | Pack up to date |
209
+ | `P!` | Pack conflict |
210
+
211
+ Use `aethel status --verbose` to show synced packs.
212
+
161
213
  ## Ignore Patterns
162
214
 
163
215
  Create `.aethelignore` (gitignore syntax) in your workspace root — or run `aethel init` to generate a default one.
@@ -194,10 +246,13 @@ src/
194
246
  │ ├── drive-api.js Google Drive API wrapper
195
247
  │ ├── local-fs.js Local filesystem operations
196
248
  │ ├── remote-cache.js Short-lived remote file cache
197
- │ ├── snapshot.js Local scanning & snapshot creation
249
+ │ ├── snapshot.js Local scanning & snapshot creation
198
250
  │ ├── staging.js Stage/unstage operations
199
251
  │ ├── sync.js Execute staged changes
200
- └── ignore.js .aethelignore pattern matching
252
+ │ ├── ignore.js .aethelignore pattern matching
253
+ │ ├── compress.js Multi-algorithm compression (gzip, brotli, zstd, xz)
254
+ │ ├── pack.js Tar archive operations & tree hash
255
+ │ └── pack-manifest.js Pack manifest CRUD operations
201
256
  └── tui/
202
257
  ├── app.js React (Ink) dual-pane component
203
258
  ├── index.js TUI entry
@@ -45,6 +45,15 @@ The core design is not a live mirror between local storage and Drive. Instead, s
45
45
  - Manages `.aethelignore`
46
46
  - `src/core/remote-cache.js`
47
47
  - Short-lived cache for remote listings
48
+ - `src/core/compress.js`
49
+ - Multi-algorithm compression (gzip, brotli, zstd, xz)
50
+ - Compression profiles and algorithm detection
51
+ - `src/core/pack.js`
52
+ - Tar archive creation and extraction
53
+ - Tree hash algorithm for fast directory fingerprinting
54
+ - `src/core/pack-manifest.js`
55
+ - CRUD operations for pack manifest
56
+ - Tracks packed directories and their sync state
48
57
 
49
58
  ### 2.3 State Storage Layer
50
59
 
@@ -55,16 +64,20 @@ After workspace initialization, the project root contains:
55
64
  config.json
56
65
  index.json
57
66
  .hash-cache.json
67
+ pack-manifest.json
58
68
  snapshots/
59
69
  latest.json
60
70
  history/
71
+ .aethelconfig
61
72
  ```
62
73
 
63
74
  - `config.json`: sync root configuration
64
75
  - `index.json`: currently staged operations
65
76
  - `.hash-cache.json`: local file hash cache
77
+ - `pack-manifest.json`: tracks packed directories and their sync state
66
78
  - `snapshots/latest.json`: baseline state after the most recent successful sync
67
79
  - `snapshots/history/`: archived older snapshots
80
+ - `.aethelconfig` (workspace root): YAML configuration for directory packing
68
81
 
69
82
  ## 3. Core Data Flow
70
83
 
@@ -124,6 +137,7 @@ That means the system does not compare only "local vs remote". It also asks:
124
137
 
125
138
  `src/core/diff.js` classifies changes as:
126
139
 
140
+ **File changes:**
127
141
  - `remote_added`
128
142
  - `remote_modified`
129
143
  - `remote_deleted`
@@ -132,6 +146,13 @@ That means the system does not compare only "local vs remote". It also asks:
132
146
  - `local_deleted`
133
147
  - `conflict`
134
148
 
149
+ **Pack changes (for packed directories):**
150
+ - `pack_new` - directory newly configured for packing
151
+ - `pack_local_modified` - packed directory changed locally
152
+ - `pack_remote_modified` - packed directory changed on Drive
153
+ - `pack_synced` - packed directory up to date
154
+ - `pack_conflict` - both sides changed the packed directory
155
+
135
156
  It also provides default suggested actions for each category:
136
157
 
137
158
  - Drive added/modified -> `download`
@@ -139,6 +160,9 @@ It also provides default suggested actions for each category:
139
160
  - Local added/modified -> `upload`
140
161
  - Local deleted -> `delete_remote`
141
162
  - Both sides changed the same path -> `conflict`
163
+ - Pack new/local modified -> `push_pack`
164
+ - Pack remote modified -> `pull_pack`
165
+ - Pack conflict -> `resolve_pack`
142
166
 
143
167
  ### 4.3 Execution Model
144
168
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "aethel",
3
- "version": "1.0.0",
3
+ "version": "1.2.1",
4
4
  "description": "Git-style Google Drive sync CLI with interactive TUI",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -48,6 +48,7 @@
48
48
  "auth": "node src/cli.js auth",
49
49
  "clean": "node src/cli.js clean",
50
50
  "install:cli": "bash scripts/install.sh",
51
+ "install:debug": "bash scripts/install-debug.sh",
51
52
  "release": "bash scripts/release.sh",
52
53
  "pack:check": "npm pack --dry-run",
53
54
  "prepublishOnly": "npm test && npm run pack:check",
@@ -59,12 +60,14 @@
59
60
  "commander": "^14.0.3",
60
61
  "googleapis": "^171.4.0",
61
62
  "ignore": "^7.0.5",
62
- "ink": "^6.8.0",
63
+ "ink": "^7.0.0",
63
64
  "ink-select-input": "^6.0.0",
64
65
  "ink-spinner": "^5.0.0",
65
66
  "ink-text-input": "^6.0.0",
66
67
  "open": "^11.0.0",
67
- "react": "^19.2.4"
68
+ "react": "^19.2.4",
69
+ "tar": "^7.5.13",
70
+ "yaml": "^2.8.3"
68
71
  },
69
72
  "engines": {
70
73
  "node": ">=18"
package/src/cli.js CHANGED
@@ -289,7 +289,9 @@ async function handleStatus(options) {
289
289
  const { diff } = await loadStateWithProgress(repo);
290
290
  const staged = repo.getStagedEntries();
291
291
 
292
- if (diff.isClean && staged.length === 0) {
292
+ const hasPackChanges = diff.hasPackChanges || (options.verbose && diff.syncedPacks?.length > 0);
293
+
294
+ if (diff.isClean && staged.length === 0 && !hasPackChanges) {
293
295
  console.log("Everything up to date.");
294
296
  return;
295
297
  }
@@ -326,6 +328,32 @@ async function handleStatus(options) {
326
328
  console.log(` ${change.shortStatus} ${change.path} (${change.description})`);
327
329
  }
328
330
  }
331
+
332
+ // Display pack changes
333
+ const pendingPacks = diff.pendingPackChanges || [];
334
+ const packConflicts = diff.packConflicts || [];
335
+ const syncedPacks = diff.syncedPacks || [];
336
+
337
+ if (pendingPacks.length) {
338
+ console.log(`\nPack changes (${pendingPacks.length}):`);
339
+ for (const change of pendingPacks) {
340
+ console.log(` ${change.shortStatus} ${change.path} (${change.description})`);
341
+ }
342
+ }
343
+
344
+ if (packConflicts.length) {
345
+ console.log(`\nPack conflicts (${packConflicts.length}):`);
346
+ for (const change of packConflicts) {
347
+ console.log(` ${change.shortStatus} ${change.path} (${change.description})`);
348
+ }
349
+ }
350
+
351
+ if (options.verbose && syncedPacks.length) {
352
+ console.log(`\nSynced packs (${syncedPacks.length}):`);
353
+ for (const change of syncedPacks) {
354
+ console.log(` ${change.shortStatus} ${change.path} (${change.description})`);
355
+ }
356
+ }
329
357
  }
330
358
 
331
359
  async function handleDiff(options) {
@@ -1081,9 +1109,10 @@ async function main() {
1081
1109
  .option("--drive-folder-name <name>", "Display name for the Drive folder")
1082
1110
  ).action(handleInit);
1083
1111
 
1084
- addAuthOptions(program.command("status").description("Show sync status")).action(
1085
- handleStatus
1086
- );
1112
+ addAuthOptions(
1113
+ program.command("status").description("Show sync status")
1114
+ .option("-v, --verbose", "Show all pack states including synced")
1115
+ ).action(handleStatus);
1087
1116
 
1088
1117
  addAuthOptions(
1089
1118
  program
@@ -0,0 +1,285 @@
1
+ /**
2
+ * Multi-algorithm compression/decompression abstraction.
3
+ * Supports gzip, brotli (built-in), and optionally zstd, xz.
4
+ */
5
+
6
+ import zlib from "node:zlib";
7
+ import { pipeline } from "node:stream/promises";
8
+ import fs from "node:fs";
9
+ import path from "node:path";
10
+
11
+ // Algorithm enumeration
12
+ export const Algorithm = Object.freeze({
13
+ NONE: "none",
14
+ GZIP: "gzip",
15
+ ZSTD: "zstd",
16
+ BROTLI: "brotli",
17
+ XZ: "xz",
18
+ });
19
+
20
+ // File extension mapping
21
+ export const EXTENSIONS = {
22
+ [Algorithm.NONE]: ".tar",
23
+ [Algorithm.GZIP]: ".tar.gz",
24
+ [Algorithm.ZSTD]: ".tar.zst",
25
+ [Algorithm.BROTLI]: ".tar.br",
26
+ [Algorithm.XZ]: ".tar.xz",
27
+ };
28
+
29
+ // Compression profiles for easy configuration
30
+ export const PROFILES = {
31
+ fast: { algorithm: Algorithm.ZSTD, level: 1 },
32
+ balanced: { algorithm: Algorithm.ZSTD, level: 6 },
33
+ maximum: { algorithm: Algorithm.ZSTD, level: 19 },
34
+ extreme: { algorithm: Algorithm.XZ, level: 6 },
35
+ };
36
+
37
+ // Cache for optional dependency availability
38
+ const availabilityCache = new Map();
39
+
40
+ /**
41
+ * Try to load an optional dependency.
42
+ * @param {string} moduleName
43
+ * @returns {Promise<any|null>}
44
+ */
45
+ async function tryLoadModule(moduleName) {
46
+ if (availabilityCache.has(moduleName)) {
47
+ return availabilityCache.get(moduleName);
48
+ }
49
+ try {
50
+ const mod = await import(moduleName);
51
+ availabilityCache.set(moduleName, mod.default || mod);
52
+ return availabilityCache.get(moduleName);
53
+ } catch {
54
+ availabilityCache.set(moduleName, null);
55
+ return null;
56
+ }
57
+ }
58
+
59
+ /**
60
+ * Check if an algorithm is available in the current environment.
61
+ * @param {string} algorithm - Algorithm name from Algorithm enum
62
+ * @returns {Promise<boolean>}
63
+ */
64
+ export async function isAlgorithmAvailable(algorithm) {
65
+ switch (algorithm) {
66
+ case Algorithm.NONE:
67
+ case Algorithm.GZIP:
68
+ case Algorithm.BROTLI:
69
+ return true; // Built-in Node.js
70
+ case Algorithm.ZSTD:
71
+ return (await tryLoadModule("@bokuweb/zstd-wasm")) !== null;
72
+ case Algorithm.XZ:
73
+ return (await tryLoadModule("lzma-native")) !== null;
74
+ default:
75
+ return false;
76
+ }
77
+ }
78
+
79
+ /**
80
+ * Get the best available algorithm for compression.
81
+ * Falls back to gzip if preferred is unavailable.
82
+ * @param {string} preferred - Preferred algorithm
83
+ * @returns {Promise<string>} Available algorithm
84
+ */
85
+ export async function resolveAlgorithm(preferred) {
86
+ if (await isAlgorithmAvailable(preferred)) {
87
+ return preferred;
88
+ }
89
+ // Fallback chain: zstd -> gzip
90
+ if (preferred !== Algorithm.ZSTD && (await isAlgorithmAvailable(Algorithm.ZSTD))) {
91
+ return Algorithm.ZSTD;
92
+ }
93
+ return Algorithm.GZIP;
94
+ }
95
+
96
+ /**
97
+ * Create a compression stream for the given algorithm.
98
+ * @param {string} algorithm - Algorithm name
99
+ * @param {{ level?: number }} options - Compression options
100
+ * @returns {Promise<import("node:stream").Transform>} Compression stream
101
+ */
102
+ export async function createCompressStream(algorithm, options = {}) {
103
+ const level = options.level ?? 6;
104
+
105
+ switch (algorithm) {
106
+ case Algorithm.NONE:
107
+ // Pass-through stream
108
+ const { PassThrough } = await import("node:stream");
109
+ return new PassThrough();
110
+
111
+ case Algorithm.GZIP:
112
+ return zlib.createGzip({ level });
113
+
114
+ case Algorithm.BROTLI:
115
+ return zlib.createBrotliCompress({
116
+ params: {
117
+ [zlib.constants.BROTLI_PARAM_QUALITY]: Math.min(level, 11),
118
+ },
119
+ });
120
+
121
+ case Algorithm.ZSTD: {
122
+ const zstd = await tryLoadModule("@bokuweb/zstd-wasm");
123
+ if (!zstd) {
124
+ throw new Error("zstd not available. Install @bokuweb/zstd-wasm");
125
+ }
126
+ // zstd-wasm provides compress/decompress functions, not streams
127
+ // We need to wrap it in a transform stream
128
+ const { Transform } = await import("node:stream");
129
+ const chunks = [];
130
+ return new Transform({
131
+ transform(chunk, encoding, callback) {
132
+ chunks.push(chunk);
133
+ callback();
134
+ },
135
+ async flush(callback) {
136
+ try {
137
+ const input = Buffer.concat(chunks);
138
+ const compressed = await zstd.compress(input, level);
139
+ this.push(Buffer.from(compressed));
140
+ callback();
141
+ } catch (err) {
142
+ callback(err);
143
+ }
144
+ },
145
+ });
146
+ }
147
+
148
+ case Algorithm.XZ: {
149
+ const lzma = await tryLoadModule("lzma-native");
150
+ if (!lzma) {
151
+ throw new Error("xz not available. Install lzma-native");
152
+ }
153
+ return lzma.createCompressor({ preset: level });
154
+ }
155
+
156
+ default:
157
+ throw new Error(`Unknown compression algorithm: ${algorithm}`);
158
+ }
159
+ }
160
+
161
+ /**
162
+ * Create a decompression stream for the given algorithm.
163
+ * @param {string} algorithm - Algorithm name
164
+ * @returns {Promise<import("node:stream").Transform>} Decompression stream
165
+ */
166
+ export async function createDecompressStream(algorithm) {
167
+ switch (algorithm) {
168
+ case Algorithm.NONE: {
169
+ const { PassThrough } = await import("node:stream");
170
+ return new PassThrough();
171
+ }
172
+
173
+ case Algorithm.GZIP:
174
+ return zlib.createGunzip();
175
+
176
+ case Algorithm.BROTLI:
177
+ return zlib.createBrotliDecompress();
178
+
179
+ case Algorithm.ZSTD: {
180
+ const zstd = await tryLoadModule("@bokuweb/zstd-wasm");
181
+ if (!zstd) {
182
+ throw new Error("zstd not available. Install @bokuweb/zstd-wasm");
183
+ }
184
+ const { Transform } = await import("node:stream");
185
+ const chunks = [];
186
+ return new Transform({
187
+ transform(chunk, encoding, callback) {
188
+ chunks.push(chunk);
189
+ callback();
190
+ },
191
+ async flush(callback) {
192
+ try {
193
+ const input = Buffer.concat(chunks);
194
+ const decompressed = await zstd.decompress(input);
195
+ this.push(Buffer.from(decompressed));
196
+ callback();
197
+ } catch (err) {
198
+ callback(err);
199
+ }
200
+ },
201
+ });
202
+ }
203
+
204
+ case Algorithm.XZ: {
205
+ const lzma = await tryLoadModule("lzma-native");
206
+ if (!lzma) {
207
+ throw new Error("xz not available. Install lzma-native");
208
+ }
209
+ return lzma.createDecompressor();
210
+ }
211
+
212
+ default:
213
+ throw new Error(`Unknown decompression algorithm: ${algorithm}`);
214
+ }
215
+ }
216
+
217
+ /**
218
+ * Compress a file to destination.
219
+ * @param {string} inputPath - Source file path
220
+ * @param {string} outputPath - Destination file path
221
+ * @param {{ algorithm?: string, level?: number }} options
222
+ * @returns {Promise<{ originalSize: number, compressedSize: number, ratio: number }>}
223
+ */
224
+ export async function compressFile(inputPath, outputPath, options = {}) {
225
+ const algorithm = options.algorithm ?? Algorithm.GZIP;
226
+ const level = options.level ?? 6;
227
+
228
+ const inputStat = fs.statSync(inputPath);
229
+ const originalSize = inputStat.size;
230
+
231
+ const readStream = fs.createReadStream(inputPath);
232
+ const writeStream = fs.createWriteStream(outputPath);
233
+ const compressStream = await createCompressStream(algorithm, { level });
234
+
235
+ await pipeline(readStream, compressStream, writeStream);
236
+
237
+ const outputStat = fs.statSync(outputPath);
238
+ const compressedSize = outputStat.size;
239
+ const ratio = originalSize > 0 ? 1 - compressedSize / originalSize : 0;
240
+
241
+ return { originalSize, compressedSize, ratio };
242
+ }
243
+
244
+ /**
245
+ * Decompress a file to destination.
246
+ * @param {string} inputPath - Compressed file path
247
+ * @param {string} outputPath - Destination file path
248
+ * @param {string} algorithm - Algorithm used for compression
249
+ * @returns {Promise<void>}
250
+ */
251
+ export async function decompressFile(inputPath, outputPath, algorithm) {
252
+ const readStream = fs.createReadStream(inputPath);
253
+ const writeStream = fs.createWriteStream(outputPath);
254
+ const decompressStream = await createDecompressStream(algorithm);
255
+
256
+ await pipeline(readStream, decompressStream, writeStream);
257
+ }
258
+
259
+ /**
260
+ * Detect algorithm from file extension.
261
+ * @param {string} filePath - File path with extension
262
+ * @returns {string|null} Algorithm name or null if unknown
263
+ */
264
+ export function detectAlgorithm(filePath) {
265
+ const ext = path.extname(filePath).toLowerCase();
266
+ const fullExt = filePath.toLowerCase();
267
+
268
+ if (fullExt.endsWith(".tar.gz") || fullExt.endsWith(".tgz")) {
269
+ return Algorithm.GZIP;
270
+ }
271
+ if (fullExt.endsWith(".tar.zst")) {
272
+ return Algorithm.ZSTD;
273
+ }
274
+ if (fullExt.endsWith(".tar.br")) {
275
+ return Algorithm.BROTLI;
276
+ }
277
+ if (fullExt.endsWith(".tar.xz")) {
278
+ return Algorithm.XZ;
279
+ }
280
+ if (ext === ".tar") {
281
+ return Algorithm.NONE;
282
+ }
283
+
284
+ return null;
285
+ }