autosnap 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -0
- package/README.md +107 -0
- package/bin/autosnap.js +39 -0
- package/package.json +37 -0
- package/src/cli/clear.js +35 -0
- package/src/cli/config.js +19 -0
- package/src/cli/history.js +126 -0
- package/src/cli/init.js +73 -0
- package/src/cli/restore.js +77 -0
- package/src/cli/start.js +37 -0
- package/src/cli/stop.js +25 -0
- package/src/core/daemon.js +20 -0
- package/src/core/diff.js +96 -0
- package/src/core/snapshot.js +167 -0
- package/src/core/watcher.js +75 -0
- package/src/utils/config.js +58 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
ISC License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025, S.Shanmukha Sai Reddy
|
|
4
|
+
|
|
5
|
+
Permission to use, copy, modify, and/or distribute this software for any purpose
|
|
6
|
+
with or without fee is hereby granted, provided that the above copyright notice
|
|
7
|
+
and this permission notice appear in all copies.
|
|
8
|
+
|
|
9
|
+
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
|
10
|
+
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
|
11
|
+
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
|
12
|
+
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
|
13
|
+
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
|
14
|
+
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
|
|
15
|
+
THIS SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
|
|
2
|
+
<div align="center">
|
|
3
|
+
|
|
4
|
+
# a u t o s n a p
|
|
5
|
+
### Intelligent Local File History & Protection in Time
|
|
6
|
+
|
|
7
|
+
[](https://www.npmjs.com/package/autosnap)
|
|
8
|
+
[](LICENSE)
|
|
9
|
+
[]()
|
|
10
|
+
|
|
11
|
+
</div>
|
|
12
|
+
|
|
13
|
+
---
|
|
14
|
+
|
|
15
|
+
**Autosnap** is a lightweight, background service that automatically watches your code for changes and creates **intelligent snapshots**, allowing you to travel back in time to any version of any file.
|
|
16
|
+
|
|
17
|
+
Think of it as a local, granular Time Machine for your code—working silently to protect your work from accidental deletion or bad edits.
|
|
18
|
+
|
|
19
|
+
---
|
|
20
|
+
|
|
21
|
+
## 🚀 Key Features
|
|
22
|
+
|
|
23
|
+
| Feature | Description |
|
|
24
|
+
| :--- | :--- |
|
|
25
|
+
| **🌲 Tree-Based History** | Every edit branches off. Restoring an old version creates a new branch, so **nothing is ever overwritten or lost**. |
|
|
26
|
+
| **📉 Compact Storage** | Uses **Forward Delta** + **Brotli Compression** to store dozens of versions in just a few KB (typ. ~80% space reduction). |
|
|
27
|
+
| **⚡ Zero Friction** | Runs silently in the background. No manual `git commit` needed for every small change. |
|
|
28
|
+
| **🔄 Non-Destructive Restore** | "Pivots" your workspace to any past version instantly without creating duplicate snapshots. |
|
|
29
|
+
|
|
30
|
+
---
|
|
31
|
+
|
|
32
|
+
## 📦 Installation
|
|
33
|
+
|
|
34
|
+
Install globally via NPM to use the `autosnap` CLI anywhere:
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
npm install -g autosnap
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
---
|
|
41
|
+
|
|
42
|
+
## 🛠️ Usage Guide
|
|
43
|
+
|
|
44
|
+
### 1. Start Watching
|
|
45
|
+
Go to your project directory and start the background watcher.
|
|
46
|
+
```bash
|
|
47
|
+
autosnap start
|
|
48
|
+
```
|
|
49
|
+
> **Note:** Autosnap automatically ignores `node_modules`, `.git`, `dist`, and other noisy folders.
|
|
50
|
+
|
|
51
|
+
### 2. View History
|
|
52
|
+
See a summary of all tracked files or deep-dive into a specific file's timeline.
|
|
53
|
+
|
|
54
|
+
**Summary View:**
|
|
55
|
+
```bash
|
|
56
|
+
autosnap history
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
**Detailed Tree View:**
|
|
60
|
+
```bash
|
|
61
|
+
autosnap history src/server.js
|
|
62
|
+
```
|
|
63
|
+
* **Snapshot ID**: (First Column) Key used for restoration.
|
|
64
|
+
* **Tree Structure**: Visualizes parent-child relationships.
|
|
65
|
+
* **(HEAD)**: Your current working version.
|
|
66
|
+
|
|
67
|
+
### 3. Restore in Time
|
|
68
|
+
Travel back safely. This updates the file in your workspace and "pivots" the history tree to that version.
|
|
69
|
+
|
|
70
|
+
```bash
|
|
71
|
+
autosnap restore <SNAPSHOT_ID>
|
|
72
|
+
```
|
|
73
|
+
*Example:* `autosnap restore mjibpjwi`
|
|
74
|
+
|
|
75
|
+
### 4. Other Commands
|
|
76
|
+
* `autosnap stop`: Stop the background process.
|
|
77
|
+
* `autosnap clear`: **RESET** all history (Irreversible!).
|
|
78
|
+
* `autosnap settings`: Open the config file.
|
|
79
|
+
|
|
80
|
+
---
|
|
81
|
+
|
|
82
|
+
## ⚙️ Configuration
|
|
83
|
+
A `.autosnap/config.json` is created in your project root.
|
|
84
|
+
|
|
85
|
+
```json
|
|
86
|
+
{
|
|
87
|
+
"debounce": 2000, // Wait 2s after typing stops before snapshotting
|
|
88
|
+
"maxSize": 102400, // Max file size to track (in bytes)
|
|
89
|
+
"include": ["**/*"], // Files to watch
|
|
90
|
+
"exclude": ["*.log"] // Files to ignore
|
|
91
|
+
}
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
## 🧩 Architecture
|
|
95
|
+
|
|
96
|
+
Autosnap uses a modern **Forward-Delta Architecture**:
|
|
97
|
+
1. **Root Node**: Stores the full initial file content.
|
|
98
|
+
2. **Child Nodes**: Store only the *patches* (diffs) required to recreate the file from the parent.
|
|
99
|
+
3. **Storage**: All data is compressed (Brotli) and stored in succinct `.snap` files in `.autosnap/store/`.
|
|
100
|
+
|
|
101
|
+
---
|
|
102
|
+
|
|
103
|
+
<div align="center">
|
|
104
|
+
|
|
105
|
+
Made with ❤️ by [Shanmukha Sai Reddy](https://github.com/shanmukhasaireddy13)
|
|
106
|
+
|
|
107
|
+
</div>
|
package/bin/autosnap.js
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
const { program } = require('commander');
|
|
4
|
+
const chalk = require('chalk');
|
|
5
|
+
const packageJson = require('../package.json');
|
|
6
|
+
|
|
7
|
+
program
|
|
8
|
+
.version(packageJson.version)
|
|
9
|
+
.description('Autosnap: Intelligent Local File History');
|
|
10
|
+
|
|
11
|
+
program.command('init')
|
|
12
|
+
.description('Initialize Autosnap in the current directory')
|
|
13
|
+
.action(require('../src/cli/init'));
|
|
14
|
+
|
|
15
|
+
program.command('start')
|
|
16
|
+
.description('Start the background file watcher')
|
|
17
|
+
.action(require('../src/cli/start'));
|
|
18
|
+
|
|
19
|
+
program.command('stop')
|
|
20
|
+
.description('Stop the background file watcher')
|
|
21
|
+
.action(require('../src/cli/stop'));
|
|
22
|
+
|
|
23
|
+
program.command('history [file]')
|
|
24
|
+
.description('View snapshot history (summary or detail)')
|
|
25
|
+
.action(require('../src/cli/history'));
|
|
26
|
+
|
|
27
|
+
program.command('restore <id> [file]')
|
|
28
|
+
.description('Restore a snapshot or specific file')
|
|
29
|
+
.action(require('../src/cli/restore'));
|
|
30
|
+
|
|
31
|
+
program.command('clear')
|
|
32
|
+
.description('Clear all snapshot history')
|
|
33
|
+
.action(require('../src/cli/clear'));
|
|
34
|
+
|
|
35
|
+
program.command('settings')
|
|
36
|
+
.description('Open configuration')
|
|
37
|
+
.action(require('../src/cli/config'));
|
|
38
|
+
|
|
39
|
+
program.parse(process.argv);
|
package/package.json
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "autosnap",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "An intelligent, local file history & snapshot manager designed to protect your code from accidental loss.",
|
|
5
|
+
"main": "index.js",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"test": "echo \"Error: no test specified\" && exit 1",
|
|
8
|
+
"start": "node bin/autosnap.js start",
|
|
9
|
+
"history": "node bin/autosnap.js history",
|
|
10
|
+
"restore": "node bin/autosnap.js restore",
|
|
11
|
+
"clear": "node bin/autosnap.js clear",
|
|
12
|
+
"init": "node bin/autosnap.js init"
|
|
13
|
+
},
|
|
14
|
+
"bin": {
|
|
15
|
+
"autosnap": "./bin/autosnap.js"
|
|
16
|
+
},
|
|
17
|
+
"keywords": [
|
|
18
|
+
"snapshot",
|
|
19
|
+
"backup",
|
|
20
|
+
"history",
|
|
21
|
+
"local",
|
|
22
|
+
"git",
|
|
23
|
+
"auto-save",
|
|
24
|
+
"file-watcher"
|
|
25
|
+
],
|
|
26
|
+
"author": "S.Shanmukha Sai Reddy",
|
|
27
|
+
"license": "ISC",
|
|
28
|
+
"type": "commonjs",
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"chalk": "^4.1.2",
|
|
31
|
+
"chokidar": "^3.6.0",
|
|
32
|
+
"commander": "^14.0.2",
|
|
33
|
+
"diff": "^8.0.2",
|
|
34
|
+
"diff-match-patch": "^1.0.5",
|
|
35
|
+
"fs-extra": "^11.3.2"
|
|
36
|
+
}
|
|
37
|
+
}
|
package/src/cli/clear.js
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const chalk = require('chalk');
|
|
4
|
+
const { CONFIG_DIR, STORE_DIR } = require('../utils/config');
|
|
5
|
+
|
|
6
|
+
async function clear() {
|
|
7
|
+
const rootDir = process.cwd();
|
|
8
|
+
const storePath = path.join(rootDir, CONFIG_DIR, STORE_DIR);
|
|
9
|
+
|
|
10
|
+
try {
|
|
11
|
+
console.log(chalk.yellow('Clearing all snapshots...'));
|
|
12
|
+
|
|
13
|
+
// Clear Store (New Architecture)
|
|
14
|
+
if (await fs.pathExists(storePath)) {
|
|
15
|
+
await fs.emptyDir(storePath);
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
// Cleanup Legacy Directories (Safety)
|
|
19
|
+
const legacySnapshots = path.join(rootDir, CONFIG_DIR, 'snapshots');
|
|
20
|
+
if (await fs.pathExists(legacySnapshots)) {
|
|
21
|
+
await fs.remove(legacySnapshots);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const legacyBlobs = path.join(rootDir, CONFIG_DIR, 'blobs');
|
|
25
|
+
if (await fs.pathExists(legacyBlobs)) {
|
|
26
|
+
await fs.remove(legacyBlobs);
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
console.log(chalk.green('History cleared successfully.'));
|
|
30
|
+
} catch (err) {
|
|
31
|
+
console.error(chalk.red('Failed to clear history:'), err);
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
module.exports = clear;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
const { loadConfig, CONFIG_DIR } = require('../utils/config');
|
|
2
|
+
const chalk = require('chalk');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
|
|
5
|
+
async function config() {
|
|
6
|
+
try {
|
|
7
|
+
const rootDir = process.cwd();
|
|
8
|
+
const configData = await loadConfig(rootDir);
|
|
9
|
+
const configPath = path.join(rootDir, CONFIG_DIR, 'config.json');
|
|
10
|
+
|
|
11
|
+
console.log(chalk.bold(`Configuration (${configPath}):`));
|
|
12
|
+
console.log(JSON.stringify(configData, null, 2));
|
|
13
|
+
console.log(chalk.gray('\nEdit this file to change settings.'));
|
|
14
|
+
} catch (err) {
|
|
15
|
+
console.error(chalk.red('Failed to load config:'), err);
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
module.exports = config;
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const chalk = require('chalk');
|
|
4
|
+
const { readSnapHistory } = require('../core/snapshot');
|
|
5
|
+
const { STORE_DIR } = require('../utils/config');
|
|
6
|
+
|
|
7
|
+
async function history(filePattern) {
|
|
8
|
+
try {
|
|
9
|
+
const rootDir = process.cwd();
|
|
10
|
+
const storeDir = path.join(rootDir, '.auto-snap', STORE_DIR);
|
|
11
|
+
|
|
12
|
+
if (!await fs.pathExists(storeDir)) {
|
|
13
|
+
console.log(chalk.yellow('No snapshots found.'));
|
|
14
|
+
return;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
const statsList = [];
|
|
18
|
+
|
|
19
|
+
async function walk(dir) {
|
|
20
|
+
const files = await fs.readdir(dir);
|
|
21
|
+
for (const file of files) {
|
|
22
|
+
const fullPath = path.join(dir, file);
|
|
23
|
+
const stat = await fs.stat(fullPath);
|
|
24
|
+
if (stat.isDirectory()) {
|
|
25
|
+
await walk(fullPath);
|
|
26
|
+
} else if (file.endsWith('.snap')) {
|
|
27
|
+
const relPath = path.relative(storeDir, fullPath).replace(/\.snap$/, '');
|
|
28
|
+
|
|
29
|
+
// Filter if pattern provided
|
|
30
|
+
if (filePattern && !relPath.includes(filePattern)) continue;
|
|
31
|
+
|
|
32
|
+
try {
|
|
33
|
+
const historyData = await readSnapHistory(rootDir, relPath);
|
|
34
|
+
if (historyData) {
|
|
35
|
+
statsList.push({ file: relPath, data: historyData });
|
|
36
|
+
}
|
|
37
|
+
} catch (e) {
|
|
38
|
+
console.error(chalk.red(`Failed to read history for ${file}:`), e.message);
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
await walk(storeDir);
|
|
45
|
+
|
|
46
|
+
if (statsList.length === 0) {
|
|
47
|
+
console.log(chalk.yellow('No matching snapshots found.'));
|
|
48
|
+
return;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// --- DETAILED VIEW ---
|
|
52
|
+
if (filePattern) {
|
|
53
|
+
console.log(chalk.bold.underline(`Snapshot History for "${filePattern}":`));
|
|
54
|
+
|
|
55
|
+
for (const { file, data } of statsList) {
|
|
56
|
+
console.log(chalk.bold(`\nFile: ${file}`));
|
|
57
|
+
console.log(chalk.gray('------------------------------------------------------------------------------------------------------------------'));
|
|
58
|
+
console.log(chalk.bold(`| ${'Snapshot ID'.padEnd(15)} | ${'Parent'.padEnd(15)} | ${'Timestamp'.padEnd(30)} | ${'Stats'.padEnd(10)} | ${'Type'} `));
|
|
59
|
+
console.log(chalk.gray('------------------------------------------------------------------------------------------------------------------'));
|
|
60
|
+
|
|
61
|
+
// Show ALL versions (Tree View)
|
|
62
|
+
const snapData = data.raw;
|
|
63
|
+
const allNodes = Object.keys(snapData.i || {}).map(id => ({ id, ...snapData.i[id] }));
|
|
64
|
+
|
|
65
|
+
// Sort Newest -> Oldest
|
|
66
|
+
allNodes.sort((a, b) => b.t - a.t);
|
|
67
|
+
|
|
68
|
+
allNodes.forEach(node => {
|
|
69
|
+
const date = new Date(node.t);
|
|
70
|
+
const dateStr = date.toLocaleString(); // Human Readable
|
|
71
|
+
|
|
72
|
+
let statsStr = '';
|
|
73
|
+
if (node.s) {
|
|
74
|
+
const added = node.s[0] ? chalk.green(`+${node.s[0]}`) : '';
|
|
75
|
+
const removed = node.s[1] ? chalk.red(`-${node.s[1]}`) : '';
|
|
76
|
+
statsStr = `${added} ${removed}`.trim();
|
|
77
|
+
}
|
|
78
|
+
const type = node.p ? 'VERSION' : 'ROOT';
|
|
79
|
+
const isHead = (node.id === data.currentId) ? chalk.cyan('(HEAD)') : '';
|
|
80
|
+
const parentId = node.p || '-';
|
|
81
|
+
|
|
82
|
+
console.log(`| ${node.id.padEnd(15)} | ${parentId.padEnd(15)} | ${dateStr.padEnd(30)} | ${statsStr.padEnd(10)} | ${type} ${isHead}`);
|
|
83
|
+
});
|
|
84
|
+
console.log(chalk.gray('------------------------------------------------------------------------------------------------------------------'));
|
|
85
|
+
console.log(chalk.yellow(`\nTip: Use the 'Snapshot ID' (first column) to restore. Example: npm run restore ${allNodes[0].id}`));
|
|
86
|
+
}
|
|
87
|
+
return;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
// --- SUMMARY VIEW (Default) ---
|
|
91
|
+
console.log(chalk.bold.underline('Tracked Files Summary:'));
|
|
92
|
+
console.log(chalk.gray('Use "npm run history <filename>" to see detailed versions.\n'));
|
|
93
|
+
console.log(chalk.gray('--------------------------------------------------------------------------------'));
|
|
94
|
+
console.log(chalk.bold(`| ${'File'.padEnd(40)} | ${'Last Updated'.padEnd(30)} | ${'Versions'} `));
|
|
95
|
+
console.log(chalk.gray('--------------------------------------------------------------------------------'));
|
|
96
|
+
|
|
97
|
+
// Filter invalid entries
|
|
98
|
+
const validStats = statsList.filter(item => item.data && item.data.raw && item.data.raw.i);
|
|
99
|
+
|
|
100
|
+
validStats.sort((a, b) => {
|
|
101
|
+
const timeA = (a.data.raw.i[a.data.currentId]) ? a.data.raw.i[a.data.currentId].t : 0;
|
|
102
|
+
const timeB = (b.data.raw.i[b.data.currentId]) ? b.data.raw.i[b.data.currentId].t : 0;
|
|
103
|
+
return timeB - timeA;
|
|
104
|
+
});
|
|
105
|
+
|
|
106
|
+
for (const { file, data } of validStats) {
|
|
107
|
+
const currentId = data.currentId;
|
|
108
|
+
const currentNode = (data.raw.i && currentId) ? data.raw.i[currentId] : null;
|
|
109
|
+
const dateStr = currentNode ? new Date(currentNode.t).toLocaleString() : 'N/A';
|
|
110
|
+
|
|
111
|
+
// Count total versions in the file
|
|
112
|
+
const count = data.raw.i ? Object.keys(data.raw.i).length : 0;
|
|
113
|
+
|
|
114
|
+
console.log(`| ${file.padEnd(40)} | ${dateStr.padEnd(30)} | ${count} `);
|
|
115
|
+
}
|
|
116
|
+
console.log(chalk.gray('--------------------------------------------------------------------------------'));
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
} catch (err) {
|
|
120
|
+
console.error(chalk.red('Failed to retrieve history:'), err);
|
|
121
|
+
// Debug
|
|
122
|
+
// console.error(err.stack);
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
module.exports = history;
|
package/src/cli/init.js
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
const { initConfig, loadConfig } = require('../utils/config');
|
|
2
|
+
const { createSnapshot } = require('../core/snapshot');
|
|
3
|
+
const chalk = require('chalk');
|
|
4
|
+
const fs = require('fs-extra');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const chokidar = require('chokidar'); // Use chokidar's match logic? Or just simple glob.
|
|
7
|
+
|
|
8
|
+
async function init() {
|
|
9
|
+
try {
|
|
10
|
+
const rootDir = process.cwd();
|
|
11
|
+
const created = await initConfig(rootDir);
|
|
12
|
+
|
|
13
|
+
if (created) {
|
|
14
|
+
console.log(chalk.green('Auto-Snap initialized successfully!'));
|
|
15
|
+
console.log(chalk.blue('Config created at .auto-snap/config.json'));
|
|
16
|
+
|
|
17
|
+
// Perform Initial Snapshot
|
|
18
|
+
console.log(chalk.blue('Creating initial snapshots of all files...'));
|
|
19
|
+
const config = await loadConfig(rootDir);
|
|
20
|
+
|
|
21
|
+
// We can reuse the Watcher logic or just manually scan.
|
|
22
|
+
// Let's use chokidar's glob to match config.include/exclude
|
|
23
|
+
// But checking files one by one is safer.
|
|
24
|
+
// Using chokidar just to scan is easiest way to reuse logic.
|
|
25
|
+
|
|
26
|
+
const watcher = chokidar.watch(config.include, {
|
|
27
|
+
cwd: rootDir,
|
|
28
|
+
ignored: [...config.exclude, /(^|[\/\\])\../],
|
|
29
|
+
persistent: false, // Exit when done
|
|
30
|
+
ignoreInitial: false
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
watcher.on('add', async (relPath) => {
|
|
34
|
+
const absPath = path.join(rootDir, relPath);
|
|
35
|
+
try {
|
|
36
|
+
// Check size constraint manually as watcher does
|
|
37
|
+
const stats = await fs.stat(absPath);
|
|
38
|
+
const fileSizeMB = stats.size / (1024 * 1024);
|
|
39
|
+
if (fileSizeMB > config.maxFileSizeMB) return;
|
|
40
|
+
|
|
41
|
+
await createSnapshot(absPath, rootDir, config);
|
|
42
|
+
} catch (e) { }
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
// Wait for "ready" event
|
|
46
|
+
await new Promise(resolve => {
|
|
47
|
+
watcher.on('ready', () => {
|
|
48
|
+
resolve();
|
|
49
|
+
});
|
|
50
|
+
});
|
|
51
|
+
|
|
52
|
+
// Allow a moment for async 'add' handlers to fire?
|
|
53
|
+
// Actually 'ready' fires after initial scan. 'add' events fire synchronously during scan in chokidar usually?
|
|
54
|
+
// Safer to just close after a small delay or use awaitWriteFinish logic?
|
|
55
|
+
// Standard chokidar scan: events emit, then ready.
|
|
56
|
+
// We need to wait for the async createSnapshot calls.
|
|
57
|
+
// Let's use a simpler approach: glob. But we don't have glob package installed.
|
|
58
|
+
// We have fs-extra.
|
|
59
|
+
// Actually chokidar works fine. Just wait a bit after ready.
|
|
60
|
+
|
|
61
|
+
await new Promise(r => setTimeout(r, 1000));
|
|
62
|
+
watcher.close();
|
|
63
|
+
console.log(chalk.green('Initial snapshots complete.'));
|
|
64
|
+
|
|
65
|
+
} else {
|
|
66
|
+
console.log(chalk.yellow('Auto-Snap is already initialized.'));
|
|
67
|
+
}
|
|
68
|
+
} catch (err) {
|
|
69
|
+
console.error(chalk.red('Initialization failed:'), err);
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
module.exports = init;
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const chalk = require('chalk');
|
|
4
|
+
const { restoreSnapshot } = require('../core/snapshot');
|
|
5
|
+
const { STORE_DIR, CONFIG_DIR } = require('../utils/config');
|
|
6
|
+
|
|
7
|
+
async function restore(targetId, filePattern) {
|
|
8
|
+
const rootDir = process.cwd();
|
|
9
|
+
const storeDir = path.join(rootDir, '.auto-snap', STORE_DIR);
|
|
10
|
+
|
|
11
|
+
if (!await fs.pathExists(storeDir)) {
|
|
12
|
+
console.log(chalk.red('No snapshots found.'));
|
|
13
|
+
return;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
let targetFiles = [];
|
|
17
|
+
|
|
18
|
+
// Find all .snap files
|
|
19
|
+
async function walk(dir) {
|
|
20
|
+
const files = await fs.readdir(dir);
|
|
21
|
+
for (const file of files) {
|
|
22
|
+
const fullPath = path.join(dir, file);
|
|
23
|
+
const stat = await fs.stat(fullPath);
|
|
24
|
+
if (stat.isDirectory()) {
|
|
25
|
+
await walk(fullPath);
|
|
26
|
+
} else if (file.endsWith('.snap')) {
|
|
27
|
+
const relPath = path.relative(storeDir, fullPath).replace(/\.snap$/, '');
|
|
28
|
+
if (!filePattern || relPath.includes(filePattern)) {
|
|
29
|
+
targetFiles.push({ snapPath: fullPath, relPath, absUserPath: path.join(rootDir, relPath) });
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
await walk(storeDir);
|
|
36
|
+
|
|
37
|
+
if (targetFiles.length === 0) {
|
|
38
|
+
console.log(chalk.yellow('No matching files found.'));
|
|
39
|
+
return;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
console.log(chalk.blue(`Restoring to version ID: ${targetId}...`));
|
|
43
|
+
|
|
44
|
+
let successCount = 0;
|
|
45
|
+
let failCount = 0;
|
|
46
|
+
|
|
47
|
+
for (const { absUserPath, relPath } of targetFiles) {
|
|
48
|
+
try {
|
|
49
|
+
// Restore Snapshot (Pivots the Tree & Returns Content)
|
|
50
|
+
const content = await restoreSnapshot(absUserPath, rootDir, targetId);
|
|
51
|
+
|
|
52
|
+
// Write to Workspace
|
|
53
|
+
await fs.ensureDir(path.dirname(absUserPath));
|
|
54
|
+
await fs.writeFile(absUserPath, content);
|
|
55
|
+
|
|
56
|
+
console.log(chalk.green(`Restored ${relPath}`));
|
|
57
|
+
successCount++;
|
|
58
|
+
|
|
59
|
+
} catch (err) {
|
|
60
|
+
// Silence "not found" errors for unrelated files
|
|
61
|
+
if (err.message.includes('not found') || err.message.includes('Version')) {
|
|
62
|
+
// Silent skip
|
|
63
|
+
} else {
|
|
64
|
+
console.error(chalk.red(`Failed to restore ${relPath}:`), err.message);
|
|
65
|
+
failCount++;
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
if (successCount === 0 && failCount === 0) {
|
|
71
|
+
console.log(chalk.yellow(`Version ${targetId} not found in any matching files.`));
|
|
72
|
+
} else {
|
|
73
|
+
console.log(chalk.gray(`\nSummary: ${successCount} restored, ${failCount} failed.`));
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
module.exports = restore;
|
package/src/cli/start.js
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
const { spawn } = require('child_process');
|
|
2
|
+
const fs = require('fs-extra');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const chalk = require('chalk');
|
|
5
|
+
const { CONFIG_DIR } = require('../utils/config');
|
|
6
|
+
|
|
7
|
+
async function start() {
|
|
8
|
+
const pidFile = path.join(process.cwd(), CONFIG_DIR, 'watcher.pid');
|
|
9
|
+
|
|
10
|
+
if (await fs.pathExists(pidFile)) {
|
|
11
|
+
const pid = parseInt(await fs.readFile(pidFile, 'utf8'));
|
|
12
|
+
try {
|
|
13
|
+
process.kill(pid, 0); // Check if running
|
|
14
|
+
console.log(chalk.yellow('Auto-Snap watcher is already running.'));
|
|
15
|
+
return;
|
|
16
|
+
} catch (e) {
|
|
17
|
+
// Not running, clean up pid file
|
|
18
|
+
await fs.remove(pidFile);
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
console.log(chalk.blue('Starting Auto-Snap watcher in background...'));
|
|
23
|
+
|
|
24
|
+
const daemonPath = path.join(__dirname, '../../src/core/daemon.js');
|
|
25
|
+
const child = spawn('node', [daemonPath], {
|
|
26
|
+
detached: true,
|
|
27
|
+
stdio: 'ignore',
|
|
28
|
+
cwd: process.cwd()
|
|
29
|
+
});
|
|
30
|
+
|
|
31
|
+
await fs.writeFile(pidFile, child.pid.toString());
|
|
32
|
+
|
|
33
|
+
child.unref();
|
|
34
|
+
console.log(chalk.green(`Watcher started (PID: ${child.pid})`));
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
module.exports = start;
|
package/src/cli/stop.js
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const chalk = require('chalk');
|
|
4
|
+
const { CONFIG_DIR } = require('../utils/config');
|
|
5
|
+
|
|
6
|
+
async function stop() {
|
|
7
|
+
const pidFile = path.join(process.cwd(), CONFIG_DIR, 'watcher.pid');
|
|
8
|
+
|
|
9
|
+
if (!await fs.pathExists(pidFile)) {
|
|
10
|
+
console.log(chalk.yellow('Auto-Snap watcher is not running.'));
|
|
11
|
+
return;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
try {
|
|
15
|
+
const pid = parseInt(await fs.readFile(pidFile, 'utf8'));
|
|
16
|
+
process.kill(pid, 'SIGINT');
|
|
17
|
+
console.log(chalk.green('Auto-Snap watcher stopped.'));
|
|
18
|
+
} catch (e) {
|
|
19
|
+
console.log(chalk.red('Failed to stop watcher (maybe it was already stopped?)'));
|
|
20
|
+
} finally {
|
|
21
|
+
await fs.remove(pidFile);
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
module.exports = stop;
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
const Watcher = require('./watcher');
|
|
2
|
+
const { loadConfig } = require('../utils/config');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
|
|
5
|
+
(async () => {
|
|
6
|
+
const rootDir = process.cwd();
|
|
7
|
+
const config = await loadConfig(rootDir);
|
|
8
|
+
|
|
9
|
+
const watcher = new Watcher(rootDir, config);
|
|
10
|
+
watcher.start();
|
|
11
|
+
|
|
12
|
+
// Keep process alive
|
|
13
|
+
process.stdin.resume();
|
|
14
|
+
|
|
15
|
+
// Handle signals
|
|
16
|
+
process.on('SIGINT', () => {
|
|
17
|
+
watcher.stop();
|
|
18
|
+
process.exit(0);
|
|
19
|
+
});
|
|
20
|
+
})();
|
package/src/core/diff.js
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
const { diffLines, diffChars } = require('diff');
|
|
2
|
+
const crypto = require('crypto');
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
function calculateSimilarity(oldContent, newContent) {
|
|
7
|
+
if (!oldContent || !newContent) return 0;
|
|
8
|
+
const oldLines = oldContent.split('\n');
|
|
9
|
+
const newLines = newContent.split('\n');
|
|
10
|
+
const diff = diffLines(oldContent, newContent);
|
|
11
|
+
|
|
12
|
+
let unchangedLines = 0;
|
|
13
|
+
diff.forEach(part => {
|
|
14
|
+
if (!part.added && !part.removed) {
|
|
15
|
+
unchangedLines += part.count;
|
|
16
|
+
}
|
|
17
|
+
});
|
|
18
|
+
|
|
19
|
+
const totalLines = Math.max(oldLines.length, newLines.length);
|
|
20
|
+
return totalLines === 0 ? 1 : unchangedLines / totalLines;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
function isTrivialChange(oldContent, newContent, config) {
|
|
24
|
+
const { minCharChange, minLineChange } = config;
|
|
25
|
+
|
|
26
|
+
const lineDiff = diffLines(oldContent, newContent);
|
|
27
|
+
let changedLines = 0;
|
|
28
|
+
lineDiff.forEach(part => {
|
|
29
|
+
if (part.added || part.removed) {
|
|
30
|
+
changedLines += part.count;
|
|
31
|
+
}
|
|
32
|
+
});
|
|
33
|
+
|
|
34
|
+
if (changedLines >= minLineChange) return false; // Not trivial if lines changed enough
|
|
35
|
+
|
|
36
|
+
const charDiff = diffChars(oldContent, newContent);
|
|
37
|
+
let changedChars = 0;
|
|
38
|
+
charDiff.forEach(part => {
|
|
39
|
+
if (part.added || part.removed) {
|
|
40
|
+
changedChars += part.count;
|
|
41
|
+
}
|
|
42
|
+
});
|
|
43
|
+
|
|
44
|
+
return changedChars < minCharChange;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
function isMeaningfulChange(oldContent, newContent, config) {
|
|
48
|
+
if (!oldContent && newContent) return true; // New file
|
|
49
|
+
if (!newContent) return true; // Deleted file
|
|
50
|
+
|
|
51
|
+
// Rule 1: Trivial Change
|
|
52
|
+
if (isTrivialChange(oldContent, newContent, config)) {
|
|
53
|
+
console.log('[Diff] Trivial change skipped.');
|
|
54
|
+
return false;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// Rule 2: Whitespace Check (User Request)
|
|
58
|
+
if (config.ignoreWhitespace) {
|
|
59
|
+
const cleanOld = oldContent.replace(/\s+/g, '');
|
|
60
|
+
const cleanNew = newContent.replace(/\s+/g, '');
|
|
61
|
+
if (cleanOld === cleanNew) {
|
|
62
|
+
console.log('[Diff] Whitespace-only change skipped.');
|
|
63
|
+
return false;
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
// Rule 3: Similarity Check
|
|
68
|
+
const similarity = calculateSimilarity(oldContent, newContent);
|
|
69
|
+
if (similarity >= (config.similarityThreshold || 0.98)) {
|
|
70
|
+
console.log(`[Diff] Content too similar (${(similarity * 100).toFixed(2)}%). Skipped.`);
|
|
71
|
+
return false;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
return true;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
function getDiffStats(oldContent, newContent) {
|
|
78
|
+
if (!oldContent) {
|
|
79
|
+
return { added: newContent ? newContent.split('\n').length : 0, removed: 0 };
|
|
80
|
+
}
|
|
81
|
+
const changes = diffLines(oldContent, newContent);
|
|
82
|
+
let added = 0;
|
|
83
|
+
let removed = 0;
|
|
84
|
+
changes.forEach(part => {
|
|
85
|
+
if (part.added) added += part.count;
|
|
86
|
+
if (part.removed) removed += part.count;
|
|
87
|
+
});
|
|
88
|
+
return { added, removed };
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
module.exports = {
|
|
92
|
+
isMeaningfulChange,
|
|
93
|
+
getDiffStats,
|
|
94
|
+
|
|
95
|
+
calculateSimilarity
|
|
96
|
+
};
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const zlib = require('zlib');
|
|
4
|
+
const util = require('util');
|
|
5
|
+
const DiffMatchPatch = require('diff-match-patch');
|
|
6
|
+
const { getDiffStats } = require('./diff');
|
|
7
|
+
const { STORE_DIR } = require('../utils/config');
|
|
8
|
+
|
|
9
|
+
const brotliCompress = util.promisify(zlib.brotliCompress);
|
|
10
|
+
const brotliDecompress = util.promisify(zlib.brotliDecompress);
|
|
11
|
+
const dmp = new DiffMatchPatch();
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* .snap Data Structure (Compact Tree):
|
|
15
|
+
* {
|
|
16
|
+
* c: "current_node_id",
|
|
17
|
+
* i: {
|
|
18
|
+
* "root_id": { t: 123, b: "full_body", s: null },
|
|
19
|
+
* "child_id": { p: "parent_id", t: 124, d: "patch_text", s: [add, rem] }
|
|
20
|
+
* }
|
|
21
|
+
* }
|
|
22
|
+
*/
|
|
23
|
+
|
|
24
|
+
/**
 * Create (or extend) the snapshot tree for a single file.
 *
 * Versions are stored as a compact tree: the root node keeps the full
 * body (`b`), every descendant keeps only a forward patch from its
 * parent (`d`) plus [added, removed] line stats (`s`). The tree is
 * persisted as Brotli-compressed JSON at
 * `.auto-snap/<STORE_DIR>/<basename>.snap`.
 *
 * NOTE(review): the snap file is keyed by basename only, so two watched
 * files with the same name in different directories collide — confirm
 * this is intended (see also the "simplified lookup" note in
 * readSnapHistory).
 *
 * @param {string} filePath - Absolute path of the file to snapshot.
 * @param {string} rootDir  - Project root containing the .auto-snap store.
 * @param {object} config   - Loaded autosnap configuration (unused here today).
 */
async function createSnapshot(filePath, rootDir, config) {
  try {
    const storeDir = path.join(rootDir, '.auto-snap', STORE_DIR);
    await fs.ensureDir(storeDir);
    const snapPath = path.join(storeDir, `${path.basename(filePath)}.snap`);

    const currentContent = await fs.readFile(filePath, 'utf8');
    let snapData = { c: null, i: {} };

    // Load the existing tree, if any.
    if (await fs.pathExists(snapPath)) {
      const buffer = await fs.readFile(snapPath);
      snapData = JSON.parse((await brotliDecompress(buffer)).toString());
    }

    const now = Date.now();
    const newId = now.toString(36); // Timestamp-derived ID (unique per ms).

    if (!snapData.c) {
      // First snapshot for this file -> store the full body as the root.
      snapData.i[newId] = {
        t: now,
        b: currentContent, // Full Body
        s: null
      };
      snapData.c = newId;
      console.log(`[Snapshot] Created Root for ${path.basename(filePath)}`);
    } else {
      // Branch from the current head (forward patch: parent -> child).
      const parentId = snapData.c;
      const parentContent = reconstructContent(snapData, parentId);

      const patches = dmp.patch_make(parentContent, currentContent);
      if (patches.length === 0) {
        // No change — bail out before serializing patches / computing
        // stats, and skip rewriting the snap file entirely.
        return;
      }

      const patchText = dmp.patch_toText(patches);
      const stats = getDiffStats(parentContent, currentContent);

      snapData.i[newId] = {
        p: parentId,
        t: now,
        d: patchText,
        s: [stats.added, stats.removed]
      };
      snapData.c = newId;
      console.log(`[Snapshot] Saved version for ${path.basename(filePath)} (Parent: ${parentId})`);
    }

    // Persist the updated tree, Brotli-compressed.
    const compressed = await brotliCompress(Buffer.from(JSON.stringify(snapData)));
    await fs.writeFile(snapPath, compressed);

  } catch (error) {
    // Best-effort by design: a failed snapshot must never crash the watcher.
    console.error(`Failed to create snapshot for ${filePath}:`, error);
  }
}
|
|
86
|
+
|
|
87
|
+
/**
 * Restore a file's content to a specific version of its snapshot tree.
 *
 * Reconstructs the target version's full text, then pivots the tree's
 * "current" pointer (`c`) onto that version so the next change branches
 * from it, and persists the updated tree. The caller is responsible for
 * writing the returned content back to disk.
 *
 * @param {string} filePath - Path of the file being restored (basename keys the .snap file).
 * @param {string} rootDir  - Project root containing the .auto-snap store.
 * @param {string} targetId - Node ID of the version to restore.
 * @returns {Promise<string>} The reconstructed content of the target version.
 * @throws {Error} If the .snap file or the requested version does not exist.
 */
async function restoreSnapshot(filePath, rootDir, targetId) {
  const snapFile = path.join(
    rootDir,
    '.auto-snap',
    STORE_DIR,
    `${path.basename(filePath)}.snap`
  );

  if (!(await fs.pathExists(snapFile))) {
    throw new Error('Snapshot file not found');
  }

  const raw = await fs.readFile(snapFile);
  const tree = JSON.parse((await brotliDecompress(raw)).toString());

  if (!tree?.i?.[targetId]) {
    throw new Error(`Version ${targetId} not found`);
  }

  // Rebuild the requested version's full text by replaying patches from the root.
  const restored = reconstructContent(tree, targetId);

  // Pivot the current pointer and save the updated state.
  tree.c = targetId;
  const compressed = await brotliCompress(Buffer.from(JSON.stringify(tree)));
  await fs.writeFile(snapFile, compressed);

  return restored;
}
|
|
109
|
+
|
|
110
|
+
/**
 * Rebuild the full text of a snapshot version by walking the tree.
 *
 * Follows parent pointers from `targetId` back to the root (the only
 * node carrying a full body in `b`), then replays the stored forward
 * patches in order root -> target.
 *
 * @param {object} snapData - Parsed .snap tree ({ c, i }).
 * @param {string} targetId - Node ID of the version to reconstruct.
 * @returns {string} The reconstructed file content.
 * @throws {Error} If a node in the parent chain is missing from the index.
 */
function reconstructContent(snapData, targetId) {
  // 1. Trace the chain of IDs from target back to the root.
  //    (Named `chain`, not `path`, so it does not shadow the path module.)
  const chain = [];
  let curr = targetId;
  while (curr) {
    const node = snapData.i[curr];
    if (!node) {
      // Fail loudly on a corrupt/truncated tree instead of a TypeError.
      throw new Error(`Snapshot node ${curr} not found`);
    }
    chain.unshift(curr); // Prepend: [root, ..., target]
    if (node.b !== undefined) break; // Root found (only the root has a body).
    curr = node.p;
  }

  // 2. Start from the root's full body and apply each forward patch.
  let content = snapData.i[chain[0]].b;
  for (let i = 1; i < chain.length; i++) {
    const patches = dmp.patch_fromText(snapData.i[chain[i]].d);
    content = dmp.patch_apply(patches, content)[0]; // [0] = patched text
  }

  return content;
}
|
|
135
|
+
|
|
136
|
+
/**
 * Load a file's snapshot tree and reconstruct its current head content.
 *
 * Returns null when no .snap file exists for the given path.
 * Note: lookup is by basename only (simplified — files sharing a name
 * map to the same snap file).
 *
 * @param {string} rootDir - Project root containing the .auto-snap store.
 * @param {string} relPath - Path of the file relative to rootDir.
 * @returns {Promise<{currentId: string, currentContent: string, raw: object}|null>}
 */
async function readSnapHistory(rootDir, relPath) {
  const snapFile = path.join(
    rootDir,
    '.auto-snap',
    STORE_DIR,
    `${path.basename(relPath)}.snap`
  );

  const exists = await fs.pathExists(snapFile);
  if (!exists) return null;

  const compressed = await fs.readFile(snapFile);
  const tree = JSON.parse((await brotliDecompress(compressed)).toString());

  // Structured result for CLI/watcher: the head content is reconstructed
  // from the tree's current pointer.
  return {
    currentId: tree.c,
    currentContent: reconstructContent(tree, tree.c),
    raw: tree
  };
}
|
|
155
|
+
|
|
156
|
+
// Helper to record deletions (optional implementation)
|
|
157
|
+
async function recordDeletion(filePath, rootDir, config) {
  // Intentional no-op stub: deletion tracking is not implemented yet.
  // Future options: log the event, or append a tombstone "deleted" node
  // to the file's snapshot tree so history shows when it disappeared.
  // Kept in the public API so callers can wire it up without changes later.
}
|
|
161
|
+
|
|
162
|
+
// Public API of the snapshot store (consumed by the watcher and CLI).
module.exports = {
  createSnapshot,
  restoreSnapshot,
  readSnapHistory,
  recordDeletion
};
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
const chokidar = require('chokidar');
|
|
2
|
+
const fs = require('fs-extra');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const chalk = require('chalk');
|
|
5
|
+
const { isMeaningfulChange } = require('./diff');
|
|
6
|
+
const { createSnapshot, readSnapHistory } = require('./snapshot');
|
|
7
|
+
|
|
8
|
+
/**
 * Watches project files with chokidar and snapshots meaningful changes.
 */
class Watcher {
  /**
   * @param {string} rootDir - Project root to watch.
   * @param {object} config  - Loaded autosnap configuration.
   */
  constructor(rootDir, config) {
    this.rootDir = rootDir;
    this.config = config;
    this.watcher = null;
    // Paths currently being handled; prevents overlapping processing of
    // rapid-fire events for the same file.
    this.processing = new Set();
  }

  /**
   * Start watching config.include patterns under rootDir.
   * Dotfiles and config.exclude patterns are ignored; writes are
   * debounced via chokidar's awaitWriteFinish.
   */
  start() {
    console.log(chalk.blue(`Starting Auto-Snap watcher in ${this.rootDir}...`));
    const ignored = [...this.config.exclude, /(^|[\/\\])\../];

    this.watcher = chokidar.watch(this.config.include, {
      cwd: this.rootDir,
      ignored: ignored,
      persistent: true,
      ignoreInitial: true,
      awaitWriteFinish: {
        stabilityThreshold: this.config.debounce || 2000,
        pollInterval: 100
      }
    });

    this.watcher
      // Callback param named relPath (not `path`) so it does not shadow
      // the `path` module required at the top of this file.
      .on('add', (relPath) => this.handleChange(relPath, 'add'))
      .on('change', (relPath) => this.handleChange(relPath, 'change'))
      .on('error', (error) => console.error(chalk.red(`Watcher error: ${error}`)));

    console.log(chalk.green('Watcher is active.'));
  }

  /**
   * Handle a single add/change event: snapshot the file if its content
   * meaningfully differs from the current snapshot head.
   *
   * @param {string} relPath - Path relative to rootDir (as reported by chokidar).
   * @param {string} type - Event type ('add' | 'change'); currently informational.
   */
  async handleChange(relPath, type) {
    if (this.processing.has(relPath)) return;
    this.processing.add(relPath);

    try {
      const absPath = path.join(this.rootDir, relPath);

      // Enforce the configured size cap before reading the whole file
      // into memory (config.maxFileSizeMB; skipped when unset/0).
      if (this.config.maxFileSizeMB) {
        const { size } = await fs.stat(absPath);
        if (size > this.config.maxFileSizeMB * 1024 * 1024) {
          return; // Too large — skip quietly.
        }
      }

      const content = await fs.readFile(absPath, 'utf8');

      // Current head content, reconstructed from the snapshot tree
      // (null when the file has never been snapshotted).
      const snapInfo = await readSnapHistory(this.rootDir, relPath);
      const lastContent = snapInfo ? snapInfo.currentContent : null;

      // If content matches lastContent we skip: either it is an exact
      // revert to the previous state, or a Restore just wrote the head
      // version back — in both cases no new snapshot is wanted.
      if (isMeaningfulChange(lastContent, content, this.config)) {
        await createSnapshot(absPath, this.rootDir, this.config);
      }

    } catch (err) {
      console.error(chalk.red(`Error processing ${relPath}:`), err);
    } finally {
      this.processing.delete(relPath);
    }
  }

  /**
   * Stop watching. Returns chokidar's close promise (undefined when the
   * watcher was never started) so callers may await a clean shutdown.
   */
  stop() {
    if (this.watcher) {
      return this.watcher.close();
    }
  }
}
|
|
74
|
+
|
|
75
|
+
// Export the class itself; callers instantiate with (rootDir, config).
module.exports = Watcher;
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
// Directory (relative to the project root) holding all autosnap state.
const CONFIG_DIR = '.auto-snap';
// User-editable configuration file inside CONFIG_DIR.
const CONFIG_FILE = 'config.json';
// Subdirectory of CONFIG_DIR where compressed .snap trees live.
const STORE_DIR = 'store';
|
|
7
|
+
|
|
8
|
+
// Built-in defaults; values from .auto-snap/config.json are merged on top.
const DEFAULT_CONFIG = {
  debounce: 10000, // ms of write inactivity before an event is processed (Rule 2); fed to chokidar's awaitWriteFinish
  minCharChange: 5, // Rule 1 — presumably consumed by isMeaningfulChange in diff.js; confirm
  minLineChange: 1, // Rule 1 — presumably consumed by isMeaningfulChange in diff.js; confirm
  similarityThreshold: 0.98, // Rule 3 — presumably consumed by diff.js; confirm semantics there
  ignoreWhitespace: true, // whether whitespace-only edits count as changes
  include: ['**/*'], // glob patterns to watch
  exclude: [ // glob patterns to skip (dotfiles are ignored separately by the watcher)
    'node_modules/**',
    '.git/**',
    'dist/**',
    'build/**',
    '.auto-snap/**',
    'package-lock.json',
    '.gitignore'
  ],
  maxFileSizeMB: 2, // size cap for snapshotted files, in MB
  retention: {
    days: 7, // keep snapshots for this many days
    maxSnapshots: 200 // cap on snapshots kept per file
  }
};
|
|
30
|
+
|
|
31
|
+
/**
 * Load the effective autosnap configuration for a project.
 *
 * Merges .auto-snap/config.json (when present) over DEFAULT_CONFIG.
 * The nested `retention` section is merged key-by-key so a partial user
 * override (e.g. `{ "retention": { "days": 3 } }`) keeps the remaining
 * defaults instead of dropping them.
 *
 * Always returns a fresh object so callers can safely mutate the result
 * without corrupting the shared DEFAULT_CONFIG.
 *
 * @param {string} [rootDir=process.cwd()] - Project root.
 * @returns {Promise<object>} The effective configuration.
 */
async function loadConfig(rootDir = process.cwd()) {
  const configPath = path.join(rootDir, CONFIG_DIR, CONFIG_FILE);
  if (await fs.pathExists(configPath)) {
    const userConfig = await fs.readJson(configPath);
    return {
      ...DEFAULT_CONFIG,
      ...userConfig,
      // Deep-merge the one nested section; a plain spread would replace
      // the whole retention object with the user's partial one.
      retention: { ...DEFAULT_CONFIG.retention, ...userConfig.retention }
    };
  }
  // Copy, never hand out the shared defaults object itself.
  return { ...DEFAULT_CONFIG };
}
|
|
39
|
+
|
|
40
|
+
/**
 * Initialize the .auto-snap directory layout for a project.
 *
 * Ensures the config directory and snapshot store exist, then writes
 * the default config file unless one is already present.
 *
 * @param {string} [rootDir=process.cwd()] - Project root.
 * @returns {Promise<boolean>} true if a new config file was written,
 *   false if one already existed.
 */
async function initConfig(rootDir = process.cwd()) {
  const configDir = path.join(rootDir, CONFIG_DIR);
  const configPath = path.join(configDir, CONFIG_FILE);

  await fs.ensureDir(configDir);
  await fs.ensureDir(path.join(configDir, STORE_DIR));

  const alreadyExists = await fs.pathExists(configPath);
  if (alreadyExists) {
    return false;
  }

  await fs.writeJson(configPath, DEFAULT_CONFIG, { spaces: 2 });
  return true;
}
|
|
52
|
+
|
|
53
|
+
// Public API; CONFIG_DIR/STORE_DIR are shared path constants used by
// other modules to locate the autosnap store.
module.exports = {
  loadConfig,
  initConfig,
  CONFIG_DIR,
  STORE_DIR
};
|