consync 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- consync/__init__.py +9 -0
- consync/backup.py +188 -0
- consync/cli.py +372 -0
- consync/config.py +200 -0
- consync/hooks.py +81 -0
- consync/lock.py +118 -0
- consync/logging_config.py +273 -0
- consync/models.py +104 -0
- consync/parsers/__init__.py +40 -0
- consync/parsers/c_header.py +96 -0
- consync/parsers/csv_parser.py +133 -0
- consync/parsers/json_parser.py +138 -0
- consync/parsers/toml_parser.py +74 -0
- consync/parsers/xlsx.py +116 -0
- consync/precision.py +148 -0
- consync/renderers/__init__.py +49 -0
- consync/renderers/c_header.py +222 -0
- consync/renderers/csharp.py +174 -0
- consync/renderers/csv_renderer.py +46 -0
- consync/renderers/json_renderer.py +71 -0
- consync/renderers/python_const.py +84 -0
- consync/renderers/rust_const.py +90 -0
- consync/renderers/verilog.py +89 -0
- consync/renderers/vhdl.py +94 -0
- consync/state.py +76 -0
- consync/sync.py +458 -0
- consync/validators.py +233 -0
- consync/watcher.py +176 -0
- consync-0.1.0.dist-info/METADATA +590 -0
- consync-0.1.0.dist-info/RECORD +33 -0
- consync-0.1.0.dist-info/WHEEL +4 -0
- consync-0.1.0.dist-info/entry_points.txt +2 -0
- consync-0.1.0.dist-info/licenses/LICENSE +21 -0
consync/__init__.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
"""consync — Bidirectional sync between spreadsheets and source code constants."""
|
|
2
|
+
|
|
3
|
+
__version__ = "0.1.0"
|
|
4
|
+
|
|
5
|
+
from consync.models import Constant, SyncDirection
|
|
6
|
+
from consync.config import load_config
|
|
7
|
+
from consync.sync import sync, check
|
|
8
|
+
|
|
9
|
+
__all__ = ["Constant", "SyncDirection", "load_config", "sync", "check", "__version__"]
|
consync/backup.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
"""Backup & Recovery — snapshot files before write, restore on demand.
|
|
2
|
+
|
|
3
|
+
Before every sync that writes a file, consync saves the previous version to
|
|
4
|
+
.consync/backups/<filename>.<timestamp>.bak
|
|
5
|
+
|
|
6
|
+
Recovery options:
|
|
7
|
+
consync recover # List available snapshots
|
|
8
|
+
consync recover --file out.h # List snapshots for a specific file
|
|
9
|
+
consync recover --file out.h --at 2026-05-05T08:45:12 # Restore exact timestamp
|
|
10
|
+
consync recover --file out.h --last # Restore most recent backup
|
|
11
|
+
|
|
12
|
+
Retention: keeps last 20 backups per file (configurable). Oldest auto-deleted.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
import logging
|
|
18
|
+
import shutil
|
|
19
|
+
from datetime import datetime, timezone
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger(__name__)

DEFAULT_BACKUP_DIR = ".consync/backups"
DEFAULT_MAX_BACKUPS_PER_FILE = 20


def backup_file(
    filepath: Path,
    backup_dir: Path | None = None,
    max_backups: int = DEFAULT_MAX_BACKUPS_PER_FILE,
) -> Path | None:
    """Create a timestamped backup of a file before overwriting.

    Args:
        filepath: The file to back up.
        backup_dir: Where to store backups (default: .consync/backups/ next
            to the file being backed up).
        max_backups: Max backups to keep per file (oldest trimmed).

    Returns:
        Path to the backup file, or None if file doesn't exist (nothing to back up).
    """
    if not filepath.exists():
        return None

    if backup_dir is None:
        # NOTE(review): default is relative to the *file's* directory, while
        # list_backups/recover_file default to the project root — callers
        # backing up files outside the root should pass backup_dir explicitly.
        backup_dir = filepath.parent / DEFAULT_BACKUP_DIR

    backup_dir.mkdir(parents=True, exist_ok=True)

    # Timestamp format: YYYYMMDD_HHMMSS (sortable, filesystem-safe).
    # Resolution is one second: two backups within the same second collide
    # and the later copy overwrites the earlier one.
    ts = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
    backup_name = f"{filepath.name}.{ts}.bak"
    backup_path = backup_dir / backup_name

    # copy2 preserves metadata (mtime etc.) alongside the content.
    shutil.copy2(filepath, backup_path)
    logger.debug("Backed up %s → %s", filepath.name, backup_path)

    # Enforce the retention policy so the backup dir doesn't grow unbounded.
    _trim_backups(backup_dir, filepath.name, max_backups)

    return backup_path


def _trim_backups(backup_dir: Path, filename: str, max_backups: int) -> None:
    """Keep only the most recent *max_backups* backups for *filename*.

    Bug fix: the glob pattern previously used the literal string "(unknown)"
    instead of the `filename` argument, so it never matched any backup files
    and retention was silently a no-op.
    """
    pattern = f"{filename}.*.bak"
    backups = sorted(backup_dir.glob(pattern))  # sorted by timestamp in name

    if len(backups) > max_backups:
        for old in backups[:-max_backups]:
            old.unlink(missing_ok=True)
            logger.debug("Trimmed old backup: %s", old.name)
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def list_backups(
    filepath: Path | None = None,
    backup_dir: Path | None = None,
    project_dir: Path | None = None,
) -> list[dict]:
    """List available backup snapshots, oldest first.

    Args:
        filepath: Filter to backups of this specific file (optional).
        backup_dir: Backup directory (default: .consync/backups/ in project_dir).
        project_dir: Project root (default: cwd).

    Returns:
        List of dicts: {"file": str, "timestamp": str, "ts_raw": str,
        "path": Path, "size": int}
    """
    root = project_dir if project_dir is not None else Path.cwd()
    directory = backup_dir if backup_dir is not None else root / DEFAULT_BACKUP_DIR
    if not directory.exists():
        return []

    glob_pat = "*.bak" if filepath is None else f"{filepath.name}.*.bak"

    snapshots: list[dict] = []
    for candidate in sorted(directory.glob(glob_pat)):
        # Backup names look like <original name>.<TIMESTAMP>.bak; split from
        # the right so dots inside the original name are preserved.
        pieces = candidate.name.rsplit(".", 2)
        if len(pieces) < 3:
            continue  # not one of our backups — ignore
        original, raw_ts = pieces[0], pieces[1]
        try:
            # Convert the raw YYYYMMDD_HHMMSS stamp back to UTC ISO form.
            parsed = datetime.strptime(raw_ts, "%Y%m%d_%H%M%S")
            stamp = parsed.replace(tzinfo=timezone.utc).isoformat(timespec="seconds")
        except ValueError:
            stamp = raw_ts  # unparseable — surface the raw stamp as-is
        snapshots.append(
            {
                "file": original,
                "timestamp": stamp,
                "ts_raw": raw_ts,
                "path": candidate,
                "size": candidate.stat().st_size,
            }
        )

    return snapshots
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def recover_file(
    filepath: Path,
    timestamp: str | None = None,
    last: bool = False,
    backup_dir: Path | None = None,
    project_dir: Path | None = None,
) -> Path | None:
    """Restore a file from a backup snapshot.

    Args:
        filepath: The file to restore.
        timestamp: Timestamp to restore to, either ISO ("2026-05-05T08:45:12")
            or raw backup-name form ("20260505_084512").
        last: If True, restore the most recent backup.
        backup_dir: Backup directory.
        project_dir: Project root.

    Returns:
        Path to the restored file, or None if no matching backup found.
    """
    backups = list_backups(
        filepath=filepath, backup_dir=backup_dir, project_dir=project_dir
    )

    if not backups:
        logger.warning("No backups found for %s", filepath.name)
        return None

    if last:
        chosen = backups[-1]  # list_backups sorts ascending, so last == newest
    elif timestamp:
        # Normalize the requested timestamp to bare digits (YYYYMMDDHHMMSS) so
        # both ISO and raw backup-name spellings can match. Bug fix: the
        # original stripped only ISO separators ("T", "-", ":"), so a raw
        # "YYYYMMDD_HHMMSS" argument kept its underscore and never matched
        # exactly — it always fell through to the "nearest" fallback.
        ts_normalized = (
            timestamp.replace("T", "").replace("-", "").replace(":", "").replace("_", "")[:14]
        )
        chosen = None
        for b in backups:
            raw_digits = b["ts_raw"].replace("_", "")
            # Same date, and the full requested stamp appears in the snapshot.
            if raw_digits.startswith(ts_normalized[:8]) and ts_normalized in raw_digits:
                chosen = b
                break
        if chosen is None:
            # Fallback: no exact match — use the most recent snapshot and warn.
            chosen = backups[-1]
            logger.warning(
                "Exact timestamp %s not found, using nearest: %s",
                timestamp, chosen["timestamp"],
            )
    else:
        # No timestamp specified — use most recent.
        chosen = backups[-1]

    # Back up the current file before restoring, so a recover can itself be
    # undone. NOTE(review): the safety copy always goes to the project-root
    # ".consync/backups" even when an explicit backup_dir was passed —
    # confirm that asymmetry is intended.
    if filepath.exists():
        safety_dir = (project_dir or Path.cwd()) / ".consync/backups"
        safety_dir.mkdir(parents=True, exist_ok=True)
        safety_ts = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
        safety_path = safety_dir / f"{filepath.name}.{safety_ts}_pre_recover.bak"
        shutil.copy2(filepath, safety_path)
        logger.info("Safety backup of current state: %s", safety_path.name)

    # Restore the chosen snapshot over the target file (metadata preserved).
    shutil.copy2(chosen["path"], filepath)
    logger.info("Restored %s to %s", filepath.name, chosen["timestamp"])

    return filepath
|
consync/cli.py
ADDED
|
@@ -0,0 +1,372 @@
|
|
|
1
|
+
"""CLI entry point — consync command-line interface.
|
|
2
|
+
|
|
3
|
+
Commands:
|
|
4
|
+
consync init Create a .consync.yaml template
|
|
5
|
+
consync sync Sync all mappings (auto-detect direction)
|
|
6
|
+
consync sync --from source Force direction
|
|
7
|
+
consync watch Watch files and auto-sync on changes
|
|
8
|
+
consync check CI mode — verify files are in sync (exit 1 if not)
|
|
9
|
+
consync install-hook Install git pre-commit hook
|
|
10
|
+
consync status Show current sync state
|
|
11
|
+
consync log Show recent audit log entries
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
|
|
16
|
+
import sys
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
|
|
19
|
+
import click
|
|
20
|
+
|
|
21
|
+
from consync import __version__
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@click.group()
@click.version_option(__version__, prog_name="consync")
@click.option("-v", "--verbose", is_flag=True, help="Show INFO-level details.")
@click.option("--debug", is_flag=True, help="Show DEBUG-level details (very verbose).")
@click.pass_context
def main(ctx, verbose: bool, debug: bool):
    """consync — Bidirectional sync between spreadsheets and source code constants."""
    from consync.logging_config import setup_logging

    # Stash the verbosity flags on the click context so subcommands can
    # read them, then configure logging once for the whole invocation.
    ctx.ensure_object(dict)
    ctx.obj.update(verbose=verbose, debug=debug)
    setup_logging(verbose=verbose, debug=debug)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@main.command()
@click.option("--path", default=".", help="Directory to create .consync.yaml in.")
def init(path: str):
    """Create a .consync.yaml configuration template.

    Refuses to overwrite an existing config and exits 1 so the user's
    customizations are never clobbered.
    """
    from consync.config import generate_default_config, DEFAULT_CONFIG_NAME

    target = Path(path) / DEFAULT_CONFIG_NAME
    if target.exists():
        click.echo(f"⚠️  {target} already exists. Delete it first to regenerate.")
        sys.exit(1)

    target.write_text(generate_default_config())
    click.echo(f"✅ Created {target}")
    # Plain strings: these two messages have no placeholders (the original
    # carried needless f-prefixes — ruff F541).
    click.echo("   Edit it to configure your source ↔ target mappings.")
    click.echo("   Then run: consync sync")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@main.command(name="sync")
|
|
56
|
+
@click.option("--from", "from_side", type=click.Choice(["source", "target"]), default=None,
|
|
57
|
+
help="Force sync direction.")
|
|
58
|
+
@click.option("--dry-run", is_flag=True, help="Show what would change without writing files.")
|
|
59
|
+
@click.option("--config", "config_path", default=None, help="Path to .consync.yaml.")
|
|
60
|
+
def sync_cmd(from_side: str | None, dry_run: bool, config_path: str | None):
|
|
61
|
+
"""Sync constants between source and target files."""
|
|
62
|
+
from consync.sync import sync, SyncResult
|
|
63
|
+
|
|
64
|
+
try:
|
|
65
|
+
reports = sync(config_path=config_path, force_direction=from_side, dry_run=dry_run)
|
|
66
|
+
except FileNotFoundError as e:
|
|
67
|
+
click.echo(f"❌ {e}")
|
|
68
|
+
sys.exit(1)
|
|
69
|
+
except ValueError as e:
|
|
70
|
+
click.echo(f"❌ Config error: {e}")
|
|
71
|
+
sys.exit(1)
|
|
72
|
+
|
|
73
|
+
has_errors = False
|
|
74
|
+
for r in reports:
|
|
75
|
+
icon = _result_icon(r.result)
|
|
76
|
+
click.echo(f"{icon} {r.source} ↔ {r.target}: {r.message}")
|
|
77
|
+
if r.result == SyncResult.ERROR:
|
|
78
|
+
has_errors = True
|
|
79
|
+
|
|
80
|
+
if has_errors:
|
|
81
|
+
sys.exit(1)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@main.command()
@click.option("--config", "config_path", default=None, help="Path to .consync.yaml.")
def check(config_path: str | None):
    """Verify all mappings are in sync (CI mode). Exits 1 if out of sync."""
    from consync.sync import check as check_sync, SyncResult

    try:
        reports = check_sync(config_path=config_path)
    except FileNotFoundError as e:
        click.echo(f"❌ {e}")
        sys.exit(1)

    all_ok = True
    for r in reports:
        icon = _result_icon(r.result)
        click.echo(f"{icon} {r.source} ↔ {r.target}: {r.message}")
        # Anything other than "already in sync" means CI should fail.
        if r.result != SyncResult.ALREADY_IN_SYNC:
            all_ok = False

    if all_ok:
        # Plain strings — the originals had pointless f-prefixes (ruff F541).
        click.echo("\n✅ All mappings in sync.")
    else:
        click.echo("\n❌ Out of sync! Run 'consync sync' to fix.")
        sys.exit(1)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
@main.command()
@click.option("--config", "config_path", default=None, help="Path to .consync.yaml.")
@click.option("--debounce", default=None, type=float, help="Debounce seconds (default from config).")
def watch(config_path: str | None, debounce: float | None):
    """Watch source/target files and auto-sync on changes.

    Blocks until interrupted; Ctrl-C exits cleanly with a goodbye message.
    """
    from consync.watcher import start_watcher

    try:
        start_watcher(config_path=config_path, debounce_override=debounce)
    except FileNotFoundError as exc:
        click.echo(f"❌ {exc}")
        sys.exit(1)
    except KeyboardInterrupt:
        click.echo("\n👋 Watcher stopped.")
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
@main.command(name="install-hook")
|
|
127
|
+
@click.option("--hook", type=click.Choice(["pre-commit", "pre-push"]), default="pre-commit",
|
|
128
|
+
help="Which git hook to install.")
|
|
129
|
+
def install_hook(hook: str):
|
|
130
|
+
"""Install a git hook that runs 'consync check' before commit/push."""
|
|
131
|
+
from consync.hooks import install_git_hook
|
|
132
|
+
|
|
133
|
+
try:
|
|
134
|
+
install_git_hook(hook)
|
|
135
|
+
except FileNotFoundError as e:
|
|
136
|
+
click.echo(f"❌ {e}")
|
|
137
|
+
sys.exit(1)
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
@main.command()
@click.option("--config", "config_path", default=None, help="Path to .consync.yaml.")
def status(config_path: str | None):
    """Show current sync state for all mappings.

    Prints the config summary, then per-mapping direction, precision and
    truncated source/target content hashes from the state file.
    """
    from consync.config import load_config
    from consync.state import SyncState

    try:
        cfg = load_config(config_path)
    except FileNotFoundError as exc:
        click.echo(f"❌ {exc}")
        sys.exit(1)

    # State file lives next to the config (or in cwd when using the default).
    config_dir = Path(config_path).parent if config_path else Path.cwd()
    state = SyncState(config_dir / cfg.state_file)

    click.echo(f"Config: {config_path or '.consync.yaml'}")
    click.echo(f"State file: {cfg.state_file}")
    click.echo(f"Mappings: {len(cfg.mappings)}")
    click.echo("")

    for mapping in cfg.mappings:
        key = state.mapping_key(mapping.source, mapping.target)
        # Unknown hashes (never synced) render as "unknown...".
        src_hash = state.get_hash(key, "source") or "unknown"
        tgt_hash = state.get_hash(key, "target") or "unknown"
        click.echo(f"  {mapping.source} ↔ {mapping.target}")
        click.echo(f"    Direction: {mapping.direction.value}")
        click.echo(f"    Precision: {mapping.precision} sig digits")
        click.echo(f"    Source hash: {src_hash[:8]}...")
        click.echo(f"    Target hash: {tgt_hash[:8]}...")
        click.echo("")
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def _result_icon(result) -> str:
    """Map a SyncResult to the emoji prefix used in CLI output."""
    from consync.sync import SyncResult

    icons = {
        SyncResult.SYNCED_SOURCE_TO_TARGET: "✅",
        SyncResult.SYNCED_TARGET_TO_SOURCE: "✅",
        SyncResult.ALREADY_IN_SYNC: "✔️ ",
        SyncResult.CONFLICT: "⚠️ ",
        SyncResult.SKIPPED: "⏭️ ",
        SyncResult.ERROR: "❌",
    }
    # Unknown results render as "?" rather than raising.
    return icons.get(result, "?")
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
@main.command(name="log")
|
|
186
|
+
@click.option("-n", "--lines", default=20, help="Number of recent entries to show.")
|
|
187
|
+
@click.option("--json", "as_json", is_flag=True, help="Output raw JSON lines.")
|
|
188
|
+
def show_log(lines: int, as_json: bool):
|
|
189
|
+
"""Show recent sync audit log entries.
|
|
190
|
+
|
|
191
|
+
Displays timestamp, user, direction, files, and synced values.
|
|
192
|
+
Reads from .consync.audit.jsonl in the current directory.
|
|
193
|
+
"""
|
|
194
|
+
from consync.logging_config import read_audit_log
|
|
195
|
+
|
|
196
|
+
entries = read_audit_log(last_n=lines)
|
|
197
|
+
if not entries:
|
|
198
|
+
click.echo("No audit log entries found. Run 'consync sync' first.")
|
|
199
|
+
return
|
|
200
|
+
|
|
201
|
+
for entry in entries:
|
|
202
|
+
if as_json:
|
|
203
|
+
import json
|
|
204
|
+
click.echo(json.dumps(entry, ensure_ascii=False))
|
|
205
|
+
continue
|
|
206
|
+
|
|
207
|
+
ts = entry.get("timestamp", "?")
|
|
208
|
+
user = entry.get("user", "?")
|
|
209
|
+
direction = entry.get("direction", "?")
|
|
210
|
+
source = entry.get("source", "?")
|
|
211
|
+
target = entry.get("target", "?")
|
|
212
|
+
count = entry.get("count", 0)
|
|
213
|
+
dry = " [DRY RUN]" if entry.get("dry_run") else ""
|
|
214
|
+
|
|
215
|
+
click.echo(f" {ts} {user} {direction} {source} ↔ {target} ({count} constants){dry}")
|
|
216
|
+
|
|
217
|
+
# Show constant values
|
|
218
|
+
constants = entry.get("constants", [])
|
|
219
|
+
for c in constants:
|
|
220
|
+
name = c.get("name", "?")
|
|
221
|
+
value = c.get("value", "?")
|
|
222
|
+
unit = c.get("unit", "")
|
|
223
|
+
unit_str = f" {unit}" if unit else ""
|
|
224
|
+
click.echo(f" {name} = {value}{unit_str}")
|
|
225
|
+
click.echo("")
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
@main.command(name="recover")
|
|
229
|
+
@click.option("--file", "filepath", default=None, help="File to recover (e.g., out.h).")
|
|
230
|
+
@click.option("--at", "timestamp", default=None, help="ISO timestamp to restore to.")
|
|
231
|
+
@click.option("--last", is_flag=True, help="Restore the most recent backup.")
|
|
232
|
+
@click.option("--list", "list_only", is_flag=True, help="List available backups without restoring.")
|
|
233
|
+
def recover_cmd(filepath: str | None, timestamp: str | None, last: bool, list_only: bool):
|
|
234
|
+
"""Recover a file from a previous backup snapshot.
|
|
235
|
+
|
|
236
|
+
Before every sync, consync saves the previous version of the target file.
|
|
237
|
+
Use this command to list available snapshots or restore one.
|
|
238
|
+
|
|
239
|
+
Examples:
|
|
240
|
+
consync recover --list
|
|
241
|
+
consync recover --file config.h --list
|
|
242
|
+
consync recover --file config.h --last
|
|
243
|
+
consync recover --file config.h --at 2026-05-05T08:45:12
|
|
244
|
+
"""
|
|
245
|
+
from consync.backup import list_backups, recover_file
|
|
246
|
+
|
|
247
|
+
project_dir = Path.cwd()
|
|
248
|
+
|
|
249
|
+
if list_only or (filepath is None and not last):
|
|
250
|
+
# List mode
|
|
251
|
+
file_path = Path(filepath) if filepath else None
|
|
252
|
+
backups = list_backups(filepath=file_path, project_dir=project_dir)
|
|
253
|
+
|
|
254
|
+
if not backups:
|
|
255
|
+
click.echo("No backups found. Backups are created automatically during sync.")
|
|
256
|
+
return
|
|
257
|
+
|
|
258
|
+
click.echo(f"Available backups ({len(backups)} snapshots):\n")
|
|
259
|
+
for b in backups:
|
|
260
|
+
size_kb = b["size"] / 1024
|
|
261
|
+
click.echo(f" {b['timestamp']} {b['file']:30s} {size_kb:.1f} KB")
|
|
262
|
+
click.echo(f"\nRestore with: consync recover --file <name> --at <timestamp>")
|
|
263
|
+
return
|
|
264
|
+
|
|
265
|
+
if filepath is None:
|
|
266
|
+
click.echo("❌ Specify --file to recover (or use --list to see available backups).")
|
|
267
|
+
sys.exit(1)
|
|
268
|
+
|
|
269
|
+
file_path = Path(filepath)
|
|
270
|
+
result = recover_file(file_path, timestamp=timestamp, last=last, project_dir=project_dir)
|
|
271
|
+
|
|
272
|
+
if result:
|
|
273
|
+
click.echo(f"✅ Restored {filepath}")
|
|
274
|
+
else:
|
|
275
|
+
click.echo(f"❌ No backup found for {filepath}. Use --list to see available snapshots.")
|
|
276
|
+
sys.exit(1)
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
@main.command(name="diff")
|
|
280
|
+
@click.option("--config", "config_path", default=None, help="Path to .consync.yaml.")
|
|
281
|
+
@click.option("--from", "from_side", type=click.Choice(["source", "target"]), default=None,
|
|
282
|
+
help="Force sync direction.")
|
|
283
|
+
@click.option("--color/--no-color", default=True, help="Colorize diff output.")
|
|
284
|
+
def diff_cmd(config_path: str | None, from_side: str | None, color: bool):
|
|
285
|
+
"""Preview what would change on next sync (unified diff).
|
|
286
|
+
|
|
287
|
+
Shows a unified diff for each mapping that would be modified,
|
|
288
|
+
without actually writing any files. Like --dry-run but with full diff.
|
|
289
|
+
"""
|
|
290
|
+
import difflib
|
|
291
|
+
import tempfile
|
|
292
|
+
from consync.config import load_config
|
|
293
|
+
from consync.sync import _config_dir, _resolve_path, _parse_file, _render_file, _determine_direction
|
|
294
|
+
from consync.state import SyncState, compute_hash
|
|
295
|
+
|
|
296
|
+
try:
|
|
297
|
+
cfg = load_config(config_path)
|
|
298
|
+
except FileNotFoundError as e:
|
|
299
|
+
click.echo(f"❌ {e}")
|
|
300
|
+
sys.exit(1)
|
|
301
|
+
|
|
302
|
+
config_dir = _config_dir(config_path)
|
|
303
|
+
state = SyncState(config_dir / cfg.state_file)
|
|
304
|
+
any_changes = False
|
|
305
|
+
|
|
306
|
+
for mapping in cfg.mappings:
|
|
307
|
+
source_path = _resolve_path(mapping.source, config_dir)
|
|
308
|
+
target_path = _resolve_path(mapping.target, config_dir)
|
|
309
|
+
key = state.mapping_key(mapping.source, mapping.target)
|
|
310
|
+
|
|
311
|
+
direction = _determine_direction(
|
|
312
|
+
mapping, source_path, target_path, state, key, cfg.on_conflict, from_side
|
|
313
|
+
)
|
|
314
|
+
|
|
315
|
+
if direction is None or direction == "conflict":
|
|
316
|
+
continue
|
|
317
|
+
|
|
318
|
+
# Determine what file would be written
|
|
319
|
+
if direction == "source":
|
|
320
|
+
constants = _parse_file(source_path, mapping.source_format)
|
|
321
|
+
dest_path = target_path
|
|
322
|
+
else:
|
|
323
|
+
constants = _parse_file(target_path, mapping.target_format)
|
|
324
|
+
dest_path = source_path
|
|
325
|
+
|
|
326
|
+
# Render to temp file to get the "new" content
|
|
327
|
+
with tempfile.NamedTemporaryFile(mode="w", suffix=dest_path.suffix, delete=False) as tmp:
|
|
328
|
+
tmp_path = Path(tmp.name)
|
|
329
|
+
|
|
330
|
+
try:
|
|
331
|
+
_render_file(constants, tmp_path, mapping.target_format if direction == "source" else mapping.source_format, mapping)
|
|
332
|
+
new_content = tmp_path.read_text().splitlines(keepends=True)
|
|
333
|
+
finally:
|
|
334
|
+
tmp_path.unlink(missing_ok=True)
|
|
335
|
+
|
|
336
|
+
# Get current content
|
|
337
|
+
if dest_path.exists():
|
|
338
|
+
old_content = dest_path.read_text().splitlines(keepends=True)
|
|
339
|
+
else:
|
|
340
|
+
old_content = []
|
|
341
|
+
|
|
342
|
+
# Generate unified diff
|
|
343
|
+
diff_lines = list(difflib.unified_diff(
|
|
344
|
+
old_content, new_content,
|
|
345
|
+
fromfile=f"a/{dest_path.name}",
|
|
346
|
+
tofile=f"b/{dest_path.name}",
|
|
347
|
+
lineterm="",
|
|
348
|
+
))
|
|
349
|
+
|
|
350
|
+
if diff_lines:
|
|
351
|
+
any_changes = True
|
|
352
|
+
click.echo(f"--- {mapping.source} → {mapping.target} ---")
|
|
353
|
+
for line in diff_lines:
|
|
354
|
+
if color:
|
|
355
|
+
if line.startswith("+") and not line.startswith("+++"):
|
|
356
|
+
click.echo(click.style(line, fg="green"))
|
|
357
|
+
elif line.startswith("-") and not line.startswith("---"):
|
|
358
|
+
click.echo(click.style(line, fg="red"))
|
|
359
|
+
elif line.startswith("@@"):
|
|
360
|
+
click.echo(click.style(line, fg="cyan"))
|
|
361
|
+
else:
|
|
362
|
+
click.echo(line)
|
|
363
|
+
else:
|
|
364
|
+
click.echo(line)
|
|
365
|
+
click.echo("")
|
|
366
|
+
|
|
367
|
+
if not any_changes:
|
|
368
|
+
click.echo("✔️ No changes — all mappings already in sync.")
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
if __name__ == "__main__":
|
|
372
|
+
main()
|