consync 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- consync/__init__.py +9 -0
- consync/backup.py +188 -0
- consync/cli.py +372 -0
- consync/config.py +200 -0
- consync/hooks.py +81 -0
- consync/lock.py +118 -0
- consync/logging_config.py +273 -0
- consync/models.py +104 -0
- consync/parsers/__init__.py +40 -0
- consync/parsers/c_header.py +96 -0
- consync/parsers/csv_parser.py +133 -0
- consync/parsers/json_parser.py +138 -0
- consync/parsers/toml_parser.py +74 -0
- consync/parsers/xlsx.py +116 -0
- consync/precision.py +148 -0
- consync/renderers/__init__.py +49 -0
- consync/renderers/c_header.py +222 -0
- consync/renderers/csharp.py +174 -0
- consync/renderers/csv_renderer.py +46 -0
- consync/renderers/json_renderer.py +71 -0
- consync/renderers/python_const.py +84 -0
- consync/renderers/rust_const.py +90 -0
- consync/renderers/verilog.py +89 -0
- consync/renderers/vhdl.py +94 -0
- consync/state.py +76 -0
- consync/sync.py +458 -0
- consync/validators.py +233 -0
- consync/watcher.py +176 -0
- consync-0.1.0.dist-info/METADATA +590 -0
- consync-0.1.0.dist-info/RECORD +33 -0
- consync-0.1.0.dist-info/WHEEL +4 -0
- consync-0.1.0.dist-info/entry_points.txt +2 -0
- consync-0.1.0.dist-info/licenses/LICENSE +21 -0
consync/config.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
"""Configuration loader for .consync.yaml files."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
import yaml
|
|
9
|
+
|
|
10
|
+
from consync.models import ConsyncConfig, MappingConfig, SyncDirection
|
|
11
|
+
|
|
12
|
+
# Conventional config filename discovered by find_config().
DEFAULT_CONFIG_NAME = ".consync.yaml"

# Format auto-detection from file extensions
# Maps a lowercase file suffix to the consync format identifier; consulted
# by detect_format() when a mapping omits an explicit format.
EXTENSION_TO_FORMAT: dict[str, str] = {
    ".xlsx": "xlsx",
    ".xls": "xlsx",
    ".csv": "csv",
    ".json": "json",
    ".toml": "toml",
    ".h": "c_header",
    ".hpp": "c_header",
    ".hh": "c_header",
    ".c": "c_header",
    ".cs": "csharp",
    ".py": "python",
    ".rs": "rust",
    ".v": "verilog",
    ".sv": "verilog",
    ".vhd": "vhdl",
    ".vhdl": "vhdl",
}
|
+
|
|
35
|
+
def find_config(start_dir: Path | None = None) -> Path | None:
    """Locate the nearest .consync.yaml by searching upward.

    Mirrors the discovery behavior of files like .gitignore or
    pyproject.toml: check start_dir first, then every ancestor.

    Args:
        start_dir: Directory to begin searching from (default: CWD).

    Returns:
        Path to the config file, or None when no ancestor has one.
    """
    base = (start_dir if start_dir is not None else Path.cwd()).resolve()

    for folder in (base, *base.parents):
        config_file = folder / DEFAULT_CONFIG_NAME
        if config_file.exists():
            return config_file

    return None
|
+
|
|
51
|
+
def detect_format(filepath: str) -> str:
    """Infer the consync format identifier from a file extension.

    Args:
        filepath: Path whose suffix determines the format.

    Returns:
        Format name (e.g., "csv", "c_header").

    Raises:
        ValueError: If the suffix is not a supported extension.
    """
    suffix = Path(filepath).suffix.lower()
    if suffix not in EXTENSION_TO_FORMAT:
        raise ValueError(
            f"Cannot auto-detect format for '{filepath}'. "
            f"Supported extensions: {', '.join(sorted(EXTENSION_TO_FORMAT.keys()))}. "
            f"Specify format explicitly in .consync.yaml."
        )
    return EXTENSION_TO_FORMAT[suffix]
|
+
|
|
64
|
+
def _parse_direction(raw: str) -> SyncDirection:
    """Parse a direction string from the YAML config.

    Accepts canonical names plus short aliases (s2t, t2s, bidirectional);
    case, surrounding whitespace, hyphens, and spaces are normalized.

    Raises:
        ValueError: If the string matches no known direction.
    """
    aliases = {
        "source_to_target": SyncDirection.SOURCE_TO_TARGET,
        "s2t": SyncDirection.SOURCE_TO_TARGET,
        "target_to_source": SyncDirection.TARGET_TO_SOURCE,
        "t2s": SyncDirection.TARGET_TO_SOURCE,
        "both": SyncDirection.BOTH,
        "bidirectional": SyncDirection.BOTH,
    }
    key = raw.lower().strip().replace("-", "_").replace(" ", "_")
    if key not in aliases:
        raise ValueError(
            f"Invalid direction '{raw}'. "
            f"Use: source_to_target, target_to_source, or both."
        )
    return aliases[key]
|
+
|
|
83
|
+
def _parse_mapping(raw: dict[str, Any], config_dir: Path) -> MappingConfig:
    """Parse a single mapping entry from YAML into a MappingConfig.

    Args:
        raw: One entry from the 'mappings' list.
        config_dir: Directory containing the config file (currently unused;
            kept for interface stability — TODO confirm intended use).

    Raises:
        ValueError: If 'source' or 'target' is missing, or a format
            cannot be auto-detected.
    """
    source = raw.get("source", "")
    if not source:
        raise ValueError("Each mapping must have a 'source' field.")

    target = raw.get("target", "")
    if not target:
        raise ValueError("Each mapping must have a 'target' field.")

    # Formats may be given under several key spellings; fall back to
    # extension-based detection when absent.
    source_format = (
        raw.get("source_format", "")
        or raw.get("format_source", "")
        or detect_format(source)
    )
    target_format = (
        raw.get("target_format", "")
        or raw.get("format_target", "")
        or raw.get("format", "")
        or detect_format(target)
    )

    return MappingConfig(
        source=source,
        target=target,
        source_format=source_format,
        target_format=target_format,
        direction=_parse_direction(raw.get("direction", "source_to_target")),
        precision=int(raw.get("precision", 17)),
        header_guard=raw.get("header_guard", ""),
        namespace=raw.get("namespace", ""),
        module_name=raw.get("module_name", ""),
        prefix=raw.get("prefix", ""),
        uppercase_names=raw.get("uppercase_names", True),
        output_style=raw.get("output_style", "const"),
        static_const=raw.get("static_const", False),
        typed_ints=raw.get("typed_ints", True),
        validators=raw.get("validators", {}),
    )
|
+
|
|
123
|
+
def load_config(config_path: Path | str | None = None) -> ConsyncConfig:
    """Load and validate a .consync.yaml configuration.

    Args:
        config_path: Explicit path to config file. If None, searches
            upward from CWD.

    Returns:
        Parsed ConsyncConfig.

    Raises:
        FileNotFoundError: If no config file found.
        ValueError: If config is invalid.
    """
    if config_path is not None:
        config_path = Path(config_path)
    else:
        located = find_config()
        if located is None:
            raise FileNotFoundError(
                f"No {DEFAULT_CONFIG_NAME} found. Run 'consync init' to create one."
            )
        config_path = located

    if not config_path.exists():
        raise FileNotFoundError(f"Config file not found: {config_path}")

    config_dir = config_path.parent

    with open(config_path) as fh:
        data = yaml.safe_load(fh) or {}

    if not isinstance(data, dict):
        raise ValueError(f"Invalid config: expected a YAML mapping, got {type(data).__name__}")

    entries = data.get("mappings", [])
    if not entries:
        raise ValueError("Config must have at least one entry in 'mappings'.")

    return ConsyncConfig(
        mappings=[_parse_mapping(entry, config_dir) for entry in entries],
        state_file=data.get("state_file", ".consync.state.json"),
        watch_debounce=float(data.get("watch_debounce", 2.0)),
        on_conflict=data.get("on_conflict", "source_wins"),
    )
|
+
|
|
172
|
+
def generate_default_config() -> str:
    """Generate a default .consync.yaml template for `consync init`.

    Returns:
        Commented YAML text ready to be written to disk; keys mirror the
        fields consumed by load_config() / _parse_mapping().
    """
    # The template itself carries the user-facing documentation as YAML comments.
    return """\
# consync configuration — https://github.com/naveenkumarbaskaran/consync
# Sync constants between spreadsheets and source code with full decimal precision.

mappings:
  - source: constants.xlsx        # Where constants are defined (spreadsheet)
    target: include/constants.h   # Generated code file
    direction: both               # source_to_target | target_to_source | both
    precision: 17                 # Significant digits (17 = full IEEE 754 double)
    header_guard: HW_CONSTANTS_H  # C header include guard

    # Optional:
    # prefix: ""                  # Prefix all constant names (e.g., "HW_")
    # uppercase_names: true       # Force UPPER_CASE names in output

  # Add more mappings as needed:
  # - source: parameters.xlsx
  #   target: src/params.v
  #   direction: source_to_target
  #   precision: 12
  #   module_name: design_params

# Global settings:
# state_file: .consync.state.json  # Track sync state (gitignore this)
# watch_debounce: 2.0              # Seconds to wait before re-syncing
# on_conflict: source_wins         # source_wins | target_wins | fail
"""
|
consync/hooks.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
"""Git hook installer — adds consync check to pre-commit or pre-push."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
import click
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# Shell body of the installed git hook. The {hook_type} and {git_cmd}
# placeholders are substituted via str.format() in install_git_hook().
# The script degrades gracefully when consync is not on PATH.
HOOK_TEMPLATE = """\
#!/usr/bin/env bash
# consync — auto-generated git hook
# Verifies constants are in sync before {hook_type}.
# To skip: git {git_cmd} --no-verify

set -euo pipefail

if command -v consync &> /dev/null; then
    echo "🔍 consync: checking sync state..."
    if ! consync check; then
        echo ""
        echo "❌ Constants out of sync! Run 'consync sync' first."
        exit 1
    fi
    echo "✅ consync: all constants in sync."
else
    echo "⚠️  consync not installed — skipping sync check."
    echo "   Install with: pip install consync"
fi
"""
|
32
|
+
|
|
33
|
+
def install_git_hook(hook_type: str = "pre-commit"):
    """Install a git hook that runs 'consync check'.

    Creates .git/hooks/<hook_type>, or appends the check to an existing
    hook script (skipping if a consync check is already present).

    Args:
        hook_type: "pre-commit" or "pre-push"

    Raises:
        FileNotFoundError: If not in a git repository.
    """
    git_dir = _find_git_dir()
    hooks_dir = git_dir / "hooks"
    hooks_dir.mkdir(exist_ok=True)

    hook_file = hooks_dir / hook_type
    git_cmd = "commit" if hook_type == "pre-commit" else "push"
    script = HOOK_TEMPLATE.format(hook_type=hook_type, git_cmd=git_cmd)

    if hook_file.exists():
        existing = hook_file.read_text()
        if "consync" in existing:
            click.echo(f"✔️ consync hook already installed in .git/hooks/{hook_type}")
            return

        # Append to existing hook.
        # Fix: drop the template's shebang line when appending — a second
        # '#!' line in the middle of a script is at best inert and at
        # worst misleading to readers.
        click.echo(f"⚠️ Existing {hook_type} hook found — appending consync check.")
        body = script.split("\n", 1)[1] if script.startswith("#!") else script
        with open(hook_file, "a") as f:
            f.write("\n\n# --- consync sync check ---\n")
            f.write(body)
    else:
        # Create new hook
        hook_file.write_text(script)

    # Hooks must be executable or git silently ignores them.
    hook_file.chmod(0o755)
    click.echo(f"✅ Installed consync check in .git/hooks/{hook_type}")
    click.echo(f"   Will run 'consync check' before every {git_cmd}.")
    click.echo(f"   Skip with: git {git_cmd} --no-verify")
|
+
|
|
72
|
+
def _find_git_dir() -> Path:
|
|
73
|
+
"""Find the .git directory by walking up from CWD."""
|
|
74
|
+
cwd = Path.cwd()
|
|
75
|
+
for directory in [cwd, *cwd.parents]:
|
|
76
|
+
git_dir = directory / ".git"
|
|
77
|
+
if git_dir.is_dir():
|
|
78
|
+
return git_dir
|
|
79
|
+
raise FileNotFoundError(
|
|
80
|
+
"Not a git repository. Run this command from within a git repo."
|
|
81
|
+
)
|
consync/lock.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
"""Lock file — advisory locking to prevent concurrent sync operations.
|
|
2
|
+
|
|
3
|
+
Uses a .consync.lock file in the project root during sync. If another
|
|
4
|
+
process is already syncing, the lock prevents corruption.
|
|
5
|
+
|
|
6
|
+
Features:
|
|
7
|
+
- Advisory lock (not system-level flock — works on all OSes)
|
|
8
|
+
- Stale lock detection (PID check — if process is dead, lock is reclaimed)
|
|
9
|
+
- Configurable timeout
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import json
|
|
15
|
+
import logging
|
|
16
|
+
import os
|
|
17
|
+
import time
|
|
18
|
+
from datetime import datetime, timezone
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
|
|
21
|
+
# Module logger; configured centrally (handlers attach to the 'consync' root).
logger = logging.getLogger(__name__)

# Name of the advisory lock file created in the project root.
LOCK_FILENAME = ".consync.lock"
LOCK_TIMEOUT_SECONDS = 30  # Max age before considering stale
|
27
|
+
class LockError(Exception):
    """Raised when a lock cannot be acquired."""
    # Note: the redundant `pass` after the docstring was removed — the
    # docstring alone is a sufficient class body.
|
+
|
|
32
|
+
class SyncLock:
    """Advisory lock file for preventing concurrent sync operations.

    Usage:
        with SyncLock(project_dir):
            # ... do sync ...
    """

    def __init__(self, project_dir: Path | None = None, timeout: float = LOCK_TIMEOUT_SECONDS):
        self.project_dir = project_dir or Path.cwd()
        self.lock_path = self.project_dir / LOCK_FILENAME
        self.timeout = timeout
        # True only while *this* instance holds the lock.
        self._acquired = False

    def acquire(self) -> None:
        """Acquire the lock. Raises LockError if already held by another live process."""
        if self.lock_path.exists():
            info = self._read_lock()
            if info and not self._is_stale(info):
                raise LockError(
                    f"Another consync process is running (PID {info.get('pid')}, "
                    f"started {info.get('created')}). "
                    f"If this is stale, delete {self.lock_path}"
                )
            if info:
                logger.warning(
                    "Removing stale lock (PID %s, created %s)",
                    info.get("pid"), info.get("created"),
                )
            # Stale or unreadable lock — reclaim it.
            self.lock_path.unlink(missing_ok=True)

        # Fix: os.uname() is POSIX-only and would crash on Windows despite
        # the module's "works on all OSes" claim; socket.gethostname() is
        # portable.
        import socket

        lock_info = {
            "pid": os.getpid(),
            "created": datetime.now(timezone.utc).isoformat(timespec="seconds"),
            "hostname": socket.gethostname(),
        }
        # Fix: O_CREAT|O_EXCL makes creation atomic, closing the race where
        # two processes both pass the exists() check above and both believe
        # they acquired the lock.
        try:
            fd = os.open(self.lock_path, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
        except FileExistsError:
            raise LockError(
                f"Another consync process acquired the lock first: {self.lock_path}"
            )
        with os.fdopen(fd, "w") as f:
            f.write(json.dumps(lock_info, indent=2))
        self._acquired = True
        logger.debug("Lock acquired: %s", self.lock_path)

    def release(self) -> None:
        """Release the lock (no-op unless this instance acquired it)."""
        if self._acquired and self.lock_path.exists():
            self.lock_path.unlink(missing_ok=True)
            self._acquired = False
            logger.debug("Lock released: %s", self.lock_path)

    def _read_lock(self) -> dict | None:
        """Read lock file contents; None if missing, unreadable, or corrupt."""
        try:
            return json.loads(self.lock_path.read_text())
        except (json.JSONDecodeError, OSError):
            return None

    def _is_stale(self, info: dict) -> bool:
        """Check if a lock is stale (owning process dead, or older than timeout)."""
        pid = info.get("pid")

        # Check if process is still alive.
        if pid:
            try:
                os.kill(pid, 0)  # Signal 0 = existence check, sends nothing
            except ProcessLookupError:
                return True  # Process doesn't exist
            except PermissionError:
                pass  # Process exists but we can't signal it

        # Check age against the configured timeout.
        created = info.get("created", "")
        if created:
            try:
                lock_time = datetime.fromisoformat(created)
                age = (datetime.now(timezone.utc) - lock_time).total_seconds()
                if age > self.timeout:
                    return True
            except ValueError:
                pass  # Unparseable timestamp — fall through, not stale by age

        return False

    def __enter__(self) -> "SyncLock":
        self.acquire()
        return self

    def __exit__(self, *_) -> None:
        self.release()
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
"""Logging configuration for consync.
|
|
2
|
+
|
|
3
|
+
Provides structured logging with:
|
|
4
|
+
- Console output (stderr) controlled by --verbose / --debug CLI flags
|
|
5
|
+
- File logging to .consync.log with rotation (default: 5MB × 3 backups)
|
|
6
|
+
- Audit log (.consync.audit.jsonl) — one JSON line per sync event with:
|
|
7
|
+
timestamp, user, direction, source, target, constants (names + values)
|
|
8
|
+
- Per-module loggers following `consync.*` hierarchy
|
|
9
|
+
|
|
10
|
+
Usage in any module:
|
|
11
|
+
import logging
|
|
12
|
+
logger = logging.getLogger(__name__)
|
|
13
|
+
logger.debug("Parsed %d constants from %s", count, filepath)
|
|
14
|
+
logger.info("Synced source → target: %d constants", count)
|
|
15
|
+
logger.warning("Both files changed — conflict detected")
|
|
16
|
+
|
|
17
|
+
CLI verbosity levels:
|
|
18
|
+
(default) WARNING+ to stderr, INFO+ to file
|
|
19
|
+
--verbose INFO+ to stderr, DEBUG+ to file
|
|
20
|
+
--debug DEBUG+ to stderr and file
|
|
21
|
+
|
|
22
|
+
Log file location: .consync.log in the working directory (or config dir).
|
|
23
|
+
Rotation: 5MB max, 3 backup files (.log.1, .log.2, .log.3).
|
|
24
|
+
|
|
25
|
+
Audit log: .consync.audit.jsonl — append-only, one JSON object per line.
|
|
26
|
+
Retention: configurable max_audit_lines (default 10000 lines ≈ 2-4MB).
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
from __future__ import annotations
|
|
30
|
+
|
|
31
|
+
import getpass
|
|
32
|
+
import json
|
|
33
|
+
import logging
|
|
34
|
+
import logging.handlers
|
|
35
|
+
from datetime import datetime, timezone
|
|
36
|
+
from pathlib import Path
|
|
37
|
+
from typing import Any
|
|
38
|
+
|
|
39
|
+
from consync.models import Constant
|
|
40
|
+
|
|
41
|
+
# Package-level logger — all modules use children of this
ROOT_LOGGER_NAME = "consync"

# Defaults
DEFAULT_LOG_FILE = ".consync.log"
DEFAULT_AUDIT_FILE = ".consync.audit.jsonl"
DEFAULT_MAX_BYTES = 5 * 1024 * 1024  # 5 MB
DEFAULT_BACKUP_COUNT = 3
DEFAULT_MAX_AUDIT_LINES = 10000
DEFAULT_FILE_LEVEL = logging.INFO
DEFAULT_CONSOLE_LEVEL = logging.WARNING

# Format strings
FILE_FORMAT = "%(asctime)s [%(levelname)-5s] %(name)s: %(message)s"
CONSOLE_FORMAT = "%(levelname)s: %(message)s"
# Debug format adds millisecond timestamps and call-site info for tracing.
DEBUG_FORMAT = "%(asctime)s.%(msecs)03d [%(levelname)-5s] %(name)s:%(funcName)s:%(lineno)d — %(message)s"
|
|
59
|
+
def setup_logging(
    *,
    verbose: bool = False,
    debug: bool = False,
    log_file: str | Path | None = DEFAULT_LOG_FILE,
    log_dir: Path | None = None,
    max_bytes: int = DEFAULT_MAX_BYTES,
    backup_count: int = DEFAULT_BACKUP_COUNT,
    quiet: bool = False,
) -> logging.Logger:
    """Configure the consync logging hierarchy.

    Args:
        verbose: Show INFO+ on console (default: WARNING+).
        debug: Show DEBUG+ on console AND file.
        log_file: Log filename (None = disable file logging).
        log_dir: Directory for log file (default: cwd).
        max_bytes: Max log file size before rotation.
        backup_count: Number of rotated backup files to keep.
        quiet: Suppress all console output (file logging still active).

    Returns:
        The root 'consync' logger.
    """
    root = logging.getLogger(ROOT_LOGGER_NAME)

    # Idempotent: a second call must not stack duplicate handlers.
    if root.handlers:
        return root

    # Capture everything at the logger; the handlers do the filtering.
    root.setLevel(logging.DEBUG)

    # Console handler (stderr is StreamHandler's default stream).
    if not quiet:
        stream = logging.StreamHandler()
        if debug:
            stream.setLevel(logging.DEBUG)
            stream.setFormatter(logging.Formatter(DEBUG_FORMAT, datefmt="%H:%M:%S"))
        else:
            stream.setLevel(logging.INFO if verbose else logging.WARNING)
            stream.setFormatter(logging.Formatter(CONSOLE_FORMAT))
        root.addHandler(stream)

    # Rotating file handler.
    if log_file:
        destination = (log_dir or Path.cwd()) / log_file
        try:
            destination.parent.mkdir(parents=True, exist_ok=True)
            rotating = logging.handlers.RotatingFileHandler(
                destination,
                maxBytes=max_bytes,
                backupCount=backup_count,
                encoding="utf-8",
            )
            rotating.setLevel(logging.DEBUG if debug else DEFAULT_FILE_LEVEL)
            rotating.setFormatter(
                logging.Formatter(FILE_FORMAT, datefmt="%Y-%m-%d %H:%M:%S")
            )
            root.addHandler(rotating)
        except OSError:
            # Can't write log file (read-only FS, permissions) — silently skip
            pass

    return root
126
|
+
|
|
127
|
+
def get_logger(name: str) -> logging.Logger:
    """Return a logger for the given dotted name.

    Conventionally called with __name__, yielding children of the
    'consync' hierarchy (e.g., 'consync.sync', 'consync.parsers.csv_parser')
    so they inherit the handlers installed by setup_logging().

    Args:
        name: Dotted logger name.

    Returns:
        Logger instance.
    """
    return logging.getLogger(name)
138
|
+
|
|
139
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
140
|
+
# Audit Log — structured JSON Lines file
|
|
141
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def _get_user() -> str:
|
|
145
|
+
"""Get current OS username for audit trail."""
|
|
146
|
+
try:
|
|
147
|
+
return getpass.getuser()
|
|
148
|
+
except Exception:
|
|
149
|
+
return "unknown"
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def _serialize_value(value: Any) -> Any:
|
|
153
|
+
"""Serialize a constant value for JSON audit output."""
|
|
154
|
+
if isinstance(value, list):
|
|
155
|
+
return value
|
|
156
|
+
if isinstance(value, float):
|
|
157
|
+
return value
|
|
158
|
+
if isinstance(value, int):
|
|
159
|
+
return value
|
|
160
|
+
return str(value)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def write_audit_entry(
    *,
    direction: str,
    source: str,
    target: str,
    constants: list[Constant],
    result: str,
    dry_run: bool = False,
    audit_file: Path | None = None,
    max_lines: int = DEFAULT_MAX_AUDIT_LINES,
) -> None:
    """Append one structured audit entry to .consync.audit.jsonl.

    Each line is a self-contained JSON object with timestamp, user,
    direction, source, target, result, count, dry_run, and the list of
    synced constants (name, serialized value, optional unit).

    Retention: trims oldest lines when the file exceeds max_lines.
    Write failures are logged as warnings and never fail the sync.
    """
    if audit_file is None:
        audit_file = Path.cwd() / DEFAULT_AUDIT_FILE

    constant_records = []
    for const in constants:
        record = {"name": const.name, "value": _serialize_value(const.value)}
        if const.unit:
            record["unit"] = const.unit
        constant_records.append(record)

    entry = {
        "timestamp": datetime.now(timezone.utc).isoformat(timespec="seconds"),
        "user": _get_user(),
        "direction": direction,
        "source": source,
        "target": target,
        "result": result,
        "count": len(constants),
        "dry_run": dry_run,
        "constants": constant_records,
    }

    try:
        audit_file.parent.mkdir(parents=True, exist_ok=True)
        # Append, then enforce retention.
        with audit_file.open("a", encoding="utf-8") as f:
            f.write(json.dumps(entry, ensure_ascii=False) + "\n")
        _trim_audit_file(audit_file, max_lines)
    except OSError:
        # Best-effort: never let audit failure break the sync itself.
        logging.getLogger(ROOT_LOGGER_NAME).warning(
            "Could not write audit log to %s", audit_file
        )
|
+
|
|
233
|
+
def _trim_audit_file(audit_file: Path, max_lines: int) -> None:
|
|
234
|
+
"""Trim audit file to max_lines, keeping newest entries."""
|
|
235
|
+
try:
|
|
236
|
+
lines = audit_file.read_text(encoding="utf-8").splitlines()
|
|
237
|
+
if len(lines) > max_lines:
|
|
238
|
+
# Keep the last max_lines entries
|
|
239
|
+
trimmed = lines[-max_lines:]
|
|
240
|
+
audit_file.write_text("\n".join(trimmed) + "\n", encoding="utf-8")
|
|
241
|
+
except OSError:
|
|
242
|
+
pass
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def read_audit_log(
    audit_file: Path | None = None,
    last_n: int = 20,
) -> list[dict]:
    """Read the last N entries from the audit log.

    Args:
        audit_file: Path to .consync.audit.jsonl (default: cwd).
        last_n: Number of recent entries to return.

    Returns:
        List of parsed audit entries (newest last). Unparseable lines
        are skipped; a missing file yields an empty list.
    """
    path = audit_file if audit_file is not None else Path.cwd() / DEFAULT_AUDIT_FILE
    if not path.exists():
        return []

    tail = path.read_text(encoding="utf-8").splitlines()[-last_n:]
    parsed: list[dict] = []
    for raw in tail:
        raw = raw.strip()
        if not raw:
            continue
        try:
            parsed.append(json.loads(raw))
        except json.JSONDecodeError:
            continue
    return parsed
|