hadsync 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hadsync/__init__.py +1 -0
- hadsync/cli.py +979 -0
- hadsync/config.py +123 -0
- hadsync/converter.py +73 -0
- hadsync/entities.py +134 -0
- hadsync/ha_rest.py +41 -0
- hadsync/ha_ws.py +144 -0
- hadsync/output.py +20 -0
- hadsync/schema.py +138 -0
- hadsync/state.py +57 -0
- hadsync/validator.py +168 -0
- hadsync/watcher.py +119 -0
- hadsync-0.2.2.dist-info/METADATA +403 -0
- hadsync-0.2.2.dist-info/RECORD +17 -0
- hadsync-0.2.2.dist-info/WHEEL +4 -0
- hadsync-0.2.2.dist-info/entry_points.txt +2 -0
- hadsync-0.2.2.dist-info/licenses/LICENSE +21 -0
hadsync/cli.py
ADDED
|
@@ -0,0 +1,979 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import os
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Optional
|
|
8
|
+
|
|
9
|
+
import typer
|
|
10
|
+
from typing_extensions import Annotated
|
|
11
|
+
|
|
12
|
+
from hadsync import __version__
|
|
13
|
+
import hadsync.output as output
|
|
14
|
+
from hadsync.config import (
|
|
15
|
+
CONFIG_FILENAME, WORKSPACE_ENV_VAR, ConfigError, discover_config, load_config, save_config,
|
|
16
|
+
)
|
|
17
|
+
from hadsync.ha_rest import HARestError, get_ha_info
|
|
18
|
+
from hadsync.ha_ws import HAAuthError, HAWebSocketClient, HAWebSocketError
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass
class _State:
    """Process-wide CLI options captured by the root callback.

    A single module-level instance (``_state``) is populated in ``_main``
    and read by the individual commands.
    """

    # Explicit --config path; None means "discover .hadsync.yaml automatically".
    config_path: Optional[Path] = None
    # --dry-run: report intended actions without executing them.
    dry_run: bool = False
    # --verbose / -v
    verbose: bool = False
    # --quiet / -q: errors only.
    quiet: bool = False
    # --json-output: machine-readable results where supported.
    json_output: bool = False
    # --yes / -y: skip interactive confirmation prompts.
    yes: bool = False
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# Shared option state, filled in by the root callback before any command runs.
_state = _State()

# Root CLI application.
app = typer.Typer(
    name="hadsync",
    help="Home Assistant Dashboard Sync — manage Lovelace dashboards as code.",
    no_args_is_help=True,
    add_completion=False,
)
# Sub-command groups: `hadsync entities ...` and `hadsync config ...`.
entities_app = typer.Typer(help="Entity cache management.", no_args_is_help=True)
config_app = typer.Typer(help="Configuration management.", no_args_is_help=True)
app.add_typer(entities_app, name="entities")
app.add_typer(config_app, name="config")
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _version_callback(value: bool) -> None:
|
|
46
|
+
if value:
|
|
47
|
+
typer.echo(f"hadsync {__version__}")
|
|
48
|
+
raise typer.Exit()
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@app.callback()
def _main(
    version: Annotated[
        Optional[bool],
        typer.Option("--version", callback=_version_callback, is_eager=True, help="Show version and exit."),
    ] = None,
    config: Annotated[Optional[str], typer.Option("--config", help="Path to .hadsync.yaml.")] = None,
    dry_run: Annotated[bool, typer.Option("--dry-run", help="Show what would happen; do not execute.")] = False,
    verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Increase output verbosity.")] = False,
    quiet: Annotated[bool, typer.Option("--quiet", "-q", help="Suppress all output except errors.")] = False,
    json_output: Annotated[bool, typer.Option("--json-output", help="Output results as JSON.")] = False,
    yes: Annotated[bool, typer.Option("--yes", "-y", help="Skip confirmation prompts.")] = False,
) -> None:
    # Root callback: runs before every sub-command and copies the global
    # options into the module-level _state so commands can read them.
    # --version is handled eagerly by _version_callback and never reaches here.
    _state.config_path = Path(config) if config else None
    _state.dry_run = dry_run
    _state.verbose = verbose
    _state.quiet = quiet
    _state.json_output = json_output
    _state.yes = yes
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _not_implemented(name: str) -> None:
    """Warn that command *name* is a stub, then exit with status 1."""
    message = f"'{name}' is not yet implemented."
    output.warn(message)
    raise typer.Exit(1)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
@app.command()
def init() -> None:
    """Interactive setup: HA URL, token env var, workspace directory."""
    # Refuse to silently clobber an existing config in the current directory.
    existing = discover_config()
    if existing is not None and existing.parent == Path.cwd():
        if not typer.confirm(f"Config already exists at {existing}. Overwrite?", default=False):
            raise typer.Exit()

    ha_url = typer.prompt("Home Assistant URL", default="http://homeassistant.local:8123").rstrip("/")

    output.info("Enter the NAME of the environment variable that holds your HA long-lived token.")
    output.info("Example: if you run export HA_TOKEN=eyJ... then enter HA_TOKEN")
    while True:
        token_var = typer.prompt("Token environment variable name", default="HA_TOKEN").strip()
        # Catch the common mistake of pasting the token value instead of the var name
        # (HA long-lived tokens are dotted JWTs starting with "eyJ").
        if "." in token_var or token_var.startswith("eyJ") or len(token_var) > 64:
            output.error(
                "That looks like a token value, not a variable name. "
                "Enter just the name, e.g. HA_TOKEN"
            )
            continue
        # Short-circuit keeps token_var[0] safe: an empty string fails isalnum() first.
        if not token_var.replace("_", "").isalnum() or token_var[0].isdigit():
            output.error("Variable name must contain only letters, digits, and underscores, and not start with a digit.")
            continue
        break

    workspace_str = typer.prompt(
        f"Local workspace directory\n (or set {WORKSPACE_ENV_VAR} env var to override at runtime)",
        default=".",
    )

    # Best-effort connection test; only possible when the env var is already set.
    token = os.environ.get(token_var)
    if token is None:
        output.warn(f"${token_var} is not set — skipping connection test.")
        output.info(f"Set it before using hadsync: export {token_var}=<long-lived-token>")
    else:
        try:
            ha_info = get_ha_info(ha_url, token)
            output.success(f"Connection verified (HA {ha_info.get('version', 'unknown')})")
        except HARestError as e:
            output.error(str(e))
            raise typer.Exit(1)

    # Write the config with a ${VAR} placeholder, never the token itself.
    config_path = Path.cwd() / CONFIG_FILENAME
    raw: dict = {
        "ha_url": ha_url,
        "ha_token": f"${{{token_var}}}",
        "workspace": workspace_str,
        "pull": {"refresh_entities": True, "dashboards": "all"},
        "push": {"require_validation": True, "confirm": True},
        "validation": {"warn_on_unknown_entities": True, "entity_cache_max_age_days": 7},
    }
    save_config(raw, config_path)
    output.success(f"Config saved to {config_path}")

    workspace = Path(workspace_str)
    if not workspace.is_absolute():
        workspace = Path.cwd() / workspace
    workspace.mkdir(parents=True, exist_ok=True)
    output.success(f"Workspace ready: {workspace}")

    # Keep hadsync's generated state/cache files out of version control.
    gitignore = workspace / ".gitignore"
    entries = [".hadsync-state.json", ".ha-entities.json"]
    gitignore_existed = gitignore.exists()
    existing_text = gitignore.read_text() if gitignore_existed else ""
    existing_lines: set[str] = set(existing_text.splitlines())
    new_entries = [e for e in entries if e not in existing_lines]
    if new_entries:
        with gitignore.open("a") as f:
            # Fix: if the existing file's last line has no trailing newline,
            # appending directly would fuse the first new entry onto it.
            if existing_text and not existing_text.endswith("\n"):
                f.write("\n")
            f.write("\n".join(new_entries) + "\n")
        # Fix: report "updated" whenever the file already existed — the old
        # check used the (possibly empty) line set, so an existing empty
        # .gitignore was misreported as "created".
        verb = "updated" if gitignore_existed else "created"
        output.success(f".gitignore {verb} in {workspace_str}/")
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
@app.command("list")
|
|
152
|
+
def list_dashboards() -> None:
|
|
153
|
+
"""List all dashboards on the HA instance."""
|
|
154
|
+
asyncio.run(_list_async())
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
async def _list_async() -> None:
    """Fetch the panel registry over the websocket API and render it as a table."""
    from rich.table import Table

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as e:
        output.error(str(e))
        raise typer.Exit(1)

    try:
        async with HAWebSocketClient(cfg.ha_url, cfg.ha_token) as client:
            panels = await client.get_panels()
    except HAAuthError as e:
        output.error(f"Authentication failed: {e}")
        raise typer.Exit(1)
    except HAWebSocketError as e:
        output.error(f"Connection error: {e}")
        raise typer.Exit(1)

    dash_table = Table(title=f"Dashboards on {cfg.ha_url}")
    for header, opts in (
        ("ID", {"style": "bold cyan", "no_wrap": True}),
        ("Title", {}),
        ("URL Path", {"style": "dim"}),
        ("Mode", {"style": "dim"}),
    ):
        dash_table.add_column(header, **opts)

    # Rows sorted by panel title (missing titles sort first as "").
    ordered = sorted(panels.items(), key=lambda item: item[1].get("title", ""))
    for key, info in ordered:
        slug = info.get("url_path", key)
        label = info.get("title") or slug
        storage_mode = (info.get("config") or {}).get("mode", "storage")
        dash_table.add_row(slug, label, f"/{slug}", storage_mode)

    output.console.print()
    output.console.print(dash_table)
    output.console.print(f"\n[dim]{len(panels)} dashboard(s) found[/dim]")
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
@app.command()
def pull(
    dashboard_id: Annotated[Optional[str], typer.Argument(help="Dashboard ID to pull (omit for all).")] = None,
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Skip entity cache refresh.")] = False,
) -> None:
    """Pull dashboards from HA to local YAML."""
    # Thin sync wrapper around the async implementation.
    asyncio.run(_pull_async(dashboard_id, no_cache))
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
async def _pull_async(dashboard_id: Optional[str], no_cache: bool) -> None:
    """Fetch dashboard configs from HA, write them as local YAML, and record pull state.

    When *dashboard_id* is None, targets come from the config's ``pull.dashboards``
    setting ("all" or an explicit list). Strategy dashboards are skipped (read-only).
    Afterwards the entity cache is refreshed unless disabled via --no-cache or config.
    """
    # Deferred imports keep CLI startup fast for commands that don't need these.
    from hadsync.converter import (
        LOVELACE_FILENAME, config_hash, config_to_yaml_file, count_cards,
        is_strategy_dashboard, normalize,
    )
    from hadsync.state import record_pull
    from hadsync.ha_rest import HARestError, get_entity_states
    from hadsync.entities import write_entity_cache

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as e:
        output.error(str(e))
        raise typer.Exit(1)

    try:
        async with HAWebSocketClient(cfg.ha_url, cfg.ha_token) as client:
            panels = await client.get_panels()

            # Resolve target dashboards
            if dashboard_id:
                # Panels are keyed by internal key; match on the public url_path.
                url_paths = {v.get("url_path", k) for k, v in panels.items()}
                if dashboard_id not in url_paths:
                    output.error(
                        f"Dashboard '{dashboard_id}' not found. "
                        "Run 'hadsync list' to see available dashboards."
                    )
                    raise typer.Exit(1)
                targets = {k: v for k, v in panels.items() if v.get("url_path", k) == dashboard_id}
            elif cfg.pull.dashboards == "all":
                targets = panels
            else:
                # Explicit allowlist from config; anything else means "none".
                wanted = set(cfg.pull.dashboards) if isinstance(cfg.pull.dashboards, list) else set()
                targets = {k: v for k, v in panels.items() if v.get("url_path", k) in wanted}

            pulled, skipped = 0, 0
            for panel_key, panel in sorted(targets.items(), key=lambda x: x[1].get("title", "")):
                url_path = panel.get("url_path", panel_key)
                title = panel.get("title", url_path)
                try:
                    config = await client.get_dashboard_config(url_path)
                except Exception as e:
                    # Best-effort per dashboard: one failed fetch doesn't abort the run.
                    output.warn(f"Skipped {url_path} — fetch failed: {e}")
                    skipped += 1
                    continue

                if is_strategy_dashboard(config):
                    output.warn(f"Skipped {url_path} ({title}) — strategy dashboard is read-only")
                    skipped += 1
                    continue

                yaml_path = cfg.workspace / url_path / LOVELACE_FILENAME
                config_to_yaml_file(config, yaml_path)
                # Store the hash of the *normalized* config so `hadsync diff`
                # can later tell whether HA changed since this pull.
                record_pull(
                    cfg.workspace, url_path,
                    ha_config_hash=config_hash(normalize(config)),
                )

                n_views, n_cards = count_cards(config)
                # Prefer a cwd-relative path in output when possible.
                try:
                    display = yaml_path.relative_to(Path.cwd())
                except ValueError:
                    display = yaml_path
                output.success(f"Pulled {url_path} → {display} ({n_views} views, {n_cards} cards)")
                pulled += 1

    except HAAuthError as e:
        output.error(f"Authentication failed: {e}")
        raise typer.Exit(1)
    except HAWebSocketError as e:
        output.error(f"Connection error: {e}")
        raise typer.Exit(1)

    # Entity cache refresh
    if not no_cache and cfg.pull.refresh_entities:
        try:
            states = get_entity_states(cfg.ha_url, cfg.ha_token)
            count = write_entity_cache(cfg.workspace, states)
            output.success(f"Entity cache refreshed → .ha-entities.json ({count} entities)")
        except HARestError as e:
            # Cache refresh is auxiliary; a failure here is not fatal.
            output.warn(f"Entity cache refresh failed: {e}")

    if pulled or skipped:
        parts = []
        if pulled:
            parts.append(f"{pulled} pulled")
        if skipped:
            parts.append(f"{skipped} skipped")
        output.console.print(f"\n[dim]{', '.join(parts)}[/dim]")
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
@app.command()
def push(
    dashboard_id: Annotated[Optional[str], typer.Argument(help="Dashboard ID to push (omit for all).")] = None,
    skip_validation: Annotated[
        bool, typer.Option("--skip-validation", help="Skip pre-push validation (not recommended).")
    ] = False,
) -> None:
    """Push local YAML dashboards to HA."""
    # Thin sync wrapper around the async implementation.
    asyncio.run(_push_async(dashboard_id, skip_validation))
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
async def _push_async(dashboard_id: Optional[str], skip_validation: bool) -> None:
    """Validate, summarize, confirm, and push local dashboard YAML to HA.

    Per dashboard: run validation (unless --skip-validation), load and normalize
    the local YAML, fetch and normalize the current HA config, skip no-ops,
    show a views/cards summary with destructive-change warnings, honor
    --dry-run and confirmation, then save to HA and record the push.

    NOTE(review): cfg.push.require_validation is written by `init` but never
    consulted here — only the --skip-validation flag gates validation; confirm
    against the intended config contract.
    """
    from rich.table import Table
    from hadsync.converter import LOVELACE_FILENAME, count_cards, normalize, yaml_file_to_config
    from hadsync.validator import Severity, has_errors, validate
    from hadsync.state import record_push

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as e:
        output.error(str(e))
        raise typer.Exit(1)

    try:
        async with HAWebSocketClient(cfg.ha_url, cfg.ha_token) as client:
            panels = await client.get_panels()
            panel_by_path = {v.get("url_path", k): v for k, v in panels.items()}

            # Resolve targets from local workspace or explicit arg
            if dashboard_id:
                targets = [dashboard_id]
            else:
                # Only dashboards that exist in HA *and* have a local file.
                targets = sorted(
                    url_path for url_path in panel_by_path
                    if (cfg.workspace / url_path / LOVELACE_FILENAME).exists()
                )

            if not targets:
                output.warn("No local dashboard files found. Run 'hadsync pull' first.")
                raise typer.Exit(0)

            pushed = skipped = 0

            for url_path in targets:
                title = (panel_by_path.get(url_path) or {}).get("title", url_path)
                output.console.print(f"\n[bold cyan]{url_path}[/bold cyan] ({title})")

                yaml_path = cfg.workspace / url_path / LOVELACE_FILENAME

                # Can only be missing when dashboard_id was given explicitly.
                if not yaml_path.exists():
                    output.warn(f" No local file — run 'hadsync pull {url_path}' first.")
                    skipped += 1
                    continue

                if url_path not in panel_by_path:
                    output.error(f" Dashboard not found in HA — cannot push.")
                    skipped += 1
                    continue

                # --- Validation (Phase 1 + 2 + 3) ---
                if not skip_validation:
                    from hadsync.validator import validate_entities, validate_schema
                    issues = validate(yaml_path)
                    issues += validate_entities(
                        yaml_path, cfg.workspace,
                        warn_on_unknown=cfg.validation.warn_on_unknown_entities,
                        max_age_days=cfg.validation.entity_cache_max_age_days,
                    )
                    issues += validate_schema(yaml_path, cfg.validation.custom_card_types)
                    for issue in issues:
                        fn = output.error if issue.severity == Severity.ERROR else output.warn
                        fn(f" {issue}")
                    # Warnings are reported but do not block; errors do.
                    if has_errors(issues):
                        output.error(" Blocked by validation errors.")
                        skipped += 1
                        continue

                # --- Load local config and normalize ---
                try:
                    local_raw = yaml_file_to_config(yaml_path)
                except Exception as e:
                    output.error(f" Cannot read local file: {e}")
                    skipped += 1
                    continue
                local_config = normalize(local_raw)

                # --- Fetch current HA state ---
                try:
                    ha_config = normalize(await client.get_dashboard_config(url_path))
                except Exception as e:
                    output.error(f" Cannot fetch current HA state: {e}")
                    skipped += 1
                    continue

                # --- No-op guard ---
                # Both sides normalized, so equality means nothing to send.
                if local_config == ha_config:
                    output.info(" Already up to date — no changes to push.")
                    skipped += 1
                    continue

                # --- Change summary ---
                ha_views, ha_cards = count_cards(ha_config)
                local_views, local_cards = count_cards(local_config)

                summary = Table(show_header=True, box=None, padding=(0, 2))
                summary.add_column("", style="dim")
                summary.add_column("Views", justify="right")
                summary.add_column("Cards", justify="right")
                summary.add_row("Current HA", str(ha_views), str(ha_cards))
                summary.add_row("Local (to push)", str(local_views), str(local_cards))
                output.console.print(summary)

                # Flag destructive pushes explicitly before confirmation.
                if local_views < ha_views:
                    output.warn(f" ⚠ Will REMOVE {ha_views - local_views} view(s) from HA.")
                if local_cards < ha_cards:
                    output.warn(f" ⚠ Will REMOVE {ha_cards - local_cards} card(s) from HA.")
                # Safety rail: pushing an empty config over a populated dashboard
                # is almost always a mistake; require --skip-validation to force.
                if local_views == 0 and ha_views > 0 and not skip_validation:
                    output.error(
                        " Refusing to push: local config has 0 views but HA has "
                        f"{ha_views}. This would wipe the dashboard. "
                        "Use --skip-validation to override."
                    )
                    skipped += 1
                    continue

                # --- Dry run ---
                # Note: dry-run dashboards count neither as pushed nor skipped.
                if _state.dry_run:
                    output.info(f" [dry-run] Would push {local_views} views, {local_cards} cards — not sent.")
                    continue

                # --- Confirm ---
                if cfg.push.confirm and not _state.yes:
                    confirmed = typer.confirm(f" Push '{url_path}' to HA?", default=False)
                    if not confirmed:
                        output.info(" Skipped.")
                        skipped += 1
                        continue

                # --- Push ---
                await client.save_dashboard_config(url_path, local_config)
                record_push(cfg.workspace, url_path)
                output.success(f" Pushed ({local_views} views, {local_cards} cards)")
                pushed += 1

    except HAAuthError as e:
        output.error(f"Authentication failed: {e}")
        raise typer.Exit(1)
    except HAWebSocketError as e:
        output.error(f"Connection error: {e}")
        raise typer.Exit(1)

    output.console.print(f"\n[dim]{pushed} pushed, {skipped} skipped[/dim]")
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
@app.command()
def diff(
    dashboard_id: Annotated[Optional[str], typer.Argument(help="Dashboard ID to diff (omit for all).")] = None,
    show: Annotated[bool, typer.Option("--show", help="Print unified diff.")] = False,
) -> None:
    """Compare local YAML vs current HA state."""
    # Thin sync wrapper around the async implementation.
    asyncio.run(_diff_async(dashboard_id, show))
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
def _view_key(view: dict, idx: int) -> str:
|
|
457
|
+
return view.get("path") or view.get("title") or f"view_{idx}"
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
def _print_view_diff(ha_config: dict, local_config: dict) -> None:
    """Print a view-by-view change summary between HA and local configs."""
    # Views keyed by path/title/index (see _view_key); key collisions would
    # silently merge views, but paths are unique in practice — TODO confirm.
    ha_views = {_view_key(v, i): v for i, v in enumerate(ha_config.get("views", []))}
    local_views = {_view_key(v, i): v for i, v in enumerate(local_config.get("views", []))}

    from hadsync.converter import count_cards

    all_keys = list(dict.fromkeys(list(ha_views) + list(local_views)))  # preserve order
    for key in all_keys:
        in_ha = key in ha_views
        in_local = key in local_views
        if in_ha and not in_local:
            output.console.print(f" [red] - {key}[/red] (removed)")
        elif in_local and not in_ha:
            output.console.print(f" [green] + {key}[/green] (added)")
        elif ha_views[key] != local_views[key]:
            # NOTE(review): count_cards is called on a single *view* dict here,
            # while other call sites pass a whole dashboard config — verify
            # count_cards supports both shapes.
            _, ha_c = count_cards(ha_views[key])
            _, lc = count_cards(local_views[key])
            delta = f"{lc - ha_c:+d} cards" if ha_c != lc else "content changed"
            output.console.print(f" [yellow] ~ {key}[/yellow] ({delta})")
|
|
480
|
+
|
|
481
|
+
|
|
482
|
+
async def _diff_async(dashboard_id: Optional[str], show: bool) -> None:
    """Compare local dashboard YAML against live HA state and classify drift.

    For each target dashboard: normalize both sides, short-circuit when in
    sync, then classify who changed since the last recorded pull (local file
    mtime vs. pull timestamp; current HA hash vs. stored pull-time hash) and
    print an actionable verdict. With --show, also print a color-coded
    unified diff of the YAML, truncated to 200 lines.
    """
    import difflib
    from datetime import datetime, timezone
    from io import StringIO
    from ruamel.yaml import YAML as _YAML
    from hadsync.converter import (
        LOVELACE_FILENAME, config_hash, count_cards, normalize, yaml_file_to_config,
    )
    from hadsync.state import get_dashboard_state

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as e:
        output.error(str(e))
        raise typer.Exit(1)

    if dashboard_id:
        targets = [dashboard_id]
    else:
        if not cfg.workspace.exists():
            output.warn("Workspace directory does not exist. Run 'hadsync pull' first.")
            raise typer.Exit(1)
        # Every workspace subdirectory containing a lovelace file is a target.
        targets = sorted(
            d.name for d in cfg.workspace.iterdir()
            if d.is_dir() and (d / LOVELACE_FILENAME).exists()
        )

    if not targets:
        output.warn("No local dashboard files found. Run 'hadsync pull' first.")
        raise typer.Exit(0)

    def _fmt_pull_ts(ts: str) -> str:
        # Render an ISO timestamp as "YYYY-MM-DD HH:MM (Nm/h/d ago)";
        # fall back to a raw prefix if parsing fails.
        try:
            dt = datetime.fromisoformat(ts)
            if dt.tzinfo is None:
                # Naive timestamps are assumed UTC — TODO confirm writer side.
                dt = dt.replace(tzinfo=timezone.utc)
            delta = datetime.now(timezone.utc) - dt
            mins = int(delta.total_seconds() / 60)
            if mins < 60:
                age = f"{mins}m ago"
            elif mins < 1440:
                age = f"{mins // 60}h ago"
            else:
                age = f"{mins // 1440}d ago"
            return f"{dt.strftime('%Y-%m-%d %H:%M')} ({age})"
        except Exception:
            return ts[:16]

    try:
        async with HAWebSocketClient(cfg.ha_url, cfg.ha_token) as client:
            changed = clean = 0

            for url_path in targets:
                yaml_path = cfg.workspace / url_path / LOVELACE_FILENAME
                output.console.print(f"\n[bold cyan]{url_path}[/bold cyan]")

                if not yaml_path.exists():
                    output.warn(" No local file.")
                    continue

                try:
                    local_config = normalize(yaml_file_to_config(yaml_path))
                except Exception as e:
                    output.error(f" Cannot read local file: {e}")
                    continue

                try:
                    ha_config = normalize(await client.get_dashboard_config(url_path))
                except Exception as e:
                    output.error(f" Cannot fetch from HA: {e}")
                    continue

                # --- Pull context ---
                ds = get_dashboard_state(cfg.workspace, url_path)
                last_pull = ds.get("last_pull")
                stored_hash = ds.get("ha_config_hash")

                if last_pull:
                    output.console.print(f" [dim]Last pull: {_fmt_pull_ts(last_pull)}[/dim]")

                # --- In-sync fast path ---
                if local_config == ha_config:
                    output.success(" In sync — local matches HA.")
                    clean += 1
                    continue

                changed += 1

                # --- Conflict classification ---
                # Local modified: file mtime > last_pull timestamp
                local_modified = False
                if last_pull and yaml_path.exists():
                    try:
                        pull_dt = datetime.fromisoformat(last_pull)
                        if pull_dt.tzinfo is None:
                            pull_dt = pull_dt.replace(tzinfo=timezone.utc)
                        mtime = datetime.fromtimestamp(
                            yaml_path.stat().st_mtime, tz=timezone.utc
                        )
                        local_modified = mtime > pull_dt
                    except Exception:
                        # Unparseable state: fall back to "not locally modified".
                        pass

                # HA modified: current hash differs from stored pull-time hash
                ha_modified = (
                    stored_hash is not None and config_hash(ha_config) != stored_hash
                )

                # --- Change summary ---
                ha_views, ha_cards = count_cards(ha_config)
                local_views, local_cards = count_cards(local_config)

                ha_tag = (
                    " [yellow]← changed since pull[/yellow]" if ha_modified
                    else " [dim](unchanged since pull)[/dim]" if stored_hash
                    else ""
                )
                local_tag = (
                    " [yellow]← modified since pull[/yellow]" if local_modified
                    else " [dim](clean since pull)[/dim]" if last_pull
                    else ""
                )

                output.console.print(f" HA: {ha_views} views, {ha_cards} cards{ha_tag}")
                output.console.print(f" Local: {local_views} views, {local_cards} cards{local_tag}")
                _print_view_diff(ha_config, local_config)

                # --- Verdict ---
                if local_modified and ha_modified:
                    output.error(
                        " CONFLICT — both sides changed since last pull."
                    )
                    output.console.print(
                        f" [dim] hadsync push {url_path}[/dim]"
                        " — overwrite HA with local [dim](discards HA edits)[/dim]"
                    )
                    output.console.print(
                        f" [dim] hadsync pull {url_path}[/dim]"
                        " — overwrite local with HA [dim](discards local edits)[/dim]"
                    )
                elif ha_modified:
                    output.warn(
                        f" HA changed since last pull — run "
                        f"'hadsync pull {url_path}' to update local."
                    )
                elif local_modified:
                    output.warn(
                        f" Local modified since pull — run "
                        f"'hadsync push {url_path}' to apply to HA."
                    )
                else:
                    # Sides differ but neither is provably newer (e.g. no pull state).
                    output.warn(
                        " Local differs from HA — "
                        "run 'hadsync push' to apply or 'hadsync pull' to discard."
                    )

                # --- Unified diff (--show) ---
                if show:
                    _yaml = _YAML()
                    _yaml.default_flow_style = False
                    # Very wide line limit avoids ruamel wrapping long card options.
                    _yaml.width = 4096
                    ha_buf, local_buf = StringIO(), StringIO()
                    _yaml.dump(ha_config, ha_buf)
                    _yaml.dump(local_config, local_buf)
                    diff_lines = list(difflib.unified_diff(
                        ha_buf.getvalue().splitlines(keepends=True),
                        local_buf.getvalue().splitlines(keepends=True),
                        fromfile=f"ha/{url_path}",
                        tofile=f"local/{url_path}",
                        lineterm="",
                    ))
                    output.console.print()
                    # Colorize by diff-line prefix; cap output at 200 lines.
                    for line in diff_lines[:200]:
                        if line.startswith("+++") or line.startswith("---"):
                            output.console.print(f"[bold]{line}[/bold]", highlight=False)
                        elif line.startswith("+"):
                            output.console.print(f"[green]{line}[/green]", highlight=False)
                        elif line.startswith("-"):
                            output.console.print(f"[red]{line}[/red]", highlight=False)
                        elif line.startswith("@@"):
                            output.console.print(f"[cyan]{line}[/cyan]", highlight=False)
                        else:
                            output.console.print(line, highlight=False)
                    if len(diff_lines) > 200:
                        output.warn(
                            f" ... {len(diff_lines) - 200} more lines (diff truncated to 200)"
                        )

    except HAAuthError as e:
        output.error(f"Authentication failed: {e}")
        raise typer.Exit(1)
    except HAWebSocketError as e:
        output.error(f"Connection error: {e}")
        raise typer.Exit(1)

    output.console.print(f"\n[dim]{changed} changed, {clean} unchanged[/dim]")
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
@app.command()
def validate(
    dashboard_id: Annotated[Optional[str], typer.Argument(help="Dashboard ID to validate (omit for all).")] = None,
) -> None:
    """Validate local YAML files without pushing.

    Runs structural validation, entity-reference validation, and schema
    validation over each target dashboard's local YAML file.  With
    ``--json-output`` the collected issues are printed as a single JSON
    document on stdout; otherwise a human-readable per-dashboard report
    is rendered.  Exits with status 1 when any error-level issue is
    found, so the command can gate pushes in scripts/CI.
    """
    # Local imports keep CLI startup fast; validator/converter are only
    # needed by this command.  (The previously imported `has_errors` was
    # unused and has been dropped.)
    from hadsync.converter import LOVELACE_FILENAME
    from hadsync.validator import Severity, validate as _validate
    from hadsync.validator import validate_entities, validate_schema
    import json as _json

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as e:
        output.error(str(e))
        raise typer.Exit(1)

    def _count(issues: list, severity) -> int:
        """Number of issues in *issues* with the given severity."""
        return sum(1 for i in issues if i.severity == severity)

    if dashboard_id:
        targets = [dashboard_id]
    else:
        if not cfg.workspace.exists():
            output.warn("Workspace directory does not exist. Run 'hadsync pull' first.")
            raise typer.Exit(1)
        # Every workspace subdirectory containing a dashboard YAML file.
        targets = sorted(
            d.name for d in cfg.workspace.iterdir()
            if d.is_dir() and (d / LOVELACE_FILENAME).exists()
        )

    if not targets:
        output.warn("No local dashboard files found. Run 'hadsync pull' first.")
        raise typer.Exit(0)

    # Collect all issues before output so --json-output can emit them atomically
    all_issues: dict[str, list] = {}
    totals: dict[str, int] = {"errors": 0, "warnings": 0}

    for url_path in targets:
        yaml_path = cfg.workspace / url_path / LOVELACE_FILENAME
        issues = _validate(yaml_path)
        issues += validate_entities(
            yaml_path, cfg.workspace,
            warn_on_unknown=cfg.validation.warn_on_unknown_entities,
            max_age_days=cfg.validation.entity_cache_max_age_days,
        )
        issues += validate_schema(yaml_path, cfg.validation.custom_card_types)
        all_issues[url_path] = issues
        totals["errors"] += _count(issues, Severity.ERROR)
        totals["warnings"] += _count(issues, Severity.WARN)

    total_errors = totals["errors"]
    total_warnings = totals["warnings"]

    if _state.json_output:
        result = {
            "dashboards": {
                url_path: {
                    "file": str(cfg.workspace / url_path / LOVELACE_FILENAME),
                    "passed": not any(i.severity == Severity.ERROR for i in issues),
                    "issues": [
                        {"severity": i.severity.value, "message": i.message, "line": i.line}
                        for i in issues
                    ],
                }
                for url_path, issues in all_issues.items()
            },
            "total_errors": total_errors,
            "total_warnings": total_warnings,
        }
        # Plain print (not the rich console) so stdout stays machine-parseable.
        print(_json.dumps(result))
        raise typer.Exit(1 if total_errors else 0)

    # Human-readable report: one PASS/FAIL/WARN line per dashboard,
    # followed by each individual issue.
    for url_path, issues in all_issues.items():
        n_err = _count(issues, Severity.ERROR)
        n_warn = _count(issues, Severity.WARN)

        if not issues:
            label = "[green]PASS[/green]"
        elif n_err:
            label = f"[red]FAIL ({n_err} error(s))[/red]"
        else:
            label = f"[yellow]WARN ({n_warn} warning(s))[/yellow]"

        output.console.print(f" {label} {url_path}")
        for issue in issues:
            fn = output.error if issue.severity == Severity.ERROR else output.warn
            fn(f" {issue}")

    output.console.print()
    if total_errors:
        output.error(f"{total_errors} error(s), {total_warnings} warning(s) — not safe to push")
        raise typer.Exit(1)
    elif total_warnings:
        output.warn(f"0 errors, {total_warnings} warning(s) — review before pushing")
    else:
        output.success(f"All {len(targets)} dashboard(s) passed")
|
|
772
|
+
|
|
773
|
+
|
|
774
|
+
@app.command()
def watch(
    dashboard_id: Annotated[Optional[str], typer.Argument(help="Dashboard ID to watch (omit for all).")] = None,
    auto_push: Annotated[bool, typer.Option("--auto-push", help="Push to HA automatically after validation passes.")] = False,
) -> None:
    """Monitor local YAML files and validate on every save (Phase 1+2+3).

    With --auto-push: pushes to HA automatically after validation passes.
    """
    # Imported lazily so the watcher machinery is only loaded for this command.
    from hadsync.watcher import run_watch

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as exc:
        output.error(str(exc))
        raise typer.Exit(1)

    # A missing workspace means there is nothing to watch — bail out early.
    if not cfg.workspace.exists():
        output.error(f"Workspace does not exist: {cfg.workspace}")
        raise typer.Exit(1)

    # Delegate the watch loop; `filter_id=None` watches every dashboard.
    run_watch(cfg, auto_push=auto_push, filter_id=dashboard_id)
|
|
796
|
+
|
|
797
|
+
|
|
798
|
+
@app.command()
def status() -> None:
    """Show sync status for all local dashboards."""
    from datetime import datetime, timezone
    from rich.table import Table
    from hadsync.converter import LOVELACE_FILENAME
    from hadsync.state import get_all_states

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as exc:
        output.error(str(exc))
        raise typer.Exit(1)

    states = get_all_states(cfg.workspace)

    # Dashboards present on disk (directories containing a lovelace file).
    local_dirs: set[str] = set()
    if cfg.workspace.exists():
        local_dirs = {
            entry.name
            for entry in cfg.workspace.iterdir()
            if entry.is_dir() and (entry / LOVELACE_FILENAME).exists()
        }

    # Union of everything tracked in state and everything found locally.
    all_dashboards = sorted(local_dirs | set(states.keys()))

    if not all_dashboards:
        output.warn("No local dashboards found. Run 'hadsync pull' first.")
        raise typer.Exit(0)

    def _timestamp_cell(ts: Optional[str]) -> str:
        """Render an ISO timestamp for the table; dim dash when absent."""
        if not ts:
            return "[dim]—[/dim]"
        try:
            return datetime.fromisoformat(ts).strftime("%Y-%m-%d %H:%M")
        except Exception:
            # Unparseable timestamp: show the raw prefix instead of crashing.
            return ts[:16]

    def _local_cell(url_path: str, last_pull: Optional[str]) -> str:
        """Classify the local YAML file relative to the last pull time."""
        yaml_path = cfg.workspace / url_path / LOVELACE_FILENAME
        if not yaml_path.exists():
            return "[red]no file[/red]"
        if not last_pull:
            return "[yellow]never pulled[/yellow]"
        try:
            pulled_at = datetime.fromisoformat(last_pull)
            # Treat naive pull timestamps as UTC so they compare cleanly.
            if pulled_at.tzinfo is None:
                pulled_at = pulled_at.replace(tzinfo=timezone.utc)
            modified_at = datetime.fromtimestamp(yaml_path.stat().st_mtime, tz=timezone.utc)
            if modified_at > pulled_at:
                return "[yellow]modified[/yellow]"
            return "[green]clean[/green]"
        except Exception:
            return "[dim]unknown[/dim]"

    table = Table(show_header=True, header_style="bold")
    table.add_column("Dashboard", style="bold cyan", no_wrap=True)
    table.add_column("Last Pull")
    table.add_column("Last Push")
    table.add_column("Local")

    for url_path in all_dashboards:
        record = states.get(url_path, {})
        table.add_row(
            url_path,
            _timestamp_cell(record.get("last_pull")),
            _timestamp_cell(record.get("last_push")),
            _local_cell(url_path, record.get("last_pull")),
        )

    output.console.print()
    output.console.print(table)
    output.console.print(f"\n[dim]Workspace: {cfg.workspace}[/dim]")
|
|
868
|
+
|
|
869
|
+
|
|
870
|
+
@entities_app.command("refresh")
def entities_refresh() -> None:
    """Refresh the local entity cache from HA /api/states."""
    from hadsync.entities import write_entity_cache
    from hadsync.ha_rest import HARestError, get_entity_states

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as exc:
        output.error(str(exc))
        raise typer.Exit(1)

    try:
        output.info("Fetching entity states from HA...")
        entity_states = get_entity_states(cfg.ha_url, cfg.ha_token)
        count = write_entity_cache(cfg.workspace, entity_states)
        # NOTE(review): the filename below is hard-coded in this message;
        # presumably it matches what write_entity_cache writes — confirm.
        output.success(
            f"Entity cache refreshed — {count} entities → {cfg.workspace / '.ha-entities.json'}"
        )
    except HARestError as exc:
        output.error(str(exc))
        raise typer.Exit(1)
|
|
890
|
+
|
|
891
|
+
|
|
892
|
+
@entities_app.command("list")
def entities_list(
    filter_term: Annotated[Optional[str], typer.Argument(help="Filter by domain or name.")] = None,
) -> None:
    """List cached entities, optionally filtered by domain or name."""
    from rich.table import Table
    from hadsync.entities import cache_age_days, load_entity_cache, search_entities

    try:
        cfg, _ = load_config(_state.config_path)
    except ConfigError as exc:
        output.error(str(exc))
        raise typer.Exit(1)

    # An empty cache means refresh has never run — nothing to list.
    cache = load_entity_cache(cfg.workspace)
    total = len(cache.get("entities", {}))
    if total == 0:
        output.warn("Entity cache is empty. Run 'hadsync entities refresh' first.")
        raise typer.Exit(1)

    matched = search_entities(cfg.workspace, filter_term or "")
    if not matched:
        output.warn(f"No entities matching '{filter_term}'.")
        raise typer.Exit(0)

    table = Table(show_header=True, header_style="bold")
    table.add_column("Entity ID", style="bold cyan", no_wrap=True)
    table.add_column("Friendly Name")
    table.add_column("Domain", style="dim")

    for entity_id, info in sorted(matched.items()):
        friendly = info.get("friendly_name") or "—"
        # Fall back to the entity_id prefix if the cache lacks a domain field.
        domain = info.get("domain", entity_id.split(".")[0])
        table.add_row(entity_id, friendly, domain)

    age = cache_age_days(cfg.workspace)
    if age is not None:
        age_str = f"{age:.0f}d old"
    else:
        age_str = "unknown age"

    if filter_term:
        filter_note = f" matching '{filter_term}'"
    else:
        filter_note = f" of {total} total"

    output.console.print()
    output.console.print(table)
    output.console.print(f"\n[dim]{len(matched)} entities{filter_note} — cache {age_str}[/dim]")
|
|
936
|
+
|
|
937
|
+
|
|
938
|
+
@config_app.command("show")
def config_show() -> None:
    """Print resolved config (token masked).

    Shows where the config was loaded from, each resolved setting, and
    whether the workspace path came from the environment or the config
    file.  The HA token is masked via ``cfg.masked_token()``.
    """
    from rich.table import Table

    try:
        cfg, path = load_config(_state.config_path)
    except ConfigError as e:
        output.error(str(e))
        raise typer.Exit(1)

    # `os` is already imported at module level — no need for a local
    # `import os as _os`.  Note whether the workspace was overridden via
    # the environment so users know which source won.
    ws_source = f"({WORKSPACE_ENV_VAR} env var)" if os.environ.get(WORKSPACE_ENV_VAR) else "(config)"

    # Two-column key/value layout, no borders.
    table = Table(show_header=False, box=None, padding=(0, 2))
    table.add_column(style="bold cyan")
    table.add_column()
    for key, val in [
        ("ha_url", cfg.ha_url),
        ("ha_token", cfg.masked_token()),
        ("workspace", f"{cfg.workspace} {ws_source}"),
        ("pull.refresh_entities", str(cfg.pull.refresh_entities).lower()),
        ("pull.dashboards", str(cfg.pull.dashboards)),
        ("push.require_validation", str(cfg.push.require_validation).lower()),
        ("push.confirm", str(cfg.push.confirm).lower()),
        ("validation.warn_on_unknown_entities", str(cfg.validation.warn_on_unknown_entities).lower()),
        ("validation.entity_cache_max_age_days", f"{cfg.validation.entity_cache_max_age_days} days"),
    ]:
        table.add_row(key, val)

    output.console.print(f"\n[bold]Config:[/bold] {path}\n")
    output.console.print(table)
    output.console.print()
|
|
971
|
+
|
|
972
|
+
|
|
973
|
+
@config_app.command("set")
|
|
974
|
+
def config_set(
|
|
975
|
+
key: Annotated[str, typer.Argument(help="Config key (e.g. ha_url, pull.refresh_entities).")],
|
|
976
|
+
value: Annotated[str, typer.Argument(help="New value.")],
|
|
977
|
+
) -> None:
|
|
978
|
+
"""Set a config value."""
|
|
979
|
+
_not_implemented("config set")
|