hadsync 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hadsync/__init__.py +1 -0
- hadsync/cli.py +979 -0
- hadsync/config.py +123 -0
- hadsync/converter.py +73 -0
- hadsync/entities.py +134 -0
- hadsync/ha_rest.py +41 -0
- hadsync/ha_ws.py +144 -0
- hadsync/output.py +20 -0
- hadsync/schema.py +138 -0
- hadsync/state.py +57 -0
- hadsync/validator.py +168 -0
- hadsync/watcher.py +119 -0
- hadsync-0.2.2.dist-info/METADATA +403 -0
- hadsync-0.2.2.dist-info/RECORD +17 -0
- hadsync-0.2.2.dist-info/WHEEL +4 -0
- hadsync-0.2.2.dist-info/entry_points.txt +2 -0
- hadsync-0.2.2.dist-info/licenses/LICENSE +21 -0
hadsync/config.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Optional, Union
|
|
7
|
+
|
|
8
|
+
from pydantic import BaseModel, field_validator, ValidationError
|
|
9
|
+
from ruamel.yaml import YAML
|
|
10
|
+
|
|
11
|
+
CONFIG_FILENAME = ".hadsync.yaml"
|
|
12
|
+
WORKSPACE_ENV_VAR = "HADSYNC_WORKSPACE"
|
|
13
|
+
_ENV_RE = re.compile(r"^\$\{([A-Za-z_][A-Za-z0-9_]*)\}$")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ConfigError(Exception):
    """Raised for any configuration problem: missing file, unparsable YAML,
    non-mapping document, or schema validation failure."""
    pass
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class PullSettings(BaseModel):
    """Options governing 'pull' behavior (read from .hadsync.yaml)."""

    # Whether to refresh the local entity cache as part of a pull.
    refresh_entities: bool = True
    # "all", or an explicit list of dashboard identifiers to pull.
    dashboards: Union[str, list[str]] = "all"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class PushSettings(BaseModel):
    """Options governing 'push' behavior (read from .hadsync.yaml)."""

    # Whether validation must be run/passing before a push (enforced by the
    # caller — presumably the CLI; confirm against cli.py).
    require_validation: bool = True
    # Whether to ask for interactive confirmation before overwriting HA state.
    confirm: bool = True
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class ValidationSettings(BaseModel):
    """Options governing dashboard validation."""

    # Warn when a referenced entity id is absent from the local entity cache.
    warn_on_unknown_entities: bool = True
    # Entity caches older than this many days are considered stale.
    entity_cache_max_age_days: int = 7
    custom_card_types: list[str] = []  # prefixes treated as valid beyond custom:
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class Config(BaseModel):
    """Validated .hadsync.yaml settings.

    ha_token may be either a literal token or a "${VAR_NAME}" reference that
    is resolved from the environment at validation time.
    """

    ha_url: str
    ha_token: str
    # Relative paths are resolved against the config file's directory by
    # load_config(); the default "." therefore resolves to the CWD.
    workspace: Path = Path(".")
    pull: PullSettings = PullSettings()
    push: PushSettings = PushSettings()
    validation: ValidationSettings = ValidationSettings()

    @field_validator("ha_token", mode="before")
    @classmethod
    def resolve_token(cls, v: str) -> str:
        # Expand a "${VAR}" reference via _ENV_RE; any other string is taken
        # verbatim. Raises if the referenced variable is unset.
        m = _ENV_RE.match(str(v))
        if m:
            var_name = m.group(1)
            token = os.environ.get(var_name)
            if token is None:
                raise ValueError(f"Environment variable '{var_name}' is not set")
            return token
        return v

    @field_validator("ha_url")
    @classmethod
    def normalize_url(cls, v: str) -> str:
        # Strip trailing slashes so later f"{ha_url}/api/..." joins are clean.
        return v.rstrip("/")

    def masked_token(self) -> str:
        """Return the token redacted for display (first and last 4 chars)."""
        t = self.ha_token
        if len(t) <= 8:
            return "****"
        return f"{t[:4]}...{t[-4:]}"
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
# Round-trip YAML handler for reading/writing .hadsync.yaml; preserve_quotes
# keeps user quoting intact across a load/save cycle.
_yaml = YAML()
_yaml.preserve_quotes = True
_yaml.default_flow_style = False
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def discover_config(start: Optional[Path] = None) -> Optional[Path]:
    """Locate the nearest .hadsync.yaml.

    Searches *start* (default: CWD) and every ancestor directory up to the
    filesystem root, then falls back to the user's home directory. Returns
    the config path, or None when no config exists anywhere.
    """
    base = Path(start or Path.cwd()).resolve()
    # base plus all of its ancestors, innermost first.
    for directory in (base, *base.parents):
        candidate = directory / CONFIG_FILENAME
        if candidate.exists():
            return candidate
    fallback = Path.home() / CONFIG_FILENAME
    if fallback.exists():
        return fallback
    return None
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def load_config(path: Optional[Path] = None) -> tuple[Config, Path]:
    """Load and validate the hadsync config.

    Returns (validated Config with an absolute workspace, path it was loaded
    from). Raises ConfigError for a missing file, unparsable YAML, a
    non-mapping document, or a schema violation.
    """
    config_path = path or discover_config()
    if config_path is None:
        raise ConfigError("No .hadsync.yaml found. Run 'hadsync init' to create one.")
    if not config_path.exists():
        raise ConfigError(f"Config file not found: {config_path}")
    try:
        data = _yaml.load(config_path)
    except Exception as e:
        raise ConfigError(f"Failed to parse {config_path}: {e}") from e
    if not isinstance(data, dict):
        raise ConfigError(f"{config_path} is not a valid YAML mapping.")
    try:
        cfg = Config.model_validate(data)
    except ValidationError as e:
        raise ConfigError(f"Invalid config in {config_path}:\n{e}") from e

    # Workspace resolution priority:
    # 1. HADSYNC_WORKSPACE env var
    # 2. workspace in config (relative → resolved against config file's directory)
    # 3. default (.) → resolves to CWD
    env_ws = os.environ.get(WORKSPACE_ENV_VAR)
    if env_ws:
        workspace = Path(env_ws).expanduser().resolve()
    elif not cfg.workspace.is_absolute():
        workspace = (config_path.parent / cfg.workspace).resolve()
    else:
        workspace = cfg.workspace.resolve()

    # model_copy keeps the already-validated fields while swapping in the
    # resolved absolute workspace.
    cfg = cfg.model_copy(update={"workspace": workspace})
    return cfg, config_path
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def save_config(raw: dict, path: Path) -> None:
    """Serialize *raw* to *path* as YAML, creating parent directories as needed."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w") as handle:
        _yaml.dump(raw, handle)
|
hadsync/converter.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import hashlib
|
|
4
|
+
import json as _json
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from ruamel.yaml import YAML
|
|
8
|
+
|
|
9
|
+
LOVELACE_FILENAME = "lovelace.yaml"

# Shared round-trip YAML emitter for dashboard files.
_yaml = YAML()
_yaml.default_flow_style = False
_yaml.width = 4096  # prevent wrapping long strings (markdown cards etc.)
# FIX: the previous code assigned `_yaml.best_sequence_indent` and
# `_yaml.best_map_indent`, which are attributes of ruamel's internal Emitter,
# not of the YAML facade — those assignments were silently ignored.
# YAML.indent() is the supported way to configure indentation.
_yaml.indent(mapping=2, sequence=2, offset=0)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def is_strategy_dashboard(config: dict) -> bool:
    """Report whether this dashboard is strategy-generated.

    Strategy dashboards are rendered by HA itself, so hadsync treats them
    as read-only.
    """
    return "strategy" in config.keys()
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def count_cards(config: dict) -> tuple[int, int]:
    """Return (view_count, card_count) for a dashboard config.

    Counts cards in both classic views (view["cards"]) and sections-layout
    views (view["sections"][i]["cards"]).
    """
    views = config.get("views", [])
    total = sum(
        len(view.get("cards", []))
        + sum(len(section.get("cards", [])) for section in view.get("sections", []))
        for view in views
    )
    return len(views), total
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def config_to_yaml_file(config: dict, path: Path) -> None:
    """Write a Lovelace config dict to a YAML file, creating parent dirs."""
    target_dir = path.parent
    target_dir.mkdir(parents=True, exist_ok=True)
    with path.open("w", encoding="utf-8") as handle:
        _yaml.dump(config, handle)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def yaml_file_to_config(path: Path) -> dict:
    """Read a YAML file and return a Lovelace config dict.

    Raises ValueError when the document is not a mapping.
    """
    loaded = _yaml.load(path)
    if isinstance(loaded, dict):
        return loaded
    raise ValueError(f"{path} does not contain a YAML mapping")
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def config_hash(config: dict) -> str:
    """Return a short stable hash of a normalized config dict.

    Used to detect HA-side changes between pulls without storing the full
    config. Keys are sorted so the digest is independent of insertion order.
    """
    canonical = _json.dumps(config, sort_keys=True, ensure_ascii=False)
    digest = hashlib.sha256(canonical.encode())
    return digest.hexdigest()[:16]
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def normalize(obj: object) -> object:
    """Recursively convert ruamel.yaml CommentedMap/Seq to plain dict/list.

    Produces a clean JSON-serialisable structure for pushing to HA and for
    equality comparisons between local YAML and the current HA state.
    Non-container values pass through unchanged.
    """
    if isinstance(obj, dict):
        plain: dict = {}
        for key, value in obj.items():
            plain[key] = normalize(value)
        return plain
    if isinstance(obj, list):
        return list(map(normalize, obj))
    return obj
|
hadsync/entities.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
CACHE_FILENAME = ".ha-entities.json"
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def write_entity_cache(workspace: Path, states: list[dict]) -> int:
    """Build and write the entity cache from a /api/states response list.

    States lacking an entity_id are skipped. Returns the number of entities
    written.
    """
    index: dict[str, dict] = {}
    for item in states:
        eid = item.get("entity_id")
        if not eid:
            continue
        index[eid] = {
            "friendly_name": (item.get("attributes") or {}).get("friendly_name", ""),
            "domain": eid.split(".")[0],
        }

    payload = {
        "refreshed_at": datetime.now(timezone.utc).isoformat(),
        "entities": index,
    }
    target = workspace / CACHE_FILENAME
    target.write_text(json.dumps(payload, indent=2), encoding="utf-8")
    return len(index)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def load_entity_cache(workspace: Path) -> dict:
    """Return the raw cache dict, or an empty structure if not found.

    A missing or unreadable/corrupt cache degrades to the empty structure
    rather than raising.
    """
    fallback = {"entities": {}, "refreshed_at": None}
    path = workspace / CACHE_FILENAME
    if not path.exists():
        return fallback
    try:
        return json.loads(path.read_text(encoding="utf-8"))
    except Exception:
        return fallback
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def entity_id_exists(workspace: Path, entity_id: str) -> bool:
    """True when *entity_id* appears in the cached entity index."""
    cached = load_entity_cache(workspace).get("entities", {})
    return entity_id in cached
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def cache_age_days(workspace: Path) -> Optional[float]:
    """Return the age of the entity cache in days, or None if cache is missing.

    Also returns None when the stored timestamp cannot be parsed.
    """
    stamp = load_entity_cache(workspace).get("refreshed_at")
    if not stamp:
        return None
    try:
        parsed = datetime.fromisoformat(stamp)
        if parsed.tzinfo is None:
            # Naive timestamps from older caches are treated as UTC.
            parsed = parsed.replace(tzinfo=timezone.utc)
        elapsed = datetime.now(timezone.utc) - parsed
        return elapsed.total_seconds() / 86400
    except Exception:
        return None
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def search_entities(workspace: Path, filter_term: str = "") -> dict[str, dict]:
    """Return entities whose entity_id or friendly_name contain filter_term.

    An empty filter returns the whole cached index. Matching is
    case-insensitive.
    """
    catalogue = load_entity_cache(workspace).get("entities", {})
    if not filter_term:
        return catalogue
    needle = filter_term.lower()

    def matches(eid: str, info: dict) -> bool:
        name = (info.get("friendly_name") or "").lower()
        return needle in eid.lower() or needle in name

    return {eid: info for eid, info in catalogue.items() if matches(eid, info)}
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
# ---------------------------------------------------------------------------
|
|
76
|
+
# Entity ID extraction from Lovelace configs
|
|
77
|
+
# ---------------------------------------------------------------------------
|
|
78
|
+
|
|
79
|
+
def _get_line(container: object, key_or_idx: int | str) -> Optional[int]:
|
|
80
|
+
"""Best-effort: return 1-based line number of a key/item from ruamel.yaml."""
|
|
81
|
+
try:
|
|
82
|
+
lc = getattr(container, "lc", None)
|
|
83
|
+
if lc is None:
|
|
84
|
+
return None
|
|
85
|
+
if isinstance(key_or_idx, int):
|
|
86
|
+
return lc.item(key_or_idx)[0] + 1
|
|
87
|
+
return lc.key(key_or_idx)[0] + 1
|
|
88
|
+
except Exception:
|
|
89
|
+
return None
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def _is_entity_id(value: object) -> bool:
|
|
93
|
+
return isinstance(value, str) and "." in value and not value.startswith("#")
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _walk(obj: object, results: list[tuple[str, Optional[int]]]) -> None:
    """Recursively collect (entity_id, line) pairs from a Lovelace config node.

    Appends into *results* in document order for each dict: its 'entity'
    value first, then each item of its 'entities' list, then whatever is
    found by recursing into every other value.
    """
    if isinstance(obj, dict):
        # entity: light.lamp
        if "entity" in obj:
            v = obj["entity"]
            if _is_entity_id(v):
                results.append((v, _get_line(obj, "entity")))

        # entities: ["light.lamp", {entity: sensor.temp}]
        if "entities" in obj:
            items = obj["entities"]
            if isinstance(items, list):
                for i, item in enumerate(items):
                    if _is_entity_id(item):
                        results.append((item, _get_line(items, i)))
                    elif isinstance(item, dict) and "entity" in item:
                        # Row dicts carry their own entity key (plus options).
                        v = item["entity"]
                        if _is_entity_id(v):
                            results.append((v, _get_line(item, "entity")))

        # Recurse into all other values (cards, sections, elements, card, conditions…)
        for key, val in obj.items():
            if key not in ("entity", "entities"):
                _walk(val, results)

    elif isinstance(obj, list):
        for item in obj:
            _walk(item, results)
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def extract_entity_ids(config: object) -> list[tuple[str, Optional[int]]]:
    """Walk a Lovelace config (raw ruamel.yaml dict) and return (entity_id, line) pairs.

    Handles entity/entities fields at any nesting depth (cards, sections, elements,
    conditional conditions, stack cards, etc.). Line numbers are 1-based and
    best-effort: None when the config did not come from a ruamel round-trip load.
    """
    results: list[tuple[str, Optional[int]]] = []
    _walk(config, results)
    return results
|
hadsync/ha_rest.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class HARestError(Exception):
    """Raised when a Home Assistant REST call fails (HTTP error status or
    connection problem)."""
    pass
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def get_ha_info(ha_url: str, token: str, timeout: float = 10.0) -> dict:
    """Fetch HA instance info from REST API. Returns dict with 'version', 'location_name', etc.

    Raises HARestError on any HTTP or transport failure, with a dedicated
    message for 401 (bad token).
    """
    endpoint = f"{ha_url}/api/config"
    headers = {"Authorization": f"Bearer {token}"}
    try:
        response = httpx.get(endpoint, headers=headers, timeout=timeout)
        response.raise_for_status()
        return response.json()
    except httpx.HTTPStatusError as exc:
        if exc.response.status_code == 401:
            raise HARestError("Authentication failed — token is invalid or expired.") from exc
        raise HARestError(f"HA returned HTTP {exc.response.status_code}") from exc
    except httpx.RequestError as exc:
        raise HARestError(f"Connection failed: {exc}") from exc
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def get_entity_states(ha_url: str, token: str, timeout: float = 30.0) -> list[dict]:
    """Fetch all entity states from HA REST API for entity cache population.

    Raises HARestError on any HTTP or transport failure.
    """
    endpoint = f"{ha_url}/api/states"
    headers = {"Authorization": f"Bearer {token}"}
    try:
        response = httpx.get(endpoint, headers=headers, timeout=timeout)
        response.raise_for_status()
        return response.json()
    except httpx.HTTPStatusError as exc:
        raise HARestError(f"HA returned HTTP {exc.response.status_code}") from exc
    except httpx.RequestError as exc:
        raise HARestError(f"Connection failed: {exc}") from exc
|
hadsync/ha_ws.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
|
|
6
|
+
from websockets.asyncio.client import connect
|
|
7
|
+
from websockets.exceptions import WebSocketException
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class HAWebSocketError(Exception):
    """Base error for WebSocket transport/protocol failures against HA."""
    pass
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class HAAuthError(HAWebSocketError):
    """Raised when HA rejects the access token during the auth handshake."""
    pass
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class HACommandError(HAWebSocketError):
    """Raised when HA answers a command with success=False.

    `code` keeps the machine-readable error code; the human-readable
    message is folded into str(self) as "code: message".
    """

    def __init__(self, code: str, message: str) -> None:
        self.code = code
        super().__init__(f"{code}: {message}")
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _ws_url(ha_url: str) -> str:
|
|
25
|
+
if ha_url.startswith("https://"):
|
|
26
|
+
return "wss://" + ha_url[8:] + "/api/websocket"
|
|
27
|
+
return "ws://" + ha_url[7:] + "/api/websocket"
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class HAWebSocketClient:
    """Async context manager for the HA WebSocket API.

    Correct commands for HA 2026.5+:
        list dashboards → get_panels (filter component_name=lovelace)
        fetch config → lovelace/config with explicit url_path
        save config → lovelace/config/save with explicit url_path
    """

    def __init__(self, ha_url: str, token: str, timeout: float = 15.0) -> None:
        self._url = _ws_url(ha_url)  # ws:// or wss:// endpoint derived from ha_url
        self._token = token  # long-lived access token
        self._timeout = timeout  # seconds; used for connect and per-command waits
        self._ws = None  # open connection while inside the context
        self._msg_id = 0  # monotonically increasing WS message id

    async def __aenter__(self) -> HAWebSocketClient:
        # Open the socket, translating low-level failures into HAWebSocketError
        # with actionable hints.
        try:
            self._ws = await connect(self._url, open_timeout=self._timeout)
        except TimeoutError:
            raise HAWebSocketError(
                f"Connection timed out after {self._timeout:.0f}s — "
                f"is HA reachable at {self._url}?"
            )
        except ConnectionRefusedError:
            raise HAWebSocketError(
                f"Connection refused at {self._url} — "
                "check ha_url and port in .hadsync.yaml"
            )
        except OSError as e:
            msg = str(e).lower()
            # DNS-failure phrasing differs between platforms (macOS vs Linux).
            if "nodename nor servname" in msg or "name or service not known" in msg:
                raise HAWebSocketError(
                    f"Cannot resolve hostname — check ha_url in .hadsync.yaml: {self._url}"
                ) from e
            raise HAWebSocketError(f"Cannot connect to {self._url}: {e}") from e
        except WebSocketException as e:
            raise HAWebSocketError(f"WebSocket error connecting to {self._url}: {e}") from e

        # Auth handshake (bounded by the overall timeout):
        # HA sends auth_required → we send auth → HA answers auth_ok/auth_invalid.
        async with asyncio.timeout(self._timeout):
            try:
                first = json.loads(await self._ws.recv())
            except json.JSONDecodeError as e:
                raise HAWebSocketError(f"HA sent malformed data during handshake: {e}") from e

            if first.get("type") != "auth_required":
                raise HAWebSocketError(
                    f"Expected auth_required from HA, got: {first.get('type')!r}"
                )

            await self._ws.send(json.dumps({"type": "auth", "access_token": self._token}))

            try:
                auth_resp = json.loads(await self._ws.recv())
            except json.JSONDecodeError as e:
                raise HAWebSocketError(f"HA sent malformed auth response: {e}") from e

            if auth_resp.get("type") == "auth_invalid":
                raise HAAuthError(
                    "Authentication failed — check that HA_TOKEN is a valid long-lived access token. "
                    "Generate one in HA → Profile → Long-Lived Access Tokens."
                )
            if auth_resp.get("type") != "auth_ok":
                raise HAWebSocketError(
                    f"Unexpected auth response from HA: {auth_resp.get('type')!r}"
                )

        return self

    async def __aexit__(self, *args: object) -> None:
        # Close and forget the socket; safe when the connection is already gone.
        if self._ws is not None:
            await self._ws.close()
            self._ws = None

    def _next_id(self) -> int:
        # The HA WS protocol requires strictly increasing ids per connection.
        self._msg_id += 1
        return self._msg_id

    async def _command(self, **payload: object) -> object:
        """Send one command and return its 'result'.

        Raises HACommandError when HA reports success=False, and
        HAWebSocketError on malformed frames or (via asyncio.timeout) when no
        matching response arrives within the configured timeout.
        """
        msg_id = self._next_id()
        await self._ws.send(json.dumps({"id": msg_id, **payload}))
        async with asyncio.timeout(self._timeout):
            while True:
                raw = await self._ws.recv()
                try:
                    msg = json.loads(raw)
                except json.JSONDecodeError as e:
                    raise HAWebSocketError(f"HA sent malformed JSON: {e}") from e
                if msg.get("id") != msg_id:
                    continue  # skip push messages or responses to other commands
                if not msg.get("success"):
                    err = msg.get("error", {})
                    raise HACommandError(
                        err.get("code", "unknown"),
                        err.get("message", str(err)),
                    )
                return msg.get("result")

    async def get_panels(self) -> dict[str, dict]:
        """Return all Lovelace dashboard panels keyed by panel id."""
        result = await self._command(type="get_panels")
        # get_panels returns every registered panel; keep only Lovelace ones.
        return {
            k: v
            for k, v in (result or {}).items()
            if isinstance(v, dict) and v.get("component_name") == "lovelace"
        }

    async def get_dashboard_config(self, url_path: str) -> dict:
        """Fetch the full Lovelace config for a dashboard by its url_path."""
        result = await self._command(type="lovelace/config", url_path=url_path)
        return result or {}

    async def save_dashboard_config(self, url_path: str, config: dict) -> None:
        """Overwrite the Lovelace config for a dashboard by its url_path."""
        await self._command(type="lovelace/config/save", url_path=url_path, config=config)
|
hadsync/output.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
from rich.console import Console
|
|
2
|
+
|
|
3
|
+
# Shared stdout console for normal program output.
console = Console()
# Separate stderr console so error output does not pollute piped stdout.
_err_console = Console(stderr=True)
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def success(msg: str) -> None:
    """Print *msg* to stdout with a green check mark."""
    console.print(f"[green]✔[/green] {msg}")
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def error(msg: str) -> None:
    """Print *msg* to stderr with a red cross."""
    _err_console.print(f"[red]✗[/red] {msg}")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def warn(msg: str) -> None:
    """Print *msg* to stdout with a yellow warning sign."""
    console.print(f"[yellow]⚠[/yellow] {msg}")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def info(msg: str) -> None:
    """Print *msg* to stdout with a dim info marker."""
    console.print(f"[dim]ℹ[/dim] {msg}")
|
hadsync/schema.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Iterator, Optional
|
|
4
|
+
|
|
5
|
+
# ---------------------------------------------------------------------------
|
|
6
|
+
# Known standard Lovelace card types → list of required field names.
|
|
7
|
+
# An empty list means the card has no required fields beyond 'type'.
|
|
8
|
+
# ---------------------------------------------------------------------------
|
|
9
|
+
# NOTE(review): several entries are intentionally lenient (e.g. picture-elements
# lists no required fields) — only fields this validator should hard-require
# are listed; confirm against the upstream Lovelace card docs before tightening.
KNOWN_CARD_TYPES: dict[str, list[str]] = {
    "alarm-panel": ["entity"],
    "attribute": [],
    "button": [],
    "calendar": [],
    "cast": [],
    "conditional": ["conditions", "card"],
    "divider": [],
    "entities": ["entities"],
    "entity": ["entity"],
    "entity-filter": ["entities", "conditions"],
    "gauge": ["entity"],
    "glance": ["entities"],
    "grid": ["cards"],
    "heading": [],
    "history-graph": ["entities"],
    "horizontal-stack": ["cards"],
    "humidifier": ["entity"],
    "iframe": ["url"],
    "light": ["entity"],
    "logbook": [],
    "map": [],
    "markdown": ["content"],
    "media-control": ["entity"],
    "picture": [],
    "picture-elements": [],
    "picture-entity": ["entity"],
    "picture-glance": ["entities"],
    "plant-status": ["entity"],
    "sensor": ["entity"],
    "shopping-list": [],
    "statistic": ["entity", "stat_type"],
    "statistics-graph": ["entities"],
    "thermostat": ["entity"],
    "tile": ["entity"],
    "todo-list": [],
    "vertical-stack": ["cards"],
    "weather-forecast": ["entity"],
    # HA 2024+
    "area": ["area"],
    "energy-date-selection": [],
    "input-button": ["entity"],
}
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _walk_cards(obj: object) -> Iterator[dict]:
|
|
55
|
+
"""Yield every card dict in a Lovelace config, at any nesting depth.
|
|
56
|
+
|
|
57
|
+
Descends into cards[], sections[].cards[], card (conditional),
|
|
58
|
+
and row (entity-filter template).
|
|
59
|
+
Views are iterated but view-level fields are not yielded as cards.
|
|
60
|
+
"""
|
|
61
|
+
if isinstance(obj, dict):
|
|
62
|
+
for view in obj.get("views", []):
|
|
63
|
+
if isinstance(view, dict):
|
|
64
|
+
yield from _walk_cards(view)
|
|
65
|
+
|
|
66
|
+
for card in obj.get("cards", []):
|
|
67
|
+
if isinstance(card, dict):
|
|
68
|
+
yield card
|
|
69
|
+
yield from _walk_cards(card)
|
|
70
|
+
|
|
71
|
+
for section in obj.get("sections", []):
|
|
72
|
+
if isinstance(section, dict):
|
|
73
|
+
for card in section.get("cards", []):
|
|
74
|
+
if isinstance(card, dict):
|
|
75
|
+
yield card
|
|
76
|
+
yield from _walk_cards(card)
|
|
77
|
+
|
|
78
|
+
if "card" in obj and isinstance(obj["card"], dict):
|
|
79
|
+
yield obj["card"]
|
|
80
|
+
yield from _walk_cards(obj["card"])
|
|
81
|
+
|
|
82
|
+
if "row" in obj and isinstance(obj["row"], dict):
|
|
83
|
+
yield obj["row"]
|
|
84
|
+
yield from _walk_cards(obj["row"])
|
|
85
|
+
|
|
86
|
+
elif isinstance(obj, list):
|
|
87
|
+
for item in obj:
|
|
88
|
+
yield from _walk_cards(item)
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def _get_line(container: object, key: str) -> Optional[int]:
|
|
92
|
+
try:
|
|
93
|
+
lc = getattr(container, "lc", None)
|
|
94
|
+
if lc is None:
|
|
95
|
+
return None
|
|
96
|
+
return lc.key(key)[0] + 1
|
|
97
|
+
except Exception:
|
|
98
|
+
return None
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def validate_cards(
    config: object,
    custom_card_types: list[str] | None = None,
) -> list[tuple[str, str, Optional[int]]]:
    """Walk all cards and return (severity, message, line) tuples.

    Checks:
      - Each card has a 'type' field
      - Known card types have their required fields present
      - Unknown card types (not custom:*) are flagged

    custom_card_types: extra type prefixes to treat as valid beyond 'custom:'.
    """
    allowed_prefixes = tuple(custom_card_types or [])
    findings: list[tuple[str, str, Optional[int]]] = []

    for card in _walk_cards(config):
        kind = card.get("type")

        if kind is None:
            findings.append(
                ("WARN", "Card is missing required 'type' field", _get_line(card, "type"))
            )
            continue

        # custom:* and user-allowlisted prefixes — skip schema check
        if kind.startswith("custom:"):
            continue
        if allowed_prefixes and kind.startswith(allowed_prefixes):
            continue

        required_fields = KNOWN_CARD_TYPES.get(kind)
        if required_fields is None:
            findings.append(("WARN", f"Unknown card type: '{kind}'", _get_line(card, "type")))
            continue

        for field in required_fields:
            if field not in card:
                findings.append(
                    ("WARN", f"Card '{kind}' is missing field '{field}'", _get_line(card, "type"))
                )

    return findings
|