hassl-0.3.0-py3-none-any.whl → hassl-0.3.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hassl/__init__.py +1 -1
- hassl/ast/nodes.py +35 -2
- hassl/codegen/package.py +368 -13
- hassl/codegen/rules_min.py +50 -13
- hassl/parser/hassl.lark +52 -2
- hassl/parser/transform.py +326 -159
- hassl/semantics/analyzer.py +175 -13
- {hassl-0.3.0.dist-info → hassl-0.3.2.dist-info}/METADATA +76 -6
- hassl-0.3.2.dist-info/RECORD +23 -0
- hassl-0.3.0.dist-info/RECORD +0 -23
- {hassl-0.3.0.dist-info → hassl-0.3.2.dist-info}/WHEEL +0 -0
- {hassl-0.3.0.dist-info → hassl-0.3.2.dist-info}/entry_points.txt +0 -0
- {hassl-0.3.0.dist-info → hassl-0.3.2.dist-info}/licenses/LICENSE +0 -0
- {hassl-0.3.0.dist-info → hassl-0.3.2.dist-info}/top_level.txt +0 -0
hassl/__init__.py
CHANGED

@@ -1 +1 @@
-__version__ = "0.3.0"
+__version__ = "0.3.2"

hassl/ast/nodes.py
CHANGED

@@ -19,11 +19,43 @@ class IfClause:
     condition: Dict[str, Any]
     actions: List[Dict[str, Any]]
 
+# ---- NEW: Holiday sets & structured schedule windows ----
+@dataclass
+class HolidaySet:
+    id: str
+    country: str
+    province: Optional[str] = None
+    add: List[str] = field(default_factory=list)      # YYYY-MM-DD
+    remove: List[str] = field(default_factory=list)
+    workdays: List[str] = field(default_factory=lambda: ["mon","tue","wed","thu","fri"])
+    excludes: List[str] = field(default_factory=lambda: ["sat","sun","holiday"])
+
+@dataclass
+class PeriodSelector:
+    # kind = 'months' | 'dates' | 'range'
+    kind: str
+    # data:
+    #  - months: {"list":[Mon,...]} or {"range":[Mon,Mon]}
+    #  - dates:  {"start":"MM-DD","end":"MM-DD"}
+    #  - range:  {"start":"YYYY-MM-DD","end":"YYYY-MM-DD"}
+    data: Dict[str, Any]
+
+@dataclass
+class ScheduleWindow:
+    start: str                           # "HH:MM"
+    end: str                             # "HH:MM"
+    day_selector: str                    # "weekdays" | "weekends" | "daily"
+    period: Optional[PeriodSelector] = None
+    holiday_ref: Optional[str] = None    # id from HolidaySet (for 'except'/'only')
+    holiday_mode: Optional[str] = None   # "except" | "only" | None
+
 @dataclass
 class Schedule:
     name: str
-    # raw clauses as produced by the transformer
+    # raw clauses as produced by the transformer (legacy form)
     clauses: List[Dict[str, Any]]
+    # structured windows for the new 'on ...' syntax (optional)
+    windows: List[ScheduleWindow] = field(default_factory=list)
     private: bool = False
 
 @dataclass
@@ -42,7 +74,8 @@ class Program:
     imports: List[Dict[str, Any]] = field(default_factory=list)
     def to_dict(self):
         def enc(x):
-            if isinstance(x, (Alias, Sync, Rule, IfClause, Schedule)):
+            if isinstance(x, (Alias, Sync, Rule, IfClause, Schedule,
+                              HolidaySet, ScheduleWindow, PeriodSelector)):
                 d = asdict(x); d["type"] = x.__class__.__name__; return d
             return x
         return {
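
For context, a minimal sketch of how the new dataclasses compose, assuming hassl 0.3.2 is installed and that the surrounding Alias/Sync/Rule classes are unchanged; the field values here are illustrative only:

# Build a ScheduleWindow with a month-range period and a holiday exception,
# then serialize it the same way Program.to_dict()'s enc() helper does.
from dataclasses import asdict
from hassl.ast.nodes import HolidaySet, PeriodSelector, ScheduleWindow

us_holidays = HolidaySet(id="us", country="US")  # defaults: mon-fri workdays, sat/sun/holiday excluded
winter = PeriodSelector(kind="months", data={"range": ["Nov", "Feb"]})
window = ScheduleWindow(
    start="17:30",
    end="23:00",
    day_selector="weekdays",
    period=winter,
    holiday_ref="us",
    holiday_mode="except",
)

d = asdict(window)
d["type"] = window.__class__.__name__   # mirrors enc() in Program.to_dict
print(d["type"], d["period"]["kind"], d["holiday_mode"])
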
hassl/codegen/package.py
CHANGED

@@ -1,4 +1,4 @@
-from typing import Dict, List, Iterable, Any
+from typing import Dict, List, Iterable, Any, Tuple, Optional
 import os, re
 from dataclasses import dataclass, field
 from ..semantics.analyzer import IRProgram, IRSync

@@ -78,6 +78,13 @@ def _context_entity(entity: str, prop: str = None) -> str:
 def _domain(entity: str) -> str:
     return entity.split(".", 1)[0]
 
+def _gate_entity_for_schedule(resolved: str, is_window: bool) -> str:
+    # resolved is "pkg.name" (your analyzer already normalizes)
+    pkg, name = resolved.rsplit(".", 1) if "." in resolved else ("", resolved)
+    if is_window:
+        return f"input_boolean.hassl_sched_{_safe(pkg)}_{_safe(name)}"
+    return f"binary_sensor.hassl_schedule_{_safe(pkg)}_{_safe(name)}_active"
+
 def _turn_service(domain: str, state_on: bool) -> str:
     if domain in ("light","switch","fan","media_player","cover"):
         return f"{domain}.turn_on" if state_on else f"{domain}.turn_off"
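
A small sketch of the naming convention `_gate_entity_for_schedule` encodes; the `_safe()` slugifier below is a stand-in assumption, not the package's implementation:

import re

def _safe(s: str) -> str:  # hypothetical stand-in for hassl's _safe() slugifier
    return re.sub(r"[^a-z0-9_]+", "_", s.lower())

def gate_entity_for_schedule(resolved: str, is_window: bool) -> str:
    # Mirrors the diff above: window schedules gate on an input_boolean,
    # legacy clause schedules gate on the '_active' template binary_sensor.
    pkg, name = resolved.rsplit(".", 1) if "." in resolved else ("", resolved)
    if is_window:
        return f"input_boolean.hassl_sched_{_safe(pkg)}_{_safe(name)}"
    return f"binary_sensor.hassl_schedule_{_safe(pkg)}_{_safe(name)}_active"

print(gate_entity_for_schedule("automations.evening", True))
# -> input_boolean.hassl_sched_automations_evening
print(gate_entity_for_schedule("automations.evening", False))
# -> binary_sensor.hassl_schedule_automations_evening_active
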
@@ -92,7 +99,8 @@ class ScheduleRegistry:
     pkg: str
     created: Dict[str, str] = field(default_factory=dict)   # name -> entity_id
     sensors: List[Dict] = field(default_factory=list)       # collected template sensors (for YAML)
-
+    period_cache: Dict[Tuple[str,str], str] = field(default_factory=dict)  # (sched, key) -> entity_id
+
     def eid_for(self, name: str) -> str:
         return f"binary_sensor.hassl_schedule_{self.pkg}_{_safe(name)}_active".lower()
 
@@ -105,6 +113,141 @@ class ScheduleRegistry:
         self.created[name] = eid
         return eid
 
+    # New: create/reuse a period template sensor for a schedule window
+    def ensure_period_sensor(self, sched_name: str, period: Dict[str, Any] | None) -> str | None:
+        if not period:
+            return None
+        key = (sched_name, str(period))
+        if key in self.period_cache:
+            return self.period_cache[key]
+        # Build a compact name with hash for stability
+        eid_name = f"hassl_period_{self.pkg}_{_safe(sched_name)}_{abs(hash(str(period)))%100000}"
+        entity_id = f"binary_sensor.{eid_name}"
+        tpl = _period_template(period)
+        self.sensors.append({"name": eid_name, "unique_id": eid_name, "state": f"{{{{ {tpl} }}}}"})
+        self.period_cache[key] = entity_id
+        return entity_id
+
+# ---------- NEW: window helpers (mirrors rules_min logic) ----------
+def _parse_offset(off: str) -> str:
+    if not off: return "00:00:00"
+    m = re.fullmatch(r"([+-])(\d+)(ms|s|m|h|d)", str(off).strip())
+    if not m: return "00:00:00"
+    sign, n, unit = m.group(1), int(m.group(2)), m.group(3)
+    seconds = {"ms": 0, "s": n, "m": n*60, "h": n*3600, "d": n*86400}[unit]
+    h = seconds // 3600
+    m_ = (seconds % 3600) // 60
+    s = seconds % 60
+    return f"{sign}{h:02d}:{m_:02d}:{s:02d}"
+
+def _wrap_tpl(expr: str) -> str:
+    """Ensure a Jinja expression is wrapped safely in {{ … }}."""
+    expr = expr.strip()
+    if expr.startswith("{{") and expr.endswith("}}"):
+        return expr
+    return "{{ " + expr + " }}"
+
+def _clock_between_cond(hhmm_start: str, hhmm_end: str):
+    # Pure expression (no {% %} / inner {{ }}), safe to embed in {{ ... }}
+    ns = "now().strftime('%H:%M')"
+    s = hhmm_start
+    e = hhmm_end
+
+    expr = (
+        f"( ('{s}' < '{e}' and ({ns} >= '{s}' and {ns} < '{e}')) "
+        f"or ('{s}' >= '{e}' and ({ns} >= '{s}' or {ns} < '{e}')) )"
+    )
+
+    return {
+        "condition": "template",
+        "value_template": _wrap_tpl(expr)
+    }
+
+def _sun_edge_cond(edge: str, ts: dict):
+    event = ts.get("event", "sunrise")
+    off = _parse_offset(ts.get("offset", "0s"))
+    cond = {"condition": "sun", edge: event}
+    if off and off != "00:00:00":
+        cond["offset"] = off
+    return cond
+
+def _window_condition_from_specs(start_ts, end_ts):
+    # clock → clock
+    if isinstance(start_ts, dict) and start_ts.get("kind") == "clock" and \
+       isinstance(end_ts, dict) and end_ts.get("kind") == "clock":
+        s = start_ts.get("value", "00:00")
+        e = end_ts.get("value", "00:00")
+        return _clock_between_cond(s, e)
+    # sun → sun
+    if isinstance(start_ts, dict) and start_ts.get("kind") == "sun" and \
+       isinstance(end_ts, dict) and end_ts.get("kind") == "sun":
+        after_start = _sun_edge_cond("after", start_ts)
+        before_end = _sun_edge_cond("before", end_ts)
+        wrap = (start_ts.get("event") == "sunset" and end_ts.get("event") == "sunrise")
+        if wrap:
+            return {"condition": "or", "conditions": [after_start, before_end]}
+        return {"condition": "and", "conditions": [after_start, before_end]}
+    # mixed → minute-of-day template (pure expression, no {% %})
+    NOWM = "(now().hour*60 + now().minute)"
+    SM = "(((start.value[0:2]|int)*60 + (start.value[3:5]|int)) if start.kind == 'clock' else (as_local(state_attr('sun.sun','next_' ~ start.event)).hour*60 + as_local(state_attr('sun.sun','next_' ~ start.event)).minute))"
+    EM = "(((end.value[0:2]|int)*60 + (end.value[3:5]|int)) if end.kind == 'clock' else (as_local(state_attr('sun.sun','next_' ~ end.event)).hour*60 + as_local(state_attr('sun.sun','next_' ~ end.event)).minute))"
+    return {
+        "condition": "template",
+        "value_template": (
+            f"( ({SM} < {EM} and ({NOWM} >= {SM} and {NOWM} < {EM})) "
+            f"or ({SM} >= {EM} and ({NOWM} >= {SM} or {NOWM} < {EM})) )"
+        ),
+        "variables": {"start": start_ts or {}, "end": end_ts or {}}
+    }
+
+
+def _day_selector_condition(sel: Optional[str]):
+    if sel == "weekdays":
+        return {"condition": "time", "weekday": ["mon","tue","wed","thu","fri"]}
+    if sel == "weekends":
+        return {"condition": "time", "weekday": ["sat","sun"]}
+    # daily / None
+    return None
+
+def _holiday_condition(mode: Optional[str], hol_id: Optional[str]):
+    if not (mode and hol_id):
+        return None
+    # True when today is a holiday for 'only', false when 'except'
+    eid = f"binary_sensor.hassl_holiday_{hol_id}"
+    return {"condition": "state", "entity_id": eid, "state": "on" if mode == "only" else "off"}
+
+def _norm_hmode(raw: Optional[str]) -> Optional[str]:
+    """Coerce analyzer-provided holiday_mode variants to {'only','except',None}."""
+    if not raw:
+        return None
+    v = str(raw).strip().lower().replace("_", " ").replace("-", " ")
+    # Accept a bunch of user/analyzer phrasings
+    if any(k in v for k in ("except", "exclude", "unless", "not")):
+        return "except"
+    if any(k in v for k in ("only", "holiday only", "holidays only")):
+        return "only"
+    # Unknown → leave as-is to avoid surprising behavior
+    return raw
+
+def _trigger_for(ts: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Build a HA trigger for a time-spec dict:
+      - {"kind":"clock","value":"HH:MM"} -> time at HH:MM:00
+      - {"kind":"sun","event":"sunrise|sunset","offset":"+15m"} -> sun trigger (with offset)
+    """
+    if isinstance(ts, dict) and ts.get("kind") == "clock":
+        hhmm = ts.get("value", "00:00")
+        at = hhmm if len(hhmm) == 8 else (hhmm + ":00" if len(hhmm) == 5 else "00:00:00")
+        return {"platform": "time", "at": str(at)}
+    if isinstance(ts, dict) and ts.get("kind") == "sun":
+        trig = {"platform": "sun", "event": ts.get("event", "sunrise")}
+        off = _parse_offset(ts.get("offset", "0s"))
+        if off and off != "00:00:00":
+            trig["offset"] = off
+        return trig
+    # Fallback: evaluate soon; maintenance automation will correct state anyway
+    return {"platform": "time_pattern", "minutes": "/1"}
+
 def _jinja_offset(offset: str) -> str:
     """
     Convert '+15m'/'-10s'/'2h' to a Jinja timedelta expression snippet:
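
A sketch of what the sun→sun wrap-around case produces, assuming these helpers are importable module-level from hassl.codegen.package as the diff suggests; the offsets are illustrative:

from hassl.codegen.package import _window_condition_from_specs

cond = _window_condition_from_specs(
    {"kind": "sun", "event": "sunset", "offset": "-30m"},
    {"kind": "sun", "event": "sunrise", "offset": "+15m"},
)
# Because sunset -> sunrise crosses midnight, the two sun conditions are OR-ed, roughly:
# {'condition': 'or',
#  'conditions': [{'condition': 'sun', 'after': 'sunset', 'offset': '-00:30:00'},
#                 {'condition': 'sun', 'before': 'sunrise', 'offset': '+00:15:00'}]}
print(cond["condition"], len(cond["conditions"]))  # -> or 2
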
@@ -236,6 +379,49 @@ def _emit_schedule_helper_yaml(entity_id: str, pkg: str, name: str, clauses: Lis
         "state": f"{{{{ {state_tpl} }}}}"
     }
 
+# ---------- NEW: period sensor template builders ----------
+def _period_template(period: Dict[str, Any]) -> str:
+    """
+    period is a dict of shape:
+      {"kind":"months","data":{"list":[Mon,...]}} or {"kind":"months","data":{"range":[A,B]}}
+      {"kind":"dates","data":{"start":"MM-DD","end":"MM-DD"}}   # can wrap year
+      {"kind":"range","data":{"start":"YYYY-MM-DD","end":"YYYY-MM-DD"}}
+    Returns a Jinja boolean expression.
+    """
+    kind = period.get("kind")
+    data = period.get("data", {})
+
+    if kind == "months":
+        def m2n(m: str) -> int:
+            order = ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]
+            return order.index(m)+1
+        if "list" in data:
+            months = [m2n(m) for m in data["list"]]
+            return f"( now().month in {months} )"
+        if "range" in data:
+            a, b = [m2n(x) for x in data["range"]]
+            return (
+                f"( ({a} <= now().month <= {b}) or "
+                f"  ({a} > {b} and (now().month >= {a} or now().month <= {b})) )"
+            )
+
+    if kind == "dates":
+        # Compare zero-padded strings '%m-%d' (lexicographic works).
+        start = data.get("start"); end = data.get("end")
+        d = "now().strftime('%m-%d')"
+        return (
+            f"( ('{start}' <= {d} <= '{end}') or "
+            f"  ('{start}' > '{end}' and ({d} >= '{start}' or {d} <= '{end}')) )"
+        )
+    if kind == "range":
+        start = data.get("start"); end = data.get("end")
+        # Keep this as a pure expression using filters.
+        return (
+            f"( now().date() >= '{start}'|as_datetime|date and "
+            f"  now().date() <= '{end}'|as_datetime|date )"
+        )
+    return "true"
+
 def _collect_named_schedules(ir: IRProgram) -> Iterable[Dict]:
     """
     Collect named schedules from IR in either object, list, or dict form.
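
A sketch of the Jinja expression emitted for a year-wrapping 'dates' period, assuming _period_template is importable as defined in the hunk above; the dates are illustrative:

from hassl.codegen.package import _period_template

expr = _period_template({"kind": "dates", "data": {"start": "11-15", "end": "02-01"}})
print(expr)
# Roughly (reformatted for readability):
# ( ('11-15' <= now().strftime('%m-%d') <= '02-01') or
#   ('11-15' > '02-01' and (now().strftime('%m-%d') >= '11-15'
#                           or now().strftime('%m-%d') <= '02-01')) )
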
@@ -295,7 +481,6 @@ def _collect_named_schedules(ir: IRProgram) -> Iterable[Dict]:
 def emit_package(ir: IRProgram, outdir: str):
     ensure_dir(outdir)
 
-    print("DEBUG:", getattr(ir, "schedules", None))
     # derive package slug early; use IR package if present
     pkg = getattr(ir, "package", None) or _pkg_slug(outdir)
     sched_reg = ScheduleRegistry(pkg)
@@ -304,12 +489,33 @@ def emit_package(ir: IRProgram, outdir: str):
     scripts: Dict = {"script": {}}
     automations: List[Dict] = []
 
+    # We no longer emit legacy YAML 'platform: workday' sections.
+    # Only emit template sensors that reference UI-defined Workday entities.
+    holiday_tpl_defs: List[Dict] = []
+
     # ---------- PASS 1: create named schedule helpers ONCE per (pkg, name) ----------
     for s in _collect_named_schedules(ir):
         if not s.get("name"):
             continue
         sched_reg.register_decl(s["name"], s.get("clauses", []))
 
+    # ---------- PASS 1b: Holidays -> (template only; Workday via UI) ----------
+    # ir.holidays is {"id": {...}}; we only need the id to reference UI entities.
+    holidays_ir = getattr(ir, "holidays", {}) or {}
+    if holidays_ir:
+        for hid, h in holidays_ir.items():
+            # Template: holiday = NOT(not_holiday)
+            # Assumes you configured a UI Workday instance that:
+            #   - has workdays = Mon..Sun
+            #   - excludes = ['holiday']
+            # and renamed it to: binary_sensor.hassl_<id>_not_holiday
+            eid_name = f"hassl_holiday_{hid}"
+            holiday_tpl_defs.append({
+                "name": eid_name,
+                "unique_id": eid_name,
+                "state": "{{ is_state('binary_sensor.hassl_" + hid + "_not_holiday', 'off') }}"
+            })
+
     # ---------- Context helpers for entities & per-prop contexts ----------
     sync_entities = set(); entity_props = {}
     for s in ir.syncs:
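
For reference, a sketch of the entry this pass appends to holiday_tpl_defs for a holiday set declared with id "us"; the id is illustrative, and the referenced Workday sensor must be created and renamed in the Home Assistant UI as the comments above describe:

# The template sensor simply inverts the UI-configured Workday sensor
# binary_sensor.hassl_us_not_holiday (holiday = NOT not_holiday).
holiday_entry = {
    "name": "hassl_holiday_us",
    "unique_id": "hassl_holiday_us",
    "state": "{{ is_state('binary_sensor.hassl_us_not_holiday', 'off') }}",
}
# emit_package() later dumps these entries into holidays_<pkg>.yaml as:
#   template: [{binary_sensor: [holiday_entry, ...]}]
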
@@ -436,11 +642,13 @@ def emit_package(ir: IRProgram, outdir: str):
                     )
                 })
 
-
-
-
-
-
+                ptype = PROP_CONFIG.get(prop, {}).get("proxy", {}).get("type")
+                if ptype == "input_text":
+                    proxy_e = f"input_text.hassl_{_safe(s.name)}_{prop}"
+                elif ptype == "input_boolean":
+                    proxy_e = f"input_boolean.hassl_{_safe(s.name)}_{prop}"
+                else:
+                    proxy_e = f"input_number.hassl_{_safe(s.name)}_{prop}"
 
                 if prop == "mute":
                     actions = [{
@@ -513,11 +721,14 @@ def emit_package(ir: IRProgram, outdir: str):
                 })
             automations.append({"alias": f"HASSL sync {s.name} downstream onoff","mode":"queued","max":10,"trigger": trigger,"action": actions})
         else:
-
-
-
-
+            ptype = PROP_CONFIG.get(prop, {}).get("proxy", {}).get("type")
+            if ptype == "input_text":
+                proxy_e = f"input_text.hassl_{_safe(s.name)}_{prop}"
+            elif ptype == "input_boolean":
+                proxy_e = f"input_boolean.hassl_{_safe(s.name)}_{prop}"
+            else:
+                proxy_e = f"input_number.hassl_{_safe(s.name)}_{prop}"
+
             trigger = [{"platform": "state","entity_id": proxy_e}]
             actions = []
             cfg = PROP_CONFIG.get(prop, {})
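
A standalone sketch of the proxy-entity selection introduced in the two hunks above; the PROP_CONFIG entries shown are an assumed shape (the real table lives elsewhere in hassl/codegen/package.py), and the sync name is passed pre-slugified for simplicity:

# Hypothetical PROP_CONFIG fragment: each property maps to a proxy helper type.
PROP_CONFIG = {
    "brightness": {"proxy": {"type": "input_number"}},   # assumed entry
    "mute":       {"proxy": {"type": "input_boolean"}},  # assumed entry
}

def proxy_entity(sync_name: str, prop: str) -> str:
    ptype = PROP_CONFIG.get(prop, {}).get("proxy", {}).get("type")
    if ptype == "input_text":
        return f"input_text.hassl_{sync_name}_{prop}"
    if ptype == "input_boolean":
        return f"input_boolean.hassl_{sync_name}_{prop}"
    return f"input_number.hassl_{sync_name}_{prop}"  # default per the diff

print(proxy_entity("living_room", "mute"))        # input_boolean.hassl_living_room_mute
print(proxy_entity("living_room", "brightness"))  # input_number.hassl_living_room_brightness
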
@@ -551,6 +762,136 @@ def emit_package(ir: IRProgram, outdir: str):
             })
             automations.append({"alias": f"HASSL sync {s.name} downstream {prop}","mode":"queued","max":10,"trigger": trigger,"action": actions})
 
+    # ---------- New schedule windows (emit input_boolean + minute/sun maintenance automation) ----------
+    # IR provides schedules_windows: { name: [ {start,end,day_selector,period,holiday_*} ] }
+    sched_windows_ir = getattr(ir, "schedules_windows", {}) or {}
+    per_schedule_automations: Dict[str, List[Dict]] = {}
+    for sched_name, wins in sched_windows_ir.items():
+        # Ensure schedule boolean exists in helpers (include pkg prefix!)
+        sched_bool_key = f"hassl_sched_{_safe(pkg)}_{_safe(sched_name)}"
+        helpers["input_boolean"][sched_bool_key] = {
+            "name": f"HASSL Schedule {pkg}.{sched_name}"
+        }
+        bool_eid = f"input_boolean.{sched_bool_key}"
+
+        # --- Back-compat: emit '_active' template mirrors that follow the input_boolean ---
+        # 1) Primary mirror: binary_sensor.hassl_schedule_<pkg>_<name>_active
+        pkg_safe = _safe(pkg)
+        mirror_name = f"hassl_schedule_{pkg_safe}_{_safe(sched_name)}_active"
+        sched_reg.sensors.append({
+            "name": mirror_name,
+            "unique_id": mirror_name,
+            "state": "{{ is_state('" + bool_eid + "', 'on') }}"
+        })
+        # 2) Legacy alias (no pkg): binary_sensor.hassl_schedule_automations_<name>_active
+        #    Some existing rulegen referenced this older name; keep it as a thin mirror.
+        legacy_alias = f"hassl_schedule_automations_{_safe(sched_name)}_active"
+        sched_reg.sensors.append({
+            "name": legacy_alias,
+            "unique_id": legacy_alias,
+            "state": "{{ is_state('" + bool_eid + "', 'on') }}"
+        })
+
+        # Build OR-of-windows condition bundles
+        or_conditions: List[Dict[str, Any]] = []
+        need_sun_triggers = False
+
+        for idx, w in enumerate(wins):
+
+            ds = w.get("day_selector")
+            href = w.get("holiday_ref")
+            hmode = _norm_hmode(w.get("holiday_mode"))
+            period = w.get("period")
+            if href and hmode is None and ds in ("weekdays", "weekends"):
+                hmode = "except"
+
+            # Coerce time specs to dicts compatible with _trigger_for/_window_condition_from_specs
+            raw_start = w.get("start")
+            raw_end = w.get("end")
+            def _coerce(ts):
+                if isinstance(ts, dict):
+                    return ts
+                if isinstance(ts, str):
+                    # accept "HH:MM" or "HH:MM:SS"
+                    v = ts if len(ts) in (5,8) else "00:00"
+                    return {"kind":"clock","value": v[:5] if len(v)==5 else v[:8]}
+                return {"kind":"clock","value":"00:00"}
+            start_ts = _coerce(raw_start)
+            end_ts = _coerce(raw_end)
+
+            # window condition (handles clock↔sun and wrap)
+            window_cond = _window_condition_from_specs(start_ts, end_ts)
+            if start_ts.get("kind") == "sun" or end_ts.get("kind") == "sun":
+                need_sun_triggers = True
+
+            # day selector & holiday & period
+            conds_and = [ c for c in (_day_selector_condition(ds),
+                                      _holiday_condition(hmode, href),
+                                      window_cond)
+                          if c is not None ]
+            period_eid = sched_reg.ensure_period_sensor(sched_name, period)
+            if period_eid:
+                conds_and.append({"condition":"state", "entity_id": period_eid, "state":"on"})
+            or_conditions.append({ "condition": "and", "conditions": conds_and })
+
+            # --- Per-window explicit ON/OFF automations (edges only) ---
+            edge_conds = [ _day_selector_condition(ds), _holiday_condition(hmode, href) ]
+            if period_eid:
+                edge_conds.append({"condition": "state", "entity_id": period_eid, "state": "on"})
+            edge_conds = [c for c in edge_conds if c]
+
+            on_auto = {
+                "alias": f"HASSL schedule {pkg}.{sched_name} on_{idx}",
+                "mode": "single",
+                "trigger": [ _trigger_for(start_ts) ],
+                "action": [ { "service": "input_boolean.turn_on", "target": {"entity_id": bool_eid} } ]
+            }
+            ec = [c for c in edge_conds if c]
+            if ec:
+                on_auto["condition"] = ec
+            per_schedule_automations.setdefault(sched_name, []).append(on_auto)
+
+            off_auto = {
+                "alias": f"HASSL schedule {pkg}.{sched_name} off_{idx}",
+                "mode": "single",
+                "trigger": [ _trigger_for(end_ts) ],
+                "action": [ { "service": "input_boolean.turn_off", "target": {"entity_id": bool_eid} } ]
+            }
+            if ec:
+                off_auto["condition"] = ec
+            per_schedule_automations.setdefault(sched_name, []).append(off_auto)
+
+        # Composite choose: ON when any window matches, else OFF
+        choose_block = [{
+            "conditions": [{ "condition": "or", "conditions": or_conditions }] if or_conditions else [{"condition":"template","value_template":"false"}],
+            "sequence": [{"service": "input_boolean.turn_on", "target": {"entity_id": bool_eid}}]
+        }]
+
+
+        triggers = [
+            {"platform": "time_pattern", "minutes": "/1"},
+            # Re-evaluate on HA restart so the boolean is correct immediately
+            {"platform": "homeassistant", "event": "start"},
+        ]
+        if need_sun_triggers:
+            # Nudge immedately at edges so the boolean flips promptly
+            triggers.extend([
+                {"platform": "sun", "event": "sunrise"},
+                {"platform": "sun", "event": "sunset"}
+            ])
+
+        per_schedule_automations.setdefault(sched_name, []).append({
+            "alias": f"HASSL schedule {pkg}.{sched_name} maint",
+            "mode": "single",
+            "trigger": triggers,
+            "condition": [],
+            "action": [
+                {"choose": choose_block,
+                 "default": [{"service": "input_boolean.turn_off", "target": {"entity_id": bool_eid}}]
+                }
+            ]
+        })
+
     # ---------- Write YAML ----------
     # helpers & scripts
     _dump_yaml(os.path.join(outdir, f"helpers_{pkg}.yaml"), helpers, ensure_sections=True)
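
A sketch of the edge automations this pass produces for a single "07:00-09:00 on weekdays" window of a schedule named "morning" in a package named "automations"; the names are illustrative and the dicts are an approximation of what the code above builds:

bool_eid = "input_boolean.hassl_sched_automations_morning"

on_auto = {
    "alias": "HASSL schedule automations.morning on_0",
    "mode": "single",
    "trigger": [{"platform": "time", "at": "07:00:00"}],
    "condition": [{"condition": "time", "weekday": ["mon", "tue", "wed", "thu", "fri"]}],
    "action": [{"service": "input_boolean.turn_on", "target": {"entity_id": bool_eid}}],
}
off_auto = {
    "alias": "HASSL schedule automations.morning off_0",
    "mode": "single",
    "trigger": [{"platform": "time", "at": "09:00:00"}],
    "condition": [{"condition": "time", "weekday": ["mon", "tue", "wed", "thu", "fri"]}],
    "action": [{"service": "input_boolean.turn_off", "target": {"entity_id": bool_eid}}],
}
# A third "maint" automation re-evaluates every minute (and on HA start/sun edges)
# and turns the boolean off whenever no window condition matches.
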
@@ -563,8 +904,22 @@ def emit_package(ir: IRProgram, outdir: str):
         {"template": [{"binary_sensor": sched_reg.sensors}]}
     )
 
+    # Holidays file: emit only the template sensors; Workday instances are created via UI
+    if holiday_tpl_defs:
+        hol_doc: Dict[str, Any] = {}
+        hol_doc["template"] = [{"binary_sensor": holiday_tpl_defs}]
+        _dump_yaml(os.path.join(outdir, f"holidays_{pkg}.yaml"), hol_doc)
+
     # automations per sync
     for s in ir.syncs:
         doc = [a for a in automations if a["alias"].startswith(f"HASSL sync {s.name}")]
         if doc:
             _dump_yaml(os.path.join(outdir, f"sync_{pkg}_{_safe(s.name)}.yaml"), {"automation": doc})
+
+    # automations per schedule (new windows)
+    for sched_name, autos in per_schedule_automations.items():
+        if autos:
+            _dump_yaml(
+                os.path.join(outdir, f"schedule_{pkg}_{_safe(sched_name)}.yaml"),
+                {"automation": autos}
+            )

hassl/codegen/rules_min.py
CHANGED

@@ -404,14 +404,52 @@ def generate_rules(ir, outdir):
         rname = rule["name"]
         gate = _gate_entity(rname)
 
-        #
-
-
-
-
-
-
-
+        # Build per-schedule gate conditions.
+        # For each referenced schedule, OR together its possible gate entities
+        # (e.g., input_boolean.hassl_sched_* OR binary_sensor.hassl_schedule_*_active).
+        schedule_gate_conditions = []
+
+        rule_gates = list(rule.get("schedule_gates") or []) if isinstance(rule, dict) else []
+        used_names = list(use_by_rule.get(rname, []) or [])
+
+        if rule_gates:
+            for g in rule_gates:
+                ents = [e for e in (g.get("entities") or []) if isinstance(e, str)]
+                # Also include the legacy, current-outdir slug binary_sensor expected by older tests/code
+                # Determine the base schedule name, then synthesize the local sensor id.
+                resolved = str(g.get("resolved", "")) if isinstance(g.get("resolved", ""), str) else ""
+                base = resolved.rsplit(".", 1)[-1] if resolved else None
+                if base:
+                    legacy_local = _schedule_sensor(base, pkg)  # e.g., binary_sensor.hassl_schedule_out_std_<base>_active
+                    if legacy_local not in ents:
+                        ents.append(legacy_local)
+
+                if not ents:
+                    continue
+                if len(ents) == 1:
+                    schedule_gate_conditions.append({
+                        "condition": "state",
+                        "entity_id": ents[0],
+                        "state": "on"
+                    })
+                else:
+                    schedule_gate_conditions.append({
+                        "condition": "or",
+                        "conditions": [
+                            {"condition": "state", "entity_id": e, "state": "on"}
+                            for e in ents
+                        ]
+                    })
+        else:
+            # Legacy fallback: only the template binary_sensor is known.
+            for nm in used_names:
+                base = str(nm).split(".")[-1]
+                decl_pkg = exported_sched_pkgs.get(base, pkg)
+                schedule_gate_conditions.append({
+                    "condition": "state",
+                    "entity_id": _schedule_sensor(base, decl_pkg),
+                    "state": "on"
+                })
 
         # 2) inline schedule clauses → compile directly to HA conditions (no helpers)
         inline_clauses = inline_by_rule.get(rname, []) or []
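
For reference, a sketch of the condition the loop above appends for one schedule gate when the analyzer supplies both entity forms; the entity ids are illustrative:

ents = [
    "input_boolean.hassl_sched_automations_evening",
    "binary_sensor.hassl_schedule_automations_evening_active",
]
gate_condition = {
    "condition": "or",
    "conditions": [
        {"condition": "state", "entity_id": e, "state": "on"} for e in ents
    ],
}
# With a single entity the code emits a bare state condition instead of an OR.
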
@@ -420,9 +458,6 @@ def generate_rules(ir, outdir):
             if isinstance(cl, dict) and cl.get("type") == "schedule_clause":
                 inline_schedule_conditions.append(_schedule_clause_to_condition(cl))
 
-        # de-dup schedule conditions
-        cond_schedule_entities = sorted(set(cond_schedule_entities))
-
         # Now process each 'if' clause
         for idx, clause in enumerate(rule["clauses"]):
             # Each clause is {"condition": ..., "actions": [...]}
@@ -440,8 +475,10 @@ def generate_rules(ir, outdir):
             cond_ha = _condition_to_ha(cond_in)
             gate_cond = {"condition": "state", "entity_id": gate, "state": "on"}
 
-            # schedule gate conditions (all must be satisfied)
-
+            # schedule gate conditions (all must be satisfied);
+            # each item in schedule_gate_conditions is already either a state check
+            # or an OR of multiple state checks for a single schedule.
+            sched_conds = list(schedule_gate_conditions)
             if inline_schedule_conditions:
                 sched_conds.extend(inline_schedule_conditions)
 