hassl 0.2.1__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hassl/__init__.py CHANGED
@@ -0,0 +1 @@
1
+ __version__ = "0.3.1"
hassl/ast/nodes.py CHANGED
@@ -1,10 +1,11 @@
1
1
  from dataclasses import dataclass, asdict, field
2
- from typing import List, Any, Dict
2
+ from typing import List, Any, Dict, Optional
3
3
 
4
4
  @dataclass
5
5
  class Alias:
6
6
  name: str
7
7
  entity: str
8
+ private: bool = False
8
9
 
9
10
  @dataclass
10
11
  class Sync:
@@ -18,17 +19,68 @@ class IfClause:
18
19
  condition: Dict[str, Any]
19
20
  actions: List[Dict[str, Any]]
20
21
 
22
+ # ---- NEW: Holiday sets & structured schedule windows ----
23
+ @dataclass
24
+ class HolidaySet:
25
+ id: str
26
+ country: str
27
+ province: Optional[str] = None
28
+ add: List[str] = field(default_factory=list) # YYYY-MM-DD
29
+ remove: List[str] = field(default_factory=list)
30
+ workdays: List[str] = field(default_factory=lambda: ["mon","tue","wed","thu","fri"])
31
+ excludes: List[str] = field(default_factory=lambda: ["sat","sun","holiday"])
32
+
33
+ @dataclass
34
+ class PeriodSelector:
35
+ # kind = 'months' | 'dates' | 'range'
36
+ kind: str
37
+ # data:
38
+ # - months: {"list":[Mon,...]} or {"range":[Mon,Mon]}
39
+ # - dates: {"start":"MM-DD","end":"MM-DD"}
40
+ # - range: {"start":"YYYY-MM-DD","end":"YYYY-MM-DD"}
41
+ data: Dict[str, Any]
42
+
43
+ @dataclass
44
+ class ScheduleWindow:
45
+ start: str # "HH:MM"
46
+ end: str # "HH:MM"
47
+ day_selector: str # "weekdays" | "weekends" | "daily"
48
+ period: Optional[PeriodSelector] = None
49
+ holiday_ref: Optional[str] = None # id from HolidaySet (for 'except'/'only')
50
+ holiday_mode: Optional[str] = None # "except" | "only" | None
51
+
52
+ @dataclass
53
+ class Schedule:
54
+ name: str
55
+ # raw clauses as produced by the transformer (legacy form)
56
+ clauses: List[Dict[str, Any]]
57
+ # structured windows for the new 'on ...' syntax (optional)
58
+ windows: List[ScheduleWindow] = field(default_factory=list)
59
+ private: bool = False
60
+
21
61
  @dataclass
22
62
  class Rule:
23
63
  name: str
24
- clauses: List[IfClause]
64
+ # allow schedule dicts
65
+ clauses: List[Any]
25
66
 
26
67
  @dataclass
27
68
  class Program:
28
69
  statements: List[object]
70
+ package: Optional[str] = None
71
+ # normalized import entries (dicts) from the transformer:
72
+ # {"type":"import","module": "...", "kind": "glob|list|alias", "items":
73
+ #[...], "as": "name"|None}
74
+ imports: List[Dict[str, Any]] = field(default_factory=list)
29
75
  def to_dict(self):
30
76
  def enc(x):
31
- if isinstance(x, (Alias, Sync, Rule, IfClause)):
77
+ if isinstance(x, (Alias, Sync, Rule, IfClause, Schedule,
78
+ HolidaySet, ScheduleWindow, PeriodSelector)):
32
79
  d = asdict(x); d["type"] = x.__class__.__name__; return d
33
80
  return x
34
- return {"type": "Program","statements": [enc(s) for s in self.statements]}
81
+ return {
82
+ "type": "Program",
83
+ "package": self.package,
84
+ "imports": self.imports,
85
+ "statements": [enc(s) for s in self.statements],
86
+ }
hassl/cli.py CHANGED
@@ -1,14 +1,220 @@
1
1
  import argparse
2
- import os, json
2
+ import os, json, glob
3
+ from pathlib import Path
4
+ from typing import Dict, Tuple, List
3
5
  from .parser.loader import load_grammar_text
4
6
  from .parser.transform import HasslTransformer
5
- from .ast.nodes import Program
7
+ from .ast.nodes import Program, Alias, Schedule
6
8
  from lark import Lark
9
+ from .semantics import analyzer as sem_analyzer
7
10
  from .semantics.analyzer import analyze
8
11
  from .codegen.package import emit_package
9
12
  from .codegen import generate as codegen_generate
10
13
 
11
- #GRAMMAR_PATH = os.path.join(os.path.dirname(__file__), "parser", "hassl.lark")
14
+ def parse_hassl(text: str) -> Program:
15
+ grammar = load_grammar_text()
16
+ parser = Lark(grammar, start="start", parser="lalr", maybe_placeholders=False)
17
+ tree = parser.parse(text)
18
+ program = HasslTransformer().transform(tree)
19
+ return program
20
+
21
+
22
+ def _normalize_module(importing_pkg: str, mod: str) -> str:
23
+ """
24
+ Resolve Python-like relative module notation to an absolute dotted id.
25
+ Examples (importing_pkg='home.addie.automations'):
26
+ '.shared' -> 'home.addie.shared'
27
+ '..shared' -> 'home.shared'
28
+ 'std.shared' (absolute) stays 'std.shared'
29
+ """
30
+ if not mod:
31
+ return mod
32
+ if not mod.startswith("."):
33
+ return mod # already absolute
34
+ # Count leading dots
35
+ i = 0
36
+ while i < len(mod) and mod[i] == ".":
37
+ i += 1
38
+ rel = mod[i:] # tail after dots (may be '')
39
+ base_parts = (importing_pkg or "").split(".")
40
+ # Pop one level per dot
41
+ up = i - 1 # '.x' means stay at same depth + replace last segment -> up=0
42
+ if up > 0 and up <= len(base_parts):
43
+ base_parts = base_parts[:len(base_parts) - up]
44
+ elif up > len(base_parts):
45
+ base_parts = []
46
+ if rel:
47
+ return ".".join([p for p in base_parts if p] + [rel])
48
+ return ".".join([p for p in base_parts if p])
49
+
50
+ def _derive_package_name(prog: Program, src_path: Path, module_root: Path | None) -> str:
51
+ """
52
+ If the source did not declare `package`, derive one from the path:
53
+ - If module_root is given and src_path is under it: use relative path (dots)
54
+ - Else: use file stem
55
+ """
56
+ if getattr(prog, "package", None):
57
+ return prog.package # declared
58
+ if module_root:
59
+ try:
60
+ rel = src_path.resolve().relative_to(module_root.resolve())
61
+ parts = list(rel.with_suffix("").parts)
62
+ if parts:
63
+ return ".".join(parts)
64
+ except Exception:
65
+ pass
66
+ return src_path.stem
67
+
68
+ def _collect_public_exports(prog: Program, pkg: str) -> Dict[Tuple[str,str,str], object]:
69
+ """
70
+ Build (pkg, kind, name) -> node for public alias/schedule in a single Program.
71
+ Accepts both Schedule nodes and transformer dicts {"type":"schedule_decl",...}.
72
+ """
73
+ out: Dict[Tuple[str,str,str], object] = {}
74
+ # Aliases
75
+ for s in prog.statements:
76
+ if isinstance(s, Alias):
77
+ if not getattr(s, "private", False):
78
+ out[(pkg, "alias", s.name)] = s
79
+ # Schedules (either dicts from transformer or Schedule nodes)
80
+ for s in prog.statements:
81
+ if isinstance(s, Schedule):
82
+ if not getattr(s, "private", False):
83
+ out[(pkg, "schedule", s.name)] = s
84
+ elif isinstance(s, dict) and s.get("type") == "schedule_decl" and not s.get("private", False):
85
+ name = s.get("name")
86
+ if isinstance(name, str) and name.strip():
87
+ out[(pkg, "schedule", name)] = Schedule(name=name, clauses=s.get("clauses", []) or [], private=False)
88
+ return out
89
+
90
+ def _scan_hassl_files(path: Path) -> List[Path]:
91
+ if path.is_file():
92
+ return [path]
93
+ return [Path(p) for p in glob.glob(str(path / "**" / "*.hassl"), recursive=True)]
94
+
95
+ def _module_to_path(module_root: Path, module: str) -> Path:
96
+ return (module_root / Path(module.replace(".", "/"))).with_suffix(".hassl")
97
+
98
+ def _ensure_imports_loaded(programs, module_root: Path):
99
+ """If imported packages aren't parsed yet, try to load their .hassl files from module_root."""
100
+ # programs: List[tuple[Path, Program, str]]
101
+ known_pkgs = {pkg for _, _, pkg in programs}
102
+ added = True
103
+ while added:
104
+ added = False
105
+ for _, prog, importer_pkg in list(programs):
106
+ for imp in getattr(prog, "imports", []) or []:
107
+ if not isinstance(imp, dict) or imp.get("type") != "import":
108
+ continue
109
+ raw_mod = imp.get("module", "")
110
+ if not raw_mod:
111
+ continue
112
+ # resolve relative notation against the importing package
113
+ abs_mod = _normalize_module(importer_pkg, raw_mod)
114
+ if abs_mod in known_pkgs:
115
+ continue
116
+ if not module_root:
117
+ continue
118
+ candidate = _module_to_path(module_root, abs_mod)
119
+ if candidate.exists():
120
+ print(f"[hasslc] Autoload candidate FOUND for '{abs_mod}': {candidate}")
121
+ with open(candidate, "r", encoding="utf-8") as f:
122
+ text = f.read()
123
+ p = parse_hassl(text)
124
+ # force package to declared or derived (declared will win)
125
+ # If the file declared a package, keep it. Otherwise, assign the resolved module id.
126
+ pkg_name = p.package or abs_mod
127
+ p.package = pkg_name
128
+ programs.append((candidate, p, pkg_name))
129
+ known_pkgs.add(pkg_name)
130
+ added = True
131
+ else:
132
+ print(f"[hasslc] Autoload candidate MISS for '{abs_mod}': {candidate}")
133
+
134
+ def main():
135
+ print("[hasslc] Using CLI file:", __file__)
136
+ ap = argparse.ArgumentParser(prog="hasslc", description="HASSL Compiler")
137
+ ap.add_argument("input", help="Input .hassl file OR directory")
138
+ ap.add_argument("-o", "--out", default="./packages/out", help="Output directory root for HA package(s)")
139
+ ap.add_argument("--module-root", default=None, help="Optional root to derive package names from paths")
140
+ args = ap.parse_args()
141
+
142
+ in_path = Path(args.input)
143
+ out_root = Path(args.out)
144
+ module_root = Path(args.module_root).resolve() if args.module_root else None
145
+
146
+ src_files = _scan_hassl_files(in_path)
147
+ if not src_files:
148
+ raise SystemExit(f"[hasslc] No .hassl files found in {in_path}")
149
+
150
+ # Pass 0: parse all and assign/derive package names
151
+ programs: List[tuple[Path, Program, str]] = []
152
+ for p in src_files:
153
+ with open(p, "r", encoding="utf-8") as f:
154
+ text = f.read()
155
+ prog = parse_hassl(text)
156
+ pkg_name = _derive_package_name(prog, p, module_root)
157
+ try:
158
+ prog.package = pkg_name
159
+ except Exception:
160
+ pass
161
+ programs.append((p, prog, pkg_name))
162
+
163
+ # auto-load any missing imports from --module_root
164
+ _ensure_imports_loaded(programs, module_root)
165
+
166
+ # Pass 1: collect public exports across all programs
167
+ GLOBAL_EXPORTS: Dict[Tuple[str,str,str], object] = {}
168
+ for path, prog, pkg in programs:
169
+ GLOBAL_EXPORTS.update(_collect_public_exports(prog, pkg))
170
+
171
+ # publish global exports to analyzer
172
+ sem_analyzer.GLOBAL_EXPORTS = GLOBAL_EXPORTS
173
+
174
+ # Pass 2: analyze each program with global view
175
+ os.makedirs(out_root, exist_ok=True)
176
+ all_ir = []
177
+ for path, prog, pkg in programs:
178
+ print(f"[hasslc] Parsing {path} (package: {pkg})")
179
+ print("[hasslc] AST:", json.dumps(prog.to_dict(), indent=2))
180
+ ir = analyze(prog)
181
+ print("[hasslc] IR:", json.dumps(ir.to_dict(), indent=2))
182
+ all_ir.append((pkg, ir))
183
+
184
+ # Emit: per package subdir
185
+ for pkg, ir in all_ir:
186
+ # One-level output: flatten dotted package id into a single directory name
187
+ # e.g., home.addie.automations -> packages/out/home_addie_automations/
188
+ pkg_dir = out_root / pkg.replace(".", "_")
189
+ print(f"[hasslc] Output directory (flat): {pkg_dir}")
190
+ os.makedirs(pkg_dir, exist_ok=True)
191
+ ir_dict = ir.to_dict() if hasattr(ir, "to_dict") else ir
192
+ codegen_generate(ir_dict, str(pkg_dir))
193
+ emit_package(ir, str(pkg_dir))
194
+ with open(pkg_dir / "DEBUG_ir.json", "w", encoding="utf-8") as dbg:
195
+ dbg.write(json.dumps(ir.to_dict(), indent=2))
196
+ print(f"[hasslc] Package written to {pkg_dir}")
197
+
198
+ # Also drop a cross-project export table for debugging
199
+ with open(out_root / "DEBUG_exports.json", "w", encoding="utf-8") as fp:
200
+ printable = {f"{k[0]}::{k[1]}::{k[2]}": ("Alias" if isinstance(v, Alias) else "Schedule") for k, v in GLOBAL_EXPORTS.items()}
201
+ json.dump(printable, fp, indent=2)
202
+ print(f"[hasslc] Global exports index written to {out_root / 'DEBUG_exports.json'}")
203
+
204
+ if __name__ == "__main__":
205
+ main()
206
+ import argparse
207
+ import os, json, glob
208
+ from pathlib import Path
209
+ from typing import Dict, Tuple, List
210
+ from .parser.loader import load_grammar_text
211
+ from .parser.transform import HasslTransformer
212
+ from .ast.nodes import Program, Alias, Schedule
213
+ from lark import Lark
214
+ from .semantics import analyzer as sem_analyzer
215
+ from .semantics.analyzer import analyze
216
+ from .codegen.package import emit_package
217
+ from .codegen import generate as codegen_generate
12
218
 
13
219
  def parse_hassl(text: str) -> Program:
14
220
  grammar = load_grammar_text()
@@ -17,26 +223,150 @@ def parse_hassl(text: str) -> Program:
17
223
  program = HasslTransformer().transform(tree)
18
224
  return program
19
225
 
226
+ def _derive_package_name(prog: Program, src_path: Path, module_root: Path | None) -> str:
227
+ """
228
+ If the source did not declare `package`, derive one from the path:
229
+ - If module_root is given and src_path is under it: use relative path (dots)
230
+ - Else: use file stem
231
+ """
232
+ if getattr(prog, "package", None):
233
+ return prog.package # declared
234
+ if module_root:
235
+ try:
236
+ rel = src_path.resolve().relative_to(module_root.resolve())
237
+ parts = list(rel.with_suffix("").parts)
238
+ if parts:
239
+ return ".".join(parts)
240
+ except Exception:
241
+ pass
242
+ return src_path.stem
243
+
244
+ def _collect_public_exports(prog: Program, pkg: str) -> Dict[Tuple[str,str,str], object]:
245
+ """
246
+ Build (pkg, kind, name) -> node for public alias/schedule in a single Program.
247
+ Accepts both Schedule nodes and transformer dicts {"type":"schedule_decl",...}.
248
+ """
249
+ out: Dict[Tuple[str,str,str], object] = {}
250
+ # Aliases
251
+ for s in prog.statements:
252
+ if isinstance(s, Alias):
253
+ if not getattr(s, "private", False):
254
+ out[(pkg, "alias", s.name)] = s
255
+ # Schedules (either dicts from transformer or Schedule nodes)
256
+ for s in prog.statements:
257
+ if isinstance(s, Schedule):
258
+ if not getattr(s, "private", False):
259
+ out[(pkg, "schedule", s.name)] = s
260
+ elif isinstance(s, dict) and s.get("type") == "schedule_decl" and not s.get("private", False):
261
+ name = s.get("name")
262
+ if isinstance(name, str) and name.strip():
263
+ out[(pkg, "schedule", name)] = Schedule(name=name, clauses=s.get("clauses", []) or [], private=False)
264
+ return out
265
+
266
+ def _scan_hassl_files(path: Path) -> List[Path]:
267
+ if path.is_file():
268
+ return [path]
269
+ return [Path(p) for p in glob.glob(str(path / "**" / "*.hassl"), recursive=True)]
270
+
271
+ def _module_to_path(module_root: Path, module: str) -> Path:
272
+ return (module_root / Path(module.replace(".", "/"))).with_suffix(".hassl")
273
+
274
+ def _ensure_imports_loaded(programs, module_root: Path):
275
+ """If imported packages aren't parsed yet, try to load their .hassl files from module_root."""
276
+ # programs: List[tuple[Path, Program, str]]
277
+ known_pkgs = {pkg for _, _, pkg in programs}
278
+ added = True
279
+ while added:
280
+ added = False
281
+ for _, prog, _pkg in list(programs):
282
+ for imp in getattr(prog, "imports", []) or []:
283
+ if not isinstance(imp, dict) or imp.get("type") != "import":
284
+ continue
285
+ mod = imp.get("module", "")
286
+ if not mod or mod in known_pkgs:
287
+ continue
288
+ if not module_root:
289
+ continue
290
+ candidate = _module_to_path(module_root, mod)
291
+ if candidate.exists():
292
+ print(f"[hasslc] Autoload candidate FOUND for '{mod}': {candidate}")
293
+ with open(candidate, "r", encoding="utf-8") as f:
294
+ text = f.read()
295
+ p = parse_hassl(text)
296
+ # force package to declared or derived (declared will win)
297
+ pkg_name = p.package or mod
298
+ p.package = pkg_name
299
+ programs.append((candidate, p, pkg_name))
300
+ known_pkgs.add(pkg_name)
301
+ added = True
302
+ else:
303
+ print(f"[hasslc] Autoload candidate MISS for '{mod}': {candidate}")
304
+
20
305
  def main():
21
306
  ap = argparse.ArgumentParser(prog="hasslc", description="HASSL Compiler")
22
- ap.add_argument("input", help="Input .hassl file")
23
- ap.add_argument("-o", "--out", default="./packages/out", help="Output directory for HA package")
307
+ ap.add_argument("input", help="Input .hassl file OR directory")
308
+ ap.add_argument("-o", "--out", default="./packages/out", help="Output directory root for HA package(s)")
309
+ ap.add_argument("--module-root", default=None, help="Optional root to derive package names from paths")
24
310
  args = ap.parse_args()
25
311
 
26
- with open(args.input) as f:
27
- src = f.read()
312
+ in_path = Path(args.input)
313
+ out_root = Path(args.out)
314
+ module_root = Path(args.module_root).resolve() if args.module_root else None
315
+
316
+ src_files = _scan_hassl_files(in_path)
317
+ if not src_files:
318
+ raise SystemExit(f"[hasslc] No .hassl files found in {in_path}")
319
+
320
+ # Pass 0: parse all and assign/derive package names
321
+ programs: List[tuple[Path, Program, str]] = []
322
+ for p in src_files:
323
+ with open(p, "r", encoding="utf-8") as f:
324
+ text = f.read()
325
+ prog = parse_hassl(text)
326
+ pkg_name = _derive_package_name(prog, p, module_root)
327
+ try:
328
+ prog.package = pkg_name
329
+ except Exception:
330
+ pass
331
+ programs.append((p, prog, pkg_name))
332
+
333
+ # auto-load any missing imports from --module_root
334
+ _ensure_imports_loaded(programs, module_root)
335
+
336
+ # Pass 1: collect public exports across all programs
337
+ GLOBAL_EXPORTS: Dict[Tuple[str,str,str], object] = {}
338
+ for path, prog, pkg in programs:
339
+ GLOBAL_EXPORTS.update(_collect_public_exports(prog, pkg))
340
+
341
+ # publish global exports to analyzer
342
+ sem_analyzer.GLOBAL_EXPORTS = GLOBAL_EXPORTS
343
+
344
+ # Pass 2: analyze each program with global view
345
+ os.makedirs(out_root, exist_ok=True)
346
+ all_ir = []
347
+ for path, prog, pkg in programs:
348
+ print(f"[hasslc] Parsing {path} (package: {pkg})")
349
+ print("[hasslc] AST:", json.dumps(prog.to_dict(), indent=2))
350
+ ir = analyze(prog)
351
+ print("[hasslc] IR:", json.dumps(ir.to_dict(), indent=2))
352
+ all_ir.append((pkg, ir))
28
353
 
29
- program = parse_hassl(src)
30
- print("[hasslc] AST:", program.to_dict())
31
- ir = analyze(program)
32
- print("[hasslc] IR:", ir.to_dict())
354
+ # Emit: per package subdir
355
+ for pkg, ir in all_ir:
356
+ pkg_dir = out_root / pkg.replace(".", "_")
357
+ os.makedirs(pkg_dir, exist_ok=True)
358
+ ir_dict = ir.to_dict() if hasattr(ir, "to_dict") else ir
359
+ codegen_generate(ir_dict, str(pkg_dir))
360
+ emit_package(ir, str(pkg_dir))
361
+ with open(pkg_dir / "DEBUG_ir.json", "w", encoding="utf-8") as dbg:
362
+ dbg.write(json.dumps(ir.to_dict(), indent=2))
363
+ print(f"[hasslc] Package written to {pkg_dir}")
33
364
 
34
- ir_dict = ir.to_dict() if hasattr(ir, "to_dict") else ir
35
- codegen_generate(ir_dict, args.out)
36
- print(f"[hasslc] Package written to {args.out}")
365
+ # Also drop a cross-project export table for debugging
366
+ with open(out_root / "DEBUG_exports.json", "w", encoding="utf-8") as fp:
367
+ printable = {f"{k[0]}::{k[1]}::{k[2]}": ("Alias" if isinstance(v, Alias) else "Schedule") for k, v in GLOBAL_EXPORTS.items()}
368
+ json.dump(printable, fp, indent=2)
369
+ print(f"[hasslc] Global exports index written to {out_root / 'DEBUG_exports.json'}")
37
370
 
38
- os.makedirs(args.out, exist_ok=True)
39
- emit_package(ir, args.out)
40
- with open(os.path.join(args.out, "DEBUG_ir.json"), "w") as dbg:
41
- dbg.write(json.dumps(ir.to_dict(), indent=2))
42
- print(f"[hasslc] Package written to {args.out}")
371
+ if __name__ == "__main__":
372
+ main()
hassl/codegen/generate.py ADDED
@@ -0,0 +1,6 @@
1
+ # Minimal wrapper so CLI can import `generate`
2
+ from . import rules_min
3
+
4
+ def generate(ir_dict, outdir: str):
5
+ # delegate to the tested minimal emitter
6
+ return rules_min.generate_rules(ir_dict, outdir)
hassl/codegen/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ from .generate import generate
2
+
3
+ __all__ = ["generate"]