lifx-emulator 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lifx_emulator/__init__.py +31 -0
- lifx_emulator/__main__.py +607 -0
- lifx_emulator/api.py +1825 -0
- lifx_emulator/async_storage.py +308 -0
- lifx_emulator/constants.py +33 -0
- lifx_emulator/device.py +750 -0
- lifx_emulator/device_states.py +114 -0
- lifx_emulator/factories.py +380 -0
- lifx_emulator/handlers/__init__.py +39 -0
- lifx_emulator/handlers/base.py +49 -0
- lifx_emulator/handlers/device_handlers.py +340 -0
- lifx_emulator/handlers/light_handlers.py +372 -0
- lifx_emulator/handlers/multizone_handlers.py +249 -0
- lifx_emulator/handlers/registry.py +110 -0
- lifx_emulator/handlers/tile_handlers.py +309 -0
- lifx_emulator/observers.py +139 -0
- lifx_emulator/products/__init__.py +28 -0
- lifx_emulator/products/generator.py +771 -0
- lifx_emulator/products/registry.py +1446 -0
- lifx_emulator/products/specs.py +242 -0
- lifx_emulator/products/specs.yml +327 -0
- lifx_emulator/protocol/__init__.py +1 -0
- lifx_emulator/protocol/base.py +334 -0
- lifx_emulator/protocol/const.py +8 -0
- lifx_emulator/protocol/generator.py +1371 -0
- lifx_emulator/protocol/header.py +159 -0
- lifx_emulator/protocol/packets.py +1351 -0
- lifx_emulator/protocol/protocol_types.py +844 -0
- lifx_emulator/protocol/serializer.py +379 -0
- lifx_emulator/scenario_manager.py +402 -0
- lifx_emulator/scenario_persistence.py +206 -0
- lifx_emulator/server.py +482 -0
- lifx_emulator/state_restorer.py +259 -0
- lifx_emulator/state_serializer.py +130 -0
- lifx_emulator/storage_protocol.py +100 -0
- lifx_emulator-1.0.0.dist-info/METADATA +445 -0
- lifx_emulator-1.0.0.dist-info/RECORD +40 -0
- lifx_emulator-1.0.0.dist-info/WHEEL +4 -0
- lifx_emulator-1.0.0.dist-info/entry_points.txt +2 -0
- lifx_emulator-1.0.0.dist-info/licenses/LICENSE +35 -0
|
@@ -0,0 +1,1371 @@
|
|
|
1
|
+
"""Code generator for LIFX protocol structures.
|
|
2
|
+
|
|
3
|
+
Downloads the official protocol.yml from the LIFX GitHub repository and
|
|
4
|
+
generates Python types and packet classes. The YAML is never stored locally,
|
|
5
|
+
only parsed and converted into protocol classes.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import re
|
|
11
|
+
import sys
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Any
|
|
14
|
+
from urllib.request import urlopen
|
|
15
|
+
|
|
16
|
+
import yaml
|
|
17
|
+
|
|
18
|
+
from lifx_emulator.protocol.const import PROTOCOL_URL
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class TypeRegistry:
    """Registry of all protocol types for validation.

    Tracks every defined type (enums, field structures, packets, unions)
    so that type references found in the protocol specification can be
    checked against the set of known names.
    """

    def __init__(self) -> None:
        """Initialize an empty registry pre-seeded with the basic wire types."""
        self._enums: set[str] = set()
        self._fields: set[str] = set()
        self._packets: set[str] = set()
        self._unions: set[str] = set()
        # Primitive types the serializer understands directly; "reserved"
        # is a special marker for unnamed padding fields.
        self._basic_types: set[str] = {
            "uint8",
            "uint16",
            "uint32",
            "uint64",
            "int8",
            "int16",
            "int32",
            "int64",
            "float32",
            "bool",
            "byte",
            "reserved",  # Special type for reserved fields
        }

    def _categories(self) -> tuple[set[str], ...]:
        """Return every category of registered names, basic types included."""
        return (
            self._enums,
            self._fields,
            self._packets,
            self._unions,
            self._basic_types,
        )

    def register_enum(self, name: str) -> None:
        """Record *name* as an enum type.

        Args:
            name: Enum type name
        """
        self._enums.add(name)

    def register_field(self, name: str) -> None:
        """Record *name* as a field structure type.

        Args:
            name: Field structure type name
        """
        self._fields.add(name)

    def register_packet(self, name: str) -> None:
        """Record *name* as a packet type.

        Args:
            name: Packet type name
        """
        self._packets.add(name)

    def register_union(self, name: str) -> None:
        """Record *name* as a union type.

        Args:
            name: Union type name
        """
        self._unions.add(name)

    def is_enum(self, name: str) -> bool:
        """Check whether *name* was registered as an enum.

        Args:
            name: Type name to check

        Returns:
            True if the type is an enum
        """
        return name in self._enums

    def has_type(self, name: str) -> bool:
        """Check whether *name* is defined in any category.

        Args:
            name: Type name to check

        Returns:
            True if the type is defined
        """
        return any(name in category for category in self._categories())

    def get_all_types(self) -> set[str]:
        """Return the union of every registered type name.

        Returns:
            Set of all type names
        """
        combined: set[str] = set()
        for category in self._categories():
            combined |= category
        return combined
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def to_snake_case(name: str) -> str:
    """Convert PascalCase or camelCase to snake_case.

    Args:
        name: PascalCase or camelCase string

    Returns:
        snake_case string
    """
    pieces: list[str] = []
    for index, char in enumerate(name):
        # Every ASCII uppercase letter (except at the start) begins a new word.
        if index and "A" <= char <= "Z":
            pieces.append("_")
        pieces.append(char)
    return "".join(pieces).lower()
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def format_long_import(
    items: list[str], prefix: str = "from lifx_emulator.protocol.protocol_types import "
) -> str:
    """Format a long import statement across multiple lines.

    Args:
        items: List of import items (e.g., ["Foo", "Bar as BazAlias"])
        prefix: Import prefix

    Returns:
        Formatted import string with line breaks if needed (always
        newline-terminated); empty string for an empty item list.
    """
    if not items:
        return ""

    # Try single line first
    single_line = prefix + ", ".join(items)
    if len(single_line) <= 120:
        return single_line + "\n"

    # Multi-line format: one item per line, each with a trailing comma
    # (black-style parenthesized import). The original if/else here had
    # identical branches, so the loop is collapsed to a single extend.
    lines = [prefix + "("]
    lines.extend(f"    {item}," for item in items)
    lines.append(")")
    return "\n".join(lines) + "\n"
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def format_long_list(items: list[dict[str, Any]], max_line_length: int = 120) -> str:
    """Format a long list across multiple lines.

    Args:
        items: List of dict items to format
        max_line_length: Maximum line length before wrapping

    Returns:
        Formatted list string ("[]" for an empty list; single-line repr
        when it fits, otherwise one item per line)
    """
    if not items:
        return "[]"

    # Try single line first
    single_line = repr(items)
    if len(single_line) <= max_line_length:
        return single_line

    # Multi-line format with one item per line, each with a trailing comma.
    # The original if/else here had identical branches; collapsed.
    lines = ["["]
    lines.extend(f"    {item!r}," for item in items)
    lines.append("]")
    return "\n".join(lines)
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def parse_field_type(field_type: str) -> tuple[str, int | None, bool]:
    """Parse a field type string.

    Args:
        field_type: Field type (e.g., 'uint16', '[32]uint8', '<HSBK>')

    Returns:
        Tuple of (base_type, array_count, is_nested)
        - base_type: The base type name
        - array_count: Number of elements if array, None otherwise
        - is_nested: True if it's a nested structure (<Type>)
    """
    count: int | None = None
    remainder = field_type

    # Strip an optional array prefix of the form [N].
    array_prefix = re.match(r"\[(\d+)\](.+)", remainder)
    if array_prefix:
        count = int(array_prefix.group(1))
        remainder = array_prefix.group(2)

    # Angle brackets mark a reference to a nested structure type.
    if remainder.startswith("<") and remainder.endswith(">"):
        return remainder[1:-1], count, True

    return remainder, count, False
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def camel_to_snake_upper(name: str) -> str:
    """Convert CamelCase to UPPER_SNAKE_CASE.

    Args:
        name: CamelCase string

    Returns:
        UPPER_SNAKE_CASE string
    """
    chars: list[str] = []
    for position, char in enumerate(name):
        # Each ASCII uppercase letter after the first character starts a new word.
        if position and "A" <= char <= "Z":
            chars.append("_")
        chars.append(char)
    return "".join(chars).upper()
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
def generate_enum_code(enums: dict[str, Any]) -> str:
    """Generate Python Enum definitions with shortened names.

    Args:
        enums: Dictionary of enum definitions

    Returns:
        Python code string
    """
    lines: list[str] = []

    for enum_name, enum_def in sorted(enums.items()):
        lines.append(f"class {enum_name}(IntEnum):")
        lines.append('    """Auto-generated enum."""')
        lines.append("")

        if isinstance(enum_def, dict) and "values" in enum_def:
            # New format: {type: "uint16", values: [{name: "X", value: 1}, ...]}
            lines.extend(_render_enum_members(enum_name, enum_def["values"]))
        else:
            # Old format: {MEMBER: value, ...} sorted by numeric value.
            for member, value in sorted(enum_def.items(), key=lambda pair: pair[1]):
                lines.append(f"    {member} = {value}")

        lines.append("")
        lines.append("")

    return "\n".join(lines)


def _render_enum_members(enum_name: str, values: list[dict[str, Any]]) -> list[str]:
    """Render new-format enum members, shortening redundant name prefixes."""
    # A redundant "<ENUM_NAME>_" prefix is stripped only when every
    # non-reserved member carries it.
    prefix = re.sub(r"(?<!^)(?=[A-Z])", "_", enum_name).upper() + "_"
    named = [item["name"] for item in values if item["name"].lower() != "reserved"]
    strip_prefix = bool(named) and all(n.startswith(prefix) for n in named)

    rendered: list[str] = []
    reserved_seen = 0
    for item in sorted(values, key=lambda entry: entry["value"]):
        raw_name = item["name"]
        if raw_name.lower() == "reserved":
            # Reserved slots get unique RESERVED_<n> member names.
            member = f"RESERVED_{reserved_seen}"
            reserved_seen += 1
        elif strip_prefix and raw_name.startswith(prefix):
            member = raw_name[len(prefix):]
        else:
            member = raw_name
        rendered.append(f"    {member} = {item['value']}")
    return rendered
|
|
300
|
+
|
|
301
|
+
|
|
302
|
+
def convert_type_to_python(
    field_type: str, type_aliases: dict[str, str] | None = None
) -> str:
    """Convert a protocol field type to a Python type annotation.

    Args:
        field_type: Protocol field type string
        type_aliases: Optional dict mapping type names to their aliases
            (for collision resolution)

    Returns:
        Python type annotation string
    """
    aliases = {} if type_aliases is None else type_aliases
    base_type, count, nested = parse_field_type(field_type)

    if count:
        # Arrays: nested structures -> list[T]; byte-ish -> bytes; else ints.
        if nested:
            return f"list[{aliases.get(base_type, base_type)}]"
        if base_type in ("uint8", "byte"):
            # Special case: byte arrays map to a single bytes value.
            return "bytes"
        return "list[int]"

    if nested:
        # Scalar nested structure; use alias if one exists.
        return aliases.get(base_type, base_type)

    if base_type in ("uint8", "uint16", "uint32", "uint64"):
        return "int"
    if base_type in ("int8", "int16", "int32", "int64"):
        return "int"
    if base_type == "float32":
        return "float"
    if base_type == "bool":
        return "bool"
    return "Any"
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
def generate_pack_method(
    fields_data: list[dict[str, Any]],
    class_type: str = "field",
    enum_types: set[str] | None = None,
) -> str:
    """Generate pack() method source code for a field structure or packet.

    The emitted method serializes every entry of ``fields_data`` in order,
    delegating to the runtime ``serializer`` module, and returns the
    concatenated bytes.

    Args:
        fields_data: List of field definitions. Each item has a "type" key;
            named fields also carry "name" and usually "size_bytes", while
            reserved padding entries have no "name".
        class_type: Either "field" or "packet".
        enum_types: Set of enum type names for detection

    Returns:
        Python method code string
    """
    # NOTE(review): class_type is currently unused in this function —
    # confirm whether packet-specific packing behavior was intended.
    if enum_types is None:
        enum_types = set()

    code = []
    code.append("    def pack(self) -> bytes:")
    code.append('        """Pack to bytes."""')
    # The generated method imports the serializer locally, presumably to
    # avoid a circular import at module import time — confirm.
    code.append("        from lifx_emulator.protocol import serializer")
    code.append('        result = b""')
    code.append("")

    for field_item in fields_data:
        # Handle reserved fields (no name): emitted as zero-byte padding.
        if "name" not in field_item:
            size_bytes = field_item.get("size_bytes", 0)
            code.append(f"        # Reserved {size_bytes} bytes")
            code.append(f"        result += serializer.pack_reserved({size_bytes})")
            continue

        protocol_name = field_item["name"]
        field_type = field_item["type"]
        size_bytes = field_item.get("size_bytes", 0)
        python_name = to_snake_case(protocol_name)

        base_type, array_count, is_nested = parse_field_type(field_type)

        # Check if this is an enum: enums appear in the spec as nested
        # types (<Name>) but must be packed as integers, not via .pack().
        is_enum = is_nested and base_type in enum_types

        # Handle different field types
        if array_count:
            if is_enum:
                # Array of enums - pack as array of ints.
                # NOTE(review): the width is hard-coded to 'uint8'; confirm
                # no protocol enum array uses a wider integer type.
                code.append(f"        # {python_name}: list[{base_type}] (enum array)")
                code.append(f"        for item in self.{python_name}:")
                code.append(
                    "            result += serializer.pack_value(int(item), 'uint8')"
                )
            elif is_nested:
                # Array of nested structures - each element packs itself.
                code.append(f"        # {python_name}: list[{base_type}]")
                code.append(f"        for item in self.{python_name}:")
                code.append("            result += item.pack()")
            elif base_type in ("uint8", "byte"):
                # Byte array - stored on the dataclass as a single bytes value.
                code.append(f"        # {python_name}: bytes ({size_bytes} bytes)")
                pack_line = (
                    f"        result += serializer.pack_bytes("
                    f"self.{python_name}, {size_bytes})"
                )
                code.append(pack_line)
            else:
                # Array of primitives
                code.append(f"        # {python_name}: list[{base_type}]")
                pack_array = (
                    f"        result += serializer.pack_array("
                    f"self.{python_name}, '{base_type}', {array_count})"
                )
                code.append(pack_array)
        elif is_enum:
            # Scalar enum - pack as int.
            # NOTE(review): width hard-coded to 'uint8' — confirm against
            # the protocol spec (mirrors generate_unpack_method).
            code.append(f"        # {python_name}: {base_type} (enum)")
            pack_enum = (
                f"        result += serializer.pack_value("
                f"int(self.{python_name}), 'uint8')"
            )
            code.append(pack_enum)
        elif is_nested:
            # Scalar nested structure
            code.append(f"        # {python_name}: {base_type}")
            code.append(f"        result += self.{python_name}.pack()")
        else:
            # Scalar primitive type
            code.append(f"        # {python_name}: {base_type}")
            pack_prim = (
                f"        result += serializer.pack_value("
                f"self.{python_name}, '{base_type}')"
            )
            code.append(pack_prim)

    code.append("")
    code.append("        return result")

    return "\n".join(code)
|
|
443
|
+
|
|
444
|
+
|
|
445
|
+
def generate_unpack_method(
    class_name: str,
    fields_data: list[dict[str, Any]],
    class_type: str = "field",
    enum_types: set[str] | None = None,
) -> str:
    """Generate unpack() classmethod source for a field structure or packet.

    The emitted classmethod reads each field of ``fields_data`` in wire
    order from ``data`` starting at ``offset`` and returns
    ``(instance, new_offset)``.

    Args:
        class_name: Name of the class (used in the return annotation and
            as the constructor in the generated code)
        fields_data: List of field definitions; reserved padding entries
            have no "name" key and are skipped without producing a field
        class_type: Either "field" or "packet"
        enum_types: Set of enum type names for detection

    Returns:
        Python method code string
    """
    # NOTE(review): class_type is currently unused in this function —
    # kept for signature symmetry with generate_pack_method.
    if enum_types is None:
        enum_types = set()

    code = []
    code.append("    @classmethod")
    unpack_sig = (
        f"    def unpack(cls, data: bytes, offset: int = 0) -> "
        f"tuple[{class_name}, int]:"
    )
    code.append(unpack_sig)
    code.append('        """Unpack from bytes."""')
    code.append("        from lifx_emulator.protocol import serializer")
    code.append("        current_offset = offset")

    # Names of the local variables the generated code assigns, in order;
    # they become the keyword arguments of the final cls(...) call.
    field_vars = []

    for field_item in fields_data:
        # Handle reserved fields (no name): just advance the offset.
        if "name" not in field_item:
            size_bytes = field_item.get("size_bytes", 0)
            code.append(f"        # Skip reserved {size_bytes} bytes")
            code.append(f"        current_offset += {size_bytes}")
            continue

        protocol_name = field_item["name"]
        field_type = field_item["type"]
        size_bytes = field_item.get("size_bytes", 0)
        python_name = to_snake_case(protocol_name)
        field_vars.append(python_name)

        base_type, array_count, is_nested = parse_field_type(field_type)

        # Enums appear as nested types (<Name>) in the spec but are read
        # as raw ints and then converted to the enum class.
        is_enum = is_nested and base_type in enum_types

        # Handle different field types
        if array_count:
            if is_enum:
                # Array of enums.
                # NOTE(review): width hard-coded to 'uint8' — mirrors
                # generate_pack_method; confirm against the protocol spec.
                code.append(f"        # {python_name}: list[{base_type}] (enum array)")
                code.append(f"        {python_name} = []")
                code.append(f"        for _ in range({array_count}):")
                unpack_enum_item = (
                    "            item_raw, current_offset = "
                    "serializer.unpack_value(data, 'uint8', current_offset)"
                )
                code.append(unpack_enum_item)
                code.append(f"            {python_name}.append({base_type}(item_raw))")
            elif is_nested:
                # Array of nested structures - each element unpacks itself.
                code.append(f"        # {python_name}: list[{base_type}]")
                code.append(f"        {python_name} = []")
                code.append(f"        for _ in range({array_count}):")
                unpack_nested = (
                    f"            item, current_offset = "
                    f"{base_type}.unpack(data, current_offset)"
                )
                code.append(unpack_nested)
                code.append(f"            {python_name}.append(item)")
            elif base_type in ("uint8", "byte"):
                # Byte array
                code.append(f"        # {python_name}: bytes ({size_bytes} bytes)")
                code.append(
                    f"        {python_name}, current_offset = serializer.unpack_bytes("
                )
                code.append(f"            data, {size_bytes}, current_offset")
                code.append("        )")
            else:
                # Array of primitives
                code.append(f"        # {python_name}: list[{base_type}]")
                code.append(
                    f"        {python_name}, current_offset = serializer.unpack_array("
                )
                code.append(
                    f"            data, '{base_type}', {array_count}, current_offset"
                )
                code.append("        )")
        elif is_enum:
            # Scalar enum - unpack as int then convert to the enum class.
            code.append(f"        # {python_name}: {base_type} (enum)")
            unpack_enum = (
                f"        {python_name}_raw, current_offset = "
                f"serializer.unpack_value(data, 'uint8', current_offset)"
            )
            code.append(unpack_enum)
            code.append(f"        {python_name} = {base_type}({python_name}_raw)")
        elif is_nested:
            # Scalar nested structure
            code.append(f"        # {python_name}: {base_type}")
            unpack_nest = (
                f"        {python_name}, current_offset = "
                f"{base_type}.unpack(data, current_offset)"
            )
            code.append(unpack_nest)
        else:
            # Scalar primitive type
            code.append(f"        # {python_name}: {base_type}")
            unpack_prim = (
                f"        {python_name}, current_offset = "
                f"serializer.unpack_value(data, '{base_type}', current_offset)"
            )
            code.append(unpack_prim)

    code.append("")
    # Create instance - format long return statements
    field_args = ", ".join([f"{name}={name}" for name in field_vars])
    return_stmt = f"        return cls({field_args}), current_offset"

    # If too long, break across multiple lines. The original if/else inside
    # this loop had identical branches, so it is collapsed to one extend.
    if len(return_stmt) > 120:
        code.append("        return (")
        code.append("            cls(")
        code.extend(f"                {name}={name}," for name in field_vars)
        code.append("            ),")
        code.append("            current_offset,")
        code.append("        )")
    else:
        code.append(return_stmt)

    return "\n".join(code)
|
|
587
|
+
|
|
588
|
+
|
|
589
|
+
def generate_field_code(
    fields: dict[str, Any],
    compound_fields: dict[str, Any] | None = None,
    unions: dict[str, Any] | None = None,
    packets_as_fields: dict[str, Any] | None = None,
    enum_types: set[str] | None = None,
) -> tuple[str, dict[str, dict[str, str]]]:
    """Generate Python dataclass definitions for field structures.

    Each structure becomes a @dataclass with snake_case attributes plus
    generated pack()/unpack() methods. A mapping from Python attribute
    names back to protocol field names is returned alongside the code.

    Args:
        fields: Dictionary of field definitions
        compound_fields: Dictionary of compound field definitions
        unions: Dictionary of union definitions (treated as fields)
        packets_as_fields: Dictionary of packets that are also used as field types
        enum_types: Set of enum type names

    Returns:
        Tuple of (code string, field mappings dict)
        Field mappings: {ClassName: {python_name: protocol_name}}
    """
    if enum_types is None:
        enum_types = set()

    code = []
    field_mappings: dict[str, dict[str, str]] = {}
    # Merge all sources into one dict; later groups overwrite earlier ones
    # on a name collision (unions/packets win over plain fields).
    all_fields = {**fields}
    if compound_fields:
        all_fields.update(compound_fields)
    if unions:
        all_fields.update(unions)
    if packets_as_fields:
        all_fields.update(packets_as_fields)

    for field_name, field_def in sorted(all_fields.items()):
        code.append("@dataclass")
        code.append(f"class {field_name}:")

        # Check if this is a union (has comment indicating it's a union).
        # NOTE(review): the presence of a "comment" key is the only union
        # marker — confirm that non-union definitions never carry one.
        is_union = isinstance(field_def, dict) and "comment" in field_def
        if is_union:
            code.append(
                f'    """Auto-generated union structure. {field_def.get("comment", "")}"""'
            )
        else:
            code.append('    """Auto-generated field structure."""')
        code.append("")

        field_map: dict[str, str] = {}
        fields_data = []

        # Handle both old format (dict) and new format (list of dicts)
        if isinstance(field_def, dict) and "fields" in field_def:
            # New format: {size_bytes: N, fields: [{name: "X", type: "uint16"}, ...]}
            field_list = field_def["fields"]

            # For unions, treat as a raw bytes field (members overlay the
            # same storage, so just keep the raw data).
            if is_union:
                size_bytes = field_def.get("size_bytes", 16)
                code.append(f"    data: bytes  # Union of {size_bytes} bytes")
                field_map["data"] = "data"
                # For pack/unpack, use a synthetic bytes field definition.
                fields_data = [
                    {
                        "name": "data",
                        "type": f"[{size_bytes}]byte",
                        "size_bytes": size_bytes,
                    }
                ]
            else:
                # Normal field structure - process all fields
                fields_data = field_list  # Save for pack/unpack generation
                for field_item in field_list:
                    # Skip reserved fields without names (they won't be in
                    # the dataclass, but pack/unpack still honors them).
                    if "name" not in field_item:
                        continue
                    protocol_name = field_item["name"]
                    attr_type = field_item["type"]
                    python_name = to_snake_case(protocol_name)
                    python_type = convert_type_to_python(attr_type)
                    code.append(f"    {python_name}: {python_type}")
                    field_map[python_name] = protocol_name
        else:
            # Old format: {attr_name: type, ...}
            # Convert to new format for pack/unpack generation
            for protocol_name, attr_type in field_def.items():
                python_name = to_snake_case(protocol_name)
                python_type = convert_type_to_python(attr_type)
                code.append(f"    {python_name}: {python_type}")
                field_map[python_name] = protocol_name
                # Build fields_data for old format
                fields_data.append({"name": protocol_name, "type": attr_type})

        field_mappings[field_name] = field_map

        # Add pack/unpack methods (skipped for definitions with no fields).
        if fields_data:
            code.append("")
            code.append(generate_pack_method(fields_data, "field", enum_types))
            code.append("")
            code.append(
                generate_unpack_method(field_name, fields_data, "field", enum_types)
            )

        code.append("")
        code.append("")

    return "\n".join(code), field_mappings
|
|
697
|
+
|
|
698
|
+
|
|
699
|
+
def generate_nested_packet_code(
    packets: dict[str, Any], type_aliases: dict[str, str] | None = None
) -> str:
    """Generate nested Python packet class definitions.

    Emits one outer class per packet category (e.g. Device, Light), each
    containing a @dataclass per packet that carries its wire type number,
    raw field metadata and request/response metadata.

    Args:
        packets: Dictionary of packet definitions (grouped by category)
        type_aliases: Optional dict mapping type names to their aliases (for collision resolution)

    Returns:
        Python code string with nested packet classes
    """
    if type_aliases is None:
        type_aliases = {}

    code = []

    # Flatten packets if they're grouped by category
    flat_packets: list[tuple[str, str, dict[str, Any]]] = []

    # Check if packets are grouped by category (new format).
    # NOTE(review): only the first entry is sampled to decide the format;
    # if that sample is not a dict, every packet is silently dropped —
    # confirm inputs can never take that shape.
    sample_key = next(iter(packets.keys())) if packets else None
    if sample_key and isinstance(packets[sample_key], dict):
        sample_value = packets[sample_key]
        # Check if this is a category grouping (contains nested packet dicts)
        if any(isinstance(v, dict) and "pkt_type" in v for v in sample_value.values()):
            # New format: grouped by category
            for category, category_packets in packets.items():
                for packet_name, packet_def in category_packets.items():
                    flat_packets.append((category, packet_name, packet_def))
        else:
            # Old format: flat packets with category field
            for packet_name, packet_def in packets.items():
                category = packet_def.get("category", "misc")
                flat_packets.append((category, packet_name, packet_def))

    # Group by category
    categories: dict[str, list[tuple[str, dict[str, Any]]]] = {}
    for category, packet_name, packet_def in flat_packets:
        if category not in categories:
            categories[category] = []
        categories[category].append((packet_name, packet_def))

    # Generate category classes with nested packet classes
    for category in sorted(categories.keys()):
        # Generate category class
        # Quirk: Convert category names to proper camel case (multi_zone -> MultiZone)
        # Split on underscores, capitalize each part, then join
        parts = category.split("_")
        category_class = "".join(part.capitalize() for part in parts)
        code.append("")
        code.append(f"class {category_class}(Packet):")
        code.append(f'    """{category_class} category packets."""')
        code.append("")

        # Generate nested packet classes
        for packet_name, packet_def in sorted(categories[category]):
            pkt_type = packet_def["pkt_type"]
            fields_data = packet_def.get("fields", [])

            # Remove category prefix from packet name (e.g. DeviceGetLabel -> GetLabel).
            # The packet name format is: CategoryActionTarget (e.g., DeviceGetLabel, LightSetColor).
            # Use case-insensitive matching to handle multi_zone -> Multizone -> MultiZone.
            short_name = packet_name
            if packet_name.lower().startswith(category_class.lower()):
                short_name = packet_name[len(category_class):]

            # Quirk: Rename Light.Get/Set/State to Light.GetColor/SetColor/StateColor
            # for better clarity (Set and SetColor are different packets).
            # NOTE(review): only "Get" and "State" are remapped here; the
            # comment also mentions SetColor — confirm "Set" needs no rename.
            if category_class == "Light":
                if short_name == "Get":
                    short_name = "GetColor"
                elif short_name == "State":
                    short_name = "StateColor"

            code.append("    @dataclass")
            code.append(f"    class {short_name}(Packet):")
            code.append(f'        """Packet type {pkt_type}."""')
            code.append("")
            code.append(f"        PKT_TYPE: ClassVar[int] = {pkt_type}")

            # Format fields_data - split long lists across multiple lines.
            # Account for the prefix "        _fields: ClassVar[...] = "
            # (roughly 50 characters), hence the reduced limit of 70.
            fields_repr = format_long_list(fields_data, max_line_length=70)
            if "\n" in fields_repr:
                # Multi-line format - indent properly inside parentheses.
                code.append("        _fields: ClassVar[list[dict[str, Any]]] = (")
                for line in fields_repr.split("\n"):
                    if line.strip():
                        code.append(f"            {line}")
                code.append("        )")
            else:
                code.append(
                    f"        _fields: ClassVar[list[dict[str, Any]]] = {fields_repr}"
                )

            # Add packet metadata for smart request handling.
            # Classify packet by name pattern: Get*, Set*, State*, or OTHER.
            packet_kind = "OTHER"
            if short_name.startswith("Get"):
                packet_kind = "GET"
            elif short_name.startswith("Set"):
                packet_kind = "SET"
            elif short_name.startswith("State"):
                packet_kind = "STATE"

            # Quirk: CopyFrameBuffer is semantically a SET operation —
            # it modifies device state without returning data.
            if category_class == "Tile" and short_name == "CopyFrameBuffer":
                packet_kind = "SET"

            code.append("")
            code.append("        # Packet metadata for automatic handling")
            code.append(f"        _packet_kind: ClassVar[str] = {repr(packet_kind)}")

            # Requires acknowledgement/response based on packet kind:
            # GET requests: ack_required=False, res_required=False (device responds anyway)
            # SET requests: ack_required=True, res_required=False (need acknowledgement)
            requires_ack = packet_kind == "SET"
            requires_response = False
            code.append(f"        _requires_ack: ClassVar[bool] = {requires_ack}")
            code.append(
                f"        _requires_response: ClassVar[bool] = {requires_response}"
            )
            code.append("")

            # Generate dataclass fields (only non-reserved)
            has_fields = False
            if isinstance(fields_data, list):
                for field_item in fields_data:
                    # Skip reserved fields
                    if "name" not in field_item:
                        continue
                    protocol_name = field_item["name"]
                    field_type = field_item["type"]
                    python_name = to_snake_case(protocol_name)
                    python_type = convert_type_to_python(field_type, type_aliases)
                    code.append(f"        {python_name}: {python_type}")
                    has_fields = True

            # Dataclasses with no payload still need a body.
            if not has_fields:
                code.append("        pass")

            code.append("")

        code.append("")

    return "\n".join(code)
|
|
847
|
+
|
|
848
|
+
|
|
849
|
+
def generate_types_file(
    enums: dict[str, Any],
    fields: dict[str, Any],
    compound_fields: dict[str, Any] | None = None,
    unions: dict[str, Any] | None = None,
    packets_as_fields: dict[str, Any] | None = None,
) -> str:
    """Generate complete types.py file.

    Emits, in order: a static module header, the enum classes, the field
    dataclasses, convenience type aliases, and the FIELD_MAPPINGS constant
    that maps Python snake_case field names back to protocol names.

    Args:
        enums: Enum definitions
        fields: Field structure definitions
        compound_fields: Compound field definitions
        unions: Union definitions
        packets_as_fields: Packets that are also used as field types

    Returns:
        Complete Python file content
    """
    # Static header of the generated module: warning docstring plus the
    # imports every generated type needs (dataclass, IntEnum).
    header = '''"""Auto-generated LIFX protocol types.

DO NOT EDIT THIS FILE MANUALLY.
Generated from https://github.com/LIFX/public-protocol/blob/main/protocol.yml
by protocol/generator.py

Uses Pythonic naming conventions (snake_case fields, shortened enums) while
maintaining compatibility with the official LIFX protocol through mappings.
"""

from __future__ import annotations

from dataclasses import dataclass
from enum import IntEnum


'''

    code = header
    code += generate_enum_code(enums)
    code += "\n"

    # Extract enum names for pack/unpack generation
    enum_names = set(enums.keys())

    # generate_field_code also returns the per-class Python->protocol
    # field-name mappings, emitted below as FIELD_MAPPINGS.
    field_code, field_mappings = generate_field_code(
        fields, compound_fields, unions, packets_as_fields, enum_names
    )
    code += field_code
    code += "\n"

    # Add type aliases for common names
    code += "# Type aliases for convenience\n"
    # Merge every structure namespace so the alias check sees all of them.
    all_field_names = {
        **fields,
        **(compound_fields or {}),
        **(unions or {}),
        **(packets_as_fields or {}),
    }
    if "TileStateDevice" in all_field_names:
        code += "TileDevice = TileStateDevice # Pythonic alias\n"
    code += "\n"

    # Add field name mappings as module-level constant (formatted for readability)
    code += "# Field name mappings: Python name -> Protocol name\n"
    code += "# Used by serializer to translate between conventions\n"
    code += "FIELD_MAPPINGS: dict[str, dict[str, str]] = {\n"
    for class_name in sorted(field_mappings.keys()):
        mappings = field_mappings[class_name]
        # Format each class mapping - if too long, break it into multiple lines
        mappings_str = repr(mappings)
        line = f" {repr(class_name)}: {mappings_str},"
        if len(line) > 120:
            # Multi-line format: one "py_name: proto_name" pair per line,
            # sorted so regeneration is deterministic.
            code += f" {repr(class_name)}: {{\n"
            for py_name, proto_name in sorted(mappings.items()):
                code += f" {repr(py_name)}: {repr(proto_name)},\n"
            code += " },\n"
        else:
            code += line + "\n"
    code += "}\n"
    code += "\n"

    return code
|
|
932
|
+
|
|
933
|
+
|
|
934
|
+
def generate_packets_file(
    packets: dict[str, Any],
    fields: dict[str, Any],
    compound_fields: dict[str, Any] | None = None,
    unions: dict[str, Any] | None = None,
    packets_as_fields: dict[str, Any] | None = None,
    enums: dict[str, Any] | None = None,
) -> str:
    """Generate complete packets.py file.

    Scans all packet definitions for the field/enum types they reference,
    emits the needed imports (aliasing any name that collides with a packet
    category class), generates the nested packet classes, and appends the
    PACKET_REGISTRY mapping plus a get_packet_class() lookup helper.

    Args:
        packets: Packet definitions
        fields: Field definitions (for imports)
        compound_fields: Compound field definitions (for imports)
        unions: Union definitions (for imports)
        packets_as_fields: Packets that are also used as field types (for imports)
        enums: Enum definitions for detecting enum types

    Returns:
        Complete Python file content
    """
    # Extract enum names for pack/unpack generation
    enum_names = set(enums.keys()) if enums else set()

    # Collect all field types and enum types used in packets
    used_fields = set()
    used_enums = set()
    # One flat namespace of every structure that could appear as a field type.
    all_fields = {**fields}
    if compound_fields:
        all_fields.update(compound_fields)
    if unions:
        all_fields.update(unions)
    if packets_as_fields:
        all_fields.update(packets_as_fields)

    # Flatten packets to scan for used field types
    flat_packets: list[dict[str, Any]] = []
    for value in packets.values():
        if isinstance(value, dict):
            # Check if this is a category grouping
            if any(isinstance(v, dict) and "pkt_type" in v for v in value.values()):
                # New format: grouped by category
                for packet_def in value.values():
                    flat_packets.append(packet_def)
            elif "pkt_type" in value:
                # Old format: direct packet
                flat_packets.append(value)

    for packet_def in flat_packets:
        fields_data = packet_def.get("fields", [])
        # Handle both list and dict formats
        if isinstance(fields_data, list):
            for field_item in fields_data:
                if "type" in field_item:
                    field_type = field_item["type"]
                    base_type, _, is_nested = parse_field_type(field_type)
                    if is_nested:
                        # Nested types resolve either to a structure or an enum;
                        # each gets imported from protocol_types.
                        if base_type in all_fields:
                            used_fields.add(base_type)
                        elif base_type in enum_names:
                            used_enums.add(base_type)
        elif isinstance(fields_data, dict):
            for field_type in fields_data.values():
                base_type, _, is_nested = parse_field_type(field_type)
                if is_nested:
                    if base_type in all_fields:
                        used_fields.add(base_type)
                    elif base_type in enum_names:
                        used_enums.add(base_type)

    # Generate imports with collision detection
    imports = ""
    all_imports = sorted(used_fields | used_enums)
    if all_imports:
        # Detect name collisions with packet category names
        category_names = set()
        for category in packets.keys():
            if isinstance(packets[category], dict):
                # Convert category name to class name (same as in generate_nested_packet_code)
                parts = category.split("_")
                category_class = "".join(part.capitalize() for part in parts)
                category_names.add(category_class)

        # Generate import list with aliases for collisions
        import_items = []
        type_aliases = {}  # Map original name to aliased name
        for name in all_imports:
            if name in category_names:
                # Use alias to avoid collision
                aliased_name = f"{name}Field"
                import_items.append(f"{name} as {aliased_name}")
                type_aliases[name] = aliased_name
            else:
                import_items.append(name)

        imports = format_long_import(import_items) + "\n"
    else:
        # No imports needed; type_aliases must still exist for the call below.
        # (imports is already "" from the initializer; kept for symmetry.)
        type_aliases = {}
        imports = ""

    # Header of the generated module; {imports} splices in the (possibly
    # empty) protocol_types import block built above.
    header = f'''"""Auto-generated LIFX protocol packets.

DO NOT EDIT THIS FILE MANUALLY.
Generated from https://github.com/LIFX/public-protocol/blob/main/protocol.yml
by protocol/generator.py

Uses nested packet classes organized by category (Device, Light, etc.).
Each packet inherits from base Packet class which provides generic pack/unpack.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, ClassVar

from lifx_emulator.protocol.base import Packet
{imports}
'''

    code = header
    packet_code = generate_nested_packet_code(packets, type_aliases)
    code += packet_code

    # Generate packet registry for nested classes
    code += "\n\n"
    code += "# Packet Registry - maps packet type to nested packet class\n"
    code += "PACKET_REGISTRY: dict[int, type[Packet]] = {\n"

    # Build registry with nested class paths
    registry_items = []
    for category, value in packets.items():
        if isinstance(value, dict):
            # Check if this is a category grouping
            if any(isinstance(v, dict) and "pkt_type" in v for v in value.values()):
                # New format: grouped by category
                # Quirk: Convert category names to proper camel case (multi_zone -> MultiZone)
                parts = category.split("_")
                category_class = "".join(part.capitalize() for part in parts)
                for packet_name, packet_def in value.items():
                    pkt_type = packet_def.get("pkt_type")
                    if pkt_type is not None:
                        # Remove category prefix to get short name
                        # Use case-insensitive matching to handle multi_zone -> Multizone -> MultiZone
                        short_name = packet_name
                        if packet_name.lower().startswith(category_class.lower()):
                            short_name = packet_name[len(category_class) :]

                        # Quirk: rename bare Light.Get/Light.State to
                        # Light.GetColor/Light.StateColor (only these two are
                        # renamed here; other Light names pass through as-is)
                        if category_class == "Light":
                            if short_name == "Get":
                                short_name = "GetColor"
                            elif short_name == "State":
                                short_name = "StateColor"

                        # Full path: Category.ShortName
                        full_path = f"{category_class}.{short_name}"
                        registry_items.append((pkt_type, full_path))

    # Sort by packet type for readability
    for pkt_type, full_path in sorted(registry_items):
        code += f" {pkt_type}: {full_path},\n"

    code += "}\n"
    code += "\n\n"
    # Emit a small lookup helper wrapping PACKET_REGISTRY.get().
    code += "def get_packet_class(pkt_type: int) -> type[Packet] | None:\n"
    code += ' """Get packet class for a given packet type.\n'
    code += "\n"
    code += " Args:\n"
    code += " pkt_type: Packet type number\n"
    code += "\n"
    code += " Returns:\n"
    code += " Nested packet class, or None if unknown\n"
    code += ' """\n'
    code += " return PACKET_REGISTRY.get(pkt_type)\n"

    return code
|
|
1110
|
+
|
|
1111
|
+
|
|
1112
|
+
def download_protocol() -> dict[str, Any]:
    """Download and parse protocol.yml from LIFX GitHub repository.

    Returns:
        Parsed protocol dictionary

    Raises:
        URLError: If download fails or times out
        yaml.YAMLError: If parsing fails
    """
    print(f"Downloading protocol.yml from {PROTOCOL_URL}...")
    # Bound the request: urlopen has no default timeout, so a stalled
    # connection would otherwise hang the generator indefinitely.
    with urlopen(PROTOCOL_URL, timeout=30) as response:  # nosec
        protocol_data = response.read()

    print("Parsing protocol specification...")
    # safe_load only: never execute arbitrary YAML tags from remote content.
    protocol = yaml.safe_load(protocol_data)
    return protocol
|
|
1129
|
+
|
|
1130
|
+
|
|
1131
|
+
def validate_protocol_spec(protocol: dict[str, Any]) -> list[str]:
    """Validate protocol specification for missing type references.

    First registers every declared type name (enums, fields, compound
    fields, unions, packets) in a TypeRegistry, then checks that the base
    type of every field in every structure resolves to a registered type.

    Args:
        protocol: Parsed protocol dictionary

    Returns:
        List of error messages (empty if validation passes)
    """
    errors: list[str] = []
    registry = TypeRegistry()

    # Register all types
    enums = protocol.get("enums", {})
    fields = protocol.get("fields", {})
    compound_fields = protocol.get("compound_fields", {})
    unions = protocol.get("unions", {})
    packets = protocol.get("packets", {})

    # Register enums
    for enum_name in enums.keys():
        registry.register_enum(enum_name)

    # Register field structures
    for field_name in fields.keys():
        registry.register_field(field_name)

    # Register compound fields (same namespace as plain fields)
    for field_name in compound_fields.keys():
        registry.register_field(field_name)

    # Register unions
    for union_name in unions.keys():
        registry.register_union(union_name)

    # Register packets (flatten by category)
    for category_packets in packets.values():
        if isinstance(category_packets, dict):
            for packet_name in category_packets.keys():
                registry.register_packet(packet_name)

    # Validate field type references
    # Closure over `errors` and `registry`: appends one message per
    # unresolved base type it finds.
    def validate_field_types(struct_name: str, struct_def: dict[str, Any]) -> None:
        """Validate all field types in a structure."""
        if isinstance(struct_def, dict) and "fields" in struct_def:
            for field_item in struct_def["fields"]:
                if "type" in field_item:
                    field_type = field_item["type"]
                    # Reserved fields carry a type but no name.
                    field_name = field_item.get("name", "reserved")
                    base_type, _, _ = parse_field_type(field_type)

                    # Check if type is defined
                    if not registry.has_type(base_type):
                        errors.append(
                            f"{struct_name}.{field_name}: Unknown type '{base_type}' in field type '{field_type}'"
                        )

    # Validate fields
    for field_name, field_def in fields.items():
        validate_field_types(f"fields.{field_name}", field_def)

    # Validate compound fields
    for field_name, field_def in compound_fields.items():
        validate_field_types(f"compound_fields.{field_name}", field_def)

    # Validate unions
    for union_name, union_def in unions.items():
        validate_field_types(f"unions.{union_name}", union_def)

    # Validate packets
    for category, category_packets in packets.items():
        if isinstance(category_packets, dict):
            for packet_name, packet_def in category_packets.items():
                if isinstance(packet_def, dict):
                    validate_field_types(
                        f"packets.{category}.{packet_name}", packet_def
                    )

    return errors
|
|
1210
|
+
|
|
1211
|
+
|
|
1212
|
+
def should_skip_button_relay(name: str) -> bool:
    """Check if a name should be skipped (Button or Relay related).

    Args:
        name: Type name to check (enum, field, union, packet, or category)

    Returns:
        True if the name starts with Button or Relay, False otherwise
    """
    # str.startswith accepts a tuple of prefixes: one call instead of
    # an or-chain of two separate startswith() calls.
    return name.startswith(("Button", "Relay"))
|
|
1222
|
+
|
|
1223
|
+
|
|
1224
|
+
def filter_button_relay_items(items: dict[str, Any]) -> dict[str, Any]:
    """Filter out Button and Relay items from a dictionary.

    Args:
        items: Dictionary of items to filter

    Returns:
        Filtered dictionary without Button/Relay items
    """
    # Copy every entry whose key does not match the Button/Relay skip
    # criterion; insertion order of the survivors is preserved.
    kept: dict[str, Any] = {}
    for key, value in items.items():
        if should_skip_button_relay(key):
            continue
        kept[key] = value
    return kept
|
|
1238
|
+
|
|
1239
|
+
|
|
1240
|
+
def filter_button_relay_packets(packets: dict[str, Any]) -> dict[str, Any]:
    """Filter out button and relay category packets.

    Args:
        packets: Dictionary of packet definitions (grouped by category)

    Returns:
        Filtered dictionary without button/relay categories
    """
    # Drop exactly these two category keys; every other category passes
    # through unchanged, preserving its original order.
    excluded = ("button", "relay")
    result: dict[str, Any] = {}
    for category, category_packets in packets.items():
        if category in excluded:
            continue
        result[category] = category_packets
    return result
|
|
1254
|
+
|
|
1255
|
+
|
|
1256
|
+
def extract_packets_as_fields(
    packets: dict[str, Any], fields: dict[str, Any]
) -> dict[str, Any]:
    """Extract packets that are used as field types in other structures.

    Args:
        packets: Dictionary of packet definitions (grouped by category)
        fields: Dictionary of field definitions to scan

    Returns:
        Dictionary of packet definitions that are referenced as field types
    """
    packets_as_fields: dict[str, Any] = {}

    # Flatten the category grouping into name -> definition; only entries
    # carrying a pkt_type are real packets.
    flat_packets: dict[str, Any] = {}
    # Iterate .values() directly: the category name itself is not needed.
    for category_packets in packets.values():
        if isinstance(category_packets, dict):
            for packet_name, packet_def in category_packets.items():
                if isinstance(packet_def, dict) and "pkt_type" in packet_def:
                    flat_packets[packet_name] = packet_def

    # Scan every field structure for nested references to a packet type.
    # (Scan `fields` directly; the previous shallow copy added nothing.)
    for struct_def in fields.values():
        if isinstance(struct_def, dict) and "fields" in struct_def:
            for field_item in struct_def["fields"]:
                if "type" in field_item:
                    field_type = field_item["type"]
                    base_type, _, is_nested = parse_field_type(field_type)

                    # Check if this references a packet
                    if is_nested and base_type in flat_packets:
                        packets_as_fields[base_type] = flat_packets[base_type]

    return packets_as_fields
|
|
1293
|
+
|
|
1294
|
+
|
|
1295
|
+
def main() -> None:
    """Main generator entry point.

    Downloads protocol.yml, filters out Button/Relay definitions, validates
    the result, then writes protocol_types.py and packets.py into the
    package's protocol directory. Exits with status 1 on download or
    validation failure.
    """
    try:
        # Download and parse protocol from GitHub
        protocol = download_protocol()
    except Exception as e:
        # Top-level boundary: report and abort; nothing to clean up yet.
        print(f"Error: Failed to download protocol.yml: {e}", file=sys.stderr)
        sys.exit(1)

    # Extract sections
    enums = protocol.get("enums", {})
    fields = protocol.get("fields", {})
    compound_fields = protocol.get("compound_fields", {})
    unions = protocol.get("unions", {})
    packets = protocol.get("packets", {})

    # Filter out Button and Relay items (not relevant for light control)
    print("Filtering out Button and Relay items...")
    enums = filter_button_relay_items(enums)
    fields = filter_button_relay_items(fields)
    compound_fields = filter_button_relay_items(compound_fields)
    unions = filter_button_relay_items(unions)
    packets = filter_button_relay_packets(packets)

    # Rebuild protocol dict with filtered items for validation
    # (filter BEFORE validating so references to removed Button/Relay
    # types are not reported as errors against kept structures)
    filtered_protocol = {
        **protocol,
        "enums": enums,
        "fields": fields,
        "compound_fields": compound_fields,
        "unions": unions,
        "packets": packets,
    }

    # Validate filtered protocol specification
    print("Validating protocol specification...")
    validation_errors = validate_protocol_spec(filtered_protocol)
    if validation_errors:
        print("Validation failed with the following errors:", file=sys.stderr)
        for error in validation_errors:
            print(f" - {error}", file=sys.stderr)
        sys.exit(1)
    print("Validation passed!")

    # Extract packets that are used as field types (e.g., DeviceStateVersion)
    packets_as_fields = extract_packets_as_fields(packets, fields)

    print(f"Found {len(unions)} unions")
    print(
        f"Found {len(packets_as_fields)} packets used as field types: {list(packets_as_fields.keys())}"
    )

    # Determine output directory
    # NOTE(review): assumes this file lives at src/lifx_emulator/protocol/
    # four levels below the project root — verify if the layout changes.
    project_root = Path(__file__).parent.parent.parent.parent
    protocol_dir = project_root / "src" / "lifx_emulator" / "protocol"

    # Generate protocol_types.py (avoid conflict with Python's types module)
    types_code = generate_types_file(
        enums, fields, compound_fields, unions, packets_as_fields
    )
    types_file = protocol_dir / "protocol_types.py"
    with open(types_file, "w") as f:
        f.write(types_code)
    print(f"Generated {types_file}")

    # Generate packets.py
    packets_code = generate_packets_file(
        packets, fields, compound_fields, unions, packets_as_fields, enums
    )
    packets_file = protocol_dir / "packets.py"
    with open(packets_file, "w") as f:
        f.write(packets_code)
    print(f"Generated {packets_file}")
|
|
1368
|
+
|
|
1369
|
+
|
|
1370
|
+
# Script entry point: regenerate protocol_types.py and packets.py.
if __name__ == "__main__":
    main()
|