lifx-emulator 2.4.0__py3-none-any.whl → 3.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. lifx_emulator-3.1.0.dist-info/METADATA +103 -0
  2. lifx_emulator-3.1.0.dist-info/RECORD +19 -0
  3. {lifx_emulator-2.4.0.dist-info → lifx_emulator-3.1.0.dist-info}/WHEEL +1 -1
  4. lifx_emulator-3.1.0.dist-info/entry_points.txt +2 -0
  5. lifx_emulator_app/__init__.py +10 -0
  6. {lifx_emulator → lifx_emulator_app}/__main__.py +2 -3
  7. {lifx_emulator → lifx_emulator_app}/api/__init__.py +1 -1
  8. {lifx_emulator → lifx_emulator_app}/api/app.py +9 -4
  9. {lifx_emulator → lifx_emulator_app}/api/mappers/__init__.py +1 -1
  10. {lifx_emulator → lifx_emulator_app}/api/mappers/device_mapper.py +1 -1
  11. {lifx_emulator → lifx_emulator_app}/api/models.py +1 -2
  12. lifx_emulator_app/api/routers/__init__.py +11 -0
  13. {lifx_emulator → lifx_emulator_app}/api/routers/devices.py +2 -2
  14. {lifx_emulator → lifx_emulator_app}/api/routers/monitoring.py +1 -1
  15. {lifx_emulator → lifx_emulator_app}/api/routers/scenarios.py +1 -1
  16. lifx_emulator_app/api/services/__init__.py +8 -0
  17. {lifx_emulator → lifx_emulator_app}/api/services/device_service.py +3 -2
  18. lifx_emulator_app/api/static/dashboard.js +588 -0
  19. lifx_emulator_app/api/templates/dashboard.html +357 -0
  20. lifx_emulator/__init__.py +0 -31
  21. lifx_emulator/api/routers/__init__.py +0 -11
  22. lifx_emulator/api/services/__init__.py +0 -8
  23. lifx_emulator/api/templates/dashboard.html +0 -899
  24. lifx_emulator/constants.py +0 -33
  25. lifx_emulator/devices/__init__.py +0 -37
  26. lifx_emulator/devices/device.py +0 -395
  27. lifx_emulator/devices/manager.py +0 -256
  28. lifx_emulator/devices/observers.py +0 -139
  29. lifx_emulator/devices/persistence.py +0 -308
  30. lifx_emulator/devices/state_restorer.py +0 -259
  31. lifx_emulator/devices/state_serializer.py +0 -157
  32. lifx_emulator/devices/states.py +0 -381
  33. lifx_emulator/factories/__init__.py +0 -39
  34. lifx_emulator/factories/builder.py +0 -375
  35. lifx_emulator/factories/default_config.py +0 -158
  36. lifx_emulator/factories/factory.py +0 -252
  37. lifx_emulator/factories/firmware_config.py +0 -77
  38. lifx_emulator/factories/serial_generator.py +0 -82
  39. lifx_emulator/handlers/__init__.py +0 -39
  40. lifx_emulator/handlers/base.py +0 -49
  41. lifx_emulator/handlers/device_handlers.py +0 -322
  42. lifx_emulator/handlers/light_handlers.py +0 -503
  43. lifx_emulator/handlers/multizone_handlers.py +0 -249
  44. lifx_emulator/handlers/registry.py +0 -110
  45. lifx_emulator/handlers/tile_handlers.py +0 -488
  46. lifx_emulator/products/__init__.py +0 -28
  47. lifx_emulator/products/generator.py +0 -1079
  48. lifx_emulator/products/registry.py +0 -1530
  49. lifx_emulator/products/specs.py +0 -284
  50. lifx_emulator/products/specs.yml +0 -386
  51. lifx_emulator/protocol/__init__.py +0 -1
  52. lifx_emulator/protocol/base.py +0 -446
  53. lifx_emulator/protocol/const.py +0 -8
  54. lifx_emulator/protocol/generator.py +0 -1384
  55. lifx_emulator/protocol/header.py +0 -159
  56. lifx_emulator/protocol/packets.py +0 -1351
  57. lifx_emulator/protocol/protocol_types.py +0 -817
  58. lifx_emulator/protocol/serializer.py +0 -379
  59. lifx_emulator/repositories/__init__.py +0 -22
  60. lifx_emulator/repositories/device_repository.py +0 -155
  61. lifx_emulator/repositories/storage_backend.py +0 -107
  62. lifx_emulator/scenarios/__init__.py +0 -22
  63. lifx_emulator/scenarios/manager.py +0 -322
  64. lifx_emulator/scenarios/models.py +0 -112
  65. lifx_emulator/scenarios/persistence.py +0 -241
  66. lifx_emulator/server.py +0 -464
  67. lifx_emulator-2.4.0.dist-info/METADATA +0 -107
  68. lifx_emulator-2.4.0.dist-info/RECORD +0 -62
  69. lifx_emulator-2.4.0.dist-info/entry_points.txt +0 -2
  70. lifx_emulator-2.4.0.dist-info/licenses/LICENSE +0 -35
@@ -1,1384 +0,0 @@
1
- """Code generator for LIFX protocol structures.
2
-
3
- Downloads the official protocol.yml from the LIFX GitHub repository and
4
- generates Python types and packet classes. The YAML is never stored locally,
5
- only parsed and converted into protocol classes.
6
- """
7
-
8
- from __future__ import annotations
9
-
10
- import re
11
- import sys
12
- from pathlib import Path
13
- from typing import Any
14
- from urllib.request import urlopen
15
-
16
- import yaml
17
-
18
- from lifx_emulator.protocol.const import PROTOCOL_URL
19
-
20
-
21
class TypeRegistry:
    """Registry of all protocol types for validation.

    Tracks every defined type (enums, fields, packets, unions) so that type
    references in the protocol specification can be checked for validity.
    """

    def __init__(self) -> None:
        """Initialize empty type registry."""
        # User-defined type categories start empty; primitives are built in.
        self._enums: set[str] = set()
        self._fields: set[str] = set()
        self._packets: set[str] = set()
        self._unions: set[str] = set()
        int_widths = (8, 16, 32, 64)
        self._basic_types: set[str] = (
            {f"uint{w}" for w in int_widths}
            | {f"int{w}" for w in int_widths}
            | {"float32", "bool", "byte", "reserved"}  # 'reserved' marks padding
        )

    def register_enum(self, name: str) -> None:
        """Record *name* as an enum type."""
        self._enums.add(name)

    def register_field(self, name: str) -> None:
        """Record *name* as a field structure type."""
        self._fields.add(name)

    def register_packet(self, name: str) -> None:
        """Record *name* as a packet type."""
        self._packets.add(name)

    def register_union(self, name: str) -> None:
        """Record *name* as a union type."""
        self._unions.add(name)

    def is_enum(self, name: str) -> bool:
        """Return True if *name* was registered as an enum."""
        return name in self._enums

    def has_type(self, name: str) -> bool:
        """Return True if *name* is defined in any category or is a basic type."""
        return name in self.get_all_types()

    def get_all_types(self) -> set[str]:
        """Return the set of every known type name (all categories + basics)."""
        return (
            self._enums
            | self._fields
            | self._packets
            | self._unions
            | self._basic_types
        )
122
-
123
-
124
def to_snake_case(name: str) -> str:
    """Convert PascalCase or camelCase to snake_case.

    Args:
        name: PascalCase or camelCase string

    Returns:
        snake_case string
    """
    # Emit an underscore before every ASCII uppercase letter (except when it
    # is the first character), then lowercase the whole result.
    pieces: list[str] = []
    for index, char in enumerate(name):
        if index and "A" <= char <= "Z":
            pieces.append("_")
        pieces.append(char.lower())
    return "".join(pieces)
136
-
137
-
138
def format_long_import(
    items: list[str], prefix: str = "from lifx_emulator.protocol.protocol_types import "
) -> str:
    """Format a long import statement across multiple lines.

    Args:
        items: List of import items (e.g., ["Foo", "Bar as BazAlias"])
        prefix: Import prefix

    Returns:
        Formatted import string with line breaks if needed; empty string
        for an empty item list.
    """
    if not items:
        return ""

    # Prefer a single line when it fits within the 120-character limit
    single_line = prefix + ", ".join(items)
    if len(single_line) <= 120:
        return single_line + "\n"

    # Multi-line parenthesized form. Every item — including the last — keeps
    # a trailing comma (black-style). The original code branched on
    # last-vs-not-last but appended the identical string in both branches,
    # so the conditional was dead and is removed here.
    lines = [prefix + "("]
    lines.extend(f"    {item}," for item in items)
    lines.append(")")
    return "\n".join(lines) + "\n"
167
-
168
-
169
def format_long_list(items: list[dict[str, Any]], max_line_length: int = 120) -> str:
    """Format a long list across multiple lines.

    Args:
        items: List of dict items to format
        max_line_length: Maximum line length before wrapping

    Returns:
        Formatted list string ("[]" for an empty list).
    """
    if not items:
        return "[]"

    # Prefer a single line when the repr fits the limit
    single_line = repr(items)
    if len(single_line) <= max_line_length:
        return single_line

    # Multi-line format with one item per line; every item (including the
    # last) keeps a trailing comma. The original code branched on
    # last-vs-not-last but appended the identical string in both branches,
    # so the conditional was dead and is removed here.
    lines = ["["]
    lines.extend(f"    {item!r}," for item in items)
    lines.append("]")
    return "\n".join(lines)
197
-
198
-
199
def parse_field_type(field_type: str) -> tuple[str, int | None, bool]:
    """Parse a field type string.

    Args:
        field_type: Field type (e.g., 'uint16', '[32]uint8', '<HSBK>')

    Returns:
        Tuple of (base_type, array_count, is_nested)
        - base_type: The base type name
        - array_count: Number of elements if array, None otherwise
        - is_nested: True if it's a nested structure (<Type>)
    """

    def unwrap(type_str: str) -> tuple[str, bool]:
        # '<Type>' marks a reference to a nested structure
        if type_str.startswith("<") and type_str.endswith(">"):
            return type_str[1:-1], True
        return type_str, False

    # Array form: [N]type, where type may itself be nested ('<Type>')
    array_match = re.match(r"\[(\d+)\](.+)", field_type)
    if array_match is not None:
        element_type, nested = unwrap(array_match.group(2))
        return element_type, int(array_match.group(1)), nested

    # Scalar form: plain primitive or nested structure
    base, nested = unwrap(field_type)
    return base, None, nested
227
-
228
-
229
def camel_to_snake_upper(name: str) -> str:
    """Convert CamelCase to UPPER_SNAKE_CASE.

    Args:
        name: CamelCase string

    Returns:
        UPPER_SNAKE_CASE string
    """
    # Emit an underscore before every ASCII uppercase letter (except when it
    # is the first character), then uppercase the whole result.
    chars: list[str] = []
    for position, char in enumerate(name):
        if position and "A" <= char <= "Z":
            chars.append("_")
        chars.append(char.upper())
    return "".join(chars)
241
-
242
-
243
def generate_enum_code(enums: dict[str, Any]) -> str:
    """Generate Python Enum definitions with shortened names.

    Args:
        enums: Dictionary of enum definitions

    Returns:
        Python code string
    """
    out: list[str] = []

    for enum_name, enum_def in sorted(enums.items()):
        out.append(f"class {enum_name}(IntEnum):")
        out.append('    """Auto-generated enum."""')
        out.append("")

        if isinstance(enum_def, dict) and "values" in enum_def:
            # New format: {type: "uint16", values: [{name: "X", value: 1}, ...]}
            values = enum_def["values"]

            # Strip a redundant ENUM_NAME_ prefix when every non-reserved
            # member carries it, yielding cleaner Python member names.
            expected_prefix = camel_to_snake_upper(enum_name) + "_"
            named = [v["name"] for v in values if v["name"].lower() != "reserved"]
            strip_prefix = bool(named) and all(
                n.startswith(expected_prefix) for n in named
            )

            reserved_counter = 0
            for item in sorted(values, key=lambda entry: entry["value"]):
                raw_name = item["name"]
                if raw_name.lower() == "reserved":
                    # Reserved slots get unique synthetic member names
                    member = f"RESERVED_{reserved_counter}"
                    reserved_counter += 1
                elif strip_prefix and raw_name.startswith(expected_prefix):
                    member = raw_name[len(expected_prefix):]
                else:
                    member = raw_name
                out.append(f"    {member} = {item['value']}")
        else:
            # Old format: {MEMBER: value, ...}, emitted in numeric order
            for member, value in sorted(enum_def.items(), key=lambda kv: kv[1]):
                out.append(f"    {member} = {value}")

        out.append("")
        out.append("")

    return "\n".join(out)
300
-
301
-
302
def convert_type_to_python(
    field_type: str,
    type_aliases: dict[str, str] | None = None,
    field_name: str | None = None,
) -> str:
    """Convert a protocol field type to Python type annotation.

    Args:
        field_type: Protocol field type string
        type_aliases: Optional dict mapping type names to their aliases
            (for collision resolution)
        field_name: Optional field name for semantic type detection
            (e.g., "Label" fields are strings, not bytes)

    Returns:
        Python type annotation string
    """
    aliases = {} if type_aliases is None else type_aliases

    base_type, array_count, is_nested = parse_field_type(field_type)

    integer_types = (
        "uint8", "uint16", "uint32", "uint64",
        "int8", "int16", "int32", "int64",
    )

    if array_count:
        if is_nested:
            # Array of structures; honor any collision-resolution alias
            return f"list[{aliases.get(base_type, base_type)}]"
        if base_type in ("uint8", "byte"):
            # Label fields carry UTF-8 text, so expose them as str
            if field_name and field_name.lower() == "label":
                return "str"
            # All other byte arrays stay raw
            return "bytes"
        return "list[int]"

    if is_nested:
        # Single structure; honor any collision-resolution alias
        return aliases.get(base_type, base_type)
    if base_type in integer_types:
        return "int"
    if base_type == "float32":
        return "float"
    if base_type == "bool":
        return "bool"
    return "Any"
350
-
351
-
352
def generate_pack_method(
    fields_data: list[dict[str, Any]],
    class_type: str = "field",
    enum_types: set[str] | None = None,
) -> str:
    """Generate pack() method code for a field structure or packet.

    Emits the source text of a ``pack(self) -> bytes`` method that serializes
    each field in declaration order via the runtime ``serializer`` module.

    Args:
        fields_data: List of field definitions
        class_type: Either "field" or "packet". NOTE(review): this parameter
            is never read by the body — kept for interface compatibility.
        enum_types: Set of enum type names for detection

    Returns:
        Python method code string
    """
    if enum_types is None:
        enum_types = set()

    code = []
    code.append("    def pack(self) -> bytes:")
    code.append('        """Pack to bytes."""')
    # Import is emitted inside the generated method to avoid a circular
    # import at module load time in the generated file.
    code.append("        from lifx_emulator.protocol import serializer")
    code.append('        result = b""')
    code.append("")

    for field_item in fields_data:
        # Reserved fields have no "name" key; they are pure padding
        if "name" not in field_item:
            size_bytes = field_item.get("size_bytes", 0)
            code.append(f"        # Reserved {size_bytes} bytes")
            code.append(f"        result += serializer.pack_reserved({size_bytes})")
            continue

        protocol_name = field_item["name"]
        field_type = field_item["type"]
        size_bytes = field_item.get("size_bytes", 0)
        python_name = to_snake_case(protocol_name)

        base_type, array_count, is_nested = parse_field_type(field_type)

        # Enums appear in the spec as nested (<Type>) but are listed in
        # enum_types, so they serialize as integers rather than structures.
        is_enum = is_nested and base_type in enum_types

        if array_count:
            if is_enum:
                # Array of enums - pack each as an int.
                # NOTE(review): width is hard-coded to 'uint8' even though the
                # new-format spec carries a per-enum 'type'; unpack mirrors
                # this, so round-trips are self-consistent — confirm against
                # protocol.yml enum widths.
                code.append(f"        # {python_name}: list[{base_type}] (enum array)")
                code.append(f"        for item in self.{python_name}:")
                code.append(
                    "            result += serializer.pack_value(int(item), 'uint8')"
                )
            elif is_nested:
                # Array of nested structures: delegate to each item's pack()
                code.append(f"        # {python_name}: list[{base_type}]")
                code.append(f"        for item in self.{python_name}:")
                code.append("            result += item.pack()")
            elif base_type in ("uint8", "byte"):
                # Byte array: fixed-size raw bytes
                code.append(f"        # {python_name}: bytes ({size_bytes} bytes)")
                pack_line = (
                    f"        result += serializer.pack_bytes("
                    f"self.{python_name}, {size_bytes})"
                )
                code.append(pack_line)
            else:
                # Array of primitives
                code.append(f"        # {python_name}: list[{base_type}]")
                pack_array = (
                    f"        result += serializer.pack_array("
                    f"self.{python_name}, '{base_type}', {array_count})"
                )
                code.append(pack_array)
        elif is_enum:
            # Scalar enum - pack as int (same hard-coded 'uint8' width as above)
            code.append(f"        # {python_name}: {base_type} (enum)")
            pack_enum = (
                f"        result += serializer.pack_value("
                f"int(self.{python_name}), 'uint8')"
            )
            code.append(pack_enum)
        elif is_nested:
            # Scalar nested structure: delegate to its pack()
            code.append(f"        # {python_name}: {base_type}")
            code.append(f"        result += self.{python_name}.pack()")
        else:
            # Scalar primitive type
            code.append(f"        # {python_name}: {base_type}")
            pack_prim = (
                f"        result += serializer.pack_value("
                f"self.{python_name}, '{base_type}')"
            )
            code.append(pack_prim)

    code.append("")
    code.append("        return result")

    return "\n".join(code)
450
-
451
-
452
def generate_unpack_method(
    class_name: str,
    fields_data: list[dict[str, Any]],
    class_type: str = "field",
    enum_types: set[str] | None = None,
) -> str:
    """Generate unpack() classmethod code for a field structure or packet.

    Emits the source text of a ``unpack(cls, data, offset)`` classmethod that
    deserializes fields in declaration order and returns
    ``(instance, new_offset)``.

    Args:
        class_name: Name of the class
        fields_data: List of field definitions
        class_type: Either "field" or "packet". NOTE(review): this parameter
            is never read by the body — kept for interface compatibility.
        enum_types: Set of enum type names for detection

    Returns:
        Python method code string
    """
    if enum_types is None:
        enum_types = set()

    code = []
    code.append("    @classmethod")
    unpack_sig = (
        f"    def unpack(cls, data: bytes, offset: int = 0) -> "
        f"tuple[{class_name}, int]:"
    )
    code.append(unpack_sig)
    code.append('        """Unpack from bytes."""')
    # Import is emitted inside the generated method to avoid a circular
    # import at module load time in the generated file.
    code.append("        from lifx_emulator.protocol import serializer")
    code.append("        current_offset = offset")

    # Names of the local variables to pass to the constructor at the end
    field_vars = []

    for field_item in fields_data:
        # Reserved fields have no "name" key; skip their bytes entirely
        if "name" not in field_item:
            size_bytes = field_item.get("size_bytes", 0)
            code.append(f"        # Skip reserved {size_bytes} bytes")
            code.append(f"        current_offset += {size_bytes}")
            continue

        protocol_name = field_item["name"]
        field_type = field_item["type"]
        size_bytes = field_item.get("size_bytes", 0)
        python_name = to_snake_case(protocol_name)
        field_vars.append(python_name)

        base_type, array_count, is_nested = parse_field_type(field_type)

        # Enums appear in the spec as nested (<Type>) but are listed in
        # enum_types, so they deserialize from integers.
        is_enum = is_nested and base_type in enum_types

        if array_count:
            if is_enum:
                # Array of enums: read raw ints, convert to enum members.
                # NOTE(review): width hard-coded to 'uint8', mirroring pack.
                code.append(f"        # {python_name}: list[{base_type}] (enum array)")
                code.append(f"        {python_name} = []")
                code.append(f"        for _ in range({array_count}):")
                unpack_enum_item = (
                    "            item_raw, current_offset = "
                    "serializer.unpack_value(data, 'uint8', current_offset)"
                )
                code.append(unpack_enum_item)
                code.append(f"            {python_name}.append({base_type}(item_raw))")
            elif is_nested:
                # Array of nested structures: delegate to each type's unpack()
                code.append(f"        # {python_name}: list[{base_type}]")
                code.append(f"        {python_name} = []")
                code.append(f"        for _ in range({array_count}):")
                unpack_nested = (
                    f"            item, current_offset = "
                    f"{base_type}.unpack(data, current_offset)"
                )
                code.append(unpack_nested)
                code.append(f"            {python_name}.append(item)")
            elif base_type in ("uint8", "byte"):
                # Byte array: fixed-size raw bytes
                code.append(f"        # {python_name}: bytes ({size_bytes} bytes)")
                code.append(
                    f"        {python_name}, current_offset = serializer.unpack_bytes("
                )
                code.append(f"            data, {size_bytes}, current_offset")
                code.append("        )")
            else:
                # Array of primitives
                code.append(f"        # {python_name}: list[{base_type}]")
                code.append(
                    f"        {python_name}, current_offset = serializer.unpack_array("
                )
                code.append(
                    f"            data, '{base_type}', {array_count}, current_offset"
                )
                code.append("        )")
        elif is_enum:
            # Scalar enum - unpack as int then convert to the enum member
            code.append(f"        # {python_name}: {base_type} (enum)")
            unpack_enum = (
                f"        {python_name}_raw, current_offset = "
                f"serializer.unpack_value(data, 'uint8', current_offset)"
            )
            code.append(unpack_enum)
            code.append(f"        {python_name} = {base_type}({python_name}_raw)")
        elif is_nested:
            # Scalar nested structure: delegate to its unpack()
            code.append(f"        # {python_name}: {base_type}")
            unpack_nest = (
                f"        {python_name}, current_offset = "
                f"{base_type}.unpack(data, current_offset)"
            )
            code.append(unpack_nest)
        else:
            # Scalar primitive type
            code.append(f"        # {python_name}: {base_type}")
            unpack_prim = (
                f"        {python_name}, current_offset = "
                f"serializer.unpack_value(data, '{base_type}', current_offset)"
            )
            code.append(unpack_prim)

    code.append("")
    # Create instance - format long return statements to stay under 120 chars
    field_args = ", ".join([f"{name}={name}" for name in field_vars])
    return_stmt = f"        return cls({field_args}), current_offset"

    # If too long, break across multiple lines
    if len(return_stmt) > 120:
        code.append("        return (")
        code.append("            cls(")
        # NOTE(review): both branches of this conditional append the identical
        # string (trailing comma either way), so the last-item check is dead.
        for i, name in enumerate(field_vars):
            if i < len(field_vars) - 1:
                code.append(f"                {name}={name},")
            else:
                code.append(f"                {name}={name},")
        code.append("            ),")
        code.append("            current_offset,")
        code.append("        )")
    else:
        code.append(return_stmt)

    return "\n".join(code)
594
-
595
-
596
def generate_field_code(
    fields: dict[str, Any],
    compound_fields: dict[str, Any] | None = None,
    unions: dict[str, Any] | None = None,
    packets_as_fields: dict[str, Any] | None = None,
    enum_types: set[str] | None = None,
) -> tuple[str, dict[str, dict[str, str]]]:
    """Generate Python dataclass definitions for field structures.

    All four input dictionaries are merged (later ones win on key collisions)
    and emitted as @dataclass definitions in sorted name order, each with
    generated pack()/unpack() methods.

    Args:
        fields: Dictionary of field definitions
        compound_fields: Dictionary of compound field definitions
        unions: Dictionary of union definitions (treated as fields)
        packets_as_fields: Dictionary of packets that are also used as field types
        enum_types: Set of enum type names

    Returns:
        Tuple of (code string, field mappings dict)
        Field mappings: {ClassName: {python_name: protocol_name}}
    """
    if enum_types is None:
        enum_types = set()

    code = []
    field_mappings: dict[str, dict[str, str]] = {}
    all_fields = {**fields}
    if compound_fields:
        all_fields.update(compound_fields)
    if unions:
        all_fields.update(unions)
    if packets_as_fields:
        all_fields.update(packets_as_fields)

    for field_name, field_def in sorted(all_fields.items()):
        code.append("@dataclass")
        code.append(f"class {field_name}:")

        # Unions are distinguished purely by the presence of a "comment" key
        # in their definition dict.
        is_union = isinstance(field_def, dict) and "comment" in field_def
        if is_union:
            code.append(
                f'    """Auto-generated union structure. {field_def.get("comment", "")}"""'
            )
        else:
            code.append('    """Auto-generated field structure."""')
        code.append("")

        field_map: dict[str, str] = {}
        fields_data = []

        # Handle both old format (dict) and new format (list of dicts)
        if isinstance(field_def, dict) and "fields" in field_def:
            # New format: {size_bytes: N, fields: [{name: "X", type: "uint16"}, ...]}
            field_list = field_def["fields"]

            # Unions overlay their members, so they are stored as one raw
            # bytes blob of the union's total size rather than as members.
            if is_union:
                size_bytes = field_def.get("size_bytes", 16)
                code.append(f"    data: bytes  # Union of {size_bytes} bytes")
                field_map["data"] = "data"
                # For pack/unpack, use a single fixed-size bytes field
                fields_data = [
                    {
                        "name": "data",
                        "type": f"[{size_bytes}]byte",
                        "size_bytes": size_bytes,
                    }
                ]
            else:
                # Normal field structure - process all fields.
                # NOTE(review): this aliases (does not copy) the spec list;
                # mutating it later would change the spec too.
                fields_data = field_list  # Save for pack/unpack generation
                for field_item in field_list:
                    # Reserved fields without names stay out of the dataclass
                    # (pack/unpack still handle them via fields_data).
                    if "name" not in field_item:
                        continue
                    protocol_name = field_item["name"]
                    attr_type = field_item["type"]
                    python_name = to_snake_case(protocol_name)
                    python_type = convert_type_to_python(
                        attr_type, field_name=protocol_name
                    )

                    code.append(f"    {python_name}: {python_type}")
                    field_map[python_name] = protocol_name
        else:
            # Old format: {attr_name: type, ...}
            # Convert to new format for pack/unpack generation
            for protocol_name, attr_type in field_def.items():
                python_name = to_snake_case(protocol_name)
                python_type = convert_type_to_python(
                    attr_type, field_name=protocol_name
                )
                code.append(f"    {python_name}: {python_type}")
                field_map[python_name] = protocol_name
                # Build fields_data for old format
                fields_data.append({"name": protocol_name, "type": attr_type})

        field_mappings[field_name] = field_map

        # Add pack/unpack methods (skipped entirely for empty structures)
        if fields_data:
            code.append("")
            code.append(generate_pack_method(fields_data, "field", enum_types))
            code.append("")
            code.append(
                generate_unpack_method(field_name, fields_data, "field", enum_types)
            )

        code.append("")
        code.append("")

    return "\n".join(code), field_mappings
708
-
709
-
710
def generate_nested_packet_code(
    packets: dict[str, Any], type_aliases: dict[str, str] | None = None
) -> str:
    """Generate nested Python packet class definitions.

    Produces one outer class per category (e.g. ``Device``, ``Light``) with
    each packet as a nested @dataclass carrying PKT_TYPE, field-spec metadata,
    and ack/response flags.

    Args:
        packets: Dictionary of packet definitions (grouped by category)
        type_aliases: Optional dict mapping type names to their aliases
            (for collision resolution)

    Returns:
        Python code string with nested packet classes
    """
    if type_aliases is None:
        type_aliases = {}

    code = []

    # Flatten packets if they're grouped by category
    flat_packets: list[tuple[str, str, dict[str, Any]]] = []

    # Check if packets are grouped by category (new format).
    # NOTE(review): if packets is non-empty but its first value is not a
    # dict, flat_packets stays empty and this returns no code silently.
    sample_key = next(iter(packets.keys())) if packets else None
    if sample_key and isinstance(packets[sample_key], dict):
        sample_value = packets[sample_key]
        # A category grouping contains nested packet dicts with "pkt_type"
        if any(isinstance(v, dict) and "pkt_type" in v for v in sample_value.values()):
            # New format: grouped by category
            for category, category_packets in packets.items():
                for packet_name, packet_def in category_packets.items():
                    flat_packets.append((category, packet_name, packet_def))
        else:
            # Old format: flat packets with category field
            for packet_name, packet_def in packets.items():
                category = packet_def.get("category", "misc")
                flat_packets.append((category, packet_name, packet_def))

    # Group by category
    categories: dict[str, list[tuple[str, dict[str, Any]]]] = {}
    for category, packet_name, packet_def in flat_packets:
        if category not in categories:
            categories[category] = []
        categories[category].append((packet_name, packet_def))

    # Generate category classes with nested packet classes
    for category in sorted(categories.keys()):
        # Quirk: Convert category names to proper camel case (multi_zone -> MultiZone)
        # Split on underscores, capitalize each part, then join
        parts = category.split("_")
        category_class = "".join(part.capitalize() for part in parts)
        code.append("")
        code.append(f"class {category_class}(Packet):")
        code.append(f'    """{category_class} category packets."""')
        code.append("")

        # Generate nested packet classes (sorted by packet name)
        for packet_name, packet_def in sorted(categories[category]):
            pkt_type = packet_def["pkt_type"]
            fields_data = packet_def.get("fields", [])

            # Remove category prefix from packet name (e.g. DeviceGetLabel -> GetLabel).
            # Case-insensitive match handles multi_zone -> Multizone -> MultiZone.
            short_name = packet_name
            if packet_name.lower().startswith(category_class.lower()):
                short_name = packet_name[len(category_class) :]

            # Quirk: rename Light.Get/State to Light.GetColor/StateColor for
            # clarity. Light.Set is deliberately NOT renamed: Set and SetColor
            # are distinct packets in the protocol.
            if category_class == "Light":
                if short_name == "Get":
                    short_name = "GetColor"
                elif short_name == "State":
                    short_name = "StateColor"

            code.append("    @dataclass")
            code.append(f"    class {short_name}(Packet):")
            code.append(f'        """Packet type {pkt_type}."""')
            code.append("")
            code.append(f"        PKT_TYPE: ClassVar[int] = {pkt_type}")

            # Format fields_data - split long lists across multiple lines.
            # max_line_length=70 accounts for the ~50-char declaration prefix.
            fields_repr = format_long_list(fields_data, max_line_length=70)
            if "\n" in fields_repr:
                # Multi-line format - indent each non-blank line under the decl
                code.append("        _fields: ClassVar[list[dict[str, Any]]] = (")
                for line in fields_repr.split("\n"):
                    if line.strip():
                        code.append(f"        {line}")
                code.append("        )")
            else:
                code.append(
                    f"        _fields: ClassVar[list[dict[str, Any]]] = {fields_repr}"
                )

            # Classify packet by name pattern for smart request handling:
            # Get*, Set*, State*, or OTHER.
            packet_kind = "OTHER"
            if short_name.startswith("Get"):
                packet_kind = "GET"
            elif short_name.startswith("Set"):
                packet_kind = "SET"
            elif short_name.startswith("State"):
                packet_kind = "STATE"

            # Quirk: CopyFrameBuffer is semantically a SET operation —
            # it modifies device state without returning data.
            if category_class == "Tile" and short_name == "CopyFrameBuffer":
                packet_kind = "SET"

            code.append("")
            code.append("        # Packet metadata for automatic handling")
            code.append(f"        _packet_kind: ClassVar[str] = {repr(packet_kind)}")

            # GET requests: ack_required=False (device responds anyway);
            # SET requests: ack_required=True. Responses never required here.
            requires_ack = packet_kind == "SET"
            requires_response = False
            code.append(f"        _requires_ack: ClassVar[bool] = {requires_ack}")
            code.append(
                f"        _requires_response: ClassVar[bool] = {requires_response}"
            )
            code.append("")

            # Generate dataclass fields (only non-reserved)
            has_fields = False
            if isinstance(fields_data, list):
                for field_item in fields_data:
                    # Reserved fields (no "name") are metadata-only
                    if "name" not in field_item:
                        continue
                    protocol_name = field_item["name"]
                    field_type = field_item["type"]
                    python_name = to_snake_case(protocol_name)
                    python_type = convert_type_to_python(
                        field_type, type_aliases, field_name=protocol_name
                    )
                    code.append(f"        {python_name}: {python_type}")
                    has_fields = True

            # A dataclass body cannot be empty
            if not has_fields:
                code.append("        pass")

            code.append("")

        code.append("")

    return "\n".join(code)
860
-
861
-
862
def generate_types_file(
    enums: dict[str, Any],
    fields: dict[str, Any],
    compound_fields: dict[str, Any] | None = None,
    unions: dict[str, Any] | None = None,
    packets_as_fields: dict[str, Any] | None = None,
) -> str:
    """Generate complete types.py file.

    Assembles, in order: a fixed module header, enum classes, field
    dataclasses, convenience type aliases, and the FIELD_MAPPINGS constant.

    Args:
        enums: Enum definitions
        fields: Field structure definitions
        compound_fields: Compound field definitions
        unions: Union definitions
        packets_as_fields: Packets that are also used as field types

    Returns:
        Complete Python file content
    """
    # Fixed preamble of the emitted module; the generated code only needs
    # dataclass and IntEnum at import time.
    header = '''"""Auto-generated LIFX protocol types.

DO NOT EDIT THIS FILE MANUALLY.
Generated from https://github.com/LIFX/public-protocol/blob/main/protocol.yml
by protocol/generator.py

Uses Pythonic naming conventions (snake_case fields, shortened enums) while
maintaining compatibility with the official LIFX protocol through mappings.
"""

from __future__ import annotations

from dataclasses import dataclass
from enum import IntEnum


'''

    code = header
    code += generate_enum_code(enums)
    code += "\n"

    # Extract enum names for pack/unpack generation: field generation uses
    # this set to emit enum<->int conversions for enum-typed members.
    enum_names = set(enums.keys())

    field_code, field_mappings = generate_field_code(
        fields, compound_fields, unions, packets_as_fields, enum_names
    )
    code += field_code
    code += "\n"

    # Add type aliases for common names
    code += "# Type aliases for convenience\n"
    # Merge every structure namespace; the `or {}` guards the optional args.
    all_field_names = {
        **fields,
        **(compound_fields or {}),
        **(unions or {}),
        **(packets_as_fields or {}),
    }
    if "TileStateDevice" in all_field_names:
        code += "TileDevice = TileStateDevice # Pythonic alias\n"
    code += "\n"

    # Add field name mappings as module-level constant (formatted for readability)
    code += "# Field name mappings: Python name -> Protocol name\n"
    code += "# Used by serializer to translate between conventions\n"
    code += "FIELD_MAPPINGS: dict[str, dict[str, str]] = {\n"
    # Sorted for deterministic output across generator runs.
    for class_name in sorted(field_mappings.keys()):
        mappings = field_mappings[class_name]
        # Format each class mapping - if too long, break it into multiple lines
        mappings_str = repr(mappings)
        line = f" {repr(class_name)}: {mappings_str},"
        if len(line) > 120:
            # Multi-line format
            code += f" {repr(class_name)}: {{\n"
            for py_name, proto_name in sorted(mappings.items()):
                code += f" {repr(py_name)}: {repr(proto_name)},\n"
            code += " },\n"
        else:
            code += line + "\n"
    code += "}\n"
    code += "\n"

    return code
945
-
946
-
947
def generate_packets_file(
    packets: dict[str, Any],
    fields: dict[str, Any],
    compound_fields: dict[str, Any] | None = None,
    unions: dict[str, Any] | None = None,
    packets_as_fields: dict[str, Any] | None = None,
    enums: dict[str, Any] | None = None,
) -> str:
    """Generate complete packets.py file.

    Scans the packet definitions for referenced field/enum types, builds
    the import block (aliasing names that collide with packet category
    classes), emits the nested packet classes, and appends the
    PACKET_REGISTRY plus the get_packet_class() lookup helper.

    Args:
        packets: Packet definitions
        fields: Field definitions (for imports)
        compound_fields: Compound field definitions (for imports)
        unions: Union definitions (for imports)
        packets_as_fields: Packets that are also used as field types (for imports)
        enums: Enum definitions for detecting enum types

    Returns:
        Complete Python file content
    """
    # Extract enum names for pack/unpack generation
    enum_names = set(enums.keys()) if enums else set()

    # Collect all field types and enum types used in packets
    used_fields = set()
    used_enums = set()
    # Merge every structure namespace so any referenced base type resolves.
    all_fields = {**fields}
    if compound_fields:
        all_fields.update(compound_fields)
    if unions:
        all_fields.update(unions)
    if packets_as_fields:
        all_fields.update(packets_as_fields)

    # Flatten packets to scan for used field types
    flat_packets: list[dict[str, Any]] = []
    for value in packets.values():
        if isinstance(value, dict):
            # Check if this is a category grouping
            if any(isinstance(v, dict) and "pkt_type" in v for v in value.values()):
                # New format: grouped by category
                for packet_def in value.values():
                    flat_packets.append(packet_def)
            elif "pkt_type" in value:
                # Old format: direct packet
                flat_packets.append(value)

    # Record which structure/enum types the packet fields reference, so the
    # generated module imports exactly what it needs.
    for packet_def in flat_packets:
        fields_data = packet_def.get("fields", [])
        # Handle both list and dict formats
        if isinstance(fields_data, list):
            for field_item in fields_data:
                if "type" in field_item:
                    field_type = field_item["type"]
                    base_type, _, is_nested = parse_field_type(field_type)
                    if is_nested:
                        if base_type in all_fields:
                            used_fields.add(base_type)
                        elif base_type in enum_names:
                            used_enums.add(base_type)
        elif isinstance(fields_data, dict):
            for field_type in fields_data.values():
                base_type, _, is_nested = parse_field_type(field_type)
                if is_nested:
                    if base_type in all_fields:
                        used_fields.add(base_type)
                    elif base_type in enum_names:
                        used_enums.add(base_type)

    # Generate imports with collision detection
    imports = ""
    all_imports = sorted(used_fields | used_enums)
    if all_imports:
        # Detect name collisions with packet category names
        category_names = set()
        for category in packets.keys():
            if isinstance(packets[category], dict):
                # Convert category name to class name (same as in generate_nested_packet_code)
                parts = category.split("_")
                category_class = "".join(part.capitalize() for part in parts)
                category_names.add(category_class)

        # Generate import list with aliases for collisions
        import_items = []
        type_aliases = {}  # Map original name to aliased name
        for name in all_imports:
            if name in category_names:
                # Use alias to avoid collision
                aliased_name = f"{name}Field"
                import_items.append(f"{name} as {aliased_name}")
                type_aliases[name] = aliased_name
            else:
                import_items.append(name)

        imports = format_long_import(import_items) + "\n"
    else:
        # No referenced types: nothing to import, no aliases needed.
        # (imports was already "" above; reassigned here for symmetry.)
        type_aliases = {}
        imports = ""

    # f-string header interpolates the computed import block.
    header = f'''"""Auto-generated LIFX protocol packets.

DO NOT EDIT THIS FILE MANUALLY.
Generated from https://github.com/LIFX/public-protocol/blob/main/protocol.yml
by protocol/generator.py

Uses nested packet classes organized by category (Device, Light, etc.).
Each packet inherits from base Packet class which provides generic pack/unpack.
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, ClassVar

from lifx_emulator.protocol.base import Packet
{imports}
'''

    code = header
    packet_code = generate_nested_packet_code(packets, type_aliases)
    code += packet_code

    # Generate packet registry for nested classes
    code += "\n\n"
    code += "# Packet Registry - maps packet type to nested packet class\n"
    code += "PACKET_REGISTRY: dict[int, type[Packet]] = {\n"

    # Build registry with nested class paths
    registry_items = []
    for category, value in packets.items():
        if isinstance(value, dict):
            # Check if this is a category grouping
            if any(isinstance(v, dict) and "pkt_type" in v for v in value.values()):
                # New format: grouped by category
                # Quirk: Convert category names to proper camel case (multi_zone -> MultiZone)
                parts = category.split("_")
                category_class = "".join(part.capitalize() for part in parts)
                for packet_name, packet_def in value.items():
                    pkt_type = packet_def.get("pkt_type")
                    if pkt_type is not None:
                        # Remove category prefix to get short name
                        # Use case-insensitive matching to handle multi_zone -> Multizone -> MultiZone
                        short_name = packet_name
                        if packet_name.lower().startswith(category_class.lower()):
                            short_name = packet_name[len(category_class) :]

                        # Quirk: Rename Light.Get/Set/State to Light.GetColor/SetColor/StateColor
                        if category_class == "Light":
                            if short_name == "Get":
                                short_name = "GetColor"
                            elif short_name == "State":
                                short_name = "StateColor"

                        # Full path: Category.ShortName
                        full_path = f"{category_class}.{short_name}"
                        registry_items.append((pkt_type, full_path))

    # Sort by packet type for readability
    for pkt_type, full_path in sorted(registry_items):
        code += f" {pkt_type}: {full_path},\n"

    code += "}\n"
    code += "\n\n"
    # Emit the lookup helper line-by-line so its docstring is part of the
    # generated module, not of this generator.
    code += "def get_packet_class(pkt_type: int) -> type[Packet] | None:\n"
    code += ' """Get packet class for a given packet type.\n'
    code += "\n"
    code += " Args:\n"
    code += " pkt_type: Packet type number\n"
    code += "\n"
    code += " Returns:\n"
    code += " Nested packet class, or None if unknown\n"
    code += ' """\n'
    code += " return PACKET_REGISTRY.get(pkt_type)\n"

    return code
1123
-
1124
-
1125
def download_protocol(timeout: float = 30.0) -> dict[str, Any]:
    """Download and parse protocol.yml from LIFX GitHub repository.

    Args:
        timeout: Socket timeout in seconds for the HTTP request. The
            previous implementation passed no timeout, so a stalled
            connection could block the generator forever.

    Returns:
        Parsed protocol dictionary

    Raises:
        URLError: If download fails (including on timeout)
        yaml.YAMLError: If parsing fails
    """
    print(f"Downloading protocol.yml from {PROTOCOL_URL}...")
    # Fixed, trusted URL; timeout bounds how long we wait for the server.
    with urlopen(PROTOCOL_URL, timeout=timeout) as response:  # nosec
        protocol_data = response.read()

    print("Parsing protocol specification...")
    # safe_load: never construct arbitrary Python objects from the YAML.
    protocol = yaml.safe_load(protocol_data)
    return protocol
1142
-
1143
-
1144
def validate_protocol_spec(protocol: dict[str, Any]) -> list[str]:
    """Validate protocol specification for missing type references.

    Every enum/field/union/packet name is first registered, then each
    structure's field types are resolved against that registry.

    Args:
        protocol: Parsed protocol dictionary

    Returns:
        List of error messages (empty if validation passes)
    """
    problems: list[str] = []
    registry = TypeRegistry()

    enum_defs = protocol.get("enums", {})
    field_defs = protocol.get("fields", {})
    compound_defs = protocol.get("compound_fields", {})
    union_defs = protocol.get("unions", {})
    packet_defs = protocol.get("packets", {})

    # Phase 1: register every known type name.
    for name in enum_defs:
        registry.register_enum(name)
    for name in field_defs:
        registry.register_field(name)
    # Compound fields share the field namespace.
    for name in compound_defs:
        registry.register_field(name)
    for name in union_defs:
        registry.register_union(name)
    # Packets are grouped by category; register each inner packet name.
    for grouped in packet_defs.values():
        if isinstance(grouped, dict):
            for name in grouped:
                registry.register_packet(name)

    def check_struct(label: str, definition: dict[str, Any]) -> None:
        """Append an error for each field whose base type is unregistered."""
        if not (isinstance(definition, dict) and "fields" in definition):
            return
        for entry in definition["fields"]:
            if "type" not in entry:
                continue
            declared = entry["type"]
            base, _, _ = parse_field_type(declared)
            if not registry.has_type(base):
                # Reserved (nameless) fields are reported as "reserved".
                member = entry.get("name", "reserved")
                problems.append(
                    f"{label}.{member}: Unknown type '{base}' in field type '{declared}'"
                )

    # Phase 2: resolve every field-type reference.
    for name, definition in field_defs.items():
        check_struct(f"fields.{name}", definition)
    for name, definition in compound_defs.items():
        check_struct(f"compound_fields.{name}", definition)
    for name, definition in union_defs.items():
        check_struct(f"unions.{name}", definition)
    for category, grouped in packet_defs.items():
        if isinstance(grouped, dict):
            for name, definition in grouped.items():
                if isinstance(definition, dict):
                    check_struct(f"packets.{category}.{name}", definition)

    return problems
1223
-
1224
-
1225
def should_skip_button_relay(name: str) -> bool:
    """Report whether a type name belongs to the Button/Relay family.

    Args:
        name: Type name to check (enum, field, union, packet, or category)

    Returns:
        True if the name starts with Button or Relay, False otherwise
    """
    # str.startswith accepts a tuple: one call covers both prefixes.
    return name.startswith(("Button", "Relay"))
1235
-
1236
-
1237
def filter_button_relay_items(items: dict[str, Any]) -> dict[str, Any]:
    """Filter out Button and Relay items from a dictionary.

    Args:
        items: Dictionary of items to filter

    Returns:
        Filtered dictionary without Button/Relay items
    """
    kept: dict[str, Any] = {}
    for key, value in items.items():
        # Same predicate as should_skip_button_relay: drop both families.
        if key.startswith(("Button", "Relay")):
            continue
        kept[key] = value
    return kept
1251
-
1252
-
1253
def filter_button_relay_packets(packets: dict[str, Any]) -> dict[str, Any]:
    """Filter out button and relay category packets.

    Args:
        packets: Dictionary of packet definitions (grouped by category)

    Returns:
        Filtered dictionary without button/relay categories
    """
    # Packet categories use lowercase keys, unlike the type-name filter.
    excluded = ("button", "relay")
    return {name: group for name, group in packets.items() if name not in excluded}
1267
-
1268
-
1269
def extract_packets_as_fields(
    packets: dict[str, Any], fields: dict[str, Any]
) -> dict[str, Any]:
    """Extract packets that are used as field types in other structures.

    Args:
        packets: Dictionary of packet definitions
        fields: Dictionary of field definitions to scan

    Returns:
        Dictionary of packet definitions that are referenced as field types
    """
    # Index every concrete packet (one carrying a pkt_type) by name,
    # discarding the category grouping.
    by_name: dict[str, Any] = {}
    for grouped in packets.values():
        if not isinstance(grouped, dict):
            continue
        for name, definition in grouped.items():
            if isinstance(definition, dict) and "pkt_type" in definition:
                by_name[name] = definition

    # Walk every field structure and collect packet types it references.
    referenced: dict[str, Any] = {}
    for struct_def in fields.values():
        if not (isinstance(struct_def, dict) and "fields" in struct_def):
            continue
        for entry in struct_def["fields"]:
            if "type" not in entry:
                continue
            base, _, nested = parse_field_type(entry["type"])
            # Only nested (non-primitive) types can name a packet.
            if nested and base in by_name:
                referenced[base] = by_name[base]

    return referenced
1306
-
1307
-
1308
def main() -> None:
    """Main generator entry point.

    Downloads the protocol spec, strips Button/Relay definitions,
    validates type references, and writes the generated modules.
    """
    try:
        # Download and parse protocol from GitHub
        protocol = download_protocol()
    except Exception as e:
        print(f"Error: Failed to download protocol.yml: {e}", file=sys.stderr)
        sys.exit(1)

    # Filter out Button and Relay items (not relevant for light control),
    # extracting each section directly from the parsed protocol.
    print("Filtering out Button and Relay items...")
    enums = filter_button_relay_items(protocol.get("enums", {}))
    fields = filter_button_relay_items(protocol.get("fields", {}))
    compound_fields = filter_button_relay_items(protocol.get("compound_fields", {}))
    unions = filter_button_relay_items(protocol.get("unions", {}))
    packets = filter_button_relay_packets(protocol.get("packets", {}))

    # Validate against the filtered view so removed Button/Relay types are
    # not reported as missing references.
    filtered_protocol = {
        **protocol,
        "enums": enums,
        "fields": fields,
        "compound_fields": compound_fields,
        "unions": unions,
        "packets": packets,
    }

    print("Validating protocol specification...")
    validation_errors = validate_protocol_spec(filtered_protocol)
    if validation_errors:
        print("Validation failed with the following errors:", file=sys.stderr)
        for error in validation_errors:
            print(f" - {error}", file=sys.stderr)
        sys.exit(1)
    print("Validation passed!")

    # Packets referenced as field types (e.g. DeviceStateVersion) must also
    # be emitted into the types module.
    packets_as_fields = extract_packets_as_fields(packets, fields)

    print(f"Found {len(unions)} unions")
    print(
        f"Found {len(packets_as_fields)} packets used as field types: {list(packets_as_fields.keys())}"
    )

    # Output directory: <repo>/src/lifx_emulator/protocol
    protocol_dir = (
        Path(__file__).parent.parent.parent.parent
        / "src"
        / "lifx_emulator"
        / "protocol"
    )

    # Named protocol_types.py to avoid conflict with Python's types module.
    types_file = protocol_dir / "protocol_types.py"
    types_file.write_text(
        generate_types_file(enums, fields, compound_fields, unions, packets_as_fields)
    )
    print(f"Generated {types_file}")

    packets_file = protocol_dir / "packets.py"
    packets_file.write_text(
        generate_packets_file(
            packets, fields, compound_fields, unions, packets_as_fields, enums
        )
    )
    print(f"Generated {packets_file}")
1381
-
1382
-
1383
# Allow running the generator directly as a script.
if __name__ == "__main__":
    main()