lifx-emulator 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lifx_emulator/__init__.py +31 -0
- lifx_emulator/__main__.py +607 -0
- lifx_emulator/api.py +1825 -0
- lifx_emulator/async_storage.py +308 -0
- lifx_emulator/constants.py +33 -0
- lifx_emulator/device.py +750 -0
- lifx_emulator/device_states.py +114 -0
- lifx_emulator/factories.py +380 -0
- lifx_emulator/handlers/__init__.py +39 -0
- lifx_emulator/handlers/base.py +49 -0
- lifx_emulator/handlers/device_handlers.py +340 -0
- lifx_emulator/handlers/light_handlers.py +372 -0
- lifx_emulator/handlers/multizone_handlers.py +249 -0
- lifx_emulator/handlers/registry.py +110 -0
- lifx_emulator/handlers/tile_handlers.py +309 -0
- lifx_emulator/observers.py +139 -0
- lifx_emulator/products/__init__.py +28 -0
- lifx_emulator/products/generator.py +771 -0
- lifx_emulator/products/registry.py +1446 -0
- lifx_emulator/products/specs.py +242 -0
- lifx_emulator/products/specs.yml +327 -0
- lifx_emulator/protocol/__init__.py +1 -0
- lifx_emulator/protocol/base.py +334 -0
- lifx_emulator/protocol/const.py +8 -0
- lifx_emulator/protocol/generator.py +1371 -0
- lifx_emulator/protocol/header.py +159 -0
- lifx_emulator/protocol/packets.py +1351 -0
- lifx_emulator/protocol/protocol_types.py +844 -0
- lifx_emulator/protocol/serializer.py +379 -0
- lifx_emulator/scenario_manager.py +402 -0
- lifx_emulator/scenario_persistence.py +206 -0
- lifx_emulator/server.py +482 -0
- lifx_emulator/state_restorer.py +259 -0
- lifx_emulator/state_serializer.py +130 -0
- lifx_emulator/storage_protocol.py +100 -0
- lifx_emulator-1.0.0.dist-info/METADATA +445 -0
- lifx_emulator-1.0.0.dist-info/RECORD +40 -0
- lifx_emulator-1.0.0.dist-info/WHEEL +4 -0
- lifx_emulator-1.0.0.dist-info/entry_points.txt +2 -0
- lifx_emulator-1.0.0.dist-info/licenses/LICENSE +35 -0
|
@@ -0,0 +1,379 @@
|
|
|
1
|
+
"""Binary serialization for LIFX protocol packets.
|
|
2
|
+
|
|
3
|
+
Handles packing and unpacking of protocol structures using struct module.
|
|
4
|
+
All multi-byte values use little-endian byte order per LIFX specification.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import struct
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
# Type format mapping for struct module (little-endian)
|
|
13
|
+
TYPE_FORMATS: dict[str, str] = {
|
|
14
|
+
"uint8": "B",
|
|
15
|
+
"uint16": "H",
|
|
16
|
+
"uint32": "I",
|
|
17
|
+
"uint64": "Q",
|
|
18
|
+
"int8": "b",
|
|
19
|
+
"int16": "h",
|
|
20
|
+
"int32": "i",
|
|
21
|
+
"int64": "q",
|
|
22
|
+
"float32": "f",
|
|
23
|
+
"bool": "?",
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
# Type sizes in bytes
|
|
27
|
+
TYPE_SIZES: dict[str, int] = {
|
|
28
|
+
"uint8": 1,
|
|
29
|
+
"uint16": 2,
|
|
30
|
+
"uint32": 4,
|
|
31
|
+
"uint64": 8,
|
|
32
|
+
"int8": 1,
|
|
33
|
+
"int16": 2,
|
|
34
|
+
"int32": 4,
|
|
35
|
+
"int64": 8,
|
|
36
|
+
"float32": 4,
|
|
37
|
+
"bool": 1,
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
# Pre-compiled struct.Struct objects for faster pack/unpack (optimization)
|
|
41
|
+
_STRUCT_CACHE: dict[str, struct.Struct] = {
|
|
42
|
+
"uint8": struct.Struct("<B"),
|
|
43
|
+
"uint16": struct.Struct("<H"),
|
|
44
|
+
"uint32": struct.Struct("<I"),
|
|
45
|
+
"uint64": struct.Struct("<Q"),
|
|
46
|
+
"int8": struct.Struct("<b"),
|
|
47
|
+
"int16": struct.Struct("<h"),
|
|
48
|
+
"int32": struct.Struct("<i"),
|
|
49
|
+
"int64": struct.Struct("<q"),
|
|
50
|
+
"float32": struct.Struct("<f"),
|
|
51
|
+
"bool": struct.Struct("<?"),
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def get_type_size(type_name: str) -> int:
    """Return the size in bytes of a primitive type.

    Args:
        type_name: Type name (e.g., 'uint16', 'float32')

    Returns:
        Size in bytes

    Raises:
        ValueError: If type is unknown
    """
    # EAFP: a single dict lookup instead of a membership test plus lookup.
    try:
        return TYPE_SIZES[type_name]
    except KeyError:
        raise ValueError(f"Unknown type: {type_name}") from None
|
|
71
|
+
|
|
72
|
+
def pack_value(value: Any, type_name: str) -> bytes:
    """Serialize a single primitive value to little-endian bytes.

    Args:
        value: Value to pack
        type_name: Type name (e.g., 'uint16', 'float32')

    Returns:
        Packed bytes

    Raises:
        ValueError: If type is unknown
        struct.error: If value doesn't match type
    """
    # One lookup: fetch the pre-compiled Struct, failing for unknown types.
    packer = _STRUCT_CACHE.get(type_name)
    if packer is None:
        raise ValueError(f"Unknown type: {type_name}")
    return packer.pack(value)
|
|
91
|
+
|
|
92
|
+
def unpack_value(data: bytes, type_name: str, offset: int = 0) -> tuple[Any, int]:
    """Deserialize a single primitive value from bytes.

    Args:
        data: Bytes to unpack from
        type_name: Type name (e.g., 'uint16', 'float32')
        offset: Offset in bytes to start unpacking

    Returns:
        Tuple of (unpacked_value, new_offset)

    Raises:
        ValueError: If type is unknown or data is too short
        struct.error: If data format is invalid
    """
    unpacker = _STRUCT_CACHE.get(type_name)
    if unpacker is None:
        raise ValueError(f"Unknown type: {type_name}")

    end = offset + TYPE_SIZES[type_name]
    # Explicit length check gives a clearer error than struct.error would.
    if end > len(data):
        raise ValueError(
            f"Not enough data to unpack {type_name}: "
            f"need {end} bytes, got {len(data)}"
        )

    # unpack_from reads in place (no slice copy) and returns a 1-tuple.
    return unpacker.unpack_from(data, offset)[0], end
|
|
121
|
+
|
|
122
|
+
def pack_array(values: list[Any], element_type: str, count: int) -> bytes:
    """Pack an array of values into bytes.

    Args:
        values: List of values to pack
        element_type: Type of each element (e.g., 'uint8', 'uint16')
        count: Expected number of elements

    Returns:
        Packed bytes

    Raises:
        ValueError: If values length doesn't match count or type is unknown
    """
    if len(values) != count:
        raise ValueError(f"Expected {count} values, got {len(values)}")

    # Optimization: pack the entire primitive array with a single struct call.
    if element_type in TYPE_FORMATS:
        format_str = f"<{count}{TYPE_FORMATS[element_type]}"
        return struct.pack(format_str, *values)

    # Fall back to element-by-element for complex types.
    # b"".join avoids the quadratic behavior of repeated bytes concatenation
    # (the previous `result += ...` loop copied the accumulator every step).
    return b"".join(pack_value(value, element_type) for value in values)
|
|
150
|
+
|
|
151
|
+
def unpack_array(
    data: bytes, element_type: str, count: int, offset: int = 0
) -> tuple[list[Any], int]:
    """Unpack an array of values from bytes.

    Args:
        data: Bytes to unpack from
        element_type: Type of each element
        count: Number of elements to unpack
        offset: Offset in bytes to start unpacking

    Returns:
        Tuple of (list_of_values, new_offset)
    """
    fmt_char = TYPE_FORMATS.get(element_type)
    if fmt_char is not None:
        # Fast path: decode the whole primitive array in one struct call.
        size = TYPE_SIZES[element_type] * count
        end = offset + size

        if end > len(data):
            raise ValueError(
                f"Not enough data to unpack array: "
                f"need {end} bytes, got {len(data)}"
            )

        return list(struct.unpack_from(f"<{count}{fmt_char}", data, offset)), end

    # Slow path: complex element types are decoded one at a time, with
    # unpack_value advancing the running offset.
    items: list[Any] = []
    pos = offset
    for _ in range(count):
        item, pos = unpack_value(data, element_type, pos)
        items.append(item)

    return items, pos
|
|
189
|
+
|
|
190
|
+
def pack_string(value: str, length: int) -> bytes:
    """Encode a string as UTF-8 into a fixed-length, null-padded buffer.

    Truncation never splits a multi-byte UTF-8 sequence: trailing bytes of
    a partial character are dropped so the output stays valid UTF-8 and
    cannot crash device firmware on decode (VUL-002 mitigation).

    Args:
        value: String to pack
        length: Fixed length in bytes

    Returns:
        Packed bytes (null-padded if necessary)
    """
    raw = value.encode("utf-8")

    if len(raw) > length:
        raw = raw[:length]
        # Back up to a character boundary: strip trailing bytes until the
        # remainder decodes cleanly (at most 3 iterations for valid UTF-8).
        while raw:
            try:
                raw.decode("utf-8")
            except UnicodeDecodeError:
                raw = raw[:-1]
            else:
                break

    return raw.ljust(length, b"\x00")
|
|
221
|
+
|
|
222
|
+
def unpack_string(data: bytes, length: int, offset: int = 0) -> tuple[str, int]:
    """Decode a fixed-length, null-padded UTF-8 string from bytes.

    Args:
        data: Bytes to unpack from
        length: Length in bytes to read
        offset: Offset in bytes to start unpacking

    Returns:
        Tuple of (string, new_offset)
    """
    end = offset + length
    if end > len(data):
        raise ValueError(
            f"Not enough data to unpack string: "
            f"need {end} bytes, got {len(data)}"
        )

    # Trailing NUL padding is stripped; errors="replace" keeps decoding
    # total even if the wire bytes are not valid UTF-8.
    text = data[offset:end].rstrip(b"\x00").decode("utf-8", errors="replace")
    return text, end
|
|
244
|
+
|
|
245
|
+
def pack_reserved(size: int) -> bytes:
    """Produce zero bytes for a reserved protocol field.

    Args:
        size: Number of bytes

    Returns:
        Zero bytes
    """
    # bytes(n) constructs n NUL bytes directly.
    return bytes(size)
|
|
256
|
+
|
|
257
|
+
def pack_bytes(data: bytes, length: int) -> bytes:
    """Fit raw bytes into exactly ``length`` bytes.

    Args:
        data: Bytes to pack
        length: Fixed length in bytes

    Returns:
        Packed bytes (null-padded or truncated if necessary)
    """
    # Truncate first, then pad: ljust is a no-op when already full length.
    return data[:length].ljust(length, b"\x00")
|
|
271
|
+
|
|
272
|
+
def unpack_bytes(data: bytes, length: int, offset: int = 0) -> tuple[bytes, int]:
    """Read a fixed-length run of raw bytes.

    Args:
        data: Bytes to unpack from
        length: Length in bytes to read
        offset: Offset in bytes to start unpacking

    Returns:
        Tuple of (bytes, new_offset)

    Raises:
        ValueError: If data is too short
    """
    end = offset + length
    if end > len(data):
        raise ValueError(
            f"Not enough data to unpack bytes: "
            f"need {end} bytes, got {len(data)}"
        )

    return data[offset:end], end
|
|
295
|
+
|
|
296
|
+
class FieldSerializer:
    """Serializer for structured fields with nested types."""

    def __init__(self, field_definitions: dict[str, dict[str, str]]):
        """Initialize serializer with field definitions.

        Args:
            field_definitions: Dict mapping field names to their structure definitions
                (e.g., {"HSBK": {"hue": "uint16", "saturation": "uint16", ...}})
        """
        self.field_definitions = field_definitions

    def _definition(self, field_name: str) -> dict[str, str]:
        """Return the attribute map for a field, raising ValueError if unknown."""
        try:
            return self.field_definitions[field_name]
        except KeyError:
            raise ValueError(f"Unknown field: {field_name}") from None

    def pack_field(self, field_data: dict[str, Any], field_name: str) -> bytes:
        """Pack a structured field.

        Args:
            field_data: Dictionary of field values
            field_name: Name of the field structure (e.g., "HSBK")

        Returns:
            Packed bytes

        Raises:
            ValueError: If field_name is unknown
        """
        chunks: list[bytes] = []
        for attr_name, attr_type in self._definition(field_name).items():
            if attr_name not in field_data:
                raise ValueError(f"Missing attribute {attr_name} in {field_name}")
            chunks.append(pack_value(field_data[attr_name], attr_type))
        return b"".join(chunks)

    def unpack_field(
        self, data: bytes, field_name: str, offset: int = 0
    ) -> tuple[dict[str, Any], int]:
        """Unpack a structured field.

        Args:
            data: Bytes to unpack from
            field_name: Name of the field structure
            offset: Offset to start unpacking

        Returns:
            Tuple of (field_dict, new_offset)

        Raises:
            ValueError: If field_name is unknown
        """
        unpacked: dict[str, Any] = {}
        pos = offset
        # Attributes are decoded in definition order, advancing the offset.
        for attr_name, attr_type in self._definition(field_name).items():
            unpacked[attr_name], pos = unpack_value(data, attr_type, pos)
        return unpacked, pos

    def get_field_size(self, field_name: str) -> int:
        """Get the size in bytes of a field structure.

        Args:
            field_name: Name of the field structure

        Returns:
            Size in bytes

        Raises:
            ValueError: If field_name is unknown
        """
        return sum(
            TYPE_SIZES[attr_type]
            for attr_type in self._definition(field_name).values()
        )