matter-python-client 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,94 @@
1
+ """Matter Exceptions."""
2
+
3
+ from __future__ import annotations
4
+
5
# mapping from error_code to Exception class
ERROR_MAP: dict[int, type] = {}


class MatterError(Exception):
    """Generic Matter exception."""

    # numeric code used to identify this error type over the wire;
    # subclasses override it with their own unique code
    error_code = 0

    def __init_subclass__(cls, *args, **kwargs) -> None:  # type: ignore[no-untyped-def]
        """Register a subclass."""
        super().__init_subclass__(*args, **kwargs)
        # auto-register every subclass in ERROR_MAP keyed on its error_code;
        # if two subclasses share a code, the last one defined wins
        ERROR_MAP[cls.error_code] = cls
18
+
19
+
20
class UnknownError(MatterError):
    """Fallback error for unknown/generic failures."""

    error_code = 0  # to map all generic errors


class NodeCommissionFailed(MatterError):
    """Error raised when commissioning of a device failed."""

    error_code = 1


class NodeInterviewFailed(MatterError):
    """Error raised when interview of a device failed."""

    error_code = 2


class NodeNotReady(MatterError):
    """Error raised when performing action on node that has not been fully added."""

    error_code = 3


class NodeNotResolving(MatterError):
    """Error raised when no CASE session could be established."""

    error_code = 4


class NodeNotExists(MatterError):
    """Error raised when performing action on node that does not exist."""

    error_code = 5


class VersionMismatch(MatterError):
    """Issue raised when SDK version mismatches."""

    error_code = 6


class SDKStackError(MatterError):
    """Generic SDK stack error."""

    error_code = 7


class InvalidArguments(MatterError):
    """Error raised when there are invalid arguments provided for a command."""

    error_code = 8


class InvalidCommand(MatterError):
    """Error raised when an unknown/invalid command is requested."""

    error_code = 9


class UpdateCheckError(MatterError):
    """Error raised when there was an error during searching for updates."""

    error_code = 10


class UpdateError(MatterError):
    """Error raised when there was an error during applying updates."""

    error_code = 11
90
+
91
+
92
def exception_from_error_code(error_code: int) -> type[MatterError]:
    """Return the Exception class registered for the given error_code.

    Falls back to the generic MatterError when the code is unknown.
    """
    try:
        return ERROR_MAP[error_code]
    except KeyError:
        return MatterError
File without changes
@@ -0,0 +1,70 @@
1
+ """Several helpers for the WebSockets API."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Callable, Coroutine
6
+ from dataclasses import MISSING, dataclass
7
+ import inspect
8
+ from typing import Any, TypeVar, get_type_hints
9
+
10
+ from matter_server.common.helpers.util import parse_value
11
+
12
+ _F = TypeVar("_F", bound=Callable[..., Any])
13
+
14
+
15
@dataclass
class APICommandHandler:
    """Model for an API command handler."""

    # command identifier this handler is registered under
    command: str
    # inspected call signature of the target callable
    signature: inspect.Signature
    # resolved type hints for the target callable's parameters/return
    type_hints: dict[str, Any]
    # the coroutine function that actually executes the command
    target: Callable[..., Coroutine[Any, Any, Any]]

    @classmethod
    def parse(
        cls, command: str, func: Callable[..., Coroutine[Any, Any, Any]]
    ) -> "APICommandHandler":
        """Parse APICommandHandler by providing a function."""
        # introspect the callable once up-front so dispatch is cheap later
        func_signature = inspect.signature(func)
        func_hints = get_type_hints(func)
        return APICommandHandler(
            command=command,
            signature=func_signature,
            type_hints=func_hints,
            target=func,
        )
35
+
36
+
37
def api_command(command: str) -> Callable[[_F], _F]:
    """Decorate a function as API route/command."""

    def mark(func: _F) -> _F:
        # stash the command id on the function object so the server
        # can discover and register it later; the function is untouched
        func.api_cmd = command  # type: ignore[attr-defined]
        return func

    return mark
45
+
46
+
47
def parse_arguments(
    func_sig: inspect.Signature,
    func_types: dict[str, Any],
    args: dict | None = None,
    strict: bool = False,
) -> dict[str, Any]:
    """Parse (and convert) incoming arguments to correct types."""
    args = args or {}
    # in strict mode reject unknown argument names;
    # otherwise extra args are simply ignored
    if strict:
        for key in args:
            if key not in func_sig.parameters:
                raise KeyError(f"Invalid parameter: '{key}'")
    # convert each declared parameter to its annotated type
    parsed: dict[str, Any] = {}
    for name, param in func_sig.parameters.items():
        fallback = (
            MISSING
            if param.default is inspect.Parameter.empty
            else param.default
        )
        parsed[name] = parse_value(name, args.get(name), func_types[name], fallback)
    return parsed
@@ -0,0 +1,48 @@
1
+ """Helpers to work with (de)serializing of json."""
2
+
3
+ from base64 import b64encode
4
+ from typing import Any
5
+
6
+ from chip.clusters.Types import Nullable
7
+ from chip.tlv import float32, uint
8
+ import orjson
9
+
10
+ JSON_ENCODE_EXCEPTIONS = (TypeError, ValueError)
11
+ JSON_DECODE_EXCEPTIONS = (orjson.JSONDecodeError,)
12
+
13
+
14
def json_encoder_default(obj: Any) -> Any:
    """Convert Special objects.

    Hand other objects to the original method.
    """
    # pylint: disable=too-many-return-statements
    # NOTE: the order of these checks is the dispatch — do not reorder.
    # objects can opt out of serialization by exposing a truthy `do_not_serialize`
    if getattr(obj, "do_not_serialize", None):
        return None
    # sets/tuples have no JSON form: serialize as list
    if isinstance(obj, (set, tuple)):
        return list(obj)
    # Matter SDK numeric wrapper types: unwrap to plain float/int
    if isinstance(obj, float32):
        return float(obj)
    if isinstance(obj, uint):
        return int(obj)
    # Matter Nullable sentinel maps to JSON null
    if isinstance(obj, Nullable):
        return None
    # raw bytes are sent over the wire base64-encoded
    if isinstance(obj, bytes):
        return b64encode(obj).decode("utf-8")
    # exceptions serialize as their message string
    if isinstance(obj, Exception):
        return str(obj)
    # a class (not an instance) serializes as its fully qualified name
    if type(obj) is type:  # pylint: disable=unidiomatic-typecheck
        return f"{obj.__module__}.{obj.__qualname__}"
    # anything else is unsupported: let orjson surface the TypeError
    raise TypeError
37
+
38
+
39
def json_dumps(data: Any) -> str:
    """Dump json string."""
    # allow non-string dict keys and pretty-print with a 2-space indent
    options = orjson.OPT_NON_STR_KEYS | orjson.OPT_INDENT_2
    raw = orjson.dumps(data, option=options, default=json_encoder_default)
    # orjson returns bytes; callers expect str
    return raw.decode("utf-8")
46
+
47
+
48
# re-export orjson's C-accelerated loads as this module's json_loads
json_loads = orjson.loads
@@ -0,0 +1,359 @@
1
+ """Utils for Matter server (and client)."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import base64
6
+ from base64 import b64decode
7
+ import binascii
8
+ from dataclasses import MISSING, asdict, fields, is_dataclass
9
+ from datetime import datetime
10
+ from enum import Enum
11
+ from functools import cache
12
+ from importlib.metadata import PackageNotFoundError, version as pkg_version
13
+ import logging
14
+ import platform
15
+ import socket
16
+ from types import NoneType, UnionType
17
+ from typing import (
18
+ TYPE_CHECKING,
19
+ Any,
20
+ TypeVar,
21
+ Union,
22
+ cast,
23
+ get_args,
24
+ get_origin,
25
+ get_type_hints,
26
+ )
27
+
28
+ from chip.clusters.Types import Nullable, NullValue
29
+ from chip.tlv import float32, uint
30
+
31
+ if TYPE_CHECKING:
32
+ from _typeshed import DataclassInstance
33
+ from chip.clusters.ClusterObjects import (
34
+ ClusterAttributeDescriptor,
35
+ ClusterObjectDescriptor,
36
+ )
37
+
38
+ _T = TypeVar("_T", bound=DataclassInstance)
39
+
40
# PyPI distribution names used to look up the installed CHIP SDK versions
CHIP_CLUSTERS_PKG_NAME = "home-assistant-chip-clusters"
CHIP_CORE_PKG_NAME = "home-assistant-chip-core"

# memoized wrappers: fields()/get_type_hints() are invoked repeatedly for the
# same dataclass types during (de)serialization, so cache results per class
cached_fields = cache(fields)
cached_type_hints = cache(get_type_hints)
45
+
46
+
47
def create_attribute_path_from_attribute(
    endpoint_id: int, attribute: type[ClusterAttributeDescriptor]
) -> str:
    """Create path/identifier for an Attribute.

    Produces the same `endpoint/cluster_id/attribute_id` format as
    create_attribute_path.
    """
    return f"{endpoint_id}/{attribute.cluster_id}/{attribute.attribute_id}"
54
+
55
+
56
def create_attribute_path(
    endpoint: int | None, cluster_id: int | None, attribute_id: int | None
) -> str:
    """
    Create path/identifier string for an Attribute.

    Returns same output as `Attribute.AttributePath` string representation.
    endpoint/cluster_id/attribute_id
    """
    # None renders as the literal string "None", matching the f-string form
    parts = (endpoint, cluster_id, attribute_id)
    return "/".join(str(part) for part in parts)
66
+
67
+
68
def parse_attribute_path(
    attribute_path: str,
) -> tuple[int | None, int | None, int | None]:
    """Parse AttributePath string into tuple of endpoint_id, cluster_id, attribute_id."""
    # exactly three segments expected; a malformed path raises ValueError
    endpoint_str, cluster_str, attr_str = attribute_path.split("/")

    def _to_int(part: str) -> int | None:
        # non-numeric segments (e.g. wildcards) become None
        return int(part) if part.isnumeric() else None

    return (_to_int(endpoint_str), _to_int(cluster_str), _to_int(attr_str))
77
+
78
+
79
def dataclass_to_dict(obj_in: DataclassInstance) -> dict:
    """Convert dataclass instance to dict, coercing all keys to str."""

    def _str_keyed(items: list[tuple[Any, Any]]) -> dict:
        # ensure the dict key is a string
        return {str(key): val for key, val in items}

    return asdict(obj_in, dict_factory=_str_keyed)
90
+
91
+
92
def parse_utc_timestamp(datetime_string: str) -> datetime:
    """Parse datetime from an ISO 8601 string.

    NOTE(review): despite the name, no UTC timezone is enforced here — the
    result is as naive/aware as the input string; confirm callers pass UTC.
    """
    return datetime.fromisoformat(datetime_string)
95
+
96
+
97
def _get_descriptor_key(descriptor: ClusterObjectDescriptor, key: str | int) -> str:
    """Return correct Cluster attribute key for a tag id.

    Non-numeric keys (and numeric tags without a matching field) are returned
    unchanged — note an int key may pass through despite the `str` annotation.
    """
    looks_like_tag = isinstance(key, int) or (isinstance(key, str) and key.isnumeric())
    if looks_like_tag:
        found_field = descriptor.GetFieldByTag(int(key))
        if found_field:
            return cast(str, found_field.Label)
    return cast(str, key)
103
+
104
+
105
def parse_value(
    name: str,
    value: Any,
    value_type: Any,
    default: Any = MISSING,
    allow_none: bool = False,
    allow_sdk_types: bool = False,
) -> Any:
    """
    Try to parse a value from raw (json) data and type annotations.

    If allow_sdk_types is False, any SDK specific custom data types will be converted.
    """
    # pylint: disable=too-many-return-statements,too-many-branches

    if isinstance(value_type, str):
        # this shouldn't happen, but just in case
        # NOTE(review): calling get_type_hints on a plain string looks suspicious
        # (it expects an object with annotations) — confirm this branch is dead.
        value_type = get_type_hints(value_type, globals(), locals())

    # handle value is None/missing but a default value is set
    if value is None and not isinstance(default, type(MISSING)):
        return default
    # handle value is None and sdk type is Nullable
    if value is None and value_type is Nullable:
        return Nullable() if allow_sdk_types else None
    # handle value is None (but that is allowed according to the annotations)
    if value is None and value_type is NoneType:
        return None

    if isinstance(value, dict):
        if descriptor := getattr(value_type, "descriptor", None):
            # handle matter TLV dicts where the keys are just tag identifiers
            value = {_get_descriptor_key(descriptor, x): y for x, y in value.items()}
        # handle a parse error in the sdk which is returned as:
        # {'TLVValue': None, 'Reason': None} or {'TLVValue': None}
        if value.get("TLVValue", MISSING) is None:
            if value_type in (None, Nullable, Any):
                return None
            # treat the malformed payload as a missing value from here on
            value = None

    if is_dataclass(value_type) and isinstance(value, dict):
        return dataclass_from_dict(value_type, value)  # type: ignore[arg-type]
    # get origin value type and inspect one-by-one
    origin: Any = get_origin(value_type)
    if origin in (list, tuple, set) and isinstance(value, (list, tuple, set)):
        # parse each member with the (single) item subtype; None members are dropped
        return origin(
            parse_value(name, subvalue, get_args(value_type)[0])
            for subvalue in value
            if subvalue is not None
        )
    # handle dictionary where we should inspect all values
    if origin is dict:
        subkey_type = get_args(value_type)[0]
        subvalue_type = get_args(value_type)[1]
        return {
            parse_value(subkey, subkey, subkey_type): parse_value(
                f"{subkey}.value",
                subvalue,
                subvalue_type,
                allow_none=allow_none,
                allow_sdk_types=allow_sdk_types,
            )
            for subkey, subvalue in value.items()
        }
    # handle Union type
    if origin is Union or origin is UnionType:
        sub_value_types = get_args(value_type)
        # return early if value is None and None or Nullable allowed
        if value is None and Nullable in sub_value_types and allow_sdk_types:
            return NullValue
        if value is None and NoneType in sub_value_types:
            return None
        # try all possible types
        for sub_arg_type in sub_value_types:
            # try them all until one succeeds
            try:
                return parse_value(
                    name,
                    value,
                    sub_arg_type,
                    allow_none=allow_none,
                    allow_sdk_types=allow_sdk_types,
                )
            except (KeyError, TypeError, ValueError):
                pass
        # if we get to this point, all possibilities failed
        # find out if we should raise or log this
        err = (
            f"Value {value} of type {type(value)} is invalid for {name}, "
            f"expected value of type {value_type}"
        )
        if NoneType not in sub_value_types:
            # raise exception, we have no idea how to handle this value
            raise TypeError(err)
        # failed to parse the (sub) value but None allowed, log only
        logging.getLogger(__name__).warning(err)
        return None
    if origin is type:
        # annotation is type[...]: resolve the incoming value to an actual type
        # NOTE(review): get_type_hints on the raw value is unusual — verify intent.
        return get_type_hints(value, globals(), locals())
    # handle Any as value type (which is basically unprocessable)
    if value_type is Any:
        return value
    # handle value is None (but that is allowed)
    if value is None and allow_none:
        return None
    # raise if value is None and the value is required according to annotations
    if value is None:
        raise KeyError(f"`{name}` of type `{value_type}` is required.")

    try:
        if issubclass(value_type, Enum):
            # handle enums from the SDK that have a value that does not exist in the enum (sigh)
            # pylint: disable=protected-access
            if value not in value_type._value2member_map_:
                # we do not want to crash so we return the raw value
                return value
            return value_type(value)
        if issubclass(value_type, datetime):
            return parse_utc_timestamp(value)
    except TypeError:
        # happens if value_type is not a class
        pass

    # common type conversions (e.g. int as string)
    if value_type is float and isinstance(value, int):
        return float(value)
    if value_type is int and isinstance(value, str) and value.isnumeric():
        return int(value)
    # handle bytes values (sent over the wire as base64 encoded strings)
    if value_type is bytes and isinstance(value, str):
        try:
            return b64decode(value.encode("utf-8"))
        except binascii.Error:
            # unfortunately sometimes the data is malformed
            # as it is not super important we ignore it (for now)
            return b""

    # handle NOCStruct.noc which is typed/specified as bytes but parsed
    # as integer in the tlv parser somehow.
    # https://github.com/home-assistant/core/issues/113279
    # https://github.com/home-assistant/core/issues/116304
    if name == "NOCStruct.noc" and not isinstance(value, bytes):
        return b""

    # Matter SDK specific types
    if value_type is uint and (
        isinstance(value, int) or (isinstance(value, str) and value.isnumeric())
    ):
        return uint(value) if allow_sdk_types else int(value)
    if value_type is float32 and (
        isinstance(value, (float, int))
        or (isinstance(value, str) and value.isnumeric())
    ):
        return float32(value) if allow_sdk_types else float(value)

    # If we reach this point, we could not match the value with the type and we raise
    if not isinstance(value, value_type):
        raise TypeError(
            f"Value {value} of type {type(value)} is invalid for {name}, "
            f"expected value of type {value_type}"
        )
    return value
267
+
268
+
269
def dataclass_from_dict(
    cls: type[_T],
    dict_obj: dict,
    strict: bool = False,
    allow_sdk_types: bool = False,
) -> _T:
    """
    Create (instance of) a dataclass by providing a dict with values.

    Including support for nested structures and common type conversions.
    If strict mode enabled, any additional keys in the provided dict will result in a KeyError.
    """
    dc_fields = cached_fields(cls)
    if strict:
        # reject keys that do not correspond to any dataclass field
        allowed_names = {dc_field.name for dc_field in dc_fields}
        extra_keys = dict_obj.keys() - allowed_names
        if extra_keys:
            raise KeyError(
                f"Extra key(s) {','.join(extra_keys)} not allowed for {str(cls)}"
            )
    type_hints = cached_type_hints(cls)
    # build constructor kwargs, converting each value to its annotated type;
    # fields excluded from __init__ are skipped
    kwargs: dict[str, Any] = {}
    for dc_field in dc_fields:
        if not dc_field.init:
            continue
        kwargs[dc_field.name] = parse_value(
            f"{cls.__name__}.{dc_field.name}",
            dict_obj.get(dc_field.name),
            type_hints[dc_field.name],
            dc_field.default,
            allow_none=not strict,
            allow_sdk_types=allow_sdk_types,
        )
    return cls(**kwargs)
303
+
304
+
305
def package_version(pkg_name: str) -> str:
    """
    Return the version of an installed package.

    Will return `0.0.0` if the package is not found.
    """
    fallback = "0.0.0"
    try:
        found_version = pkg_version(pkg_name)
    except PackageNotFoundError:
        return fallback
    # guard against metadata returning None
    if found_version is None:
        return fallback  # type: ignore[unreachable]
    return found_version
318
+
319
+
320
@cache
def chip_clusters_version() -> str:
    """Return the version of the CHIP SDK (clusters package) that is installed."""
    return package_version(CHIP_CLUSTERS_PKG_NAME)


@cache
def chip_core_version() -> str:
    """Return the version of the CHIP SDK (core package) that is installed."""
    if platform.system() != "Darwin":
        return package_version(CHIP_CORE_PKG_NAME)
    # On macOS the core wheel is not shipped, so report the clusters version.
    # TODO: Fix this once we can install our own wheels on macos.
    return chip_clusters_version()
333
+
334
+
335
def convert_hex_string(hex_str: str | bytes) -> str:
    """Convert (Base64 encoded) byte array received from the sdk to a regular (unicode) string."""
    # a str argument is the base64 form used when sent over our api
    raw = base64.b64decode(hex_str) if isinstance(hex_str, str) else hex_str
    # bytes.hex() yields the same lowercase two-digit-per-byte output
    return bytes(raw).hex()
343
+
344
+
345
def convert_mac_address(hex_mac: str | bytes) -> str:
    """Convert (Base64 encoded) byte array MAC received from the sdk to a regular mac-address."""
    # a str argument is the base64 form used when sent over our api
    raw = base64.b64decode(hex_mac) if isinstance(hex_mac, str) else hex_mac
    # bytes.hex with a separator gives the colon-delimited lowercase form
    return bytes(raw).hex(":")
352
+
353
+
354
def convert_ip_address(hex_ip: str | bytes, ipv6: bool = False) -> str:
    """Convert (Base64 encoded) byte array IP received from the Matter SDK to a regular IP."""
    # a str argument is the base64 form used when sent over our api
    raw = base64.b64decode(hex_ip) if isinstance(hex_ip, str) else hex_ip
    family = socket.AF_INET6 if ipv6 else socket.AF_INET
    return socket.inet_ntop(family, raw)