ucapi 0.6.0__tar.gz → 0.7.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ucapi-0.6.0 → ucapi-0.7.0}/CHANGELOG.md +10 -0
- {ucapi-0.6.0/ucapi.egg-info → ucapi-0.7.0}/PKG-INFO +1 -1
- ucapi-0.7.0/scripts/git-tag.py +168 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/test-requirements.txt +1 -1
- {ucapi-0.6.0 → ucapi-0.7.0}/tests/test_api.py +50 -10
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/_version.py +3 -3
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/api.py +474 -114
- {ucapi-0.6.0 → ucapi-0.7.0/ucapi.egg-info}/PKG-INFO +1 -1
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi.egg-info/SOURCES.txt +1 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/CONTRIBUTING.md +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/LICENSE +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/README.md +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/docs/code_guidelines.md +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/docs/setup.md +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/README.md +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/hello_integration.json +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/hello_integration.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/remote.json +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/remote.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/remote_ui_page.json +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/setup_flow.json +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/setup_flow.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/voice.json +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/examples/voice.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/pyproject.toml +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/requirements.txt +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/scripts/compile_protos.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/setup.cfg +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/tests/test_media_player.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/tests/test_paging.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/tests/test_voice_assistant.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/__init__.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/api_definitions.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/button.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/climate.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/cover.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/entities.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/entity.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/ir_emitter.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/light.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/media_player.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/msg_definitions.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/proto/__init__.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/proto/ucr_integration_voice.proto +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/proto/ucr_integration_voice_pb2.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/proto/ucr_integration_voice_pb2.pyi +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/remote.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/select.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/sensor.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/switch.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/ui.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/voice_assistant.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi/voice_stream.py +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi.egg-info/dependency_links.txt +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi.egg-info/requires.txt +0 -0
- {ucapi-0.6.0 → ucapi-0.7.0}/ucapi.egg-info/top_level.txt +0 -0
|
@@ -11,6 +11,16 @@ _Changes in the next release_
|
|
|
11
11
|
|
|
12
12
|
---
|
|
13
13
|
|
|
14
|
+
## v0.7.0 - 2026-05-10
|
|
15
|
+
### Added
|
|
16
|
+
- Add requests for supported entity types, version and localization. Only send available entities with supported entity types by @albaintor and @kennymc-c ([#47](https://github.com/unfoldedcircle/integration-python-library/pull/47)).
|
|
17
|
+
|
|
18
|
+
### Changed
|
|
19
|
+
- Improved WS msg processing with dedicated consumer, producer and router tasks with asyncio queues ([#47](https://github.com/unfoldedcircle/integration-python-library/pull/47)).
|
|
20
|
+
- Sanitize log messages to prevent sensitive information exposure ([#56](https://github.com/unfoldedcircle/integration-python-library/pull/56)).
|
|
21
|
+
- Log WebSocket messages as JSON data instead of a Python dict ([#58](https://github.com/unfoldedcircle/integration-python-library/pull/58)).
|
|
22
|
+
- Updated GitHub actions and test dependencies.
|
|
23
|
+
|
|
14
24
|
## v0.6.0 - 2026-04-10
|
|
15
25
|
### Breaking Changes
|
|
16
26
|
- Renamed `MediaType` to `MediaContentType` and changed enums to lowercase. See media-player entity documentation for more information ([#50](https://github.com/unfoldedcircle/integration-python-library/pull/50)).
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Git tag release script to simplify creating git tags with changelog since last release.
|
|
4
|
+
Copyright (c) 2026 Unfolded Circle.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
import subprocess
|
|
9
|
+
import sys
|
|
10
|
+
import tempfile
|
|
11
|
+
import re
|
|
12
|
+
import json
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def run_command(command, check=True):
|
|
16
|
+
"""Run a shell command and return the output."""
|
|
17
|
+
try:
|
|
18
|
+
result = subprocess.run(
|
|
19
|
+
command, shell=True, check=check, capture_output=True, text=True
|
|
20
|
+
)
|
|
21
|
+
return result.stdout.strip()
|
|
22
|
+
except subprocess.CalledProcessError as e:
|
|
23
|
+
print(f"Error running command: {command}")
|
|
24
|
+
print(f"Stdout: {e.stdout}")
|
|
25
|
+
print(f"Stderr: {e.stderr}")
|
|
26
|
+
if check:
|
|
27
|
+
sys.exit(1)
|
|
28
|
+
return None
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def get_latest_tag():
|
|
32
|
+
"""Get the latest git tag."""
|
|
33
|
+
return run_command("git describe --tags --abbrev=0", check=False)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def get_commits_since(tag):
|
|
37
|
+
"""Get commits since the specified tag."""
|
|
38
|
+
format_str = '--pretty=format:"%s|%an"'
|
|
39
|
+
if tag:
|
|
40
|
+
return run_command(f"git log {tag}..HEAD {format_str}")
|
|
41
|
+
else:
|
|
42
|
+
return run_command(f"git log {format_str}")
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def is_valid_semver(tag):
|
|
46
|
+
"""Check if the version is a valid semver format (X.Y.Z)."""
|
|
47
|
+
return re.match(r"^\d+\.\d+\.\d+$", tag) is not None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
import argparse
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def main():
|
|
54
|
+
parser = argparse.ArgumentParser(
|
|
55
|
+
description="Git tag script to simplify creating git tags."
|
|
56
|
+
)
|
|
57
|
+
parser.add_argument(
|
|
58
|
+
"version", help="The new version in semver format (e.g., 0.21.0)"
|
|
59
|
+
)
|
|
60
|
+
parser.add_argument(
|
|
61
|
+
"--dry-run", action="store_true", help="Do not create or push the tag"
|
|
62
|
+
)
|
|
63
|
+
args = parser.parse_args()
|
|
64
|
+
|
|
65
|
+
version = args.version
|
|
66
|
+
dry_run = args.dry_run
|
|
67
|
+
if not is_valid_semver(version):
|
|
68
|
+
print(f"Error: Version '{version}' is not in valid semver format (X.Y.Z)")
|
|
69
|
+
sys.exit(1)
|
|
70
|
+
|
|
71
|
+
new_tag = f"v{version}"
|
|
72
|
+
|
|
73
|
+
# Check if tag already exists
|
|
74
|
+
existing_tags = run_command("git tag").split("\n")
|
|
75
|
+
if new_tag in existing_tags:
|
|
76
|
+
print(f"Error: Tag '{new_tag}' already exists.")
|
|
77
|
+
sys.exit(1)
|
|
78
|
+
|
|
79
|
+
latest_tag = get_latest_tag()
|
|
80
|
+
print(f"Latest tag: {latest_tag}")
|
|
81
|
+
|
|
82
|
+
if latest_tag:
|
|
83
|
+
commits = get_commits_since(latest_tag)
|
|
84
|
+
else:
|
|
85
|
+
commits = get_commits_since(None)
|
|
86
|
+
|
|
87
|
+
if not commits:
|
|
88
|
+
print("No commits since last tag.")
|
|
89
|
+
sys.exit(0)
|
|
90
|
+
|
|
91
|
+
# Process commits
|
|
92
|
+
formatted_pr_commits = []
|
|
93
|
+
formatted_other_commits = []
|
|
94
|
+
|
|
95
|
+
for line in commits.split("\n"):
|
|
96
|
+
if not line:
|
|
97
|
+
continue
|
|
98
|
+
parts = line.split("|")
|
|
99
|
+
if len(parts) < 2:
|
|
100
|
+
continue
|
|
101
|
+
message = parts[0]
|
|
102
|
+
_author = parts[1]
|
|
103
|
+
|
|
104
|
+
# Extract PR number if present
|
|
105
|
+
pr_match = re.search(r"\(#(\d+)\)", message)
|
|
106
|
+
if pr_match:
|
|
107
|
+
pr_num = pr_match.group(1)
|
|
108
|
+
# Remove the (#num) part from message
|
|
109
|
+
clean_message = re.sub(r"\s*\(#\d+\)", "", message).strip()
|
|
110
|
+
formatted_line = f"{clean_message} in #{pr_num}"
|
|
111
|
+
formatted_pr_commits.append(formatted_line)
|
|
112
|
+
else:
|
|
113
|
+
formatted_line = f"{message}"
|
|
114
|
+
formatted_other_commits.append(formatted_line)
|
|
115
|
+
|
|
116
|
+
initial_message = f"Release {new_tag}\n\n"
|
|
117
|
+
if formatted_pr_commits:
|
|
118
|
+
initial_message += "Pull Requests:\n"
|
|
119
|
+
for line in formatted_pr_commits:
|
|
120
|
+
initial_message += f"- {line}\n"
|
|
121
|
+
initial_message += "\n"
|
|
122
|
+
|
|
123
|
+
if formatted_other_commits:
|
|
124
|
+
initial_message += "Other Changes:\n"
|
|
125
|
+
for line in formatted_other_commits:
|
|
126
|
+
initial_message += f"- {line}\n"
|
|
127
|
+
|
|
128
|
+
# Create temporary file for editing
|
|
129
|
+
with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as tmp:
|
|
130
|
+
tmp.write(initial_message.encode("utf-8"))
|
|
131
|
+
tmp_path = tmp.name
|
|
132
|
+
|
|
133
|
+
editor = os.environ.get("EDITOR", "nano")
|
|
134
|
+
subprocess.call([editor, tmp_path])
|
|
135
|
+
|
|
136
|
+
with open(tmp_path, "r") as f:
|
|
137
|
+
tag_message = f.read().strip()
|
|
138
|
+
|
|
139
|
+
if not tag_message:
|
|
140
|
+
os.unlink(tmp_path)
|
|
141
|
+
print("Tag message is empty. Aborting.")
|
|
142
|
+
sys.exit(1)
|
|
143
|
+
|
|
144
|
+
print("\n--- Tag Message ---")
|
|
145
|
+
print(tag_message)
|
|
146
|
+
print("-------------------\n")
|
|
147
|
+
|
|
148
|
+
confirm = input(f"Create and push tag {new_tag}? (y/n): ")
|
|
149
|
+
if confirm.lower() == "y":
|
|
150
|
+
if dry_run:
|
|
151
|
+
print(f"[DRY-RUN] Would create annotated tag: {new_tag}")
|
|
152
|
+
print(f"[DRY-RUN] Would push tag {new_tag} to origin.")
|
|
153
|
+
else:
|
|
154
|
+
# Create annotated tag
|
|
155
|
+
run_command(f'git tag -a {new_tag} -F "{tmp_path}"')
|
|
156
|
+
print(f"Tag {new_tag} created locally.")
|
|
157
|
+
|
|
158
|
+
# Push tag
|
|
159
|
+
run_command(f"git push origin {new_tag}")
|
|
160
|
+
print(f"Tag {new_tag} pushed to origin.")
|
|
161
|
+
else:
|
|
162
|
+
print("Aborted.")
|
|
163
|
+
|
|
164
|
+
os.unlink(tmp_path)
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
if __name__ == "__main__":
|
|
168
|
+
main()
|
|
@@ -3,7 +3,7 @@
|
|
|
3
3
|
# Workaround: use a pre-commit hook with https://github.com/scikit-image/scikit-image/blob/main/tools/generate_requirements.py
|
|
4
4
|
|
|
5
5
|
# pin pylint version: it has a tendendy for stricter rules in patch updates!
|
|
6
|
-
pylint==4.0.
|
|
6
|
+
pylint==4.0.5
|
|
7
7
|
flake8-docstrings
|
|
8
8
|
flake8
|
|
9
9
|
black
|
|
@@ -1,22 +1,22 @@
|
|
|
1
1
|
import unittest
|
|
2
2
|
from copy import deepcopy
|
|
3
3
|
|
|
4
|
-
from ucapi.api import
|
|
4
|
+
from ucapi.api import sanitize_json_message
|
|
5
5
|
from ucapi.media_player import Attributes
|
|
6
6
|
|
|
7
7
|
|
|
8
|
-
class
|
|
8
|
+
class TestSanitizeJsonMessage(unittest.TestCase):
|
|
9
9
|
|
|
10
10
|
def test_no_modification_when_no_msg_data(self):
|
|
11
11
|
data = {}
|
|
12
|
-
result =
|
|
12
|
+
result = sanitize_json_message(data)
|
|
13
13
|
self.assertEqual(result, {}, "The result should be an empty dictionary")
|
|
14
14
|
|
|
15
15
|
def test_no_changes_when_media_image_url_not_present(self):
|
|
16
16
|
data = {"msg_data": {"attributes": {"state": "playing", "volume": 50}}}
|
|
17
17
|
original = deepcopy(data)
|
|
18
18
|
|
|
19
|
-
result =
|
|
19
|
+
result = sanitize_json_message(data)
|
|
20
20
|
|
|
21
21
|
self.assertEqual(
|
|
22
22
|
result,
|
|
@@ -36,9 +36,9 @@ class TestFilterLogMsgData(unittest.TestCase):
|
|
|
36
36
|
expected_result = deepcopy(data)
|
|
37
37
|
expected_result["msg_data"]["attributes"][
|
|
38
38
|
Attributes.MEDIA_IMAGE_URL
|
|
39
|
-
] = "data
|
|
39
|
+
] = "data:..."
|
|
40
40
|
|
|
41
|
-
result =
|
|
41
|
+
result = sanitize_json_message(data)
|
|
42
42
|
|
|
43
43
|
self.assertEqual(
|
|
44
44
|
result, expected_result, "The MEDIA_IMAGE_URL attribute should be filtered"
|
|
@@ -65,12 +65,12 @@ class TestFilterLogMsgData(unittest.TestCase):
|
|
|
65
65
|
expected_result = deepcopy(data)
|
|
66
66
|
expected_result["msg_data"][0]["attributes"][
|
|
67
67
|
Attributes.MEDIA_IMAGE_URL
|
|
68
|
-
] = "data
|
|
68
|
+
] = "data:..."
|
|
69
69
|
expected_result["msg_data"][1]["attributes"][
|
|
70
70
|
Attributes.MEDIA_IMAGE_URL
|
|
71
|
-
] = "data
|
|
71
|
+
] = "data:..."
|
|
72
72
|
|
|
73
|
-
result =
|
|
73
|
+
result = sanitize_json_message(data)
|
|
74
74
|
|
|
75
75
|
self.assertEqual(
|
|
76
76
|
result,
|
|
@@ -88,8 +88,48 @@ class TestFilterLogMsgData(unittest.TestCase):
|
|
|
88
88
|
}
|
|
89
89
|
original_data = deepcopy(data)
|
|
90
90
|
|
|
91
|
-
|
|
91
|
+
sanitize_json_message(data)
|
|
92
92
|
|
|
93
93
|
self.assertEqual(
|
|
94
94
|
data, original_data, "The input data should not be modified by the function"
|
|
95
95
|
)
|
|
96
|
+
|
|
97
|
+
def test_generic_sensitive_keys_redaction(self):
|
|
98
|
+
sensitive_keys = [
|
|
99
|
+
"token",
|
|
100
|
+
"token_id",
|
|
101
|
+
"access_token",
|
|
102
|
+
"refresh_token",
|
|
103
|
+
"id_token",
|
|
104
|
+
"authorization_code",
|
|
105
|
+
"client_secret",
|
|
106
|
+
"secret",
|
|
107
|
+
"auth_url",
|
|
108
|
+
"client_data",
|
|
109
|
+
"password",
|
|
110
|
+
]
|
|
111
|
+
|
|
112
|
+
for key in sensitive_keys:
|
|
113
|
+
msg = {key: "sensitive-value", "other": "public-value"}
|
|
114
|
+
sanitized = sanitize_json_message(msg)
|
|
115
|
+
self.assertEqual(
|
|
116
|
+
sanitized[key], "***REDACTED***", f"{key} should be redacted"
|
|
117
|
+
)
|
|
118
|
+
self.assertEqual(
|
|
119
|
+
sanitized["other"], "public-value", "public fields should remain intact"
|
|
120
|
+
)
|
|
121
|
+
|
|
122
|
+
def test_recursive_redaction(self):
|
|
123
|
+
msg = {
|
|
124
|
+
"level1": {
|
|
125
|
+
"token": "secret1",
|
|
126
|
+
"level2": {"secret": "secret2", "public": "data"},
|
|
127
|
+
},
|
|
128
|
+
"array": [{"refresh_token": "secret3"}, "plain-string"],
|
|
129
|
+
}
|
|
130
|
+
sanitized = sanitize_json_message(msg)
|
|
131
|
+
self.assertEqual(sanitized["level1"]["token"], "***REDACTED***")
|
|
132
|
+
self.assertEqual(sanitized["level1"]["level2"]["secret"], "***REDACTED***")
|
|
133
|
+
self.assertEqual(sanitized["level1"]["level2"]["public"], "data")
|
|
134
|
+
self.assertEqual(sanitized["array"][0]["refresh_token"], "***REDACTED***")
|
|
135
|
+
self.assertEqual(sanitized["array"][1], "plain-string")
|
|
@@ -18,7 +18,7 @@ version_tuple: tuple[int | str, ...]
|
|
|
18
18
|
commit_id: str | None
|
|
19
19
|
__commit_id__: str | None
|
|
20
20
|
|
|
21
|
-
__version__ = version = '0.
|
|
22
|
-
__version_tuple__ = version_tuple = (0,
|
|
21
|
+
__version__ = version = '0.7.0'
|
|
22
|
+
__version_tuple__ = version_tuple = (0, 7, 0)
|
|
23
23
|
|
|
24
|
-
__commit_id__ = commit_id = '
|
|
24
|
+
__commit_id__ = commit_id = 'gf6e6e4a0e'
|
|
@@ -78,6 +78,18 @@ class _VoiceSessionContext:
|
|
|
78
78
|
handler_task: asyncio.Task | None = None
|
|
79
79
|
|
|
80
80
|
|
|
81
|
+
@dataclass(slots=True)
|
|
82
|
+
class _WsContext:
|
|
83
|
+
"""Websocket context."""
|
|
84
|
+
|
|
85
|
+
incoming: asyncio.Queue[str | bytes | None]
|
|
86
|
+
outgoing: asyncio.Queue[str | None]
|
|
87
|
+
pending: dict[int, asyncio.Future]
|
|
88
|
+
consumer_task: asyncio.Task | None = None
|
|
89
|
+
producer_task: asyncio.Task | None = None
|
|
90
|
+
router_task: asyncio.Task | None = None
|
|
91
|
+
|
|
92
|
+
|
|
81
93
|
# pylint: disable=too-many-public-methods, too-many-lines
|
|
82
94
|
class IntegrationAPI:
|
|
83
95
|
"""Integration API to communicate with Remote Two/3."""
|
|
@@ -107,12 +119,18 @@ class IntegrationAPI:
|
|
|
107
119
|
self._available_entities = Entities("available", self._loop)
|
|
108
120
|
self._configured_entities = Entities("configured", self._loop)
|
|
109
121
|
|
|
122
|
+
self._req_id = 1 # Request ID counter for outgoing requests
|
|
123
|
+
|
|
110
124
|
self._voice_handler: VoiceStreamHandler | None = None
|
|
111
125
|
self._voice_session_timeout: int = self.DEFAULT_VOICE_SESSION_TIMEOUT_S
|
|
112
126
|
# Active voice sessions
|
|
113
127
|
self._voice_sessions: dict[VoiceSessionKey, _VoiceSessionContext] = {}
|
|
114
128
|
# Enforce: at most one active session per entity_id (across all websockets)
|
|
115
129
|
self._voice_session_by_entity: dict[str, VoiceSessionKey] = {}
|
|
130
|
+
# Websocket context with incoming & outgoing queues and handlers
|
|
131
|
+
self._ws_contexts: dict[Any, _WsContext] = {}
|
|
132
|
+
# Supported entity types
|
|
133
|
+
self._supported_entity_types: list[str] | None = None
|
|
116
134
|
|
|
117
135
|
# Setup event loop
|
|
118
136
|
asyncio.set_event_loop(self._loop)
|
|
@@ -129,9 +147,10 @@ class IntegrationAPI:
|
|
|
129
147
|
self, driver_path: str, setup_handler: uc.SetupHandler | None = None
|
|
130
148
|
):
|
|
131
149
|
"""
|
|
132
|
-
Load driver configuration and start integration-API WebSocket server.
|
|
150
|
+
Load driver configuration and start the integration-API WebSocket server.
|
|
133
151
|
|
|
134
|
-
:param driver_path: path to configuration file
|
|
152
|
+
:param driver_path: path to the configuration file. If it is not an absolute
|
|
153
|
+
path, the current working directory is used.
|
|
135
154
|
:param setup_handler: optional driver setup handler if the driver metadata
|
|
136
155
|
contains a setup_data_schema object
|
|
137
156
|
"""
|
|
@@ -214,40 +233,74 @@ class IntegrationAPI:
|
|
|
214
233
|
await asyncio.Future()
|
|
215
234
|
|
|
216
235
|
async def _handle_ws(self, websocket) -> None:
|
|
236
|
+
# Initialize incoming and outgoing queues
|
|
237
|
+
incoming: asyncio.Queue[str | bytes | None] = asyncio.Queue(maxsize=100)
|
|
238
|
+
outgoing: asyncio.Queue[str | None] = asyncio.Queue(maxsize=100)
|
|
239
|
+
|
|
240
|
+
ctx = _WsContext(
|
|
241
|
+
incoming=incoming,
|
|
242
|
+
outgoing=outgoing,
|
|
243
|
+
pending={},
|
|
244
|
+
)
|
|
245
|
+
|
|
246
|
+
self._clients.add(websocket)
|
|
247
|
+
self._ws_contexts[websocket] = ctx
|
|
248
|
+
|
|
217
249
|
try:
|
|
218
|
-
self._clients.add(websocket)
|
|
219
250
|
_LOG.info("WS: Client added: %s", websocket.remote_address)
|
|
220
251
|
|
|
252
|
+
ctx.consumer_task = self._loop.create_task(
|
|
253
|
+
self._ws_consumer(websocket, ctx)
|
|
254
|
+
)
|
|
255
|
+
ctx.producer_task = self._loop.create_task(
|
|
256
|
+
self._ws_producer(websocket, ctx)
|
|
257
|
+
)
|
|
258
|
+
ctx.router_task = self._loop.create_task(self._ws_router(websocket, ctx))
|
|
259
|
+
|
|
221
260
|
# authenticate on connection
|
|
222
261
|
await self._authenticate(websocket, True)
|
|
223
|
-
|
|
224
262
|
self._events.emit(uc.Events.CLIENT_CONNECTED, websocket=websocket)
|
|
263
|
+
tasks = [
|
|
264
|
+
t
|
|
265
|
+
for t in [ctx.consumer_task, ctx.producer_task, ctx.router_task]
|
|
266
|
+
if t is not None
|
|
267
|
+
]
|
|
268
|
+
done, pending = await asyncio.wait(
|
|
269
|
+
tasks,
|
|
270
|
+
return_when=asyncio.FIRST_COMPLETED,
|
|
271
|
+
)
|
|
225
272
|
|
|
226
|
-
|
|
227
|
-
#
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
273
|
+
if pending:
|
|
274
|
+
# graceful shutdown: wait a bit for pending tasks to process sentinel 'None'
|
|
275
|
+
_LOG.debug("[%s] WS: Draining tasks", websocket.remote_address)
|
|
276
|
+
await asyncio.wait(pending, timeout=1.0)
|
|
277
|
+
|
|
278
|
+
for task in pending:
|
|
279
|
+
task.cancel()
|
|
280
|
+
|
|
281
|
+
results = await asyncio.gather(*done, *pending, return_exceptions=True)
|
|
282
|
+
for result in results:
|
|
283
|
+
if isinstance(result, Exception) and not isinstance(
|
|
284
|
+
result, asyncio.CancelledError
|
|
285
|
+
):
|
|
286
|
+
_LOG.error(
|
|
287
|
+
"[%s] WS: Exception in task",
|
|
237
288
|
websocket.remote_address,
|
|
238
|
-
|
|
289
|
+
exc_info=result,
|
|
239
290
|
)
|
|
240
291
|
|
|
241
292
|
except ConnectionClosedOK:
|
|
242
293
|
_LOG.info("[%s] WS: Connection closed", websocket.remote_address)
|
|
243
294
|
|
|
244
295
|
except websockets.exceptions.ConnectionClosedError as e:
|
|
245
|
-
|
|
296
|
+
close = e.rcvd or e.sent
|
|
297
|
+
code = getattr(close, "code", None)
|
|
298
|
+
reason = getattr(close, "reason", None)
|
|
246
299
|
_LOG.info(
|
|
247
|
-
"[%s] WS: Connection closed with error %
|
|
300
|
+
"[%s] WS: Connection closed with error %s: %s",
|
|
248
301
|
websocket.remote_address,
|
|
249
|
-
|
|
250
|
-
|
|
302
|
+
code,
|
|
303
|
+
reason,
|
|
251
304
|
)
|
|
252
305
|
|
|
253
306
|
except websockets.exceptions.WebSocketException as e:
|
|
@@ -258,22 +311,103 @@ class IntegrationAPI:
|
|
|
258
311
|
)
|
|
259
312
|
|
|
260
313
|
finally:
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
314
|
+
await self._cleanup_ws(websocket)
|
|
315
|
+
|
|
316
|
+
async def _ws_consumer(self, websocket, ctx: _WsContext) -> None:
|
|
317
|
+
"""Route incoming message (requests or events from remote or responses to driver)."""
|
|
318
|
+
try:
|
|
319
|
+
async for raw_message in websocket:
|
|
320
|
+
if isinstance(raw_message, str):
|
|
321
|
+
try:
|
|
322
|
+
data = json.loads(raw_message)
|
|
323
|
+
except json.JSONDecodeError:
|
|
324
|
+
_LOG.warning(
|
|
325
|
+
"[%s] WS: Invalid JSON message: %s",
|
|
326
|
+
websocket.remote_address,
|
|
327
|
+
raw_message,
|
|
328
|
+
)
|
|
329
|
+
continue
|
|
330
|
+
|
|
331
|
+
kind: str | None = None
|
|
332
|
+
if isinstance(data, dict):
|
|
333
|
+
kind = data.get("kind")
|
|
334
|
+
|
|
335
|
+
# Handle the response to a previous driver request
|
|
336
|
+
if kind == "resp":
|
|
337
|
+
self._handle_pending_response(websocket, data)
|
|
338
|
+
# Otherwise handle the json request
|
|
339
|
+
else:
|
|
340
|
+
await ctx.incoming.put(data)
|
|
341
|
+
# Handle the binary message
|
|
342
|
+
elif isinstance(raw_message, (bytes, bytearray, memoryview)):
|
|
343
|
+
await ctx.incoming.put(bytes(raw_message))
|
|
344
|
+
else:
|
|
345
|
+
_LOG.warning(
|
|
346
|
+
"[%s] WS: Unsupported message type %s",
|
|
269
347
|
websocket.remote_address,
|
|
270
|
-
|
|
271
|
-
ex,
|
|
348
|
+
type(raw_message).__name__,
|
|
272
349
|
)
|
|
350
|
+
finally:
|
|
351
|
+
await ctx.incoming.put(None)
|
|
352
|
+
await ctx.outgoing.put(None)
|
|
353
|
+
|
|
354
|
+
async def _ws_producer(self, websocket, ctx: _WsContext) -> None:
|
|
355
|
+
"""Route outgoing messages."""
|
|
356
|
+
try:
|
|
357
|
+
while True:
|
|
358
|
+
msg = await ctx.outgoing.get()
|
|
359
|
+
if msg is None:
|
|
360
|
+
break
|
|
361
|
+
await websocket.send(msg)
|
|
362
|
+
except (ConnectionClosedOK, websockets.exceptions.ConnectionClosedError):
|
|
363
|
+
pass
|
|
364
|
+
|
|
365
|
+
async def _ws_router(self, websocket, ctx: _WsContext) -> None:
|
|
366
|
+
"""Route incoming requests."""
|
|
367
|
+
while True:
|
|
368
|
+
message = await ctx.incoming.get()
|
|
369
|
+
if message is None:
|
|
370
|
+
break
|
|
371
|
+
if isinstance(message, dict):
|
|
372
|
+
await self._process_ws_message(websocket, message)
|
|
373
|
+
elif isinstance(message, bytes):
|
|
374
|
+
await self._process_ws_binary_message(websocket, message)
|
|
375
|
+
else:
|
|
376
|
+
_LOG.warning(
|
|
377
|
+
"[%s] WS: Unsupported routed message type %s",
|
|
378
|
+
websocket.remote_address,
|
|
379
|
+
type(message).__name__,
|
|
380
|
+
)
|
|
381
|
+
|
|
382
|
+
def _get_ws_context(self, websocket) -> _WsContext | None:
|
|
383
|
+
return self._ws_contexts.get(websocket)
|
|
384
|
+
|
|
385
|
+
async def _enqueue_ws_payload(self, websocket, payload: dict[str, Any]) -> None:
|
|
386
|
+
ctx = self._get_ws_context(websocket)
|
|
387
|
+
if ctx is None or websocket not in self._clients:
|
|
388
|
+
_LOG.error("Error sending payload: connection no longer established")
|
|
389
|
+
return
|
|
273
390
|
|
|
274
|
-
|
|
275
|
-
_LOG.
|
|
276
|
-
|
|
391
|
+
if _LOG.isEnabledFor(logging.DEBUG):
|
|
392
|
+
_LOG.debug(
|
|
393
|
+
"[%s] <-: %s",
|
|
394
|
+
websocket.remote_address,
|
|
395
|
+
json.dumps(sanitize_json_message(payload)),
|
|
396
|
+
)
|
|
397
|
+
|
|
398
|
+
match payload.get("kind"):
|
|
399
|
+
case "event":
|
|
400
|
+
try:
|
|
401
|
+
ctx.outgoing.put_nowait(json.dumps(payload))
|
|
402
|
+
except asyncio.QueueFull:
|
|
403
|
+
_LOG.warning(
|
|
404
|
+
"[%s] Outgoing queue full, dropping event",
|
|
405
|
+
websocket.remote_address,
|
|
406
|
+
)
|
|
407
|
+
case "req":
|
|
408
|
+
ctx.outgoing.put_nowait(json.dumps(payload))
|
|
409
|
+
case _:
|
|
410
|
+
await ctx.outgoing.put(json.dumps(payload))
|
|
277
411
|
|
|
278
412
|
async def _send_ok_result(
|
|
279
413
|
self, websocket, req_id: int, msg_data: dict[str, Any] | list | None = None
|
|
@@ -312,7 +446,7 @@ class IntegrationAPI:
|
|
|
312
446
|
"""
|
|
313
447
|
await self._send_ws_response(websocket, req_id, "result", msg_data, status_code)
|
|
314
448
|
|
|
315
|
-
# pylint: disable=
|
|
449
|
+
# pylint: disable=too-many-positional-arguments
|
|
316
450
|
async def _send_ws_response(
|
|
317
451
|
self,
|
|
318
452
|
websocket,
|
|
@@ -340,16 +474,7 @@ class IntegrationAPI:
|
|
|
340
474
|
"msg": msg,
|
|
341
475
|
"msg_data": msg_data if msg_data is not None else {},
|
|
342
476
|
}
|
|
343
|
-
|
|
344
|
-
if websocket in self._clients:
|
|
345
|
-
data_dump = json.dumps(data)
|
|
346
|
-
if _LOG.isEnabledFor(logging.DEBUG):
|
|
347
|
-
_LOG.debug(
|
|
348
|
-
"[%s] ->: %s", websocket.remote_address, filter_log_msg_data(data)
|
|
349
|
-
)
|
|
350
|
-
await websocket.send(data_dump)
|
|
351
|
-
else:
|
|
352
|
-
_LOG.error("Error sending response: connection no longer established")
|
|
477
|
+
await self._enqueue_ws_payload(websocket, data)
|
|
353
478
|
|
|
354
479
|
async def _broadcast_ws_event(
|
|
355
480
|
self, msg: str, msg_data: dict[str, Any], category: uc.EventCategory
|
|
@@ -365,17 +490,13 @@ class IntegrationAPI:
|
|
|
365
490
|
:param category: event category
|
|
366
491
|
"""
|
|
367
492
|
data = {"kind": "event", "msg": msg, "msg_data": msg_data, "cat": category}
|
|
368
|
-
data_dump = json.dumps(data)
|
|
369
|
-
|
|
370
493
|
for websocket in self._clients.copy():
|
|
371
|
-
if _LOG.isEnabledFor(logging.DEBUG):
|
|
372
|
-
_LOG.debug(
|
|
373
|
-
"[%s] =>: %s", websocket.remote_address, filter_log_msg_data(data)
|
|
374
|
-
)
|
|
375
494
|
try:
|
|
376
|
-
await
|
|
377
|
-
except
|
|
378
|
-
|
|
495
|
+
await self._enqueue_ws_payload(websocket, data)
|
|
496
|
+
except Exception: # pylint: disable=broad-exception-caught
|
|
497
|
+
_LOG.exception(
|
|
498
|
+
"Failed to enqueue broadcast for %s", websocket.remote_address
|
|
499
|
+
)
|
|
379
500
|
|
|
380
501
|
async def _send_ws_event(
|
|
381
502
|
self, websocket, msg: str, msg_data: dict[str, Any], category: uc.EventCategory
|
|
@@ -392,35 +513,121 @@ class IntegrationAPI:
|
|
|
392
513
|
websockets.ConnectionClosed: When the connection is closed.
|
|
393
514
|
"""
|
|
394
515
|
data = {"kind": "event", "msg": msg, "msg_data": msg_data, "cat": category}
|
|
395
|
-
|
|
516
|
+
await self._enqueue_ws_payload(websocket, data)
|
|
396
517
|
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
_LOG.error("Error sending event: connection no longer established")
|
|
405
|
-
|
|
406
|
-
async def _process_ws_message(self, websocket, message) -> None:
|
|
407
|
-
_LOG.debug("[%s] <-: %s", websocket.remote_address, message)
|
|
518
|
+
async def _process_ws_message(self, websocket, data: dict[str, Any]) -> None:
|
|
519
|
+
if _LOG.isEnabledFor(logging.DEBUG):
|
|
520
|
+
_LOG.debug(
|
|
521
|
+
"[%s] ->: %s",
|
|
522
|
+
websocket.remote_address,
|
|
523
|
+
json.dumps(sanitize_json_message(data)),
|
|
524
|
+
)
|
|
408
525
|
|
|
409
|
-
data = json.loads(message)
|
|
410
526
|
kind = data["kind"]
|
|
411
|
-
req_id = data
|
|
527
|
+
req_id = data.get("id")
|
|
412
528
|
msg = data["msg"]
|
|
413
|
-
msg_data = data
|
|
529
|
+
msg_data = data.get("msg_data")
|
|
414
530
|
|
|
415
531
|
if kind == "req":
|
|
416
532
|
if req_id is None:
|
|
417
533
|
_LOG.warning(
|
|
418
|
-
"Ignoring request message with missing '
|
|
534
|
+
"Ignoring request message with missing 'id': %s",
|
|
535
|
+
data,
|
|
419
536
|
)
|
|
420
|
-
|
|
421
|
-
|
|
537
|
+
return
|
|
538
|
+
await self._handle_ws_request_msg(websocket, msg, req_id, msg_data)
|
|
422
539
|
elif kind == "event":
|
|
423
540
|
await self._handle_ws_event_msg(websocket, msg, msg_data)
|
|
541
|
+
else:
|
|
542
|
+
_LOG.warning(
|
|
543
|
+
"[%s] WS: Unsupported routed message kind %s",
|
|
544
|
+
websocket.remote_address,
|
|
545
|
+
kind,
|
|
546
|
+
)
|
|
547
|
+
|
|
548
|
+
def _handle_pending_response(self, websocket, data: dict[str, Any]) -> None:
|
|
549
|
+
"""Resolve the response message that corresponds to a pending request from the driver."""
|
|
550
|
+
resp_id = data.get("req_id", data.get("id"))
|
|
551
|
+
if resp_id is None:
|
|
552
|
+
_LOG.warning(
|
|
553
|
+
"[%s] WS: Received resp without req_id/id: %s",
|
|
554
|
+
websocket.remote_address,
|
|
555
|
+
data,
|
|
556
|
+
)
|
|
557
|
+
return
|
|
558
|
+
|
|
559
|
+
ctx = self._get_ws_context(websocket)
|
|
560
|
+
if ctx is None:
|
|
561
|
+
_LOG.debug("[%s] WS: No context for resp", websocket.remote_address)
|
|
562
|
+
return
|
|
563
|
+
|
|
564
|
+
fut = ctx.pending.get(int(resp_id))
|
|
565
|
+
if fut is None:
|
|
566
|
+
_LOG.debug(
|
|
567
|
+
"[%s] WS: Unmatched resp_id=%s (not pending). msg=%s",
|
|
568
|
+
websocket.remote_address,
|
|
569
|
+
resp_id,
|
|
570
|
+
data.get("msg"),
|
|
571
|
+
)
|
|
572
|
+
return
|
|
573
|
+
|
|
574
|
+
if not fut.done():
|
|
575
|
+
fut.set_result(data)
|
|
576
|
+
|
|
577
|
+
async def _ws_request(
|
|
578
|
+
self,
|
|
579
|
+
websocket,
|
|
580
|
+
msg: str,
|
|
581
|
+
msg_data: dict[str, Any] | None = None,
|
|
582
|
+
*,
|
|
583
|
+
timeout: float = 10.0,
|
|
584
|
+
) -> dict[str, Any]:
|
|
585
|
+
"""
|
|
586
|
+
Send a request over websocket and await the matching response.
|
|
587
|
+
|
|
588
|
+
- Uses a Future stored in self._ws_pending[websocket][req_id]
|
|
589
|
+
- Reader task (_handle_ws -> _process_ws_message) completes the future on 'resp'
|
|
590
|
+
- Raises TimeoutError on timeout
|
|
591
|
+
:param websocket: client connection
|
|
592
|
+
:param msg: event message name
|
|
593
|
+
:param msg_data: message data payload
|
|
594
|
+
:param timeout: timeout for message
|
|
595
|
+
"""
|
|
596
|
+
# Ensure per-socket structures exist (in case you call before _handle_ws init)
|
|
597
|
+
ctx = self._get_ws_context(websocket)
|
|
598
|
+
if ctx is None:
|
|
599
|
+
raise ConnectionError("WebSocket context not found")
|
|
600
|
+
|
|
601
|
+
# Allocate req_id safely
|
|
602
|
+
req_id = self._req_id
|
|
603
|
+
self._req_id += 1
|
|
604
|
+
|
|
605
|
+
fut = self._loop.create_future()
|
|
606
|
+
ctx.pending[req_id] = fut
|
|
607
|
+
|
|
608
|
+
try:
|
|
609
|
+
payload: dict[str, Any] = {"kind": "req", "id": req_id, "msg": msg}
|
|
610
|
+
if msg_data is not None:
|
|
611
|
+
payload["msg_data"] = msg_data
|
|
612
|
+
|
|
613
|
+
await self._enqueue_ws_payload(websocket, payload)
|
|
614
|
+
|
|
615
|
+
# Await response from client until given timeout
|
|
616
|
+
resp = await asyncio.wait_for(fut, timeout=timeout)
|
|
617
|
+
return resp
|
|
618
|
+
|
|
619
|
+
except asyncio.TimeoutError as ex:
|
|
620
|
+
_LOG.error(
|
|
621
|
+
"[%s] Timeout waiting for response to %s (req_id=%s) %s",
|
|
622
|
+
websocket.remote_address,
|
|
623
|
+
msg,
|
|
624
|
+
req_id,
|
|
625
|
+
ex,
|
|
626
|
+
)
|
|
627
|
+
raise ex
|
|
628
|
+
finally:
|
|
629
|
+
# Cleanup pending future entry
|
|
630
|
+
ctx.pending.pop(req_id, None)
|
|
424
631
|
|
|
425
632
|
async def _process_ws_binary_message(self, websocket, data: bytes) -> None:
|
|
426
633
|
"""Process a binary WebSocket message using protobuf IntegrationMessage.
|
|
@@ -431,7 +638,7 @@ class IntegrationAPI:
|
|
|
431
638
|
"""
|
|
432
639
|
if _LOG.isEnabledFor(logging.DEBUG):
|
|
433
640
|
_LOG.debug(
|
|
434
|
-
"[%s]
|
|
641
|
+
"[%s] ->: <binary %d bytes>", websocket.remote_address, len(data)
|
|
435
642
|
)
|
|
436
643
|
|
|
437
644
|
# Parse IntegrationMessage from bytes
|
|
@@ -462,6 +669,30 @@ class IntegrationAPI:
|
|
|
462
669
|
kind,
|
|
463
670
|
)
|
|
464
671
|
|
|
672
|
+
async def _cleanup_ws(self, websocket) -> None:
    """
    Tear down all state associated with a disconnected WebSocket client.

    - Removes the per-socket context.
    - Ends every voice session owned by this socket (best effort).
    - Fails any still-pending request futures with ConnectionError so awaiting
      callers in _ws_request are woken up.
    - Removes the client from the active set and emits CLIENT_DISCONNECTED.

    :param websocket: the client connection being cleaned up
    """
    ctx = self._ws_contexts.pop(websocket, None)

    # Voice session keys are (websocket, session_id) tuples; the identity check
    # selects only sessions owned by this socket.
    keys_to_cleanup = [k for k in self._voice_sessions if k[0] is websocket]
    for key in keys_to_cleanup:
        try:
            await self._cleanup_voice_session(key, VoiceEndReason.REMOTE)
        except Exception as ex:  # pylint: disable=broad-exception-caught
            # Best effort: one failing session must not prevent cleanup of the rest.
            _LOG.exception(
                "[%s] WS: Error during voice session cleanup for session_id=%s: %s",
                websocket.remote_address,
                key[1],
                ex,
            )

    if ctx is not None:
        # Wake up any callers still awaiting a response from this client.
        for fut in ctx.pending.values():
            if not fut.done():
                fut.set_exception(ConnectionError("WebSocket disconnected"))

    self._clients.discard(websocket)
    _LOG.info("[%s] WS: Client removed", websocket.remote_address)
    self._events.emit(uc.Events.CLIENT_DISCONNECTED, websocket=websocket)
|
|
695
|
+
|
|
465
696
|
async def _on_remote_voice_begin(self, websocket, msg: RemoteVoiceBegin) -> None:
|
|
466
697
|
"""Handle a RemoteVoiceBegin protobuf message.
|
|
467
698
|
|
|
@@ -702,13 +933,7 @@ class IntegrationAPI:
|
|
|
702
933
|
{"state": self.device_state},
|
|
703
934
|
)
|
|
704
935
|
elif msg == uc.WsMessages.GET_AVAILABLE_ENTITIES:
|
|
705
|
-
|
|
706
|
-
await self._send_ws_response(
|
|
707
|
-
websocket,
|
|
708
|
-
req_id,
|
|
709
|
-
uc.WsMsgEvents.AVAILABLE_ENTITIES,
|
|
710
|
-
{"available_entities": available_entities},
|
|
711
|
-
)
|
|
936
|
+
await self._get_available_entities(websocket, req_id)
|
|
712
937
|
elif msg == uc.WsMessages.GET_ENTITY_STATES:
|
|
713
938
|
entity_states = await self._configured_entities.get_states()
|
|
714
939
|
await self._send_ws_response(
|
|
@@ -1351,10 +1576,108 @@ class IntegrationAPI:
|
|
|
1351
1576
|
"""
|
|
1352
1577
|
self._events.remove_all_listeners(event)
|
|
1353
1578
|
|
|
1579
|
+
async def get_supported_entity_types(
    self, websocket, *, timeout: float = 5.0
) -> list[str]:
    """
    Ask the client which entity types it supports.

    :param websocket: client connection
    :param timeout: seconds to wait for the reply
    :return: the ``msg_data`` payload of the reply, or an empty list
    """
    reply = await self._ws_request(
        websocket,
        "get_supported_entity_types",
        timeout=timeout,
    )
    reply_msg = reply.get("msg")
    if reply_msg != "supported_entity_types":
        # Log and fall through: the payload is still returned as-is.
        _LOG.debug(
            "[%s] Unexpected resp msg for get_supported_entity_types: %s",
            websocket.remote_address,
            reply_msg,
        )
    return reply.get("msg_data", [])
|
|
1595
|
+
|
|
1596
|
+
async def get_version(
    self, websocket, *, timeout: float = 5.0
) -> dict[str, Any] | None:
    """
    Request the client's version information.

    :param websocket: client connection
    :param timeout: seconds to wait for the reply
    :return: the ``msg_data`` payload of the reply, or None if absent
    """
    reply = await self._ws_request(
        websocket,
        "get_version",
        timeout=timeout,
    )
    reply_msg = reply.get("msg")
    if reply_msg != "version":
        # Log and fall through: the payload is still returned as-is.
        _LOG.debug(
            "[%s] Unexpected resp msg for get_version: %s",
            websocket.remote_address,
            reply_msg,
        )

    return reply.get("msg_data")
|
|
1613
|
+
|
|
1614
|
+
async def get_localization_cfg(
    self, websocket, *, timeout: float = 5.0
) -> dict[str, Any] | None:
    """
    Request the client's localization configuration.

    :param websocket: client connection
    :param timeout: seconds to wait for the reply
    :return: the ``msg_data`` payload of the reply, or None if absent
    """
    reply = await self._ws_request(
        websocket,
        "get_localization_cfg",
        timeout=timeout,
    )

    reply_msg = reply.get("msg")
    if reply_msg != "localization_cfg":
        # Log and fall through: the payload is still returned as-is.
        _LOG.debug(
            "[%s] Unexpected resp msg for get_localization_cfg: %s",
            websocket.remote_address,
            reply_msg,
        )

    return reply.get("msg_data")
|
|
1632
|
+
|
|
1633
|
+
async def _update_supported_entity_types(
    self, websocket, *, timeout: float = 5.0
) -> None:
    """
    Refresh the cached entity types supported by the connected remote.

    Failures are logged and swallowed on purpose: on error the cached value
    simply remains unchanged.

    :param websocket: client connection
    :param timeout: seconds to wait for the reply
    """
    # Yield control to the event loop once before issuing the request.
    await asyncio.sleep(0)
    try:
        supported = await self.get_supported_entity_types(websocket, timeout=timeout)
        self._supported_entity_types = supported
        _LOG.debug(
            "[%s] Supported entity types %s",
            websocket.remote_address,
            self._supported_entity_types,
        )
    except Exception as ex:  # pylint: disable=W0718
        _LOG.error(
            "[%s] Unable to retrieve entity types %s",
            websocket.remote_address,
            ex,
        )
|
|
1653
|
+
|
|
1654
|
+
async def _get_available_entities(self, websocket, req_id) -> None:
    """
    Answer a GET_AVAILABLE_ENTITIES request.

    Entities are filtered by the remote's supported entity types, which are
    fetched lazily on first use.

    :param websocket: client connection
    :param req_id: request id to answer
    """
    if self._supported_entity_types is None:
        # Request supported entity types from remote
        await self._update_supported_entity_types(websocket)

    entities = self._available_entities.get_all()
    supported = self._supported_entity_types
    if supported:
        entities = [e for e in entities if e.get("entity_type") in supported]

    await self._send_ws_response(
        websocket,
        req_id,
        uc.WsMsgEvents.AVAILABLE_ENTITIES,
        {"available_entities": entities},
    )
|
|
1671
|
+
|
|
1354
1672
|
##############
|
|
1355
1673
|
# Properties #
|
|
1356
1674
|
##############
|
|
1357
1675
|
|
|
1676
|
+
@property
def clients(self) -> set:
    """Return a snapshot copy of all connected WebSocket clients."""
    return set(self._clients)
|
|
1680
|
+
|
|
1358
1681
|
@property
|
|
1359
1682
|
def client_count(self) -> int:
|
|
1360
1683
|
"""Return number of WebSocket clients."""
|
|
@@ -1462,46 +1785,83 @@ def local_hostname() -> str:
|
|
|
1462
1785
|
)
|
|
1463
1786
|
|
|
1464
1787
|
|
|
1465
|
-
|
|
1788
|
+
# Placeholder substituted for the values of sensitive keys in sanitized log output.
_REDACTED_VALUE = "***REDACTED***"
# Dictionary keys whose values are considered secrets and are replaced with
# _REDACTED_VALUE by sanitize_json_message() before a message is logged.
_SENSITIVE_KEYS = {
    "token",
    "token_id",
    "access_token",
    "refresh_token",
    "id_token",
    "authorization_code",
    "client_secret",
    "secret",
    "auth_url",
    "client_data",
    "password",
}
|
|
1802
|
+
|
|
1803
|
+
|
|
1804
|
+
def _redact_inline_image(item: Any) -> None:
    """Replace an inline ``data:`` image URL in ``item["attributes"]`` with a stub.

    Mutates ``item`` in-place. Silently ignores anything that is not a dict with
    a dict ``attributes`` member, and non-string image values.
    """
    if not isinstance(item, dict):
        return
    attributes = item.get("attributes")
    if not isinstance(attributes, dict):
        return
    image_url = attributes.get(MediaAttr.MEDIA_IMAGE_URL)
    # Guard with isinstance: an explicit None (or other non-str) value must not
    # crash the logging path with an AttributeError on .startswith().
    if isinstance(image_url, str) and image_url.startswith("data:"):
        attributes[MediaAttr.MEDIA_IMAGE_URL] = "data:..."


def _filter_base64_images(json_data: Any) -> Any:
    """
    Filter out base64 encoded images from a JSON object.

    **Attention:** the provided JSON object is modified in-place!

    :param json_data: The JSON object to filter.
    :returns: The filtered JSON object.
    """
    if json_data and isinstance(json_data, dict) and "msg_data" in json_data:
        msg_data = json_data["msg_data"]
        if isinstance(msg_data, list):
            # Entity lists: redact each entity's attributes individually.
            for item in msg_data:
                _redact_inline_image(item)
        else:
            # Single-entity payload; non-dict msg_data is a no-op in the helper.
            _redact_inline_image(msg_data)
    return json_data
|
|
1836
|
+
|
|
1837
|
+
|
|
1838
|
+
def sanitize_json_message(data: Any) -> Any:
    """
    Sanitizes a JSON message by redacting sensitive fields such as tokens and secrets.

    Base64 encoded images starting with `data:` are removed in `msg_data.attributes.media_image_url`
    fields to limit log output.

    The original message is not modified, the returned redacted message is a deepcopy.

    :param data: The JSON object to be sanitized.
    :return: The sanitized JSON object with sensitive information redacted.
    """
    # do not modify the original dict
    json_upd = deepcopy(data)
    if not json_upd:
        return {}

    def sanitize_for_logging(value: Any) -> Any:
        """Recursively redact _SENSITIVE_KEYS values in nested dicts and lists."""
        if isinstance(value, list):
            return [sanitize_for_logging(item) for item in value]
        if isinstance(value, dict):
            for key, item in value.items():
                value[key] = (
                    _REDACTED_VALUE
                    if key in _SENSITIVE_KEYS
                    else sanitize_for_logging(item)
                )
        # Unconditional return: scalars (str, int, None, ...) must pass through
        # unchanged instead of being replaced by an implicit None.
        return value

    return sanitize_for_logging(_filter_base64_images(json_upd))
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|