dbus2mqtt 0.1.2__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbus2mqtt/{config.py → config/__init__.py} +18 -15
- dbus2mqtt/config/jsonarparse.py +31 -0
- dbus2mqtt/dbus/dbus_client.py +62 -38
- dbus2mqtt/dbus/dbus_types.py +2 -2
- dbus2mqtt/dbus/dbus_util.py +2 -2
- dbus2mqtt/dbus/introspection_patches/mpris_playerctl.py +151 -0
- dbus2mqtt/dbus/introspection_patches/mpris_vlc.py +122 -0
- dbus2mqtt/event_broker.py +1 -1
- dbus2mqtt/flow/__init__.py +22 -3
- dbus2mqtt/flow/actions/context_set.py +2 -0
- dbus2mqtt/flow/actions/mqtt_publish.py +24 -6
- dbus2mqtt/flow/flow_processor.py +28 -26
- dbus2mqtt/main.py +12 -18
- dbus2mqtt/mqtt/mqtt_client.py +40 -24
- dbus2mqtt/template/dbus_template_functions.py +2 -2
- dbus2mqtt/template/templating.py +57 -80
- {dbus2mqtt-0.1.2.dist-info → dbus2mqtt-0.3.0.dist-info}/METADATA +9 -12
- dbus2mqtt-0.3.0.dist-info/RECORD +23 -0
- dbus2mqtt-0.1.2.dist-info/RECORD +0 -20
- {dbus2mqtt-0.1.2.dist-info → dbus2mqtt-0.3.0.dist-info}/WHEEL +0 -0
- {dbus2mqtt-0.1.2.dist-info → dbus2mqtt-0.3.0.dist-info}/entry_points.txt +0 -0
- {dbus2mqtt-0.1.2.dist-info → dbus2mqtt-0.3.0.dist-info}/licenses/LICENSE +0 -0
dbus2mqtt/event_broker.py
CHANGED
dbus2mqtt/flow/__init__.py
CHANGED
@@ -6,15 +6,34 @@ class FlowExecutionContext:

     def __init__(self, name: str | None, global_flows_context: dict[str, Any], flow_context: dict[str, Any]):
         self.name = name
+
         self.global_flows_context = global_flows_context
+        """
+        Global flows context which is shared across all flows.
+        Modifiable by user.
+        **Not** cleaned up after flow execution.
+        """
+
         self.flow_context = flow_context
+        """
+        Flow context which contains flow specific context like 'subscription_bus_name'.
+        **Not** modifiable by user.
+        **Not** cleaned up after flow execution.
+        """

-        # per flow execution context
         self.context: dict[str, Any] = {}
+        """
+        Per flow execution context.
+        Modifiable by user.
+        Cleaned up after each flow execution
+        """

     def get_aggregated_context(self) -> dict[str, Any]:
-        """
-
+        """
+        Get the aggregated context for the flow execution.
+        Merges global flows context, flow context, and local context
+        """
+
         context = {}
         if self.global_flows_context:
             context.update(self.global_flows_context)
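The docstrings added above describe three context layers with different lifetimes, and `get_aggregated_context()` merges them into the single dict that templates render against. A minimal standalone sketch of that layering (the class is reduced to what this hunk shows; the merge order of the flow and per-execution layers beyond the global one is an assumption, since the hunk is cut off after the global update):

```python
from typing import Any


class FlowExecutionContext:
    """Reduced sketch of the context layering described in the diff above."""

    def __init__(self, name: str | None, global_flows_context: dict[str, Any], flow_context: dict[str, Any]):
        self.name = name
        self.global_flows_context = global_flows_context  # shared across all flows, user-modifiable
        self.flow_context = flow_context                   # flow-specific, e.g. 'subscription_bus_name'
        self.context: dict[str, Any] = {}                  # per execution, cleaned up afterwards

    def get_aggregated_context(self) -> dict[str, Any]:
        # assumed precedence: per-execution values shadow flow values, which shadow globals
        context: dict[str, Any] = {}
        if self.global_flows_context:
            context.update(self.global_flows_context)
        if self.flow_context:
            context.update(self.flow_context)
        context.update(self.context)
        return context


ctx = FlowExecutionContext("demo", {"volume": 0.5}, {"subscription_bus_name": "org.mpris.MediaPlayer2.vlc"})
ctx.context["volume"] = 0.8
print(ctx.get_aggregated_context())  # {'volume': 0.8, 'subscription_bus_name': 'org.mpris.MediaPlayer2.vlc'}
```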
dbus2mqtt/flow/actions/context_set.py
CHANGED

@@ -15,12 +15,14 @@ class ContextSetAction(FlowAction):
     async def execute(self, context: FlowExecutionContext):

         aggregated_context = context.get_aggregated_context()
+
         if self.config.global_context:
             context_new = await self.templating.async_render_template(self.config.global_context, dict, aggregated_context)
             logger.debug(f"Update global_context with: {context_new}")
             context.global_flows_context.update(context_new)

         if self.config.context:
+
             context_new = await self.templating.async_render_template(self.config.context, dict, aggregated_context)
             logger.debug(f"Update context with: {context_new}")
             context.context.update(context_new)
dbus2mqtt/flow/actions/mqtt_publish.py
CHANGED

@@ -1,7 +1,9 @@

 import logging

-from
+from urllib.parse import urlparse
+
+from jinja2.exceptions import TemplateError

 from dbus2mqtt import AppContext
 from dbus2mqtt.config import FlowActionMqttPublishConfig
@@ -24,14 +26,30 @@ class MqttPublishAction(FlowAction):
         try:
             mqtt_topic = await self.templating.async_render_template(self.config.topic, str, render_context)

-
-
+            if self.config.payload_type == "text":
+                res_type = str
+            elif self.config.payload_type == "binary":
+                res_type = str
+            else:
+                res_type = dict
+
+            payload = await self.templating.async_render_template(self.config.payload_template, res_type, render_context)
+
+            # for binary payloads, payload contains the file to read binary data from
+            if isinstance(payload, str) and self.config.payload_type == "binary":
+                uri = payload
+                payload = urlparse(uri)
+                if not payload.scheme == "file":
+                    raise ValueError(f"Expected readable file, got: '{uri}'")
+

-        except
-            logger.warning(f"Error rendering jinja template, flow: '{context.name}',
+        except TemplateError as e:
+            logger.warning(f"Error rendering jinja template, flow: '{context.name or ''}', msg={e}, payload_template={self.config.payload_template}, render_context={render_context}", exc_info=True)
             return
         except Exception as e:
-
+            # Dont log full exception info to avoid log spamming on dbus errors
+            # due to clients disconnecting
+            logger.warning(f"Error rendering jinja template, flow: '{context.name or ''}', msg={e} payload_template={self.config.payload_template}, render_context={render_context}")
             return

         logger.debug(f"public_mqtt: flow={context.name}, payload={payload}")
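The new `payload_type` handling renders `text` and `binary` templates to a string and everything else to a dict; for `binary`, the rendered string is treated as a `file://` URI whose bytes are read later by the MQTT client. A standalone sketch of just that dispatch, with the config and templating objects from the diff replaced by plain arguments:

```python
from urllib.parse import ParseResult, urlparse


def resolve_payload(payload_type: str, rendered: str | dict) -> str | dict | ParseResult:
    """Mimics the payload_type dispatch in MqttPublishAction.execute above."""
    if payload_type == "binary" and isinstance(rendered, str):
        # the template is expected to render a file:// URI pointing at the binary data
        parsed = urlparse(rendered)
        if parsed.scheme != "file":
            raise ValueError(f"Expected readable file, got: '{rendered}'")
        return parsed
    # "text" stays a str, "json"/"yaml" stay a dict and are serialized on publish
    return rendered


print(resolve_payload("binary", "file:///tmp/albumart.png"))
print(resolve_payload("json", {"PlaybackStatus": "Playing"}))
```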
dbus2mqtt/flow/flow_processor.py
CHANGED
@@ -7,7 +7,7 @@ from typing import Any
 from apscheduler.schedulers.asyncio import AsyncIOScheduler

 from dbus2mqtt import AppContext
-from dbus2mqtt.config import FlowConfig, FlowTriggerConfig
+from dbus2mqtt.config import FlowConfig, FlowTriggerConfig, FlowTriggerDbusSignalConfig
 from dbus2mqtt.event_broker import FlowTriggerMessage
 from dbus2mqtt.flow import FlowAction, FlowExecutionContext
 from dbus2mqtt.flow.actions.context_set import ContextSetAction
@@ -46,6 +46,7 @@ class FlowScheduler:
         if not existing_job and trigger.type == "schedule":
             logger.info(f"Starting scheduler[{trigger.id}] for flow {flow.id}")
             if trigger.interval:
+                trigger_args: dict[str, Any] = trigger.interval
                 # Each schedule gets its own job
                 self.scheduler.add_job(
                     self._schedule_flow_strigger,
@@ -55,9 +56,10 @@ class FlowScheduler:
                     misfire_grace_time=5,
                     coalesce=True,
                     args=[flow, trigger],
-                    **
+                    **trigger_args
                 )
             elif trigger.cron:
+                trigger_args: dict[str, Any] = trigger.cron
                 # Each schedule gets its own job
                 self.scheduler.add_job(
                     self._schedule_flow_strigger,
@@ -67,7 +69,7 @@ class FlowScheduler:
                     misfire_grace_time=5,
                     coalesce=True,
                     args=[flow, trigger],
-                    **
+                    **trigger_args
                 )

     def stop_flow_set(self, flows):
@@ -160,38 +162,38 @@ class FlowProcessor:
                 await self._process_flow_trigger(flow_trigger_message)

             except Exception as e:
-
+                # exc_info is only set when running in verbose mode to avoid lots of stack traces being printed
+                # while flows are still running and the DBus object was just removed. Some examples:
+
+                log_level = logging.WARN
+
+                # 1: error during context_set
+                # WARNING:dbus2mqtt.flow.flow_processor:flow_processor_task: Exception The name org.mpris.MediaPlayer2.firefox.instance_1_672 was not provided by any .service files
+                if "was not provided by any .service files" in str(e):
+                    log_level = logging.DEBUG
+
+                logger.log(log_level, f"flow_processor_task: Exception {e}", exc_info=logger.isEnabledFor(logging.DEBUG))
             finally:
                 self.event_broker.flow_trigger_queue.async_q.task_done()

+    def _trigger_config_to_str(self, config: FlowTriggerConfig) -> str:
+        if isinstance(config, FlowTriggerDbusSignalConfig):
+            return f"{config.type}({config.signal})"
+        return config.type
+
     async def _process_flow_trigger(self, flow_trigger_message: FlowTriggerMessage):
-
+
+        trigger_str = self._trigger_config_to_str(flow_trigger_message.flow_trigger_config)
+        flow_str = flow_trigger_message.flow_config.name or flow_trigger_message.flow_config.id
+
+        log_message = f"on_trigger: {trigger_str}, flow={flow_str}, time={flow_trigger_message.timestamp.isoformat()}"
+
         if flow_trigger_message.flow_trigger_config.type != "schedule":
            logger.info(log_message)
         else:
            logger.debug(log_message)

         flow_id = flow_trigger_message.flow_config.id
-        # flow_name = flow_trigger_message.flow_config.name

         flow = self._flows[flow_id]
-        await flow.execute_actions(trigger_context=flow_trigger_message.
-
-        # # Create a flow from the YAML configuration
-        # for flow_config in config['flows']:
-        # flow_name = flow_config['name']
-        # triggers = flow_config.get('triggers', [])
-        # actions = flow_config.get('actions', [])
-
-        # with Flow(flow_name) as flow:
-        # data = "sensor_data"
-        # for action in actions:
-        # if action['type'] == 'python_script':
-        # process_data(data)
-        # elif action['type'] == 'mqtt_publish':
-        # mqtt_publish(action['topic'], action['message_template'], data)
-
-        # # Add scheduling trigger if defined
-        # for trigger in triggers:
-        # if trigger['type'] == 'schedule' and 'cron' in trigger:
-        # flow.schedule = CronSchedule(cron=trigger['cron'])
+        await flow.execute_actions(trigger_context=flow_trigger_message.trigger_context)
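The new `trigger_args` variable simply forwards the `interval` or `cron` mapping from the trigger configuration to APScheduler as keyword arguments. A small self-contained sketch of that pattern (the `tick` callback and the example keyword values are illustrative, not taken from the package):

```python
import asyncio
from typing import Any

from apscheduler.schedulers.asyncio import AsyncIOScheduler


async def tick() -> None:
    print("trigger fired")


async def main() -> None:
    scheduler = AsyncIOScheduler()

    # e.g. a flow trigger configured with `interval: {seconds: 5}`
    trigger_args: dict[str, Any] = {"seconds": 5}
    scheduler.add_job(tick, "interval", misfire_grace_time=5, coalesce=True, **trigger_args)

    # a `cron: {minute: "*/1"}` mapping expands into CronTrigger keyword arguments the same way
    scheduler.add_job(tick, "cron", minute="*/1", misfire_grace_time=5, coalesce=True)

    scheduler.start()
    await asyncio.sleep(15)


asyncio.run(main())
```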
dbus2mqtt/main.py
CHANGED
@@ -5,12 +5,12 @@ import sys
 from typing import cast

 import colorlog
-import
+import dbus_fast.aio as dbus_aio
 import dotenv
-import jsonargparse

 from dbus2mqtt import AppContext
 from dbus2mqtt.config import Config
+from dbus2mqtt.config.jsonarparse import new_argument_parser
 from dbus2mqtt.dbus.dbus_client import DbusClient
 from dbus2mqtt.event_broker import EventBroker
 from dbus2mqtt.flow.flow_processor import FlowProcessor, FlowScheduler
@@ -41,14 +41,14 @@ async def dbus_processor_task(app_context: AppContext, flow_scheduler: FlowScheduler):

 async def mqtt_processor_task(app_context: AppContext):

-
+    loop = asyncio.get_running_loop()
+    mqtt_client_run_future = loop.create_future()
+
+    mqtt_client = MqttClient(app_context, loop)

     mqtt_client.connect()
     mqtt_client.client.loop_start()

-    loop = asyncio.get_running_loop()
-    mqtt_client_run_future = loop.create_future()
-
     try:
         await asyncio.gather(
             mqtt_client_run_future,
@@ -84,6 +84,7 @@ async def run(config: Config):
     except asyncio.CancelledError:
         pass

+
 def main():

     # load environment from .env if it exists
@@ -92,8 +93,7 @@ def main():
         logger.info(f"Loaded environment variables from {dotenv_file}")
         dotenv.load_dotenv(dotenv_path=dotenv_file)

-
-    parser = jsonargparse.ArgumentParser(default_config_files=["config.yaml"], default_env=True, env_prefix=False)
+    parser = new_argument_parser()

     parser.add_argument("--verbose", "-v", nargs="?", const=True, help="Enable verbose logging")
     parser.add_argument("--config", action="config")
@@ -121,15 +121,9 @@ def main():
     apscheduler_logger = logging.getLogger("apscheduler")
     apscheduler_logger.setLevel(logging.WARNING)

-
-    # handler.setFormatter(colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(name)s:%(message)s'))
-
-    # logger = colorlog.getLogger('')
-    # for handler in logger.handlers:
-    # print(handler.st)
-    # if isinstance(handler, colorlog.StreamHandler):
-    # handler.setFormatter(colorlog.ColoredFormatter('%(log_color)s%(levelname)s:%(name)s:%(message)s'))
-
     logger.debug(f"config: {config}")

-
+    try:
+        asyncio.run(run(config))
+    except KeyboardInterrupt:
+        return 0
dbus2mqtt/mqtt/mqtt_client.py
CHANGED
@@ -4,6 +4,8 @@ import json
 import logging

 from typing import Any
+from urllib.parse import ParseResult
+from urllib.request import urlopen

 import paho.mqtt.client as mqtt
 import yaml
@@ -18,7 +20,7 @@ logger = logging.getLogger(__name__)

 class MqttClient:

-    def __init__(self, app_context: AppContext):
+    def __init__(self, app_context: AppContext, loop):
         self.config = app_context.config.mqtt
         self.event_broker = app_context.event_broker

@@ -35,6 +37,9 @@ class MqttClient:
         self.client.on_connect = self.on_connect
         self.client.on_message = self.on_message

+        self.loop = loop
+        self.connected_event = asyncio.Event()
+
     def connect(self):

         # mqtt_client.on_message = lambda client, userdata, message: asyncio.create_task(mqtt_on_message(client, userdata, message))
@@ -43,39 +48,48 @@ class MqttClient:
             port=self.config.port
         )

-    # def on_dbus_signal(self, bus_name: str, path: str, interface: str, signal: str, topic, msg: dict[str, Any]):
-    # payload = json.dumps(msg)
-    # logger.debug(f"on_dbus_signal: payload={payload}")
-    # self.client.publish(topic=topic, payload=payload)
-
     async def mqtt_publish_queue_processor_task(self):
+
+        first_message = True
+
         """Continuously processes messages from the async queue."""
         while True:
             msg = await self.event_broker.mqtt_publish_queue.async_q.get()  # Wait for a message
+
             try:
-                payload = msg.payload
+                payload: str | bytes | None = msg.payload
                 type = msg.payload_serialization_type
-                if
-
-
-
-
-
-
-
-
-
+                if type == "text":
+                    payload = str(msg.payload)
+                if isinstance(msg.payload, dict) and type == "json":
+                    payload = json.dumps(msg.payload)
+                elif isinstance(msg.payload, dict) and type == "yaml":
+                    payload = yaml.dump(msg.payload)
+                elif isinstance(msg.payload, ParseResult) and type == "binary":
+                    try:
+                        with urlopen(msg.payload.geturl()) as response:
+                            payload = response.read()
+                    except Exception as e:
+                        # In case failing uri reads, we still publish an empty msg to avoid stale data
+                        payload = None
+                        logger.warning(f"mqtt_publish_queue_processor_task: Exception {e}", exc_info=logger.isEnabledFor(logging.DEBUG))
+
+                payload_log_msg = payload if isinstance(payload, str) else msg.payload
+                logger.debug(f"mqtt_publish_queue_processor_task: topic={msg.topic}, type={payload.__class__}, payload={payload_log_msg}")
+
+                if first_message:
+                    await asyncio.wait_for(self.connected_event.wait(), timeout=5)
+
+                self.client.publish(topic=msg.topic, payload=payload or "").wait_for_publish(timeout=1000)
+                if first_message:
+                    logger.info(f"First message published: topic={msg.topic}, payload={payload_log_msg}")
+                    first_message = False
+
             except Exception as e:
-                logger.warning(f"mqtt_publish_queue_processor_task: Exception {e}", exc_info=
+                logger.warning(f"mqtt_publish_queue_processor_task: Exception {e}", exc_info=logger.isEnabledFor(logging.DEBUG))
             finally:
                 self.event_broker.mqtt_publish_queue.async_q.task_done()

-
-    async def run(self):
-        """Runs the MQTT loop in a non-blocking way with asyncio."""
-        self.client.loop_start()  # Runs Paho's loop in a background thread
-        await asyncio.Event().wait()  # Keeps the coroutine alive
-
     # The callback for when the client receives a CONNACK response from the server.
     def on_connect(self, client: mqtt.Client, userdata, flags, reason_code, properties):
         if reason_code.is_failure:
@@ -86,6 +100,8 @@ class MqttClient:
         # reconnect then subscriptions will be renewed.
         client.subscribe("dbus2mqtt/#", options=SubscribeOptions(noLocal=True))

+        self.loop.call_soon_threadsafe(self.connected_event.set)
+
     def on_message(self, client: mqtt.Client, userdata: Any, msg: mqtt.MQTTMessage):

         payload = msg.payload.decode()
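paho-mqtt invokes `on_connect` from its own network thread, so the diff signals connection readiness to the asyncio side through `loop.call_soon_threadsafe` and waits on the event before the first publish. A standalone sketch of that thread-to-loop handshake (the timer stands in for paho's callback thread):

```python
import asyncio
import threading


async def main() -> None:
    loop = asyncio.get_running_loop()
    connected_event = asyncio.Event()

    def on_connect_from_other_thread() -> None:
        # Runs outside the event loop thread (paho's network loop in the real code),
        # so the Event must be set via the loop rather than directly.
        loop.call_soon_threadsafe(connected_event.set)

    threading.Timer(0.5, on_connect_from_other_thread).start()

    # Mirrors the first_message path in mqtt_publish_queue_processor_task above:
    # block (with a timeout) until the broker connection is up before publishing.
    await asyncio.wait_for(connected_event.wait(), timeout=5)
    print("connected, first message can be published")


asyncio.run(main())
```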
dbus2mqtt/template/dbus_template_functions.py
CHANGED

@@ -3,8 +3,8 @@ import logging

 from typing import Any

-from
-from
+from dbus_fast.constants import ErrorType
+from dbus_fast.errors import DBusError

 from dbus2mqtt.dbus.dbus_client import DbusClient

dbus2mqtt/template/templating.py
CHANGED
@@ -1,49 +1,11 @@

 from datetime import datetime
-from typing import Any
-
-import
-
-
-
-    Environment,
-    StrictUndefined,
-)
-from yaml import SafeDumper, SafeLoader
-
-
-def _represent_template_str(dumper: SafeDumper, data: str):
-    data = data.replace("{{", "template:{{", 1)
-    data = data.replace("}}", "}}:template", 1)
-    # return dumper.represent_str(f"template:{data}:template")
-    return dumper.represent_str(data)
-
-class _CustomSafeLoader(SafeLoader):
-    def __init__(self, stream):
-        super().__init__(stream)
-
-        # Disable parsing ISO date strings
-        self.add_constructor('tag:yaml.org,2002:timestamp', lambda _l, n: n.value)
-
-class _CustomSafeDumper(SafeDumper):
-    def __init__(self, stream, **kwargs):
-        super().__init__(stream, **kwargs)
-        self.add_representer(_TemplatedStr, _represent_template_str)
-
-class _TemplatedStr(str):
-    """A marker class to force template string formatting in YAML."""
-    pass
-
-def _mark_templates(obj):
-    if isinstance(obj, dict):
-        return {k: _mark_templates(v) for k, v in obj.items()}
-    elif isinstance(obj, list):
-        return [_mark_templates(v) for v in obj]
-    elif isinstance(obj, str):
-        s = obj.strip()
-        if s.startswith("{{") and s.endswith("}}"):
-            return _TemplatedStr(obj)
-    return obj
+from typing import Any, TypeVar
+
+from jinja2 import BaseLoader, StrictUndefined, TemplateError
+from jinja2.nativetypes import NativeEnvironment
+
+TemplateResultType = TypeVar('TemplateResultType')

 class TemplateEngine:
     def __init__(self):
@@ -51,14 +13,14 @@ class TemplateEngine:
         engine_globals = {}
         engine_globals['now'] = datetime.now

-        self.jinja2_env =
+        self.jinja2_env = NativeEnvironment(
            loader=BaseLoader(),
            extensions=['jinja2_ansible_filters.AnsibleCoreFiltersExtension'],
            undefined=StrictUndefined,
            keep_trailing_newline=False
        )

-        self.jinja2_async_env =
+        self.jinja2_async_env = NativeEnvironment(
            loader=BaseLoader(),
            extensions=['jinja2_ansible_filters.AnsibleCoreFiltersExtension'],
            undefined=StrictUndefined,
@@ -66,7 +28,6 @@ class TemplateEngine:
        )

         self.app_context: dict[str, Any] = {}
-        # self.dbus_context: dict[str, Any] = {}

         self.jinja2_env.globals.update(engine_globals)
         self.jinja2_async_env.globals.update(engine_globals)
@@ -78,52 +39,68 @@ class TemplateEngine:
     def update_app_context(self, context: dict[str, Any]):
         self.app_context.update(context)

-    def
-        template_str = _mark_templates(value)
-        template_str = yaml.dump(template_str, Dumper=_CustomSafeDumper)
-        # value= yaml.safe_dump(value, default_style=None)
-        # print(f"_dict_to_templatable_str: {value}")
-        template_str = template_str.replace("template:{{", "{{").replace("}}:template", "}}")
-        # print(value)
-        return template_str
+    def _convert_value(self, res: Any, res_type: type[TemplateResultType]) -> TemplateResultType:

-
-
+        if res is None:
+            return res

-
+        if isinstance(res, res_type):
+            return res

-
-        return
+        try:
+            return res_type(res)  # type: ignore

-
-        raise ValueError(f"
+        except Exception as e:
+            raise ValueError(f"Error converting rendered template result from '{type(res).__name__}' to '{res_type.__name__}'") from e

-
-        if dict_template:
-            template = self._dict_to_templatable_str(template)
+    def _render_template_nested(self, templatable: str | dict[str, Any], context: dict[str, Any] = {}) -> Any:

-
+        if isinstance(templatable, str):
+            try:
+                return self.jinja2_env.from_string(templatable).render(**context)
+            except TemplateError as e:
+                raise TemplateError(f"Error compiling template, template={templatable}: {e}") from e

-
-        res =
+        elif isinstance(templatable, dict):
+            res = {}
+            for k, v in templatable.items():
+                if isinstance(v, dict) or isinstance(v, str):
+                    res[k] = self._render_template_nested(v, context)
+                else:
+                    res[k] = v
+            return res

-
+    def render_template(self, templatable: str | dict[str, Any], res_type: type[TemplateResultType], context: dict[str, Any] = {}) -> TemplateResultType:

-
+        if isinstance(templatable, dict) and res_type is not dict:
+            raise ValueError(f"res_type should dict for dictionary templates, templatable={templatable}")
+
+        res = self._render_template_nested(templatable, context)
+        res = self._convert_value(res, res_type)
+        return res

-
-        return None
+    async def _async_render_template_nested(self, templatable: str | dict[str, Any], context: dict[str, Any] = {}) -> Any:

-        if
-
+        if isinstance(templatable, str):
+            try:
+                return await self.jinja2_async_env.from_string(templatable).render_async(**context)
+            except TemplateError as e:
+                raise TemplateError(f"Error compiling template, template={templatable}: {e}") from e

-
-
-
+        elif isinstance(templatable, dict):
+            res = {}
+            for k, v in templatable.items():
+                if isinstance(v, dict) or isinstance(v, str):
+                    res[k] = await self._async_render_template_nested(v, context)
+                else:
+                    res[k] = v
+            return res

-
+    async def async_render_template(self, templatable: str | dict[str, Any], res_type: type[TemplateResultType], context: dict[str, Any] = {}) -> TemplateResultType:

-        if res_type is dict:
-
+        if isinstance(templatable, dict) and res_type is not dict:
+            raise ValueError(f"res_type should be dict for dictionary templates, templatable={templatable}")

+        res = await self._async_render_template_nested(templatable, context)
+        res = self._convert_value(res, res_type)
         return res
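The rewrite swaps `jinja2.Environment` plus a YAML marker round-trip for `jinja2.nativetypes.NativeEnvironment`, which returns native Python objects from template expressions; `_convert_value` then coerces or validates the result against the requested `res_type`. A short sketch of the NativeEnvironment behaviour this relies on (the template strings are illustrative):

```python
from jinja2 import BaseLoader, StrictUndefined
from jinja2.nativetypes import NativeEnvironment

env = NativeEnvironment(loader=BaseLoader(), undefined=StrictUndefined)

# An expression that evaluates to a dict comes back as a dict, not as its string
# repr -- the key difference from the regular jinja2.Environment.
state = env.from_string("{{ {'status': status, 'volume': volume} }}").render(status="Playing", volume=0.8)
print(type(state), state)  # <class 'dict'> {'status': 'Playing', 'volume': 0.8}

# Plain text templates still render to str.
print(env.from_string("dbus2mqtt/{{ topic }}").render(topic="state"))
```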
{dbus2mqtt-0.1.2.dist-info → dbus2mqtt-0.3.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dbus2mqtt
-Version: 0.1.2
+Version: 0.3.0
 Summary: A Python tool to expose Linux D-Bus signals, methods and properties over MQTT - featuring templating, payload enrichment and Home Assistant-ready examples
 Project-URL: Repository, https://github.com/jwnmulder/dbus2mqtt.git
 Project-URL: Issues, https://github.com/jwnmulder/dbus2mqtt/issues
@@ -19,7 +19,7 @@ Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.10
 Requires-Dist: apscheduler>=3.11.0
 Requires-Dist: colorlog>=6.9.0
-Requires-Dist: dbus-
+Requires-Dist: dbus-fast>=2.44.1
 Requires-Dist: janus>=2.0.0
 Requires-Dist: jinja2-ansible-filters>=1.3.2
 Requires-Dist: jinja2>=3.1.6
@@ -51,12 +51,6 @@ This makes it easy to integrate Linux desktop services or system signals into MQ

 Initial testing has focused on MPRIS integration. A table of tested MPRIS players and their supported methods can be found here: [home_assistant_media_player.md](https://github.com/jwnmulder/dbus2mqtt/blob/main/docs/examples/home_assistant_media_player.md)

-
-TODO list
-
-* Improve error handling when deleting message with 'retain' set. WARNING:dbus2mqtt.mqtt_client:on_message: Unexpected payload, expecting json, topic=dbus2mqtt/org.mpris.MediaPlayer2/command, payload=, error=Expecting value: line 1 column 1 (char 0)
-* Property set only works the first time, need to restart after which the first set will work again
-
 ## Getting started with dbus2mqtt

 Create a `config.yaml` file with the contents shown below. This configuration will expose all bus properties from the `org.mpris.MediaPlayer2.Player` interface to MQTT on the `dbus2mqtt/org.mpris.MediaPlayer2/state` topic. Have a look at [docs/examples](docs/examples.md) for more examples
@@ -86,7 +80,7 @@ dbus:
         topic: dbus2mqtt/org.mpris.MediaPlayer2/state
         payload_type: json
         payload_template: |
-          {{ dbus_call(mpris_bus_name, path, 'org.freedesktop.DBus.Properties', 'GetAll', ['org.mpris.MediaPlayer2.Player'])
+          {{ dbus_call(mpris_bus_name, path, 'org.freedesktop.DBus.Properties', 'GetAll', ['org.mpris.MediaPlayer2.Player']) }}
 ```

 MQTT connection details can be configured in that same `config.yaml` file or via environment variables. For now create a `.env` file with the following contents.
@@ -117,6 +111,7 @@ cp docs/examples/home_assistant_media_player.yaml $HOME/.config/dbus2mqtt/config
 cp .env.example $HOME/.config/dbus2mqtt/.env

 # run image and automatically start on reboot
+docker pull jwnmulder/dbus2mqtt
 docker run --detach --name dbus2mqtt \
   --volume "$HOME"/.config/dbus2mqtt:"$HOME"/.config/dbus2mqtt \
   --volume /run/user:/run/user \
@@ -143,7 +138,7 @@ dbus2mqtt leverages [jsonargparse](https://jsonargparse.readthedocs.io/en/stable
 ### MQTT and D-Bus connection details

 ```bash
-#
+# dbus_fast configuration
 export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/1000/bus

 # dbus2mqtt configuration
@@ -170,13 +165,15 @@ dbus:
     path: /org/mpris/MediaPlayer2
     interfaces:
       - interface: org.mpris.MediaPlayer2.Player
-
+        mqtt_command_topic: dbus2mqtt/org.mpris.MediaPlayer2/command
         methods:
           - method: Pause
           - method: Play
 ```

-This configuration will expose 2 methods. Triggering methods can be done by publishing json messages to the `dbus2mqtt/org.mpris.MediaPlayer2/command` MQTT topic. Arguments can be passed along in `args
+This configuration will expose 2 methods. Triggering methods can be done by publishing json messages to the `dbus2mqtt/org.mpris.MediaPlayer2/command` MQTT topic. Arguments can be passed along in `args`.
+
+Note that methods are called on **all** bus_names matching the configured pattern

 ```json
 {