dbus2mqtt 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbus2mqtt might be problematic. Click here for more details.

@@ -0,0 +1,70 @@
1
+ import asyncio
2
+ import logging
3
+
4
+ from dataclasses import dataclass
5
+ from datetime import datetime
6
+ from typing import Any
7
+
8
+ import janus
9
+
10
+ from dbus2mqtt.config import (
11
+ FlowConfig,
12
+ FlowTriggerConfig,
13
+ SignalConfig,
14
+ SubscriptionConfig,
15
+ )
16
+ from dbus2mqtt.dbus.dbus_types import BusNameSubscriptions
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
@dataclass
class MqttMessage:
    """A message published to, or received from, the MQTT broker."""
    # full MQTT topic of the message
    topic: str
    # message body; a str, or a dict serialized on publish
    payload: Any
    # how a dict payload is serialized on publish: "json", "yaml" or "text"
    payload_serialization_type: str = "json"
26
+
27
@dataclass
class DbusSignalWithState:
    """A received D-Bus signal bundled with the subscription state it matched."""
    # active subscriptions for the bus name the signal arrived on
    bus_name_subscriptions: BusNameSubscriptions
    # D-Bus object path the signal was emitted from
    path: str
    # D-Bus interface that defines the signal
    interface_name: str
    # configuration of the subscription that matched this signal
    subscription_config: SubscriptionConfig
    # configuration entry for this specific signal
    signal_config: SignalConfig
    # positional signal arguments as received from D-Bus
    args: list[Any]
35
+
36
@dataclass
class FlowTriggerMessage:
    """Message emitted when a flow trigger fires; consumed by the flow processor."""
    # the flow that should be executed
    flow_config: FlowConfig
    # the trigger within the flow that fired
    flow_trigger_config: FlowTriggerConfig
    # moment the trigger fired
    timestamp: datetime
    # optional trigger-specific context merged into the flow execution context
    context: dict[str, Any] | None = None
42
+
43
class EventBroker:
    """Central hub of janus queues connecting the MQTT, D-Bus and flow tasks.

    Janus queues expose both a sync and an async endpoint, allowing
    callback-style (threaded) producers to hand work to asyncio consumers.
    """

    def __init__(self):
        self.mqtt_receive_queue = janus.Queue[MqttMessage]()
        self.mqtt_publish_queue = janus.Queue[MqttMessage]()
        self.dbus_signal_queue = janus.Queue[DbusSignalWithState]()
        self.flow_trigger_queue = janus.Queue[FlowTriggerMessage]()

    async def close(self):
        """Close all queues; individual close failures are ignored."""
        all_queues = (
            self.mqtt_receive_queue,
            self.mqtt_publish_queue,
            self.dbus_signal_queue,
            self.flow_trigger_queue,
        )
        await asyncio.gather(
            *(queue.aclose() for queue in all_queues),
            return_exceptions=True,
        )

    def on_mqtt_receive(self, msg: MqttMessage):
        """Enqueue an incoming MQTT message (sync side, callback-thread safe)."""
        self.mqtt_receive_queue.sync_q.put(msg)

    async def publish_to_mqtt(self, msg: MqttMessage):
        """Enqueue an outgoing MQTT message (async side)."""
        await self.mqtt_publish_queue.async_q.put(msg)

    def on_dbus_signal(self, signal: DbusSignalWithState):
        """Enqueue a received D-Bus signal (sync side, callback-thread safe)."""
        self.dbus_signal_queue.sync_q.put(signal)
@@ -0,0 +1,32 @@
1
+ from abc import ABC, abstractmethod
2
+ from typing import Any
3
+
4
+
5
+ class FlowExecutionContext:
6
+
7
+ def __init__(self, name: str | None, global_flows_context: dict[str, Any], flow_context: dict[str, Any]):
8
+ self.name = name
9
+ self.global_flows_context = global_flows_context
10
+ self.flow_context = flow_context
11
+
12
+ # per flow execution context
13
+ self.context: dict[str, Any] = {}
14
+
15
+ def get_aggregated_context(self) -> dict[str, Any]:
16
+ """Get the aggregated context for the flow execution."""
17
+ # Merge global flows context, flow context, and local context
18
+ context = {}
19
+ if self.global_flows_context:
20
+ context.update(self.global_flows_context)
21
+ if self.flow_context:
22
+ context.update(self.flow_context)
23
+ if self.context:
24
+ context.update(self.context)
25
+ return context
26
+
27
class FlowAction(ABC):
    """Abstract base class for a single step executed within a flow."""

    @abstractmethod
    async def execute(self, context: FlowExecutionContext):
        """Run this action against the given flow execution context."""
        ...
@@ -0,0 +1,26 @@
1
+ import logging
2
+
3
+ from dbus2mqtt import AppContext
4
+ from dbus2mqtt.config import FlowActionContextSetConfig
5
+ from dbus2mqtt.flow import FlowAction, FlowExecutionContext
6
+
7
+ logger = logging.getLogger(__name__)
8
+
9
class ContextSetAction(FlowAction):
    """Flow action that renders templates and stores the results in context.

    Rendered values may be written to the shared global flows context and/or
    to the per-execution context, depending on the action configuration.
    """

    def __init__(self, config: FlowActionContextSetConfig, app_context: AppContext):
        self.config = config
        self.templating = app_context.templating

    async def execute(self, context: FlowExecutionContext):
        """Render the configured context templates and merge the results."""
        render_context = context.get_aggregated_context()

        # global context updates are visible to every flow
        global_template = self.config.global_context
        if global_template:
            rendered = await self.templating.async_render_template(global_template, dict, render_context)
            logger.debug(f"Update global_context with: {rendered}")
            context.global_flows_context.update(rendered)

        # local context updates only affect this flow's executions
        local_template = self.config.context
        if local_template:
            rendered = await self.templating.async_render_template(local_template, dict, render_context)
            logger.debug(f"Update context with: {rendered}")
            context.context.update(rendered)
@@ -0,0 +1,39 @@
1
+
2
+ import logging
3
+
4
+ from jinja2.exceptions import TemplateRuntimeError
5
+
6
+ from dbus2mqtt import AppContext
7
+ from dbus2mqtt.config import FlowActionMqttPublishConfig
8
+ from dbus2mqtt.flow import FlowAction, FlowExecutionContext
9
+ from dbus2mqtt.mqtt.mqtt_client import MqttMessage
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
class MqttPublishAction(FlowAction):
    """Flow action that renders an MQTT topic/payload and queues the message.

    Template rendering errors are logged and the publish is skipped
    (best-effort): one failing flow execution must not break the processor.
    """

    def __init__(self, config: FlowActionMqttPublishConfig, app_context: AppContext):
        self.config = config
        self.event_broker = app_context.event_broker
        self.templating = app_context.templating

    async def execute(self, context: FlowExecutionContext):
        """Render topic and payload templates, then enqueue the MQTT message."""

        render_context = context.get_aggregated_context()

        try:
            mqtt_topic = await self.templating.async_render_template(self.config.topic, str, render_context)

            # "text" payloads stay strings; other payload types render to a dict
            payload_res_type = str if self.config.payload_type == "text" else dict
            payload = await self.templating.async_render_template(self.config.payload_template, payload_res_type, render_context)

        except TemplateRuntimeError as e:
            # known template failures: include the traceback for debugging
            logger.warning(f"Error rendering jinja template, flow: '{context.name}', error: {str(e)}. render_context={render_context}", exc_info=True)
            return
        except Exception as e:
            # unexpected failures: logged without traceback
            logger.warning(f"Error rendering jinja template, flow: '{context.name}', error: {str(e)}. render_context={render_context}", exc_info=False)
            return

        # fixed typo in debug message: was "public_mqtt"
        logger.debug(f"publish_mqtt: flow={context.name}, payload={payload}")

        await self.event_broker.publish_to_mqtt(MqttMessage(mqtt_topic, payload, payload_serialization_type=self.config.payload_type))
@@ -0,0 +1,197 @@
1
+ import asyncio
2
+ import logging
3
+
4
+ from datetime import datetime
5
+ from typing import Any
6
+
7
+ from apscheduler.schedulers.asyncio import AsyncIOScheduler
8
+
9
+ from dbus2mqtt import AppContext
10
+ from dbus2mqtt.config import FlowConfig, FlowTriggerConfig
11
+ from dbus2mqtt.event_broker import FlowTriggerMessage
12
+ from dbus2mqtt.flow import FlowAction, FlowExecutionContext
13
+ from dbus2mqtt.flow.actions.context_set import ContextSetAction
14
+ from dbus2mqtt.flow.actions.mqtt_publish import MqttPublishAction
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
class FlowScheduler:
    """Manages APScheduler jobs for flows with "schedule" triggers."""

    def __init__(self, app_context: AppContext):
        self.config = app_context.config
        self.event_broker = app_context.event_broker
        self.scheduler = AsyncIOScheduler()

    async def _schedule_flow_strigger(self, flow, trigger_config: FlowTriggerConfig):
        """APScheduler job target: enqueue a trigger message for the flow."""
        trigger = FlowTriggerMessage(flow, trigger_config, datetime.now())
        await self.event_broker.flow_trigger_queue.async_q.put(trigger)

    async def scheduler_task(self):
        """Start the scheduler, register the configured flows and idle forever."""

        self.scheduler.start()

        # configure global flow triggers
        self.start_flow_set(self.config.flows)

        # keep this coroutine alive; jobs run on the asyncio scheduler
        while True:
            await asyncio.sleep(1000)

    def start_flow_set(self, flows: list[FlowConfig]):
        """Create one scheduler job per "schedule" trigger, skipping existing ids."""
        for flow in flows:
            for trigger in flow.triggers:
                if trigger.type != "schedule":
                    continue

                if self.scheduler.get_job(trigger.id):
                    logger.debug(f"Skipping creation, flow scheduler already exists, id={trigger.id}")
                    continue

                # a schedule trigger configures either an interval or a cron schedule;
                # previously the duplicated add_job calls only differed in these two values
                if trigger.interval:
                    schedule_type, schedule_kwargs = "interval", trigger.interval
                elif trigger.cron:
                    schedule_type, schedule_kwargs = "cron", trigger.cron
                else:
                    # no schedule configured: nothing to start (and nothing to log)
                    continue

                logger.info(f"Starting scheduler[{trigger.id}] for flow {flow.id}")

                # Each schedule gets its own job
                self.scheduler.add_job(
                    self._schedule_flow_strigger,
                    schedule_type,
                    id=trigger.id,
                    max_instances=1,
                    misfire_grace_time=5,
                    coalesce=True,
                    args=[flow, trigger],
                    **schedule_kwargs
                )

    def stop_flow_set(self, flows):
        """Remove the scheduler jobs of all "schedule" triggers in the given flows."""
        for flow in flows:
            for trigger in flow.triggers:
                if trigger.type == "schedule":
                    logger.info(f"Stopping scheduler[{trigger.id}] for flow {flow.id}")
                    self.scheduler.remove_job(trigger.id)
79
+
80
class FlowActionContext:
    """Binds a flow configuration to its instantiated actions and contexts."""

    def __init__(self, app_context: AppContext, flow_config: FlowConfig, global_flows_context: dict[str, Any], flow_context: dict[str, Any]):
        self.app_context = app_context
        self.global_flows_context = global_flows_context
        self.flow_context = flow_context
        self.flow_config = flow_config

        self.flow_actions = self._setup_flow_actions()

    def _setup_flow_actions(self) -> list[FlowAction]:
        """Instantiate an action object for every supported action config."""

        # map action-config type to its implementation class
        known_actions = {
            "context_set": ContextSetAction,
            "mqtt_publish": MqttPublishAction,
        }

        actions: list[FlowAction] = []
        for action_config in self.flow_config.actions:
            action_cls = known_actions.get(action_config.type)
            if action_cls:
                actions.append(action_cls(action_config, self.app_context))

        return actions

    async def execute_actions(self, trigger_context: dict[str, Any] | None):
        """Run all actions sequentially within one fresh execution context."""

        # per flow execution context
        execution_context = FlowExecutionContext(
            self.flow_config.name,
            global_flows_context=self.global_flows_context,
            flow_context=self.flow_context)

        if trigger_context:
            execution_context.context.update(trigger_context)

        for action in self.flow_actions:
            await action.execute(execution_context)
117
+
118
class FlowProcessor:
    """Consumes flow trigger messages and executes the matching flow's actions."""

    def __init__(self, app_context: AppContext):
        self.app_context = app_context
        self.event_broker = app_context.event_broker

        # context shared between all flows
        self._global_context: dict[str, Any] = {}

        # flow id -> prepared action context
        self._flows: dict[str, FlowActionContext] = {}

        # register global flows
        self.register_flows(app_context.config.flows)

        # register dbus subscription flows
        for subscription in app_context.config.dbus.subscriptions:
            flow_context = {
                "subscription_bus_name": subscription.bus_name,
                "subscription_path": subscription.path,
                "subscription_interfaces": [i.interface for i in subscription.interfaces],
            }
            self.register_flows(subscription.flows, flow_context)

    def register_flows(self, flows: list[FlowConfig], flow_context: dict[str, Any] | None = None):
        """Register flows with the flow processor."""

        # create per call: a mutable `{}` default would be shared between calls
        if flow_context is None:
            flow_context = {}

        for flow_config in flows:
            flow_action_context = FlowActionContext(
                self.app_context,
                flow_config,
                self._global_context,
                flow_context
            )
            self._flows[flow_config.id] = flow_action_context

    async def flow_processor_task(self):
        """Continuously processes messages from the async queue."""

        while True:
            flow_trigger_message = await self.event_broker.flow_trigger_queue.async_q.get()  # Wait for a message
            try:
                await self._process_flow_trigger(flow_trigger_message)

            except Exception as e:
                # keep the processor loop alive; a failing flow is only logged
                logger.warning(f"flow_processor_task: Exception {e}", exc_info=True)
            finally:
                self.event_broker.flow_trigger_queue.async_q.task_done()

    async def _process_flow_trigger(self, flow_trigger_message: FlowTriggerMessage):
        """Look up the triggered flow and execute its actions."""
        log_message = f"on_trigger: {flow_trigger_message.flow_trigger_config.type}, time={flow_trigger_message.timestamp.isoformat()}"
        # schedule triggers fire frequently; keep those at debug level
        if flow_trigger_message.flow_trigger_config.type != "schedule":
            logger.info(log_message)
        else:
            logger.debug(log_message)

        flow_id = flow_trigger_message.flow_config.id

        flow = self._flows[flow_id]
        await flow.execute_actions(trigger_context=flow_trigger_message.context)
179
+
180
+ # # Create a flow from the YAML configuration
181
+ # for flow_config in config['flows']:
182
+ # flow_name = flow_config['name']
183
+ # triggers = flow_config.get('triggers', [])
184
+ # actions = flow_config.get('actions', [])
185
+
186
+ # with Flow(flow_name) as flow:
187
+ # data = "sensor_data"
188
+ # for action in actions:
189
+ # if action['type'] == 'python_script':
190
+ # process_data(data)
191
+ # elif action['type'] == 'mqtt_publish':
192
+ # mqtt_publish(action['topic'], action['message_template'], data)
193
+
194
+ # # Add scheduling trigger if defined
195
+ # for trigger in triggers:
196
+ # if trigger['type'] == 'schedule' and 'cron' in trigger:
197
+ # flow.schedule = CronSchedule(cron=trigger['cron'])
dbus2mqtt/main.py ADDED
@@ -0,0 +1,135 @@
1
+ import asyncio
2
+ import logging
3
+ import sys
4
+
5
+ from typing import cast
6
+
7
+ import colorlog
8
+ import dbus_next.aio as dbus_aio
9
+ import dotenv
10
+ import jsonargparse
11
+
12
+ from dbus2mqtt import AppContext
13
+ from dbus2mqtt.config import Config
14
+ from dbus2mqtt.dbus.dbus_client import DbusClient
15
+ from dbus2mqtt.event_broker import EventBroker
16
+ from dbus2mqtt.flow.flow_processor import FlowProcessor, FlowScheduler
17
+ from dbus2mqtt.mqtt.mqtt_client import MqttClient
18
+ from dbus2mqtt.template.dbus_template_functions import jinja_custom_dbus_functions
19
+ from dbus2mqtt.template.templating import TemplateEngine
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
async def dbus_processor_task(app_context: AppContext, flow_scheduler: FlowScheduler):
    """Connect the D-Bus client and run its queue processor tasks forever."""

    message_bus = dbus_aio.message_bus.MessageBus()
    dbus_client = DbusClient(app_context, message_bus, flow_scheduler)

    # expose the dbus helper functions to the template engine
    app_context.templating.add_functions(jinja_custom_dbus_functions(dbus_client))

    await dbus_client.connect()

    # a future that is never resolved keeps this task running
    run_forever = asyncio.get_running_loop().create_future()

    await asyncio.gather(
        run_forever,
        asyncio.create_task(dbus_client.dbus_signal_queue_processor_task()),
        asyncio.create_task(dbus_client.mqtt_receive_queue_processor_task()),
    )
41
+
42
async def mqtt_processor_task(app_context: AppContext):
    """Start the MQTT client loop and process the publish queue until cancelled."""

    mqtt_client = MqttClient(app_context)

    mqtt_client.connect()
    mqtt_client.client.loop_start()

    # a future that is never resolved keeps this task running
    run_forever = asyncio.get_running_loop().create_future()

    try:
        await asyncio.gather(
            run_forever,
            asyncio.create_task(mqtt_client.mqtt_publish_queue_processor_task()),
        )
    except asyncio.CancelledError:
        # stop paho's background network thread on shutdown
        mqtt_client.client.loop_stop()
59
+
60
async def flow_processor_task(app_context: AppContext):
    """Run the flow processor's queue consumer until cancelled."""

    flow_processor = FlowProcessor(app_context)

    # no gather needed: there is only a single long-running coroutine
    await flow_processor.flow_processor_task()
67
+
68
async def run(config: Config):
    """Wire up the application context and run all processor tasks concurrently."""

    app_context = AppContext(config, EventBroker(), TemplateEngine())
    flow_scheduler = FlowScheduler(app_context)

    try:
        await asyncio.gather(
            dbus_processor_task(app_context, flow_scheduler),
            mqtt_processor_task(app_context),
            flow_processor_task(app_context),
            asyncio.create_task(flow_scheduler.scheduler_task()),
        )
    except asyncio.CancelledError:
        # normal shutdown path
        pass
86
+
87
def main():
    """CLI entry point: load environment/config, set up logging, run the app.

    Removed a stale block of commented-out logging experiments.
    """

    # load environment from .env if it exists
    dotenv_file = dotenv.find_dotenv(usecwd=True)
    if len(dotenv_file) > 0:
        logger.info(f"Loaded environment variables from {dotenv_file}")
        dotenv.load_dotenv(dotenv_path=dotenv_file)

    # unless specified otherwise, load config from config.yaml
    parser = jsonargparse.ArgumentParser(default_config_files=["config.yaml"], default_env=True, env_prefix=False)

    parser.add_argument("--verbose", "-v", nargs="?", const=True, help="Enable verbose logging")
    parser.add_argument("--config", action="config")
    parser.add_class_arguments(Config)

    cfg = parser.parse_args()

    config: Config = cast(Config, parser.instantiate_classes(cfg))

    # colorized log output on stdout
    handler = colorlog.StreamHandler(stream=sys.stdout)
    handler.setFormatter(colorlog.ColoredFormatter(
        '%(log_color)s%(levelname)s:%(name)s:%(message)s',
        log_colors={
            "DEBUG": "light_black",
            "WARNING": "yellow",
            "ERROR": "red",
            "CRITICAL": "bold_red",
        }
    ))

    if cfg.verbose:
        logging.basicConfig(level=logging.DEBUG, handlers=[handler])
    else:
        logging.basicConfig(level=logging.INFO, handlers=[handler])

    # silence apscheduler's chatty logger; NOTE(review): source formatting is
    # ambiguous on whether this applied only in the non-verbose branch — confirm
    apscheduler_logger = logging.getLogger("apscheduler")
    apscheduler_logger.setLevel(logging.WARNING)

    logger.debug(f"config: {config}")

    asyncio.run(run(config))
@@ -0,0 +1,101 @@
1
+
2
+ import asyncio
3
+ import json
4
+ import logging
5
+
6
+ from typing import Any
7
+
8
+ import paho.mqtt.client as mqtt
9
+ import yaml
10
+
11
+ from paho.mqtt.enums import CallbackAPIVersion
12
+ from paho.mqtt.subscribeoptions import SubscribeOptions
13
+
14
+ from dbus2mqtt import AppContext
15
+ from dbus2mqtt.event_broker import MqttMessage
16
+
17
+ logger = logging.getLogger(__name__)
18
+
19
class MqttClient:
    """Paho-based MQTT client bridging the event broker queues to the broker."""

    def __init__(self, app_context: AppContext):
        self.config = app_context.config.mqtt
        self.event_broker = app_context.event_broker

        self.client = mqtt.Client(
            protocol=mqtt.MQTTv5,
            callback_api_version=CallbackAPIVersion.VERSION2
        )

        self.client.username_pw_set(
            username=self.config.username,
            password=self.config.password.get_secret_value()
        )

        self.client.on_connect = self.on_connect
        self.client.on_message = self.on_message

    def connect(self):
        """Start a non-blocking connection attempt to the configured broker."""
        self.client.connect_async(
            host=self.config.host,
            port=self.config.port
        )

    async def mqtt_publish_queue_processor_task(self):
        """Continuously processes messages from the async queue."""
        while True:
            msg = await self.event_broker.mqtt_publish_queue.async_q.get()  # Wait for a message
            try:
                payload = msg.payload
                # renamed from `type`, which shadowed the builtin
                serialization_type = msg.payload_serialization_type
                if isinstance(msg.payload, dict):
                    if serialization_type == "json":
                        payload = json.dumps(msg.payload)
                    elif serialization_type == "yaml":
                        payload = yaml.dump(msg.payload)
                    elif serialization_type == "text":
                        payload = str(payload)

                logger.debug(f"mqtt_publish_queue_processor_task: payload={payload}")
                self.client.publish(topic=msg.topic, payload=payload)
            except Exception as e:
                # keep the publish loop alive on serialization/publish errors
                logger.warning(f"mqtt_publish_queue_processor_task: Exception {e}", exc_info=True)
            finally:
                self.event_broker.mqtt_publish_queue.async_q.task_done()

    async def run(self):
        """Runs the MQTT loop in a non-blocking way with asyncio."""
        self.client.loop_start()  # Runs Paho's loop in a background thread
        await asyncio.Event().wait()  # Keeps the coroutine alive

    # The callback for when the client receives a CONNACK response from the server.
    def on_connect(self, client: mqtt.Client, userdata, flags, reason_code, properties):
        if reason_code.is_failure:
            logger.warning(f"on_connect: Failed to connect: {reason_code}. Will retry connection")
        else:
            logger.info(f"on_connect: Connected to {self.config.host}:{self.config.port}")
            # Subscribing in on_connect() means that if we lose the connection and
            # reconnect then subscriptions will be renewed.
            client.subscribe("dbus2mqtt/#", options=SubscribeOptions(noLocal=True))

    def on_message(self, client: mqtt.Client, userdata: Any, msg: mqtt.MQTTMessage):
        """Decode an incoming message and forward JSON payloads to the broker queue."""
        payload = msg.payload.decode()
        if msg.retain:
            # retained messages are stale state, not live commands; skip them
            logger.info(f"on_message: skipping msg with retain=True, topic={msg.topic}, payload={payload}")
            return

        try:
            json_payload = json.loads(payload)
            logger.debug(f"on_message: msg.topic={msg.topic}, msg.payload={json.dumps(json_payload)}")
            self.event_broker.on_mqtt_receive(MqttMessage(msg.topic, json_payload))
        except json.JSONDecodeError as e:
            logger.warning(f"on_message: Unexpected payload, expecting json, topic={msg.topic}, payload={payload}, error={e}")
@@ -0,0 +1,68 @@
1
+ import fnmatch
2
+ import logging
3
+
4
+ from typing import Any
5
+
6
+ from dbus_next.constants import ErrorType
7
+ from dbus_next.errors import DBusError
8
+
9
+ from dbus2mqtt.dbus.dbus_client import DbusClient
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
class DbusContext:
    """Exposes dbus helper operations used as jinja template functions."""

    def __init__(self, dbus_client: DbusClient):
        self.dbus_client = dbus_client

    def async_dbus_list_fn(self, bus_name_pattern: str):
        """Return all subscribed bus names matching the given glob pattern."""
        # removed a stray no-op statement (`self.dbus_client.subscriptions`)
        return [
            bus_name
            for bus_name in self.dbus_client.subscriptions.keys()
            if fnmatch.fnmatchcase(bus_name, bus_name_pattern)
        ]

    async def async_dbus_call_fn(self, bus_name: str, path: str, interface: str, method: str, method_args: list[Any] | None = None):
        """Call a dbus method on a subscribed object and return its result.

        Raises ValueError for a non-list method_args or an unknown bus/path.
        """
        # created per call: a mutable `[]` default would be shared between calls
        if method_args is None:
            method_args = []

        if not isinstance(method_args, list):
            # Pylance will flag this as unreachable. It is not: jinja2 can pass in any type
            raise ValueError("method_args must be a list")

        proxy_object = self.dbus_client.get_proxy_object(bus_name, path)
        if not proxy_object:
            raise ValueError(f"No matching subscription found for bus_name: {bus_name}, path: {path}")

        obj_interface = proxy_object.get_interface(interface)

        return await self.dbus_client.call_dbus_interface_method(obj_interface, method, method_args)

    async def async_dbus_property_get_fn(self, bus_name: str, path: str, interface: str, property: str, default_unsupported: Any = None):
        """Read a dbus property, mapping NOT_SUPPORTED to a default if given."""

        proxy_object = self.dbus_client.get_proxy_object(bus_name, path)
        if not proxy_object:
            raise ValueError(f"No matching subscription found for bus_name: {bus_name}, path: {path}")

        obj_interface = proxy_object.get_interface(interface)

        try:
            return await self.dbus_client.get_dbus_interface_property(obj_interface, property)
        except DBusError as e:
            if e.type == ErrorType.NOT_SUPPORTED.value and default_unsupported is not None:
                return default_unsupported
            # previously any other DBusError was swallowed and None returned;
            # propagate the error to the caller instead
            raise
57
def jinja_custom_dbus_functions(dbus_client: DbusClient) -> dict[str, Any]:
    """Build the mapping of jinja function names to dbus helper callables."""

    dbus_context = DbusContext(dbus_client)

    return {
        "dbus_list": dbus_context.async_dbus_list_fn,
        "dbus_call": dbus_context.async_dbus_call_fn,
        "dbus_property_get": dbus_context.async_dbus_property_get_fn,
    }