mmrelay-1.0-py3-none-any.whl
Potentially problematic release: this version of mmrelay has been flagged by the registry diff service.
- mmrelay/__init__.py +9 -0
- mmrelay/cli.py +384 -0
- mmrelay/config.py +218 -0
- mmrelay/config_checker.py +133 -0
- mmrelay/db_utils.py +309 -0
- mmrelay/log_utils.py +107 -0
- mmrelay/main.py +281 -0
- mmrelay/matrix_utils.py +754 -0
- mmrelay/meshtastic_utils.py +569 -0
- mmrelay/plugin_loader.py +336 -0
- mmrelay/plugins/__init__.py +3 -0
- mmrelay/plugins/base_plugin.py +212 -0
- mmrelay/plugins/debug_plugin.py +17 -0
- mmrelay/plugins/drop_plugin.py +120 -0
- mmrelay/plugins/health_plugin.py +64 -0
- mmrelay/plugins/help_plugin.py +55 -0
- mmrelay/plugins/map_plugin.py +323 -0
- mmrelay/plugins/mesh_relay_plugin.py +134 -0
- mmrelay/plugins/nodes_plugin.py +92 -0
- mmrelay/plugins/ping_plugin.py +118 -0
- mmrelay/plugins/telemetry_plugin.py +179 -0
- mmrelay/plugins/weather_plugin.py +208 -0
- mmrelay/setup_utils.py +263 -0
- mmrelay-1.0.dist-info/METADATA +160 -0
- mmrelay-1.0.dist-info/RECORD +29 -0
- mmrelay-1.0.dist-info/WHEEL +5 -0
- mmrelay-1.0.dist-info/entry_points.txt +2 -0
- mmrelay-1.0.dist-info/licenses/LICENSE +21 -0
- mmrelay-1.0.dist-info/top_level.txt +1 -0
mmrelay/config_checker.py
ADDED

@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+
+import yaml
+from yaml.loader import SafeLoader
+
+
+def get_config_paths():
+    """
+    Get a list of possible configuration file paths.
+
+    Returns:
+        list: A list of possible configuration file paths
+    """
+    from mmrelay.config import get_config_paths as get_paths
+
+    return get_paths()
+
+
+def check_config():
+    """
+    Check if the configuration file is valid.
+
+    Returns:
+        bool: True if the configuration is valid, False otherwise.
+    """
+    config_paths = get_config_paths()
+    config_path = None
+
+    # Try each config path in order until we find one that exists
+    for path in config_paths:
+        if os.path.isfile(path):
+            config_path = path
+            print(f"Found configuration file at: {config_path}")
+            try:
+                with open(config_path, "r") as f:
+                    config = yaml.load(f, Loader=SafeLoader)
+
+                # Check if config is empty
+                if not config:
+                    print("Error: Configuration file is empty or invalid")
+                    return False
+
+                # Check matrix section
+                if "matrix" not in config:
+                    print("Error: Missing 'matrix' section in config")
+                    return False
+
+                matrix_section = config["matrix"]
+                required_matrix_fields = ["homeserver", "access_token", "bot_user_id"]
+                missing_matrix_fields = [
+                    field
+                    for field in required_matrix_fields
+                    if field not in matrix_section
+                ]
+
+                if missing_matrix_fields:
+                    print(
+                        f"Error: Missing required fields in 'matrix' section: {', '.join(missing_matrix_fields)}"
+                    )
+                    return False
+
+                # Check matrix_rooms section
+                if "matrix_rooms" not in config or not config["matrix_rooms"]:
+                    print("Error: Missing or empty 'matrix_rooms' section in config")
+                    return False
+
+                if not isinstance(config["matrix_rooms"], list):
+                    print("Error: 'matrix_rooms' must be a list")
+                    return False
+
+                for i, room in enumerate(config["matrix_rooms"]):
+                    if not isinstance(room, dict):
+                        print(
+                            f"Error: Room {i+1} in 'matrix_rooms' must be a dictionary"
+                        )
+                        return False
+
+                    if "id" not in room:
+                        print(
+                            f"Error: Room {i+1} in 'matrix_rooms' is missing the 'id' field"
+                        )
+                        return False
+
+                # Check meshtastic section
+                if "meshtastic" not in config:
+                    print("Error: Missing 'meshtastic' section in config")
+                    return False
+
+                meshtastic_section = config["meshtastic"]
+                if "connection_type" not in meshtastic_section:
+                    print("Error: Missing 'connection_type' in 'meshtastic' section")
+                    return False
+
+                connection_type = meshtastic_section["connection_type"]
+                if connection_type not in ["tcp", "serial", "ble"]:
+                    print(
+                        f"Error: Invalid 'connection_type': {connection_type}. Must be 'tcp', 'serial', or 'ble'"
+                    )
+                    return False
+
+                # Check connection-specific fields
+                if (
+                    connection_type == "serial"
+                    and "serial_port" not in meshtastic_section
+                ):
+                    print("Error: Missing 'serial_port' for 'serial' connection type")
+                    return False
+
+                if connection_type == "tcp" and "host" not in meshtastic_section:
+                    print("Error: Missing 'host' for 'tcp' connection type")
+                    return False
+
+                if connection_type == "ble" and "ble_address" not in meshtastic_section:
+                    print("Error: Missing 'ble_address' for 'ble' connection type")
+                    return False
+
+                print("Configuration file is valid!")
+                return True
+            except yaml.YAMLError as e:
+                print(f"Error parsing YAML in {config_path}: {e}")
+                return False
+            except Exception as e:
+                print(f"Error checking configuration: {e}")
+                return False
+
+    print("Error: No configuration file found in any of the following locations:")
+    for path in config_paths:
+        print(f"  - {path}")
+    print("\nRun 'mmrelay --generate-config' to generate a sample configuration file.")
+    return False
mmrelay/db_utils.py
ADDED
@@ -0,0 +1,309 @@
+import json
+import os
+import sqlite3
+
+from mmrelay.config import get_data_dir
+from mmrelay.log_utils import get_logger
+
+# Global config variable that will be set from main.py
+config = None
+
+logger = get_logger(name="db_utils")
+
+
+# Get the database path
+def get_db_path():
+    """
+    Returns the path to the SQLite database file.
+    By default, uses the standard data directory (~/.mmrelay/data).
+    Can be overridden by setting 'path' under 'database' in config.yaml.
+    """
+    global config
+
+    # Check if config is available
+    if config is not None:
+        # Check if database path is specified in config (preferred format)
+        if "database" in config and "path" in config["database"]:
+            custom_path = config["database"]["path"]
+            if custom_path:
+                # Ensure the directory exists
+                db_dir = os.path.dirname(custom_path)
+                if db_dir:
+                    os.makedirs(db_dir, exist_ok=True)
+                logger.info(f"Using database path from config: {custom_path}")
+                return custom_path
+
+        # Check legacy format (db section)
+        if "db" in config and "path" in config["db"]:
+            custom_path = config["db"]["path"]
+            if custom_path:
+                # Ensure the directory exists
+                db_dir = os.path.dirname(custom_path)
+                if db_dir:
+                    os.makedirs(db_dir, exist_ok=True)
+                logger.warning(
+                    "Using 'db.path' configuration (legacy). 'database.path' is now the preferred format and 'db.path' will be deprecated in a future version."
+                )
+                return custom_path
+
+    # Use the standard data directory
+    return os.path.join(get_data_dir(), "meshtastic.sqlite")
+
+
+# Initialize SQLite database
+def initialize_database():
+    db_path = get_db_path()
+    # Check if database exists
+    if os.path.exists(db_path):
+        logger.info(f"Loading database from: {db_path}")
+    else:
+        logger.info(f"Creating new database at: {db_path}")
+    with sqlite3.connect(db_path) as conn:
+        cursor = conn.cursor()
+        # Updated table schema: matrix_event_id is now PRIMARY KEY, meshtastic_id is not necessarily unique
+        cursor.execute(
+            "CREATE TABLE IF NOT EXISTS longnames (meshtastic_id TEXT PRIMARY KEY, longname TEXT)"
+        )
+        cursor.execute(
+            "CREATE TABLE IF NOT EXISTS shortnames (meshtastic_id TEXT PRIMARY KEY, shortname TEXT)"
+        )
+        cursor.execute(
+            "CREATE TABLE IF NOT EXISTS plugin_data (plugin_name TEXT, meshtastic_id TEXT, data TEXT, PRIMARY KEY (plugin_name, meshtastic_id))"
+        )
+        # Changed the schema for message_map: matrix_event_id is now primary key
+        # Added a new column 'meshtastic_meshnet' to store the meshnet origin of the message.
+        # If table already exists, we try adding the column if it doesn't exist.
+        cursor.execute(
+            "CREATE TABLE IF NOT EXISTS message_map (meshtastic_id INTEGER, matrix_event_id TEXT PRIMARY KEY, matrix_room_id TEXT, meshtastic_text TEXT, meshtastic_meshnet TEXT)"
+        )
+
+        # Attempt to add meshtastic_meshnet column if it's missing (for upgrades)
+        # This is a no-op if the column already exists.
+        # If user runs fresh, it will already be there from CREATE TABLE IF NOT EXISTS.
+        try:
+            cursor.execute("ALTER TABLE message_map ADD COLUMN meshtastic_meshnet TEXT")
+        except sqlite3.OperationalError:
+            # Column already exists, or table just created with it
+            pass
+
+        conn.commit()
+
+
+def store_plugin_data(plugin_name, meshtastic_id, data):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "INSERT OR REPLACE INTO plugin_data (plugin_name, meshtastic_id, data) VALUES (?, ?, ?) ON CONFLICT (plugin_name, meshtastic_id) DO UPDATE SET data = ?",
+            (plugin_name, meshtastic_id, json.dumps(data), json.dumps(data)),
+        )
+        conn.commit()
+
+
+def delete_plugin_data(plugin_name, meshtastic_id):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "DELETE FROM plugin_data WHERE plugin_name=? AND meshtastic_id=?",
+            (plugin_name, meshtastic_id),
+        )
+        conn.commit()
+
+
+# Get the data for a given plugin and Meshtastic ID
+def get_plugin_data_for_node(plugin_name, meshtastic_id):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT data FROM plugin_data WHERE plugin_name=? AND meshtastic_id=?",
+            (
+                plugin_name,
+                meshtastic_id,
+            ),
+        )
+        result = cursor.fetchone()
+        return json.loads(result[0] if result else "[]")
+
+
+# Get the data for a given plugin
+def get_plugin_data(plugin_name):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT data FROM plugin_data WHERE plugin_name=? ",
+            (plugin_name,),
+        )
+        return cursor.fetchall()
+
+
+# Get the longname for a given Meshtastic ID
+def get_longname(meshtastic_id):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT longname FROM longnames WHERE meshtastic_id=?", (meshtastic_id,)
+        )
+        result = cursor.fetchone()
+        return result[0] if result else None
+
+
+def save_longname(meshtastic_id, longname):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "INSERT OR REPLACE INTO longnames (meshtastic_id, longname) VALUES (?, ?)",
+            (meshtastic_id, longname),
+        )
+        conn.commit()
+
+
+def update_longnames(nodes):
+    if nodes:
+        for node in nodes.values():
+            user = node.get("user")
+            if user:
+                meshtastic_id = user["id"]
+                longname = user.get("longName", "N/A")
+                save_longname(meshtastic_id, longname)
+
+
+def get_shortname(meshtastic_id):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT shortname FROM shortnames WHERE meshtastic_id=?", (meshtastic_id,)
+        )
+        result = cursor.fetchone()
+        return result[0] if result else None
+
+
+def save_shortname(meshtastic_id, shortname):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "INSERT OR REPLACE INTO shortnames (meshtastic_id, shortname) VALUES (?, ?)",
+            (meshtastic_id, shortname),
+        )
+        conn.commit()
+
+
+def update_shortnames(nodes):
+    if nodes:
+        for node in nodes.values():
+            user = node.get("user")
+            if user:
+                meshtastic_id = user["id"]
+                shortname = user.get("shortName", "N/A")
+                save_shortname(meshtastic_id, shortname)
+
+
+def store_message_map(
+    meshtastic_id,
+    matrix_event_id,
+    matrix_room_id,
+    meshtastic_text,
+    meshtastic_meshnet=None,
+):
+    """
+    Stores a message map in the database.
+
+    :param meshtastic_id: The Meshtastic message ID (integer or None)
+    :param matrix_event_id: The Matrix event ID (string, primary key)
+    :param matrix_room_id: The Matrix room ID (string)
+    :param meshtastic_text: The text of the Meshtastic message
+    :param meshtastic_meshnet: The name of the meshnet this message originated from.
+        This helps us identify remote vs local mesh origins.
+    """
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        logger.debug(
+            f"Storing message map: meshtastic_id={meshtastic_id}, matrix_event_id={matrix_event_id}, matrix_room_id={matrix_room_id}, meshtastic_text={meshtastic_text}, meshtastic_meshnet={meshtastic_meshnet}"
+        )
+        cursor.execute(
+            "INSERT OR REPLACE INTO message_map (meshtastic_id, matrix_event_id, matrix_room_id, meshtastic_text, meshtastic_meshnet) VALUES (?, ?, ?, ?, ?)",
+            (
+                meshtastic_id,
+                matrix_event_id,
+                matrix_room_id,
+                meshtastic_text,
+                meshtastic_meshnet,
+            ),
+        )
+        conn.commit()
+
+
+def get_message_map_by_meshtastic_id(meshtastic_id):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT matrix_event_id, matrix_room_id, meshtastic_text, meshtastic_meshnet FROM message_map WHERE meshtastic_id=?",
+            (meshtastic_id,),
+        )
+        result = cursor.fetchone()
+        logger.debug(
+            f"Retrieved message map by meshtastic_id={meshtastic_id}: {result}"
+        )
+        if result:
+            # result = (matrix_event_id, matrix_room_id, meshtastic_text, meshtastic_meshnet)
+            return result[0], result[1], result[2], result[3]
+        return None
+
+
+def get_message_map_by_matrix_event_id(matrix_event_id):
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT meshtastic_id, matrix_room_id, meshtastic_text, meshtastic_meshnet FROM message_map WHERE matrix_event_id=?",
+            (matrix_event_id,),
+        )
+        result = cursor.fetchone()
+        logger.debug(
+            f"Retrieved message map by matrix_event_id={matrix_event_id}: {result}"
+        )
+        if result:
+            # result = (meshtastic_id, matrix_room_id, meshtastic_text, meshtastic_meshnet)
+            return result[0], result[1], result[2], result[3]
+        return None
+
+
+def wipe_message_map():
+    """
+    Wipes all entries from the message_map table.
+    Useful when database.msg_map.wipe_on_restart or db.msg_map.wipe_on_restart is True,
+    ensuring no stale data remains.
+    """
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        cursor.execute("DELETE FROM message_map")
+        conn.commit()
+        logger.info("message_map table wiped successfully.")
+
+
+def prune_message_map(msgs_to_keep):
+    """
+    Prune the message_map table to keep only the most recent msgs_to_keep entries
+    in order to prevent database bloat.
+    We use the matrix_event_id's insertion order as a heuristic.
+    Note: matrix_event_id is a string, so we rely on the rowid or similar approach.
+
+    Approach:
+    - Count total rows.
+    - If total > msgs_to_keep, delete oldest entries based on rowid.
+    """
+    with sqlite3.connect(get_db_path()) as conn:
+        cursor = conn.cursor()
+        # Count total entries
+        cursor.execute("SELECT COUNT(*) FROM message_map")
+        total = cursor.fetchone()[0]
+
+        if total > msgs_to_keep:
+            # Delete oldest entries by rowid since matrix_event_id is primary key but not necessarily numeric.
+            # rowid is auto-incremented and reflects insertion order.
+            to_delete = total - msgs_to_keep
+            cursor.execute(
+                "DELETE FROM message_map WHERE rowid IN (SELECT rowid FROM message_map ORDER BY rowid ASC LIMIT ?)",
+                (to_delete,),
+            )
+            conn.commit()
+            logger.info(
+                f"Pruned {to_delete} old message_map entries, keeping last {msgs_to_keep}."
+            )
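Editor's note: a hedged usage sketch (not part of the package) exercising the message_map helpers above against the default database path. It assumes mmrelay is installed and ~/.mmrelay/data is writable; the IDs shown are placeholders.

from mmrelay import db_utils

db_utils.initialize_database()          # creates the tables on first run
db_utils.store_message_map(
    meshtastic_id=123456789,            # hypothetical Meshtastic packet ID
    matrix_event_id="$example:event",   # hypothetical Matrix event ID (primary key)
    matrix_room_id="!room:example.org",
    meshtastic_text="hello from the mesh",
    meshtastic_meshnet="home-mesh",
)
row = db_utils.get_message_map_by_matrix_event_id("$example:event")
# row -> (meshtastic_id, matrix_room_id, meshtastic_text, meshtastic_meshnet) or None
print(row)
db_utils.prune_message_map(msgs_to_keep=500)   # trim to the 500 newest rows by rowid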
mmrelay/log_utils.py
ADDED
@@ -0,0 +1,107 @@
+import logging
+import os
+from logging.handlers import RotatingFileHandler
+
+from mmrelay.cli import parse_arguments
+from mmrelay.config import get_log_dir
+
+# Global config variable that will be set from main.py
+config = None
+
+
+def get_logger(name):
+    logger = logging.getLogger(name=name)
+
+    # Default to INFO level if config is not available
+    log_level = logging.INFO
+
+    # Try to get log level from config
+    global config
+    if config is not None and "logging" in config and "level" in config["logging"]:
+        log_level = getattr(logging, config["logging"]["level"].upper())
+
+    logger.setLevel(log_level)
+    logger.propagate = False
+
+    # Add stream handler (console logging)
+    stream_handler = logging.StreamHandler()
+    stream_handler.setFormatter(
+        logging.Formatter(
+            fmt="%(asctime)s %(levelname)s:%(name)s:%(message)s",
+            datefmt="%Y-%m-%d %H:%M:%S %z",
+        )
+    )
+    logger.addHandler(stream_handler)
+
+    # Check command line arguments for log file path
+    args = parse_arguments()
+
+    # Check if file logging is enabled
+    if (
+        config is not None
+        and config.get("logging", {}).get("log_to_file", False)
+        or args.logfile
+    ):
+        # Priority: 1. Command line arg, 2. Config file, 3. Default location (~/.mmrelay/logs)
+        if args.logfile:
+            log_file = args.logfile
+        else:
+            config_log_file = (
+                config.get("logging", {}).get("filename")
+                if config is not None
+                else None
+            )
+
+            if config_log_file:
+                # Use the log file specified in config
+                log_file = config_log_file
+            else:
+                # Default to standard log directory
+                log_file = os.path.join(get_log_dir(), "mmrelay.log")
+
+        # Create log directory if it doesn't exist
+        log_dir = os.path.dirname(log_file)
+        if log_dir:  # Ensure non-empty directory paths exist
+            os.makedirs(log_dir, exist_ok=True)
+
+        # Log which file we're using (only for the first logger)
+        if name == "M<>M Relay":
+            # Create a basic logger to log the log file path
+            # This is needed because we can't use the logger we're creating to log its own creation
+            basic_logger = logging.getLogger("LogSetup")
+            basic_logger.setLevel(logging.INFO)
+            basic_handler = logging.StreamHandler()
+            basic_handler.setFormatter(
+                logging.Formatter(
+                    fmt="%(asctime)s %(levelname)s:%(name)s:%(message)s",
+                    datefmt="%Y-%m-%d %H:%M:%S %z",
+                )
+            )
+            basic_logger.addHandler(basic_handler)
+            basic_logger.info(f"Writing logs to: {log_file}")
+
+        # Create a file handler for logging
+        try:
+            # Set up size-based log rotation
+            max_bytes = 10 * 1024 * 1024  # Default 10 MB
+            backup_count = 1  # Default to 1 backup
+
+            if config is not None and "logging" in config:
+                max_bytes = config["logging"].get("max_log_size", max_bytes)
+                backup_count = config["logging"].get("backup_count", backup_count)
+            file_handler = RotatingFileHandler(
+                log_file, maxBytes=max_bytes, backupCount=backup_count, encoding="utf-8"
+            )
+        except Exception as e:
+            print(f"Error creating log file at {log_file}: {e}")
+            return logger  # Return logger without file handler
+
+        file_handler.setFormatter(
+            logging.Formatter(
+                fmt="%(asctime)s %(levelname)s:%(name)s:%(message)s",
+                datefmt="%Y-%m-%d %H:%M:%S %z",
+            )
+        )
+        logger.addHandler(file_handler)
+
+    return logger
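Editor's note: a minimal sketch (not part of the package) of how the module-level `config` that main.py normally assigns drives get_logger() above. It assumes mmrelay is installed and the script is run without command-line arguments that conflict with mmrelay's own parser, since get_logger() calls parse_arguments() internally.

from mmrelay import log_utils

log_utils.config = {
    "logging": {
        "level": "debug",                  # becomes logging.DEBUG via getattr()
        "log_to_file": True,               # enables the RotatingFileHandler branch
        "max_log_size": 5 * 1024 * 1024,   # rotate at 5 MB instead of the 10 MB default
        "backup_count": 2,                 # keep two rotated files instead of one
    }
}
logger = log_utils.get_logger(name="example")
logger.debug("goes to the console and, by default, ~/.mmrelay/logs/mmrelay.log")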