mmrelay-1.2.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. mmrelay/__init__.py +5 -0
  2. mmrelay/__main__.py +29 -0
  3. mmrelay/cli.py +2013 -0
  4. mmrelay/cli_utils.py +746 -0
  5. mmrelay/config.py +956 -0
  6. mmrelay/constants/__init__.py +65 -0
  7. mmrelay/constants/app.py +29 -0
  8. mmrelay/constants/config.py +78 -0
  9. mmrelay/constants/database.py +22 -0
  10. mmrelay/constants/formats.py +20 -0
  11. mmrelay/constants/messages.py +45 -0
  12. mmrelay/constants/network.py +45 -0
  13. mmrelay/constants/plugins.py +42 -0
  14. mmrelay/constants/queue.py +20 -0
  15. mmrelay/db_runtime.py +269 -0
  16. mmrelay/db_utils.py +1017 -0
  17. mmrelay/e2ee_utils.py +400 -0
  18. mmrelay/log_utils.py +274 -0
  19. mmrelay/main.py +439 -0
  20. mmrelay/matrix_utils.py +3091 -0
  21. mmrelay/meshtastic_utils.py +1245 -0
  22. mmrelay/message_queue.py +647 -0
  23. mmrelay/plugin_loader.py +1933 -0
  24. mmrelay/plugins/__init__.py +3 -0
  25. mmrelay/plugins/base_plugin.py +638 -0
  26. mmrelay/plugins/debug_plugin.py +30 -0
  27. mmrelay/plugins/drop_plugin.py +127 -0
  28. mmrelay/plugins/health_plugin.py +64 -0
  29. mmrelay/plugins/help_plugin.py +79 -0
  30. mmrelay/plugins/map_plugin.py +353 -0
  31. mmrelay/plugins/mesh_relay_plugin.py +222 -0
  32. mmrelay/plugins/nodes_plugin.py +92 -0
  33. mmrelay/plugins/ping_plugin.py +128 -0
  34. mmrelay/plugins/telemetry_plugin.py +179 -0
  35. mmrelay/plugins/weather_plugin.py +312 -0
  36. mmrelay/runtime_utils.py +35 -0
  37. mmrelay/setup_utils.py +828 -0
  38. mmrelay/tools/__init__.py +27 -0
  39. mmrelay/tools/mmrelay.service +19 -0
  40. mmrelay/tools/sample-docker-compose-prebuilt.yaml +30 -0
  41. mmrelay/tools/sample-docker-compose.yaml +30 -0
  42. mmrelay/tools/sample.env +10 -0
  43. mmrelay/tools/sample_config.yaml +120 -0
  44. mmrelay/windows_utils.py +346 -0
  45. mmrelay-1.2.6.dist-info/METADATA +145 -0
  46. mmrelay-1.2.6.dist-info/RECORD +50 -0
  47. mmrelay-1.2.6.dist-info/WHEEL +5 -0
  48. mmrelay-1.2.6.dist-info/entry_points.txt +2 -0
  49. mmrelay-1.2.6.dist-info/licenses/LICENSE +675 -0
  50. mmrelay-1.2.6.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1933 @@
+ # trunk-ignore-all(bandit)
+ import hashlib
+ import importlib
+ import importlib.util
+ import os
+ import re
+ import shlex
+ import shutil
+ import site
+ import subprocess
+ import sys
+ import tempfile
+ import threading
+ import time
+ from contextlib import contextmanager
+ from typing import List, Set
+ from urllib.parse import urlparse
+
+ try:
+     import schedule
+ except ImportError:
+     schedule = None
+
+ from mmrelay.config import get_app_path, get_base_dir
+ from mmrelay.constants.plugins import (
+     DEFAULT_ALLOWED_COMMUNITY_HOSTS,
+     PIP_SOURCE_FLAGS,
+     RISKY_REQUIREMENT_PREFIXES,
+ )
+ from mmrelay.log_utils import get_logger
+
+ # Global config variable that will be set from main.py
+ config = None
+
+ logger = get_logger(name="Plugins")
+ sorted_active_plugins = []
+ plugins_loaded = False
+
+ # Global scheduler management
+ _global_scheduler_thread = None
+ _global_scheduler_stop_event = None
+
+
+ try:
+     _PLUGIN_DEPS_DIR = os.path.join(get_base_dir(), "plugins", "deps")
+ except (OSError, RuntimeError, ValueError) as exc:  # pragma: no cover
+     logger.debug("Unable to resolve base dir for plugin deps at import time: %s", exc)
+     _PLUGIN_DEPS_DIR = None
+ else:
+     try:
+         os.makedirs(_PLUGIN_DEPS_DIR, exist_ok=True)
+     except OSError as exc:  # pragma: no cover - logging only in unusual environments
+         logger.debug(
+             f"Unable to create plugin dependency directory '{_PLUGIN_DEPS_DIR}': {exc}"
+         )
+         _PLUGIN_DEPS_DIR = None
+     else:
+         deps_path = os.fspath(_PLUGIN_DEPS_DIR)
+         if deps_path not in sys.path:
+             sys.path.append(deps_path)
+
+
+ def _collect_requirements(
+     requirements_file: str, visited: Set[str] | None = None
+ ) -> List[str]:
+     """
+     Parse a requirements file into a flattened list of installable requirement lines.
+
+     Ignores blank lines and full-line or inline comments, preserves PEP 508 requirement syntax,
+     and resolves nested includes and constraint files. Supported include forms:
+     - "-r <file>" or "--requirement <file>"
+     - "-c <file>" or "--constraint <file>"
+     - "--requirement=<file>" and "--constraint=<file>"
+     Relative include paths are resolved relative to the directory containing the given file.
+
+     Returns:
+         A list of requirement lines suitable for passing to pip. Returns an empty list if the
+         file cannot be read or if a nested include recursion is detected (the latter is logged
+         and the duplicate include is skipped).
+     """
+     normalized_path = os.path.abspath(requirements_file)
+     visited = visited or set()
+
+     if normalized_path in visited:
+         logger.warning(
+             "Requirements file recursion detected for %s; skipping duplicate include.",
+             normalized_path,
+         )
+         return []
+
+     visited.add(normalized_path)
+     requirements: List[str] = []
+     base_dir = os.path.dirname(normalized_path)
+
+     try:
+         with open(normalized_path, encoding="utf-8") as handle:
+             for raw_line in handle:
+                 line = raw_line.strip()
+                 if not line or line.startswith("#"):
+                     continue
+                 if " #" in line:
+                     line = line.split(" #", 1)[0].strip()
+                 if not line:
+                     continue
+
+                 lower_line = line.lower()
+
+                 def _resolve_nested(path_str: str) -> None:
+                     nested_path = (
+                         path_str
+                         if os.path.isabs(path_str)
+                         else os.path.join(base_dir, path_str)
+                     )
+                     requirements.extend(
+                         _collect_requirements(nested_path, visited=visited)
+                     )
+
+                 is_req_eq = lower_line.startswith("--requirement=")
+                 is_con_eq = lower_line.startswith("--constraint=")
+
+                 if is_req_eq or is_con_eq:
+                     nested = line.split("=", 1)[1].strip()
+                     _resolve_nested(nested)
+                     continue
+
+                 is_req = lower_line.startswith(("-r ", "--requirement "))
+                 is_con = lower_line.startswith(("-c ", "--constraint "))
+
+                 if is_req or is_con:
+                     parts = line.split(None, 1)
+                     if len(parts) == 2:
+                         _resolve_nested(parts[1].strip())
+                     else:
+                         directive_type = (
+                             "requirement include" if is_req else "constraint"
+                         )
+                         logger.warning(
+                             "Ignoring malformed %s directive in %s: %s",
+                             directive_type,
+                             normalized_path,
+                             raw_line.rstrip(),
+                         )
+                     continue
+
+                 # Check for malformed standalone directives
+                 if lower_line in ("-r", "-c", "--requirement", "--constraint"):
+                     logger.warning(
+                         "Malformed directive, missing file: %s",
+                         raw_line.rstrip(),
+                     )
+                     continue
+
+                 requirements.append(line)
+     except (FileNotFoundError, OSError) as e:
+         logger.warning("Error reading requirements file %s: %s", normalized_path, e)
+         return []
+
+     return requirements
+
+
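# A minimal usage sketch of the helper above; the directory layout and file
# contents are invented for illustration, and the expected output assumes the
# nested "-r" include resolves as documented.
import os
import tempfile

from mmrelay.plugin_loader import _collect_requirements

with tempfile.TemporaryDirectory() as demo_dir:
    with open(os.path.join(demo_dir, "extra.txt"), "w", encoding="utf-8") as fh:
        fh.write("schedule==1.2.2\n")
    with open(os.path.join(demo_dir, "requirements.txt"), "w", encoding="utf-8") as fh:
        fh.write("requests>=2.31  # inline comment is stripped\n")
        fh.write("-r extra.txt\n")
    # The nested include is resolved relative to demo_dir:
    print(_collect_requirements(os.path.join(demo_dir, "requirements.txt")))
    # -> ['requests>=2.31', 'schedule==1.2.2']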
+ @contextmanager
+ def _temp_sys_path(path: str):
+     """
+     Temporarily prepends a directory to Python's import search path for the duration of a with-block.
+
+     The given path is inserted at the front of sys.path on entry. On exit the first occurrence of the path is removed; if the path is not present, no error is raised.
+
+     Parameters:
+         path (str | os.PathLike): Directory to temporarily add to sys.path.
+     """
+     path = os.fspath(path)
+     sys.path.insert(0, path)
+     try:
+         yield
+     finally:
+         try:
+             sys.path.remove(path)
+         except ValueError:
+             pass
+
+
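# A small sketch of the context manager above; the path is invented. The
# directory leads sys.path only while the with-block is active.
import sys

from mmrelay.plugin_loader import _temp_sys_path

with _temp_sys_path("/opt/example-plugins"):
    print(sys.path[0])  # '/opt/example-plugins'
print("/opt/example-plugins" in sys.path)  # False once the block exits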
+ def _get_security_settings() -> dict:
+     """
+     Return the security settings mapping from the global configuration.
+
+     If the global `config` is unset or does not contain a `"security"` mapping, or if `"security"` exists but is not a dict, this returns an empty dict.
+
+     Returns:
+         dict: The security configuration dictionary, or an empty dict when unavailable or invalid.
+     """
+     if not config:
+         return {}
+     security_config = config.get("security", {})
+     return security_config if isinstance(security_config, dict) else {}
+
+
+ def _get_allowed_repo_hosts() -> list[str]:
+     """
+     Determine the normalized allowlist of community plugin repository hosts.
+
+     Reads the security configuration's "community_repo_hosts" value and returns a list
+     of lowercase host strings with surrounding whitespace removed. A single string is
+     treated as a one-item list. If the setting is missing or is neither a string nor a
+     list, returns a copy of DEFAULT_ALLOWED_COMMUNITY_HOSTS. Non-string or empty
+     entries in the configured list are ignored.
+
+     Returns:
+         list[str]: A list of allowed repository hostnames in lowercase.
+     """
+     security_config = _get_security_settings()
+     hosts = security_config.get("community_repo_hosts")
+
+     if hosts is None:
+         return list(DEFAULT_ALLOWED_COMMUNITY_HOSTS)
+
+     if isinstance(hosts, str):
+         hosts = [hosts]
+
+     if not isinstance(hosts, list):
+         return list(DEFAULT_ALLOWED_COMMUNITY_HOSTS)
+
+     return [
+         host.strip().lower() for host in hosts if isinstance(host, str) and host.strip()
+     ]
+
+
+ def _allow_local_plugin_paths() -> bool:
+     """
+     Determine whether local filesystem plugin paths are permitted for community plugins.
+
+     Returns:
+         True if the security setting `"allow_local_plugin_paths"` is enabled, False otherwise.
+     """
+     return bool(_get_security_settings().get("allow_local_plugin_paths", False))
+
+
+ def _host_in_allowlist(host: str, allowlist: list[str]) -> bool:
+     """
+     Determine whether a host matches or is a subdomain of any hostname in an allowlist.
+
+     Parameters:
+         host (str): Hostname to check.
+         allowlist (list[str]): List of allowed hostnames; comparison is case-insensitive.
+
+     Returns:
+         bool: `True` if `host` equals or is a subdomain of any entry in `allowlist`, `False` otherwise.
+     """
+     host = (host or "").lower()
+     if not host:
+         return False
+     for allowed in allowlist:
+         allowed = allowed.lower()
+         if host == allowed or host.endswith(f".{allowed}"):
+             return True
+     return False
+
+
+ def _normalize_repo_target(repo_url: str) -> tuple[str, str]:
+     """
+     Normalize a repository URL or SSH spec into a tuple of (scheme, host).
+
+     Returns:
+         tuple[str, str]: `scheme` normalized to lowercase (uses "ssh" for `git@` SSH specs and `git+ssh`/`ssh+git` schemes), and `host` lowercased or an empty string if no host is present.
+     """
+     repo_url = (repo_url or "").strip()
+     if repo_url.startswith("git@"):
+         _, _, host_and_path = repo_url.partition("@")
+         host, _, _ = host_and_path.partition(":")
+         return "ssh", host.lower()
+     parsed = urlparse(repo_url)
+     scheme = (parsed.scheme or "").lower()
+     host = (parsed.hostname or "").lower()
+     if scheme in {"git+ssh", "ssh+git"}:
+         scheme = "ssh"
+     return scheme, host
+
+
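# How the normalizer above treats the common repository spellings; these
# results follow directly from the code, with invented example repos:
from mmrelay.plugin_loader import _normalize_repo_target

print(_normalize_repo_target("git@github.com:example/plugin.git"))       # ('ssh', 'github.com')
print(_normalize_repo_target("git+ssh://git@github.com/example/x.git"))  # ('ssh', 'github.com')
print(_normalize_repo_target("https://GitHub.com/example/plugin"))       # ('https', 'github.com')
print(_normalize_repo_target("/home/user/plugins/local-checkout"))       # ('', '')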
+ def _is_repo_url_allowed(repo_url: str) -> bool:
+     """
+     Determine whether a repository URL or local filesystem path is permitted for community plugins.
+
+     Accepts a repository specification (URL or local path). Rejects empty values and entries beginning with '-'. Local filesystem paths are permitted only when configured security settings allow local plugin paths and the path exists; `file://` schemes follow the same restriction. Plain `http://` URLs are disallowed. Only `https` and `ssh` repository URLs are permitted, and the repository host must be included in the configured allowlist.
+
+     Returns:
+         bool: `True` if the repository is allowed, `False` otherwise.
+     """
+     repo_url = (repo_url or "").strip()
+     if not repo_url:
+         return False
+
+     if repo_url.startswith("-"):
+         return False
+
+     scheme, host = _normalize_repo_target(repo_url)
+
+     if not scheme:
+         if _allow_local_plugin_paths():
+             if os.path.exists(repo_url):
+                 return True
+             logger.error("Local repository path does not exist: %s", repo_url)
+             return False
+         logger.error(
+             "Invalid repository '%s'. Local paths are disabled, and remote URLs must include a scheme (e.g., 'https://').",
+             repo_url,
+         )
+         return False
+
+     if scheme == "file":
+         if _allow_local_plugin_paths():
+             return True
+         logger.error("file:// repositories are disabled for security reasons.")
+         return False
+
+     if scheme == "http":
+         logger.error("Plain HTTP community plugin URLs are not allowed: %s", repo_url)
+         return False
+
+     if scheme not in {"https", "ssh"}:
+         logger.error("Unsupported repository scheme '%s' for %s", scheme, repo_url)
+         return False
+
+     allowed_hosts = _get_allowed_repo_hosts()
+     if not _host_in_allowlist(host, allowed_hosts):
+         logger.error(
+             "Repository host '%s' is not in the allowed community host list %s",
+             host or "unknown",
+             allowed_hosts,
+         )
+         return False
+
+     return True
+
+
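# A sketch of how the allowlist checks above read the module-global config
# (normally assigned from main.py); the hosts and URLs are illustrative:
import mmrelay.plugin_loader as plugin_loader

plugin_loader.config = {
    "security": {
        "community_repo_hosts": ["github.com", "gitlab.com"],
        "allow_local_plugin_paths": False,
    }
}
print(plugin_loader._is_repo_url_allowed("https://github.com/example/plugin.git"))  # True
print(plugin_loader._is_repo_url_allowed("http://github.com/example/plugin.git"))   # False: plain HTTP
print(plugin_loader._is_repo_url_allowed("https://example.org/plugin.git"))         # False: host not allowlisted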
+ def _is_requirement_risky(req_string: str) -> bool:
+     """
+     Determine whether a requirement line references a VCS or URL source and should be treated as risky.
+
+     Checks for known risky prefixes (VCS/URL specifiers) or the presence of both `@` and `://`, which indicate a URL-based requirement.
+
+     Returns:
+         `True` if the requirement references a VCS or URL source, `False` otherwise.
+     """
+     lowered = req_string.lower()
+     return any(lowered.startswith(prefix) for prefix in RISKY_REQUIREMENT_PREFIXES) or (
+         "@" in req_string and "://" in req_string
+     )
+
+
+ # Pre-compute short-form flag characters for efficiency
+ PIP_SHORT_SOURCE_FLAGS = {
+     f[1] for f in PIP_SOURCE_FLAGS if len(f) == 2 and f.startswith("-")
+ }
+
+
+ def _filter_risky_requirement_lines(
+     requirement_lines: List[str],
+ ) -> tuple[List[str], List[str]]:
+     """
+     Categorizes requirement lines into safe and flagged groups based on whether they reference VCS or URL sources.
+
+     This function purely classifies lines without checking configuration. The caller should decide
+     whether to install flagged requirements based on security settings.
+
+     Returns:
+         safe_lines (List[str]): Requirement lines considered safe for installation.
+         flagged_lines (List[str]): Requirement lines that reference VCS/URL sources and were flagged as risky.
+     """
+     safe_lines: List[str] = []
+     flagged_lines: List[str] = []
+
+     for line in requirement_lines:
+         # Tokenize line for validation
+         tokens = shlex.split(line, posix=True, comments=True)
+         if not tokens:
+             continue
+
+         # Check if any token in line is risky
+         line_is_risky = False
+         for token in tokens:
+             # Handle editable flags with values (--editable=url)
+             if token.startswith("-") and "=" in token:
+                 flag_name, _, flag_value = token.partition("=")
+                 if flag_name.lower() in PIP_SOURCE_FLAGS and _is_requirement_risky(
+                     flag_value
+                 ):
+                     line_is_risky = True
+                 continue
+
+             # Handle short-form flags with attached values (-iflagvalue, -ivalue)
+             if token.startswith("-") and not token.startswith("--") and len(token) > 2:
+                 flag_char = token[1]
+                 if flag_char in PIP_SHORT_SOURCE_FLAGS:
+                     flag_value = token[2:]  # Extract everything after the flag character
+                     if _is_requirement_risky(flag_value):
+                         line_is_risky = True
+                 continue
+
+             # Handle flags that take values
+             if token.lower() in PIP_SOURCE_FLAGS:
+                 continue  # Skip flag tokens, as they don't indicate risk by themselves
+
+             # Check if token itself is risky
+             if _is_requirement_risky(token):
+                 line_is_risky = True
+
+         if line_is_risky:
+             flagged_lines.append(line)
+         else:
+             safe_lines.append(line)
+
+     return safe_lines, flagged_lines
+
+
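# Classifying a few requirement lines with the helper above; the git+ entry is
# flagged via RISKY_REQUIREMENT_PREFIXES (assumed to cover the usual VCS
# specifiers), and the direct-URL entry via the combined '@' + '://' check:
from mmrelay.plugin_loader import _filter_risky_requirement_lines

safe, flagged = _filter_risky_requirement_lines(
    [
        "requests>=2.31",
        "git+https://github.com/example/pkg.git",
        "pkg@https://example.com/pkg-1.0.tar.gz",
    ]
)
print(safe)     # ['requests>=2.31']
print(flagged)  # the two VCS/URL-based lines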
+ def _filter_risky_requirements(
+     requirements: List[str],
+ ) -> tuple[List[str], List[str], bool]:
+     """
+     Remove requirement tokens that point to VCS/URL sources unless explicitly allowed.
+
+     Deprecated: Use _filter_risky_requirement_lines for line-based filtering.
+     """
+     # For backward compatibility, assume requirements are lines
+     safe_lines, flagged_lines = _filter_risky_requirement_lines(requirements)
+     allow_untrusted = bool(
+         _get_security_settings().get("allow_untrusted_dependencies", False)
+     )
+     return safe_lines, flagged_lines, allow_untrusted
+
+
+ def _clean_python_cache(directory: str) -> None:
+     """
+     Remove Python bytecode cache files and __pycache__ directories under the given directory.
+
+     Walks the directory tree rooted at `directory` and deletes any `__pycache__` directories and `.pyc` files it finds; removal errors are logged and ignored so the operation is non-fatal.
+
+     Parameters:
+         directory (str): Path to the directory to clean of Python cache files.
+     """
+     if not os.path.isdir(directory):
+         return
+
+     cache_dirs_removed = 0
+     pyc_files_removed = 0
+     for root, dirs, files in os.walk(directory):
+         # Remove __pycache__ directories
+         if "__pycache__" in dirs:
+             cache_path = os.path.join(root, "__pycache__")
+             try:
+                 shutil.rmtree(cache_path)
+                 logger.debug(f"Removed Python cache directory: {cache_path}")
+                 cache_dirs_removed += 1
+             except OSError as e:
+                 logger.debug(f"Could not remove cache directory {cache_path}: {e}")
+             # Remove from dirs list to prevent walking into it
+             dirs.remove("__pycache__")
+
+         # Also remove any .pyc files in the current directory
+         pyc_files = (f for f in files if f.endswith(".pyc"))
+         for pyc_file in pyc_files:
+             pyc_path = os.path.join(root, pyc_file)
+             try:
+                 os.remove(pyc_path)
+                 logger.debug(f"Removed .pyc file: {pyc_path}")
+                 pyc_files_removed += 1
+             except OSError as e:
+                 logger.debug(f"Could not remove .pyc file {pyc_path}: {e}")
+
+     if cache_dirs_removed > 0 or pyc_files_removed > 0:
+         log_parts = []
+         if cache_dirs_removed > 0:
+             log_parts.append(
+                 f"{cache_dirs_removed} Python cache director{'y' if cache_dirs_removed == 1 else 'ies'}"
+             )
+         if pyc_files_removed > 0:
+             log_parts.append(
+                 f"{pyc_files_removed} .pyc file{'' if pyc_files_removed == 1 else 's'}"
+             )
+         logger.info(f"Cleaned {' and '.join(log_parts)} from {directory}")
+
+
+ def _reset_caches_for_tests():
+     """
+     Reset the global plugin loader caches to their initial state for testing purposes.
+
+     Clears cached plugin instances and loading state to ensure test isolation and prevent interference between test runs.
+     """
+     global sorted_active_plugins, plugins_loaded
+     sorted_active_plugins = []
+     plugins_loaded = False
+
+
+ def _refresh_dependency_paths() -> None:
+     """
+     Ensure packages installed into user or site directories become importable.
+
+     This function collects candidate site paths from site.getusersitepackages() and
+     site.getsitepackages() (when available), and registers each directory with the
+     import system. It prefers site.addsitedir(path) but falls back to inserting the
+     path at the front of sys.path if addsitedir fails. After modifying the import
+     paths it calls importlib.invalidate_caches() so newly installed packages are
+     discoverable.
+
+     Side effects:
+         - May modify sys.path and the interpreter's site directories.
+         - Calls importlib.invalidate_caches() to refresh import machinery.
+         - Logs warnings if adding a directory via site.addsitedir fails.
+     """
+
+     candidate_paths = []
+
+     try:
+         user_site = site.getusersitepackages()
+         if isinstance(user_site, str):
+             candidate_paths.append(user_site)
+         else:
+             candidate_paths.extend(user_site)
+     except AttributeError:
+         logger.debug("site.getusersitepackages() not available in this environment.")
+
+     try:
+         site_packages = site.getsitepackages()
+         candidate_paths.extend(site_packages)
+     except AttributeError:
+         logger.debug("site.getsitepackages() not available in this environment.")
+
+     if _PLUGIN_DEPS_DIR:
+         candidate_paths.append(os.fspath(_PLUGIN_DEPS_DIR))
+
+     for path in dict.fromkeys(candidate_paths):  # dedupe while preserving order
+         if not path:
+             continue
+         if path not in sys.path:
+             try:
+                 site.addsitedir(path)
+             except OSError as e:
+                 logger.warning(
+                     f"site.addsitedir failed for '{path}': {e}. Falling back to sys.path.insert(0, ...)."
+                 )
+                 sys.path.insert(0, path)
+
+     # Ensure import machinery notices new packages
+     importlib.invalidate_caches()
+
+
+ def _install_requirements_for_repo(repo_path: str, repo_name: str) -> None:
+     """
+     Install Python dependencies for a community plugin repository from a requirements.txt file.
+
+     If a requirements.txt file exists at repo_path, this function will attempt to install the listed
+     dependencies and then refresh interpreter import paths so newly installed packages become importable.
+
+     Behavior highlights:
+     - No-op if requirements.txt is missing or empty.
+     - Respects the global auto-install configuration; if auto-install is disabled, the function logs and returns.
+     - In a pipx-managed environment (detected via PIPX_* env vars) it uses `pipx inject mmrelay ...` to
+       add dependencies to the application's pipx venv.
+     - Otherwise it uses `python -m pip install -r requirements.txt` and adds `--user` when not running
+       inside a virtual environment.
+     - After a successful install it calls the path refresh routine so the interpreter can import newly
+       installed packages.
+
+     Parameters that need extra context:
+     - repo_path: filesystem path to the plugin repository directory (the function looks for
+       repo_path/requirements.txt).
+     - repo_name: human-readable repository name used in log messages.
+
+     Side effects:
+     - Installs packages (via pipx or pip) and updates interpreter import paths.
+     - Logs on success or failure; on installation failure it logs an exception and a warning that the
+       plugin may not work correctly without its dependencies.
+     """
+
+     requirements_path = os.path.join(repo_path, "requirements.txt")
+     if not os.path.isfile(requirements_path):
+         return
+
+     if not _check_auto_install_enabled(config):
+         logger.warning(
+             "Auto-install of requirements for %s disabled by config; skipping.",
+             repo_name,
+         )
+         return
+
+     try:
+         in_pipx = any(
+             key in os.environ
+             for key in ("PIPX_HOME", "PIPX_LOCAL_VENVS", "PIPX_BIN_DIR")
+         )
+
+         # Collect requirements as full lines to preserve PEP 508 compliance
+         # (version specifiers, environment markers, etc.)
+         requirements_lines = _collect_requirements(requirements_path)
+
+         safe_requirements, flagged_requirements = _filter_risky_requirement_lines(
+             requirements_lines
+         )
+
+         # Check security configuration for handling flagged requirements
+         allow_untrusted = bool(
+             _get_security_settings().get("allow_untrusted_dependencies", False)
+         )
+
+         if flagged_requirements:
+             if allow_untrusted:
+                 logger.warning(
+                     "Allowing %d flagged dependency entries for %s due to security.allow_untrusted_dependencies=True",
+                     len(flagged_requirements),
+                     repo_name,
+                 )
+                 # Include flagged requirements when allowed
+                 safe_requirements.extend(flagged_requirements)
+             else:
+                 logger.warning(
+                     "Skipping %d flagged dependency entries for %s. Set security.allow_untrusted_dependencies=True to override.",
+                     len(flagged_requirements),
+                     repo_name,
+                 )
+         else:
+             pass
+
+         installed_packages = False
+
+         if in_pipx:
+             logger.info("Installing requirements for plugin %s with pipx", repo_name)
+             pipx_path = shutil.which("pipx")
+             if not pipx_path:
+                 raise FileNotFoundError("pipx executable not found on PATH")
+             # Check if there are actual packages to install (not just flags)
+             packages = [r for r in safe_requirements if not r.startswith("-")]
+             if packages:
+                 # Write safe requirements to a temporary file to handle hashed requirements
+                 # and environment markers properly
+                 with tempfile.NamedTemporaryFile(
+                     mode="w", suffix=".txt", delete=False
+                 ) as temp_file:
+                     temp_path = temp_file.name
+                     for entry in safe_requirements:
+                         temp_file.write(entry + "\n")
+
+                 try:
+                     cmd = [
+                         pipx_path,
+                         "inject",
+                         "mmrelay",
+                         "--requirement",
+                         temp_path,
+                     ]
+                     _run(cmd, timeout=600)
+                     installed_packages = True
+                 finally:
+                     # Clean up the temporary file
+                     try:
+                         os.unlink(temp_path)
+                     except OSError:
+                         logger.debug(
+                             "Failed to clean up temporary requirements file: %s",
+                             temp_path,
+                         )
+             else:
+                 logger.info(
+                     "No dependencies listed in %s; skipping pipx injection.",
+                     requirements_path,
+                 )
+         else:
+             in_venv = (sys.prefix != getattr(sys, "base_prefix", sys.prefix)) or (
+                 "VIRTUAL_ENV" in os.environ
+             )
+             logger.info("Installing requirements for plugin %s with pip", repo_name)
+             packages = [r for r in safe_requirements if not r.startswith("-")]
+             if not packages:
+                 logger.info(
+                     "Requirements in %s provided no installable packages; skipping pip install.",
+                     requirements_path,
+                 )
+             else:
+                 cmd = [
+                     sys.executable,
+                     "-m",
+                     "pip",
+                     "install",
+                     "--disable-pip-version-check",
+                     "--no-input",
+                 ]
+                 if not in_venv:
+                     cmd.append("--user")
+
+                 # Write safe requirements to a temporary file to handle hashed requirements properly
+                 with tempfile.NamedTemporaryFile(
+                     mode="w", suffix=".txt", delete=False
+                 ) as temp_file:
+                     temp_path = temp_file.name
+                     for entry in safe_requirements:
+                         temp_file.write(entry + "\n")
+
+                 try:
+                     cmd.extend(["-r", temp_path])
+                     _run(cmd, timeout=600)
+                     installed_packages = True
+                 finally:
+                     # Clean up the temporary file
+                     try:
+                         os.unlink(temp_path)
+                     except OSError:
+                         logger.debug(
+                             "Failed to clean up temporary requirements file: %s",
+                             temp_path,
+                         )
+
+         if installed_packages:
+             logger.info("Successfully installed requirements for plugin %s", repo_name)
+             _refresh_dependency_paths()
+         else:
+             logger.info("No dependency installation run for plugin %s", repo_name)
+     except (subprocess.CalledProcessError, FileNotFoundError):
+         logger.exception(
+             "Error installing requirements for plugin %s (requirements: %s)",
+             repo_name,
+             requirements_path,
+         )
+         logger.warning(
+             "Plugin %s may not work correctly without its dependencies",
+             repo_name,
+         )
+
+
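# Calling the installer above against a hypothetical community checkout; the
# path mirrors the <base_dir>/plugins/community layout used elsewhere in this
# module, and the call is a no-op when requirements.txt is absent:
from mmrelay.plugin_loader import _install_requirements_for_repo

_install_requirements_for_repo(
    repo_path="/home/user/.mmrelay/plugins/community/example-plugin",
    repo_name="example-plugin",
)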
+ def _get_plugin_dirs(plugin_type):
+     """
+     Return a prioritized list of existing plugin directories for the given plugin type.
+
+     Attempts to ensure and prefer a per-user plugins directory (base_dir/plugins/<type>) and also includes a local application plugins directory (app_path/plugins/<type>) for backward compatibility. Each directory is created if possible; directories that cannot be created or accessed are omitted from the result.
+
+     Parameters:
+         plugin_type (str): Plugin category, e.g. "custom" or "community".
+
+     Returns:
+         list[str]: Ordered list of plugin directories to search (user directory first when available, then local directory).
+     """
+     dirs = []
+
+     # Check user directory first (preferred location)
+     user_dir = os.path.join(get_base_dir(), "plugins", plugin_type)
+     try:
+         os.makedirs(user_dir, exist_ok=True)
+         dirs.append(user_dir)
+     except (OSError, PermissionError) as e:
+         logger.warning(f"Cannot create user plugin directory {user_dir}: {e}")
+
+     # Check local directory (backward compatibility)
+     local_dir = os.path.join(get_app_path(), "plugins", plugin_type)
+     try:
+         os.makedirs(local_dir, exist_ok=True)
+         dirs.append(local_dir)
+     except (OSError, PermissionError):
+         # Skip local directory if we can't create it (e.g., in Docker)
+         logger.debug(f"Cannot create local plugin directory {local_dir}, skipping")
+
+     return dirs
+
+
+ def get_custom_plugin_dirs():
+     """
+     Return the list of directories to search for custom plugins, ordered by priority.
+
+     The directories include the user-specific custom plugins directory and a local directory for backward compatibility.
+     """
+     return _get_plugin_dirs("custom")
+
+
+ def get_community_plugin_dirs():
+     """
+     List community plugin directories in priority order.
+
+     Includes the per-user community plugins directory and a legacy local application directory for backward compatibility; directories that cannot be accessed or created are omitted.
+
+     Returns:
+         list[str]: Filesystem paths to search for community plugins, ordered from highest to lowest priority.
+     """
+     return _get_plugin_dirs("community")
+
+
+ def _run(cmd, timeout=120, retry_attempts=1, retry_delay=1, **kwargs):
+     # Validate command to prevent shell injection
+     """
+     Run a subprocess command with validated arguments, optional retries, and a configurable timeout.
+
+     Validates that `cmd` is a non-empty list of non-empty strings, disallows `shell=True`, sets `text=True` by default, and optionally retries failed attempts with a delay.
+
+     Parameters:
+         cmd (list[str]): Command and arguments to execute; must be a non-empty list of non-empty strings.
+         timeout (int | float): Maximum seconds to allow the process to run before raising TimeoutExpired.
+         retry_attempts (int): Number of execution attempts (minimum 1).
+         retry_delay (int | float): Seconds to wait between retry attempts.
+         **kwargs: Additional keyword arguments forwarded to subprocess.run (e.g., cwd, env). `text=True` is used by default if not provided.
+
+     Returns:
+         subprocess.CompletedProcess: The completed process object returned by subprocess.run.
+
+     Raises:
+         TypeError: If `cmd` is not a list or any element of `cmd` is not a string.
+         ValueError: If `cmd` is empty, contains empty/whitespace-only arguments, or if `shell=True` is provided.
+         subprocess.CalledProcessError: If the subprocess exits with a non-zero status.
+         subprocess.TimeoutExpired: If the process exceeds the specified timeout.
+     """
+     if not isinstance(cmd, list):
+         raise TypeError("cmd must be a list of str")
+     if not cmd:
+         raise ValueError("Command list cannot be empty")
+     if not all(isinstance(arg, str) for arg in cmd):
+         raise TypeError("all command arguments must be strings")
+     if any(not arg.strip() for arg in cmd):
+         raise ValueError("command arguments cannot be empty/whitespace")
+     if kwargs.get("shell"):
+         raise ValueError("shell=True is not allowed in _run")
+     # Ensure text mode by default
+     kwargs.setdefault("text", True)
+
+     attempts = max(int(retry_attempts or 1), 1)
+     delay = max(float(retry_delay or 0), 0.0)
+
+     for attempt in range(1, attempts + 1):
+         try:
+             return subprocess.run(cmd, check=True, timeout=timeout, **kwargs)
+         except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as exc:
+             if attempt >= attempts:
+                 raise
+             logger.warning(
+                 "Command %s failed on attempt %d/%d: %s",
+                 cmd[0],
+                 attempt,
+                 attempts,
+                 exc,
+             )
+             if delay:
+                 time.sleep(delay)
+
+
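# Exercising the validated runner above; assumes git is on PATH. Extra keyword
# arguments such as capture_output are forwarded to subprocess.run:
from mmrelay.plugin_loader import _run

result = _run(
    ["git", "--version"],
    timeout=30,
    retry_attempts=2,
    retry_delay=1,
    capture_output=True,
)
print(result.stdout.strip())  # e.g. 'git version 2.43.0'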
+ def _run_git(cmd, timeout=120, **kwargs):
+     """
+     Run a git command using the module's safe subprocess runner with conservative retry defaults.
+
+     Parameters:
+         cmd (list[str]): Command and arguments to run (e.g., ['git', 'clone', '...']).
+         timeout (int): Maximum seconds to wait for each attempt.
+         **kwargs: Additional options forwarded to `_run` (can override retries).
+
+     Returns:
+         subprocess.CompletedProcess: The completed process result containing `returncode`, `stdout`, and `stderr`.
+     """
+     kwargs.setdefault("retry_attempts", 3)
+     kwargs.setdefault("retry_delay", 2)
+     return _run(cmd, timeout=timeout, **kwargs)
+
+
+ def _check_auto_install_enabled(config):
+     """
+     Determine if automatic dependency installation is enabled in the provided configuration.
+
+     Parameters:
+         config (dict|None): Configuration mapping; expected to contain a "security" dict with an
+             optional boolean "auto_install_deps" key.
+
+     Returns:
+         bool: `True` if automatic installation is enabled, `False` otherwise. If `config` is falsy
+         or the key is missing, automatic installation is enabled by default.
+     """
+     if not config:
+         return True
+     return bool(config.get("security", {}).get("auto_install_deps", True))
+
+
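# The default-on behavior of the check above, shown directly:
from mmrelay.plugin_loader import _check_auto_install_enabled

print(_check_auto_install_enabled(None))                                        # True
print(_check_auto_install_enabled({"security": {}}))                            # True
print(_check_auto_install_enabled({"security": {"auto_install_deps": False}}))  # False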
+ def _raise_install_error(pkg_name):
+     """
+     Log a warning about disabled auto-install and raise a CalledProcessError.
+
+     Parameters:
+         pkg_name (str): Name of the package that could not be installed (used in the log message).
+
+     Raises:
+         subprocess.CalledProcessError: Always raised to signal an installation failure when auto-install is disabled.
+     """
+     logger.warning(
+         f"Auto-install disabled; cannot install {pkg_name}. See docs for enabling."
+     )
+     raise subprocess.CalledProcessError(1, "pip/pipx")
+
+
+ def clone_or_update_repo(repo_url, ref, plugins_dir):
+     """
+     Ensure a community plugin git repository exists under plugins_dir and is checked out at the specified ref.
+
+     Attempts to clone the repository into plugins_dir/<repo_name> or update an existing clone so that it is on the requested ref. The ref argument must be a dict with keys `"type"` (either `"tag"` or `"branch"`) and `"value"` (the tag or branch name). Falls back to common default branches ("main", "master") when appropriate.
+
+     Parameters:
+         repo_url (str): URL or SSH spec of the git repository to clone or update.
+         ref (dict): Reference specification with keys:
+             - type (str): "tag" or "branch".
+             - value (str): The tag or branch name to check out.
+         plugins_dir (str): Directory under which the repository should be placed.
+
+     Returns:
+         bool: `True` if the repository was successfully cloned or updated, `False` otherwise.
+     """
+     repo_url = (repo_url or "").strip()
+     ref_type = ref.get("type")  # expected: "tag" or "branch"
+     ref_value = (ref.get("value") or "").strip()
+
+     if not _is_repo_url_allowed(repo_url):
+         return False
+     allowed_ref_types = {"tag", "branch"}
+     if ref_type not in allowed_ref_types:
+         logger.error(
+             "Invalid ref type %r (expected 'tag' or 'branch') for %r",
+             ref_type,
+             repo_url,
+         )
+         return False
+     if not ref_value:
+         logger.error("Missing ref value for %s on %r", ref_type, repo_url)
+         return False
+     if ref_value.startswith("-"):
+         logger.error("Ref value looks invalid (starts with '-'): %r", ref_value)
+         return False
+     if not re.fullmatch(r"[A-Za-z0-9][A-Za-z0-9._/-]*", ref_value):
+         logger.error("Invalid %s name supplied: %r", ref_type, ref_value)
+         return False
+
+     # Extract the repository name from the URL
+     repo_name = os.path.splitext(os.path.basename(repo_url.rstrip("/")))[0]
+     repo_path = os.path.join(plugins_dir, repo_name)
+
+     # Default branch names to try if ref is not specified
+     default_branches = ["main", "master"]
+
+     # Log what we're trying to do
+     logger.info(f"Using {ref_type} '{ref_value}' for repository {repo_name}")
+
+     # If it's a branch and one of the default branches, we'll handle it specially
+     is_default_branch = ref_type == "branch" and ref_value in default_branches
+
+     if os.path.isdir(repo_path):
+         try:
+             # Fetch all branches but don't fetch tags to avoid conflicts
+             try:
+                 _run_git(["git", "-C", repo_path, "fetch", "origin"], timeout=120)
+             except subprocess.CalledProcessError as e:
+                 logger.warning(f"Error fetching from remote: {e}")
+                 # Continue anyway, we'll try to use what we have
+
+             # If it's a default branch, handle it differently
+             if is_default_branch:
+                 try:
+                     # Check if we're already on the right branch
+                     current_branch = _run_git(
+                         ["git", "-C", repo_path, "rev-parse", "--abbrev-ref", "HEAD"],
+                         capture_output=True,
+                     ).stdout.strip()
+
+                     if current_branch == ref_value:
+                         # We're on the right branch, just pull
+                         try:
+                             _run_git(
+                                 ["git", "-C", repo_path, "pull", "origin", ref_value],
+                                 timeout=120,
+                             )
+                             logger.info(
+                                 f"Updated repository {repo_name} branch {ref_value}"
+                             )
+                             return True
+                         except subprocess.CalledProcessError as e:
+                             logger.warning(f"Error pulling branch {ref_value}: {e}")
+                             # Continue anyway, we'll use what we have
+                             return True
+                     else:
+                         # Switch to the right branch
+                         _run_git(
+                             ["git", "-C", repo_path, "checkout", ref_value],
+                             timeout=120,
+                         )
+                         _run_git(
+                             ["git", "-C", repo_path, "pull", "origin", ref_value],
+                             timeout=120,
+                         )
+                         if ref_type == "branch":
+                             logger.info(f"Switched to and updated branch {ref_value}")
+                         else:
+                             logger.info(f"Switched to and updated tag {ref_value}")
+                         return True
+                 except subprocess.CalledProcessError:
+                     # If we can't checkout the specified branch, try the other default branch
+                     other_default = "main" if ref_value == "master" else "master"
+                     try:
+                         logger.warning(
+                             f"Branch {ref_value} not found, trying {other_default}"
+                         )
+                         _run_git(
+                             ["git", "-C", repo_path, "checkout", other_default],
+                             timeout=120,
+                         )
+                         _run_git(
+                             ["git", "-C", repo_path, "pull", "origin", other_default],
+                             timeout=120,
+                         )
+                         logger.info(
+                             f"Using {other_default} branch instead of {ref_value}"
+                         )
+                         return True
+                     except subprocess.CalledProcessError:
+                         # If that fails too, we can't update the repository
+                         logger.warning(
+                             "Could not checkout any default branch, repository update failed"
+                         )
+                         return False
+             else:
+                 if ref_type == "branch":
+                     try:
+                         _run_git(
+                             ["git", "-C", repo_path, "checkout", ref_value],
+                             timeout=120,
+                         )
+                         _run_git(
+                             ["git", "-C", repo_path, "pull", "origin", ref_value],
+                             timeout=120,
+                         )
+                         logger.info(
+                             f"Updated repository {repo_name} to branch {ref_value}"
+                         )
+                         return True
+                     except subprocess.CalledProcessError as exc:
+                         logger.warning(
+                             "Failed to update branch %s for %s: %s",
+                             ref_value,
+                             repo_name,
+                             exc,
+                         )
+                         return False
+
+                 # Handle tag checkout
+                 # Check if we're already on the correct tag/commit
+                 try:
+                     # Get the current commit hash
+                     current_commit = _run_git(
+                         ["git", "-C", repo_path, "rev-parse", "HEAD"],
+                         capture_output=True,
+                     ).stdout.strip()
+
+                     # Get the commit hash for the tag
+                     tag_commit = None
+                     try:
+                         tag_commit = _run_git(
+                             ["git", "-C", repo_path, "rev-parse", ref_value],
+                             capture_output=True,
+                         ).stdout.strip()
+                     except subprocess.CalledProcessError:
+                         # Tag doesn't exist locally, we'll need to fetch it
+                         pass
+
+                     # If we're already at the tag's commit, we're done
+                     if tag_commit and current_commit == tag_commit:
+                         logger.info(
+                             f"Repository {repo_name} is already at tag {ref_value}"
+                         )
+                         return True
+
+                     # Otherwise, try to checkout the tag or branch
+                     _run_git(
+                         ["git", "-C", repo_path, "checkout", ref_value],
+                         timeout=120,
+                     )
+                     logger.info(f"Updated repository {repo_name} to tag {ref_value}")
+                     return True
+                 except subprocess.CalledProcessError:
+                     # If tag checkout fails, try to fetch it specifically
+                     logger.warning(
+                         f"Tag {ref_value} not found locally, trying to fetch it specifically"
+                     )
+                     try:
+                         # Try to fetch the specific tag, but first remove any existing tag with the same name
+                         try:
+                             # Delete the local tag if it exists to avoid conflicts
+                             _run_git(
+                                 ["git", "-C", repo_path, "tag", "-d", ref_value],
+                                 timeout=120,
+                             )
+                         except subprocess.CalledProcessError:
+                             # Tag doesn't exist locally, which is fine
+                             pass
+
+                         # Now fetch the tag from remote
+                         try:
+                             # Try to fetch the tag
+                             _run_git(
+                                 [
+                                     "git",
+                                     "-C",
+                                     repo_path,
+                                     "fetch",
+                                     "origin",
+                                     f"refs/tags/{ref_value}",
+                                 ],
+                                 timeout=120,
+                             )
+                         except subprocess.CalledProcessError:
+                             # If that fails, try to fetch the tag without the refs/tags/ prefix
+                             _run_git(
+                                 [
+                                     "git",
+                                     "-C",
+                                     repo_path,
+                                     "fetch",
+                                     "origin",
+                                     f"refs/tags/{ref_value}:refs/tags/{ref_value}",
+                                 ],
+                                 timeout=120,
+                             )
+
+                         _run_git(
+                             ["git", "-C", repo_path, "checkout", ref_value],
+                             timeout=120,
+                         )
+                         logger.info(
+                             f"Successfully fetched and checked out tag {ref_value}"
+                         )
+                         return True
+                     except subprocess.CalledProcessError:
+                         # If that fails too, try as a branch
+                         logger.warning(
+                             f"Could not fetch tag {ref_value}, trying as a branch"
+                         )
+                         try:
+                             _run_git(
+                                 ["git", "-C", repo_path, "fetch", "origin", ref_value],
+                                 timeout=120,
+                             )
+                             _run_git(
+                                 ["git", "-C", repo_path, "checkout", ref_value],
+                                 timeout=120,
+                             )
+                             _run_git(
+                                 ["git", "-C", repo_path, "pull", "origin", ref_value],
+                                 timeout=120,
+                             )
+                             logger.info(
+                                 f"Updated repository {repo_name} to branch {ref_value}"
+                             )
+                             return True
+                         except subprocess.CalledProcessError:
+                             # If all else fails, just use a default branch
+                             logger.warning(
+                                 f"Could not checkout {ref_value} as tag or branch, trying default branches"
+                             )
+                             for default_branch in default_branches:
+                                 try:
+                                     _run_git(
+                                         [
+                                             "git",
+                                             "-C",
+                                             repo_path,
+                                             "checkout",
+                                             default_branch,
+                                         ],
+                                         timeout=120,
+                                     )
+                                     _run_git(
+                                         [
+                                             "git",
+                                             "-C",
+                                             repo_path,
+                                             "pull",
+                                             "origin",
+                                             default_branch,
+                                         ],
+                                         timeout=120,
+                                     )
+                                     logger.info(
+                                         f"Using {default_branch} instead of {ref_value}"
+                                     )
+                                     return True
+                                 except subprocess.CalledProcessError:
+                                     continue
+
+                             # If we get here, we couldn't checkout any branch
+                             logger.warning(
+                                 "Could not checkout any branch, using current state"
+                             )
+                             return True
+         except (subprocess.CalledProcessError, FileNotFoundError) as e:
+             logger.error(f"Error updating repository {repo_name}: {e}")
+             logger.error(
+                 f"Please manually git clone the repository {repo_url} into {repo_path}"
+             )
+             return False
+     else:
+         # Repository doesn't exist yet, clone it
+         try:
+             os.makedirs(plugins_dir, exist_ok=True)
+         except (OSError, PermissionError):
+             logger.exception(f"Cannot create plugin directory {plugins_dir}")
+             logger.error(f"Skipping repository {repo_name} due to permission error")
+             return False
+
+         # Now try to clone the repository
+         try:
+             # If it's a default branch, just clone it directly
+             if is_default_branch:
+                 try:
+                     # Try to clone with the specified branch
+                     _run_git(
+                         ["git", "clone", "--branch", ref_value, repo_url],
+                         cwd=plugins_dir,
+                         timeout=120,
+                     )
+                     if ref_type == "branch":
+                         logger.info(
+                             f"Cloned repository {repo_name} from {repo_url} at branch {ref_value}"
+                         )
+                     else:
+                         logger.info(
+                             f"Cloned repository {repo_name} from {repo_url} at tag {ref_value}"
+                         )
+                     return True
+                 except subprocess.CalledProcessError:
+                     # If that fails, try the other default branch
+                     other_default = "main" if ref_value == "master" else "master"
+                     try:
+                         logger.warning(
+                             f"Could not clone with branch {ref_value}, trying {other_default}"
+                         )
+                         _run_git(
+                             ["git", "clone", "--branch", other_default, repo_url],
+                             cwd=plugins_dir,
+                             timeout=120,
+                         )
+                         logger.info(
+                             f"Cloned repository {repo_name} from {repo_url} at branch {other_default}"
+                         )
+                         return True
+                     except subprocess.CalledProcessError:
+                         # If that fails too, clone without specifying a branch
+                         logger.warning(
+                             f"Could not clone with branch {other_default}, cloning default branch"
+                         )
+                         _run_git(
+                             ["git", "clone", repo_url],
+                             cwd=plugins_dir,
+                             timeout=120,
+                         )
+                         logger.info(
+                             f"Cloned repository {repo_name} from {repo_url} (default branch)"
+                         )
+                         return True
+             else:
+                 # It's a tag, try to clone with the tag
+                 try:
+                     # Try to clone with the specified tag
+                     _run_git(
+                         ["git", "clone", "--branch", ref_value, repo_url],
+                         cwd=plugins_dir,
+                         timeout=120,
+                     )
+                     if ref_type == "branch":
+                         logger.info(
+                             f"Cloned repository {repo_name} from {repo_url} at branch {ref_value}"
+                         )
+                     else:
+                         logger.info(
+                             f"Cloned repository {repo_name} from {repo_url} at tag {ref_value}"
+                         )
+                     return True
+                 except subprocess.CalledProcessError:
+                     # If that fails, clone without specifying a tag
+                     logger.warning(
+                         f"Could not clone with tag {ref_value}, cloning default branch"
+                     )
+                     _run_git(
+                         ["git", "clone", repo_url],
+                         cwd=plugins_dir,
+                         timeout=120,
+                     )
+
+                     # Then try to fetch and checkout the tag
+                     try:
+                         # Try to fetch the tag
+                         try:
+                             _run_git(
+                                 [
+                                     "git",
+                                     "-C",
+                                     repo_path,
+                                     "fetch",
+                                     "origin",
+                                     f"refs/tags/{ref_value}",
+                                 ]
+                             )
+                         except subprocess.CalledProcessError:
+                             # If that fails, try to fetch the tag without the refs/tags/ prefix
+                             _run_git(
+                                 [
+                                     "git",
+                                     "-C",
+                                     repo_path,
+                                     "fetch",
+                                     "origin",
+                                     f"refs/tags/{ref_value}:refs/tags/{ref_value}",
+                                 ]
+                             )
+
+                         # Now checkout the tag
+                         _run_git(
+                             ["git", "-C", repo_path, "checkout", ref_value],
+                             timeout=120,
+                         )
+                         if ref_type == "branch":
+                             logger.info(
+                                 f"Cloned repository {repo_name} and checked out branch {ref_value}"
+                             )
+                         else:
+                             logger.info(
+                                 f"Cloned repository {repo_name} and checked out tag {ref_value}"
+                             )
+                         return True
+                     except subprocess.CalledProcessError:
+                         # If that fails, try as a branch
+                         try:
+                             logger.warning(
+                                 f"Could not checkout {ref_value} as a tag, trying as a branch"
+                             )
+                             _run_git(
+                                 ["git", "-C", repo_path, "fetch", "origin", ref_value],
+                                 timeout=120,
+                             )
+                             _run_git(
+                                 ["git", "-C", repo_path, "checkout", ref_value],
+                                 timeout=120,
+                             )
+                             logger.info(
+                                 f"Cloned repository {repo_name} and checked out branch {ref_value}"
+                             )
+                             return True
+                         except subprocess.CalledProcessError:
+                             logger.warning(
+                                 f"Could not checkout {ref_value}, using default branch"
+                             )
+                             logger.info(
+                                 f"Cloned repository {repo_name} from {repo_url} (default branch)"
+                             )
+                             return True
+         except (subprocess.CalledProcessError, FileNotFoundError):
+             logger.exception(f"Error cloning repository {repo_name}")
+             logger.error(
+                 f"Please manually git clone the repository {repo_url} into {repo_path}"
+             )
+             return False
+
+
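# Pinning a community plugin to a tag with the function above; the repository
# URL is invented, and the call assumes github.com is present in the configured
# host allowlist:
from mmrelay.plugin_loader import clone_or_update_repo, get_community_plugin_dirs

plugins_dir = get_community_plugin_dirs()[0]
ok = clone_or_update_repo(
    "https://github.com/example/mmrelay-example-plugin.git",
    {"type": "tag", "value": "v1.0.0"},
    plugins_dir,
)
print("checkout succeeded:", ok)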
+ def load_plugins_from_directory(directory, recursive=False):
+     """
+     Discovers and instantiates Plugin classes from Python modules in the given directory.
+
+     Searches directory (optionally recursively) for .py files, imports each module under an isolated name, and instantiates any top-level `Plugin` class found. On import failures for missing dependencies, the function may attempt to install those dependencies when auto-install is enabled; it also refreshes import paths and retries loading. The function may modify interpreter import state (e.g., entries in sys.modules) and can invoke external installers when auto-install is enabled.
+
+     Parameters:
+         directory (str): Path to the directory containing plugin Python files.
+         recursive (bool): If True, scan subdirectories recursively; otherwise only the top-level directory.
+
+     Returns:
+         list: Instances of discovered plugin classes (may be empty).
+     """
+     plugins = []
+     if os.path.isdir(directory):
+         # Clean Python cache to ensure fresh code loading
+         _clean_python_cache(directory)
+         for root, _dirs, files in os.walk(directory):
+             for filename in files:
+                 if filename.endswith(".py"):
+                     plugin_path = os.path.join(root, filename)
+                     module_name = (
+                         "plugin_"
+                         + hashlib.sha256(plugin_path.encode("utf-8")).hexdigest()
+                     )
+                     spec = importlib.util.spec_from_file_location(
+                         module_name, plugin_path
+                     )
+                     if not spec or not getattr(spec, "loader", None):
+                         logger.warning(
+                             f"Skipping plugin {plugin_path}: no import spec/loader."
+                         )
+                         continue
+                     plugin_module = importlib.util.module_from_spec(spec)
+
+                     # Create a compatibility layer for plugins
+                     # This allows plugins to import from 'plugins' or 'mmrelay.plugins'
+                     if "mmrelay.plugins" not in sys.modules:
+                         import mmrelay.plugins
+
+                         sys.modules["mmrelay.plugins"] = mmrelay.plugins
+
+                     # For backward compatibility with older plugins
+                     if "plugins" not in sys.modules:
+                         import mmrelay.plugins
+
+                         sys.modules["plugins"] = mmrelay.plugins
+
+                     plugin_dir = os.path.dirname(plugin_path)
+
+                     try:
+                         with _temp_sys_path(plugin_dir):
+                             spec.loader.exec_module(plugin_module)
+                         if hasattr(plugin_module, "Plugin"):
+                             plugins.append(plugin_module.Plugin())
+                         else:
+                             logger.warning(
+                                 f"{plugin_path} does not define a Plugin class."
+                             )
+                     except ModuleNotFoundError as e:
+                         missing_module = getattr(e, "name", None)
+                         if not missing_module:
+                             m = re.search(
+                                 r"No module named ['\"]([^'\"]+)['\"]", str(e)
+                             )
+                             missing_module = m.group(1) if m else str(e)
+                         # Prefer top-level distribution name for installation
+                         raw = (missing_module or "").strip()
+                         top = raw.split(".", 1)[0]
+                         m = re.match(r"[A-Za-z0-9][A-Za-z0-9._-]*", top)
+                         if not m:
+                             logger.warning(
+                                 f"Refusing to auto-install suspicious dependency name from {plugin_path!s}: {raw!r}"
+                             )
+                             raise
+                         missing_pkg = m.group(0)
+                         logger.warning(
+                             f"Missing dependency for plugin {plugin_path}: {missing_pkg}"
+                         )
+
+                         # Try to automatically install the missing dependency
+                         try:
+                             if not _check_auto_install_enabled(config):
+                                 _raise_install_error(missing_pkg)
+                             # Check if we're running in a pipx environment
+                             in_pipx = (
+                                 "PIPX_HOME" in os.environ
+                                 or "PIPX_LOCAL_VENVS" in os.environ
+                             )
+
+                             if in_pipx:
+                                 logger.info(
+                                     f"Attempting to install missing dependency with pipx inject: {missing_pkg}"
+                                 )
+                                 pipx_path = shutil.which("pipx")
+                                 if not pipx_path:
+                                     raise FileNotFoundError(
+                                         "pipx executable not found on PATH"
+                                     )
+                                 _run(
+                                     [pipx_path, "inject", "mmrelay", missing_pkg],
+                                     timeout=300,
+                                 )
+                             else:
+                                 in_venv = (
+                                     sys.prefix
+                                     != getattr(sys, "base_prefix", sys.prefix)
+                                 ) or ("VIRTUAL_ENV" in os.environ)
+                                 logger.info(
+                                     f"Attempting to install missing dependency with pip: {missing_pkg}"
+                                 )
+                                 cmd = [
+                                     sys.executable,
+                                     "-m",
+                                     "pip",
+                                     "install",
+                                     missing_pkg,
+                                     "--disable-pip-version-check",
+                                     "--no-input",
+                                 ]
+                                 if not in_venv:
+                                     cmd += ["--user"]
+                                 _run(cmd, timeout=300)
+
+                             logger.info(
+                                 f"Successfully installed {missing_pkg}, retrying plugin load"
+                             )
+                             try:
+                                 _refresh_dependency_paths()
+                             except (OSError, ImportError, AttributeError) as e:
+                                 logger.debug(
+                                     f"Path refresh after auto-install failed: {e}"
+                                 )
+
+                             # Try to load the module again
+                             try:
+                                 with _temp_sys_path(plugin_dir):
+                                     spec.loader.exec_module(plugin_module)
+
+                                 if hasattr(plugin_module, "Plugin"):
+                                     plugins.append(plugin_module.Plugin())
+                                 else:
+                                     logger.warning(
+                                         f"{plugin_path} does not define a Plugin class."
+                                     )
+                             except ModuleNotFoundError:
+                                 logger.exception(
+                                     f"Module {missing_module} still not available after installation. "
+                                     f"The package name might be different from the import name."
+                                 )
+                             except Exception:
+                                 logger.exception(
+                                     "Error loading plugin %s after dependency installation",
+                                     plugin_path,
+                                 )
+
+                         except subprocess.CalledProcessError:
+                             logger.exception(
+                                 f"Failed to automatically install {missing_pkg}. "
+                                 f"Please install manually:\n"
+                                 f" pipx inject mmrelay {missing_pkg} # if using pipx\n"
+                                 f" pip install {missing_pkg} # if using pip\n"
+                                 f" pip install --user {missing_pkg} # if not in a venv"
+                             )
+                     except Exception:
+                         logger.exception(f"Error loading plugin {plugin_path}")
+             if not recursive:
+                 break
+
+     return plugins
+
+
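# A sketch of scanning the custom plugin directories with the loader above;
# on a fresh install this simply prints nothing:
from mmrelay.plugin_loader import get_custom_plugin_dirs, load_plugins_from_directory

for plugin_dir in get_custom_plugin_dirs():
    for plugin in load_plugins_from_directory(plugin_dir, recursive=False):
        print(type(plugin).__name__, getattr(plugin, "priority", None))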
+ def schedule_job(plugin_name: str, interval: int = 1):
+     """
+     Create a job that runs every specified interval for a plugin.
+
+     Parameters:
+         plugin_name (str): Name of the plugin, used to tag the job
+         interval (int): Interval count; the time unit is chosen later on the returned job
+
+     Returns:
+         Job object that can be configured with time units and actions, or None when the
+         schedule library is unavailable
+     """
+     if schedule is None:
+         return None
+
+     job = schedule.every(interval)
+     job.tag(plugin_name)
+     return job
+
+
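# Completing a job definition returned by schedule_job above; the task body is
# a stand-in. A schedule.Job takes a unit (.minutes, .hours, ...) and then .do():
from mmrelay.plugin_loader import schedule_job


def refresh_nodes():
    print("refreshing node list")


job = schedule_job("nodes", interval=5)
if job is not None:  # None when the schedule library is unavailable
    job.minutes.do(refresh_nodes)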
+ def clear_plugin_jobs(plugin_name: str) -> None:
+     """Clear all jobs for a specific plugin."""
+     if schedule is not None:
+         schedule.clear(plugin_name)
+
+
+ def start_global_scheduler():
+     """
+     Start the global scheduler thread for all plugins.
+
+     Creates and starts a single daemon thread that runs schedule.run_pending()
+     for all plugins. This eliminates race conditions from multiple threads
+     accessing the schedule library's global state.
+     """
+     global _global_scheduler_thread, _global_scheduler_stop_event
+
+     if schedule is None:
+         logger.warning(
+             "Schedule library not available, plugin background jobs disabled"
+         )
+         return
+
+     if _global_scheduler_thread is not None and _global_scheduler_thread.is_alive():
+         logger.debug("Global scheduler thread already running")
+         return
+
+     _global_scheduler_stop_event = threading.Event()
+
+     def scheduler_loop():
+         """Main scheduler loop that runs pending jobs."""
+         logger.debug("Global scheduler thread started")
+         while not _global_scheduler_stop_event.is_set():
+             if schedule:
+                 schedule.run_pending()
+             # Wait up to 1 second or until stop is requested
+             _global_scheduler_stop_event.wait(1)
+         logger.debug("Global scheduler thread stopped")
+
+     _global_scheduler_thread = threading.Thread(
+         target=scheduler_loop, name="global-plugin-scheduler", daemon=True
+     )
+     _global_scheduler_thread.start()
+     logger.info("Global plugin scheduler started")
+
+
1591
+ def stop_global_scheduler():
+     """
+     Stop the global scheduler thread.
+ 
+     Signals the scheduler thread to stop and waits for it to terminate.
+     Clears all scheduled jobs to prevent memory leaks.
+     """
+     global _global_scheduler_thread, _global_scheduler_stop_event
+ 
+     if _global_scheduler_thread is None:
+         return
+ 
+     logger.debug("Stopping global scheduler thread")
+ 
+     # Signal the thread to stop
+     if _global_scheduler_stop_event:
+         _global_scheduler_stop_event.set()
+ 
+     # Wait for thread to finish
+     if _global_scheduler_thread.is_alive():
+         _global_scheduler_thread.join(timeout=5)
+         if _global_scheduler_thread.is_alive():
+             logger.warning("Global scheduler thread did not stop within timeout")
+ 
+     # Clear all scheduled jobs
+     if schedule:
+         schedule.clear()
+ 
+     _global_scheduler_thread = None
+     _global_scheduler_stop_event = None
+     logger.info("Global plugin scheduler stopped")
+ 
+ 
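+ # Lifecycle sketch (illustrative; `my_config` is a hypothetical dict): the
+ # scheduler thread is started once by load_plugins() and stopped again by
+ # shutdown_plugins():
+ #
+ #     plugins = load_plugins(my_config)   # also starts the scheduler thread
+ #     ...                                 # pending jobs are checked about once per second
+ #     shutdown_plugins()                  # stops plugins, then the scheduler
+ 
+ 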
+ def load_plugins(passed_config=None):
+     """
+     Load, initialize, and return the application's active plugins according to the given or global configuration.
+ 
+     Loads core, custom, and community plugins (cloning or updating community repositories and installing their dependencies as needed), starts each plugin that is configured as active, and returns the resulting list sorted by plugin priority. Uses the global configuration when no configuration is passed and returns a cached result if plugins were already loaded.
+ 
+     Parameters:
+         passed_config (dict, optional): Configuration to use instead of the module-global config.
+ 
+     Returns:
+         list: Active plugin instances sorted by their `priority` attribute.
+     """
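+     # Expected config shape (illustrative sketch; section keys follow the code
+     # below, but the plugin names and repository URL are hypothetical):
+     #
+     #     plugins:                # core plugins
+     #       ping:
+     #         active: true
+     #     custom-plugins:
+     #       my_plugin:
+     #         active: true
+     #         priority: 5
+     #     community-plugins:
+     #       some-plugin:
+     #         active: true
+     #         repository: https://github.com/example/some-plugin.git
+     #         tag: v1.0.0         # or: branch: main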
+     global sorted_active_plugins
+     global plugins_loaded
+     global config
+ 
+     if plugins_loaded:
+         return sorted_active_plugins
+ 
+     logger.info("Checking plugin config...")
+ 
+     # Update the global config if a config is passed
+     if passed_config is not None:
+         config = passed_config
+ 
+     # Check if config is available
+     if config is None:
+         logger.error("No configuration available. Cannot load plugins.")
+         return []
+ 
+     # Import core plugins
+     from mmrelay.plugins.debug_plugin import Plugin as DebugPlugin
+     from mmrelay.plugins.drop_plugin import Plugin as DropPlugin
+     from mmrelay.plugins.health_plugin import Plugin as HealthPlugin
+     from mmrelay.plugins.help_plugin import Plugin as HelpPlugin
+     from mmrelay.plugins.map_plugin import Plugin as MapPlugin
+     from mmrelay.plugins.mesh_relay_plugin import Plugin as MeshRelayPlugin
+     from mmrelay.plugins.nodes_plugin import Plugin as NodesPlugin
+     from mmrelay.plugins.ping_plugin import Plugin as PingPlugin
+     from mmrelay.plugins.telemetry_plugin import Plugin as TelemetryPlugin
+     from mmrelay.plugins.weather_plugin import Plugin as WeatherPlugin
+ 
+     # Initial list of core plugins
+     core_plugins = [
+         HealthPlugin(),
+         MapPlugin(),
+         MeshRelayPlugin(),
+         PingPlugin(),
+         TelemetryPlugin(),
+         WeatherPlugin(),
+         HelpPlugin(),
+         NodesPlugin(),
+         DropPlugin(),
+         DebugPlugin(),
+     ]
+ 
+     plugins = core_plugins.copy()
+ 
+     # Process and load custom plugins
+     custom_plugins_config = config.get("custom-plugins", {})
+     custom_plugin_dirs = get_custom_plugin_dirs()
+ 
+     active_custom_plugins = [
+         plugin_name
+         for plugin_name, plugin_info in custom_plugins_config.items()
+         if plugin_info.get("active", False)
+     ]
+ 
+     if active_custom_plugins:
+         logger.debug(
+             f"Loading active custom plugins: {', '.join(active_custom_plugins)}"
+         )
+ 
+     # Only load custom plugins that are explicitly enabled
+     for plugin_name in active_custom_plugins:
+         plugin_found = False
+ 
+         # Try each directory in order
+         for custom_dir in custom_plugin_dirs:
+             plugin_path = os.path.join(custom_dir, plugin_name)
+             if os.path.exists(plugin_path):
+                 logger.debug(f"Loading custom plugin from: {plugin_path}")
+                 try:
+                     plugins.extend(
+                         load_plugins_from_directory(plugin_path, recursive=False)
+                     )
+                     plugin_found = True
+                     break
+                 except Exception:
+                     logger.exception(f"Failed to load custom plugin {plugin_name}")
+                     continue
+ 
+         if not plugin_found:
+             logger.warning(
+                 f"Custom plugin '{plugin_name}' not found in any of the plugin directories"
+             )
+ 
+     # Process and download community plugins
+     community_plugins_config = config.get("community-plugins", {})
+     community_plugin_dirs = get_community_plugin_dirs()
+ 
+     if not community_plugin_dirs:
+         logger.warning(
+             "No writable community plugin directories available; clone/update operations will be skipped."
+         )
+         community_plugins_dir = None
+     else:
+         community_plugins_dir = community_plugin_dirs[0]
+ 
+     # Determine which community plugins are active
+     active_community_plugins = [
+         plugin_name
+         for plugin_name, plugin_info in community_plugins_config.items()
+         if plugin_info.get("active", False)
+     ]
+ 
+     if active_community_plugins:
+         # Ensure all community plugin directories exist
+         for dir_path in community_plugin_dirs:
+             try:
+                 os.makedirs(dir_path, exist_ok=True)
+             except (OSError, PermissionError) as e:
+                 logger.warning(
+                     f"Cannot create community plugin directory {dir_path}: {e}"
+                 )
+ 
+         logger.debug(
+             f"Loading active community plugins: {', '.join(active_community_plugins)}"
+         )
+ 
+     # Only process community plugins if the config section is a dictionary
+     if isinstance(community_plugins_config, dict):
+         for plugin_name, plugin_info in community_plugins_config.items():
+             if not plugin_info.get("active", False):
+                 logger.debug(
+                     f"Skipping community plugin {plugin_name} - not active in config"
+                 )
+                 continue
+ 
+             repo_url = plugin_info.get("repository")
+ 
+             # Support both tag and branch parameters
+             tag = plugin_info.get("tag")
+             branch = plugin_info.get("branch")
+ 
+             # Determine what to use (tag, branch, or default)
+             if tag and branch:
+                 logger.warning(
+                     f"Both tag and branch specified for plugin {plugin_name}, using tag"
+                 )
+                 ref = {"type": "tag", "value": tag}
+             elif tag:
+                 ref = {"type": "tag", "value": tag}
+             elif branch:
+                 ref = {"type": "branch", "value": branch}
+             else:
+                 # Default to the main branch if neither is specified
+                 ref = {"type": "branch", "value": "main"}
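+             # Example (illustrative): `tag: v1.2.0` in config.yaml yields
+             # ref == {"type": "tag", "value": "v1.2.0"}; with neither tag nor
+             # branch configured, ref == {"type": "branch", "value": "main"}.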
+ 
+             if repo_url:
+                 if community_plugins_dir is None:
+                     logger.warning(
+                         "Skipping community plugin %s: no accessible plugin directory",
+                         plugin_name,
+                     )
+                     continue
+ 
+                 # Clone to the user directory by default
+                 repo_name = os.path.splitext(os.path.basename(repo_url.rstrip("/")))[0]
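+                 # e.g. (illustrative URL) "https://github.com/example/some-plugin.git"
+                 # yields repo_name == "some-plugin"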
+                 success = clone_or_update_repo(repo_url, ref, community_plugins_dir)
+                 if not success:
+                     logger.warning(
+                         f"Failed to clone/update plugin {plugin_name}, skipping"
+                     )
+                     continue
+                 repo_path = os.path.join(community_plugins_dir, repo_name)
+                 _install_requirements_for_repo(repo_path, repo_name)
+             else:
+                 logger.error("Repository URL not specified for a community plugin")
+                 logger.error("Please specify the repository URL in config.yaml")
+                 continue
+ 
+     # Only load community plugins that are explicitly enabled
+     for plugin_name in active_community_plugins:
+         plugin_info = community_plugins_config[plugin_name]
+         repo_url = plugin_info.get("repository")
+         if repo_url:
+             # Extract the repository name from the URL
+             repo_name = os.path.splitext(os.path.basename(repo_url.rstrip("/")))[0]
+ 
+             # Try each directory in order
+             plugin_found = False
+             for dir_path in community_plugin_dirs:
+                 plugin_path = os.path.join(dir_path, repo_name)
+                 if os.path.exists(plugin_path):
+                     logger.info(f"Loading community plugin from: {plugin_path}")
+                     try:
+                         plugins.extend(
+                             load_plugins_from_directory(plugin_path, recursive=True)
+                         )
+                         plugin_found = True
+                         break
+                     except Exception:
+                         logger.exception(
+                             "Failed to load community plugin %s", repo_name
+                         )
+                         continue
+ 
+             if not plugin_found:
+                 logger.warning(
+                     f"Community plugin '{repo_name}' not found in any of the plugin directories"
+                 )
+         else:
+             logger.error(
+                 "Repository URL not specified for community plugin: %s",
+                 plugin_name,
+             )
+ 
+     # Start the global scheduler for all plugins
+     start_global_scheduler()
+ 
+     # Filter and sort active plugins by priority
+     active_plugins = []
+     for plugin in plugins:
+         plugin_name = getattr(plugin, "plugin_name", plugin.__class__.__name__)
+ 
+         # Determine whether the plugin is active based on the configuration
+         if plugin in core_plugins:
+             # Core plugins: default to inactive unless specified otherwise
+             plugin_config = config.get("plugins", {}).get(plugin_name, {})
+             is_active = plugin_config.get("active", False)
+         else:
+             # Custom and community plugins: default to inactive unless specified
+             if plugin_name in config.get("custom-plugins", {}):
+                 plugin_config = config.get("custom-plugins", {}).get(plugin_name, {})
+             elif plugin_name in community_plugins_config:
+                 plugin_config = community_plugins_config.get(plugin_name, {})
+             else:
+                 plugin_config = {}
+ 
+             is_active = plugin_config.get("active", False)
+ 
+         if is_active:
+             plugin.priority = plugin_config.get(
+                 "priority", getattr(plugin, "priority", 100)
+             )
+             try:
+                 plugin.start()
+             except Exception:
+                 logger.exception(f"Error starting plugin {plugin_name}")
+                 stop_callable = getattr(plugin, "stop", None)
+                 if callable(stop_callable):
+                     try:
+                         stop_callable()
+                     except Exception:
+                         logger.debug(
+                             "Error while running stop() for failed plugin %s",
+                             plugin_name,
+                         )
+                 continue
+             active_plugins.append(plugin)
+ 
+     sorted_active_plugins = sorted(active_plugins, key=lambda plugin: plugin.priority)
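+     # Note: sorted() is ascending, so a lower `priority` value runs earlier;
+     # plugins without an explicit priority keep their class default or 100.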
+ 
+     # Log all loaded plugins
+     if sorted_active_plugins:
+         plugin_names = [
+             getattr(plugin, "plugin_name", plugin.__class__.__name__)
+             for plugin in sorted_active_plugins
+         ]
+         logger.info(f"Loaded: {', '.join(plugin_names)}")
+     else:
+         logger.info("Loaded: none")
+ 
+     plugins_loaded = True  # Set the flag to indicate that plugins have been loaded
+     return sorted_active_plugins
+ 
+ 
+ def shutdown_plugins() -> None:
+     """
+     Stop all active plugins and reset loader state to allow a clean reload.
+ 
+     Calls each plugin's stop() method if present; exceptions from stop() are caught and logged. Plugins that do not implement stop() are skipped. After attempting to stop all plugins, stops the global scheduler, clears the active plugin list, and marks plugins as not loaded.
+     """
+     global sorted_active_plugins, plugins_loaded
+ 
+     if not sorted_active_plugins:
+         plugins_loaded = False
+         return
+ 
+     logger.info("Stopping %d plugin(s)...", len(sorted_active_plugins))
+     for plugin in list(sorted_active_plugins):
+         plugin_name = getattr(plugin, "plugin_name", plugin.__class__.__name__)
+         stop_callable = getattr(plugin, "stop", None)
+         if callable(stop_callable):
+             try:
+                 stop_callable()
+             except Exception:
+                 logger.exception("Error stopping plugin %s", plugin_name)
+         else:
+             logger.debug(
+                 "Plugin %s does not implement stop(); skipping lifecycle cleanup",
+                 plugin_name,
+             )
+ 
+     # Stop the global scheduler after all plugins are stopped
+     stop_global_scheduler()
+ 
+     sorted_active_plugins = []
+     plugins_loaded = False