updates2mqtt 1.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- updates2mqtt/__init__.py +5 -0
- updates2mqtt/__main__.py +6 -0
- updates2mqtt/app.py +203 -0
- updates2mqtt/config.py +135 -0
- updates2mqtt/hass_formatter.py +75 -0
- updates2mqtt/integrations/__init__.py +1 -0
- updates2mqtt/integrations/docker.py +402 -0
- updates2mqtt/integrations/git_utils.py +64 -0
- updates2mqtt/model.py +98 -0
- updates2mqtt/mqtt.py +286 -0
- updates2mqtt/py.typed +0 -0
- updates2mqtt-1.3.4.dist-info/METADATA +236 -0
- updates2mqtt-1.3.4.dist-info/RECORD +16 -0
- updates2mqtt-1.3.4.dist-info/WHEEL +4 -0
- updates2mqtt-1.3.4.dist-info/entry_points.txt +2 -0
- updates2mqtt-1.3.4.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,402 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import subprocess
|
|
3
|
+
import time
|
|
4
|
+
import typing
|
|
5
|
+
from collections.abc import AsyncGenerator, Callable
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, cast
|
|
9
|
+
|
|
10
|
+
import docker
|
|
11
|
+
import docker.errors
|
|
12
|
+
import structlog
|
|
13
|
+
from docker.models.containers import Container
|
|
14
|
+
from hishel.httpx import SyncCacheClient
|
|
15
|
+
|
|
16
|
+
from updates2mqtt.config import DockerConfig, DockerPackageUpdateInfo, NodeConfig, PackageUpdateInfo, UpdateInfoConfig
|
|
17
|
+
from updates2mqtt.model import Discovery, ReleaseProvider
|
|
18
|
+
|
|
19
|
+
from .git_utils import git_check_update_available, git_pull, git_timestamp, git_trust
|
|
20
|
+
|
|
21
|
+
if typing.TYPE_CHECKING:
|
|
22
|
+
from docker.models.images import Image, RegistryData
|
|
23
|
+
|
|
24
|
+
# distinguish docker build from docker pull?
|
|
25
|
+
|
|
26
|
+
log = structlog.get_logger()
|
|
27
|
+
NO_KNOWN_IMAGE = "UNKNOWN"
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class DockerComposeCommand(Enum):
    """Compose subcommands this provider knows how to execute."""

    BUILD = "build"  # rebuild the image (used for git-checkout based containers)
    UP = "up"  # (re)create and start the container
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def safe_json_dt(t: float | None) -> str | None:
|
|
36
|
+
return time.strftime("%Y-%m-%dT%H:%M:%S.0000", time.gmtime(t)) if t else None
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class DockerProvider(ReleaseProvider):
|
|
40
|
+
def __init__(self, cfg: DockerConfig, common_pkg_cfg: UpdateInfoConfig, node_cfg: NodeConfig) -> None:
    """Create a Docker release provider bound to the local Docker daemon.

    Curated package metadata comes from `common_pkg_cfg`; additional
    metadata is discovered once at construction time.
    """
    super().__init__("docker")
    self.cfg: DockerConfig = cfg
    self.node_cfg: NodeConfig = node_cfg
    # Connect using the standard environment (DOCKER_HOST, certs, ...).
    self.client: docker.DockerClient = docker.from_env()
    # Curated metadata from configuration, keyed by package name.
    self.common_pkgs: dict[str, PackageUpdateInfo] = common_pkg_cfg.common_packages if common_pkg_cfg else {}
    # TODO: refresh discovered packages periodically
    self.discovered_pkgs: dict[str, PackageUpdateInfo] = self.discover_metadata()
|
|
48
|
+
|
|
49
|
+
def update(self, discovery: Discovery) -> bool:
    """Fetch the latest artifact for *discovery*, then restart it via compose.

    Records the attempt time on the discovery before starting; returns
    True when the restart step succeeded.
    """
    logger: Any = self.log.bind(container=discovery.name, action="update")
    logger.info("Updating - last at %s", discovery.update_last_attempt)
    discovery.update_last_attempt = time.time()
    self.fetch(discovery)
    result = self.restart(discovery)
    logger.info("Updated - recorded at %s", discovery.update_last_attempt)
    return result
|
|
57
|
+
|
|
58
|
+
def fetch(self, discovery: Discovery) -> None:
    """Fetch the newest artifact for a discovery.

    Pulls the image when the discovery allows pulling; otherwise, for
    buildable containers, refreshes the associated git checkout (when the
    remote is ahead) and rebuilds via compose.
    """
    logger = self.log.bind(container=discovery.name, action="fetch")

    image_ref: str | None = discovery.custom.get("image_ref")
    platform: str | None = discovery.custom.get("platform")
    if discovery.custom.get("can_pull") and image_ref:
        logger.info("Pulling", image_ref=image_ref, platform=platform)
        image: Image = self.client.images.pull(image_ref, platform=platform, all_tags=False)
        if image:
            logger.info("Pulled", image_id=image.id, image_ref=image_ref, platform=platform)
        else:
            logger.warn("Unable to pull", image_ref=image_ref, platform=platform)
    elif discovery.can_build:
        compose_path: str | None = discovery.custom.get("compose_path")
        git_repo_path: str | None = discovery.custom.get("git_repo_path")
        if not compose_path or not git_repo_path:
            logger.warn("No compose path or git repo path configured, skipped build")
            return
        # A relative repo path is resolved against the compose project dir.
        # (The guard above guarantees both paths are non-empty, so the
        # original redundant "compose_path and" test and the dead
        # "No compose path configured" branch were removed.)
        if Path(git_repo_path).is_absolute():
            full_repo_path = Path(git_repo_path)
        else:
            full_repo_path = Path(compose_path) / git_repo_path
        if git_check_update_available(full_repo_path, Path(self.node_cfg.git_path)):
            git_pull(full_repo_path, Path(self.node_cfg.git_path))
        self.build(discovery, compose_path)
|
|
86
|
+
|
|
87
|
+
def build(self, discovery: Discovery, compose_path: str) -> bool:
    """Rebuild the container image via `docker compose build`.

    Returns True when the compose build exits successfully.
    """
    logger = self.log.bind(container=discovery.name, action="build")
    logger.info("Building")
    return self.execute_compose(DockerComposeCommand.BUILD, "", compose_path, logger)
|
|
91
|
+
|
|
92
|
+
def execute_compose(self, command: DockerComposeCommand, args: str, cwd: str | None, logger: structlog.BoundLogger) -> bool:
    """Run a docker-compose subcommand in *cwd*; True on exit code 0.

    Honors the configured compose flavor ("docker-compose" for v1, the
    "docker compose" plugin otherwise).
    """
    if not cwd or not Path(cwd).is_dir():
        # Fix: log the subcommand name ("build"/"up") instead of the enum
        # repr ("DockerComposeCommand.BUILD").
        logger.warn("Invalid compose path, skipped %s", command.value)
        return False
    logger.info(f"Executing compose {command.value} {args}")
    cmd: str = "docker-compose" if self.cfg.compose_version == "v1" else "docker compose"
    cmd = cmd + " " + command.value
    if args:
        cmd = cmd + " " + args

    # NOTE: shell=True is required since "docker compose" is two words; the
    # command string is built only from config/enum values, never from
    # untrusted input — keep it that way.
    proc = subprocess.run(cmd, check=False, shell=True, cwd=cwd)
    if proc.returncode == 0:
        logger.info(f"{command.value} via compose successful")
        return True
    logger.warn(
        f"{command.value} failed: %s",
        proc.returncode,
    )
    return False
|
|
111
|
+
|
|
112
|
+
def restart(self, discovery: Discovery) -> bool:
    """Recreate and start the container via `docker compose up --detach`."""
    logger = self.log.bind(container=discovery.name, action="restart")
    return self.execute_compose(
        DockerComposeCommand.UP,
        "--detach --yes",
        discovery.custom.get("compose_path"),
        logger,
    )
|
|
116
|
+
|
|
117
|
+
def rescan(self, discovery: Discovery) -> Discovery | None:
    """Re-analyze a known container and refresh its cached Discovery.

    Returns the refreshed Discovery, or None when the container has gone
    away, the Docker API errored, or analysis produced nothing.
    """
    logger = self.log.bind(container=discovery.name, action="rescan")
    try:
        container: Container = self.client.containers.get(discovery.name)
        if container:
            refreshed = self.analyze(container, discovery.session, original_discovery=discovery)
            if refreshed is not None:
                self.discoveries[refreshed.name] = refreshed
                return refreshed
        logger.warn("Unable to find container for rescan")
    except docker.errors.NotFound:
        logger.warn("Container not found in Docker")
    except docker.errors.APIError:
        logger.exception("Docker API error retrieving container")
    return None
|
|
132
|
+
|
|
133
|
+
def analyze(self, c: Container, session: str, original_discovery: Discovery | None = None) -> Discovery | None:
    """Inspect a container and build a Discovery describing its update state.

    Returns None when the container is unusable (missing attrs or name),
    explicitly ignored via UPD2MQTT_IGNORE, or when analysis fails.
    *original_discovery* carries over bookkeeping (last update attempt)
    from a previous scan of the same container.
    """
    logger = self.log.bind(container=c.name, action="analyze")
    image_ref = None
    image_name = None
    local_versions = None
    if c.attrs is None:
        logger.warn("No container attributes found, discovery rejected")  # type: ignore[unreachable]
        return None
    if c.name is None:
        logger.warn("No container name found, discovery rejected")
        return None

    def env_override(env_var: str, default: Any) -> Any | None:
        # A container env var wins over the supplied default when present.
        return default if c_env.get(env_var) is None else c_env.get(env_var)

    env_str = c.attrs["Config"]["Env"]
    # Parse "KEY=VALUE" entries; entries containing "==" are skipped entirely.
    # NOTE(review): maxsplit=1 already copes with "=" inside values, so the
    # "==" filter silently drops those env vars — confirm this is intended.
    c_env = dict(env.split("=", maxsplit=1) for env in env_str if "==" not in env)
    ignore_container: str | None = env_override("UPD2MQTT_IGNORE", "FALSE")
    if ignore_container and ignore_container.upper() in ("1", "TRUE"):
        logger.info("Container ignored due to UPD2MQTT_IGNORE setting")
        return None

    image: Image | None = c.image
    if image is not None and image.tags and len(image.tags) > 0:
        # NOTE(review): image_name is only derived in the fallback branch
        # below, so it stays None on this (common) path and
        # default_metadata() gets None — verify whether that is intended.
        image_ref = image.tags[0]
    else:
        image_ref = c.attrs.get("Config", {}).get("Image")
        if image_ref is None:
            logger.warn("No image or image attributes found")
        else:
            try:
                image_name = image_ref.split(":")[0]
            except Exception as e:
                logger.warn("No tags found (%s) : %s", image, e)
    if image is not None and image.attrs is not None:
        try:
            # RepoDigests entries look like "repo@sha256:<64 hex chars>";
            # keep the 12-char short digest after the 7-char "sha256:" prefix.
            local_versions = [i.split("@")[1][7:19] for i in image.attrs["RepoDigests"]]
        except Exception as e:
            logger.warn("Cannot determine local version: %s", e)
            logger.warn("RepoDigests=%s", image.attrs.get("RepoDigests"))

    platform: str = "Unknown"
    pkg_info: PackageUpdateInfo = self.default_metadata(image_name)

    try:
        # Env vars override curated metadata for picture / release notes.
        picture_url = env_override("UPD2MQTT_PICTURE", pkg_info.logo_url)
        relnotes_url = env_override("UPD2MQTT_RELNOTES", pkg_info.release_notes_url)
        if image is not None and image.attrs is not None:
            # e.g. "linux/arm64/v8"; filter(None, ...) drops a missing Variant.
            platform = "/".join(
                filter(
                    None,
                    [
                        image.attrs["Os"],
                        image.attrs["Architecture"],
                        image.attrs.get("Variant"),
                    ],
                ),
            )

        reg_data: RegistryData | None = None
        latest_version: str | None = NO_KNOWN_IMAGE
        local_version: str | None = NO_KNOWN_IMAGE

        if image_ref and local_versions:
            # Query the registry for the latest digest, with limited retries;
            # abort early when a shutdown has been requested.
            retries_left = 3
            while reg_data is None and retries_left > 0 and not self.stopped.is_set():
                try:
                    reg_data = self.client.images.get_registry_data(image_ref)
                    # short_id is "sha256:<12 hex>"; strip the prefix.
                    latest_version = reg_data.short_id[7:] if reg_data else None
                except docker.errors.APIError as e:
                    retries_left -= 1
                    # NOTE(review): a client error is logged but the loop still
                    # retries while retries_left > 0 — confirm that is intended.
                    if retries_left == 0 or e.is_client_error():
                        logger.warn("Failed to fetch registry data: [%s] %s", e.errno, e.explanation)
                    else:
                        logger.debug("Failed to fetch registry data, retrying: %s", e)

        if local_versions:
            # might be multiple RepoDigests if image has been pulled multiple times with diff manifests
            local_version = latest_version if latest_version in local_versions else local_versions[0]

        def save_if_set(key: str, val: datetime.datetime | str | None) -> None:
            # Store only present values so "key in custom" tests stay meaningful.
            if val is not None:
                custom[key] = val

        image_ref = image_ref or ""

        custom: dict[str, str | datetime.datetime | bool] = {}
        custom["platform"] = platform
        custom["image_ref"] = image_ref
        save_if_set("compose_path", c.labels.get("com.docker.compose.project.working_dir"))
        save_if_set("compose_version", c.labels.get("com.docker.compose.version"))
        save_if_set("git_repo_path", c_env.get("UPD2MQTT_GIT_REPO_PATH"))
        save_if_set("apt_pkgs", c_env.get("UPD2MQTT_APT_PKGS"))

        if c_env.get("UPD2MQTT_UPDATE") == "AUTO":
            logger.debug("Auto update policy detected")
            update_policy = "Auto"
        else:
            update_policy = "Passive"

        if custom.get("git_repo_path") and custom.get("compose_path"):
            # Relative repo paths resolve against the compose project dir.
            full_repo_path: Path = Path(cast("str", custom.get("compose_path"))).joinpath(
                cast("str", custom.get("git_repo_path"))
            )

            git_trust(full_repo_path, Path(self.node_cfg.git_path))
            save_if_set("git_local_timestamp", git_timestamp(full_repo_path, Path(self.node_cfg.git_path)))
        features: list[str] = []
        # Pull requires config permission, a usable ref and at least one known digest.
        can_pull: bool = (
            self.cfg.allow_pull
            and image_ref is not None
            and image_ref != ""
            and (local_version != NO_KNOWN_IMAGE or latest_version != NO_KNOWN_IMAGE)
        )
        can_build: bool = self.cfg.allow_build and custom.get("git_repo_path") is not None
        can_restart: bool = self.cfg.allow_restart and custom.get("compose_path") is not None
        can_update: bool = False
        if can_pull or can_build or can_restart:
            # public install-neutral capabilities and Home Assistant features
            can_update = True
            features.append("INSTALL")
            features.append("PROGRESS")
            if relnotes_url:
                features.append("RELEASE_NOTES")
        custom["can_pull"] = can_pull

        return Discovery(
            self,
            c.name,
            session,
            entity_picture_url=picture_url,
            release_url=relnotes_url,
            current_version=local_version,
            update_policy=update_policy,
            update_last_attempt=(original_discovery and original_discovery.update_last_attempt) or None,
            # Fall back to the local digest when the registry was unreachable,
            # so consumers do not see a phantom "update available".
            latest_version=latest_version if latest_version != NO_KNOWN_IMAGE else local_version,
            title_template="Docker image update for {name} on {node}",
            device_icon=self.cfg.device_icon,
            can_update=can_update,
            can_build=can_build,
            can_restart=can_restart,
            status=(c.status == "running" and "on") or "off",
            custom=custom,
            features=features,
        )
    except Exception:
        # Catch-all: a single bad container must not abort the whole scan.
        logger.exception("Docker Discovery Failure", container_attrs=c.attrs)
        return None
|
|
281
|
+
|
|
282
|
+
async def scan(self, session: str) -> AsyncGenerator[Discovery]:  # type: ignore # noqa: PGH003
    """Analyze every running container, yielding a Discovery for each match.

    Stops early when a shutdown has been requested; logs counts at the end.
    """
    logger = self.log.bind(session=session, action="scan")
    seen = 0
    produced = 0
    for container in self.client.containers.list():
        if self.stopped.is_set():
            logger.info(f"Shutdown detected, aborting scan at {container}")
            break
        seen += 1
        discovery = self.analyze(cast("Container", container), session)
        if discovery:
            self.discoveries[discovery.name] = discovery
            produced += 1
            yield discovery
    logger.info("Completed", container_count=seen, result_count=produced)
|
|
296
|
+
|
|
297
|
+
def command(self, discovery_name: str, command: str, on_update_start: Callable, on_update_end: Callable) -> bool:
    """Handle an MQTT command for a discovered container.

    Only "install" is supported. The start/end callbacks bracket the
    update so progress can be published; returns True when the update
    succeeded and a rescan produced a fresh Discovery.
    """
    logger = self.log.bind(container=discovery_name, action="command", command=command)
    logger.info("Executing")
    discovery: Discovery | None = None
    updated: bool = False
    try:
        discovery = self.resolve(discovery_name)
        # Guard-clause chain replaces the original nested if/else pyramid.
        if discovery is None:
            logger.warn("Unknown entity", entity=discovery_name)
        elif command != "install":
            logger.warn("Unknown command")
        elif not discovery.can_update:
            logger.warning("Update not supported for this container")
        else:
            rediscovery: Discovery | None = None
            logger.info("Starting update ...")
            on_update_start(discovery)
            if self.update(discovery):
                logger.info("Rescanning ...")
                rediscovery = self.rescan(discovery)
                updated = rediscovery is not None
                logger.info("Rescanned %s: %s", updated, rediscovery)
            else:
                logger.info("Rescan with no result")
            # Report on the freshest view of the container we have.
            on_update_end(rediscovery or discovery)
    except Exception:
        logger.exception("Failed to handle", discovery_name=discovery_name, command=command)
        if discovery:
            on_update_end(discovery)
    return updated
|
|
328
|
+
|
|
329
|
+
def resolve(self, discovery_name: str) -> Discovery | None:
    """Look up a previously cached Discovery by container name."""
    return self.discoveries.get(discovery_name)
|
|
331
|
+
|
|
332
|
+
def hass_state_format(self, discovery: Discovery) -> dict:  # noqa: ARG002
    """Return extra state-payload fields for Home Assistant.

    Intentionally empty: the HA MQTT update entity enforces a strict JSON
    schema, so provider-specific fields (docker_image_ref,
    last_update_attempt, can_pull/can_build/can_restart, git_repo_path,
    compose_path, platform) are deliberately not emitted.
    """
    return {}
|
|
344
|
+
|
|
345
|
+
def default_metadata(self, image_name: str | None) -> PackageUpdateInfo:
    """Return curated metadata for *image_name*.

    Configured common packages take priority over auto-discovered ones;
    when neither matches, a bare PackageUpdateInfo with the configured
    default picture is returned.
    """
    relnotes_url: str | None = None
    picture_url: str | None = self.cfg.default_entity_picture_url

    if image_name is not None:
        # Fix: the two search loops were duplicated; search both sources in
        # priority order with one loop (log messages preserved verbatim).
        for label, pkgs in (("common", self.common_pkgs), ("discovered", self.discovered_pkgs)):
            for pkg in pkgs.values():
                if pkg.docker is not None and pkg.docker.image_name is not None and pkg.docker.image_name == image_name:
                    self.log.debug(
                        f"Found {label} package", pkg=pkg.docker.image_name, logo_url=picture_url, relnotes_url=relnotes_url
                    )
                    return pkg

    self.log.debug("No common or discovered package found", image_name=image_name)
    return PackageUpdateInfo(
        DockerPackageUpdateInfo(image_name or NO_KNOWN_IMAGE), logo_url=picture_url, release_notes_url=relnotes_url
    )
|
|
367
|
+
|
|
368
|
+
def discover_metadata(self) -> dict[str, PackageUpdateInfo]:
    """Collect package metadata from any enabled external catalogs.

    Currently only the linuxserver.io API source is supported.
    """
    discovered: dict[str, PackageUpdateInfo] = {}
    lsio_cfg = self.cfg.discover_metadata.get("linuxserver.io")
    if lsio_cfg and lsio_cfg.enabled:
        linuxserver_metadata(discovered, cache_ttl=lsio_cfg.cache_ttl)
    return discovered
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
def linuxserver_metadata_api(cache_ttl: int) -> dict:
    """Fetch and cache linuxserver.io API call for image metadata"""
    try:
        # The cache-control header drives hishel's response caching.
        with SyncCacheClient(headers=[("cache-control", f"max-age={cache_ttl}")]) as client:
            log.debug(f"Fetching linuxserver.io metadata from API, cache_ttl={cache_ttl}")
            response = client.get("https://api.linuxserver.io/api/v1/images?include_config=false&include_deprecated=false")
            return response.json()
    except Exception:
        log.exception("Failed to fetch linuxserver.io metadata")
        return {}
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
def linuxserver_metadata(discovered_pkgs: dict[str, PackageUpdateInfo], cache_ttl: int) -> None:
    """Fetch linuxserver.io metadata for all their images via their API.

    Adds a PackageUpdateInfo per repository into *discovered_pkgs*
    (mutated in place); existing entries are never overwritten.
    """
    repos: list = linuxserver_metadata_api(cache_ttl).get("data", {}).get("repositories", {}).get("linuxserver", [])
    added = 0
    for repo in repos:
        image_name = repo.get("name")
        if image_name and image_name not in discovered_pkgs:
            # Fix: repo["project_logo"] / repo["github_url"] raised KeyError
            # on entries missing those fields, aborting the whole discovery;
            # use .get() and only build the releases URL when a repo URL exists.
            github_url = repo.get("github_url")
            discovered_pkgs[image_name] = PackageUpdateInfo(
                DockerPackageUpdateInfo(f"lscr.io/linuxserver/{image_name}"),
                logo_url=repo.get("project_logo"),
                release_notes_url=f"{github_url}/releases" if github_url else None,
            )
            added += 1
            log.debug("Added linuxserver.io package", pkg=image_name)
    log.info(f"Added {added} linuxserver.io package details")
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import subprocess
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
import structlog
|
|
6
|
+
|
|
7
|
+
log = structlog.get_logger()
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def git_trust(repo_path: Path, git_path: Path) -> bool:
    """Mark *repo_path* as a git safe.directory so later git commands work
    even when the checkout is owned by a different user.

    Returns True on success, False (with a warning logged) on any failure.
    """
    try:
        # Fix: argument-list form instead of a shell string, so paths with
        # spaces or shell metacharacters cannot break or inject the command.
        subprocess.run(
            [str(git_path), "config", "--global", "--add", "safe.directory", str(repo_path)],
            check=True,
            cwd=repo_path,
        )
        return True
    except Exception as e:
        log.warn("GIT Unable to trust repo at %s: %s", repo_path, e)
        return False
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def git_timestamp(repo_path: Path, git_path: Path) -> datetime.datetime | None:
    """Return the committer timestamp of the last commit in *repo_path*,
    or None when it cannot be determined.

    Uses `--format=%cI` (strict ISO 8601) so the output parses directly
    with datetime.fromisoformat.
    """
    result = None
    try:
        # Fix: argument-list form instead of shell=True, so an unusual
        # git_path or repo_path cannot break the command line.
        result = subprocess.run(
            [str(git_path), "log", "-1", "--format=%cI", "--no-show-signature"],
            cwd=repo_path,
            text=True,
            capture_output=True,
            check=True,
        )
        return datetime.datetime.fromisoformat(result.stdout.strip())
    except Exception as e:
        log.warn("GIT Unable to parse timestamp at %s - %s: %s", repo_path, result.stdout if result else "<NO RESULT>", e)
        return None
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def git_check_update_available(repo_path: Path, git_path: Path, timeout: int = 120) -> bool:
    """Return True when the remote tracking branch is ahead of the local
    checkout at *repo_path*.

    Returns False on any failure (fetch error, timeout, not a repo).
    """
    result = None
    try:
        # Fix: the old 'fetch;status' shell chain ignored a failed fetch and
        # could report against stale refs. Run the two commands separately
        # (argument-list form, no shell) and abort if the fetch fails.
        subprocess.run(
            [str(git_path), "fetch"],
            capture_output=True,
            text=True,
            check=True,
            cwd=repo_path,
            timeout=timeout,
        )
        result = subprocess.run(
            [str(git_path), "status", "-uno"],
            capture_output=True,
            text=True,
            check=True,
            cwd=repo_path,
            timeout=timeout,
        )
        if "Your branch is behind" in result.stdout:
            log.info("Local git repo update available", path=repo_path, status=result.stdout.strip())
            return True
    except Exception as e:
        log.warn("GIT Unable to check status %s: %s", result.stdout if result else "<NO RESULT>", e)
    return False
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def git_pull(repo_path: Path, git_path: Path) -> bool:
    """Run `git pull` in *repo_path*; True on success, False otherwise.

    A pull is bounded at 300 seconds so a hung remote cannot stall the
    update loop.
    """
    log.info("GIT Pulling git at %s", repo_path)
    try:
        # Fix: argument-list form instead of shell=True, and catch
        # TimeoutExpired — previously it propagated and crashed the caller.
        proc = subprocess.run([str(git_path), "pull"], check=False, cwd=repo_path, timeout=300)
    except subprocess.TimeoutExpired:
        log.warn("GIT pull at %s timed out", repo_path)
        return False
    if proc.returncode == 0:
        log.info("GIT pull at %s successful", repo_path)
        return True
    log.warn("GIT pull at %s failed: %s", repo_path, proc.returncode)
    return False
|
updates2mqtt/model.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
from abc import abstractmethod
|
|
2
|
+
from collections.abc import AsyncGenerator, Callable
|
|
3
|
+
from threading import Event
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import structlog
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Discovery:
|
|
10
|
+
"""Discovered component from a scan"""
|
|
11
|
+
|
|
12
|
+
def __init__(
|
|
13
|
+
self,
|
|
14
|
+
provider: "ReleaseProvider",
|
|
15
|
+
name: str,
|
|
16
|
+
session: str,
|
|
17
|
+
entity_picture_url: str | None = None,
|
|
18
|
+
current_version: str | None = None,
|
|
19
|
+
latest_version: str | None = None,
|
|
20
|
+
can_update: bool = False,
|
|
21
|
+
can_build: bool = False,
|
|
22
|
+
can_restart: bool = False,
|
|
23
|
+
status: str = "on",
|
|
24
|
+
update_policy: str | None = None,
|
|
25
|
+
update_last_attempt: float | None = None,
|
|
26
|
+
release_url: str | None = None,
|
|
27
|
+
release_summary: str | None = None,
|
|
28
|
+
title_template: str = "Update for {name} on {node}",
|
|
29
|
+
device_icon: str | None = None,
|
|
30
|
+
custom: dict[str, Any] | None = None,
|
|
31
|
+
features: list[str] | None = None,
|
|
32
|
+
) -> None:
|
|
33
|
+
self.provider: ReleaseProvider = provider
|
|
34
|
+
self.source_type: str = provider.source_type
|
|
35
|
+
self.session: str = session
|
|
36
|
+
self.name: str = name
|
|
37
|
+
self.entity_picture_url: str | None = entity_picture_url
|
|
38
|
+
self.current_version: str | None = current_version
|
|
39
|
+
self.latest_version: str | None = latest_version
|
|
40
|
+
self.can_update: bool = can_update
|
|
41
|
+
self.can_build: bool = can_build
|
|
42
|
+
self.can_restart: bool = can_restart
|
|
43
|
+
self.release_url: str | None = release_url
|
|
44
|
+
self.release_summary: str | None = release_summary
|
|
45
|
+
self.title_template: str | None = title_template
|
|
46
|
+
self.device_icon: str | None = device_icon
|
|
47
|
+
self.status: str = status
|
|
48
|
+
self.update_policy: str | None = update_policy
|
|
49
|
+
self.update_last_attempt: float | None = update_last_attempt
|
|
50
|
+
self.custom: dict[str, Any] = custom or {}
|
|
51
|
+
self.features: list[str] = features or []
|
|
52
|
+
|
|
53
|
+
def __repr__(self) -> str:
|
|
54
|
+
"""Build a custom string representation"""
|
|
55
|
+
return f"Discovery('{self.name}','{self.source_type}',current={self.current_version},latest={self.latest_version})"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class ReleaseProvider:
    """Abstract base class for release providers, such as container scanners or package managers API calls"""

    # NOTE(review): this class does not inherit abc.ABC, so @abstractmethod
    # is not enforced at instantiation time — confirm whether that is intended.

    def __init__(self, source_type: str = "base") -> None:
        # Short integration identifier (e.g. "docker"); bound into the logger
        # and copied onto each Discovery this provider produces.
        self.source_type: str = source_type
        # Most recent Discovery per component name, maintained by scan()/rescan().
        self.discoveries: dict[str, Discovery] = {}
        self.log: Any = structlog.get_logger().bind(integration=self.source_type)
        # Cooperative shutdown flag checked by long-running loops.
        self.stopped = Event()

    def stop(self) -> None:
        """Stop any loops or background tasks"""
        self.log.info("Asking release provider to stop", source_type=self.source_type)
        self.stopped.set()

    @abstractmethod
    def update(self, discovery: Discovery) -> bool:
        """Attempt to update the component version"""

    @abstractmethod
    def rescan(self, discovery: Discovery) -> Discovery | None:
        """Rescan a previously discovered component"""

    @abstractmethod
    async def scan(self, session: str) -> AsyncGenerator[Discovery]:
        """Scan for components to monitor"""

    def hass_config_format(self, discovery: Discovery) -> dict:
        # Default: no extra Home Assistant discovery-config fields.
        _ = discovery
        return {}

    def hass_state_format(self, discovery: Discovery) -> dict:
        # Default: no extra state-payload fields.
        _ = discovery
        return {}

    @abstractmethod
    def command(self, discovery_name: str, command: str, on_update_start: Callable, on_update_end: Callable) -> bool:
        """Execute a command on a discovered component"""

    @abstractmethod
    def resolve(self, discovery_name: str) -> Discovery | None:
        """Resolve a discovered component by name"""
|