updates2mqtt 1.6.0-py3-none-any.whl → 1.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- updates2mqtt/app.py +10 -7
- updates2mqtt/config.py +30 -19
- updates2mqtt/hass_formatter.py +7 -17
- updates2mqtt/integrations/docker.py +148 -149
- updates2mqtt/integrations/docker_enrich.py +344 -0
- updates2mqtt/model.py +134 -16
- updates2mqtt/mqtt.py +26 -5
- {updates2mqtt-1.6.0.dist-info → updates2mqtt-1.7.0.dist-info}/METADATA +9 -13
- updates2mqtt-1.7.0.dist-info/RECORD +16 -0
- {updates2mqtt-1.6.0.dist-info → updates2mqtt-1.7.0.dist-info}/WHEEL +2 -2
- updates2mqtt-1.6.0.dist-info/RECORD +0 -15
- {updates2mqtt-1.6.0.dist-info → updates2mqtt-1.7.0.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,344 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
import structlog
|
|
5
|
+
from docker.auth import resolve_repository_name
|
|
6
|
+
from hishel.httpx import SyncCacheClient
|
|
7
|
+
from httpx import Response
|
|
8
|
+
from omegaconf import MissingMandatoryValue, OmegaConf, ValidationError
|
|
9
|
+
|
|
10
|
+
from updates2mqtt.config import (
|
|
11
|
+
NO_KNOWN_IMAGE,
|
|
12
|
+
PKG_INFO_FILE,
|
|
13
|
+
DockerConfig,
|
|
14
|
+
DockerPackageUpdateInfo,
|
|
15
|
+
PackageUpdateInfo,
|
|
16
|
+
UpdateInfoConfig,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
log = structlog.get_logger()
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class PackageEnricher:
    """Base lookup of extra package metadata (logo, release notes) keyed by docker image."""

    def __init__(self, docker_cfg: DockerConfig) -> None:
        # Known package metadata, keyed by package name; subclasses populate in initialize()
        self.pkgs: dict[str, PackageUpdateInfo] = {}
        self.cfg: DockerConfig = docker_cfg
        self.log: Any = structlog.get_logger().bind(integration="docker")

    def initialize(self) -> None:
        """Hook for subclasses to populate self.pkgs; no-op by default."""

    def enrich(self, image_name: str | None, image_ref: str | None, log: Any) -> PackageUpdateInfo | None:
        """Return the first known package whose docker image matches either identifier, else None.

        Both identifiers must be provided; otherwise no lookup is attempted.
        """
        if image_name is None or image_ref is None:
            return None
        for candidate in self.pkgs.values():
            if candidate is None or candidate.docker is None or candidate.docker.image_name is None:
                continue
            if candidate.docker.image_name not in (image_name, image_ref):
                continue
            log.debug(
                "Found common package",
                image_name=candidate.docker.image_name,  # type: ignore [union-attr]
                logo_url=candidate.logo_url,
                relnotes_url=candidate.release_notes_url,
            )
            return candidate
        return None
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class DefaultPackageEnricher(PackageEnricher):
    """Fallback enricher: supplies the configured default picture and no release notes."""

    def enrich(self, image_name: str | None, image_ref: str | None, log: Any) -> PackageUpdateInfo | None:
        """Build a minimal PackageUpdateInfo for any image; never returns None."""
        log.debug("Default pkg info", image_name=image_name, image_ref=image_ref)
        docker_info = DockerPackageUpdateInfo(image_name if image_name else NO_KNOWN_IMAGE)
        return PackageUpdateInfo(
            docker_info,
            logo_url=self.cfg.default_entity_picture_url,
            release_notes_url=None,
        )
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class CommonPackageEnricher(PackageEnricher):
    """Enricher backed by a shipped/user-provided file of well-known package metadata."""

    def initialize(self) -> None:
        """Load package metadata from PKG_INFO_FILE into self.pkgs.

        Falls back to an empty structured config when the file is missing.
        Re-raises omegaconf validation errors after logging them.
        """
        if PKG_INFO_FILE.exists():
            log.debug("Loading common package update info", path=PKG_INFO_FILE)
            cfg = OmegaConf.load(PKG_INFO_FILE)
        else:
            log.warn("No common package update info found", path=PKG_INFO_FILE)
            cfg = OmegaConf.structured(UpdateInfoConfig)
        try:
            # omegaconf broken-ness on optional fields and converting to dataclasses
            self.pkgs: dict[str, PackageUpdateInfo] = {
                pkg: PackageUpdateInfo(**pkg_cfg) for pkg, pkg_cfg in cfg.common_packages.items()
            }
        except (MissingMandatoryValue, ValidationError) as e:
            log.error("Configuration error %s", e, path=PKG_INFO_FILE.as_posix())
            raise
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class LinuxServerIOPackageEnricher(PackageEnricher):
    """Enricher that pulls logo and release-notes metadata from the linuxserver.io API."""

    def initialize(self) -> None:
        """Populate self.pkgs from the linuxserver.io image catalogue.

        No-op unless a "linuxserver.io" discover_metadata config entry exists and
        is enabled. Network or parse failures are logged and swallowed so startup
        can proceed without this metadata.
        """
        cfg = self.cfg.discover_metadata.get("linuxserver.io")
        if cfg is None or not cfg.enabled:
            return

        try:
            # Responses are HTTP-cached for cfg.cache_ttl seconds
            with SyncCacheClient(headers=[("cache-control", f"max-age={cfg.cache_ttl}")]) as client:
                log.debug(f"Fetching linuxserver.io metadata from API, cache_ttl={cfg.cache_ttl}")
                response: Response = client.get(
                    "https://api.linuxserver.io/api/v1/images?include_config=false&include_deprecated=false"
                )
                if response.status_code != 200:
                    log.error("Failed to fetch linuxserver.io metadata, non-200 response", status_code=response.status_code)
                    return
                api_data: Any = response.json()
                repos: list = api_data.get("data", {}).get("repositories", {}).get("linuxserver", [])
        except Exception:
            log.exception("Failed to fetch linuxserver.io metadata")
            return

        added = 0
        for repo in repos:
            image_name = repo.get("name")
            # Existing entries (from other enrichers) are never overwritten
            if image_name and image_name not in self.pkgs:
                self.pkgs[image_name] = PackageUpdateInfo(
                    DockerPackageUpdateInfo(f"lscr.io/linuxserver/{image_name}"),
                    logo_url=repo["project_logo"],
                    release_notes_url=f"{repo['github_url']}/releases",
                )
                added += 1
                log.debug("Added linuxserver.io package", pkg=image_name)
        log.info(f"Added {added} linuxserver.io package details")
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def fetch_url(
    url: str, cache_ttl: int = 300, bearer_token: str | None = None, response_type: str | None = None
) -> Response | None:
    """GET *url* through the caching HTTP client.

    Returns the Response even for non-2xx statuses; returns None only when the
    request itself fails (connection error, timeout, ...).
    """
    request_headers = [("cache-control", f"max-age={cache_ttl}")]
    if bearer_token:
        request_headers.append(("Authorization", f"Bearer {bearer_token}"))
    if response_type:
        request_headers.append(("Accept", response_type))
    try:
        with SyncCacheClient(headers=request_headers) as http:
            log.debug(f"Fetching URL {url}, cache_ttl={cache_ttl}")
            result: Response = http.get(url)
            if not result.is_success:
                log.debug("URL %s fetch returned non-success status: %s", url, result.status_code)
            return result
    except Exception as exc:
        log.debug("URL %s failed to fetch: %s", url, exc)
        return None
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def validate_url(url: str, cache_ttl: int = 300) -> bool:
    """Return True when *url* is fetchable and answers with a 2xx status."""
    result = fetch_url(url, cache_ttl=cache_ttl)
    if result is None:
        return False
    return bool(result.is_success)
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
# Source-hosting platforms we can derive release/diff URLs for
SOURCE_PLATFORM_GITHUB = "GitHub"
# platform -> regex matched against the OCI "image.source" annotation
SOURCE_PLATFORMS = {SOURCE_PLATFORM_GITHUB: r"https://github.com/.*"}
# platform -> URL template for a single-revision commit/diff view
DIFF_URL_TEMPLATES = {
    SOURCE_PLATFORM_GITHUB: "{source}/commit/{revision}",
}
# platform -> URL template for a tagged release page
RELEASE_URL_TEMPLATES = {SOURCE_PLATFORM_GITHUB: "{source}/releases/tag/{version}"}
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
class SourceReleaseEnricher:
    """Derives diff/release URLs and release summaries from OCI source annotations."""

    def __init__(self) -> None:
        self.log: Any = structlog.get_logger().bind(integration="docker")

    def enrich(self, annotations: dict[str, str]) -> dict[str, str]:
        """Build source-platform metadata from OCI image annotations.

        Returns a dict that may contain "diff_url", "release_url",
        "release_summary" and "net_score" (GitHub reaction +1/-1 balance);
        empty when the source platform is unrecognised. Candidate URLs are
        only included after a successful validation fetch.
        """
        results: dict[str, str] = {}
        image_version: str | None = annotations.get("org.opencontainers.image.version")
        image_digest: str | None = annotations.get("org.opencontainers.image.revision")
        source = annotations.get("org.opencontainers.image.source")
        source_platforms = [platform for platform, pattern in SOURCE_PLATFORMS.items() if re.match(pattern, source or "")]
        if not source_platforms:
            self.log.debug("No known source platform found on container", source=source)
            return results
        # Multiple matches are theoretically possible; first declared wins
        source_platform = source_platforms[0]

        if source:
            template_vars: dict[str, str | None] = {
                "source": source,
                "version": image_version,
                "revision": image_digest,
            }
            diff_url = DIFF_URL_TEMPLATES[source_platform].format(**template_vars)
            if validate_url(diff_url):
                results["diff_url"] = diff_url

            release_url = RELEASE_URL_TEMPLATES[source_platform].format(**template_vars)

            if validate_url(release_url):
                results["release_url"] = release_url

        if source_platform == SOURCE_PLATFORM_GITHUB and source:
            # Query the GitHub REST API for the release body and reactions
            base_api = source.replace("https://github.com", "https://api.github.com/repos")
            api_response: Response | None = fetch_url(f"{base_api}/releases/tags/{image_version}")
            if api_response and api_response.is_success:
                api_results: Any = httpx_json_content(api_response, {})
                results["release_summary"] = api_results.get("body")  # ty:ignore[possibly-missing-attribute]
                reactions = api_results.get("reactions")  # ty:ignore[possibly-missing-attribute]
                if reactions:
                    # NOTE(review): stored as int although the dict is typed str values
                    results["net_score"] = reactions.get("+1", 0) - reactions.get("-1", 0)
            else:
                self.log.debug(
                    "Failed to fetch GitHub release info",
                    url=f"{base_api}/releases/tags/{image_version}",
                    status_code=(api_response and api_response.status_code) or None,
                )
        return results
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
class AuthError(Exception):
    """Raised when a container-registry pull token cannot be obtained."""
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
# Well-known container registries and how to talk to them.
#   auth_host: host issuing pull tokens (None = no token endpoint configured)
#   api_host:  host serving the v2 manifest API
#   service:   "service" query parameter expected by the token endpoint
REGISTRIES = {
    # registry: (auth_host, api_host, service)
    "docker.io": ("auth.docker.io", "registry-1.docker.io", "registry.docker.io"),
    "mcr.microsoft.com": (None, "mcr.microsoft.com", "mcr.microsoft.com"),
    "ghcr.io": ("ghcr.io", "ghcr.io", "ghcr.io"),
    # lscr.io serves manifests itself but authenticates via ghcr.io
    "lscr.io": ("ghcr.io", "lscr.io", "ghcr.io"),
    "codeberg.org": ("codeberg.org", "codeberg.org", "container_registry"),
}
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def httpx_json_content(response: Response, default: Any = None) -> Any | None:
|
|
212
|
+
if response and "json" in response.headers.get("content-type"):
|
|
213
|
+
try:
|
|
214
|
+
return response.json()
|
|
215
|
+
except Exception:
|
|
216
|
+
log.debug("Failed to parse JSON response: %s", response.text)
|
|
217
|
+
return default
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
class LabelEnricher:
    """Fetches OCI annotations for an image directly from its registry's v2 API."""

    def __init__(self) -> None:
        self.log: Any = structlog.get_logger().bind(integration="docker")

    def fetch_token(self, auth_host: str, service: str, image_name: str) -> str | None:
        """Obtain a pull-scope bearer token for *image_name* from *auth_host*.

        First tries the conventional /token endpoint; on a 404 falls back to
        probing /v2/ and following the www-authenticate challenge.
        Raises AuthError when no token can be obtained by either route.
        """
        logger = self.log.bind(image_name=image_name, action="auth_registry")
        auth_url: str = f"https://{auth_host}/token?scope=repository:{image_name}:pull&service={service}"
        response: Response | None = fetch_url(auth_url, cache_ttl=30)
        if response and response.is_success:
            api_data = httpx_json_content(response, {})
            token: str | None = api_data.get("token") if api_data else None
            if token:
                return token
            logger.warning("No token found in response")
            raise AuthError(f"No token found in response for {image_name}")

        logger.debug(
            "Non-success response fetching token: %s",
            (response and response.status_code) or None,
        )
        if response and response.status_code == 404:
            # Registry without a /token endpoint: probe /v2/ and parse the challenge
            response = fetch_url(f"https://{auth_host}/v2/")
            if response and response.status_code == 401:
                auth = response.headers.get("www-authenticate")
                if not auth:
                    logger.debug("No www-authenticate header found in 401 response")
                    raise AuthError(f"No www-authenticate header found on 401 for {image_name}")
                # NOTE(review): assumes the exact ordering realm,service,scope with no spaces — confirm per registry
                match = re.search(r'realm="([^"]+)",service="([^"]+)",scope="([^"]+)"', auth)
                if not match:
                    logger.debug("No realm/service/scope found in www-authenticate header")
                    raise AuthError(f"No realm/service/scope found on 401 headers for {image_name}")

                realm, service, scope = match.groups()
                auth_url = f"{realm}?service={service}&scope={scope}"
                response = fetch_url(auth_url)
                if response and response.is_success:
                    token_data = response.json()
                    logger.debug("Fetched registry token")
                    return token_data.get("token")

        logger.debug("Failed to fetch registry token")
        raise AuthError(f"Failed to fetch token for {image_name}")

    def fetch_annotations(
        self,
        image_ref: str,
        os: str,
        arch: str,
        token: str | None = None,
        mutable_cache_ttl: int = 600,
        immutable_cache_ttl: int = 86400,
    ) -> dict[str, str]:
        """Collect OCI annotations for *image_ref* for the given os/arch platform.

        Fetches the image index (cached for mutable_cache_ttl, since tags move),
        then the platform-specific manifest (cached for immutable_cache_ttl,
        since digests are immutable), merging the manifest's annotations over
        the index's. Returns an empty dict on any fetch failure.
        May raise AuthError via fetch_token when no token was supplied.
        """
        logger = self.log.bind(image_ref=image_ref, action="enrich_registry")
        annotations: dict[str, str] = {}
        if token:
            logger.debug("Using provided token to fetch manifest for image %s", image_ref)
        registry, ref = resolve_repository_name(image_ref)
        # Unknown registries: assume the registry host serves auth/API/service itself
        default_host = (registry, registry, registry)
        auth_host: str | None = REGISTRIES.get(registry, default_host)[0]
        api_host: str | None = REGISTRIES.get(registry, default_host)[1]
        service: str = REGISTRIES.get(registry, default_host)[2]
        img_name = ref.split(":")[0] if ":" in ref else ref
        # Docker Hub convention: bare names live under "library/"
        img_name = img_name if "/" in img_name else f"library/{img_name}"
        if auth_host is not None and token is None:
            token = self.fetch_token(auth_host, service, img_name)

        img_tag = ref.split(":")[1] if ":" in ref else "latest"
        img_tag = img_tag.split("@")[0] if "@" in img_tag else img_tag
        response: Response | None = fetch_url(
            f"https://{api_host}/v2/{img_name}/manifests/{img_tag}",
            cache_ttl=mutable_cache_ttl,
            bearer_token=token,
            response_type="application/vnd.oci.image.index.v1+json",
        )
        if response is None:
            logger.debug("Empty response for manifest for image")
            return annotations
        if not response.is_success:
            api_data = httpx_json_content(response, {})
            logger.warning(
                "Failed to fetch manifest: %s",
                api_data.get("errors") if api_data else response.text,
            )
            return annotations
        index = response.json()
        logger.debug(
            "INDEX %s manifests, %s annotations",
            len(index.get("manifests", [])),
            len(index.get("annotations", [])),
        )
        annotations = index.get("annotations", {})
        for m in index.get("manifests", []):
            platform_info = m.get("platform", {})
            if platform_info.get("os") == os and platform_info.get("architecture") == arch:
                digest = m.get("digest")
                media_type = m.get("mediaType")
                response = fetch_url(
                    f"https://{api_host}/v2/{img_name}/manifests/{digest}",
                    cache_ttl=immutable_cache_ttl,
                    bearer_token=token,
                    response_type=media_type,
                )
                if response and response.is_success:
                    api_data = httpx_json_content(response, None)
                    if api_data:
                        logger.debug(
                            "MANIFEST %s layers, %s annotations",
                            len(api_data.get("layers", [])),
                            len(api_data.get("annotations", [])),
                        )
                        # Manifest-level annotations take precedence over index-level ones
                        if api_data.get("annotations"):
                            annotations.update(api_data.get("annotations", {}))
                    else:
                        logger.debug("No annotations found in manifest: %s", api_data)

        if not annotations:
            logger.debug("No annotations found from registry data")
        return annotations
|
|
338
|
+
|
|
339
|
+
|
|
340
|
+
r"""
|
|
341
|
+
https://ghcr.io/token\?scope\="repository:rhizomatics/updates2mqtt:pull"
|
|
342
|
+
https://ghcr.io/v2/rhizomatics/updates2mqtt/manifests/sha256:2c8edc1f9400ef02a93c3b754d4419082ceb5d049178c3a3968e3fd56caf7f29 Accept:application/vnd.oci.image.index.v1+json Accept:application/vnd.oci.image.manifest.v1+json Accept:application/vnd.docker.distribution.manifest.v2+json
|
|
343
|
+
https://ghcr.io/v2/rhizomatics/updates2mqtt/manifests/latest Accept:application/vnd.oci.image.index.v1+json Accept:application/vnd.oci.image.manifest.v1+json Accept:appli
|
|
344
|
+
""" # noqa: E501
|
updates2mqtt/model.py
CHANGED
|
@@ -1,10 +1,33 @@
|
|
|
1
|
+
import datetime as dt
|
|
1
2
|
import json
|
|
3
|
+
import re
|
|
4
|
+
import time
|
|
2
5
|
from abc import abstractmethod
|
|
3
6
|
from collections.abc import AsyncGenerator, Callable
|
|
7
|
+
from enum import StrEnum
|
|
4
8
|
from threading import Event
|
|
5
9
|
from typing import Any
|
|
6
10
|
|
|
7
11
|
import structlog
|
|
12
|
+
from tzlocal import get_localzone
|
|
13
|
+
|
|
14
|
+
from updates2mqtt.config import NO_KNOWN_IMAGE, NodeConfig, PublishPolicy, Selector, UpdatePolicy
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def timestamp(time_value: float | None) -> str | None:
|
|
18
|
+
if time_value is None:
|
|
19
|
+
return None
|
|
20
|
+
try:
|
|
21
|
+
return dt.datetime.fromtimestamp(time_value, tz=get_localzone()).isoformat()
|
|
22
|
+
except: # noqa: E722
|
|
23
|
+
return None
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class VersionPolicy(StrEnum):
    """Strategy for choosing the version string reported for a component."""

    AUTO = "AUTO"  # heuristic: prefer semver-looking versions, else fall back
    VERSION = "VERSION"  # always use the version label
    DIGEST = "DIGEST"  # always use the image digest
    VERSION_DIGEST = "VERSION_DIGEST"  # combined "version (digest)" form
|
|
8
31
|
|
|
9
32
|
|
|
10
33
|
class Discovery:
|
|
@@ -23,9 +46,10 @@ class Discovery:
|
|
|
23
46
|
can_build: bool = False,
|
|
24
47
|
can_restart: bool = False,
|
|
25
48
|
status: str = "on",
|
|
49
|
+
publish_policy: PublishPolicy = PublishPolicy.HOMEASSISTANT,
|
|
26
50
|
update_type: str | None = "Update",
|
|
27
|
-
update_policy:
|
|
28
|
-
|
|
51
|
+
update_policy: UpdatePolicy = UpdatePolicy.PASSIVE,
|
|
52
|
+
version_policy: VersionPolicy = VersionPolicy.AUTO,
|
|
29
53
|
release_url: str | None = None,
|
|
30
54
|
release_summary: str | None = None,
|
|
31
55
|
title_template: str = "{discovery.update_type} for {discovery.name} on {discovery.node}",
|
|
@@ -33,6 +57,7 @@ class Discovery:
|
|
|
33
57
|
custom: dict[str, Any] | None = None,
|
|
34
58
|
features: list[str] | None = None,
|
|
35
59
|
throttled: bool = False,
|
|
60
|
+
previous: "Discovery|None" = None,
|
|
36
61
|
) -> None:
|
|
37
62
|
self.provider: ReleaseProvider = provider
|
|
38
63
|
self.source_type: str = provider.source_type
|
|
@@ -51,11 +76,24 @@ class Discovery:
|
|
|
51
76
|
self.device_icon: str | None = device_icon
|
|
52
77
|
self.update_type: str | None = update_type
|
|
53
78
|
self.status: str = status
|
|
54
|
-
self.
|
|
55
|
-
self.
|
|
79
|
+
self.publish_policy: PublishPolicy = publish_policy
|
|
80
|
+
self.update_policy: UpdatePolicy = update_policy
|
|
81
|
+
self.version_policy: VersionPolicy = version_policy
|
|
82
|
+
self.update_last_attempt: float | None = None
|
|
56
83
|
self.custom: dict[str, Any] = custom or {}
|
|
57
84
|
self.features: list[str] = features or []
|
|
58
85
|
self.throttled: bool = throttled
|
|
86
|
+
self.scan_count: int
|
|
87
|
+
self.first_timestamp: float
|
|
88
|
+
self.last_timestamp: float = time.time()
|
|
89
|
+
|
|
90
|
+
if previous:
|
|
91
|
+
self.update_last_attempt = previous.update_last_attempt
|
|
92
|
+
self.first_timestamp = previous.first_timestamp
|
|
93
|
+
self.scan_count = previous.scan_count + 1
|
|
94
|
+
else:
|
|
95
|
+
self.first_timestamp = time.time()
|
|
96
|
+
self.scan_count = 1
|
|
59
97
|
|
|
60
98
|
def __repr__(self) -> str:
|
|
61
99
|
"""Build a custom string representation"""
|
|
@@ -76,16 +114,49 @@ class Discovery:
|
|
|
76
114
|
return self.title_template.format(discovery=self)
|
|
77
115
|
return self.name
|
|
78
116
|
|
|
117
|
+
    def as_dict(self) -> dict[str, str | list | dict | bool | int | None]:
        """Serialize this discovery as a JSON-friendly dict for MQTT publication."""
        return {
            "name": self.name,
            "node": self.node,
            "provider": {"source_type": self.provider.source_type},
            "first_scan": {"timestamp": timestamp(self.first_timestamp)},
            "last_scan": {"timestamp": timestamp(self.last_timestamp), "session": self.session, "throttled": self.throttled},
            "scan_count": self.scan_count,
            "installed_version": self.current_version,
            "latest_version": self.latest_version,
            "title": self.title,
            "release_summary": self.release_summary,
            "release_url": self.release_url,
            "entity_picture_url": self.entity_picture_url,
            "can_update": self.can_update,
            "can_build": self.can_build,
            "can_restart": self.can_restart,
            "device_icon": self.device_icon,
            "update_type": self.update_type,
            "status": self.status,
            "features": self.features,
            "update_policy": self.update_policy,
            "publish_policy": self.publish_policy,
            "version_policy": self.version_policy,
            # in_progress is a static False here; publishers overwrite it as needed
            "update": {"last_attempt": timestamp(self.update_last_attempt), "in_progress": False},
            # provider-specific custom data is nested under the source_type key
            self.source_type: self.custom,
        }
|
|
144
|
+
|
|
79
145
|
|
|
80
146
|
class ReleaseProvider:
|
|
81
147
|
"""Abstract base class for release providers, such as container scanners or package managers API calls"""
|
|
82
148
|
|
|
83
|
-
def __init__(self, source_type: str = "base") -> None:
|
|
149
|
+
def __init__(self, node_cfg: NodeConfig, source_type: str = "base") -> None:
|
|
84
150
|
self.source_type: str = source_type
|
|
85
151
|
self.discoveries: dict[str, Discovery] = {}
|
|
152
|
+
self.node_cfg: NodeConfig = node_cfg
|
|
86
153
|
self.log: Any = structlog.get_logger().bind(integration=self.source_type)
|
|
87
154
|
self.stopped = Event()
|
|
88
155
|
|
|
156
|
+
def initialize(self) -> None:
|
|
157
|
+
"""Initialize any loops or background tasks, make any startup API calls"""
|
|
158
|
+
pass
|
|
159
|
+
|
|
89
160
|
def stop(self) -> None:
|
|
90
161
|
"""Stop any loops or background tasks"""
|
|
91
162
|
self.log.info("Asking release provider to stop", source_type=self.source_type)
|
|
@@ -106,18 +177,9 @@ class ReleaseProvider:
|
|
|
106
177
|
@abstractmethod
|
|
107
178
|
async def scan(self, session: str) -> AsyncGenerator[Discovery]:
|
|
108
179
|
"""Scan for components to monitor"""
|
|
109
|
-
raise NotImplementedError
|
|
110
180
|
# force recognition as an async generator
|
|
111
|
-
if False:
|
|
112
|
-
yield 0
|
|
113
|
-
|
|
114
|
-
def hass_config_format(self, discovery: Discovery) -> dict:
|
|
115
|
-
_ = discovery
|
|
116
|
-
return {}
|
|
117
|
-
|
|
118
|
-
def hass_state_format(self, discovery: Discovery) -> dict:
|
|
119
|
-
_ = discovery
|
|
120
|
-
return {}
|
|
181
|
+
if False:
|
|
182
|
+
yield 0 # type: ignore[unreachable]
|
|
121
183
|
|
|
122
184
|
@abstractmethod
|
|
123
185
|
def command(self, discovery_name: str, command: str, on_update_start: Callable, on_update_end: Callable) -> bool:
|
|
@@ -126,3 +188,59 @@ class ReleaseProvider:
|
|
|
126
188
|
@abstractmethod
|
|
127
189
|
def resolve(self, discovery_name: str) -> Discovery | None:
|
|
128
190
|
"""Resolve a discovered component by name"""
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
class Selection:
    """Evaluate a value against an include/exclude Selector.

    Semantics (order matters, mirroring the original):
    - value is None: selected only when no include list is configured;
    - exclude patterns are applied first;
    - include patterns, when present, then take precedence over the
      exclude outcome.
    """

    def __init__(self, selector: Selector, value: str | None) -> None:
        self.result: bool = True
        self.matched: str | None = None
        if value is None:
            self.result = selector.include is None
            return
        excludes = selector.exclude
        if excludes is not None:
            self.result = True
            if any(re.search(pattern, value) for pattern in excludes):
                self.matched = value
                self.result = False
        includes = selector.include
        if includes is not None:
            self.result = False
            if any(re.search(pattern, value) for pattern in includes):
                self.matched = value
                self.result = True
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
# Loose semver-ish pattern: optional v/V prefix followed by dotted numerics
VERSION_RE = r"[vV]?[0-9]+(\.[0-9]+)*"


def select_version(
    version_policy: VersionPolicy,
    version: str | None,
    digest: str | None,
    other_version: str | None = None,
    other_digest: str | None = None,
) -> str:
    """Pick the display version string for an update entity.

    Applies the explicit policy when its required inputs are available;
    otherwise falls back through version/digest combinations, using
    *other_version*/*other_digest* (typically the opposite side of the
    installed/latest pair) for the AUTO consistency heuristic and as a
    last-resort value. Always returns a string (NO_KNOWN_IMAGE at worst).
    """
    if version_policy == VersionPolicy.VERSION and version:
        return version
    if version_policy == VersionPolicy.DIGEST and digest and digest != NO_KNOWN_IMAGE:
        return digest
    if version_policy == VersionPolicy.VERSION_DIGEST and version and digest and digest != NO_KNOWN_IMAGE:
        return f"{version} ({digest})"
    # AUTO or fallback
    if version_policy == VersionPolicy.AUTO and version and re.match(VERSION_RE, version):
        # Smells like semver
        if other_version is None and other_digest is None:
            return version
        if re.match(VERSION_RE, other_version or "") and (
            (version == other_version and digest == other_digest) or (version != other_version and digest != other_digest)
        ):
            # Only semver if versions and digest consistently same or different
            return version

    if version and digest and digest != NO_KNOWN_IMAGE:
        return f"{version}:{digest}"
    if version:
        return version
    if digest and digest != NO_KNOWN_IMAGE:
        return digest

    # Fix: previously "other_version or other_version", so other_digest was
    # never consulted as the final fallback before NO_KNOWN_IMAGE.
    return other_version or other_digest or NO_KNOWN_IMAGE
|
updates2mqtt/mqtt.py
CHANGED
|
@@ -16,7 +16,7 @@ from paho.mqtt.reasoncodes import ReasonCode
|
|
|
16
16
|
|
|
17
17
|
from updates2mqtt.model import Discovery, ReleaseProvider
|
|
18
18
|
|
|
19
|
-
from .config import HomeAssistantConfig, MqttConfig, NodeConfig
|
|
19
|
+
from .config import HomeAssistantConfig, MqttConfig, NodeConfig, PublishPolicy
|
|
20
20
|
from .hass_formatter import hass_format_config, hass_format_state
|
|
21
21
|
|
|
22
22
|
log = structlog.get_logger()
|
|
@@ -235,7 +235,10 @@ class MqttPublisher:
|
|
|
235
235
|
updated = provider.command(comp_name, command, on_update_start, on_update_end)
|
|
236
236
|
discovery = provider.resolve(comp_name)
|
|
237
237
|
if updated and discovery:
|
|
238
|
-
|
|
238
|
+
if discovery.publish_policy in (PublishPolicy.HOMEASSISTANT, PublishPolicy.MQTT):
|
|
239
|
+
self.publish_discovery(discovery)
|
|
240
|
+
if discovery.publish_policy == PublishPolicy.HOMEASSISTANT:
|
|
241
|
+
self.publish_hass_state(discovery)
|
|
239
242
|
else:
|
|
240
243
|
logger.debug("No change to republish after execution")
|
|
241
244
|
logger.info("Execution ended")
|
|
@@ -283,10 +286,12 @@ class MqttPublisher:
|
|
|
283
286
|
|
|
284
287
|
def handle_message(self, msg: mqtt.MQTTMessage | LocalMessage) -> None:
|
|
285
288
|
def update_start(discovery: Discovery) -> None:
|
|
286
|
-
|
|
289
|
+
if discovery.publish_policy in (PublishPolicy.HOMEASSISTANT, PublishPolicy.MQTT):
|
|
290
|
+
self.publish_hass_state(discovery, in_progress=True)
|
|
287
291
|
|
|
288
292
|
def update_end(discovery: Discovery) -> None:
|
|
289
|
-
|
|
293
|
+
if discovery.publish_policy in (PublishPolicy.HOMEASSISTANT, PublishPolicy.MQTT):
|
|
294
|
+
self.publish_hass_state(discovery, in_progress=False)
|
|
290
295
|
|
|
291
296
|
if self.event_loop is not None:
|
|
292
297
|
asyncio.run_coroutine_threadsafe(self.execute_command(msg, update_start, update_end), self.event_loop)
|
|
@@ -298,12 +303,26 @@ class MqttPublisher:
|
|
|
298
303
|
return f"{prefix}/update/{self.node_cfg.name}_{discovery.source_type}_{discovery.name}/update/config"
|
|
299
304
|
|
|
300
305
|
def state_topic(self, discovery: Discovery) -> str:
|
|
306
|
+
return f"{self.cfg.topic_root}/{self.node_cfg.name}/{discovery.source_type}/{discovery.name}/state"
|
|
307
|
+
|
|
308
|
+
def general_topic(self, discovery: Discovery) -> str:
|
|
301
309
|
return f"{self.cfg.topic_root}/{self.node_cfg.name}/{discovery.source_type}/{discovery.name}"
|
|
302
310
|
|
|
303
311
|
def command_topic(self, provider: ReleaseProvider) -> str:
|
|
304
312
|
return f"{self.cfg.topic_root}/{self.node_cfg.name}/{provider.source_type}"
|
|
305
313
|
|
|
314
|
+
    def publish_discovery(self, discovery: Discovery, in_progress: bool = False) -> None:
        """Comprehensive, non Home Assistant specific, base publication"""
        # Components whose policy opts out of MQTT publication are skipped entirely
        if discovery.publish_policy not in (PublishPolicy.HOMEASSISTANT, PublishPolicy.MQTT):
            return
        self.log.debug("Discovery publish: %s", discovery)
        payload: dict[str, Any] = discovery.as_dict()
        # as_dict() always emits in_progress=False; overwrite with the caller's view
        payload["update"]["in_progress"] = in_progress  # ty:ignore[invalid-assignment]
        self.publish(self.general_topic(discovery), payload)
|
|
322
|
+
|
|
306
323
|
def publish_hass_state(self, discovery: Discovery, in_progress: bool = False) -> None:
|
|
324
|
+
if discovery.publish_policy != PublishPolicy.HOMEASSISTANT:
|
|
325
|
+
return
|
|
307
326
|
self.log.debug("HASS State update, in progress: %s, discovery: %s", in_progress, discovery)
|
|
308
327
|
self.publish(
|
|
309
328
|
self.state_topic(discovery),
|
|
@@ -315,6 +334,8 @@ class MqttPublisher:
|
|
|
315
334
|
)
|
|
316
335
|
|
|
317
336
|
def publish_hass_config(self, discovery: Discovery) -> None:
|
|
337
|
+
if discovery.publish_policy != PublishPolicy.HOMEASSISTANT:
|
|
338
|
+
return
|
|
318
339
|
object_id = f"{discovery.source_type}_{self.node_cfg.name}_{discovery.name}"
|
|
319
340
|
self.publish(
|
|
320
341
|
self.config_topic(discovery),
|
|
@@ -323,10 +344,10 @@ class MqttPublisher:
|
|
|
323
344
|
object_id=object_id,
|
|
324
345
|
area=self.hass_cfg.area,
|
|
325
346
|
state_topic=self.state_topic(discovery),
|
|
347
|
+
attrs_topic=self.general_topic(discovery) if self.hass_cfg.extra_attributes else None,
|
|
326
348
|
command_topic=self.command_topic(discovery.provider),
|
|
327
349
|
force_command_topic=self.hass_cfg.force_command_topic,
|
|
328
350
|
device_creation=self.hass_cfg.device_creation,
|
|
329
|
-
session=discovery.session,
|
|
330
351
|
),
|
|
331
352
|
)
|
|
332
353
|
|