updates2mqtt 1.7.0__py3-none-any.whl → 1.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- updates2mqtt/cli.py +150 -0
- updates2mqtt/config.py +32 -2
- updates2mqtt/hass_formatter.py +5 -4
- updates2mqtt/helpers.py +226 -0
- updates2mqtt/integrations/docker.py +308 -252
- updates2mqtt/integrations/docker_enrich.py +714 -182
- updates2mqtt/integrations/git_utils.py +5 -5
- updates2mqtt/model.py +94 -89
- updates2mqtt/mqtt.py +5 -0
- {updates2mqtt-1.7.0.dist-info → updates2mqtt-1.7.2.dist-info}/METADATA +13 -7
- updates2mqtt-1.7.2.dist-info/RECORD +18 -0
- {updates2mqtt-1.7.0.dist-info → updates2mqtt-1.7.2.dist-info}/entry_points.txt +1 -0
- updates2mqtt-1.7.0.dist-info/RECORD +0 -16
- {updates2mqtt-1.7.0.dist-info → updates2mqtt-1.7.2.dist-info}/WHEEL +0 -0
|
@@ -1,9 +1,10 @@
|
|
|
1
|
+
import random
|
|
2
|
+
import re
|
|
1
3
|
import subprocess
|
|
2
4
|
import time
|
|
3
5
|
import typing
|
|
4
6
|
from collections.abc import AsyncGenerator, Callable
|
|
5
7
|
from enum import Enum
|
|
6
|
-
from http import HTTPStatus
|
|
7
8
|
from pathlib import Path
|
|
8
9
|
from threading import Event
|
|
9
10
|
from typing import Any, cast
|
|
@@ -11,32 +12,39 @@ from typing import Any, cast
|
|
|
11
12
|
import docker
|
|
12
13
|
import docker.errors
|
|
13
14
|
import structlog
|
|
14
|
-
from docker.auth import resolve_repository_name
|
|
15
15
|
from docker.models.containers import Container
|
|
16
16
|
|
|
17
17
|
from updates2mqtt.config import (
|
|
18
|
-
|
|
18
|
+
SEMVER_RE,
|
|
19
|
+
UNKNOWN_VERSION,
|
|
20
|
+
VERSION_RE,
|
|
19
21
|
DockerConfig,
|
|
20
22
|
NodeConfig,
|
|
21
23
|
PackageUpdateInfo,
|
|
22
24
|
PublishPolicy,
|
|
25
|
+
RegistryAPI,
|
|
23
26
|
UpdatePolicy,
|
|
27
|
+
VersionPolicy,
|
|
24
28
|
)
|
|
29
|
+
from updates2mqtt.helpers import Selection, Throttler
|
|
25
30
|
from updates2mqtt.integrations.docker_enrich import (
|
|
26
|
-
AuthError,
|
|
27
31
|
CommonPackageEnricher,
|
|
32
|
+
ContainerDistributionAPIVersionLookup,
|
|
28
33
|
DefaultPackageEnricher,
|
|
29
|
-
|
|
34
|
+
DockerClientVersionLookup,
|
|
35
|
+
DockerImageInfo,
|
|
36
|
+
DockerServiceDetails,
|
|
30
37
|
LinuxServerIOPackageEnricher,
|
|
38
|
+
LocalContainerInfo,
|
|
31
39
|
PackageEnricher,
|
|
32
40
|
SourceReleaseEnricher,
|
|
33
41
|
)
|
|
34
|
-
from updates2mqtt.model import Discovery,
|
|
42
|
+
from updates2mqtt.model import Discovery, ReleaseDetail, ReleaseProvider
|
|
35
43
|
|
|
36
|
-
from .git_utils import git_check_update_available, git_iso_timestamp,
|
|
44
|
+
from .git_utils import git_check_update_available, git_iso_timestamp, git_local_digest, git_pull, git_trust
|
|
37
45
|
|
|
38
46
|
if typing.TYPE_CHECKING:
|
|
39
|
-
from docker.models.images import Image
|
|
47
|
+
from docker.models.images import Image
|
|
40
48
|
|
|
41
49
|
# distinguish docker build from docker pull?
|
|
42
50
|
|
|
@@ -59,7 +67,7 @@ class ContainerCustomization:
|
|
|
59
67
|
env_prefix: str = "UPD2MQTT_"
|
|
60
68
|
|
|
61
69
|
def __init__(self, container: Container) -> None:
|
|
62
|
-
self.update:
|
|
70
|
+
self.update: UpdatePolicy = UpdatePolicy.PASSIVE # was known as UPD2MQTT_UPDATE before policies and labels
|
|
63
71
|
self.git_repo_path: str | None = None
|
|
64
72
|
self.picture: str | None = None
|
|
65
73
|
self.relnotes: str | None = None
|
|
@@ -108,11 +116,11 @@ class ContainerCustomization:
|
|
|
108
116
|
setattr(self, attr, v.upper() in ("TRUE", "YES", "1"))
|
|
109
117
|
elif isinstance(getattr(self, attr), VersionPolicy):
|
|
110
118
|
setattr(self, attr, VersionPolicy[v.upper()])
|
|
119
|
+
elif isinstance(getattr(self, attr), UpdatePolicy):
|
|
120
|
+
setattr(self, attr, UpdatePolicy[v.upper()])
|
|
111
121
|
else:
|
|
112
122
|
setattr(self, attr, v)
|
|
113
123
|
|
|
114
|
-
self.update = self.update.upper()
|
|
115
|
-
|
|
116
124
|
|
|
117
125
|
class DockerProvider(ReleaseProvider):
|
|
118
126
|
def __init__(
|
|
@@ -126,16 +134,19 @@ class DockerProvider(ReleaseProvider):
|
|
|
126
134
|
self.cfg: DockerConfig = cfg
|
|
127
135
|
|
|
128
136
|
# TODO: refresh discovered packages periodically
|
|
129
|
-
self.
|
|
130
|
-
self.api_throttle_pause: int = cfg.default_api_backoff
|
|
137
|
+
self.throttler = Throttler(self.cfg.default_api_backoff, self.log, self.stopped)
|
|
131
138
|
self.self_bounce: Event | None = self_bounce
|
|
132
139
|
self.pkg_enrichers: list[PackageEnricher] = [
|
|
133
140
|
CommonPackageEnricher(self.cfg),
|
|
134
141
|
LinuxServerIOPackageEnricher(self.cfg),
|
|
135
142
|
DefaultPackageEnricher(self.cfg),
|
|
136
143
|
]
|
|
137
|
-
self.
|
|
144
|
+
self.docker_client_image_lookup = DockerClientVersionLookup(
|
|
145
|
+
self.client, self.throttler, self.cfg.registry, self.cfg.default_api_backoff
|
|
146
|
+
)
|
|
147
|
+
self.registry_image_lookup = ContainerDistributionAPIVersionLookup(self.throttler, self.cfg.registry)
|
|
138
148
|
self.release_enricher = SourceReleaseEnricher()
|
|
149
|
+
self.local_info_builder = LocalContainerInfo()
|
|
139
150
|
|
|
140
151
|
def initialize(self) -> None:
|
|
141
152
|
for enricher in self.pkg_enrichers:
|
|
@@ -152,19 +163,21 @@ class DockerProvider(ReleaseProvider):
|
|
|
152
163
|
|
|
153
164
|
def fetch(self, discovery: Discovery) -> None:
|
|
154
165
|
logger = self.log.bind(container=discovery.name, action="fetch")
|
|
166
|
+
installed_info: DockerImageInfo | None = cast("DockerImageInfo|None", discovery.current_detail)
|
|
167
|
+
service_info: DockerServiceDetails | None = cast("DockerServiceDetails|None", discovery.installation_detail)
|
|
155
168
|
|
|
156
|
-
image_ref: str | None =
|
|
157
|
-
platform: str | None =
|
|
158
|
-
if discovery.
|
|
169
|
+
image_ref: str | None = installed_info.ref if installed_info else None
|
|
170
|
+
platform: str | None = installed_info.platform if installed_info else None
|
|
171
|
+
if discovery.can_pull and image_ref:
|
|
159
172
|
logger.info("Pulling", image_ref=image_ref, platform=platform)
|
|
160
173
|
image: Image = self.client.images.pull(image_ref, platform=platform, all_tags=False)
|
|
161
174
|
if image:
|
|
162
175
|
logger.info("Pulled", image_id=image.id, image_ref=image_ref, platform=platform)
|
|
163
176
|
else:
|
|
164
177
|
logger.warn("Unable to pull", image_ref=image_ref, platform=platform)
|
|
165
|
-
elif discovery.can_build:
|
|
166
|
-
compose_path: str | None =
|
|
167
|
-
git_repo_path: str | None =
|
|
178
|
+
elif discovery.can_build and service_info:
|
|
179
|
+
compose_path: str | None = service_info.compose_path
|
|
180
|
+
git_repo_path: str | None = service_info.git_repo_path
|
|
168
181
|
logger.debug("can_build check", git_repo=git_repo_path)
|
|
169
182
|
if not compose_path or not git_repo_path:
|
|
170
183
|
logger.warn("No compose path or git repo path configured, skipped build")
|
|
@@ -172,10 +185,7 @@ class DockerProvider(ReleaseProvider):
|
|
|
172
185
|
|
|
173
186
|
full_repo_path: Path = self.full_repo_path(compose_path, git_repo_path)
|
|
174
187
|
if git_pull(full_repo_path, Path(self.node_cfg.git_path)):
|
|
175
|
-
|
|
176
|
-
self.build(discovery, compose_path)
|
|
177
|
-
else:
|
|
178
|
-
logger.warn("No compose path configured, skipped build")
|
|
188
|
+
self.build(discovery)
|
|
179
189
|
else:
|
|
180
190
|
logger.debug("Skipping git_pull, no update")
|
|
181
191
|
|
|
@@ -186,14 +196,19 @@ class DockerProvider(ReleaseProvider):
|
|
|
186
196
|
return Path(compose_path) / git_repo_path
|
|
187
197
|
return Path(git_repo_path)
|
|
188
198
|
|
|
189
|
-
def build(self, discovery: Discovery
|
|
199
|
+
def build(self, discovery: Discovery) -> bool:
|
|
190
200
|
logger = self.log.bind(container=discovery.name, action="build")
|
|
191
|
-
|
|
201
|
+
service_info: DockerServiceDetails | None = cast("DockerServiceDetails|None", discovery.installation_detail)
|
|
202
|
+
|
|
203
|
+
if not service_info or not service_info.compose_path:
|
|
204
|
+
logger.warn("No service_info available on compose")
|
|
205
|
+
return False
|
|
206
|
+
logger.info("Building", compose_path=service_info.compose_path, service=service_info.compose_service)
|
|
192
207
|
return self.execute_compose(
|
|
193
208
|
command=DockerComposeCommand.BUILD,
|
|
194
209
|
args="",
|
|
195
|
-
service=
|
|
196
|
-
cwd=compose_path,
|
|
210
|
+
service=service_info.compose_service,
|
|
211
|
+
cwd=service_info.compose_path,
|
|
197
212
|
logger=logger,
|
|
198
213
|
)
|
|
199
214
|
|
|
@@ -227,16 +242,28 @@ class DockerProvider(ReleaseProvider):
|
|
|
227
242
|
|
|
228
243
|
def restart(self, discovery: Discovery) -> bool:
|
|
229
244
|
logger = self.log.bind(container=discovery.name, action="restart")
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
245
|
+
installed_info: DockerImageInfo | None = cast("DockerImageInfo|None", discovery.current_detail)
|
|
246
|
+
service_info: DockerServiceDetails | None = cast("DockerServiceDetails|None", discovery.installation_detail)
|
|
247
|
+
|
|
248
|
+
if (
|
|
249
|
+
self.self_bounce is not None
|
|
250
|
+
and installed_info
|
|
251
|
+
and service_info
|
|
252
|
+
and (
|
|
253
|
+
"ghcr.io/rhizomatics/updates2mqtt" in installed_info.ref
|
|
254
|
+
or (service_info.git_repo_path and service_info.git_repo_path.endswith("updates2mqtt"))
|
|
255
|
+
)
|
|
233
256
|
):
|
|
234
257
|
logger.warning("Attempting to self-bounce")
|
|
235
258
|
self.self_bounce.set()
|
|
236
|
-
|
|
237
|
-
|
|
259
|
+
if service_info is None:
|
|
260
|
+
return False
|
|
238
261
|
return self.execute_compose(
|
|
239
|
-
command=DockerComposeCommand.UP,
|
|
262
|
+
command=DockerComposeCommand.UP,
|
|
263
|
+
args="--detach --yes",
|
|
264
|
+
service=service_info.compose_service,
|
|
265
|
+
cwd=service_info.compose_path,
|
|
266
|
+
logger=logger,
|
|
240
267
|
)
|
|
241
268
|
|
|
242
269
|
def rescan(self, discovery: Discovery) -> Discovery | None:
|
|
@@ -245,7 +272,7 @@ class DockerProvider(ReleaseProvider):
|
|
|
245
272
|
c: Container = self.client.containers.get(discovery.name)
|
|
246
273
|
if c:
|
|
247
274
|
rediscovery = self.analyze(c, discovery.session, previous_discovery=discovery)
|
|
248
|
-
if rediscovery:
|
|
275
|
+
if rediscovery and not rediscovery.throttled:
|
|
249
276
|
self.discoveries[rediscovery.name] = rediscovery
|
|
250
277
|
return rediscovery
|
|
251
278
|
logger.warn("Unable to find container for rescan")
|
|
@@ -255,22 +282,9 @@ class DockerProvider(ReleaseProvider):
|
|
|
255
282
|
logger.exception("Docker API error retrieving container")
|
|
256
283
|
return None
|
|
257
284
|
|
|
258
|
-
def check_throttle(self, repo_id: str) -> bool:
|
|
259
|
-
if self.pause_api_until.get(repo_id) is not None:
|
|
260
|
-
if self.pause_api_until[repo_id] < time.time():
|
|
261
|
-
del self.pause_api_until[repo_id]
|
|
262
|
-
self.log.info("%s throttling wait complete", repo_id)
|
|
263
|
-
else:
|
|
264
|
-
self.log.debug("%s throttling has %s secs left", repo_id, self.pause_api_until[repo_id] - time.time())
|
|
265
|
-
return True
|
|
266
|
-
return False
|
|
267
|
-
|
|
268
285
|
def analyze(self, c: Container, session: str, previous_discovery: Discovery | None = None) -> Discovery | None:
|
|
269
286
|
logger = self.log.bind(container=c.name, action="analyze")
|
|
270
287
|
|
|
271
|
-
image_ref: str | None = None
|
|
272
|
-
image_name: str | None = None
|
|
273
|
-
local_versions = None
|
|
274
288
|
if c.attrs is None or not c.attrs:
|
|
275
289
|
logger.warn("No container attributes found, discovery rejected")
|
|
276
290
|
return None
|
|
@@ -282,252 +296,150 @@ class DockerProvider(ReleaseProvider):
|
|
|
282
296
|
if customization.ignore:
|
|
283
297
|
logger.info("Container ignored due to UPD2MQTT_IGNORE setting")
|
|
284
298
|
return None
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
if
|
|
289
|
-
image_ref = image.tags[0]
|
|
290
|
-
else:
|
|
291
|
-
image_ref = c.attrs.get("Config", {}).get("Image")
|
|
292
|
-
if image_ref is None:
|
|
293
|
-
logger.warn("No image or image attributes found")
|
|
294
|
-
else:
|
|
295
|
-
repo_id, _ = resolve_repository_name(image_ref)
|
|
296
|
-
try:
|
|
297
|
-
image_name = image_ref.split(":")[0]
|
|
298
|
-
except Exception as e:
|
|
299
|
-
logger.warn("No tags found (%s) : %s", image, e)
|
|
300
|
-
if image is not None and image.attrs is not None:
|
|
301
|
-
try:
|
|
302
|
-
local_versions = [i.split("@")[1][7:19] for i in image.attrs["RepoDigests"]]
|
|
303
|
-
except Exception as e:
|
|
304
|
-
logger.warn("Cannot determine local version: %s", e)
|
|
305
|
-
logger.warn("RepoDigests=%s", image.attrs.get("RepoDigests"))
|
|
306
|
-
|
|
307
|
-
selection = Selection(self.cfg.image_ref_select, image_ref)
|
|
308
|
-
publish_policy: PublishPolicy = PublishPolicy.MQTT if not selection.result else PublishPolicy.HOMEASSISTANT
|
|
309
|
-
version_policy: VersionPolicy = VersionPolicy.AUTO if not customization.version_policy else customization.version_policy
|
|
310
|
-
|
|
311
|
-
if customization.update == "AUTO":
|
|
299
|
+
version_policy: VersionPolicy = (
|
|
300
|
+
self.cfg.version_policy if not customization.version_policy else customization.version_policy
|
|
301
|
+
)
|
|
302
|
+
if customization.update == UpdatePolicy.AUTO:
|
|
312
303
|
logger.debug("Auto update policy detected")
|
|
313
|
-
|
|
314
|
-
else:
|
|
315
|
-
update_policy = UpdatePolicy.PASSIVE
|
|
304
|
+
update_policy: UpdatePolicy = customization.update or UpdatePolicy.PASSIVE
|
|
316
305
|
|
|
317
|
-
|
|
318
|
-
|
|
306
|
+
local_info: DockerImageInfo
|
|
307
|
+
service_info: DockerServiceDetails
|
|
308
|
+
local_info, service_info = self.local_info_builder.build_image_info(c)
|
|
309
|
+
pkg_info: PackageUpdateInfo = self.default_metadata(local_info)
|
|
319
310
|
|
|
320
311
|
try:
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
if
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
while reg_data is None and retries_left > 0 and not self.stopped.is_set():
|
|
346
|
-
try:
|
|
347
|
-
logger.debug("Fetching registry data", image_ref=image_ref)
|
|
348
|
-
reg_data = self.client.images.get_registry_data(image_ref)
|
|
349
|
-
logger.debug(
|
|
350
|
-
"Registry Data: id:%s,image:%s, attrs:%s",
|
|
351
|
-
reg_data.id,
|
|
352
|
-
reg_data.image_name,
|
|
353
|
-
reg_data.attrs,
|
|
354
|
-
)
|
|
355
|
-
latest_digest = reg_data.short_id[7:] if reg_data else None
|
|
356
|
-
|
|
357
|
-
except docker.errors.APIError as e:
|
|
358
|
-
if e.status_code == HTTPStatus.TOO_MANY_REQUESTS:
|
|
359
|
-
retry_secs: int
|
|
360
|
-
try:
|
|
361
|
-
retry_secs = int(e.response.headers.get("Retry-After", self.api_throttle_pause)) # type: ignore[union-attr]
|
|
362
|
-
except: # noqa: E722
|
|
363
|
-
retry_secs = self.api_throttle_pause
|
|
364
|
-
logger.warn("Docker Registry throttling requests for %s seconds, %s", retry_secs, e.explanation)
|
|
365
|
-
self.pause_api_until[repo_id] = time.time() + retries_left
|
|
366
|
-
return None
|
|
367
|
-
retries_left -= 1
|
|
368
|
-
if retries_left == 0 or e.is_client_error():
|
|
369
|
-
logger.warn("Failed to fetch registry data: [%s] %s", e.errno, e.explanation)
|
|
370
|
-
else:
|
|
371
|
-
logger.debug("Failed to fetch registry data, retrying: %s", e)
|
|
372
|
-
|
|
373
|
-
installed_digest: str | None = NO_KNOWN_IMAGE
|
|
374
|
-
installed_version: str | None = None
|
|
375
|
-
if local_versions:
|
|
376
|
-
# might be multiple RepoDigests if image has been pulled multiple times with diff manifests
|
|
377
|
-
installed_digest = latest_digest if latest_digest in local_versions else local_versions[0]
|
|
378
|
-
logger.debug(f"Setting local digest to {installed_digest}, local_versions:{local_versions}")
|
|
379
|
-
|
|
380
|
-
def save_if_set(key: str, val: str | None) -> None:
|
|
381
|
-
if val is not None:
|
|
382
|
-
custom[key] = val
|
|
383
|
-
|
|
384
|
-
image_ref = image_ref or ""
|
|
385
|
-
|
|
386
|
-
custom: dict[str, str | bool | int | list[str] | dict[str, Any] | None] = {}
|
|
387
|
-
custom["platform"] = platform
|
|
388
|
-
custom["image_ref"] = image_ref
|
|
389
|
-
custom["installed_digest"] = installed_digest
|
|
390
|
-
custom["latest_digest"] = latest_digest
|
|
391
|
-
custom["repo_id"] = repo_id
|
|
392
|
-
custom["git_repo_path"] = customization.git_repo_path
|
|
393
|
-
|
|
394
|
-
if c.labels:
|
|
395
|
-
save_if_set("compose_path", c.labels.get("com.docker.compose.project.working_dir"))
|
|
396
|
-
save_if_set("compose_version", c.labels.get("com.docker.compose.version"))
|
|
397
|
-
save_if_set("compose_service", c.labels.get("com.docker.compose.service"))
|
|
398
|
-
save_if_set("documentation_url", c.labels.get("org.opencontainers.image.documentation"))
|
|
399
|
-
save_if_set("description", c.labels.get("org.opencontainers.image.description"))
|
|
400
|
-
save_if_set("current_image_created", c.labels.get("org.opencontainers.image.created"))
|
|
401
|
-
save_if_set("current_image_version", c.labels.get("org.opencontainers.image.version"))
|
|
402
|
-
save_if_set("vendor", c.labels.get("org.opencontainers.image.vendor"))
|
|
403
|
-
installed_version = c.labels.get("org.opencontainers.image.version")
|
|
312
|
+
service_info.git_repo_path = customization.git_repo_path
|
|
313
|
+
|
|
314
|
+
registry_selection = Selection(self.cfg.registry_select, local_info.index_name)
|
|
315
|
+
latest_info: DockerImageInfo
|
|
316
|
+
if local_info.pinned:
|
|
317
|
+
logger.debug("Skipping registry fetch for local pinned image, %s", local_info.ref)
|
|
318
|
+
latest_info = local_info.reuse()
|
|
319
|
+
elif registry_selection and local_info.ref and not local_info.local_build:
|
|
320
|
+
if self.cfg.registry.api == RegistryAPI.DOCKER_CLIENT:
|
|
321
|
+
latest_info = self.docker_client_image_lookup.lookup(local_info)
|
|
322
|
+
elif self.cfg.registry.api == RegistryAPI.OCI_V2:
|
|
323
|
+
latest_info = self.registry_image_lookup.lookup(local_info, token=customization.registry_token)
|
|
324
|
+
elif self.cfg.registry.api == RegistryAPI.OCI_V2_MINIMAL:
|
|
325
|
+
latest_info = self.registry_image_lookup.lookup(
|
|
326
|
+
local_info, token=customization.registry_token, minimal=True
|
|
327
|
+
)
|
|
328
|
+
else: # assuming RegistryAPI.DISABLED
|
|
329
|
+
logger.debug(f"Skipping registry check, disabled in config {self.cfg.registry.api}")
|
|
330
|
+
latest_info = local_info.reuse()
|
|
331
|
+
elif local_info.local_build:
|
|
332
|
+
# assume its a locally built image if no RepoDigests available
|
|
333
|
+
latest_info = local_info.reuse()
|
|
334
|
+
latest_info.short_digest = None
|
|
335
|
+
latest_info.image_digest = None
|
|
404
336
|
else:
|
|
405
|
-
logger.debug("
|
|
406
|
-
|
|
337
|
+
logger.debug("Registry selection rules suppressed metadata lookup")
|
|
338
|
+
latest_info = local_info.reuse()
|
|
407
339
|
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
os, arch = platform.split("/")[:2] if "/" in platform else (platform, "Unknown")
|
|
418
|
-
try:
|
|
419
|
-
annotations: dict[str, str] = self.label_enricher.fetch_annotations(
|
|
420
|
-
image_ref, os, arch, token=customization.registry_token
|
|
421
|
-
)
|
|
422
|
-
except AuthError as e:
|
|
423
|
-
logger.warning("Authentication error prevented Docker Registry entichment: %s", e)
|
|
424
|
-
annotations = {}
|
|
425
|
-
|
|
426
|
-
if annotations:
|
|
427
|
-
save_if_set("latest_image_created", annotations.get("org.opencontainers.image.created"))
|
|
428
|
-
save_if_set("source", annotations.get("org.opencontainers.image.source"))
|
|
429
|
-
save_if_set("documentation_url", annotations.get("org.opencontainers.image.documentation"))
|
|
430
|
-
save_if_set("description", annotations.get("org.opencontainers.image.description"))
|
|
431
|
-
save_if_set("latest_image_version", annotations.get("org.opencontainers.image.version"))
|
|
432
|
-
save_if_set("vendor", annotations.get("org.opencontainers.image.vendor"))
|
|
433
|
-
latest_version = annotations.get("org.opencontainers.image.version")
|
|
434
|
-
custom.update(self.release_enricher.enrich(annotations) or {})
|
|
435
|
-
|
|
436
|
-
if custom.get("git_repo_path") and custom.get("compose_path"):
|
|
437
|
-
full_repo_path: Path = Path(cast("str", custom.get("compose_path"))).joinpath(
|
|
438
|
-
cast("str", custom.get("git_repo_path"))
|
|
439
|
-
)
|
|
340
|
+
release_info: ReleaseDetail | None = self.release_enricher.enrich(
|
|
341
|
+
latest_info,
|
|
342
|
+
source_repo_url=pkg_info.source_repo_url,
|
|
343
|
+
notes_url=customization.relnotes or pkg_info.release_notes_url,
|
|
344
|
+
)
|
|
345
|
+
logger.debug("Enriched release info: %s", release_info)
|
|
346
|
+
|
|
347
|
+
if service_info.git_repo_path and service_info.compose_path:
|
|
348
|
+
full_repo_path: Path = Path(service_info.compose_path).joinpath(service_info.git_repo_path)
|
|
440
349
|
|
|
441
350
|
git_trust(full_repo_path, Path(self.node_cfg.git_path))
|
|
442
|
-
|
|
443
|
-
|
|
351
|
+
service_info.git_local_timestamp = git_iso_timestamp(full_repo_path, Path(self.node_cfg.git_path))
|
|
352
|
+
|
|
444
353
|
can_pull: bool = (
|
|
445
354
|
self.cfg.allow_pull
|
|
446
|
-
and
|
|
447
|
-
and
|
|
448
|
-
and
|
|
355
|
+
and not local_info.local_build
|
|
356
|
+
and local_info.ref is not None
|
|
357
|
+
and local_info.ref != ""
|
|
358
|
+
and (local_info.short_digest is not None or latest_info.short_digest is not None)
|
|
449
359
|
)
|
|
450
360
|
if self.cfg.allow_pull and not can_pull:
|
|
451
361
|
logger.debug(
|
|
452
|
-
f"Pull unavailable,
|
|
362
|
+
f"Pull unavailable, ref:{local_info.ref},local:{local_info.short_digest},latest:{latest_info.short_digest}"
|
|
453
363
|
)
|
|
454
364
|
|
|
455
365
|
can_build: bool = False
|
|
456
366
|
if self.cfg.allow_build:
|
|
457
|
-
can_build =
|
|
367
|
+
can_build = service_info.git_repo_path is not None and service_info.compose_path is not None
|
|
458
368
|
if not can_build:
|
|
459
|
-
if
|
|
369
|
+
if service_info.git_repo_path is not None:
|
|
460
370
|
logger.debug(
|
|
461
|
-
"Local build ignored for git_repo_path=%s because no compose_path",
|
|
371
|
+
"Local build ignored for git_repo_path=%s because no compose_path", service_info.git_repo_path
|
|
462
372
|
)
|
|
463
373
|
else:
|
|
464
374
|
full_repo_path = self.full_repo_path(
|
|
465
|
-
cast("str",
|
|
375
|
+
cast("str", service_info.compose_path), cast("str", service_info.git_repo_path)
|
|
466
376
|
)
|
|
467
|
-
if
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
377
|
+
if local_info.local_build and full_repo_path:
|
|
378
|
+
git_versionish = git_local_digest(full_repo_path, Path(self.node_cfg.git_path))
|
|
379
|
+
if git_versionish:
|
|
380
|
+
local_info.git_digest = git_versionish
|
|
381
|
+
logger.debug("Git digest for local code %s", git_versionish)
|
|
382
|
+
|
|
383
|
+
behind_count: int = git_check_update_available(full_repo_path, Path(self.node_cfg.git_path))
|
|
384
|
+
if behind_count > 0:
|
|
385
|
+
latest_info.git_digest = f"{git_versionish}+{behind_count}"
|
|
386
|
+
logger.info("Git update available, generating version %s", latest_info.git_digest)
|
|
387
|
+
else:
|
|
388
|
+
logger.debug(f"Git update not available, local repo:{full_repo_path}")
|
|
389
|
+
latest_info.git_digest = git_versionish
|
|
390
|
+
|
|
391
|
+
can_restart: bool = self.cfg.allow_restart and service_info.compose_path is not None
|
|
481
392
|
|
|
482
|
-
if can_pull or can_build or can_restart:
|
|
483
|
-
# public install-neutral capabilities and Home Assistant features
|
|
484
|
-
can_update = True
|
|
485
|
-
features.append("INSTALL")
|
|
486
|
-
features.append("PROGRESS")
|
|
487
|
-
elif any((self.cfg.allow_build, self.cfg.allow_restart, self.cfg.allow_pull)):
|
|
488
|
-
logger.info(f"Update not available, can_pull:{can_pull}, can_build:{can_build},can_restart{can_restart}")
|
|
489
|
-
if relnotes_url:
|
|
490
|
-
features.append("RELEASE_NOTES")
|
|
491
393
|
if can_pull:
|
|
492
394
|
update_type = "Docker Image"
|
|
493
395
|
elif can_build:
|
|
494
396
|
update_type = "Docker Build"
|
|
495
397
|
else:
|
|
496
398
|
update_type = "Unavailable"
|
|
497
|
-
|
|
399
|
+
|
|
498
400
|
# can_pull,can_build etc are only info flags
|
|
499
401
|
# the HASS update process is driven by comparing current and available versions
|
|
500
402
|
|
|
501
|
-
public_installed_version
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
public_latest_version =
|
|
505
|
-
version_policy,
|
|
403
|
+
public_installed_version: str
|
|
404
|
+
public_latest_version: str
|
|
405
|
+
version_basis: str
|
|
406
|
+
public_installed_version, public_latest_version, version_basis = select_versions(
|
|
407
|
+
version_policy, local_info, latest_info
|
|
506
408
|
)
|
|
507
409
|
|
|
410
|
+
publish_policy: PublishPolicy = PublishPolicy.HOMEASSISTANT
|
|
411
|
+
img_ref_selection = Selection(self.cfg.image_ref_select, local_info.ref)
|
|
412
|
+
version_selection = Selection(self.cfg.version_select, latest_info.version)
|
|
413
|
+
if not img_ref_selection or not version_selection:
|
|
414
|
+
self.log.info(
|
|
415
|
+
"Excluding from HA Discovery for include/exclude rule: %s, %s", local_info.ref, latest_info.version
|
|
416
|
+
)
|
|
417
|
+
publish_policy = PublishPolicy.MQTT
|
|
418
|
+
|
|
508
419
|
discovery: Discovery = Discovery(
|
|
509
420
|
self,
|
|
510
421
|
c.name,
|
|
511
422
|
session,
|
|
512
423
|
node=self.node_cfg.name,
|
|
513
|
-
entity_picture_url=
|
|
514
|
-
release_url=relnotes_url,
|
|
515
|
-
release_summary=release_summary,
|
|
424
|
+
entity_picture_url=customization.picture or pkg_info.logo_url,
|
|
516
425
|
current_version=public_installed_version,
|
|
517
426
|
publish_policy=publish_policy,
|
|
518
427
|
update_policy=update_policy,
|
|
519
428
|
version_policy=version_policy,
|
|
429
|
+
version_basis=version_basis,
|
|
520
430
|
latest_version=public_latest_version,
|
|
521
431
|
device_icon=self.cfg.device_icon,
|
|
522
|
-
|
|
432
|
+
can_pull=can_pull,
|
|
523
433
|
update_type=update_type,
|
|
524
434
|
can_build=can_build,
|
|
525
435
|
can_restart=can_restart,
|
|
526
436
|
status=(c.status == "running" and "on") or "off",
|
|
527
|
-
|
|
528
|
-
features=features,
|
|
529
|
-
throttled=registry_throttled,
|
|
437
|
+
throttled=latest_info.throttled,
|
|
530
438
|
previous=previous_discovery,
|
|
439
|
+
release_detail=release_info,
|
|
440
|
+
installation_detail=service_info,
|
|
441
|
+
current_detail=local_info,
|
|
442
|
+
latest_detail=latest_info,
|
|
531
443
|
)
|
|
532
444
|
logger.debug("Analyze generated discovery: %s", discovery)
|
|
533
445
|
return discovery
|
|
@@ -540,13 +452,17 @@ class DockerProvider(ReleaseProvider):
|
|
|
540
452
|
# metadata_version: str = c.labels.get("org.opencontainers.image.version")
|
|
541
453
|
# metadata_revision: str = c.labels.get("org.opencontainers.image.revision")
|
|
542
454
|
|
|
543
|
-
async def scan(self, session: str) -> AsyncGenerator[Discovery]:
|
|
455
|
+
async def scan(self, session: str, shuffle: bool = True) -> AsyncGenerator[Discovery]:
|
|
544
456
|
logger = self.log.bind(session=session, action="scan", source=self.source_type)
|
|
545
457
|
containers: int = 0
|
|
546
458
|
results: int = 0
|
|
547
459
|
throttled: int = 0
|
|
548
|
-
|
|
549
|
-
|
|
460
|
+
|
|
461
|
+
targets: list[Container] = self.client.containers.list()
|
|
462
|
+
if shuffle:
|
|
463
|
+
random.shuffle(targets)
|
|
464
|
+
logger.debug("Starting scanning %s containers", len(targets))
|
|
465
|
+
for c in targets:
|
|
550
466
|
logger.debug("Analyzing container", container=c.name)
|
|
551
467
|
if self.stopped.is_set():
|
|
552
468
|
logger.info(f"Shutdown detected, aborting scan at {c}")
|
|
@@ -554,7 +470,7 @@ class DockerProvider(ReleaseProvider):
|
|
|
554
470
|
containers = containers + 1
|
|
555
471
|
result: Discovery | None = self.analyze(c, session)
|
|
556
472
|
if result:
|
|
557
|
-
logger.debug("Analyzed container", result_name=result.name,
|
|
473
|
+
logger.debug("Analyzed container", result_name=result.name, throttled=result.throttled)
|
|
558
474
|
self.discoveries[result.name] = result
|
|
559
475
|
results = results + 1
|
|
560
476
|
throttled += 1 if result.throttled else 0
|
|
@@ -580,10 +496,10 @@ class DockerProvider(ReleaseProvider):
|
|
|
580
496
|
logger.info("Starting update ...")
|
|
581
497
|
on_update_start(discovery)
|
|
582
498
|
if self.update(discovery):
|
|
583
|
-
logger.
|
|
499
|
+
logger.debug("Rescanning ...")
|
|
584
500
|
rediscovery = self.rescan(discovery)
|
|
585
|
-
updated = rediscovery is not None
|
|
586
|
-
logger.info("Rescanned
|
|
501
|
+
updated = rediscovery is not None and not rediscovery.throttled
|
|
502
|
+
logger.info("Rescanned, updated:%s", updated)
|
|
587
503
|
else:
|
|
588
504
|
logger.info("Rescan with no result")
|
|
589
505
|
on_update_end(rediscovery or discovery)
|
|
@@ -598,9 +514,149 @@ class DockerProvider(ReleaseProvider):
|
|
|
598
514
|
def resolve(self, discovery_name: str) -> Discovery | None:
|
|
599
515
|
return self.discoveries.get(discovery_name)
|
|
600
516
|
|
|
601
|
-
def default_metadata(self,
|
|
517
|
+
def default_metadata(self, image_info: DockerImageInfo) -> PackageUpdateInfo:
|
|
602
518
|
for enricher in self.pkg_enrichers:
|
|
603
|
-
pkg_info = enricher.enrich(
|
|
519
|
+
pkg_info = enricher.enrich(image_info)
|
|
604
520
|
if pkg_info is not None:
|
|
605
521
|
return pkg_info
|
|
606
522
|
raise ValueError("No enricher could provide metadata, not even default enricher")
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
def select_versions(version_policy: VersionPolicy, installed: DockerImageInfo, latest: DockerImageInfo) -> tuple[str, str, str]:
    """Pick the best version strings to display based on the version policy and available data.

    Ensures that both local installed and remote latest versions are derived the same way.
    Falls back to digest if a version is not reliable or not consistent with the
    current/available version.

    Args:
        version_policy: policy controlling which identity (version/digest/auto) is preferred
        installed: info for the locally installed image
        latest: info for the newest image found remotely

    Returns:
        (installed_display, latest_display, basis) — basis records which rule and
        phase produced the result, plus any shortcircuit code for traceability.
    """
    phase: int = 0
    shortcircuit: str | None = None

    def basis(rule: str) -> str:
        # Encode rule, phase and (when set) the shortcircuit code so the chosen
        # path can be reconstructed from logs/attributes
        return f"{rule}-{phase}" if not shortcircuit else f"{rule}-{phase}-{shortcircuit}"

    # shortcircuit the logic if there's nothing to compare: flatten `latest` onto
    # `installed` so both slots match and no update alert is raised
    if latest.throttled:
        log.debug("Flattening versions for throttled update %s", installed.ref)
        shortcircuit = "THR"
        latest = installed
    elif not any((latest.short_digest, latest.repo_digest, latest.git_digest, latest.version)):
        log.debug("Flattening versions for empty update %s", installed.ref)
        shortcircuit = "NUP"
        latest = installed
    elif latest.short_digest == installed.short_digest and latest.short_digest is not None:
        log.debug("Flattening versions for identical update %s", installed.ref)
        shortcircuit = "SDM"
        latest = installed
    elif installed.image_digest in latest.repo_digests:
        # TODO: avoid this by better adaptations for different registries and single/multi manifests
        log.debug(
            "Matching new repo_digest against installed image digest for %s image %s", installed.index_name, installed.name
        )
        shortcircuit = "FGA"
        latest = installed
    elif latest.image_digest in installed.repo_digests:
        # TODO: avoid this by better adaptations for different registries and single/multi manifests
        log.debug(
            "Matching new image_digest against installed repo digest for %s image %s", installed.index_name, installed.name
        )
        shortcircuit = "FGB"
        latest = installed

    # Phase 0: explicit policies, honoured only when the data they need is present
    if version_policy == VersionPolicy.VERSION and installed.version and latest.version:
        return installed.version, latest.version, basis("version")

    installed_digest_available: bool = installed.short_digest is not None and installed.short_digest != ""
    latest_digest_available: bool = latest.short_digest is not None and latest.short_digest != ""

    if version_policy == VersionPolicy.DIGEST and installed_digest_available and latest_digest_available:
        return installed.short_digest, latest.short_digest, basis("digest")  # type: ignore[return-value]
    if (
        version_policy == VersionPolicy.VERSION_DIGEST
        and installed.version
        and latest.version
        and installed_digest_available
        and latest_digest_available
    ):
        return (
            f"{installed.version}:{installed.short_digest}",
            f"{latest.version}:{latest.short_digest}",
            basis("version-digest"),
        )

    phase = 1
    if version_policy == VersionPolicy.AUTO and (
        (installed.version == latest.version and installed.short_digest == latest.short_digest)
        or (installed.version != latest.version and installed.short_digest != latest.short_digest)
    ):
        # detect semver, or casual semver (e.g. v1.030)
        # only use this if both version and digest are consistently agreeing or disagreeing
        # if the strict conditions work, people see nice version numbers on screen rather than hashes
        if (
            installed.version
            and re.match(SEMVER_RE, installed.version)
            and latest.version
            and re.match(SEMVER_RE, latest.version)
        ):
            # Smells like semver, override if not using version_policy
            return installed.version, latest.version, basis("semver")
        if (
            installed.version
            and re.match(VERSION_RE, installed.version)
            and latest.version
            and re.match(VERSION_RE, latest.version)
        ):
            # Smells like casual semver, override if not using version_policy
            return installed.version, latest.version, basis("casualver")

    # AUTO or fallback: richest combination first
    phase = 2
    if installed.version and latest.version and installed_digest_available and latest_digest_available:
        return (
            f"{installed.version}:{installed.short_digest}",
            f"{latest.version}:{latest.short_digest}",
            basis("version-digest"),
        )

    if installed.version and latest.version:
        return installed.version, latest.version, basis("version")

    # Check for local builds
    phase = 3
    if installed.git_digest and latest.git_digest:
        return f"git:{installed.git_digest}", f"git:{latest.git_digest}", basis("git")

    # Fall back to digests, image or repo index
    phase = 4
    if installed_digest_available and latest_digest_available:
        return installed.short_digest, latest.short_digest, basis("digest")  # type: ignore[return-value]
    if installed.version and not latest.version and not latest.short_digest and not latest.repo_digest:
        return installed.version, installed.version, basis("version")
    phase = 5
    if not installed_digest_available and latest_digest_available:
        # odd condition if local image has no identity, even out versions so no update alert
        return latest.short_digest, latest.short_digest, basis("digest")  # type: ignore[return-value]

    # Fall back to repo digests
    phase = 6

    def condense_repo_id(i: DockerImageInfo) -> str:
        v: str | None = i.condense_digest(i.repo_digest) if i.repo_digest else None
        return v or ""

    if installed.repo_digest and latest.repo_digest:
        # where the image digest isn't available, fall back to a repo digest
        return condense_repo_id(installed), condense_repo_id(latest), basis("repo-digest")

    phase = 7
    if latest.repo_digest and latest.repo_digest in installed.repo_digests:
        # installed has multiple RepoDigests from multiple pulls and one of them matches latest current repo digest
        return condense_repo_id(latest), condense_repo_id(latest), basis("repo-digest")

    if installed_digest_available and not latest_digest_available:
        # NOTE(review): latest.short_digest is None/"" on this path, so the "latest"
        # slot is empty — presumably intentional to surface a missing remote
        # identity; confirm against consumers of this tuple
        return installed.short_digest, latest.short_digest, basis("digest")  # type: ignore[return-value]

    log.warning("No versions can be determined for %s", installed.ref)
    phase = 999
    return UNKNOWN_VERSION, UNKNOWN_VERSION, basis("failure")
|