updates2mqtt 1.6.0__py3-none-any.whl → 1.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- updates2mqtt/app.py +10 -7
- updates2mqtt/cli.py +150 -0
- updates2mqtt/config.py +61 -20
- updates2mqtt/hass_formatter.py +12 -21
- updates2mqtt/helpers.py +226 -0
- updates2mqtt/integrations/docker.py +356 -301
- updates2mqtt/integrations/docker_enrich.py +876 -0
- updates2mqtt/integrations/git_utils.py +5 -5
- updates2mqtt/model.py +147 -24
- updates2mqtt/mqtt.py +31 -5
- {updates2mqtt-1.6.0.dist-info → updates2mqtt-1.7.2.dist-info}/METADATA +21 -19
- updates2mqtt-1.7.2.dist-info/RECORD +18 -0
- {updates2mqtt-1.6.0.dist-info → updates2mqtt-1.7.2.dist-info}/WHEEL +2 -2
- {updates2mqtt-1.6.0.dist-info → updates2mqtt-1.7.2.dist-info}/entry_points.txt +1 -0
- updates2mqtt-1.6.0.dist-info/RECORD +0 -15
|
@@ -1,10 +1,10 @@
|
|
|
1
|
+
import random
|
|
1
2
|
import re
|
|
2
3
|
import subprocess
|
|
3
4
|
import time
|
|
4
5
|
import typing
|
|
5
6
|
from collections.abc import AsyncGenerator, Callable
|
|
6
7
|
from enum import Enum
|
|
7
|
-
from http import HTTPStatus
|
|
8
8
|
from pathlib import Path
|
|
9
9
|
from threading import Event
|
|
10
10
|
from typing import Any, cast
|
|
@@ -12,22 +12,43 @@ from typing import Any, cast
|
|
|
12
12
|
import docker
|
|
13
13
|
import docker.errors
|
|
14
14
|
import structlog
|
|
15
|
-
from docker.auth import resolve_repository_name
|
|
16
15
|
from docker.models.containers import Container
|
|
17
|
-
from hishel.httpx import SyncCacheClient
|
|
18
16
|
|
|
19
|
-
from updates2mqtt.config import
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
17
|
+
from updates2mqtt.config import (
|
|
18
|
+
SEMVER_RE,
|
|
19
|
+
UNKNOWN_VERSION,
|
|
20
|
+
VERSION_RE,
|
|
21
|
+
DockerConfig,
|
|
22
|
+
NodeConfig,
|
|
23
|
+
PackageUpdateInfo,
|
|
24
|
+
PublishPolicy,
|
|
25
|
+
RegistryAPI,
|
|
26
|
+
UpdatePolicy,
|
|
27
|
+
VersionPolicy,
|
|
28
|
+
)
|
|
29
|
+
from updates2mqtt.helpers import Selection, Throttler
|
|
30
|
+
from updates2mqtt.integrations.docker_enrich import (
|
|
31
|
+
CommonPackageEnricher,
|
|
32
|
+
ContainerDistributionAPIVersionLookup,
|
|
33
|
+
DefaultPackageEnricher,
|
|
34
|
+
DockerClientVersionLookup,
|
|
35
|
+
DockerImageInfo,
|
|
36
|
+
DockerServiceDetails,
|
|
37
|
+
LinuxServerIOPackageEnricher,
|
|
38
|
+
LocalContainerInfo,
|
|
39
|
+
PackageEnricher,
|
|
40
|
+
SourceReleaseEnricher,
|
|
41
|
+
)
|
|
42
|
+
from updates2mqtt.model import Discovery, ReleaseDetail, ReleaseProvider
|
|
43
|
+
|
|
44
|
+
from .git_utils import git_check_update_available, git_iso_timestamp, git_local_digest, git_pull, git_trust
|
|
23
45
|
|
|
24
46
|
if typing.TYPE_CHECKING:
|
|
25
|
-
from docker.models.images import Image
|
|
47
|
+
from docker.models.images import Image
|
|
26
48
|
|
|
27
49
|
# distinguish docker build from docker pull?
|
|
28
50
|
|
|
29
51
|
log = structlog.get_logger()
|
|
30
|
-
NO_KNOWN_IMAGE = "UNKNOWN"
|
|
31
52
|
|
|
32
53
|
|
|
33
54
|
class DockerComposeCommand(Enum):
|
|
@@ -46,13 +67,13 @@ class ContainerCustomization:
|
|
|
46
67
|
env_prefix: str = "UPD2MQTT_"
|
|
47
68
|
|
|
48
69
|
def __init__(self, container: Container) -> None:
|
|
49
|
-
self.update:
|
|
70
|
+
self.update: UpdatePolicy = UpdatePolicy.PASSIVE # was known as UPD2MQTT_UPDATE before policies and labels
|
|
50
71
|
self.git_repo_path: str | None = None
|
|
51
72
|
self.picture: str | None = None
|
|
52
73
|
self.relnotes: str | None = None
|
|
53
74
|
self.ignore: bool = False
|
|
54
|
-
self.
|
|
55
|
-
self.
|
|
75
|
+
self.version_policy: VersionPolicy | None = None
|
|
76
|
+
self.registry_token: str | None = None
|
|
56
77
|
|
|
57
78
|
if not container.attrs or container.attrs.get("Config") is None:
|
|
58
79
|
return
|
|
@@ -93,30 +114,43 @@ class ContainerCustomization:
|
|
|
93
114
|
if v is not None:
|
|
94
115
|
if isinstance(getattr(self, attr), bool):
|
|
95
116
|
setattr(self, attr, v.upper() in ("TRUE", "YES", "1"))
|
|
117
|
+
elif isinstance(getattr(self, attr), VersionPolicy):
|
|
118
|
+
setattr(self, attr, VersionPolicy[v.upper()])
|
|
119
|
+
elif isinstance(getattr(self, attr), UpdatePolicy):
|
|
120
|
+
setattr(self, attr, UpdatePolicy[v.upper()])
|
|
96
121
|
else:
|
|
97
122
|
setattr(self, attr, v)
|
|
98
123
|
|
|
99
|
-
self.update = self.update.upper()
|
|
100
|
-
|
|
101
124
|
|
|
102
125
|
class DockerProvider(ReleaseProvider):
|
|
103
126
|
def __init__(
|
|
104
127
|
self,
|
|
105
128
|
cfg: DockerConfig,
|
|
106
|
-
common_pkg_cfg: dict[str, PackageUpdateInfo],
|
|
107
129
|
node_cfg: NodeConfig,
|
|
108
130
|
self_bounce: Event | None = None,
|
|
109
131
|
) -> None:
|
|
110
|
-
super().__init__("docker")
|
|
132
|
+
super().__init__(node_cfg, "docker")
|
|
111
133
|
self.client: docker.DockerClient = docker.from_env()
|
|
112
134
|
self.cfg: DockerConfig = cfg
|
|
113
|
-
|
|
114
|
-
self.common_pkgs: dict[str, PackageUpdateInfo] = common_pkg_cfg if common_pkg_cfg else {}
|
|
135
|
+
|
|
115
136
|
# TODO: refresh discovered packages periodically
|
|
116
|
-
self.
|
|
117
|
-
self.pause_api_until: dict[str, float] = {}
|
|
118
|
-
self.api_throttle_pause: int = cfg.api_throttle_wait
|
|
137
|
+
self.throttler = Throttler(self.cfg.default_api_backoff, self.log, self.stopped)
|
|
119
138
|
self.self_bounce: Event | None = self_bounce
|
|
139
|
+
self.pkg_enrichers: list[PackageEnricher] = [
|
|
140
|
+
CommonPackageEnricher(self.cfg),
|
|
141
|
+
LinuxServerIOPackageEnricher(self.cfg),
|
|
142
|
+
DefaultPackageEnricher(self.cfg),
|
|
143
|
+
]
|
|
144
|
+
self.docker_client_image_lookup = DockerClientVersionLookup(
|
|
145
|
+
self.client, self.throttler, self.cfg.registry, self.cfg.default_api_backoff
|
|
146
|
+
)
|
|
147
|
+
self.registry_image_lookup = ContainerDistributionAPIVersionLookup(self.throttler, self.cfg.registry)
|
|
148
|
+
self.release_enricher = SourceReleaseEnricher()
|
|
149
|
+
self.local_info_builder = LocalContainerInfo()
|
|
150
|
+
|
|
151
|
+
def initialize(self) -> None:
|
|
152
|
+
for enricher in self.pkg_enrichers:
|
|
153
|
+
enricher.initialize()
|
|
120
154
|
|
|
121
155
|
def update(self, discovery: Discovery) -> bool:
|
|
122
156
|
logger: Any = self.log.bind(container=discovery.name, action="update")
|
|
@@ -129,19 +163,21 @@ class DockerProvider(ReleaseProvider):
|
|
|
129
163
|
|
|
130
164
|
def fetch(self, discovery: Discovery) -> None:
|
|
131
165
|
logger = self.log.bind(container=discovery.name, action="fetch")
|
|
166
|
+
installed_info: DockerImageInfo | None = cast("DockerImageInfo|None", discovery.current_detail)
|
|
167
|
+
service_info: DockerServiceDetails | None = cast("DockerServiceDetails|None", discovery.installation_detail)
|
|
132
168
|
|
|
133
|
-
image_ref: str | None =
|
|
134
|
-
platform: str | None =
|
|
135
|
-
if discovery.
|
|
169
|
+
image_ref: str | None = installed_info.ref if installed_info else None
|
|
170
|
+
platform: str | None = installed_info.platform if installed_info else None
|
|
171
|
+
if discovery.can_pull and image_ref:
|
|
136
172
|
logger.info("Pulling", image_ref=image_ref, platform=platform)
|
|
137
173
|
image: Image = self.client.images.pull(image_ref, platform=platform, all_tags=False)
|
|
138
174
|
if image:
|
|
139
175
|
logger.info("Pulled", image_id=image.id, image_ref=image_ref, platform=platform)
|
|
140
176
|
else:
|
|
141
177
|
logger.warn("Unable to pull", image_ref=image_ref, platform=platform)
|
|
142
|
-
elif discovery.can_build:
|
|
143
|
-
compose_path: str | None =
|
|
144
|
-
git_repo_path: str | None =
|
|
178
|
+
elif discovery.can_build and service_info:
|
|
179
|
+
compose_path: str | None = service_info.compose_path
|
|
180
|
+
git_repo_path: str | None = service_info.git_repo_path
|
|
145
181
|
logger.debug("can_build check", git_repo=git_repo_path)
|
|
146
182
|
if not compose_path or not git_repo_path:
|
|
147
183
|
logger.warn("No compose path or git repo path configured, skipped build")
|
|
@@ -149,10 +185,7 @@ class DockerProvider(ReleaseProvider):
|
|
|
149
185
|
|
|
150
186
|
full_repo_path: Path = self.full_repo_path(compose_path, git_repo_path)
|
|
151
187
|
if git_pull(full_repo_path, Path(self.node_cfg.git_path)):
|
|
152
|
-
|
|
153
|
-
self.build(discovery, compose_path)
|
|
154
|
-
else:
|
|
155
|
-
logger.warn("No compose path configured, skipped build")
|
|
188
|
+
self.build(discovery)
|
|
156
189
|
else:
|
|
157
190
|
logger.debug("Skipping git_pull, no update")
|
|
158
191
|
|
|
@@ -163,14 +196,19 @@ class DockerProvider(ReleaseProvider):
|
|
|
163
196
|
return Path(compose_path) / git_repo_path
|
|
164
197
|
return Path(git_repo_path)
|
|
165
198
|
|
|
166
|
-
def build(self, discovery: Discovery
|
|
199
|
+
def build(self, discovery: Discovery) -> bool:
|
|
167
200
|
logger = self.log.bind(container=discovery.name, action="build")
|
|
168
|
-
|
|
201
|
+
service_info: DockerServiceDetails | None = cast("DockerServiceDetails|None", discovery.installation_detail)
|
|
202
|
+
|
|
203
|
+
if not service_info or not service_info.compose_path:
|
|
204
|
+
logger.warn("No service_info available on compose")
|
|
205
|
+
return False
|
|
206
|
+
logger.info("Building", compose_path=service_info.compose_path, service=service_info.compose_service)
|
|
169
207
|
return self.execute_compose(
|
|
170
208
|
command=DockerComposeCommand.BUILD,
|
|
171
209
|
args="",
|
|
172
|
-
service=
|
|
173
|
-
cwd=compose_path,
|
|
210
|
+
service=service_info.compose_service,
|
|
211
|
+
cwd=service_info.compose_path,
|
|
174
212
|
logger=logger,
|
|
175
213
|
)
|
|
176
214
|
|
|
@@ -204,16 +242,28 @@ class DockerProvider(ReleaseProvider):
|
|
|
204
242
|
|
|
205
243
|
def restart(self, discovery: Discovery) -> bool:
|
|
206
244
|
logger = self.log.bind(container=discovery.name, action="restart")
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
245
|
+
installed_info: DockerImageInfo | None = cast("DockerImageInfo|None", discovery.current_detail)
|
|
246
|
+
service_info: DockerServiceDetails | None = cast("DockerServiceDetails|None", discovery.installation_detail)
|
|
247
|
+
|
|
248
|
+
if (
|
|
249
|
+
self.self_bounce is not None
|
|
250
|
+
and installed_info
|
|
251
|
+
and service_info
|
|
252
|
+
and (
|
|
253
|
+
"ghcr.io/rhizomatics/updates2mqtt" in installed_info.ref
|
|
254
|
+
or (service_info.git_repo_path and service_info.git_repo_path.endswith("updates2mqtt"))
|
|
255
|
+
)
|
|
210
256
|
):
|
|
211
257
|
logger.warning("Attempting to self-bounce")
|
|
212
258
|
self.self_bounce.set()
|
|
213
|
-
|
|
214
|
-
|
|
259
|
+
if service_info is None:
|
|
260
|
+
return False
|
|
215
261
|
return self.execute_compose(
|
|
216
|
-
command=DockerComposeCommand.UP,
|
|
262
|
+
command=DockerComposeCommand.UP,
|
|
263
|
+
args="--detach --yes",
|
|
264
|
+
service=service_info.compose_service,
|
|
265
|
+
cwd=service_info.compose_path,
|
|
266
|
+
logger=logger,
|
|
217
267
|
)
|
|
218
268
|
|
|
219
269
|
def rescan(self, discovery: Discovery) -> Discovery | None:
|
|
@@ -221,8 +271,8 @@ class DockerProvider(ReleaseProvider):
|
|
|
221
271
|
try:
|
|
222
272
|
c: Container = self.client.containers.get(discovery.name)
|
|
223
273
|
if c:
|
|
224
|
-
rediscovery = self.analyze(c, discovery.session,
|
|
225
|
-
if rediscovery:
|
|
274
|
+
rediscovery = self.analyze(c, discovery.session, previous_discovery=discovery)
|
|
275
|
+
if rediscovery and not rediscovery.throttled:
|
|
226
276
|
self.discoveries[rediscovery.name] = rediscovery
|
|
227
277
|
return rediscovery
|
|
228
278
|
logger.warn("Unable to find container for rescan")
|
|
@@ -232,22 +282,9 @@ class DockerProvider(ReleaseProvider):
|
|
|
232
282
|
logger.exception("Docker API error retrieving container")
|
|
233
283
|
return None
|
|
234
284
|
|
|
235
|
-
def
|
|
236
|
-
if self.pause_api_until.get(repo_id) is not None:
|
|
237
|
-
if self.pause_api_until[repo_id] < time.time():
|
|
238
|
-
del self.pause_api_until[repo_id]
|
|
239
|
-
log.info("%s throttling wait complete", repo_id)
|
|
240
|
-
else:
|
|
241
|
-
log.debug("%s throttling has %s secs left", repo_id, self.pause_api_until[repo_id] - time.time())
|
|
242
|
-
return True
|
|
243
|
-
return False
|
|
244
|
-
|
|
245
|
-
def analyze(self, c: Container, session: str, original_discovery: Discovery | None = None) -> Discovery | None:
|
|
285
|
+
def analyze(self, c: Container, session: str, previous_discovery: Discovery | None = None) -> Discovery | None:
|
|
246
286
|
logger = self.log.bind(container=c.name, action="analyze")
|
|
247
287
|
|
|
248
|
-
image_ref: str | None = None
|
|
249
|
-
image_name: str | None = None
|
|
250
|
-
local_versions = None
|
|
251
288
|
if c.attrs is None or not c.attrs:
|
|
252
289
|
logger.warn("No container attributes found, discovery rejected")
|
|
253
290
|
return None
|
|
@@ -259,202 +296,150 @@ class DockerProvider(ReleaseProvider):
|
|
|
259
296
|
if customization.ignore:
|
|
260
297
|
logger.info("Container ignored due to UPD2MQTT_IGNORE setting")
|
|
261
298
|
return None
|
|
299
|
+
version_policy: VersionPolicy = (
|
|
300
|
+
self.cfg.version_policy if not customization.version_policy else customization.version_policy
|
|
301
|
+
)
|
|
302
|
+
if customization.update == UpdatePolicy.AUTO:
|
|
303
|
+
logger.debug("Auto update policy detected")
|
|
304
|
+
update_policy: UpdatePolicy = customization.update or UpdatePolicy.PASSIVE
|
|
262
305
|
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
else:
|
|
268
|
-
image_ref = c.attrs.get("Config", {}).get("Image")
|
|
269
|
-
if image_ref is None:
|
|
270
|
-
logger.warn("No image or image attributes found")
|
|
271
|
-
else:
|
|
272
|
-
repo_id, _ = resolve_repository_name(image_ref)
|
|
273
|
-
try:
|
|
274
|
-
image_name = image_ref.split(":")[0]
|
|
275
|
-
except Exception as e:
|
|
276
|
-
logger.warn("No tags found (%s) : %s", image, e)
|
|
277
|
-
if image is not None and image.attrs is not None:
|
|
278
|
-
try:
|
|
279
|
-
local_versions = [i.split("@")[1][7:19] for i in image.attrs["RepoDigests"]]
|
|
280
|
-
except Exception as e:
|
|
281
|
-
logger.warn("Cannot determine local version: %s", e)
|
|
282
|
-
logger.warn("RepoDigests=%s", image.attrs.get("RepoDigests"))
|
|
283
|
-
|
|
284
|
-
platform: str = "Unknown"
|
|
285
|
-
pkg_info: PackageUpdateInfo = self.default_metadata(image_name, image_ref=image_ref)
|
|
306
|
+
local_info: DockerImageInfo
|
|
307
|
+
service_info: DockerServiceDetails
|
|
308
|
+
local_info, service_info = self.local_info_builder.build_image_info(c)
|
|
309
|
+
pkg_info: PackageUpdateInfo = self.default_metadata(local_info)
|
|
286
310
|
|
|
287
311
|
try:
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
)
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
log.debug(
|
|
313
|
-
"Registry Data: id:%s,image:%s, attrs:%s",
|
|
314
|
-
reg_data.id,
|
|
315
|
-
reg_data.image_name,
|
|
316
|
-
reg_data.attrs,
|
|
317
|
-
)
|
|
318
|
-
latest_version = reg_data.short_id[7:] if reg_data else None
|
|
319
|
-
except docker.errors.APIError as e:
|
|
320
|
-
if e.status_code == HTTPStatus.TOO_MANY_REQUESTS:
|
|
321
|
-
logger.warn("Docker Registry throttling requests, %s", e.explanation)
|
|
322
|
-
self.pause_api_until[repo_id] = time.time() + self.api_throttle_pause
|
|
323
|
-
return None
|
|
324
|
-
retries_left -= 1
|
|
325
|
-
if retries_left == 0 or e.is_client_error():
|
|
326
|
-
logger.warn("Failed to fetch registry data: [%s] %s", e.errno, e.explanation)
|
|
327
|
-
else:
|
|
328
|
-
logger.debug("Failed to fetch registry data, retrying: %s", e)
|
|
329
|
-
|
|
330
|
-
local_version: str | None = NO_KNOWN_IMAGE
|
|
331
|
-
if local_versions:
|
|
332
|
-
# might be multiple RepoDigests if image has been pulled multiple times with diff manifests
|
|
333
|
-
local_version = latest_version if latest_version in local_versions else local_versions[0]
|
|
334
|
-
log.debug(f"Setting local version to {local_version}, local_versions:{local_versions}")
|
|
335
|
-
|
|
336
|
-
def save_if_set(key: str, val: str | None) -> None:
|
|
337
|
-
if val is not None:
|
|
338
|
-
custom[key] = val
|
|
339
|
-
|
|
340
|
-
image_ref = image_ref or ""
|
|
341
|
-
|
|
342
|
-
custom: dict[str, str | bool] = {}
|
|
343
|
-
custom["platform"] = platform
|
|
344
|
-
custom["image_ref"] = image_ref
|
|
345
|
-
custom["repo_id"] = repo_id
|
|
346
|
-
if registry_throttled:
|
|
347
|
-
custom["registry_throttled"] = True
|
|
348
|
-
save_if_set("compose_path", c.labels.get("com.docker.compose.project.working_dir"))
|
|
349
|
-
save_if_set("compose_version", c.labels.get("com.docker.compose.version"))
|
|
350
|
-
save_if_set("compose_service", c.labels.get("com.docker.compose.service"))
|
|
351
|
-
save_if_set("git_repo_path", customization.git_repo_path)
|
|
352
|
-
# save_if_set("apt_pkgs", c_env.get("UPD2MQTT_APT_PKGS"))
|
|
353
|
-
|
|
354
|
-
if customization.update == "AUTO":
|
|
355
|
-
logger.debug("Auto update policy detected")
|
|
356
|
-
update_policy = "Auto"
|
|
312
|
+
service_info.git_repo_path = customization.git_repo_path
|
|
313
|
+
|
|
314
|
+
registry_selection = Selection(self.cfg.registry_select, local_info.index_name)
|
|
315
|
+
latest_info: DockerImageInfo
|
|
316
|
+
if local_info.pinned:
|
|
317
|
+
logger.debug("Skipping registry fetch for local pinned image, %s", local_info.ref)
|
|
318
|
+
latest_info = local_info.reuse()
|
|
319
|
+
elif registry_selection and local_info.ref and not local_info.local_build:
|
|
320
|
+
if self.cfg.registry.api == RegistryAPI.DOCKER_CLIENT:
|
|
321
|
+
latest_info = self.docker_client_image_lookup.lookup(local_info)
|
|
322
|
+
elif self.cfg.registry.api == RegistryAPI.OCI_V2:
|
|
323
|
+
latest_info = self.registry_image_lookup.lookup(local_info, token=customization.registry_token)
|
|
324
|
+
elif self.cfg.registry.api == RegistryAPI.OCI_V2_MINIMAL:
|
|
325
|
+
latest_info = self.registry_image_lookup.lookup(
|
|
326
|
+
local_info, token=customization.registry_token, minimal=True
|
|
327
|
+
)
|
|
328
|
+
else: # assuming RegistryAPI.DISABLED
|
|
329
|
+
logger.debug(f"Skipping registry check, disabled in config {self.cfg.registry.api}")
|
|
330
|
+
latest_info = local_info.reuse()
|
|
331
|
+
elif local_info.local_build:
|
|
332
|
+
# assume its a locally built image if no RepoDigests available
|
|
333
|
+
latest_info = local_info.reuse()
|
|
334
|
+
latest_info.short_digest = None
|
|
335
|
+
latest_info.image_digest = None
|
|
357
336
|
else:
|
|
358
|
-
|
|
337
|
+
logger.debug("Registry selection rules suppressed metadata lookup")
|
|
338
|
+
latest_info = local_info.reuse()
|
|
359
339
|
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
340
|
+
release_info: ReleaseDetail | None = self.release_enricher.enrich(
|
|
341
|
+
latest_info,
|
|
342
|
+
source_repo_url=pkg_info.source_repo_url,
|
|
343
|
+
notes_url=customization.relnotes or pkg_info.release_notes_url,
|
|
344
|
+
)
|
|
345
|
+
logger.debug("Enriched release info: %s", release_info)
|
|
346
|
+
|
|
347
|
+
if service_info.git_repo_path and service_info.compose_path:
|
|
348
|
+
full_repo_path: Path = Path(service_info.compose_path).joinpath(service_info.git_repo_path)
|
|
364
349
|
|
|
365
350
|
git_trust(full_repo_path, Path(self.node_cfg.git_path))
|
|
366
|
-
|
|
367
|
-
|
|
351
|
+
service_info.git_local_timestamp = git_iso_timestamp(full_repo_path, Path(self.node_cfg.git_path))
|
|
352
|
+
|
|
368
353
|
can_pull: bool = (
|
|
369
354
|
self.cfg.allow_pull
|
|
370
|
-
and
|
|
371
|
-
and
|
|
372
|
-
and
|
|
355
|
+
and not local_info.local_build
|
|
356
|
+
and local_info.ref is not None
|
|
357
|
+
and local_info.ref != ""
|
|
358
|
+
and (local_info.short_digest is not None or latest_info.short_digest is not None)
|
|
373
359
|
)
|
|
374
360
|
if self.cfg.allow_pull and not can_pull:
|
|
375
361
|
logger.debug(
|
|
376
|
-
f"Pull
|
|
362
|
+
f"Pull unavailable, ref:{local_info.ref},local:{local_info.short_digest},latest:{latest_info.short_digest}"
|
|
377
363
|
)
|
|
378
|
-
skip_pull: bool = False
|
|
379
|
-
if can_pull and latest_version is not None:
|
|
380
|
-
if customization.version_include and not re.match(customization.version_include, latest_version):
|
|
381
|
-
logger.info(f"Skipping version {latest_version} not matching include pattern")
|
|
382
|
-
skip_pull = True
|
|
383
|
-
latest_version = local_version
|
|
384
|
-
if customization.version_exclude and re.match(customization.version_exclude, latest_version): # type: ignore[arg-type]
|
|
385
|
-
logger.info(f"Skipping version {latest_version} matching exclude pattern")
|
|
386
|
-
skip_pull = True
|
|
387
|
-
latest_version = local_version
|
|
388
364
|
|
|
389
365
|
can_build: bool = False
|
|
390
366
|
if self.cfg.allow_build:
|
|
391
|
-
can_build =
|
|
367
|
+
can_build = service_info.git_repo_path is not None and service_info.compose_path is not None
|
|
392
368
|
if not can_build:
|
|
393
|
-
if
|
|
394
|
-
|
|
395
|
-
"Local build ignored for git_repo_path=%s because no compose_path",
|
|
369
|
+
if service_info.git_repo_path is not None:
|
|
370
|
+
logger.debug(
|
|
371
|
+
"Local build ignored for git_repo_path=%s because no compose_path", service_info.git_repo_path
|
|
396
372
|
)
|
|
397
373
|
else:
|
|
398
374
|
full_repo_path = self.full_repo_path(
|
|
399
|
-
cast("str",
|
|
375
|
+
cast("str", service_info.compose_path), cast("str", service_info.git_repo_path)
|
|
400
376
|
)
|
|
401
|
-
if
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
can_update = True
|
|
419
|
-
features.append("INSTALL")
|
|
420
|
-
features.append("PROGRESS")
|
|
421
|
-
elif any((self.cfg.allow_build, self.cfg.allow_restart, self.cfg.allow_pull)):
|
|
422
|
-
logger.info(f"Update not available, can_pull:{can_pull}, can_build:{can_build},can_restart{can_restart}")
|
|
423
|
-
if relnotes_url:
|
|
424
|
-
features.append("RELEASE_NOTES")
|
|
425
|
-
if skip_pull:
|
|
426
|
-
update_type: str = "Skipped"
|
|
427
|
-
elif can_pull:
|
|
377
|
+
if local_info.local_build and full_repo_path:
|
|
378
|
+
git_versionish = git_local_digest(full_repo_path, Path(self.node_cfg.git_path))
|
|
379
|
+
if git_versionish:
|
|
380
|
+
local_info.git_digest = git_versionish
|
|
381
|
+
logger.debug("Git digest for local code %s", git_versionish)
|
|
382
|
+
|
|
383
|
+
behind_count: int = git_check_update_available(full_repo_path, Path(self.node_cfg.git_path))
|
|
384
|
+
if behind_count > 0:
|
|
385
|
+
latest_info.git_digest = f"{git_versionish}+{behind_count}"
|
|
386
|
+
logger.info("Git update available, generating version %s", latest_info.git_digest)
|
|
387
|
+
else:
|
|
388
|
+
logger.debug(f"Git update not available, local repo:{full_repo_path}")
|
|
389
|
+
latest_info.git_digest = git_versionish
|
|
390
|
+
|
|
391
|
+
can_restart: bool = self.cfg.allow_restart and service_info.compose_path is not None
|
|
392
|
+
|
|
393
|
+
if can_pull:
|
|
428
394
|
update_type = "Docker Image"
|
|
429
395
|
elif can_build:
|
|
430
396
|
update_type = "Docker Build"
|
|
431
397
|
else:
|
|
432
398
|
update_type = "Unavailable"
|
|
433
|
-
|
|
434
|
-
custom["skip_pull"] = skip_pull
|
|
399
|
+
|
|
435
400
|
# can_pull,can_build etc are only info flags
|
|
436
401
|
# the HASS update process is driven by comparing current and available versions
|
|
437
402
|
|
|
403
|
+
public_installed_version: str
|
|
404
|
+
public_latest_version: str
|
|
405
|
+
version_basis: str
|
|
406
|
+
public_installed_version, public_latest_version, version_basis = select_versions(
|
|
407
|
+
version_policy, local_info, latest_info
|
|
408
|
+
)
|
|
409
|
+
|
|
410
|
+
publish_policy: PublishPolicy = PublishPolicy.HOMEASSISTANT
|
|
411
|
+
img_ref_selection = Selection(self.cfg.image_ref_select, local_info.ref)
|
|
412
|
+
version_selection = Selection(self.cfg.version_select, latest_info.version)
|
|
413
|
+
if not img_ref_selection or not version_selection:
|
|
414
|
+
self.log.info(
|
|
415
|
+
"Excluding from HA Discovery for include/exclude rule: %s, %s", local_info.ref, latest_info.version
|
|
416
|
+
)
|
|
417
|
+
publish_policy = PublishPolicy.MQTT
|
|
418
|
+
|
|
438
419
|
discovery: Discovery = Discovery(
|
|
439
420
|
self,
|
|
440
421
|
c.name,
|
|
441
422
|
session,
|
|
442
423
|
node=self.node_cfg.name,
|
|
443
|
-
entity_picture_url=
|
|
444
|
-
|
|
445
|
-
|
|
424
|
+
entity_picture_url=customization.picture or pkg_info.logo_url,
|
|
425
|
+
current_version=public_installed_version,
|
|
426
|
+
publish_policy=publish_policy,
|
|
446
427
|
update_policy=update_policy,
|
|
447
|
-
|
|
448
|
-
|
|
428
|
+
version_policy=version_policy,
|
|
429
|
+
version_basis=version_basis,
|
|
430
|
+
latest_version=public_latest_version,
|
|
449
431
|
device_icon=self.cfg.device_icon,
|
|
450
|
-
|
|
432
|
+
can_pull=can_pull,
|
|
451
433
|
update_type=update_type,
|
|
452
434
|
can_build=can_build,
|
|
453
435
|
can_restart=can_restart,
|
|
454
436
|
status=(c.status == "running" and "on") or "off",
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
437
|
+
throttled=latest_info.throttled,
|
|
438
|
+
previous=previous_discovery,
|
|
439
|
+
release_detail=release_info,
|
|
440
|
+
installation_detail=service_info,
|
|
441
|
+
current_detail=local_info,
|
|
442
|
+
latest_detail=latest_info,
|
|
458
443
|
)
|
|
459
444
|
logger.debug("Analyze generated discovery: %s", discovery)
|
|
460
445
|
return discovery
|
|
@@ -463,13 +448,21 @@ class DockerProvider(ReleaseProvider):
|
|
|
463
448
|
logger.debug("Analyze returned empty discovery")
|
|
464
449
|
return None
|
|
465
450
|
|
|
466
|
-
|
|
451
|
+
# def version(self, c: Container, version_type: str):
|
|
452
|
+
# metadata_version: str = c.labels.get("org.opencontainers.image.version")
|
|
453
|
+
# metadata_revision: str = c.labels.get("org.opencontainers.image.revision")
|
|
454
|
+
|
|
455
|
+
async def scan(self, session: str, shuffle: bool = True) -> AsyncGenerator[Discovery]:
|
|
467
456
|
logger = self.log.bind(session=session, action="scan", source=self.source_type)
|
|
468
457
|
containers: int = 0
|
|
469
458
|
results: int = 0
|
|
470
459
|
throttled: int = 0
|
|
471
|
-
|
|
472
|
-
|
|
460
|
+
|
|
461
|
+
targets: list[Container] = self.client.containers.list()
|
|
462
|
+
if shuffle:
|
|
463
|
+
random.shuffle(targets)
|
|
464
|
+
logger.debug("Starting scanning %s containers", len(targets))
|
|
465
|
+
for c in targets:
|
|
473
466
|
logger.debug("Analyzing container", container=c.name)
|
|
474
467
|
if self.stopped.is_set():
|
|
475
468
|
logger.info(f"Shutdown detected, aborting scan at {c}")
|
|
@@ -477,7 +470,7 @@ class DockerProvider(ReleaseProvider):
|
|
|
477
470
|
containers = containers + 1
|
|
478
471
|
result: Discovery | None = self.analyze(c, session)
|
|
479
472
|
if result:
|
|
480
|
-
logger.debug("Analyzed container", result_name=result.name,
|
|
473
|
+
logger.debug("Analyzed container", result_name=result.name, throttled=result.throttled)
|
|
481
474
|
self.discoveries[result.name] = result
|
|
482
475
|
results = results + 1
|
|
483
476
|
throttled += 1 if result.throttled else 0
|
|
@@ -503,10 +496,10 @@ class DockerProvider(ReleaseProvider):
|
|
|
503
496
|
logger.info("Starting update ...")
|
|
504
497
|
on_update_start(discovery)
|
|
505
498
|
if self.update(discovery):
|
|
506
|
-
logger.
|
|
499
|
+
logger.debug("Rescanning ...")
|
|
507
500
|
rediscovery = self.rescan(discovery)
|
|
508
|
-
updated = rediscovery is not None
|
|
509
|
-
logger.info("Rescanned
|
|
501
|
+
updated = rediscovery is not None and not rediscovery.throttled
|
|
502
|
+
logger.info("Rescanned, updated:%s", updated)
|
|
510
503
|
else:
|
|
511
504
|
logger.info("Rescan with no result")
|
|
512
505
|
on_update_end(rediscovery or discovery)
|
|
@@ -521,87 +514,149 @@ class DockerProvider(ReleaseProvider):
|
|
|
521
514
|
def resolve(self, discovery_name: str) -> Discovery | None:
|
|
522
515
|
return self.discoveries.get(discovery_name)
|
|
523
516
|
|
|
524
|
-
def
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
517
|
+
def default_metadata(self, image_info: DockerImageInfo) -> PackageUpdateInfo:
|
|
518
|
+
for enricher in self.pkg_enrichers:
|
|
519
|
+
pkg_info = enricher.enrich(image_info)
|
|
520
|
+
if pkg_info is not None:
|
|
521
|
+
return pkg_info
|
|
522
|
+
raise ValueError("No enricher could provide metadata, not even default enricher")
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
def select_versions(version_policy: VersionPolicy, installed: DockerImageInfo, latest: DockerImageInfo) -> tuple[str, str, str]:
    """Pick the best version strings to display based on the version policy and available data.

    Ensures that both local installed and remote latest versions are derived the same way,
    falling back to digests when versions are unreliable or inconsistent.

    Args:
        version_policy: preferred identity source (VERSION, DIGEST, VERSION_DIGEST, AUTO).
        installed: metadata of the locally installed image.
        latest: metadata of the newest known remote image.

    Returns:
        (installed_version, latest_version, basis) where basis is a diagnostic tag
        encoding the rule, phase, and any shortcircuit that produced the result.
    """
    phase: int = 0
    shortcircuit: str | None = None

    def basis(rule: str) -> str:
        # Diagnostic tag; closure reads the *current* phase/shortcircuit at call time.
        return f"{rule}-{phase}" if not shortcircuit else f"{rule}-{phase}-{shortcircuit}"

    # Shortcircuit the logic if there's nothing to compare: flatten latest onto
    # installed so every later rule yields equal versions (no update alert).
    if latest.throttled:
        log.debug("Flattening versions for throttled update %s", installed.ref)
        shortcircuit = "THR"
        latest = installed
    elif not any((latest.short_digest, latest.repo_digest, latest.git_digest, latest.version)):
        log.debug("Flattening versions for empty update %s", installed.ref)
        shortcircuit = "NUP"
        latest = installed
    elif latest.short_digest == installed.short_digest and latest.short_digest is not None:
        log.debug("Flattening versions for identical update %s", installed.ref)
        shortcircuit = "SDM"
        latest = installed
    elif installed.image_digest in latest.repo_digests:
        # TODO: avoid this by better adaptations for different registries and single/multi manifests
        log.debug(
            "Matching new repo_digest against installed image digest for %s image %s", installed.index_name, installed.name
        )
        shortcircuit = "FGA"
        latest = installed
    elif latest.image_digest in installed.repo_digests:
        # TODO: avoid this by better adaptations for different registries and single/multi manifests
        log.debug(
            "Matching new image_digest against installed repo digest for %s image %s", installed.index_name, installed.name
        )
        shortcircuit = "FGB"
        latest = installed

    # Phase 0: honor an explicit policy when the required data is present.
    if version_policy == VersionPolicy.VERSION and installed.version and latest.version:
        return installed.version, latest.version, basis("version")

    installed_digest_available: bool = installed.short_digest is not None and installed.short_digest != ""
    latest_digest_available: bool = latest.short_digest is not None and latest.short_digest != ""

    if version_policy == VersionPolicy.DIGEST and installed_digest_available and latest_digest_available:
        return installed.short_digest, latest.short_digest, basis("digest")  # type: ignore[return-value]
    if (
        version_policy == VersionPolicy.VERSION_DIGEST
        and installed.version
        and latest.version
        and installed_digest_available
        and latest_digest_available
    ):
        return (
            f"{installed.version}:{installed.short_digest}",
            f"{latest.version}:{latest.short_digest}",
            basis("version-digest"),
        )

    phase = 1
    if version_policy == VersionPolicy.AUTO and (
        (installed.version == latest.version and installed.short_digest == latest.short_digest)
        or (installed.version != latest.version and installed.short_digest != latest.short_digest)
    ):
        # Detect semver, or casual semver (e.g. v1.030).
        # Only used when version and digest consistently agree or disagree, so
        # people see nice version numbers on screen rather than hashes.
        if (
            installed.version
            and re.match(SEMVER_RE, installed.version or "")
            and latest.version
            and re.match(SEMVER_RE, latest.version or "")
        ):
            # Smells like semver, override if not using version_policy
            return installed.version, latest.version, basis("semver")
        if (
            installed.version
            and re.match(VERSION_RE, installed.version or "")
            and latest.version
            and re.match(VERSION_RE, latest.version or "")
        ):
            # Smells like casual semver, override if not using version_policy
            # NOTE: "causualver" tag is a historical typo kept as-is; downstream may match it.
            return installed.version, latest.version, basis("causualver")

    # AUTO or fallback: richest combined form first.
    phase = 2
    if installed.version and latest.version and installed_digest_available and latest_digest_available:
        return (
            f"{installed.version}:{installed.short_digest}",
            f"{latest.version}:{latest.short_digest}",
            basis("version-digest"),
        )

    if installed.version and latest.version:
        return installed.version, latest.version, basis("version")

    # Check for local builds
    phase = 3
    if installed.git_digest and latest.git_digest:
        return f"git:{installed.git_digest}", f"git:{latest.git_digest}", basis("git")

    # Fall back to digests, image or repo index
    phase = 4
    if installed_digest_available and latest_digest_available:
        return installed.short_digest, latest.short_digest, basis("digest")  # type: ignore[return-value]
    if installed.version and not latest.version and not latest.short_digest and not latest.repo_digest:
        # No remote identity at all: even out so no update alert fires.
        return installed.version, installed.version, basis("version")
    phase = 5
    if not installed_digest_available and latest_digest_available:
        # odd condition if local image has no identity, even out versions so no update alert
        return latest.short_digest, latest.short_digest, basis("digest")  # type: ignore[return-value]

    # Fall back to repo digests
    phase = 6

    def condense_repo_id(i: DockerImageInfo) -> str:
        # Shorten a repo digest for display; empty string when absent.
        v: str | None = i.condense_digest(i.repo_digest) if i.repo_digest else None
        return v or ""

    if installed.repo_digest and latest.repo_digest:
        # where the image digest isn't available, fall back to a repo digest
        return condense_repo_id(installed), condense_repo_id(latest), basis("repo-digest")

    phase = 7
    if latest.repo_digest and latest.repo_digest in installed.repo_digests:
        # installed has multiple RepoDigests from multiple pulls and one of them matches latest current repo digest
        return condense_repo_id(latest), condense_repo_id(latest), basis("repo-digest")

    if installed_digest_available and not latest_digest_available:
        # FIX: latest.short_digest is None/"" on this path; returning it violated the
        # declared tuple[str, str, str] contract. Even out to the installed digest so
        # no update alert fires, mirroring the symmetric phase-5 branch above.
        return installed.short_digest, installed.short_digest, basis("digest")  # type: ignore[return-value]

    # logging's/structlog's `warn` is a deprecated alias; use `warning`.
    log.warning("No versions can be determined for %s", installed.ref)
    phase = 999
    return UNKNOWN_VERSION, UNKNOWN_VERSION, basis("failure")
|