updates2mqtt 1.7.0__tar.gz → 1.7.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/PKG-INFO +13 -7
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/README.md +10 -4
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/pyproject.toml +5 -3
- updates2mqtt-1.7.3/src/updates2mqtt/cli.py +150 -0
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/config.py +32 -2
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/hass_formatter.py +5 -4
- updates2mqtt-1.7.3/src/updates2mqtt/helpers.py +226 -0
- updates2mqtt-1.7.3/src/updates2mqtt/integrations/docker.py +662 -0
- updates2mqtt-1.7.3/src/updates2mqtt/integrations/docker_enrich.py +876 -0
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/integrations/git_utils.py +5 -5
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/model.py +94 -89
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/mqtt.py +5 -0
- updates2mqtt-1.7.0/src/updates2mqtt/integrations/docker.py +0 -606
- updates2mqtt-1.7.0/src/updates2mqtt/integrations/docker_enrich.py +0 -344
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/__init__.py +0 -0
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/__main__.py +0 -0
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/app.py +0 -0
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/integrations/__init__.py +0 -0
- {updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/py.typed +0 -0

{updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: updates2mqtt
-Version: 1.7.0
+Version: 1.7.3
 Summary: System update and docker image notification and execution over MQTT
 Keywords: mqtt,docker,oci,container,updates,automation,home-assistant,homeassistant,selfhosting
 Author: jey burrows
@@ -18,14 +18,14 @@ Classifier: Intended Audience :: System Administrators
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Typing :: Typed
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3.
+Classifier: Programming Language :: Python :: 3.14
 Requires-Dist: docker>=7.1.0
 Requires-Dist: paho-mqtt>=2.1.0
 Requires-Dist: omegaconf>=2.3.0
 Requires-Dist: structlog>=25.4.0
 Requires-Dist: rich>=14.0.0
 Requires-Dist: httpx>=0.28.1
-Requires-Dist: hishel[httpx]>=
+Requires-Dist: hishel[httpx]>=1.1.0
 Requires-Dist: usingversion>=0.1.2
 Requires-Dist: tzlocal>=5.3.1
 Requires-Python: >=3.13
@@ -61,7 +61,7 @@ Description-Content-Type: text/markdown

 Let Home Assistant tell you about new updates to Docker images for your containers.

-
+{width=300}

 Read the release notes, and optionally click *Update* to trigger a Docker *pull* (or optionally *build*) and *update*.

@@ -72,7 +72,8 @@ Read the release notes, and optionally click *Update* to trigger a Docker *pull*

 Updates2MQTT perioidically checks for new versions of components being available, and publishes new version info to MQTT. HomeAssistant auto discovery is supported, so all updates can be seen in the same place as Home Assistant's own components and add-ins.

-Currently only Docker containers are supported, either via an image registry check, or a git repo for source (see [Local Builds](local_builds.md))
+Currently only Docker containers are supported, either via an image registry check (using either v1 Docker APIs or the OCI v2 API), or a git repo for source (see [Local Builds](local_builds.md)), with specific handling for Docker, Github Container Registry, Gitlab, Codeberg, Microsoft Container Registry and LinuxServer Registry, with adaptive behaviour to cope with most
+others. The design is modular, so other update sources can be added, at least for notification. The next anticipated is **apt** for Debian based systems.

 Components can also be updated, either automatically or triggered via MQTT, for example by hitting the *Install* button in the HomeAssistant update dialog. Icons and release notes can be specified for a better HA experience. See [Home Assistant Integration](home_assistant.md) for details.

@@ -81,7 +82,7 @@ To get started, read the [Installation](installation.md) and [Configuration](con
 For a quick spin, try this:

 ```bash
-docker run -v /var/run/docker.sock:/var/run/docker.sock -e MQTT_USER=user1 -e MQTT_PASS=user1 -e MQTT_HOST=192.168.1.5 ghcr.io/rhizomatics/updates2mqtt:
+docker run -v /var/run/docker.sock:/var/run/docker.sock -e MQTT_USER=user1 -e MQTT_PASS=user1 -e MQTT_HOST=192.168.1.5 ghcr.io/rhizomatics/updates2mqtt:latest
 ```

 or without Docker, using [uv](https://docs.astral.sh/uv/)
@@ -159,6 +160,8 @@ The following environment variables can be used to configure containers for `upd
 | `UPD2MQTT_GIT_REPO_PATH` | Relative path to a local git repo if the image is built locally. | |
 | `UPD2MQTT_IGNORE` | If set to `True`, the container will be ignored by Updates2MQTT. | False |
 | |
+| `UPD2MQTT_VERSION_POLICY` | Change how version derived from container label or image hash, `Version`,`Digest`,`Version_Digest` with default of `Auto`|
+| `UPD2MQTT_REGISTRY_TOKEN` | Access token for authentication to container distribution API, as alternative to making a call to `token` service |

 ### Docker Labels

@@ -171,6 +174,9 @@ Alternatively, use Docker labels
 | `updates2mqtt.relnotes` | `UPD2MQTT_RELNOTES` |
 | `updates2mqtt.git_repo_path` | `UPD2MQTT_GIT_REPO_PATH` |
 | `updates2mqtt.ignore` | `UPD2MQTT_IGNORE` |
+| `updates2mqtt.version_policy` | `UPD2MQTT_VERSION_POLICY` |
+| `updates2mqtt.registry_token` | `UPD2MQTT_REGISTRY_TOKEN` |
+


 ```yaml title="Example Compose Snippet"
@@ -200,7 +206,7 @@ This component relies on several open source packages:
 - [Eclipse Paho](https://eclipse.dev/paho/files/paho.mqtt.python/html/client.html) MQTT client
 - [OmegaConf](https://omegaconf.readthedocs.io) for configuration and validation
 - [structlog](https://www.structlog.org/en/stable/) for structured logging and [rich](https://rich.readthedocs.io/en/stable/) for better exception reporting
-- [hishel](https://hishel.com/
+- [hishel](https://hishel.com/) for caching metadata
 - [httpx](https://www.python-httpx.org) for retrieving metadata
 - The Astral [uv](https://docs.astral.sh/uv/) and [ruff](https://docs.astral.sh/ruff/) tools for development and build
 - [pytest](https://docs.pytest.org/en/stable/) and supporting add-ins for automated testing
````

{updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/README.md

````diff
@@ -23,7 +23,7 @@

 Let Home Assistant tell you about new updates to Docker images for your containers.

-
+{width=300}

 Read the release notes, and optionally click *Update* to trigger a Docker *pull* (or optionally *build*) and *update*.

@@ -34,7 +34,8 @@ Read the release notes, and optionally click *Update* to trigger a Docker *pull*

 Updates2MQTT perioidically checks for new versions of components being available, and publishes new version info to MQTT. HomeAssistant auto discovery is supported, so all updates can be seen in the same place as Home Assistant's own components and add-ins.

-Currently only Docker containers are supported, either via an image registry check, or a git repo for source (see [Local Builds](local_builds.md))
+Currently only Docker containers are supported, either via an image registry check (using either v1 Docker APIs or the OCI v2 API), or a git repo for source (see [Local Builds](local_builds.md)), with specific handling for Docker, Github Container Registry, Gitlab, Codeberg, Microsoft Container Registry and LinuxServer Registry, with adaptive behaviour to cope with most
+others. The design is modular, so other update sources can be added, at least for notification. The next anticipated is **apt** for Debian based systems.

 Components can also be updated, either automatically or triggered via MQTT, for example by hitting the *Install* button in the HomeAssistant update dialog. Icons and release notes can be specified for a better HA experience. See [Home Assistant Integration](home_assistant.md) for details.

@@ -43,7 +44,7 @@ To get started, read the [Installation](installation.md) and [Configuration](con
 For a quick spin, try this:

 ```bash
-docker run -v /var/run/docker.sock:/var/run/docker.sock -e MQTT_USER=user1 -e MQTT_PASS=user1 -e MQTT_HOST=192.168.1.5 ghcr.io/rhizomatics/updates2mqtt:
+docker run -v /var/run/docker.sock:/var/run/docker.sock -e MQTT_USER=user1 -e MQTT_PASS=user1 -e MQTT_HOST=192.168.1.5 ghcr.io/rhizomatics/updates2mqtt:latest
 ```

 or without Docker, using [uv](https://docs.astral.sh/uv/)
@@ -121,6 +122,8 @@ The following environment variables can be used to configure containers for `upd
 | `UPD2MQTT_GIT_REPO_PATH` | Relative path to a local git repo if the image is built locally. | |
 | `UPD2MQTT_IGNORE` | If set to `True`, the container will be ignored by Updates2MQTT. | False |
 | |
+| `UPD2MQTT_VERSION_POLICY` | Change how version derived from container label or image hash, `Version`,`Digest`,`Version_Digest` with default of `Auto`|
+| `UPD2MQTT_REGISTRY_TOKEN` | Access token for authentication to container distribution API, as alternative to making a call to `token` service |

 ### Docker Labels

@@ -133,6 +136,9 @@ Alternatively, use Docker labels
 | `updates2mqtt.relnotes` | `UPD2MQTT_RELNOTES` |
 | `updates2mqtt.git_repo_path` | `UPD2MQTT_GIT_REPO_PATH` |
 | `updates2mqtt.ignore` | `UPD2MQTT_IGNORE` |
+| `updates2mqtt.version_policy` | `UPD2MQTT_VERSION_POLICY` |
+| `updates2mqtt.registry_token` | `UPD2MQTT_REGISTRY_TOKEN` |
+


 ```yaml title="Example Compose Snippet"
````
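
The two new per-container settings can be supplied either as the environment variables or as the `updates2mqtt.*` labels listed above. As a rough illustration only (not taken from the package documentation), this is how the labels might be attached when starting a monitored container with the Docker SDK for Python; the image name and values are placeholders:

```python
import docker

client = docker.from_env()

# Hypothetical container start showing the new labels from the table above.
client.containers.run(
    "ghcr.io/blakeblackshear/frigate:stable",
    detach=True,
    labels={
        "updates2mqtt.version_policy": "Digest",  # Auto, Version, Digest or Version_Digest
        "updates2mqtt.registry_token": "example-token",  # placeholder; skips the anonymous `token` call
    },
)
```

In a compose file the same keys go under the service's `labels:` block.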

````diff
@@ -162,7 +168,7 @@ This component relies on several open source packages:
 - [Eclipse Paho](https://eclipse.dev/paho/files/paho.mqtt.python/html/client.html) MQTT client
 - [OmegaConf](https://omegaconf.readthedocs.io) for configuration and validation
 - [structlog](https://www.structlog.org/en/stable/) for structured logging and [rich](https://rich.readthedocs.io/en/stable/) for better exception reporting
-- [hishel](https://hishel.com/
+- [hishel](https://hishel.com/) for caching metadata
 - [httpx](https://www.python-httpx.org) for retrieving metadata
 - The Astral [uv](https://docs.astral.sh/uv/) and [ruff](https://docs.astral.sh/ruff/) tools for development and build
 - [pytest](https://docs.pytest.org/en/stable/) and supporting add-ins for automated testing
````

{updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/pyproject.toml

````diff
@@ -7,7 +7,7 @@ authors = [
 ]

 requires-python = ">=3.13"
-version = "1.7.0"
+version = "1.7.3"
 license="Apache-2.0"
 keywords=["mqtt", "docker", "oci","container","updates", "automation","home-assistant","homeassistant","selfhosting"]

@@ -18,7 +18,7 @@ dependencies = [
     "structlog>=25.4.0",
     "rich>=14.0.0",
     "httpx>=0.28.1",
-    "hishel[httpx]>=
+    "hishel[httpx]>=1.1.0",
     "usingversion>=0.1.2",
     "tzlocal>=5.3.1",
 ]
@@ -35,11 +35,12 @@ classifiers = [
     "License :: OSI Approved :: Apache Software License",
     "Typing :: Typed",
     "Programming Language :: Python",
-    "Programming Language :: Python :: 3.
+    "Programming Language :: Python :: 3.14"
 ]

 [project.scripts]
 updates2mqtt = "updates2mqtt.app:run"
+cli = "updates2mqtt.cli:main"

 [tool.uv]
 compile-bytecode = true
@@ -54,6 +55,7 @@ dev = [
     "pytest-asyncio>=1.2.0",
     "pytest-cov>=7.0.0",
     "pytest-httpx",
+    "pytest-xdist",
     "pytest-mqtt>=0.6.0",
     "pytest-subprocess>=1.5.3",
     "coverage",
````

updates2mqtt-1.7.3/src/updates2mqtt/cli.py (new file)

````diff
@@ -0,0 +1,150 @@
+from typing import TYPE_CHECKING
+
+import structlog
+from omegaconf import DictConfig, OmegaConf
+from rich import print_json
+
+from updates2mqtt.config import DockerConfig, NodeConfig, RegistryConfig
+from updates2mqtt.helpers import Throttler
+from updates2mqtt.integrations.docker import DockerProvider
+from updates2mqtt.integrations.docker_enrich import (
+    REGISTRIES,
+    ContainerDistributionAPIVersionLookup,
+    DockerImageInfo,
+    fetch_url,
+)
+from updates2mqtt.model import Discovery
+
+if TYPE_CHECKING:
+    from httpx import Response
+
+log = structlog.get_logger()
+
+
+"""
+Super simple CLI
+
+python updates2mqtt.cli container=frigate
+
+python updates2mqtt.cli container=frigate api=docker_client log_level=DEBUG
+
+ython3 updates2mqtt/cli.py blob=ghcr.io/homarr-labs/homarr@sha256:af79a3339de5ed8ef7f5a0186ff3deb86f40b213ba75249291f2f68aef082a25 | jq '.config.Labels'
+
+python3 updates2mqtt/cli.py manifest=ghcr.io/blakeblackshear/frigate:stable
+
+python3 updates2mqtt/cli.py blob=ghcr.io/blakeblackshear/frigate@sha256:ef8d56a7d50b545af176e950ce328aec7f0b7bc5baebdca189fe661d97924980
+
+python3 updates2mqtt/cli.py manifest=ghcr.io/blakeblackshear/frigate@sha256:c68fd78fd3237c9ba81b5aa927f17b54f46705990f43b4b5d5596cfbbb626af4
+""" # noqa: E501
+
+OCI_MANIFEST_TYPES: list[str] = [
+    "application/vnd.oci.image.manifest.v1+json",
+    "application/vnd.oci.image.index.v1+json",
+    "application/vnd.oci.descriptor.v1+json",
+    "application/vnd.oci.empty.v1+json",
+]
+
+OCI_CONFIG_TYPES: list[str] = [
+    "application/vnd.oci.image.config.v1+json",
+]
+
+OCI_LAYER_TYPES: list[str] = [
+    "application/vnd.oci.image.layer.v1.tar",
+    "application/vnd.oci.image.layer.v1.tar+gzip",
+    "application/vnd.oci.image.layer.v1.tar+zstd",
+]
+
+OCI_NONDISTRIBUTABLE_LAYER_TYPES: list[str] = [
+    "application/vnd.oci.image.layer.nondistributable.v1.tar",
+    "application/vnd.oci.image.layer.nondistributable.v1.tar+gzip",
+    "application/vnd.oci.image.layer.nondistributable.v1.tar+zstd",
+]
+
+# Docker Compatibility MIME Types
+DOCKER_MANIFEST_TYPES: list[str] = [
+    "application/vnd.docker.distribution.manifest.v2+json",
+    "application/vnd.docker.distribution.manifest.list.v2+json",
+    "application/vnd.docker.distribution.manifest.v1+json",
+    "application/vnd.docker.distribution.manifest.v1+prettyjws",
+]
+
+DOCKER_CONFIG_TYPES: list[str] = [
+    "application/vnd.docker.container.image.v1+json",
+]
+
+DOCKER_LAYER_TYPES: list[str] = [
+    "application/vnd.docker.image.rootfs.diff.tar.gzip",
+    "application/vnd.docker.image.rootfs.foreign.diff.tar.gzip",
+]
+
+# Combined constants
+ALL_MANIFEST_TYPES: list[str] = OCI_MANIFEST_TYPES + DOCKER_MANIFEST_TYPES
+ALL_CONFIG_TYPES: list[str] = OCI_CONFIG_TYPES + DOCKER_CONFIG_TYPES
+ALL_LAYER_TYPES: list[str] = OCI_LAYER_TYPES + OCI_NONDISTRIBUTABLE_LAYER_TYPES + DOCKER_LAYER_TYPES
+
+# All content types that might be returned by the API
+ALL_OCI_MEDIA_TYPES: list[str] = (
+    ALL_MANIFEST_TYPES
+    + ALL_CONFIG_TYPES
+    + ALL_LAYER_TYPES
+    + ["application/octet-stream", "application/json"] # Error responses
+)
+
+
+def dump_url(doc_type: str, img_ref: str) -> None:
+    lookup = ContainerDistributionAPIVersionLookup(Throttler(), RegistryConfig())
+    img_info = DockerImageInfo(img_ref)
+    if not img_info.index_name or not img_info.name:
+        log.error("Unable to parse %ss", img_ref)
+        return
+
+    api_host: str | None = REGISTRIES.get(img_info.index_name, (img_info.index_name, img_info.index_name))[1]
+
+    if doc_type == "blob":
+        if not img_info.pinned_digest:
+            log.warning("No digest found in %s", img_ref)
+            return
+        url: str = f"https://{api_host}/v2/{img_info.name}/blobs/{img_info.pinned_digest}"
+    elif doc_type == "manifest":
+        if not img_info.tag_or_digest:
+            log.warning("No tag or digest found in %s", img_ref)
+            return
+        url = f"https://{api_host}/v2/{img_info.name}/manifests/{img_info.tag_or_digest}"
+    else:
+        return
+
+    token: str | None = lookup.fetch_token(img_info.index_name, img_info.name)
+
+    response: Response | None = fetch_url(url, bearer_token=token, follow_redirects=True, response_type=ALL_OCI_MEDIA_TYPES)
+    if response:
+        log.debug(f"{response.status_code}: {url}")
+        log.debug("HEADERS")
+        for k, v in response.headers.items():
+            log.debug(f"{k}: {v}")
+        log.debug("CONTENTS")
+        print_json(response.text)
+
+
+def main() -> None:
+    # will be a proper cli someday
+    cli_conf: DictConfig = OmegaConf.from_cli()
+    structlog.configure(wrapper_class=structlog.make_filtering_bound_logger(cli_conf.get("log_level", "WARNING")))
+
+    if cli_conf.get("blob"):
+        dump_url("blob", cli_conf.get("blob"))
+    elif cli_conf.get("manifest"):
+        dump_url("manifest", cli_conf.get("manifest"))
+
+    else:
+        docker_scanner = DockerProvider(
+            DockerConfig(registry=RegistryConfig(api=cli_conf.get("api", "OCI_V2"))), NodeConfig(), None
+        )
+        discovery: Discovery | None = docker_scanner.rescan(
+            Discovery(docker_scanner, cli_conf.get("container", "frigate"), "cli", "manual")
+        )
+        if discovery:
+            log.info(discovery.as_dict())
+
+
+if __name__ == "__main__":
+    main()
````
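
The new `cli.py` is mostly a thin wrapper around the OCI distribution calls implemented in `docker_enrich.py` (also rewritten in this release, elsewhere in this diff). For orientation, the requests behind `dump_url` amount to the standard token-then-manifest flow sketched below; this is an independent illustration using plain `httpx` against a public ghcr.io image, not the package's own code, and the repository name is just the one from the docstring examples:

```python
import httpx

name = "blakeblackshear/frigate"  # repository part of the image reference

# Anonymous pull token from the registry's token service
token = httpx.get(f"https://ghcr.io/token?scope=repository:{name}:pull").json()["token"]

# Manifest request, advertising OCI/Docker media types via the Accept header
manifest = httpx.get(
    f"https://ghcr.io/v2/{name}/manifests/stable",
    headers={
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.oci.image.index.v1+json, "
        "application/vnd.docker.distribution.manifest.list.v2+json",
    },
)
print(manifest.status_code, manifest.headers.get("docker-content-digest"))
```

Supplying `UPD2MQTT_REGISTRY_TOKEN` corresponds to skipping the first request and using the given token directly.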

{updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/config.py

````diff
@@ -10,7 +10,10 @@ from omegaconf import MISSING, DictConfig, MissingMandatoryValue, OmegaConf, Val
 log = structlog.get_logger()

 PKG_INFO_FILE = Path("./common_packages.yaml")
-
+UNKNOWN_VERSION = "UNKNOWN"
+VERSION_RE = r"[vVr]?[0-9]+(\.[0-9]+)*"
+# source: https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
+SEMVER_RE = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$" # noqa: E501


 class UpdatePolicy(StrEnum):
````
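
The two new patterns feed the version handling later in the diff: `VERSION_RE` is a loose matcher for version-like strings inside tags or labels, while `SEMVER_RE` is the strict semver.org expression. A quick illustration, assuming the constants remain importable from `updates2mqtt.config` as the hunk above suggests:

```python
import re

from updates2mqtt.config import SEMVER_RE, VERSION_RE

print(bool(re.search(VERSION_RE, "frigate:v0.14.1")))  # True: loose match inside a tag
match = re.match(SEMVER_RE, "1.7.3")
print(match.group("major"), match.group("minor"), match.group("patch"))  # 1 7 3
print(re.match(SEMVER_RE, "v1.7.3") is None)  # True: strict semver rejects the leading v
```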

{updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/config.py (continued)

````diff
@@ -32,6 +35,13 @@ class LogLevel(StrEnum):
     CRITICAL = "CRITICAL"


+class RegistryAPI(StrEnum):
+    OCI_V2 = "OCI_V2"
+    OCI_V2_MINIMAL = "OCI_V2"
+    DOCKER_CLIENT = "DOCKER_CLIENT"
+    DISABLED = "DISABLED"
+
+
 class VersionType:
     SHORT_SHA = "short_sha"
     FULL_SHA = "full_sha"
@@ -39,6 +49,14 @@ class VersionType:
     VERSION = "version"


+@dataclass
+class RegistryConfig:
+    api: RegistryAPI = RegistryAPI.OCI_V2
+    mutable_cache_ttl: int | None = None # default to server cache hint
+    immutable_cache_ttl: int | None = 7776000 # 90 days
+    token_cache_ttl: int | None = None # default to server cache hint
+
+
 @dataclass
 class MqttConfig:
     host: str = "${oc.env:MQTT_HOST,localhost}"
@@ -61,6 +79,13 @@ class Selector:
     exclude: list[str] | None = None


+class VersionPolicy(StrEnum):
+    AUTO = "AUTO"
+    VERSION = "VERSION"
+    DIGEST = "DIGEST"
+    VERSION_DIGEST = "VERSION_DIGEST"
+
+
 @dataclass
 class DockerConfig:
     enabled: bool = True
@@ -74,8 +99,12 @@ class DockerConfig:
     discover_metadata: dict[str, MetadataSourceConfig] = field(
         default_factory=lambda: {"linuxserver.io": MetadataSourceConfig(enabled=True)}
     )
+    registry: RegistryConfig = field(default_factory=lambda: RegistryConfig())
     default_api_backoff: int = 60 * 15
     image_ref_select: Selector = field(default_factory=lambda: Selector())
+    version_select: Selector = field(default_factory=lambda: Selector())
+    version_policy: VersionPolicy = VersionPolicy.AUTO
+    registry_select: Selector = field(default_factory=lambda: Selector())


 @dataclass
````
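
Taken together, the new fields let the Docker provider choose a registry API, per-registry caching TTLs and a version policy. A minimal sketch of the resulting structure, constructing the dataclasses above directly (field names come from the hunk, the values and the exclude pattern are purely illustrative):

```python
from updates2mqtt.config import (
    DockerConfig,
    RegistryAPI,
    RegistryConfig,
    Selector,
    VersionPolicy,
)

docker_cfg = DockerConfig(
    version_policy=VersionPolicy.DIGEST,  # report image digests rather than label versions
    registry=RegistryConfig(api=RegistryAPI.OCI_V2, immutable_cache_ttl=86400),
    registry_select=Selector(exclude=[r"\.internal$"]),  # example pattern, not from the package
)
print(docker_cfg.version_policy, docker_cfg.registry.api)
```

In normal use these values come from the OmegaConf-driven configuration rather than direct construction.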

{updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/config.py (continued)

````diff
@@ -125,7 +154,7 @@ class Config:

 @dataclass
 class DockerPackageUpdateInfo:
-    image_name: str = MISSING
+    image_name: str = MISSING # untagged image ref


 @dataclass
@@ -133,6 +162,7 @@ class PackageUpdateInfo:
     docker: DockerPackageUpdateInfo | None = field(default_factory=DockerPackageUpdateInfo)
     logo_url: str | None = None
     release_notes_url: str | None = None
+    source_repo_url: str | None = None


 @dataclass
````

{updates2mqtt-1.7.0 → updates2mqtt-1.7.3}/src/updates2mqtt/hass_formatter.py

````diff
@@ -71,9 +71,10 @@ def hass_format_state(discovery: Discovery, session: str, in_progress: bool = Fa
         "title": discovery.title,
         "in_progress": in_progress,
     }
-    if discovery.
-
-
-
+    if discovery.release_detail:
+        if discovery.release_detail.summary:
+            state["release_summary"] = discovery.release_detail.summary
+        if discovery.release_detail.notes_url:
+            state["release_url"] = discovery.release_detail.notes_url

     return state
````
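
The effect is that the Home Assistant update entity's state payload now takes its release summary and link from the discovery's `release_detail`, and omits those keys when no release details were found. Roughly, the published state looks like this; the keys are those visible in the function above, the values are invented for illustration:

```python
state = {
    # ...other fields assembled earlier in hass_format_state...
    "title": "frigate",
    "in_progress": False,
    # only added when discovery.release_detail is populated:
    "release_summary": "Short summary taken from the image's release metadata",
    "release_url": "https://github.com/blakeblackshear/frigate/releases",
}
```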

updates2mqtt-1.7.3/src/updates2mqtt/helpers.py (new file)

````diff
@@ -0,0 +1,226 @@
+import datetime as dt
+import re
+import time
+from threading import Event
+from typing import Any
+from urllib.parse import urlparse
+
+import structlog
+from hishel import CacheOptions, SpecificationPolicy # pyright: ignore[reportAttributeAccessIssue]
+from hishel.httpx import SyncCacheClient
+from httpx import Response
+from tzlocal import get_localzone
+
+from updates2mqtt.config import Selector
+
+log = structlog.get_logger()
+
+
+def timestamp(time_value: float | None) -> str | None:
+    if time_value is None:
+        return None
+    try:
+        return dt.datetime.fromtimestamp(time_value, tz=get_localzone()).isoformat()
+    except: # noqa: E722
+        return None
+
+
+class Selection:
+    def __init__(self, selector: Selector, value: str | None) -> None:
+        self.result: bool = True
+        self.matched: str | None = None
+        if value is None:
+            self.result = selector.include is None
+            return
+        if selector.exclude is not None:
+            self.result = True
+            if any(re.search(pat, value) for pat in selector.exclude):
+                self.matched = value
+                self.result = False
+        if selector.include is not None:
+            self.result = False
+            if any(re.search(pat, value) for pat in selector.include):
+                self.matched = value
+                self.result = True
+
+    def __bool__(self) -> bool:
+        """Expose the actual boolean so objects can be appropriately truthy"""
+        return self.result
+
+
+class ThrottledError(Exception):
+    def __init__(self, message: str, retry_secs: int) -> None:
+        super().__init__(message)
+        self.retry_secs = retry_secs
+
+
+class Throttler:
+    DEFAULT_SITE = "DEFAULT_SITE"
+
+    def __init__(self, api_throttle_pause: int = 30, logger: Any | None = None, semaphore: Event | None = None) -> None:
+        self.log: Any = logger or log
+        self.pause_api_until: dict[str, float] = {}
+        self.api_throttle_pause: int = api_throttle_pause
+        self.semaphore = semaphore
+
+    def check_throttle(self, index_name: str | None = None) -> bool:
+        if self.semaphore and self.semaphore.is_set():
+            return True
+        index_name = index_name or self.DEFAULT_SITE
+        if self.pause_api_until.get(index_name) is not None:
+            if self.pause_api_until[index_name] < time.time():
+                del self.pause_api_until[index_name]
+                self.log.info("%s throttling wait complete", index_name)
+            else:
+                self.log.debug("%s throttling has %0.3f secs left", index_name, self.pause_api_until[index_name] - time.time())
+                return True
+        return False
+
+    def throttle(
+        self,
+        index_name: str | None = None,
+        retry_secs: int | None = None,
+        explanation: str | None = None,
+        raise_exception: bool = False,
+    ) -> None:
+        index_name = index_name or self.DEFAULT_SITE
+        retry_secs = retry_secs if retry_secs and retry_secs > 0 else self.api_throttle_pause
+        self.log.warn("%s throttling requests for %s seconds, %s", index_name, retry_secs, explanation)
+        self.pause_api_until[index_name] = time.time() + retry_secs
+        if raise_exception:
+            raise ThrottledError(explanation or f"{index_name} throttled request", retry_secs)
+
+
+class CacheMetadata:
+    """Cache metadata extracted from hishel response extensions"""
+
+    def __init__(self, response: Response) -> None:
+        self.from_cache: bool = response.extensions.get("hishel_from_cache", False)
+        self.revalidated: bool = response.extensions.get("hishel_revalidated", False)
+        self.created_at: float | None = response.extensions.get("hishel_created_at")
+        self.stored: bool = response.extensions.get("hishel_stored", False)
+        self.age: float | None = None
+        if self.created_at is not None:
+            self.age = time.time() - self.created_at
+
+    def __str__(self) -> str:
+        """Summarize in a string"""
+        return f"cached: {self.from_cache}, revalidated: {self.revalidated}, age:{self.age}, stored:{self.stored}"
+
+
+class APIStats:
+    def __init__(self) -> None:
+        self.fetches: int = 0
+        self.cached: int = 0
+        self.revalidated: int = 0
+        self.failed: dict[int, int] = {}
+        self.elapsed: float = 0
+        self.max_cache_age: float = 0
+
+    def tick(self, response: Response | None) -> None:
+        self.fetches += 1
+        if response is None:
+            self.failed.setdefault(0, 0)
+            self.failed[0] += 1
+            return
+        cache_metadata: CacheMetadata = CacheMetadata(response)
+        self.cached += 1 if cache_metadata.from_cache else 0
+        self.revalidated += 1 if cache_metadata.revalidated else 0
+        if response.elapsed:
+            self.elapsed += response.elapsed.microseconds / 1000000
+            self.elapsed += response.elapsed.seconds
+        if not response.is_success:
+            self.failed.setdefault(response.status_code, 0)
+            self.failed[response.status_code] += 1
+        if cache_metadata.age is not None and (self.max_cache_age is None or cache_metadata.age > self.max_cache_age):
+            self.max_cache_age = cache_metadata.age
+
+    def hit_ratio(self) -> float:
+        return round(self.cached / self.fetches, 2) if self.cached and self.fetches else 0
+
+    def average_elapsed(self) -> float:
+        return round(self.elapsed / self.fetches, 2) if self.elapsed and self.fetches else 0
+
+    def __str__(self) -> str:
+        """Log line friendly string summary"""
+        return (
+            f"fetches: {self.fetches}, cache ratio: {self.hit_ratio():.2%}, revalidated: {self.revalidated}, "
+            + f"errors: {', '.join(f'{status_code}:{fails}' for status_code, fails in self.failed.items())}, "
+            + f"oldest cache hit: {self.max_cache_age:.2f}, avg elapsed: {self.average_elapsed()}"
+        )
+
+
+class APIStatsCounter:
+    def __init__(self) -> None:
+        self.stats_report_interval: int = 100
+        self.host_stats: dict[str, APIStats] = {}
+        self.fetches: int = 0
+        self.log: Any = structlog.get_logger().bind()
+
+    def stats(self, url: str, response: Response | None) -> None:
+        try:
+            host: str = urlparse(url).hostname or "UNKNOWN"
+            api_stats: APIStats = self.host_stats.setdefault(host, APIStats())
+            api_stats.tick(response)
+            self.fetches += 1
+            if self.fetches % self.stats_report_interval == 0:
+                self.log.info(
+                    "OCI_V2 API Stats Summary\n%s", "\n".join(f"{host} {stats}" for host, stats in self.host_stats.items())
+                )
+        except Exception as e:
+            self.log.warning("Failed to tick stats: %s", e)
+
+
+def fetch_url(
+    url: str,
+    cache_ttl: int | None = None, # default to server responses for cache ttl
+    bearer_token: str | None = None,
+    response_type: str | list[str] | None = None,
+    follow_redirects: bool = False,
+    allow_stale: bool = False,
+    method: str = "GET",
+    api_stats_counter: APIStatsCounter | None = None,
+) -> Response | None:
+    try:
+        headers = [("cache-control", f"max-age={cache_ttl}")]
+        if bearer_token:
+            headers.append(("Authorization", f"Bearer {bearer_token}"))
+        if response_type:
+            response_type = [response_type] if isinstance(response_type, str) else response_type
+        if response_type and isinstance(response_type, (tuple, list)):
+            headers.extend(("Accept", mime_type) for mime_type in response_type)
+
+        cache_policy = SpecificationPolicy(
+            cache_options=CacheOptions(
+                shared=False, # Private browser cache
+                allow_stale=allow_stale,
+            )
+        )
+        with SyncCacheClient(headers=headers, follow_redirects=follow_redirects, policy=cache_policy) as client:
+            log.debug(f"Fetching URL {url}, redirects={follow_redirects}, headers={headers}, cache_ttl={cache_ttl}")
+            response: Response = client.request(method=method, url=url, extensions={"hishel_ttl": cache_ttl})
+            cache_metadata: CacheMetadata = CacheMetadata(response)
+            if not response.is_success:
+                log.debug("URL %s fetch returned non-success status: %s, %s", url, response.status_code, cache_metadata.stored)
+            elif response:
+                log.debug(
+                    "URL response: status: %s, cached: %s, revalidated: %s, cache age: %s, stored: %s",
+                    response.status_code,
+                    cache_metadata.from_cache,
+                    cache_metadata.revalidated,
+                    cache_metadata.age,
+                    cache_metadata.stored,
+                )
+            if api_stats_counter:
+                api_stats_counter.stats(url, response)
+            return response
+    except Exception as e:
+        log.debug("URL %s failed to fetch: %s", url, e)
+        if api_stats_counter:
+            api_stats_counter.stats(url, None)
+        return None
+
+
+def validate_url(url: str, cache_ttl: int = 300) -> bool:
+    response: Response | None = fetch_url(url, method="HEAD", cache_ttl=cache_ttl, follow_redirects=True)
+    return response is not None and response.status_code != 404
````
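
The new helpers module factors selector matching, per-registry throttling and cached HTTP fetching out of the Docker integration. A small usage sketch for the two stateful pieces, based only on the code above; the patterns and registry names are illustrative:

```python
from updates2mqtt.config import Selector
from updates2mqtt.helpers import Selection, Throttler

# Selection: exclude patterns veto a value unless an include pattern re-admits it
selector = Selector(include=[r"ghcr\.io/"], exclude=[r":nightly$"])
print(bool(Selection(selector, "ghcr.io/blakeblackshear/frigate:stable")))  # True
print(bool(Selection(selector, "docker.io/library/redis:7")))  # False

# Throttler: remember a per-registry backoff, e.g. after an HTTP 429
throttler = Throttler(api_throttle_pause=30)
throttler.throttle("ghcr.io", retry_secs=60, explanation="HTTP 429")
print(throttler.check_throttle("ghcr.io"))  # True until the 60 second window elapses
```

`fetch_url` wraps hishel's `SyncCacheClient` so repeated registry and metadata requests can be served from the local HTTP cache, and `validate_url` reuses it with a `HEAD` request to cheaply check link targets.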