updates2mqtt 1.5.1-py3-none-any.whl → 1.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- updates2mqtt/app.py +23 -11
- updates2mqtt/config.py +30 -18
- updates2mqtt/hass_formatter.py +7 -17
- updates2mqtt/integrations/docker.py +303 -151
- updates2mqtt/integrations/docker_enrich.py +344 -0
- updates2mqtt/integrations/git_utils.py +71 -14
- updates2mqtt/model.py +150 -16
- updates2mqtt/mqtt.py +28 -7
- {updates2mqtt-1.5.1.dist-info → updates2mqtt-1.7.0.dist-info}/METADATA +68 -33
- updates2mqtt-1.7.0.dist-info/RECORD +16 -0
- {updates2mqtt-1.5.1.dist-info → updates2mqtt-1.7.0.dist-info}/WHEEL +1 -1
- {updates2mqtt-1.5.1.dist-info → updates2mqtt-1.7.0.dist-info}/entry_points.txt +1 -0
- updates2mqtt-1.5.1.dist-info/RECORD +0 -16
- updates2mqtt-1.5.1.dist-info/licenses/LICENSE +0 -201
updates2mqtt/integrations/docker_enrich.py

@@ -0,0 +1,344 @@
+import re
+from typing import Any
+
+import structlog
+from docker.auth import resolve_repository_name
+from hishel.httpx import SyncCacheClient
+from httpx import Response
+from omegaconf import MissingMandatoryValue, OmegaConf, ValidationError
+
+from updates2mqtt.config import (
+    NO_KNOWN_IMAGE,
+    PKG_INFO_FILE,
+    DockerConfig,
+    DockerPackageUpdateInfo,
+    PackageUpdateInfo,
+    UpdateInfoConfig,
+)
+
+log = structlog.get_logger()
+
+
+class PackageEnricher:
+    def __init__(self, docker_cfg: DockerConfig) -> None:
+        self.pkgs: dict[str, PackageUpdateInfo] = {}
+        self.cfg: DockerConfig = docker_cfg
+        self.log: Any = structlog.get_logger().bind(integration="docker")
+
+    def initialize(self) -> None:
+        pass
+
+    def enrich(self, image_name: str | None, image_ref: str | None, log: Any) -> PackageUpdateInfo | None:
+        def match(pkg: PackageUpdateInfo) -> bool:
+            if pkg is not None and pkg.docker is not None and pkg.docker.image_name is not None:
+                if image_name is not None and image_name == pkg.docker.image_name:
+                    return True
+                if image_ref is not None and image_ref == pkg.docker.image_name:
+                    return True
+            return False
+
+        if image_name is not None and image_ref is not None:
+            for pkg in self.pkgs.values():
+                if match(pkg):
+                    log.debug(
+                        "Found common package",
+                        image_name=pkg.docker.image_name,  # type: ignore [union-attr]
+                        logo_url=pkg.logo_url,
+                        relnotes_url=pkg.release_notes_url,
+                    )
+                    return pkg
+        return None
+
+
+class DefaultPackageEnricher(PackageEnricher):
+    def enrich(self, image_name: str | None, image_ref: str | None, log: Any) -> PackageUpdateInfo | None:
+        log.debug("Default pkg info", image_name=image_name, image_ref=image_ref)
+        return PackageUpdateInfo(
+            DockerPackageUpdateInfo(image_name or NO_KNOWN_IMAGE),
+            logo_url=self.cfg.default_entity_picture_url,
+            release_notes_url=None,
+        )
+
+
+class CommonPackageEnricher(PackageEnricher):
+    def initialize(self) -> None:
+        if PKG_INFO_FILE.exists():
+            log.debug("Loading common package update info", path=PKG_INFO_FILE)
+            cfg = OmegaConf.load(PKG_INFO_FILE)
+        else:
+            log.warn("No common package update info found", path=PKG_INFO_FILE)
+            cfg = OmegaConf.structured(UpdateInfoConfig)
+        try:
+            # omegaconf broken-ness on optional fields and converting to backclasses
+            self.pkgs: dict[str, PackageUpdateInfo] = {
+                pkg: PackageUpdateInfo(**pkg_cfg) for pkg, pkg_cfg in cfg.common_packages.items()
+            }
+        except (MissingMandatoryValue, ValidationError) as e:
+            log.error("Configuration error %s", e, path=PKG_INFO_FILE.as_posix())
+            raise
+
+
+class LinuxServerIOPackageEnricher(PackageEnricher):
+    def initialize(self) -> None:
+        cfg = self.cfg.discover_metadata.get("linuxserver.io")
+        if cfg is None or not cfg.enabled:
+            return
+
+        try:
+            with SyncCacheClient(headers=[("cache-control", f"max-age={cfg.cache_ttl}")]) as client:
+                log.debug(f"Fetching linuxserver.io metadata from API, cache_ttl={cfg.cache_ttl}")
+                response: Response = client.get(
+                    "https://api.linuxserver.io/api/v1/images?include_config=false&include_deprecated=false"
+                )
+            if response.status_code != 200:
+                log.error("Failed to fetch linuxserver.io metadata, non-200 response", status_code=response.status_code)
+                return
+            api_data: Any = response.json()
+            repos: list = api_data.get("data", {}).get("repositories", {}).get("linuxserver", [])
+        except Exception:
+            log.exception("Failed to fetch linuxserver.io metadata")
+            return
+
+        added = 0
+        for repo in repos:
+            image_name = repo.get("name")
+            if image_name and image_name not in self.pkgs:
+                self.pkgs[image_name] = PackageUpdateInfo(
+                    DockerPackageUpdateInfo(f"lscr.io/linuxserver/{image_name}"),
+                    logo_url=repo["project_logo"],
+                    release_notes_url=f"{repo['github_url']}/releases",
+                )
+                added += 1
+                log.debug("Added linuxserver.io package", pkg=image_name)
+        log.info(f"Added {added} linuxserver.io package details")
+
+
+def fetch_url(
+    url: str, cache_ttl: int = 300, bearer_token: str | None = None, response_type: str | None = None
+) -> Response | None:
+    try:
+        headers = [("cache-control", f"max-age={cache_ttl}")]
+        if bearer_token:
+            headers.append(("Authorization", f"Bearer {bearer_token}"))
+        if response_type:
+            headers.append(("Accept", response_type))
+        with SyncCacheClient(headers=headers) as client:
+            log.debug(f"Fetching URL {url}, cache_ttl={cache_ttl}")
+            response: Response = client.get(url)
+            if not response.is_success:
+                log.debug("URL %s fetch returned non-success status: %s", url, response.status_code)
+            return response
+    except Exception as e:
+        log.debug("URL %s failed to fetch: %s", url, e)
+        return None
+
+
+def validate_url(url: str, cache_ttl: int = 300) -> bool:
+    response: Response | None = fetch_url(url, cache_ttl=cache_ttl)
+    return response is not None and response.is_success
+
+
+SOURCE_PLATFORM_GITHUB = "GitHub"
+SOURCE_PLATFORMS = {SOURCE_PLATFORM_GITHUB: r"https://github.com/.*"}
+DIFF_URL_TEMPLATES = {
+    SOURCE_PLATFORM_GITHUB: "{source}/commit/{revision}",
+}
+RELEASE_URL_TEMPLATES = {SOURCE_PLATFORM_GITHUB: "{source}/releases/tag/{version}"}
+
+
+class SourceReleaseEnricher:
+    def __init__(self) -> None:
+        self.log: Any = structlog.get_logger().bind(integration="docker")
+
+    def enrich(self, annotations: dict[str, str]) -> dict[str, str]:
+        results: dict[str, str] = {}
+        image_version: str | None = annotations.get("org.opencontainers.image.version")
+        image_digest: str | None = annotations.get("org.opencontainers.image.revision")
+        source = annotations.get("org.opencontainers.image.source")
+        source_platforms = [platform for platform, pattern in SOURCE_PLATFORMS.items() if re.match(pattern, source or "")]
+        if not source_platforms:
+            self.log.debug("No known source platform found on container", source=source)
+            return results
+        source_platform = source_platforms[0]
+
+        if source:
+            template_vars: dict[str, str | None] = {
+                "source": source,
+                "version": image_version,
+                "revision": image_digest,
+            }
+            diff_url = DIFF_URL_TEMPLATES[source_platform].format(**template_vars)
+            if validate_url(diff_url):
+                results["diff_url"] = diff_url
+
+            release_url = RELEASE_URL_TEMPLATES[source_platform].format(**template_vars)
+
+            if validate_url(release_url):
+                results["release_url"] = release_url
+
+        if source_platform == SOURCE_PLATFORM_GITHUB and source:
+            base_api = source.replace("https://github.com", "https://api.github.com/repos")
+            api_response: Response | None = fetch_url(f"{base_api}/releases/tags/{image_version}")
+            if api_response and api_response.is_success:
+                api_results: Any = httpx_json_content(api_response, {})
+                results["release_summary"] = api_results.get("body")  # ty:ignore[possibly-missing-attribute]
+                reactions = api_results.get("reactions")  # ty:ignore[possibly-missing-attribute]
+                if reactions:
+                    results["net_score"] = reactions.get("+1", 0) - reactions.get("-1", 0)
+            else:
+                self.log.debug(
+                    "Failed to fetch GitHub release info",
+                    url=f"{base_api}/releases/tags/{image_version}",
+                    status_code=(api_response and api_response.status_code) or None,
+                )
+        return results
+
+
+class AuthError(Exception):
+    pass
+
+
+REGISTRIES = {
+    # registry: (auth_host, api_host, service)
+    "docker.io": ("auth.docker.io", "registry-1.docker.io", "registry.docker.io"),
+    "mcr.microsoft.com": (None, "mcr.microsoft.com", "mcr.microsoft.com"),
+    "ghcr.io": ("ghcr.io", "ghcr.io", "ghcr.io"),
+    "lscr.io": ("ghcr.io", "lscr.io", "ghcr.io"),
+    "codeberg.org": ("codeberg.org", "codeberg.org", "container_registry"),
+}
+
+
+def httpx_json_content(response: Response, default: Any = None) -> Any | None:
+    if response and "json" in response.headers.get("content-type"):
+        try:
+            return response.json()
+        except Exception:
+            log.debug("Failed to parse JSON response: %s", response.text)
+    return default
+
+
+class LabelEnricher:
+    def __init__(self) -> None:
+        self.log: Any = structlog.get_logger().bind(integration="docker")
+
+    def fetch_token(self, auth_host: str, service: str, image_name: str) -> str | None:
+        logger = self.log.bind(image_name=image_name, action="auth_registry")
+        auth_url: str = f"https://{auth_host}/token?scope=repository:{image_name}:pull&service={service}"
+        response: Response | None = fetch_url(auth_url, cache_ttl=30)
+        if response and response.is_success:
+            api_data = httpx_json_content(response, {})
+            token: str | None = api_data.get("token") if api_data else None
+            if token:
+                return token
+            logger.warning("No token found in response")
+            raise AuthError(f"No token found in response for {image_name}")
+
+        logger.debug(
+            "Non-success response fetching token: %s",
+            (response and response.status_code) or None,
+        )
+        if response and response.status_code == 404:
+            response = fetch_url(f"https://{auth_host}/v2/")
+            if response and response.status_code == 401:
+                auth = response.headers.get("www-authenticate")
+                if not auth:
+                    logger.debug("No www-authenticate header found in 401 response")
+                    raise AuthError(f"No www-authenticate header found on 401 for {image_name}")
+                match = re.search(r'realm="([^"]+)",service="([^"]+)",scope="([^"]+)"', auth)
+                if not match:
+                    logger.debug("No realm/service/scope found in www-authenticate header")
+                    raise AuthError(f"No realm/service/scope found on 401 headers for {image_name}")
+
+                realm, service, scope = match.groups()
+                auth_url = f"{realm}?service={service}&scope={scope}"
+                response = fetch_url(auth_url)
+                if response and response.is_success:
+                    token_data = response.json()
+                    logger.debug("Fetched registry token")
+                    return token_data.get("token")
+
+        logger.debug("Failed to fetch registry token")
+        raise AuthError(f"Failed to fetch token for {image_name}")
+
+    def fetch_annotations(
+        self,
+        image_ref: str,
+        os: str,
+        arch: str,
+        token: str | None = None,
+        mutable_cache_ttl: int = 600,
+        immutable_cache_ttl: int = 86400,
+    ) -> dict[str, str]:
+        logger = self.log.bind(image_ref=image_ref, action="enrich_registry")
+        annotations: dict[str, str] = {}
+        if token:
+            logger.debug("Using provided token to fetch manifest for image %s", image_ref)
+        registry, ref = resolve_repository_name(image_ref)
+        default_host = (registry, registry, registry)
+        auth_host: str | None = REGISTRIES.get(registry, default_host)[0]
+        api_host: str | None = REGISTRIES.get(registry, default_host)[1]
+        service: str = REGISTRIES.get(registry, default_host)[2]
+        img_name = ref.split(":")[0] if ":" in ref else ref
+        img_name = img_name if "/" in img_name else f"library/{img_name}"
+        if auth_host is not None and token is None:
+            token = self.fetch_token(auth_host, service, img_name)
+
+        img_tag = ref.split(":")[1] if ":" in ref else "latest"
+        img_tag = img_tag.split("@")[0] if "@" in img_tag else img_tag
+        response: Response | None = fetch_url(
+            f"https://{api_host}/v2/{img_name}/manifests/{img_tag}",
+            cache_ttl=mutable_cache_ttl,
+            bearer_token=token,
+            response_type="application/vnd.oci.image.index.v1+json",
+        )
+        if response is None:
+            logger.debug("Empty response for manifest for image")
+            return annotations
+        if not response.is_success:
+            api_data = httpx_json_content(response, {})
+            logger.warning(
+                "Failed to fetch manifest: %s",
+                api_data.get("errors") if api_data else response.text,
+            )
+            return annotations
+        index = response.json()
+        logger.debug(
+            "INDEX %s manifests, %s annotations",
+            len(index.get("manifests", [])),
+            len(index.get("annotations", [])),
+        )
+        annotations = index.get("annotations", {})
+        for m in index.get("manifests", []):
+            platform_info = m.get("platform", {})
+            if platform_info.get("os") == os and platform_info.get("architecture") == arch:
+                digest = m.get("digest")
+                media_type = m.get("mediaType")
+                response = fetch_url(
+                    f"https://{api_host}/v2/{img_name}/manifests/{digest}",
+                    cache_ttl=immutable_cache_ttl,
+                    bearer_token=token,
+                    response_type=media_type,
+                )
+                if response and response.is_success:
+                    api_data = httpx_json_content(response, None)
+                    if api_data:
+                        logger.debug(
+                            "MANIFEST %s layers, %s annotations",
+                            len(api_data.get("layers", [])),
+                            len(api_data.get("annotations", [])),
+                        )
+                        if api_data.get("annotations"):
+                            annotations.update(api_data.get("annotations", {}))
+                    else:
+                        logger.debug("No annotations found in manifest: %s", api_data)
+
+        if not annotations:
+            logger.debug("No annotations found from registry data")
+        return annotations
+
+
+r"""
+https://ghcr.io/token\?scope\="repository:rhizomatics/updates2mqtt:pull"
+https://ghcr.io/v2/rhizomatics/updates2mqtt/manifests/sha256:2c8edc1f9400ef02a93c3b754d4419082ceb5d049178c3a3968e3fd56caf7f29 Accept:application/vnd.oci.image.index.v1+json Accept:application/vnd.oci.image.manifest.v1+json Accept:application/vnd.docker.distribution.manifest.v2+json
+https://ghcr.io/v2/rhizomatics/updates2mqtt/manifests/latest Accept:application/vnd.oci.image.index.v1+json Accept:application/vnd.oci.image.manifest.v1+json Accept:appli
+"""  # noqa: E501
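For orientation, the flow `LabelEnricher` implements is the standard OCI distribution sequence: fetch an anonymous pull token from the registry's auth host, fetch the multi-arch image index, then fetch the per-platform manifest for its annotations. Below is a minimal standalone sketch of the first two steps against docker.io, using plain httpx rather than the module's cached `fetch_url` helper; the image name and tag are illustrative, and the hosts mirror the `REGISTRIES` table above.

```python
# Sketch only: anonymous token + image-index fetch for docker.io, approximating
# LabelEnricher.fetch_token / fetch_annotations without hishel caching.
import httpx

image, tag = "library/nginx", "latest"  # illustrative image reference

# 1. Anonymous pull token (auth_host and service as in the REGISTRIES table)
token = httpx.get(
    "https://auth.docker.io/token",
    params={"scope": f"repository:{image}:pull", "service": "registry.docker.io"},
).json()["token"]

# 2. Multi-arch index, requesting the OCI index media type
index = httpx.get(
    f"https://registry-1.docker.io/v2/{image}/manifests/{tag}",
    headers={
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.oci.image.index.v1+json",
    },
).json()

# Index-level OCI annotations (may be empty for images that don't publish them)
print(index.get("annotations", {}))
print([m.get("platform") for m in index.get("manifests", [])])
```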
updates2mqtt/integrations/git_utils.py

@@ -1,6 +1,8 @@
 import datetime
+import re
 import subprocess
 from pathlib import Path
+from re import Match
 
 import structlog
 
@@ -12,11 +14,11 @@ def git_trust(repo_path: Path, git_path: Path) -> bool:
         subprocess.run(f"{git_path} config --global --add safe.directory {repo_path}", check=True, shell=True, cwd=repo_path)
         return True
     except Exception as e:
-        log.warn("GIT Unable to trust repo at %s: %s", repo_path, e)
+        log.warn("GIT Unable to trust repo at %s: %s", repo_path, e, action="git_trust")
         return False
 
 
-def git_timestamp(repo_path: Path, git_path: Path) -> datetime.datetime | None:
+def git_iso_timestamp(repo_path: Path, git_path: Path) -> str | None:
     result = None
     try:
         result = subprocess.run(
@@ -27,15 +29,49 @@ def git_timestamp(repo_path: Path, git_path: Path) -> datetime.datetime | None:
             capture_output=True,
             check=True,
         )
-
+        # round-trip the iso format for pythony consistency
+        return datetime.datetime.fromisoformat(result.stdout.strip()).isoformat()
     except subprocess.CalledProcessError as cpe:
-        log.warn("GIT No result from git log at %s: %s", repo_path, cpe)
+        log.warn("GIT No result from git log at %s: %s", repo_path, cpe, action="git_iso_timestamp")
     except Exception as e:
-        log.error(
+        log.error(
+            "GIT Unable to parse timestamp at %s - %s: %s",
+            repo_path,
+            result.stdout if result else "<NO RESULT>",
+            e,
+            action="git_iso_timestamp",
+        )
     return None
 
 
-def
+def git_local_version(repo_path: Path, git_path: Path) -> str | None:
+    result = None
+    try:
+        result = subprocess.run(
+            f"{git_path} rev-parse HEAD",
+            cwd=repo_path,
+            shell=True,
+            text=True,
+            capture_output=True,
+            check=True,
+        )
+        if result.returncode == 0:
+            log.debug("Local git rev-parse", action="git_local_version", path=repo_path, version=result.stdout.strip())
+            return f"git:{result.stdout.strip()}"[:19]
+    except subprocess.CalledProcessError as cpe:
+        log.warn("GIT No result from git rev-parse at %s: %s", repo_path, cpe, action="git_local_version")
+    except Exception as e:
+        log.error(
+            "GIT Unable to retrieve version at %s - %s: %s",
+            repo_path,
+            result.stdout if result else "<NO RESULT>",
+            e,
+            action="git_local_version",
+        )
+    return None
+
+
+def git_check_update_available(repo_path: Path, git_path: Path, timeout: int = 120) -> int:
     result = None
     try:
         # check if remote repo ahead
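As a side note, the new `git_local_version` reports HEAD as a `git:`-prefixed string truncated to 19 characters, i.e. the prefix plus a 15-character abbreviated SHA. A quick illustration with a hypothetical commit hash:

```python
# Hypothetical rev-parse output; shows the 19-char truncation that
# git_local_version applies ("git:" prefix + first 15 chars of the SHA).
sha = "4f2a9c1d8e7b6a5f4e3d2c1b0a9f8e7d6c5b4a39"
print(f"git:{sha}"[:19])  # -> git:4f2a9c1d8e7b6a5
```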
@@ -48,19 +84,40 @@ def git_check_update_available(repo_path: Path, git_path: Path, timeout: int = 1
             cwd=repo_path,
             timeout=timeout,
         )
-        if result.returncode == 0
-
-
+        if result.returncode == 0:
+            count_match: Match[str] | None = re.search(
+                r"Your branch is behind.*by (\d+) commit", result.stdout, flags=re.MULTILINE
+            )
+            if count_match and count_match.groups():
+                log.debug(
+                    "Local git repo update available: %s (%s)",
+                    count_match.group(1),
+                    result.stdout.strip(),
+                    action="git_check",
+                    path=repo_path,
+                )
+                return int(count_match.group(1))
+            log.debug("Local git repo no update available", action="git_check", path=repo_path, status=result.stdout.strip())
+            return 0
+
+        log.debug(
+            "No git update available",
+            action="git_check",
+            path=repo_path,
+            returncode=result.returncode,
+            stdout=result.stdout,
+            stderr=result.stderr,
+        )
     except Exception as e:
-        log.warn("GIT Unable to check status %s: %s", result.stdout if result else "<NO RESULT>", e)
-        return
+        log.warn("GIT Unable to check status %s: %s", result.stdout if result else "<NO RESULT>", e, action="git_check")
+        return 0
 
 
 def git_pull(repo_path: Path, git_path: Path) -> bool:
-    log.info("GIT Pulling git at %s", repo_path)
+    log.info("GIT Pulling git at %s", repo_path, action="git_pull")
     proc = subprocess.run(f"{git_path} pull", shell=True, check=False, cwd=repo_path, timeout=300)
     if proc.returncode == 0:
-        log.info("GIT pull at %s successful", repo_path)
+        log.info("GIT pull at %s successful", repo_path, action="git_pull")
         return True
-    log.warn("GIT pull at %s failed: %s", repo_path, proc.returncode)
+    log.warn("GIT pull at %s failed: %s", repo_path, proc.returncode, action="git_pull", stdout=proc.stdout, stderr=proc.stderr)
     return False
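The update check now returns a commit count rather than bailing out with a bare `return`, parsed from `git status` output with the regex in the hunk above. A self-contained check of that pattern against representative output (the sample text is illustrative, matching the phrasing `git status` prints after a fetch when the remote is ahead):

```python
import re

# Representative `git status` output when the remote branch is ahead
sample = (
    "On branch main\n"
    "Your branch is behind 'origin/main' by 3 commits, and can be fast-forwarded.\n"
)
count_match = re.search(r"Your branch is behind.*by (\d+) commit", sample, flags=re.MULTILINE)
print(int(count_match.group(1)) if count_match else 0)  # -> 3
```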