arch-ops-server 3.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,397 @@
+ # SPDX-License-Identifier: GPL-3.0-only OR MIT
+ """
+ Mirror management module.
+ Manages and optimizes pacman mirrors for better download performance.
+ """
+
+ import logging
+ import re
+ import time
+ from pathlib import Path
+ from typing import Dict, Any, Optional
+
+ import httpx
+
+ from .utils import (
+     IS_ARCH,
+     create_error_response,
+ )
+
+ logger = logging.getLogger(__name__)
+
+ # Mirror list path
+ MIRRORLIST_PATH = "/etc/pacman.d/mirrorlist"
+
+ # Arch Linux mirror status JSON
+ MIRROR_STATUS_URL = "https://archlinux.org/mirrors/status/json/"
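+ # Fields consumed from the status JSON below (names per the public API; the
+ # delay value is reported in seconds): url, country, country_code, protocol,
+ # active, last_sync, completion_pct, delay, duration_avg, duration_stddev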
+
+
+ async def list_active_mirrors() -> Dict[str, Any]:
+     """
+     List currently configured mirrors from mirrorlist.
+
+     Returns:
+         Dict with active and commented mirrors
+     """
+     if not IS_ARCH:
+         return create_error_response(
+             "NotSupported",
+             "This feature is only available on Arch Linux"
+         )
+
+     logger.info("Reading mirrorlist configuration")
+
+     try:
+         mirrorlist = Path(MIRRORLIST_PATH)
+
+         if not mirrorlist.exists():
+             return create_error_response(
+                 "NotFound",
+                 f"Mirrorlist not found at {MIRRORLIST_PATH}"
+             )
+
+         active_mirrors = []
+         commented_mirrors = []
+
+         with open(mirrorlist, 'r') as f:
+             for line in f:
+                 line = line.strip()
+
+                 # Skip empty lines and comments that aren't mirrors
+                 if not line or (line.startswith('#') and 'Server' not in line):
+                     continue
+
+                 # Check if it's a commented mirror
+                 if line.startswith('#'):
+                     # Extract mirror URL
+                     match = re.search(r'Server\s*=\s*(.+)', line)
+                     if match:
+                         commented_mirrors.append({
+                             "url": match.group(1).strip(),
+                             "active": False
+                         })
+                 elif line.startswith('Server'):
+                     # Active mirror
+                     match = re.search(r'Server\s*=\s*(.+)', line)
+                     if match:
+                         active_mirrors.append({
+                             "url": match.group(1).strip(),
+                             "active": True
+                         })
+
+         logger.info(f"Found {len(active_mirrors)} active, {len(commented_mirrors)} commented mirrors")
+
+         return {
+             "active_count": len(active_mirrors),
+             "commented_count": len(commented_mirrors),
+             "active_mirrors": active_mirrors,
+             "commented_mirrors": commented_mirrors,
+             "mirrorlist_path": str(mirrorlist)
+         }
+
+     except Exception as e:
+         logger.error(f"Failed to read mirrorlist: {e}")
+         return create_error_response(
+             "MirrorlistError",
+             f"Failed to read mirrorlist: {str(e)}"
+         )
+
+
+ async def test_mirror_speed(mirror_url: Optional[str] = None) -> Dict[str, Any]:
+     """
+     Test mirror response time.
+
+     Args:
+         mirror_url: Specific mirror URL to test, or None to test all active mirrors
+
+     Returns:
+         Dict with mirror latency results
+     """
+     if not IS_ARCH:
+         return create_error_response(
+             "NotSupported",
+             "This feature is only available on Arch Linux"
+         )
+
+     logger.info(f"Testing mirror speed: {mirror_url or 'all active'}")
+
+     try:
+         mirrors_to_test = []
+
+         if mirror_url:
+             mirrors_to_test = [mirror_url]
+         else:
+             # Get active mirrors
+             result = await list_active_mirrors()
+             if "error" in result:
+                 return result
+
+             mirrors_to_test = [m["url"] for m in result.get("active_mirrors", [])]
+
+         if not mirrors_to_test:
+             return create_error_response(
+                 "NoMirrors",
+                 "No mirrors to test"
+             )
+
+         results = []
+
+         async with httpx.AsyncClient(timeout=10.0, follow_redirects=True) as client:
+             for mirror in mirrors_to_test:
+                 # Replace $repo and $arch with actual values for testing
+                 test_url = mirror.replace("$repo", "core").replace("$arch", "x86_64")
+
+                 # Add a test file path (core.db is small and always present)
+                 if not test_url.endswith('/'):
+                     test_url += '/'
+                 test_url += "core.db"
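+                 # e.g. a hypothetical entry "https://mirror.example.org/$repo/os/$arch"
+                 # becomes "https://mirror.example.org/core/os/x86_64/core.db"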
+
+                 try:
+                     start_time = time.time()
+                     response = await client.head(test_url)
+                     latency = (time.time() - start_time) * 1000  # Convert to ms
+
+                     results.append({
+                         "mirror": mirror,
+                         "latency_ms": round(latency, 2),
+                         "status_code": response.status_code,
+                         "success": response.status_code == 200
+                     })
+
+                 except httpx.TimeoutException:
+                     results.append({
+                         "mirror": mirror,
+                         "latency_ms": -1,
+                         "status_code": 0,
+                         "success": False,
+                         "error": "timeout"
+                     })
+
+                 except Exception as e:
+                     results.append({
+                         "mirror": mirror,
+                         "latency_ms": -1,
+                         "status_code": 0,
+                         "success": False,
+                         "error": str(e)
+                     })
+
+         # Sort by latency (successful tests first)
+         results.sort(key=lambda x: (not x["success"], x["latency_ms"] if x["latency_ms"] > 0 else float('inf')))
+
+         logger.info(f"Tested {len(results)} mirrors")
+
+         return {
+             "tested_count": len(results),
+             "results": results,
+             "fastest": results[0] if results and results[0]["success"] else None
+         }
+
+     except Exception as e:
+         logger.error(f"Failed to test mirrors: {e}")
+         return create_error_response(
+             "MirrorTestError",
+             f"Failed to test mirror speed: {str(e)}"
+         )
+
+
+ async def suggest_fastest_mirrors(
+     country: Optional[str] = None,
+     limit: int = 10
+ ) -> Dict[str, Any]:
+     """
+     Suggest optimal mirrors based on official mirror status.
+
+     Args:
+         country: Optional country code to filter mirrors (e.g., 'US', 'DE')
+         limit: Number of mirrors to suggest (default 10)
+
+     Returns:
+         Dict with recommended mirrors
+     """
+     logger.info(f"Fetching mirror suggestions (country={country}, limit={limit})")
+
+     try:
+         async with httpx.AsyncClient(timeout=15.0) as client:
+             response = await client.get(MIRROR_STATUS_URL)
+             response.raise_for_status()
+
+             data = response.json()
+             mirrors = data.get("urls", [])
+
+             if not mirrors:
+                 return create_error_response(
+                     "NoData",
+                     "No mirror data available from archlinux.org"
+                 )
+
+             # Filter mirrors
+             filtered_mirrors = []
+
+             for mirror in mirrors:
+                 # Skip if country specified and doesn't match
+                 if country and mirror.get("country_code") != country.upper():
+                     continue
+
+                 # Skip inactive mirrors
+                 if not mirror.get("active", False):
+                     continue
+
+                 # Skip mirrors that have never synced
+                 last_sync = mirror.get("last_sync")
+                 if last_sync is None:
+                     continue
+
+                 # Calculate score (lower is better)
+                 # Factors: completion percentage, sync delay, check duration
+                 completion = mirror.get("completion_pct", 0)
+                 delay = mirror.get("delay", 0) or 0  # seconds; handle None
+                 duration_avg = mirror.get("duration_avg", 0) or 0  # seconds
+
+                 # Skip incomplete mirrors
+                 if completion < 100:
+                     continue
+
+                 # Score: sync delay plus average check duration, both in hours
+                 # (the status API reports both in seconds)
+                 delay_hours = delay / 3600
+                 score = delay_hours + (duration_avg / 3600)
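+                 # e.g. delay=7200s (2 h behind) and duration_avg=0.9s:
+                 #   score = 2.0 + 0.00025 ≈ 2.0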
+
+                 filtered_mirrors.append({
+                     "url": mirror.get("url"),
+                     "country": mirror.get("country"),
+                     "country_code": mirror.get("country_code"),
+                     "protocol": mirror.get("protocol"),
+                     "completion_pct": completion,
+                     "delay_hours": round(delay_hours, 2),
+                     "duration_avg": duration_avg,
+                     "duration_stddev": mirror.get("duration_stddev"),
+                     "score": round(score, 2),
+                     "last_sync": last_sync
+                 })
+
+             # Sort by score (lower is better)
+             filtered_mirrors.sort(key=lambda x: x["score"])
+
+             # Limit results
+             suggested_mirrors = filtered_mirrors[:limit]
+
+             logger.info(f"Suggesting {len(suggested_mirrors)} mirrors")
+
+             return {
+                 "suggested_count": len(suggested_mirrors),
+                 "total_available": len(filtered_mirrors),
+                 "country_filter": country,
+                 "mirrors": suggested_mirrors
+             }
+
+     except httpx.HTTPStatusError as e:
+         logger.error(f"HTTP error fetching mirror status: {e}")
+         return create_error_response(
+             "HTTPError",
+             f"Failed to fetch mirror status: HTTP {e.response.status_code}"
+         )
+     except httpx.TimeoutException:
+         logger.error("Timeout fetching mirror status")
+         return create_error_response(
+             "Timeout",
+             "Request to mirror status API timed out"
+         )
+     except Exception as e:
+         logger.error(f"Failed to suggest mirrors: {e}")
+         return create_error_response(
+             "MirrorSuggestionError",
+             f"Failed to suggest mirrors: {str(e)}"
+         )
+
+
+ async def check_mirrorlist_health() -> Dict[str, Any]:
+     """
+     Verify mirror configuration health.
+     Checks for common issues like no active mirrors or unreachable mirrors.
+
+     Returns:
+         Dict with health assessment and recommendations
+     """
+     if not IS_ARCH:
+         return create_error_response(
+             "NotSupported",
+             "This feature is only available on Arch Linux"
+         )
+
+     logger.info("Checking mirrorlist health")
+
+     try:
+         issues = []
+         warnings = []
+         recommendations = []
+
+         # Get active mirrors
+         result = await list_active_mirrors()
+         if "error" in result:
+             return result
+
+         active_mirrors = result.get("active_mirrors", [])
+
+         # Check: No active mirrors
+         if len(active_mirrors) == 0:
+             issues.append("No active mirrors configured")
+             recommendations.append("Uncomment mirrors in /etc/pacman.d/mirrorlist or use reflector to generate a new mirrorlist")
+
+         # Check: Only one active mirror (no redundancy)
+         elif len(active_mirrors) == 1:
+             warnings.append("Only one active mirror (no redundancy)")
+             recommendations.append("Enable additional mirrors for redundancy")
+
+         # Check: Too many active mirrors (can slow down updates)
+         elif len(active_mirrors) > 10:
+             warnings.append(f"Many active mirrors ({len(active_mirrors)}) may slow down updates")
+             recommendations.append("Consider reducing to 3-5 fastest mirrors")
+
+         # Test mirrors
+         test_result = await test_mirror_speed()
+         if "error" not in test_result:
+             test_results = test_result.get("results", [])
+
+             # Check: All mirrors failing
+             successful_mirrors = [r for r in test_results if r.get("success", False)]
+
+             if len(successful_mirrors) == 0:
+                 issues.append("All mirrors are unreachable or failing")
+                 recommendations.append("Check network connectivity and consider updating mirrorlist")
+
+             # Check: High latency
+             else:
+                 avg_latency = sum(m["latency_ms"] for m in successful_mirrors) / len(successful_mirrors)
+                 if avg_latency > 1000:
+                     warnings.append(f"High average mirror latency ({avg_latency:.0f}ms)")
+                     recommendations.append("Consider using geographically closer mirrors")
+
+         # Health score
+         health_score = 100
+         health_score -= len(issues) * 40
+         health_score -= len(warnings) * 15
+         health_score = max(0, health_score)
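+         # e.g. one issue and two warnings: 100 - 40 - 30 = 30 -> "critical"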
+
+         health_status = "healthy"
+         if health_score < 50:
+             health_status = "critical"
+         elif health_score < 70:
+             health_status = "warning"
+
+         logger.info(f"Mirror health: {health_status} (score: {health_score})")
+
+         return {
+             "health_status": health_status,
+             "health_score": health_score,
+             "issues": issues,
+             "warnings": warnings,
+             "recommendations": recommendations,
+             "active_mirrors_count": len(active_mirrors)
+         }
+
+     except Exception as e:
+         logger.error(f"Failed to check mirror health: {e}")
+         return create_error_response(
+             "HealthCheckError",
+             f"Failed to check mirrorlist health: {str(e)}"
+         )
+
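+
+ # Minimal manual check; a sketch assuming the module is executed directly:
+ if __name__ == "__main__":
+     import asyncio
+     print(asyncio.run(check_mirrorlist_health()))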
@@ -0,0 +1,288 @@
+ # SPDX-License-Identifier: GPL-3.0-only OR MIT
+ """
+ Arch Linux news feed integration module.
+ Fetches and parses Arch Linux news announcements for critical updates.
+ """
+
+ import html
+ import logging
+ import re
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Dict, Any, Optional
+ from xml.etree import ElementTree as ET
+
+ import httpx
+
+ from .utils import (
+     IS_ARCH,
+     create_error_response,
+ )
+
+ logger = logging.getLogger(__name__)
+
+ # Arch Linux news RSS feed URL
+ ARCH_NEWS_URL = "https://archlinux.org/feeds/news/"
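+ # The feed is RSS 2.0: each <item> carries <title>, <link>, <pubDate>
+ # (an RFC 822 date) and an HTML <description>, parsed in get_latest_news()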
+
+ # Keywords indicating critical/manual intervention required
+ CRITICAL_KEYWORDS = [
+     "manual intervention",
+     "action required",
+     "before upgrading",
+     "breaking change",
+     "manual action",
+     "requires manual",
+     "important notice"
+ ]
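+ # Matching in check_critical_news() is a case-insensitive substring test, so a
+ # hypothetical headline "Manual intervention required for glibc" would match
+ # the "manual intervention" keyword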
+
+
+ async def get_latest_news(
+     limit: int = 10,
+     since_date: Optional[str] = None
+ ) -> Dict[str, Any]:
+     """
+     Fetch recent Arch Linux news from RSS feed.
+
+     Args:
+         limit: Maximum number of news items to return (default 10)
+         since_date: Optional date in ISO format (YYYY-MM-DD) to filter news
+
+     Returns:
+         Dict with news items (title, date, summary, link)
+     """
+     logger.info(f"Fetching latest Arch Linux news (limit={limit})")
+
+     try:
+         async with httpx.AsyncClient(timeout=10.0) as client:
+             response = await client.get(ARCH_NEWS_URL)
+             response.raise_for_status()
+
+             # Parse RSS feed
+             root = ET.fromstring(response.content)
+
+             # Find all items (RSS 2.0 format)
+             news_items = []
+
+             for item in root.findall('.//item')[:limit]:
+                 title_elem = item.find('title')
+                 link_elem = item.find('link')
+                 pub_date_elem = item.find('pubDate')
+                 description_elem = item.find('description')
+
+                 if title_elem is None or link_elem is None:
+                     continue
+
+                 title = title_elem.text
+                 link = link_elem.text
+                 pub_date = pub_date_elem.text if pub_date_elem is not None else ""
+
+                 # Parse description: strip HTML tags, then unescape entities
+                 description = ""
+                 if description_elem is not None and description_elem.text:
+                     description = html.unescape(re.sub(r'<[^>]+>', '', description_elem.text))
+                     # Truncate to first 300 chars for summary
+                     description = description[:300] + "..." if len(description) > 300 else description
+
+                 # Parse date
+                 published_date = ""
+                 if pub_date:
+                     try:
+                         # Parse RFC 822 date format
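+                         # e.g. "Mon, 06 May 2024 12:00:00 +0000"
+                         #   -> "2024-05-06T12:00:00+00:00"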
+                         dt = datetime.strptime(pub_date, "%a, %d %b %Y %H:%M:%S %z")
+                         published_date = dt.isoformat()
+                     except ValueError:
+                         published_date = pub_date
+
+                 # Filter by date if requested
+                 if since_date and published_date:
+                     try:
+                         item_date = datetime.fromisoformat(published_date.replace('Z', '+00:00'))
+                         filter_date = datetime.fromisoformat(since_date + "T00:00:00+00:00")
+                         if item_date < filter_date:
+                             continue
+                     except ValueError as e:
+                         logger.warning(f"Failed to parse date for filtering: {e}")
+
+                 news_items.append({
+                     "title": title,
+                     "link": link,
+                     "published": published_date,
+                     "summary": description.strip()
+                 })
+
+             logger.info(f"Successfully fetched {len(news_items)} news items")
+
+             return {
+                 "count": len(news_items),
+                 "news": news_items
+             }
+
+     except httpx.HTTPStatusError as e:
+         logger.error(f"HTTP error fetching news: {e}")
+         return create_error_response(
+             "HTTPError",
+             f"Failed to fetch Arch news: HTTP {e.response.status_code}"
+         )
+     except httpx.TimeoutException:
+         logger.error("Timeout fetching Arch news")
+         return create_error_response(
+             "Timeout",
+             "Request to Arch news feed timed out"
+         )
+     except ET.ParseError as e:
+         logger.error(f"Failed to parse RSS feed: {e}")
+         return create_error_response(
+             "ParseError",
+             f"Failed to parse Arch news RSS feed: {str(e)}"
+         )
+     except Exception as e:
+         logger.error(f"Unexpected error fetching news: {e}")
+         return create_error_response(
+             "NewsError",
+             f"Failed to fetch Arch news: {str(e)}"
+         )
+
+
+ async def check_critical_news(limit: int = 20) -> Dict[str, Any]:
+     """
+     Check for critical Arch Linux news requiring manual intervention.
+
+     Args:
+         limit: Number of recent news items to check (default 20)
+
+     Returns:
+         Dict with critical news items
+     """
+     logger.info("Checking for critical Arch Linux news")
+
+     result = await get_latest_news(limit=limit)
+
+     if "error" in result:
+         return result
+
+     news_items = result.get("news", [])
+     critical_items = []
+
+     # Scan title and summary for critical keywords (case-insensitive)
+     for item in news_items:
+         title_lower = item["title"].lower()
+         summary_lower = item["summary"].lower()
+
+         # Collect matching keywords; any match marks the item as critical
+         matched_keywords = [
+             keyword for keyword in CRITICAL_KEYWORDS
+             if keyword in title_lower or keyword in summary_lower
+         ]
+
+         if matched_keywords:
+             critical_items.append({
+                 **item,
+                 "matched_keywords": matched_keywords,
+                 "severity": "critical"
+             })
+
+     logger.info(f"Found {len(critical_items)} critical news items")
+
+     return {
+         "critical_count": len(critical_items),
+         "has_critical": len(critical_items) > 0,
+         "critical_news": critical_items,
+         "checked_items": len(news_items)
+     }
+
+
+ async def get_news_since_last_update() -> Dict[str, Any]:
+     """
+     Get news posted since last pacman update.
+     Parses /var/log/pacman.log for the last update timestamp.
+
+     Returns:
+         Dict with news items posted after last update
+     """
+     if not IS_ARCH:
+         return create_error_response(
+             "NotSupported",
+             "This feature is only available on Arch Linux"
+         )
+
+     logger.info("Getting news since last pacman update")
+
+     try:
+         # Parse pacman log for last update timestamp
+         pacman_log = Path("/var/log/pacman.log")
+
+         if not pacman_log.exists():
+             return create_error_response(
+                 "NotFound",
+                 "Pacman log file not found at /var/log/pacman.log"
+             )
+
+         # Find last system update timestamp
+         last_update = None
+
+         with open(pacman_log, 'r') as f:
+             for line in f:
+                 # Look for upgrade entries
+                 if " upgraded " in line or " installed " in line or "starting full system upgrade" in line:
+                     # Extract the timestamp; pacman logs use either
+                     # "[YYYY-MM-DD HH:MM]" (old) or "[YYYY-MM-DDTHH:MM:SS+ZZZZ]" (ISO 8601)
+                     match = re.match(r'\[(\d{4}-\d{2}-\d{2})[T ](\d{2}:\d{2})(?::\d{2})?([+-]\d{2}:?\d{2})?\]', line)
+                     if match:
+                         date_part, time_part, offset = match.groups()
+                         if offset and ':' not in offset:
+                             offset = offset[:3] + ':' + offset[3:]  # "+0200" -> "+02:00"
+                         date_str = f"{date_part}T{time_part}:00{offset or '+00:00'}"
+                         try:
+                             last_update = datetime.fromisoformat(date_str)
+                         except ValueError:
+                             continue
+
+         if last_update is None:
+             logger.warning("Could not determine last update timestamp")
+             return create_error_response(
+                 "NotFound",
+                 "Could not determine last system update timestamp from pacman log"
+             )
+
+         logger.info(f"Last update: {last_update.isoformat()}")
+
+         # Fetch recent news
+         result = await get_latest_news(limit=30)
+
+         if "error" in result:
+             return result
+
+         news_items = result.get("news", [])
+         news_since_update = []
+
+         for item in news_items:
+             published_str = item.get("published", "")
+             if not published_str:
+                 continue
+
+             try:
+                 published = datetime.fromisoformat(published_str.replace('Z', '+00:00'))
+                 if published > last_update:
+                     news_since_update.append(item)
+             except ValueError as e:
+                 logger.warning(f"Failed to parse date: {e}")
+                 continue
+
+         logger.info(f"Found {len(news_since_update)} news items since last update")
+
+         return {
+             "last_update": last_update.isoformat(),
+             "news_count": len(news_since_update),
+             "has_news": len(news_since_update) > 0,
+             "news": news_since_update
+         }
+
+     except Exception as e:
+         logger.error(f"Failed to get news since update: {e}")
+         return create_error_response(
+             "NewsError",
+             f"Failed to get news since last update: {str(e)}"
+         )
+
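+
+ # Minimal manual check; a sketch assuming the module is executed directly:
+ if __name__ == "__main__":
+     import asyncio
+     print(asyncio.run(check_critical_news()))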