arch-ops-server 0.1.3__py3-none-any.whl → 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arch_ops_server/__init__.py +87 -1
- arch_ops_server/config.py +361 -0
- arch_ops_server/logs.py +345 -0
- arch_ops_server/mirrors.py +397 -0
- arch_ops_server/news.py +288 -0
- arch_ops_server/pacman.py +985 -0
- arch_ops_server/server.py +1257 -61
- arch_ops_server/system.py +307 -0
- arch_ops_server-3.0.0.dist-info/METADATA +250 -0
- arch_ops_server-3.0.0.dist-info/RECORD +16 -0
- arch_ops_server-0.1.3.dist-info/METADATA +0 -133
- arch_ops_server-0.1.3.dist-info/RECORD +0 -11
- {arch_ops_server-0.1.3.dist-info → arch_ops_server-3.0.0.dist-info}/WHEEL +0 -0
- {arch_ops_server-0.1.3.dist-info → arch_ops_server-3.0.0.dist-info}/entry_points.txt +0 -0
arch_ops_server/__init__.py
CHANGED
|
@@ -18,7 +18,54 @@ from .aur import (
|
|
|
18
18
|
analyze_package_metadata_risk,
|
|
19
19
|
install_package_secure
|
|
20
20
|
)
|
|
21
|
-
from .pacman import
|
|
21
|
+
from .pacman import (
|
|
22
|
+
get_official_package_info,
|
|
23
|
+
check_updates_dry_run,
|
|
24
|
+
remove_package,
|
|
25
|
+
remove_packages_batch,
|
|
26
|
+
list_orphan_packages,
|
|
27
|
+
remove_orphans,
|
|
28
|
+
find_package_owner,
|
|
29
|
+
list_package_files,
|
|
30
|
+
search_package_files,
|
|
31
|
+
verify_package_integrity,
|
|
32
|
+
list_package_groups,
|
|
33
|
+
list_group_packages,
|
|
34
|
+
list_explicit_packages,
|
|
35
|
+
mark_as_explicit,
|
|
36
|
+
mark_as_dependency,
|
|
37
|
+
check_database_freshness
|
|
38
|
+
)
|
|
39
|
+
from .system import (
|
|
40
|
+
get_system_info,
|
|
41
|
+
check_disk_space,
|
|
42
|
+
get_pacman_cache_stats,
|
|
43
|
+
check_failed_services,
|
|
44
|
+
get_boot_logs
|
|
45
|
+
)
|
|
46
|
+
from .news import (
|
|
47
|
+
get_latest_news,
|
|
48
|
+
check_critical_news,
|
|
49
|
+
get_news_since_last_update
|
|
50
|
+
)
|
|
51
|
+
from .logs import (
|
|
52
|
+
get_transaction_history,
|
|
53
|
+
find_when_installed,
|
|
54
|
+
find_failed_transactions,
|
|
55
|
+
get_database_sync_history
|
|
56
|
+
)
|
|
57
|
+
from .mirrors import (
|
|
58
|
+
list_active_mirrors,
|
|
59
|
+
test_mirror_speed,
|
|
60
|
+
suggest_fastest_mirrors,
|
|
61
|
+
check_mirrorlist_health
|
|
62
|
+
)
|
|
63
|
+
from .config import (
|
|
64
|
+
analyze_pacman_conf,
|
|
65
|
+
analyze_makepkg_conf,
|
|
66
|
+
check_ignored_packages,
|
|
67
|
+
get_parallel_downloads_setting
|
|
68
|
+
)
|
|
22
69
|
from .utils import IS_ARCH, run_command
|
|
23
70
|
|
|
24
71
|
# Import server from the server module
|
|
@@ -71,6 +118,45 @@ __all__ = [
|
|
|
71
118
|
# Pacman
|
|
72
119
|
"get_official_package_info",
|
|
73
120
|
"check_updates_dry_run",
|
|
121
|
+
"remove_package",
|
|
122
|
+
"remove_packages_batch",
|
|
123
|
+
"list_orphan_packages",
|
|
124
|
+
"remove_orphans",
|
|
125
|
+
"find_package_owner",
|
|
126
|
+
"list_package_files",
|
|
127
|
+
"search_package_files",
|
|
128
|
+
"verify_package_integrity",
|
|
129
|
+
"list_package_groups",
|
|
130
|
+
"list_group_packages",
|
|
131
|
+
"list_explicit_packages",
|
|
132
|
+
"mark_as_explicit",
|
|
133
|
+
"mark_as_dependency",
|
|
134
|
+
"check_database_freshness",
|
|
135
|
+
# System
|
|
136
|
+
"get_system_info",
|
|
137
|
+
"check_disk_space",
|
|
138
|
+
"get_pacman_cache_stats",
|
|
139
|
+
"check_failed_services",
|
|
140
|
+
"get_boot_logs",
|
|
141
|
+
# News
|
|
142
|
+
"get_latest_news",
|
|
143
|
+
"check_critical_news",
|
|
144
|
+
"get_news_since_last_update",
|
|
145
|
+
# Logs
|
|
146
|
+
"get_transaction_history",
|
|
147
|
+
"find_when_installed",
|
|
148
|
+
"find_failed_transactions",
|
|
149
|
+
"get_database_sync_history",
|
|
150
|
+
# Mirrors
|
|
151
|
+
"list_active_mirrors",
|
|
152
|
+
"test_mirror_speed",
|
|
153
|
+
"suggest_fastest_mirrors",
|
|
154
|
+
"check_mirrorlist_health",
|
|
155
|
+
# Config
|
|
156
|
+
"analyze_pacman_conf",
|
|
157
|
+
"analyze_makepkg_conf",
|
|
158
|
+
"check_ignored_packages",
|
|
159
|
+
"get_parallel_downloads_setting",
|
|
74
160
|
# Utils
|
|
75
161
|
"IS_ARCH",
|
|
76
162
|
"run_command",
|
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
# SPDX-License-Identifier: GPL-3.0-only OR MIT
|
|
2
|
+
"""
|
|
3
|
+
Configuration file parsing module.
|
|
4
|
+
Parses and analyzes pacman and makepkg configuration files.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
import re
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Dict, Any, List, Optional
|
|
11
|
+
|
|
12
|
+
from .utils import (
|
|
13
|
+
IS_ARCH,
|
|
14
|
+
create_error_response,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
# Configuration file paths
|
|
20
|
+
PACMAN_CONF = "/etc/pacman.conf"
|
|
21
|
+
MAKEPKG_CONF = "/etc/makepkg.conf"
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def parse_config_file(file_path: str) -> Dict[str, Any]:
    """
    Parse a pacman-style configuration file with an INI-like format.

    Recognizes three kinds of content:
      * ``# comment`` lines (recorded with their line number),
      * ``[Section]`` headers — every section other than ``[options]``
        is treated as a repository definition,
      * ``Key = Value`` pairs, attached to the current section.

    Bare flag options without a value (e.g. ``Color``, ``ILoveCandy``)
    are skipped, matching the original behaviour.

    Args:
        file_path: Path to the configuration file.

    Returns:
        Dict with keys ``options`` (dict of key/value strings),
        ``repositories`` (list of dicts with ``name``/``line`` and,
        when the section has key/value lines, ``config``) and
        ``comments`` (list of dicts with ``line``/``text``).  Parsing
        is best-effort: failures are logged and whatever was collected
        so far is returned.
    """
    config: Dict[str, Any] = {
        "options": {},
        "repositories": [],
        "comments": [],
    }

    current_section: Optional[str] = None
    # Repository entry for the current section, so key/value lines do
    # not need a linear scan of config["repositories"] each time.
    current_repo: Optional[Dict[str, Any]] = None

    try:
        with open(file_path, 'r') as f:
            for line_num, line in enumerate(f, 1):
                line = line.strip()

                # Skip empty lines
                if not line:
                    continue

                # Store comments
                if line.startswith('#'):
                    config["comments"].append({
                        "line": line_num,
                        "text": line,
                    })
                    continue

                # Section headers [SectionName].  Repository names may
                # contain characters outside \w (e.g. "core-testing"),
                # so match anything up to the closing bracket.
                section_match = re.match(r'\[([^\]]+)\]', line)
                if section_match:
                    current_section = section_match.group(1)
                    current_repo = None

                    # Any non-[options] section is a repository
                    if current_section not in ("options", "Options"):
                        current_repo = {
                            "name": current_section,
                            "line": line_num,
                        }
                        config["repositories"].append(current_repo)
                    continue

                # Key = Value pairs
                if '=' in line:
                    key, value = line.split('=', 1)
                    key = key.strip()
                    value = value.strip()

                    if current_section and current_section.lower() == "options":
                        # NOTE: repeated keys (pacman allows multiple
                        # IgnorePkg lines) keep only the last value.
                        config["options"][key] = value
                    elif current_repo is not None:
                        current_repo.setdefault("config", {})[key] = value

    except Exception as e:
        # Best-effort parser: log and return the partial structure.
        logger.error(f"Failed to parse config file {file_path}: {e}")

    return config
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
async def analyze_pacman_conf() -> Dict[str, Any]:
    """
    Parse and analyze pacman.conf.

    Returns:
        Dict with the parsed pacman configuration (repositories,
        ParallelDownloads, ignore lists, signature settings, plus the
        raw parse), or an error response when not on Arch Linux, the
        file is missing, or parsing fails.
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "This feature is only available on Arch Linux"
        )

    logger.info("Analyzing pacman.conf")

    try:
        conf_path = Path(PACMAN_CONF)

        if not conf_path.exists():
            return create_error_response(
                "NotFound",
                f"pacman.conf not found at {PACMAN_CONF}"
            )

        parsed = parse_config_file(PACMAN_CONF)
        opts = parsed.get("options", {})

        # Multi-value options are whitespace-separated name lists.
        pkg_ignores = opts.get("IgnorePkg", "").split()
        grp_ignores = opts.get("IgnoreGroup", "").split()

        # ParallelDownloads defaults to 1 when absent or malformed.
        try:
            parallel = int(opts.get("ParallelDownloads", "1"))
        except ValueError:
            parallel = 1

        repo_names = [entry["name"] for entry in parsed.get("repositories", [])]

        logger.info(f"Parsed pacman.conf: {len(repo_names)} repos, {parallel} parallel downloads")

        return {
            "config_path": str(conf_path),
            "repositories": repo_names,
            "repository_count": len(repo_names),
            "parallel_downloads": parallel,
            "ignored_packages": pkg_ignores,
            "ignored_groups": grp_ignores,
            "sig_level": opts.get("SigLevel", ""),
            "local_file_sig_level": opts.get("LocalFileSigLevel", ""),
            "all_options": opts,
            "raw_config": parsed,
        }

    except Exception as e:
        logger.error(f"Failed to analyze pacman.conf: {e}")
        return create_error_response(
            "ConfigParseError",
            f"Failed to analyze pacman.conf: {str(e)}"
        )
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
async def analyze_makepkg_conf() -> Dict[str, Any]:
    """
    Parse and analyze makepkg.conf.

    makepkg.conf is a bash script; this parser handles only single-line
    ``VAR=value`` assignments (plain, quoted, or bash-array values such
    as ``OPTIONS=(strip docs ...)``).  Multi-line arrays and values
    built from shell expansion are not resolved — TODO confirm whether
    any deployment relies on those.

    Returns:
        Dict with compiler flags, MAKEFLAGS job count, BUILDENV/OPTIONS
        lists, CARCH, PKGEXT, and the full raw key/value map — or an
        error response when not on Arch Linux / the file is missing /
        parsing fails.
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "This feature is only available on Arch Linux"
        )

    logger.info("Analyzing makepkg.conf")

    try:
        makepkg_conf = Path(MAKEPKG_CONF)

        if not makepkg_conf.exists():
            return create_error_response(
                "NotFound",
                f"makepkg.conf not found at {MAKEPKG_CONF}"
            )

        config: Dict[str, str] = {}

        # Parse as shell script variables: VAR=value / VAR="value" / VAR=(...)
        with open(makepkg_conf, 'r') as f:
            for line in f:
                line = line.strip()

                # Skip comments and empty lines
                if not line or line.startswith('#'):
                    continue

                match = re.match(r'^([A-Z_]+)=(.+)$', line)
                if not match:
                    continue

                key = match.group(1)
                value = match.group(2)

                # Bash arrays (BUILDENV, OPTIONS, ...): strip the
                # surrounding parentheses so the space-separated items
                # split cleanly below.  The previous version left "("
                # and ")" attached to the first/last item.
                if value.startswith('(') and value.endswith(')'):
                    value = value[1:-1]

                # Remove surrounding quotes
                value = value.strip('"').strip("'")

                config[key] = value

        # Extract important settings
        cflags = config.get("CFLAGS", "")
        cxxflags = config.get("CXXFLAGS", "")
        makeflags = config.get("MAKEFLAGS", "")
        buildenv = config.get("BUILDENV", "")
        options = config.get("OPTIONS", "")

        # Parse MAKEFLAGS for the -jN job count (default 1)
        jobs = 1
        jobs_match = re.search(r'-j\s*(\d+)', makeflags)
        if jobs_match:
            jobs = int(jobs_match.group(1))

        # Parse BUILDENV
        buildenv_list = [opt.strip() for opt in buildenv.split()] if buildenv else []

        # Parse OPTIONS
        options_list = [opt.strip() for opt in options.split()] if options else []

        # Detect architecture
        carch = config.get("CARCH", "unknown")

        # Compression settings
        pkgext = config.get("PKGEXT", ".pkg.tar.zst")

        logger.info(f"Parsed makepkg.conf: {jobs} jobs, {carch} arch")

        return {
            "config_path": str(makepkg_conf),
            "cflags": cflags,
            "cxxflags": cxxflags,
            "makeflags": makeflags,
            "jobs": jobs,
            "buildenv": buildenv_list,
            "options": options_list,
            "carch": carch,
            "pkgext": pkgext,
            "all_config": config
        }

    except Exception as e:
        logger.error(f"Failed to analyze makepkg.conf: {e}")
        return create_error_response(
            "ConfigParseError",
            f"Failed to analyze makepkg.conf: {str(e)}"
        )
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
async def check_ignored_packages() -> Dict[str, Any]:
    """
    List packages ignored in updates.

    Builds on :func:`analyze_pacman_conf` and additionally warns when
    core system packages (kernel, systemd, pacman, glibc) are on the
    ignore list.

    Returns:
        Dict with ignored packages/groups, their counts, any critical
        packages found among them, warning strings, and a ``has_ignored``
        flag — or an error response on failure.
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "This feature is only available on Arch Linux"
        )

    logger.info("Checking ignored packages")

    try:
        conf = await analyze_pacman_conf()

        # Propagate any error response from the underlying analysis.
        if "error" in conf:
            return conf

        pkgs = conf.get("ignored_packages", [])
        groups = conf.get("ignored_groups", [])

        # Packages whose updates should never be held back.
        critical = {"linux", "systemd", "pacman", "glibc"}
        critical_hits = [name for name in pkgs if name in critical]

        warnings = (
            [f"Critical system packages are ignored: {', '.join(critical_hits)}"]
            if critical_hits
            else []
        )

        logger.info(f"Found {len(pkgs)} ignored packages, {len(groups)} ignored groups")

        return {
            "ignored_packages": pkgs,
            "ignored_packages_count": len(pkgs),
            "ignored_groups": groups,
            "ignored_groups_count": len(groups),
            "critical_ignored": critical_hits,
            "warnings": warnings,
            "has_ignored": len(pkgs) > 0 or len(groups) > 0,
        }

    except Exception as e:
        logger.error(f"Failed to check ignored packages: {e}")
        return create_error_response(
            "ConfigCheckError",
            f"Failed to check ignored packages: {str(e)}"
        )
|
|
315
|
+
|
|
316
|
+
|
|
317
|
+
async def get_parallel_downloads_setting() -> Dict[str, Any]:
    """
    Get parallel downloads configuration.

    Reads ParallelDownloads via :func:`analyze_pacman_conf` and attaches
    tuning recommendations for the extremes (default of 1, or > 10).

    Returns:
        Dict with the current setting, whether it is the default, and a
        list of recommendation strings — or an error response on failure.
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "This feature is only available on Arch Linux"
        )

    logger.info("Checking parallel downloads setting")

    try:
        conf = await analyze_pacman_conf()

        # Propagate any error response from the underlying analysis.
        if "error" in conf:
            return conf

        setting = conf.get("parallel_downloads", 1)

        advice: List[str] = []
        if setting == 1:
            advice.append("Consider increasing ParallelDownloads to 3-5 for faster updates")
        elif setting > 10:
            advice.append("Very high ParallelDownloads may strain mirrors; consider reducing to 5-7")

        logger.info(f"Parallel downloads: {setting}")

        return {
            "parallel_downloads": setting,
            "is_default": setting == 1,
            "recommendations": advice,
        }

    except Exception as e:
        logger.error(f"Failed to check parallel downloads: {e}")
        return create_error_response(
            "ConfigCheckError",
            f"Failed to check parallel downloads setting: {str(e)}"
        )
|
|
361
|
+
|