arch-ops-server 3.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- arch_ops_server/__init__.py +176 -0
- arch_ops_server/aur.py +1190 -0
- arch_ops_server/config.py +361 -0
- arch_ops_server/http_server.py +829 -0
- arch_ops_server/logs.py +345 -0
- arch_ops_server/mirrors.py +397 -0
- arch_ops_server/news.py +288 -0
- arch_ops_server/pacman.py +1305 -0
- arch_ops_server/py.typed +0 -0
- arch_ops_server/server.py +1869 -0
- arch_ops_server/system.py +307 -0
- arch_ops_server/utils.py +313 -0
- arch_ops_server/wiki.py +245 -0
- arch_ops_server-3.0.1.dist-info/METADATA +253 -0
- arch_ops_server-3.0.1.dist-info/RECORD +17 -0
- arch_ops_server-3.0.1.dist-info/WHEEL +4 -0
- arch_ops_server-3.0.1.dist-info/entry_points.txt +4 -0
arch_ops_server/pacman.py
@@ -0,0 +1,1305 @@
# SPDX-License-Identifier: GPL-3.0-only OR MIT
"""
Pacman/Official Repository interface module.
Provides package info and update checks with hybrid local/remote approach.
"""

import logging
import re
from typing import Dict, Any, List, Optional
import httpx

from .utils import (
    IS_ARCH,
    run_command,
    create_error_response,
    check_command_exists
)

logger = logging.getLogger(__name__)

# Arch Linux package API
ARCH_PACKAGES_API = "https://archlinux.org/packages/search/json/"

# HTTP client settings
DEFAULT_TIMEOUT = 10.0


async def get_official_package_info(package_name: str) -> Dict[str, Any]:
    """
    Get information about an official repository package.

    Uses hybrid approach:
    - If on Arch Linux: Execute `pacman -Si` for local database query
    - Otherwise: Query archlinux.org API

    Args:
        package_name: Package name

    Returns:
        Dict with package information
    """
    logger.info(f"Fetching info for official package: {package_name}")

    # Try local pacman first if on Arch
    if IS_ARCH and check_command_exists("pacman"):
        info = await _get_package_info_local(package_name)
        if info is not None:
            return info
        logger.warning(f"Local pacman query failed for {package_name}, trying remote API")

    # Fallback to remote API
    return await _get_package_info_remote(package_name)


async def _get_package_info_local(package_name: str) -> Optional[Dict[str, Any]]:
    """
    Query package info using local pacman command.

    Args:
        package_name: Package name

    Returns:
        Package info dict or None if failed
    """
    try:
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-Si", package_name],
            timeout=5,
            check=False
        )

        if exit_code != 0:
            logger.debug(f"pacman -Si failed for {package_name}")
            return None

        # Parse pacman output
        info = _parse_pacman_output(stdout)

        if info:
            info["source"] = "local"
            logger.info(f"Successfully fetched {package_name} info locally")
            return info

        return None

    except Exception as e:
        logger.warning(f"Local pacman query failed: {e}")
        return None


async def _get_package_info_remote(package_name: str) -> Dict[str, Any]:
    """
    Query package info using archlinux.org API.

    Args:
        package_name: Package name

    Returns:
        Package info dict or error response
    """
    params = {
        "name": package_name,
        "exact": "on"  # Exact match only
    }

    try:
        async with httpx.AsyncClient(timeout=DEFAULT_TIMEOUT) as client:
            response = await client.get(ARCH_PACKAGES_API, params=params)
            response.raise_for_status()

            data = response.json()
            results = data.get("results", [])

            if not results:
                return create_error_response(
                    "NotFound",
                    f"Official package '{package_name}' not found in repositories"
                )

            # Take first exact match (there should only be one)
            pkg = results[0]

            info = {
                "source": "remote",
                "name": pkg.get("pkgname"),
                "repository": pkg.get("repo"),
                "version": pkg.get("pkgver"),
                "release": pkg.get("pkgrel"),
                "epoch": pkg.get("epoch"),
                "description": pkg.get("pkgdesc"),
                "url": pkg.get("url"),
                "architecture": pkg.get("arch"),
                "maintainers": pkg.get("maintainers", []),
                "packager": pkg.get("packager"),
                "build_date": pkg.get("build_date"),
                "last_update": pkg.get("last_update"),
                "licenses": pkg.get("licenses", []),
                "groups": pkg.get("groups", []),
                "provides": pkg.get("provides", []),
                "depends": pkg.get("depends", []),
                "optdepends": pkg.get("optdepends", []),
                "conflicts": pkg.get("conflicts", []),
                "replaces": pkg.get("replaces", []),
            }

            logger.info(f"Successfully fetched {package_name} info remotely")

            return info

    except httpx.TimeoutException:
        logger.error(f"Remote package info fetch timed out for: {package_name}")
        return create_error_response(
            "TimeoutError",
            f"Package info fetch timed out for: {package_name}"
        )
    except httpx.HTTPStatusError as e:
        logger.error(f"Remote package info HTTP error: {e}")
        return create_error_response(
            "HTTPError",
            f"Package info fetch failed with status {e.response.status_code}",
            str(e)
        )
    except Exception as e:
        logger.error(f"Remote package info fetch failed: {e}")
        return create_error_response(
            "InfoError",
            f"Failed to get package info: {str(e)}"
        )


def _parse_pacman_output(output: str) -> Optional[Dict[str, Any]]:
    """
    Parse pacman -Si output into structured dict.

    Args:
        output: Raw pacman -Si output

    Returns:
        Parsed package info or None
    """
    if not output.strip():
        return None

    info = {}
    current_key = None

    for line in output.split('\n'):
        # Match "Key : Value" pattern
        match = re.match(r'^(\w[\w\s]*?)\s*:\s*(.*)$', line)
        if match:
            key = match.group(1).strip().lower().replace(' ', '_')
            value = match.group(2).strip()

            # Handle special fields
            if key in ['depends_on', 'optional_deps', 'required_by',
                       'conflicts_with', 'replaces', 'groups', 'provides']:
                # These can be multi-line or space-separated
                if value.lower() == 'none':
                    info[key] = []
                else:
                    info[key] = [v.strip() for v in value.split() if v.strip()]
            else:
                info[key] = value

            current_key = key
        elif current_key and line.startswith(' '):
            # Continuation line (indented)
            continuation = line.strip()
            if continuation and current_key in info:
                if isinstance(info[current_key], list):
                    info[current_key].extend([v.strip() for v in continuation.split() if v.strip()])
                else:
                    info[current_key] += ' ' + continuation

    return info if info else None


async def check_updates_dry_run() -> Dict[str, Any]:
    """
    Check for available system updates without applying them.

    Only works on Arch Linux systems with checkupdates command.
    Requires pacman-contrib package.

    Returns:
        Dict with list of available updates or error response
    """
    logger.info("Checking for system updates (dry run)")

    # Only supported on Arch Linux
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Update checking is only supported on Arch Linux systems",
            "This server is not running on Arch Linux"
        )

    # Check if checkupdates command exists
    if not check_command_exists("checkupdates"):
        return create_error_response(
            "CommandNotFound",
            "checkupdates command not found",
            "Install pacman-contrib package: pacman -S pacman-contrib"
        )

    try:
        exit_code, stdout, stderr = await run_command(
            ["checkupdates"],
            timeout=30,  # Can take longer for sync
            check=False
        )

        # Exit code 0: updates available
        # Exit code 2: no updates available
        # Other: error

        if exit_code == 2 or not stdout.strip():
            logger.info("No updates available")
            return {
                "updates_available": False,
                "count": 0,
                "packages": []
            }

        if exit_code != 0:
            logger.error(f"checkupdates failed with code {exit_code}: {stderr}")
            return create_error_response(
                "CommandError",
                f"checkupdates command failed: {stderr}",
                f"Exit code: {exit_code}"
            )

        # Parse checkupdates output
        updates = _parse_checkupdates_output(stdout)

        logger.info(f"Found {len(updates)} available updates")

        return {
            "updates_available": True,
            "count": len(updates),
            "packages": updates
        }

    except Exception as e:
        logger.error(f"Update check failed: {e}")
        return create_error_response(
            "UpdateCheckError",
            f"Failed to check for updates: {str(e)}"
        )


def _parse_checkupdates_output(output: str) -> List[Dict[str, str]]:
    """
    Parse checkupdates command output.

    Format: "package current_version -> new_version"

    Args:
        output: Raw checkupdates output

    Returns:
        List of update dicts
    """
    updates = []

    for line in output.strip().split('\n'):
        if not line.strip():
            continue

        # Match pattern: "package old_ver -> new_ver"
        match = re.match(r'^(\S+)\s+(\S+)\s+->\s+(\S+)$', line)
        if match:
            updates.append({
                "package": match.group(1),
                "current_version": match.group(2),
                "new_version": match.group(3)
            })

    return updates


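Editor's aside, not part of the packaged pacman.py: checkupdates prints one pending upgrade per line in the form "name old_version -> new_version", which is exactly what the regex in _parse_checkupdates_output above matches. A minimal, self-contained sketch of the same parsing idea, using hypothetical package lines:

# Illustrative sketch only; mirrors _parse_checkupdates_output, not shipped with arch-ops-server.
import re

sample_output = """\
linux 6.9.1.arch1-1 -> 6.9.2.arch1-1
pacman-contrib 1.10.4-1 -> 1.10.5-1
"""  # hypothetical checkupdates output

updates = []
for line in sample_output.strip().split('\n'):
    match = re.match(r'^(\S+)\s+(\S+)\s+->\s+(\S+)$', line)
    if match:
        updates.append({
            "package": match.group(1),
            "current_version": match.group(2),
            "new_version": match.group(3),
        })

print(updates)  # [{'package': 'linux', ...}, {'package': 'pacman-contrib', ...}]
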
async def remove_package(
    package_name: str,
    remove_dependencies: bool = False,
    force: bool = False
) -> Dict[str, Any]:
    """
    Remove a single package from the system.

    Args:
        package_name: Name of package to remove
        remove_dependencies: If True, remove unneeded dependencies (pacman -Rs)
        force: If True, force removal ignoring dependencies (pacman -Rdd)

    Returns:
        Dict with removal status and information
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package removal is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Removing package: {package_name} (deps={remove_dependencies}, force={force})")

    # Build command based on options
    cmd = ["sudo", "pacman"]

    if force:
        cmd.extend(["-Rdd"])  # Force remove, skip dependency checks
    elif remove_dependencies:
        cmd.extend(["-Rs"])  # Remove with unused dependencies
    else:
        cmd.extend(["-R"])  # Basic removal

    cmd.extend(["--noconfirm", package_name])

    try:
        exit_code, stdout, stderr = await run_command(
            cmd,
            timeout=60,  # Longer timeout for removal
            check=False,
            skip_sudo_check=True  # We're using sudo in the command
        )

        if exit_code != 0:
            logger.error(f"Package removal failed: {stderr}")
            return create_error_response(
                "RemovalError",
                f"Failed to remove {package_name}: {stderr}",
                f"Exit code: {exit_code}"
            )

        logger.info(f"Successfully removed {package_name}")

        return {
            "success": True,
            "package": package_name,
            "removed_dependencies": remove_dependencies,
            "output": stdout
        }

    except Exception as e:
        logger.error(f"Package removal failed with exception: {e}")
        return create_error_response(
            "RemovalError",
            f"Failed to remove {package_name}: {str(e)}"
        )


async def remove_packages_batch(
    package_names: List[str],
    remove_dependencies: bool = False
) -> Dict[str, Any]:
    """
    Remove multiple packages in a single transaction.

    Args:
        package_names: List of package names to remove
        remove_dependencies: If True, remove unneeded dependencies

    Returns:
        Dict with removal status
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package removal is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    if not package_names:
        return create_error_response(
            "ValidationError",
            "No packages specified for removal"
        )

    logger.info(f"Batch removing {len(package_names)} packages (deps={remove_dependencies})")

    # Build command
    cmd = ["sudo", "pacman"]

    if remove_dependencies:
        cmd.extend(["-Rs"])
    else:
        cmd.extend(["-R"])

    cmd.extend(["--noconfirm"] + package_names)

    try:
        exit_code, stdout, stderr = await run_command(
            cmd,
            timeout=120,  # Longer timeout for batch removal
            check=False,
            skip_sudo_check=True
        )

        if exit_code != 0:
            logger.error(f"Batch removal failed: {stderr}")
            return create_error_response(
                "RemovalError",
                f"Failed to remove packages: {stderr}",
                f"Exit code: {exit_code}"
            )

        logger.info(f"Successfully removed {len(package_names)} packages")

        return {
            "success": True,
            "package_count": len(package_names),
            "packages": package_names,
            "removed_dependencies": remove_dependencies,
            "output": stdout
        }

    except Exception as e:
        logger.error(f"Batch removal failed with exception: {e}")
        return create_error_response(
            "RemovalError",
            f"Failed to remove packages: {str(e)}"
        )


async def list_orphan_packages() -> Dict[str, Any]:
    """
    List all orphaned packages (dependencies no longer required).

    Returns:
        Dict with list of orphan packages
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Orphan package detection is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info("Listing orphan packages")

    try:
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-Qtdq"],
            timeout=10,
            check=False
        )

        # Exit code 1 with no output means no orphans
        if exit_code == 1 and not stdout.strip():
            logger.info("No orphan packages found")
            return {
                "orphan_count": 0,
                "orphans": []
            }

        if exit_code != 0:
            logger.error(f"Failed to list orphans: {stderr}")
            return create_error_response(
                "CommandError",
                f"Failed to list orphan packages: {stderr}",
                f"Exit code: {exit_code}"
            )

        # Parse output - one package per line
        orphans = [pkg.strip() for pkg in stdout.strip().split('\n') if pkg.strip()]

        logger.info(f"Found {len(orphans)} orphan packages")

        return {
            "orphan_count": len(orphans),
            "orphans": orphans
        }

    except Exception as e:
        logger.error(f"Orphan listing failed with exception: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to list orphan packages: {str(e)}"
        )


async def remove_orphans(dry_run: bool = True, exclude: Optional[List[str]] = None) -> Dict[str, Any]:
    """
    Remove all orphaned packages.

    Args:
        dry_run: If True, show what would be removed without actually removing
        exclude: List of packages to exclude from removal

    Returns:
        Dict with removal status
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Orphan removal is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    # First, get list of orphans
    orphans_result = await list_orphan_packages()

    if orphans_result.get("error"):
        return orphans_result

    orphans = orphans_result.get("orphans", [])

    if not orphans:
        return {
            "removed_count": 0,
            "packages": [],
            "message": "No orphan packages to remove"
        }

    # Apply exclusions if provided
    if exclude:
        orphans = [pkg for pkg in orphans if pkg not in exclude]
        if not orphans:
            return {
                "removed_count": 0,
                "packages": [],
                "message": "All orphan packages are in exclusion list"
            }

    logger.info(f"Removing {len(orphans)} orphan packages (dry_run={dry_run})")

    if dry_run:
        return {
            "dry_run": True,
            "would_remove_count": len(orphans),
            "packages": orphans,
            "message": "This is a dry run. No packages were removed."
        }

    try:
        # Remove orphans using pacman -Rns
        cmd = ["sudo", "pacman", "-Rns", "--noconfirm"] + orphans

        exit_code, stdout, stderr = await run_command(
            cmd,
            timeout=120,
            check=False,
            skip_sudo_check=True
        )

        if exit_code != 0:
            logger.error(f"Orphan removal failed: {stderr}")
            return create_error_response(
                "RemovalError",
                f"Failed to remove orphan packages: {stderr}",
                f"Exit code: {exit_code}"
            )

        logger.info(f"Successfully removed {len(orphans)} orphan packages")

        return {
            "success": True,
            "removed_count": len(orphans),
            "packages": orphans,
            "output": stdout
        }

    except Exception as e:
        logger.error(f"Orphan removal failed with exception: {e}")
        return create_error_response(
            "RemovalError",
            f"Failed to remove orphan packages: {str(e)}"
        )


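Editor's aside, not part of the packaged file: remove_orphans defaults to dry_run=True, so a caller is expected to preview first and only then re-run with dry_run=False. A hedged usage sketch of that flow, assuming the module is importable as arch_ops_server.pacman, an Arch Linux host, and passwordless sudo for pacman:

# Illustrative sketch only, not shipped with arch-ops-server: preview orphans, then remove after review.
import asyncio

from arch_ops_server import pacman  # assumes arch-ops-server is installed


async def prune_orphans() -> None:
    preview = await pacman.remove_orphans(dry_run=True)
    if preview.get("error") or not preview.get("packages"):
        print(preview.get("message", preview))
        return

    print("Would remove:", ", ".join(preview["packages"]))
    # Proceed only after reviewing the dry-run list; the "base-devel" exclusion is a hypothetical example.
    result = await pacman.remove_orphans(dry_run=False, exclude=["base-devel"])
    print(result)


if __name__ == "__main__":
    asyncio.run(prune_orphans())
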
async def find_package_owner(file_path: str) -> Dict[str, Any]:
    """
    Find which package owns a specific file.

    Args:
        file_path: Absolute path to file

    Returns:
        Dict with package owner information
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package ownership queries are only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Finding owner of file: {file_path}")

    try:
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-Qo", file_path],
            timeout=5,
            check=False
        )

        if exit_code != 0:
            logger.info(f"No package owns {file_path}")
            return create_error_response(
                "NotFound",
                f"No package owns this file: {file_path}",
                stderr
            )

        # Parse output: "/path/to/file is owned by package 1.0-1"
        match = re.search(r'is owned by (\S+)\s+(\S+)', stdout)
        if match:
            package_name = match.group(1)
            version = match.group(2)

            logger.info(f"File {file_path} is owned by {package_name} {version}")

            return {
                "file": file_path,
                "package": package_name,
                "version": version
            }

        return create_error_response(
            "ParseError",
            f"Could not parse pacman output: {stdout}"
        )

    except Exception as e:
        logger.error(f"Package ownership query failed: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to find package owner: {str(e)}"
        )


async def list_package_files(package_name: str, filter_pattern: Optional[str] = None) -> Dict[str, Any]:
    """
    List all files owned by a package.

    Args:
        package_name: Name of package
        filter_pattern: Optional regex pattern to filter files

    Returns:
        Dict with list of files
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package file listing is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Listing files for package: {package_name}")

    try:
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-Ql", package_name],
            timeout=10,
            check=False
        )

        if exit_code != 0:
            logger.error(f"Failed to list files for {package_name}: {stderr}")
            return create_error_response(
                "NotFound",
                f"Package not found or no files: {package_name}",
                stderr
            )

        # Parse output: "package /path/to/file"
        files = []
        for line in stdout.strip().split('\n'):
            if not line.strip():
                continue

            parts = line.split(maxsplit=1)
            if len(parts) == 2:
                file_path = parts[1]

                # Apply filter if provided
                if filter_pattern:
                    if re.search(filter_pattern, file_path):
                        files.append(file_path)
                else:
                    files.append(file_path)

        logger.info(f"Found {len(files)} files for {package_name}")

        return {
            "package": package_name,
            "file_count": len(files),
            "files": files,
            "filter_applied": filter_pattern is not None
        }

    except Exception as e:
        logger.error(f"File listing failed: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to list package files: {str(e)}"
        )


async def search_package_files(filename_pattern: str) -> Dict[str, Any]:
    """
    Search for files across all packages.

    Args:
        filename_pattern: Filename pattern to search for

    Returns:
        Dict with matching files and packages
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package file search is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Searching for files matching: {filename_pattern}")

    try:
        # First check if file database is synced
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-F", filename_pattern],
            timeout=30,
            check=False
        )

        if exit_code == 1 and "database" in stderr.lower():
            return create_error_response(
                "DatabaseNotSynced",
                "Package file database not synced. Run 'sudo pacman -Fy' first.",
                "File database needs to be synchronized before searching"
            )

        if exit_code != 0 and not stdout.strip():
            logger.info(f"No files found matching {filename_pattern}")
            return {
                "pattern": filename_pattern,
                "match_count": 0,
                "matches": []
            }

        # Parse output: "repository/package version\n path/to/file"
        matches = []
        current_package = None

        for line in stdout.strip().split('\n'):
            if not line.strip():
                continue

            if line.startswith(' '):
                # This is a file path
                if current_package:
                    file_path = line.strip()
                    matches.append({
                        "package": current_package["package"],
                        "repository": current_package["repository"],
                        "version": current_package["version"],
                        "file": file_path
                    })
            else:
                # This is a package line: "repository/package version"
                parts = line.split()
                if len(parts) >= 2:
                    repo_pkg = parts[0].split('/')
                    if len(repo_pkg) == 2:
                        current_package = {
                            "repository": repo_pkg[0],
                            "package": repo_pkg[1],
                            "version": parts[1]
                        }

        logger.info(f"Found {len(matches)} files matching {filename_pattern}")

        return {
            "pattern": filename_pattern,
            "match_count": len(matches),
            "matches": matches
        }

    except Exception as e:
        logger.error(f"File search failed: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to search package files: {str(e)}"
        )


async def verify_package_integrity(package_name: str, thorough: bool = False) -> Dict[str, Any]:
    """
    Verify integrity of an installed package.

    Args:
        package_name: Name of package to verify
        thorough: If True, perform thorough check (pacman -Qkk)

    Returns:
        Dict with verification results
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package verification is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Verifying package integrity: {package_name} (thorough={thorough})")

    try:
        cmd = ["pacman", "-Qkk" if thorough else "-Qk", package_name]

        exit_code, stdout, stderr = await run_command(
            cmd,
            timeout=30,
            check=False
        )

        if exit_code != 0 and "was not found" in stderr:
            return create_error_response(
                "NotFound",
                f"Package not installed: {package_name}"
            )

        # Parse verification output
        issues = []
        for line in stdout.strip().split('\n'):
            if "warning" in line.lower() or "missing" in line.lower():
                issues.append(line.strip())

        logger.info(f"Found {len(issues)} issues for {package_name}")

        return {
            "package": package_name,
            "thorough": thorough,
            "issues_found": len(issues),
            "issues": issues,
            "all_ok": len(issues) == 0
        }

    except Exception as e:
        logger.error(f"Package verification failed: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to verify package: {str(e)}"
        )


async def list_package_groups() -> Dict[str, Any]:
    """
    List all available package groups.

    Returns:
        Dict with list of groups
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package groups are only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info("Listing package groups")

    try:
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-Sg"],
            timeout=10,
            check=False
        )

        if exit_code != 0:
            return create_error_response(
                "CommandError",
                f"Failed to list groups: {stderr}"
            )

        # Parse output - format: "group package"
        groups = set()
        for line in stdout.strip().split('\n'):
            if line.strip():
                parts = line.split()
                if parts:
                    groups.add(parts[0])

        groups_list = sorted(list(groups))

        logger.info(f"Found {len(groups_list)} package groups")

        return {
            "group_count": len(groups_list),
            "groups": groups_list
        }

    except Exception as e:
        logger.error(f"Failed to list groups: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to list package groups: {str(e)}"
        )


async def list_group_packages(group_name: str) -> Dict[str, Any]:
    """
    List packages in a specific group.

    Args:
        group_name: Name of the group

    Returns:
        Dict with packages in the group
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package groups are only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Listing packages in group: {group_name}")

    try:
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-Sg", group_name],
            timeout=10,
            check=False
        )

        if exit_code != 0:
            return create_error_response(
                "NotFound",
                f"Group not found: {group_name}"
            )

        # Parse output - format: "group package"
        packages = []
        for line in stdout.strip().split('\n'):
            if line.strip():
                parts = line.split()
                if len(parts) >= 2:
                    packages.append(parts[1])

        logger.info(f"Found {len(packages)} packages in {group_name}")

        return {
            "group": group_name,
            "package_count": len(packages),
            "packages": packages
        }

    except Exception as e:
        logger.error(f"Failed to list group packages: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to list packages in group: {str(e)}"
        )


async def list_explicit_packages() -> Dict[str, Any]:
    """
    List explicitly installed packages.

    Returns:
        Dict with list of explicit packages
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package install reason queries are only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info("Listing explicitly installed packages")

    try:
        exit_code, stdout, stderr = await run_command(
            ["pacman", "-Qe"],
            timeout=15,
            check=False
        )

        if exit_code != 0:
            return create_error_response(
                "CommandError",
                f"Failed to list explicit packages: {stderr}"
            )

        # Parse output - format: "package version"
        packages = []
        for line in stdout.strip().split('\n'):
            if line.strip():
                parts = line.split()
                if len(parts) >= 2:
                    packages.append({
                        "name": parts[0],
                        "version": parts[1]
                    })

        logger.info(f"Found {len(packages)} explicitly installed packages")

        return {
            "package_count": len(packages),
            "packages": packages
        }

    except Exception as e:
        logger.error(f"Failed to list explicit packages: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to list explicit packages: {str(e)}"
        )


async def mark_as_explicit(package_name: str) -> Dict[str, Any]:
    """
    Mark a package as explicitly installed.

    Args:
        package_name: Name of package to mark

    Returns:
        Dict with operation status
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package marking is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Marking {package_name} as explicitly installed")

    try:
        exit_code, stdout, stderr = await run_command(
            ["sudo", "pacman", "-D", "--asexplicit", package_name],
            timeout=10,
            check=False,
            skip_sudo_check=True
        )

        if exit_code != 0:
            return create_error_response(
                "CommandError",
                f"Failed to mark package as explicit: {stderr}"
            )

        logger.info(f"Successfully marked {package_name} as explicit")

        return {
            "success": True,
            "package": package_name,
            "marked_as": "explicit"
        }

    except Exception as e:
        logger.error(f"Failed to mark package: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to mark package as explicit: {str(e)}"
        )


async def mark_as_dependency(package_name: str) -> Dict[str, Any]:
    """
    Mark a package as a dependency.

    Args:
        package_name: Name of package to mark

    Returns:
        Dict with operation status
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Package marking is only available on Arch Linux"
        )

    if not check_command_exists("pacman"):
        return create_error_response(
            "CommandNotFound",
            "pacman command not found"
        )

    logger.info(f"Marking {package_name} as dependency")

    try:
        exit_code, stdout, stderr = await run_command(
            ["sudo", "pacman", "-D", "--asdeps", package_name],
            timeout=10,
            check=False,
            skip_sudo_check=True
        )

        if exit_code != 0:
            return create_error_response(
                "CommandError",
                f"Failed to mark package as dependency: {stderr}"
            )

        logger.info(f"Successfully marked {package_name} as dependency")

        return {
            "success": True,
            "package": package_name,
            "marked_as": "dependency"
        }

    except Exception as e:
        logger.error(f"Failed to mark package: {e}")
        return create_error_response(
            "CommandError",
            f"Failed to mark package as dependency: {str(e)}"
        )


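Editor's aside, not part of the packaged file: the two marking helpers above pair naturally with list_explicit_packages when auditing install reasons. A minimal sketch under the same assumptions as before (Arch host, importable arch_ops_server.pacman, sudo rights for pacman):

# Illustrative sketch only, not shipped with arch-ops-server: demote a package, then re-audit.
import asyncio

from arch_ops_server import pacman  # assumes arch-ops-server is installed


async def demote(package: str) -> None:
    result = await pacman.mark_as_dependency(package)
    if not result.get("success"):
        print("Marking failed:", result)
        return

    explicit = await pacman.list_explicit_packages()
    print(f"{package} is now a dependency; "
          f"{explicit.get('package_count', 0)} explicitly installed packages remain")


if __name__ == "__main__":
    asyncio.run(demote("python-httpx"))  # package name is a hypothetical example
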
async def check_database_freshness() -> Dict[str, Any]:
    """
    Check when package databases were last synchronized.

    Returns:
        Dict with database sync timestamps per repository
    """
    if not IS_ARCH:
        return create_error_response(
            "NotSupported",
            "Database freshness check is only available on Arch Linux"
        )

    logger.info("Checking database freshness")

    try:
        from pathlib import Path
        from datetime import datetime, timedelta

        sync_dir = Path("/var/lib/pacman/sync")

        if not sync_dir.exists():
            return create_error_response(
                "NotFound",
                "Pacman sync directory not found"
            )

        # Get all .db files
        db_files = list(sync_dir.glob("*.db"))

        if not db_files:
            return create_error_response(
                "NotFound",
                "No database files found"
            )

        databases = []
        now = datetime.now()
        oldest_db = None
        oldest_age = timedelta(0)

        for db_file in db_files:
            mtime = datetime.fromtimestamp(db_file.stat().st_mtime)
            age = now - mtime
            hours_old = age.total_seconds() / 3600

            db_info = {
                "repository": db_file.stem,  # Remove .db extension
                "last_sync": mtime.isoformat(),
                "hours_old": round(hours_old, 1)
            }

            # Warn if older than 24 hours
            if hours_old > 24:
                db_info["warning"] = f"Database is {hours_old:.0f} hours old (> 24h)"

            databases.append(db_info)

            # Track oldest
            if oldest_db is None or age > oldest_age:
                oldest_db = db_info["repository"]
                oldest_age = age

        # Sort by hours_old descending (oldest first)
        databases.sort(key=lambda x: x["hours_old"], reverse=True)

        logger.info(f"Checked {len(databases)} databases, oldest: {oldest_age.total_seconds() / 3600:.1f}h")

        recommendations = []
        if oldest_age.total_seconds() / 3600 > 24:
            recommendations.append("Databases are stale (> 24h). Run 'sudo pacman -Sy' to synchronize.")
        if oldest_age.total_seconds() / 3600 > 168:  # 1 week
            recommendations.append("Databases are very stale (> 1 week). Consider full system update.")

        return {
            "database_count": len(databases),
            "databases": databases,
            "oldest_database": oldest_db,
            "oldest_age_hours": round(oldest_age.total_seconds() / 3600, 1),
            "recommendations": recommendations,
            "needs_sync": oldest_age.total_seconds() / 3600 > 24
        }

    except Exception as e:
        logger.error(f"Failed to check database freshness: {e}")
        return create_error_response(
            "CheckError",
            f"Failed to check database freshness: {str(e)}"
        )