mcp-ssh-vps 0.4.1 (mcp_ssh_vps-0.4.1-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. mcp_ssh_vps-0.4.1.dist-info/METADATA +482 -0
  2. mcp_ssh_vps-0.4.1.dist-info/RECORD +47 -0
  3. mcp_ssh_vps-0.4.1.dist-info/WHEEL +5 -0
  4. mcp_ssh_vps-0.4.1.dist-info/entry_points.txt +4 -0
  5. mcp_ssh_vps-0.4.1.dist-info/licenses/LICENSE +21 -0
  6. mcp_ssh_vps-0.4.1.dist-info/top_level.txt +1 -0
  7. sshmcp/__init__.py +3 -0
  8. sshmcp/cli.py +473 -0
  9. sshmcp/config.py +155 -0
  10. sshmcp/core/__init__.py +5 -0
  11. sshmcp/core/container.py +291 -0
  12. sshmcp/models/__init__.py +15 -0
  13. sshmcp/models/command.py +69 -0
  14. sshmcp/models/file.py +102 -0
  15. sshmcp/models/machine.py +139 -0
  16. sshmcp/monitoring/__init__.py +0 -0
  17. sshmcp/monitoring/alerts.py +464 -0
  18. sshmcp/prompts/__init__.py +7 -0
  19. sshmcp/prompts/backup.py +151 -0
  20. sshmcp/prompts/deploy.py +115 -0
  21. sshmcp/prompts/monitor.py +146 -0
  22. sshmcp/resources/__init__.py +7 -0
  23. sshmcp/resources/logs.py +99 -0
  24. sshmcp/resources/metrics.py +204 -0
  25. sshmcp/resources/status.py +160 -0
  26. sshmcp/security/__init__.py +7 -0
  27. sshmcp/security/audit.py +314 -0
  28. sshmcp/security/rate_limiter.py +221 -0
  29. sshmcp/security/totp.py +392 -0
  30. sshmcp/security/validator.py +234 -0
  31. sshmcp/security/whitelist.py +169 -0
  32. sshmcp/server.py +632 -0
  33. sshmcp/ssh/__init__.py +6 -0
  34. sshmcp/ssh/async_client.py +247 -0
  35. sshmcp/ssh/client.py +464 -0
  36. sshmcp/ssh/executor.py +79 -0
  37. sshmcp/ssh/forwarding.py +368 -0
  38. sshmcp/ssh/pool.py +343 -0
  39. sshmcp/ssh/shell.py +518 -0
  40. sshmcp/ssh/transfer.py +461 -0
  41. sshmcp/tools/__init__.py +13 -0
  42. sshmcp/tools/commands.py +226 -0
  43. sshmcp/tools/files.py +220 -0
  44. sshmcp/tools/helpers.py +321 -0
  45. sshmcp/tools/history.py +372 -0
  46. sshmcp/tools/processes.py +214 -0
  47. sshmcp/tools/servers.py +484 -0
sshmcp/ssh/transfer.py ADDED
@@ -0,0 +1,461 @@
+ """File and directory transfer operations via SFTP."""
+
+ import os
+ import stat
+ from pathlib import Path
+ from typing import Callable
+
+ import paramiko
+ import structlog
+
+ from sshmcp.models.machine import MachineConfig
+ from sshmcp.ssh.client import SSHClient
+
+ logger = structlog.get_logger()
+
+
+ class TransferError(Exception):
+     """Error during file transfer."""
+
+     pass
+
+
+ class TransferProgress:
+     """Progress tracking for file transfers."""
+
+     def __init__(self, total_files: int = 0, total_bytes: int = 0):
+         self.total_files = total_files
+         self.total_bytes = total_bytes
+         self.transferred_files = 0
+         self.transferred_bytes = 0
+         self.current_file = ""
+         self.errors: list[str] = []
+
+     @property
+     def file_progress(self) -> float:
+         """Get file transfer progress percentage."""
+         if self.total_files == 0:
+             return 0.0
+         return (self.transferred_files / self.total_files) * 100
+
+     @property
+     def byte_progress(self) -> float:
+         """Get byte transfer progress percentage."""
+         if self.total_bytes == 0:
+             return 0.0
+         return (self.transferred_bytes / self.total_bytes) * 100
+
+     def to_dict(self) -> dict:
+         """Convert to dictionary."""
+         return {
+             "total_files": self.total_files,
+             "total_bytes": self.total_bytes,
+             "transferred_files": self.transferred_files,
+             "transferred_bytes": self.transferred_bytes,
+             "current_file": self.current_file,
+             "file_progress": round(self.file_progress, 1),
+             "byte_progress": round(self.byte_progress, 1),
+             "errors": self.errors,
+         }
+
+
+ class DirectoryTransfer:
+     """
+     Transfer directories between local and remote systems via SFTP.
+
+     Provides rsync-like functionality for syncing directories.
+     """
+
+     def __init__(self, ssh_client: SSHClient) -> None:
+         """
+         Initialize directory transfer.
+
+         Args:
+             ssh_client: Connected SSH client.
+         """
+         self.ssh_client = ssh_client
+         self._sftp: paramiko.SFTPClient | None = None
+         self._progress_callback: Callable[[TransferProgress], None] | None = None
+
+     def set_progress_callback(
+         self, callback: Callable[[TransferProgress], None]
+     ) -> None:
+         """Set callback for progress updates."""
+         self._progress_callback = callback
+
+     def _get_sftp(self) -> paramiko.SFTPClient:
+         """Get or create SFTP client."""
+         if self._sftp is None:
+             if not self.ssh_client.is_connected:
+                 self.ssh_client.connect()
+             self._sftp = self.ssh_client._client.open_sftp()  # type: ignore
+         return self._sftp
+
+     def upload_directory(
+         self,
+         local_path: str,
+         remote_path: str,
+         exclude_patterns: list[str] | None = None,
+         delete_extra: bool = False,
+     ) -> TransferProgress:
+         """
+         Upload a local directory to the remote server.
+
+         Args:
+             local_path: Local directory path.
+             remote_path: Remote destination path.
+             exclude_patterns: Patterns to exclude (glob-style).
+             delete_extra: Delete files on remote not in local.
+
+         Returns:
+             TransferProgress with results.
+         """
+         local = Path(local_path)
+         if not local.exists():
+             raise TransferError(f"Local path does not exist: {local_path}")
+         if not local.is_dir():
+             raise TransferError(f"Local path is not a directory: {local_path}")
+
+         sftp = self._get_sftp()
+         progress = TransferProgress()
+         exclude = exclude_patterns or []
+
+         # Count files first
+         for root, dirs, files in os.walk(local):
+             for f in files:
+                 file_path = Path(root) / f
+                 if not self._should_exclude(str(file_path), exclude):
+                     progress.total_files += 1
+                     progress.total_bytes += file_path.stat().st_size
+
+         # Create remote base directory
+         self._ensure_remote_dir(sftp, remote_path)
+
+         # Track remote files for deletion
+         remote_files: set[str] = set()
+         if delete_extra:
+             remote_files = self._list_remote_files(sftp, remote_path)
+
+         uploaded_files: set[str] = set()
+
+         # Upload files
+         for root, dirs, files in os.walk(local):
+             rel_root = Path(root).relative_to(local)
+
+             # Create subdirectories (skip the top-level ".")
+             if str(rel_root) != ".":
+                 remote_dir = f"{remote_path}/{rel_root.as_posix()}"
+                 self._ensure_remote_dir(sftp, remote_dir)
+
+             for f in files:
+                 local_file = Path(root) / f
+                 # pathlib already drops the leading "./"; lstrip("./") would
+                 # also strip the dot from top-level dotfiles such as ".env".
+                 rel_file = (rel_root / f).as_posix()
+                 remote_file = f"{remote_path}/{rel_file}"
+
+                 if self._should_exclude(str(local_file), exclude):
+                     continue
+
+                 progress.current_file = str(local_file)
+
+                 try:
+                     sftp.put(str(local_file), remote_file)
+                     progress.transferred_files += 1
+                     progress.transferred_bytes += local_file.stat().st_size
+                     uploaded_files.add(rel_file)
+
+                     if self._progress_callback:
+                         self._progress_callback(progress)
+
+                 except Exception as e:
+                     progress.errors.append(f"{local_file}: {e}")
+                     logger.error(
+                         "upload_file_error", file=str(local_file), error=str(e)
+                     )
+
+         # Delete extra files on remote
+         if delete_extra:
+             extra_files = remote_files - uploaded_files
+             for rel_file in extra_files:
+                 try:
+                     remote_file = f"{remote_path}/{rel_file}"
+                     sftp.remove(remote_file)
+                     logger.info("deleted_extra_file", file=remote_file)
+                 except Exception as e:
+                     progress.errors.append(f"delete {rel_file}: {e}")
+
+         logger.info(
+             "directory_uploaded",
+             local=local_path,
+             remote=remote_path,
+             files=progress.transferred_files,
+         )
+
+         return progress
+
+     def download_directory(
+         self,
+         remote_path: str,
+         local_path: str,
+         exclude_patterns: list[str] | None = None,
+         delete_extra: bool = False,
+     ) -> TransferProgress:
+         """
+         Download a remote directory to the local system.
+
+         Args:
+             remote_path: Remote directory path.
+             local_path: Local destination path.
+             exclude_patterns: Patterns to exclude.
+             delete_extra: Delete files in local not in remote.
+
+         Returns:
+             TransferProgress with results.
+         """
+         sftp = self._get_sftp()
+         local = Path(local_path)
+         progress = TransferProgress()
+         exclude = exclude_patterns or []
+
+         # Count remote files first
+         remote_files_info = self._list_remote_files_with_info(sftp, remote_path)
+         for rel_path, size in remote_files_info.items():
+             if not self._should_exclude(rel_path, exclude):
+                 progress.total_files += 1
+                 progress.total_bytes += size
+
+         # Create local base directory
+         local.mkdir(parents=True, exist_ok=True)
+
+         # Track local files for deletion
+         local_files: set[str] = set()
+         if delete_extra:
+             for root, dirs, files in os.walk(local):
+                 for f in files:
+                     rel = str((Path(root) / f).relative_to(local))
+                     local_files.add(rel)
+
+         downloaded_files: set[str] = set()
+
+         # Download files
+         self._download_recursive(
+             sftp, remote_path, local, "", progress, exclude, downloaded_files
+         )
+
+         # Delete extra local files
+         if delete_extra:
+             extra_files = local_files - downloaded_files
+             for rel_file in extra_files:
+                 try:
+                     (local / rel_file).unlink()
+                     logger.info("deleted_extra_local_file", file=rel_file)
+                 except Exception as e:
+                     progress.errors.append(f"delete {rel_file}: {e}")
+
+         logger.info(
+             "directory_downloaded",
+             remote=remote_path,
+             local=local_path,
+             files=progress.transferred_files,
+         )
+
+         return progress
+
+     def sync_directory(
+         self,
+         local_path: str,
+         remote_path: str,
+         direction: str = "upload",
+         exclude_patterns: list[str] | None = None,
+         delete_extra: bool = False,
+     ) -> TransferProgress:
+         """
+         Sync a directory (rsync-like behavior).
+
+         Args:
+             local_path: Local directory path.
+             remote_path: Remote directory path.
+             direction: "upload" or "download".
+             exclude_patterns: Patterns to exclude.
+             delete_extra: Delete files not in source.
+
+         Returns:
+             TransferProgress with results.
+         """
+         if direction == "upload":
+             return self.upload_directory(
+                 local_path, remote_path, exclude_patterns, delete_extra
+             )
+         elif direction == "download":
+             return self.download_directory(
+                 remote_path, local_path, exclude_patterns, delete_extra
+             )
+         else:
+             raise TransferError(f"Invalid direction: {direction}")
+
+     def _ensure_remote_dir(self, sftp: paramiko.SFTPClient, path: str) -> None:
+         """Ensure remote directory exists."""
+         try:
+             sftp.stat(path)
+         except FileNotFoundError:
+             # Create parent directories recursively
+             parts = path.split("/")
+             current = ""
+             for part in parts:
+                 if not part:
+                     continue
+                 current = f"{current}/{part}"
+                 try:
+                     sftp.stat(current)
+                 except FileNotFoundError:
+                     sftp.mkdir(current)
+
+     def _list_remote_files(
+         self, sftp: paramiko.SFTPClient, path: str, prefix: str = ""
+     ) -> set[str]:
+         """List all files in remote directory recursively."""
+         files: set[str] = set()
+
+         try:
+             for entry in sftp.listdir_attr(path):
+                 full_path = f"{path}/{entry.filename}"
+                 rel_path = f"{prefix}/{entry.filename}".lstrip("/")
+
+                 if stat.S_ISDIR(entry.st_mode or 0):
+                     files.update(self._list_remote_files(sftp, full_path, rel_path))
+                 else:
+                     files.add(rel_path)
+         except Exception:
+             pass
+
+         return files
+
+     def _list_remote_files_with_info(
+         self, sftp: paramiko.SFTPClient, path: str, prefix: str = ""
+     ) -> dict[str, int]:
+         """List all files with sizes in remote directory."""
+         files: dict[str, int] = {}
+
+         try:
+             for entry in sftp.listdir_attr(path):
+                 full_path = f"{path}/{entry.filename}"
+                 rel_path = f"{prefix}/{entry.filename}".lstrip("/")
+
+                 if stat.S_ISDIR(entry.st_mode or 0):
+                     files.update(
+                         self._list_remote_files_with_info(sftp, full_path, rel_path)
+                     )
+                 else:
+                     files[rel_path] = entry.st_size or 0
+         except Exception:
+             pass
+
+         return files
+
+     def _download_recursive(
+         self,
+         sftp: paramiko.SFTPClient,
+         remote_path: str,
+         local_base: Path,
+         prefix: str,
+         progress: TransferProgress,
+         exclude: list[str],
+         downloaded: set[str],
+     ) -> None:
+         """Recursively download directory contents."""
+         try:
+             for entry in sftp.listdir_attr(remote_path):
+                 remote_file = f"{remote_path}/{entry.filename}"
+                 rel_path = f"{prefix}/{entry.filename}".lstrip("/")
+                 local_file = local_base / rel_path
+
+                 if self._should_exclude(rel_path, exclude):
+                     continue
+
+                 if stat.S_ISDIR(entry.st_mode or 0):
+                     local_file.mkdir(parents=True, exist_ok=True)
+                     self._download_recursive(
+                         sftp,
+                         remote_file,
+                         local_base,
+                         rel_path,
+                         progress,
+                         exclude,
+                         downloaded,
+                     )
+                 else:
+                     progress.current_file = remote_file
+
+                     try:
+                         local_file.parent.mkdir(parents=True, exist_ok=True)
+                         sftp.get(remote_file, str(local_file))
+                         progress.transferred_files += 1
+                         progress.transferred_bytes += entry.st_size or 0
+                         downloaded.add(rel_path)
+
+                         if self._progress_callback:
+                             self._progress_callback(progress)
+
+                     except Exception as e:
+                         progress.errors.append(f"{remote_file}: {e}")
+
+         except Exception as e:
+             progress.errors.append(f"{remote_path}: {e}")
+
+     def _should_exclude(self, path: str, patterns: list[str]) -> bool:
+         """Check if path matches any exclude pattern."""
+         import fnmatch
+
+         for pattern in patterns:
+             if fnmatch.fnmatch(path, pattern):
+                 return True
+             if fnmatch.fnmatch(os.path.basename(path), pattern):
+                 return True
+         return False
+
+     def close(self) -> None:
+         """Close SFTP connection."""
+         if self._sftp:
+             try:
+                 self._sftp.close()
+             except Exception:
+                 pass
+             self._sftp = None
+
+
+ def sync_directory(
+     machine: MachineConfig,
+     local_path: str,
+     remote_path: str,
+     direction: str = "upload",
+     exclude: list[str] | None = None,
+     delete_extra: bool = False,
+     progress_callback: Callable[[TransferProgress], None] | None = None,
+ ) -> TransferProgress:
+     """
+     Convenience function to sync a directory.
+
+     Args:
+         machine: Machine configuration.
+         local_path: Local directory path.
+         remote_path: Remote directory path.
+         direction: "upload" or "download".
+         exclude: Patterns to exclude.
+         delete_extra: Delete files not in source.
+         progress_callback: Optional progress callback.
+
+     Returns:
+         TransferProgress with results.
+     """
+     client = SSHClient(machine)
+     client.connect()
+
+     try:
+         transfer = DirectoryTransfer(client)
+         if progress_callback:
+             transfer.set_progress_callback(progress_callback)
+
+         return transfer.sync_directory(
+             local_path, remote_path, direction, exclude, delete_extra
+         )
+     finally:
+         client.disconnect()
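
For reference, a minimal usage sketch (not part of the package diff) of the module-level sync_directory helper with a progress callback. It assumes a host entry exists in machines.json; the host name and paths below are illustrative, and get_machine comes from sshmcp.config as imported elsewhere in this package.

from sshmcp.config import get_machine
from sshmcp.ssh.transfer import TransferProgress, sync_directory

def print_progress(p: TransferProgress) -> None:
    # byte_progress is a percentage; current_file is the file in flight
    print(f"{p.byte_progress:5.1f}%  {p.current_file}")

machine = get_machine("production-server")  # illustrative host name
result = sync_directory(
    machine,
    local_path="./dist",           # illustrative paths
    remote_path="/var/www/app",
    direction="upload",
    exclude=["*.pyc", "__pycache__", ".git"],
    delete_extra=True,             # remove remote files absent locally
    progress_callback=print_progress,
)
print(result.to_dict())            # totals, percentages, and any errors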
sshmcp/tools/__init__.py ADDED
@@ -0,0 +1,13 @@
+ """MCP Tools for SSH operations."""
+
+ from sshmcp.tools.commands import execute_command
+ from sshmcp.tools.files import list_files, read_file, upload_file
+ from sshmcp.tools.processes import manage_process
+
+ __all__ = [
+     "execute_command",
+     "read_file",
+     "upload_file",
+     "list_files",
+     "manage_process",
+ ]
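
These functions are plain callables; the MCP wiring lives in sshmcp/server.py, which is not shown in this excerpt. As an assumption about that wiring, a server built on the FastMCP API from the official mcp Python SDK would register them roughly like this (a hypothetical sketch, not the package's actual code):

from mcp.server.fastmcp import FastMCP

from sshmcp.tools import execute_command, list_files, read_file, upload_file

# Hypothetical registration -- the real setup is in sshmcp/server.py.
mcp = FastMCP("sshmcp")
for tool in (execute_command, read_file, upload_file, list_files):
    mcp.tool()(tool)  # expose each plain function as an MCP tool

if __name__ == "__main__":
    mcp.run()  # stdio transport by default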
sshmcp/tools/commands.py ADDED
@@ -0,0 +1,226 @@
+ """MCP Tool for command execution."""
+
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+ from typing import Any
+
+ import structlog
+
+ from sshmcp.config import get_config, get_machine, list_machines
+ from sshmcp.security.audit import get_audit_logger
+ from sshmcp.security.validator import check_command_safety, validate_command
+ from sshmcp.ssh.client import SSHExecutionError
+ from sshmcp.ssh.pool import get_pool
+
+ logger = structlog.get_logger()
+
+
+ def execute_command(
+     host: str,
+     command: str,
+     timeout: int | None = None,
+ ) -> dict[str, Any]:
+     """
+     Execute a command on a remote VPS server via SSH.
+
+     This tool allows AI agents to execute commands on configured VPS servers
+     with security validation and timeout protection.
+
+     Args:
+         host: Name of the host from machines.json configuration.
+         command: Shell command to execute (must match whitelist patterns).
+         timeout: Maximum execution time in seconds (default: from config).
+
+     Returns:
+         Dictionary with:
+         - exit_code: Command exit code (0 = success)
+         - stdout: Standard output text
+         - stderr: Standard error text
+         - duration_ms: Execution time in milliseconds
+         - success: Boolean indicating success
+         - host: Host where command was executed
+         - command: The executed command
+
+     Raises:
+         ValueError: If host not found or command not allowed.
+         RuntimeError: If SSH connection or execution fails.
+
+     Example:
+         >>> execute_command("production-server", "git pull origin main")
+         {"exit_code": 0, "stdout": "Already up to date.", "stderr": "", ...}
+     """
+     audit = get_audit_logger()
+
+     # Get machine configuration
+     try:
+         machine = get_machine(host)
+     except Exception as e:
+         audit.log(
+             event="command_rejected",
+             error=f"Host not found: {host}",
+             metadata={"requested_host": host},
+         )
+         raise ValueError(f"Host not found: {host}") from e
+
+     # Validate command against security rules
+     is_valid, error_msg = validate_command(command, machine.security)
+     if not is_valid:
+         audit.log_command_rejected(host, command, error_msg or "Validation failed")
+         raise ValueError(f"Command not allowed: {error_msg}")
+
+     # Check for safety warnings
+     warnings = check_command_safety(command)
+     if warnings:
+         logger.warning(
+             "command_safety_warnings",
+             host=host,
+             command=command,
+             warnings=warnings,
+         )
+
+     # Get timeout
+     if timeout is None:
+         timeout = machine.security.timeout_seconds
+
+     # Execute command
+     pool = get_pool()
+     pool.register_machine(machine)
+
+     try:
+         client = pool.get_client(host)
+         try:
+             result = client.execute(command, timeout=timeout)
+
+             audit.log_command_executed(
+                 host=host,
+                 command=command,
+                 exit_code=result.exit_code,
+                 duration_ms=result.duration_ms,
+             )
+
+             return result.to_dict()
+
+         finally:
+             pool.release_client(client)
+
+     except SSHExecutionError as e:
+         audit.log_command_failed(host, command, str(e))
+         raise RuntimeError(f"Command execution failed: {e}") from e
+     except Exception as e:
+         audit.log_command_failed(host, command, str(e))
+         raise RuntimeError(f"SSH error: {e}") from e
+
+
+ def execute_on_multiple(
+     hosts: list[str],
+     command: str,
+     timeout: int | None = None,
+     stop_on_error: bool = False,
+     parallel: bool = True,
+ ) -> dict[str, Any]:
+     """
+     Execute a command on multiple VPS servers.
+
+     Runs the same command on multiple servers, optionally in parallel.
+     Useful for checking status, deploying updates, or running maintenance
+     across a fleet of servers.
+
+     Args:
+         hosts: List of host names to execute on. Use ["*"] for all servers,
+             or ["tag:production"] to filter by tag.
+         command: Shell command to execute.
+         timeout: Maximum execution time per server in seconds.
+         stop_on_error: If True, stop execution on first error.
+         parallel: If True, execute on all hosts simultaneously.
+
+     Returns:
+         Dictionary with results from each server:
+         - total: Number of servers
+         - successful: Number of successful executions
+         - failed: Number of failed executions
+         - results: Per-server results
+
+     Example:
+         >>> execute_on_multiple(["web1", "web2"], "uptime")
+         {"total": 2, "successful": 2, "results": {...}}
+
+         >>> execute_on_multiple(["*"], "docker ps")  # All servers
+         >>> execute_on_multiple(["tag:production"], "uptime")  # By tag
+     """
+     # Expand host list
+     if hosts == ["*"] or hosts == "*":
+         hosts = list_machines()
+     elif len(hosts) == 1 and hosts[0].startswith("tag:"):
+         tag = hosts[0][4:]
+         config = get_config()
+         hosts = [
+             m.name
+             for m in config.machines
+             if hasattr(m, "tags") and tag in (m.tags or [])
+         ]
+         if not hosts:
+             return {
+                 "success": False,
+                 "error": f"No servers found with tag: {tag}",
+                 "available_tags": _get_all_tags(),
+             }
+
+     if not hosts:
+         return {
+             "success": False,
+             "error": "No hosts specified",
+             "available_servers": list_machines(),
+         }
+
+     results = {}
+     successful = 0
+     failed = 0
+
+     def run_on_host(host: str) -> tuple[str, dict]:
+         try:
+             result = execute_command(host, command, timeout)
+             return host, {"success": True, **result}
+         except Exception as e:
+             return host, {"success": False, "error": str(e)}
+
+     if parallel and len(hosts) > 1:
+         with ThreadPoolExecutor(max_workers=min(len(hosts), 10)) as executor:
+             futures = {executor.submit(run_on_host, host): host for host in hosts}
+             for future in as_completed(futures):
+                 host, result = future.result()
+                 results[host] = result
+                 if result["success"]:
+                     successful += 1
+                 else:
+                     failed += 1
+                     if stop_on_error:
+                         executor.shutdown(wait=False, cancel_futures=True)
+                         break
+     else:
+         for host in hosts:
+             host, result = run_on_host(host)
+             results[host] = result
+             if result["success"]:
+                 successful += 1
+             else:
+                 failed += 1
+                 if stop_on_error:
+                     break
+
+     return {
+         "success": failed == 0,
+         "total": len(hosts),
+         "successful": successful,
+         "failed": failed,
+         "command": command,
+         "results": results,
+     }
+
+
+ def _get_all_tags() -> list[str]:
+     """Get all unique tags from configured servers."""
+     config = get_config()
+     tags = set()
+     for m in config.machines:
+         if hasattr(m, "tags") and m.tags:
+             tags.update(m.tags)
+     return sorted(tags)
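
For reference, a minimal usage sketch (not part of the package diff) of fanning one command out to a tagged fleet with execute_on_multiple. The tag is illustrative; the result shape follows the docstring above.

from sshmcp.tools.commands import execute_on_multiple

# Run "uptime" on every server tagged "production", in parallel,
# stopping at the first failure.
report = execute_on_multiple(
    ["tag:production"],   # illustrative tag from machines.json
    "uptime",
    timeout=30,
    stop_on_error=True,
    parallel=True,
)
print(f'{report["successful"]}/{report["total"]} succeeded')
for host, result in report["results"].items():
    status = "ok" if result["success"] else result.get("error", "failed")
    print(host, "->", status)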