vm_tool-1.0.32-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. examples/README.md +5 -0
  2. examples/__init__.py +1 -0
  3. examples/cloud/README.md +3 -0
  4. examples/cloud/__init__.py +1 -0
  5. examples/cloud/ssh_identity_file.py +27 -0
  6. examples/cloud/ssh_password.py +27 -0
  7. examples/cloud/template_cloud_setup.py +36 -0
  8. examples/deploy_full_setup.py +44 -0
  9. examples/docker-compose.example.yml +47 -0
  10. examples/ec2-setup.sh +95 -0
  11. examples/github-actions-ec2.yml +245 -0
  12. examples/github-actions-full-setup.yml +58 -0
  13. examples/local/.keep +1 -0
  14. examples/local/README.md +3 -0
  15. examples/local/__init__.py +1 -0
  16. examples/local/template_local_setup.py +27 -0
  17. examples/production-deploy.sh +70 -0
  18. examples/rollback.sh +52 -0
  19. examples/setup.sh +52 -0
  20. examples/ssh_key_management.py +22 -0
  21. examples/version_check.sh +3 -0
  22. vm_tool/__init__.py +0 -0
  23. vm_tool/alerting.py +274 -0
  24. vm_tool/audit.py +118 -0
  25. vm_tool/backup.py +125 -0
  26. vm_tool/benchmarking.py +200 -0
  27. vm_tool/cli.py +761 -0
  28. vm_tool/cloud.py +125 -0
  29. vm_tool/completion.py +200 -0
  30. vm_tool/compliance.py +104 -0
  31. vm_tool/config.py +92 -0
  32. vm_tool/drift.py +98 -0
  33. vm_tool/generator.py +462 -0
  34. vm_tool/health.py +197 -0
  35. vm_tool/history.py +131 -0
  36. vm_tool/kubernetes.py +89 -0
  37. vm_tool/metrics.py +183 -0
  38. vm_tool/notifications.py +152 -0
  39. vm_tool/plugins.py +119 -0
  40. vm_tool/policy.py +197 -0
  41. vm_tool/rbac.py +140 -0
  42. vm_tool/recovery.py +169 -0
  43. vm_tool/reporting.py +218 -0
  44. vm_tool/runner.py +445 -0
  45. vm_tool/secrets.py +285 -0
  46. vm_tool/ssh.py +150 -0
  47. vm_tool/state.py +122 -0
  48. vm_tool/strategies/__init__.py +16 -0
  49. vm_tool/strategies/ab_testing.py +258 -0
  50. vm_tool/strategies/blue_green.py +227 -0
  51. vm_tool/strategies/canary.py +277 -0
  52. vm_tool/validation.py +267 -0
  53. vm_tool/vm_setup/cleanup.yml +27 -0
  54. vm_tool/vm_setup/docker/create_docker_service.yml +63 -0
  55. vm_tool/vm_setup/docker/docker_setup.yml +7 -0
  56. vm_tool/vm_setup/docker/install_docker_and_compose.yml +92 -0
  57. vm_tool/vm_setup/docker/login_to_docker_hub.yml +6 -0
  58. vm_tool/vm_setup/github/git_configuration.yml +68 -0
  59. vm_tool/vm_setup/inventory.yml +1 -0
  60. vm_tool/vm_setup/k8s.yml +15 -0
  61. vm_tool/vm_setup/main.yml +27 -0
  62. vm_tool/vm_setup/monitoring.yml +42 -0
  63. vm_tool/vm_setup/project_service.yml +17 -0
  64. vm_tool/vm_setup/push_code.yml +40 -0
  65. vm_tool/vm_setup/setup.yml +17 -0
  66. vm_tool/vm_setup/setup_project_env.yml +7 -0
  67. vm_tool/webhooks.py +83 -0
  68. vm_tool-1.0.32.dist-info/METADATA +213 -0
  69. vm_tool-1.0.32.dist-info/RECORD +73 -0
  70. vm_tool-1.0.32.dist-info/WHEEL +5 -0
  71. vm_tool-1.0.32.dist-info/entry_points.txt +2 -0
  72. vm_tool-1.0.32.dist-info/licenses/LICENSE +21 -0
  73. vm_tool-1.0.32.dist-info/top_level.txt +2 -0
vm_tool/reporting.py ADDED
@@ -0,0 +1,218 @@
+ """Deployment reporting and analytics."""
+
+ import logging
+ from typing import Dict, Any, List, Optional
+ from datetime import datetime, timedelta
+ from pathlib import Path
+ import json
+
+ logger = logging.getLogger(__name__)
+
+
+ class DeploymentReport:
+     """Generate comprehensive deployment reports."""
+
+     def __init__(self, report_dir: str = ".vm_tool/reports"):
+         self.report_dir = Path(report_dir)
+         self.report_dir.mkdir(parents=True, exist_ok=True)
+
+     def generate_deployment_report(
+         self, deployment_id: str, host: str, duration: float, success: bool, **metadata
+     ) -> Dict[str, Any]:
+         """Generate report for a single deployment."""
+         report = {
+             "deployment_id": deployment_id,
+             "host": host,
+             "timestamp": datetime.now().isoformat(),
+             "duration_seconds": duration,
+             "success": success,
+             "metadata": metadata,
+         }
+
+         # Save report
+         report_file = self.report_dir / f"{deployment_id}.json"
+         with open(report_file, "w") as f:
+             json.dump(report, f, indent=2)
+
+         logger.info(f"📊 Deployment report saved: {report_file}")
+
+         return report
+
+     def generate_summary_report(self, days: int = 7) -> Dict[str, Any]:
+         """Generate summary report for recent deployments."""
+         logger.info(f"📊 Generating summary report for last {days} days")
+
+         # Load recent deployments
+         cutoff_date = datetime.now() - timedelta(days=days)
+         deployments = []
+
+         for report_file in self.report_dir.glob("*.json"):
+             try:
+                 with open(report_file) as f:
+                     deployment = json.load(f)
+
+                 # Check if within date range
+                 timestamp = datetime.fromisoformat(deployment.get("timestamp", ""))
+                 if timestamp >= cutoff_date:
+                     deployments.append(deployment)
+             except Exception as e:
+                 logger.warning(f"Failed to load report {report_file}: {e}")
+
+         # Calculate statistics
+         total = len(deployments)
+         successful = sum(1 for d in deployments if d.get("success"))
+         failed = total - successful
+
+         durations = [
+             d["duration_seconds"] for d in deployments if "duration_seconds" in d
+         ]
+         avg_duration = sum(durations) / len(durations) if durations else 0
+
+         # Group by host
+         hosts = {}
+         for d in deployments:
+             host = d.get("host", "unknown")
+             if host not in hosts:
+                 hosts[host] = {"total": 0, "successful": 0, "failed": 0}
+             hosts[host]["total"] += 1
+             if d.get("success"):
+                 hosts[host]["successful"] += 1
+             else:
+                 hosts[host]["failed"] += 1
+
+         summary = {
+             "period_days": days,
+             "total_deployments": total,
+             "successful": successful,
+             "failed": failed,
+             "success_rate": (successful / total * 100) if total > 0 else 0,
+             "average_duration_seconds": avg_duration,
+             "hosts": hosts,
+             "recent_deployments": sorted(
+                 deployments, key=lambda x: x.get("timestamp", ""), reverse=True
+             )[:10],  # Last 10
+         }
+
+         # Save summary
+         summary_file = (
+             self.report_dir / f"summary_{datetime.now().strftime('%Y%m%d')}.json"
+         )
+         with open(summary_file, "w") as f:
+             json.dump(summary, f, indent=2)
+
+         logger.info(f"📊 Summary report saved: {summary_file}")
+
+         return summary
+
+     def print_summary_report(self, days: int = 7):
+         """Print summary report to console."""
+         summary = self.generate_summary_report(days)
+
+         print(f"\n📊 Deployment Summary Report ({days} days)")
+         print("=" * 60)
+         print(f"Total Deployments: {summary['total_deployments']}")
+         print(f"Successful: {summary['successful']} ({summary['success_rate']:.1f}%)")
+         print(f"Failed: {summary['failed']}")
+         print(f"Average Duration: {summary['average_duration_seconds']:.2f}s")
+
+         print("\nDeployments by Host:")
+         for host, stats in summary["hosts"].items():
+             success_rate = (
+                 (stats["successful"] / stats["total"] * 100)
+                 if stats["total"] > 0
+                 else 0
+             )
+             print(f" {host}: {stats['total']} total, {success_rate:.1f}% success")
+
+         print("\nRecent Deployments:")
+         for d in summary["recent_deployments"]:
+             status = "✅" if d["success"] else "❌"
+             timestamp = datetime.fromisoformat(d["timestamp"]).strftime(
+                 "%Y-%m-%d %H:%M"
+             )
+             print(f" {status} {d['deployment_id']} - {d['host']} - {timestamp}")
+
+     def export_to_html(
+         self, days: int = 7, output_file: str = "deployment_report.html"
+     ):
+         """Export report to HTML."""
+         summary = self.generate_summary_report(days)
+
+         html = f"""
+         <!DOCTYPE html>
+         <html>
+         <head>
+             <title>Deployment Report</title>
+             <style>
+                 body {{ font-family: Arial, sans-serif; margin: 20px; }}
+                 h1 {{ color: #333; }}
+                 .stats {{ display: flex; gap: 20px; margin: 20px 0; }}
+                 .stat-card {{ background: #f5f5f5; padding: 15px; border-radius: 5px; flex: 1; }}
+                 .stat-value {{ font-size: 24px; font-weight: bold; color: #007bff; }}
+                 table {{ width: 100%; border-collapse: collapse; margin: 20px 0; }}
+                 th, td {{ padding: 10px; text-align: left; border-bottom: 1px solid #ddd; }}
+                 th {{ background: #007bff; color: white; }}
+                 .success {{ color: green; }}
+                 .failed {{ color: red; }}
+             </style>
+         </head>
+         <body>
+             <h1>Deployment Report ({days} days)</h1>
+
+             <div class="stats">
+                 <div class="stat-card">
+                     <div>Total Deployments</div>
+                     <div class="stat-value">{summary['total_deployments']}</div>
+                 </div>
+                 <div class="stat-card">
+                     <div>Success Rate</div>
+                     <div class="stat-value">{summary['success_rate']:.1f}%</div>
+                 </div>
+                 <div class="stat-card">
+                     <div>Avg Duration</div>
+                     <div class="stat-value">{summary['average_duration_seconds']:.1f}s</div>
+                 </div>
+             </div>
+
+             <h2>Recent Deployments</h2>
+             <table>
+                 <tr>
+                     <th>Status</th>
+                     <th>ID</th>
+                     <th>Host</th>
+                     <th>Timestamp</th>
+                     <th>Duration</th>
+                 </tr>
+         """
+
+         for d in summary["recent_deployments"]:
+             status_class = "success" if d["success"] else "failed"
+             status_icon = "✅" if d["success"] else "❌"
+             timestamp = datetime.fromisoformat(d["timestamp"]).strftime(
+                 "%Y-%m-%d %H:%M"
+             )
+
+             html += f"""
+                 <tr>
+                     <td class="{status_class}">{status_icon}</td>
+                     <td>{d['deployment_id']}</td>
+                     <td>{d['host']}</td>
+                     <td>{timestamp}</td>
+                     <td>{d.get('duration_seconds', 0):.2f}s</td>
+                 </tr>
+             """
+
+         html += """
+             </table>
+         </body>
+         </html>
+         """
+
+         output_path = self.report_dir / output_file
+         with open(output_path, "w") as f:
+             f.write(html)
+
+         logger.info(f"📊 HTML report exported: {output_path}")
+         return str(output_path)
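
For orientation, here is a minimal usage sketch of the DeploymentReport class added above. It is not part of the package diff itself; the deployment ID, host, duration, and commit values are illustrative placeholders.

from vm_tool.reporting import DeploymentReport

reporter = DeploymentReport()  # reports are written under .vm_tool/reports by default

# Record the outcome of one deployment; extra keyword arguments land in report["metadata"]
reporter.generate_deployment_report(
    deployment_id="deploy-20250101-001",  # placeholder ID
    host="203.0.113.10",                  # placeholder host
    duration=42.5,
    success=True,
    git_commit="abc1234",                 # placeholder, stored via **metadata
)

# Aggregate the last 7 days of reports, print them, and export an HTML summary
reporter.print_summary_report(days=7)
reporter.export_to_html(days=7, output_file="deployment_report.html")
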
vm_tool/runner.py ADDED
@@ -0,0 +1,445 @@
+ import logging
+ import os
+ import sys
+ from typing import List, Optional
+
+ import ansible_runner
+ import yaml
+ from pydantic import BaseModel, Field, model_validator, validator
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
+ )
+ logger = logging.getLogger(__name__)
+
+
+ class SetupRunnerConfig(BaseModel):
+     """
+     Configuration model for setting up the runner.
+
+     Attributes:
+         github_username (Optional[str]): GitHub username for authentication.
+         github_token (Optional[str]): GitHub token for authentication.
+         github_project_url (str): URL of the GitHub repository.
+         github_branch (str): GitHub branch to use (default: 'main').
+         docker_compose_file_path (str): Path to the Docker Compose file (default: 'docker-compose.yml').
+         dockerhub_username (Optional[str]): DockerHub username (optional).
+         dockerhub_password (Optional[str]): DockerHub password (required if username is provided).
+         env_path (Optional[str]): Path where the environment file should be created (optional).
+         env_data (Optional[dict]): Environment data to dump into the file (optional, should be a dict).
+     """
+
+     github_username: Optional[str] = Field(
+         default=None, description="GitHub username for authentication (optional)"
+     )
+     github_token: Optional[str] = Field(
+         default=None, description="GitHub token for authentication (optional)"
+     )
+     github_project_url: str = Field(..., description="URL of the GitHub repository")
+     github_branch: str = Field(
+         default="main", description="GitHub branch to use (default: 'main')"
+     )
+     docker_compose_file_path: str = Field(
+         default="docker-compose.yml",
+         description="Path to the Docker Compose file (default: 'docker-compose.yml')",
+     )
+     dockerhub_username: Optional[str] = Field(
+         default=None, description="DockerHub username (optional)"
+     )
+     dockerhub_password: Optional[str] = Field(
+         default=None,
+         description="DockerHub password (required if username is provided)",
+     )
+     env_path: Optional[str] = Field(
+         default=None,
+         description="Path where the environment file should be created (optional)",
+     )
+     env_data: Optional[dict] = Field(
+         default=None,
+         description="Environment data to dump into the file (optional, should be a dict)",
+     )
+
+     @validator("env_path", always=True)
+     def check_env_path_with_env_data(cls, v, values):
+         """If env_data is provided, env_path must also be provided."""
+         if values.get("env_data") is not None and not v:
+             raise ValueError("env_path must be provided if env_data is specified")
+         return v
+
+     @validator("env_data", always=True)
+     def check_env_data_with_env_path(cls, v, values):
+         """If env_path is provided, env_data must also be provided."""
+         if values.get("env_path") is not None and v is None:
+             raise ValueError("env_data must be provided if env_path is specified")
+         return v
+
+     @validator("dockerhub_password", always=True)
+     def check_dockerhub_password(cls, v, values):
+         """Ensures that a password is provided if a DockerHub username is set."""
+         if values.get("dockerhub_username") and not v:
+             raise ValueError(
+                 "DockerHub password must be provided if DockerHub username is specified"
+             )
+         return v
+
+     @validator("dockerhub_username", always=True)
+     def check_dockerhub_username(cls, v, values):
+         """Ensures that a username is provided if a DockerHub password is set."""
+         if values.get("dockerhub_password") and not v:
+             raise ValueError(
+                 "DockerHub username must be provided if DockerHub password is specified"
+             )
+         return v
+
+     @validator("github_token", always=True)
+     def check_github_token(cls, v, values):
+         """Ensures that a GitHub token is provided if a GitHub username is set."""
+         if values.get("github_username") and not v:
+             raise ValueError(
+                 "GitHub token must be provided if GitHub username is specified"
+             )
+         return v
+
+     @validator("github_username", always=True)
+     def check_github_username(cls, v, values):
+         """Ensures that a GitHub username is provided if a GitHub token is set."""
+         if values.get("github_token") and not v:
+             raise ValueError(
+                 "GitHub username must be provided if GitHub token is specified"
+             )
+         return v
+
+
+ class SSHConfig(BaseModel):
+     """
+     Configuration model for SSH authentication.
+
+     Attributes:
+         ssh_username (str): SSH username.
+         ssh_hostname (str): SSH host/IP.
+         ssh_password (Optional[str]): SSH password (optional if identity file is provided).
+         ssh_identity_file (Optional[str]): Path to SSH private key file (optional if password is provided).
+     """
+
+     ssh_username: str = Field(..., description="SSH username")
+     ssh_hostname: str = Field(..., description="SSH host/IP")
+     ssh_password: Optional[str] = Field(
+         default=None, description="SSH password (optional if identity file is provided)"
+     )
+     ssh_identity_file: Optional[str] = Field(
+         default=None,
+         description="Path to SSH private key file (optional if password is provided)",
+     )
+
+     @model_validator(mode="before")
+     def validate_authentication(cls, values):
+         """Ensures that either an SSH password or identity file is provided for authentication."""
+         password = values.get("ssh_password")
+         identity_file = values.get("ssh_identity_file")
+         if not password and not identity_file:
+             raise ValueError(
+                 "Either ssh_password or ssh_identity_file must be provided."
+             )
+         return values
+
+
+ class SetupRunner:
+     """
+     Main class to handle setup execution.
+
+     Attributes:
+         github_username (str): GitHub username.
+         github_token (str): GitHub token.
+         github_project_url (str): GitHub repository URL.
+         github_branch (str): GitHub branch to use.
+         docker_compose_file_path (str): Path to Docker Compose file.
+         dockerhub_username (str): DockerHub username.
+         dockerhub_password (str): DockerHub password.
+     """
+
+     def __init__(self, config: SetupRunnerConfig):
+         """Initializes the setup runner with the given configuration."""
+         self.config = config
+         self.github_username = config.github_username
+         self.github_token = config.github_token
+         self.github_project_url = config.github_project_url
+         self.github_branch = config.github_branch
+         self.docker_compose_file_path = config.docker_compose_file_path
+         self.dockerhub_username = config.dockerhub_username
+         self.dockerhub_password = config.dockerhub_password
+         self.env_path = config.env_path
+         self.env_data = config.env_data
+
+     def _get_git_commit(self) -> Optional[str]:
+         """Get current git commit hash if in a git repository."""
+         import subprocess
+
+         try:
+             result = subprocess.run(
+                 ["git", "rev-parse", "HEAD"],
+                 capture_output=True,
+                 text=True,
+                 timeout=5,
+             )
+             if result.returncode == 0:
+                 return result.stdout.strip()
+         except Exception:
+             pass
+         return None
+
+     def _run_ansible_playbook(
+         self, extravars: dict, inventory_file: str = "inventory.yml"
+     ):
+         """Executes an Ansible playbook with the given variables and inventory."""
+         current_dir = os.path.dirname(os.path.abspath(__file__))
+         venv_dir = os.path.join(sys.prefix, "ansible_runner_data")
+         os.makedirs(venv_dir, exist_ok=True)
+
+         playbook_path = os.path.join(current_dir, "vm_setup", "main.yml")
+         inventory_path = os.path.join(current_dir, "vm_setup", inventory_file)
+
+         try:
+             r = ansible_runner.run(
+                 private_data_dir=venv_dir,
+                 playbook=playbook_path,
+                 inventory=inventory_path,
+                 extravars=extravars,
+             )
+
+             if r.rc != 0:
+                 logger.error(
+                     f"Ansible playbook execution failed with return code {r.rc}: {r.stdout}"
+                 )
+                 raise RuntimeError(
+                     f"Ansible playbook execution failed with return code {r.rc}: {r.stdout}"
+                 )
+
+             logger.info("Ansible playbook executed successfully.")
+
+         except Exception as e:
+             logger.error(
+                 f"An error occurred while running the Ansible playbook: {str(e)}"
+             )
+             raise RuntimeError(
+                 f"An error occurred while running the Ansible playbook: {str(e)}"
+             )
+
+     def run_setup(self):
+         """Runs the setup process using Ansible."""
+         extravars = {
+             "GITHUB_USERNAME": self.github_username,
+             "GITHUB_TOKEN": self.github_token,
+             "GITHUB_PROJECT_URL": self.github_project_url,
+             "GITHUB_BRANCH": self.github_branch,
+             "DOCKERHUB_USERNAME": self.dockerhub_username,
+             "DOCKERHUB_PASSWORD": self.dockerhub_password,
+             "EXECUTION_TYPE": "normal",
+         }
+
+         if self.docker_compose_file_path:
+             extravars["DOCKER_COMPOSE_FILE_PATH"] = self.docker_compose_file_path
+         if self.env_path and self.env_data:
+             extravars["ENV_PATH"] = self.env_path
+             extravars["ENV_DATA"] = self.env_data
+
+         self._run_ansible_playbook(extravars, "inventory.yml")
+
+     def run_cloud_setup(self, ssh_configs: List[SSHConfig]):
+         """Runs the cloud setup using Ansible with dynamic inventory generation."""
+         inventory_file_path = os.path.join(
+             os.path.dirname(os.path.abspath(__file__)),
+             "vm_setup",
+             "dynamic_inventory.yml",
+         )
+
+         inventory_content = {
+             "all": {
+                 "hosts": {},
+                 "vars": {"ansible_python_interpreter": "/usr/bin/python3"},
+             }
+         }
+
+         for i, ssh_config in enumerate(ssh_configs):
+             host_key = f"cloud_host_{i}"
+             host_entry = {
+                 "ansible_host": ssh_config.ssh_hostname,
+                 "ansible_user": ssh_config.ssh_username,
+             }
+             if ssh_config.ssh_identity_file:
+                 host_entry["ansible_ssh_private_key_file"] = (
+                     ssh_config.ssh_identity_file
+                 )
+             elif ssh_config.ssh_password:
+                 host_entry["ansible_ssh_pass"] = ssh_config.ssh_password
+
+             inventory_content["all"]["hosts"][host_key] = host_entry
+
+         with open(inventory_file_path, "w") as inventory_file:
+             yaml.dump(inventory_content, inventory_file)
+
+         extravars = {
+             "GITHUB_USERNAME": self.github_username,
+             "GITHUB_TOKEN": self.github_token,
+             "GITHUB_PROJECT_URL": self.github_project_url,
+             "GITHUB_BRANCH": self.github_branch,
+             "DOCKERHUB_USERNAME": self.dockerhub_username,
+             "DOCKERHUB_PASSWORD": self.dockerhub_password,
+             "EXECUTION_TYPE": "cloud",
+         }
+
+         if self.docker_compose_file_path:
+             extravars["DOCKER_COMPOSE_FILE_PATH"] = self.docker_compose_file_path
+         if self.env_path and self.env_data:
+             extravars["ENV_PATH"] = self.env_path
+             extravars["ENV_DATA"] = self.env_data
+
+         self._run_ansible_playbook(extravars, "dynamic_inventory.yml")
+
+     def run_k8s_setup(self, inventory_file="inventory.yml"):
+         """Runs the K8s setup playbook."""
+         logger.info("Starting K8s setup...")
+         # Reuse existing variables or allow bare execution
+         extravars = {"ansible_python_interpreter": "/usr/bin/python3"}
+         self._run_ansible_playbook(extravars, "k8s.yml")
+         logger.info("K8s setup completed.")
+
+     def run_monitoring_setup(self, inventory_file="inventory.yml"):
+         """Runs the Monitoring setup playbook."""
+         logger.info("Starting Monitoring setup...")
+         extravars = {"ansible_python_interpreter": "/usr/bin/python3"}
+         self._run_ansible_playbook(extravars, "monitoring.yml")
+         logger.info("Monitoring setup completed.")
+
+     def run_docker_deploy(
+         self,
+         compose_file="docker-compose.yml",
+         inventory_file="inventory.yml",
+         host: str = None,
+         user: str = None,
+         env_file: str = None,
+         deploy_command: str = None,
+         force: bool = False,
+     ):
+         """Runs the Docker Compose deployment with idempotency."""
+         from vm_tool.state import DeploymentState
+
+         # Initialize state tracker
+         state = DeploymentState()
+
+         # Compute hash of compose file for change detection
+         compose_hash = state.compute_hash(compose_file)
+
+         # Check if deployment is needed (unless force is True)
+         service_name = "docker-compose"
+         if host and not force:
+             if not state.needs_update(host, compose_hash, service_name):
+                 logger.info(
+                     f"✅ Deployment is up-to-date for {host}. "
+                     f"Use --force to redeploy anyway."
+                 )
+                 print(
+                     f"✅ No changes detected. Deployment is up-to-date.\n"
+                     f" Use --force flag to redeploy anyway."
+                 )
+                 return
+
+         target_inventory = inventory_file
+
+         # Generate dynamic inventory if host is provided
+         if host:
+             logger.info(f"Generating dynamic inventory for host: {host}")
+             inventory_content = {
+                 "all": {
+                     "hosts": {
+                         "target_host": {
+                             "ansible_host": host,
+                             "ansible_connection": "ssh",
+                             "ansible_ssh_common_args": "-o StrictHostKeyChecking=no",
+                         }
+                     },
+                     "vars": {"ansible_python_interpreter": "/usr/bin/python3"},
+                 }
+             }
+             if user:
+                 inventory_content["all"]["hosts"]["target_host"]["ansible_user"] = user
+
+             # Use a temporary file or override inventory.yml locally
+             current_dir = os.path.dirname(os.path.abspath(__file__))
+             generated_inventory_path = os.path.join(
+                 current_dir, "vm_setup", "generated_inventory.yml"
+             )
+
+             with open(generated_inventory_path, "w") as f:
+                 yaml.dump(inventory_content, f)
+
+             target_inventory = "generated_inventory.yml"
+
+         logger.info(
+             f"Starting Docker deployment using {compose_file} on {target_inventory}..."
+         )
+
+         extravars = {
+             "ansible_python_interpreter": "/usr/bin/python3",
+             "DOCKER_COMPOSE_FILE_PATH": compose_file,
+             "GITHUB_USERNAME": self.github_username,
+             "GITHUB_TOKEN": self.github_token,
+             "GITHUB_PROJECT_URL": self.github_project_url,
+             "DEPLOY_MODE": "push",
+             "SOURCE_PATH": os.getcwd(),  # Current working directory where vm_tool is run
+             "project_dest_dir": "~/app",
+             "GITHUB_REPOSITORY_OWNER": os.environ.get("GITHUB_REPOSITORY_OWNER", ""),
+         }
+
+         if env_file:
+             extravars["ENV_FILE_PATH"] = env_file
+
+         if deploy_command:
+             extravars["DEPLOY_COMMAND"] = deploy_command
+
+         playbook_path = os.path.join(
+             os.path.dirname(__file__), "vm_setup", "push_code.yml"
+         )
+
+         try:
+             r = ansible_runner.run(
+                 private_data_dir=os.path.dirname(__file__),
+                 playbook=playbook_path,
+                 inventory=target_inventory,
+                 extravars=extravars,
+             )
+
+             if r.status == "successful":
+                 logger.info("Docker deployment completed successfully.")
+                 # Record successful deployment
+                 if host:
+                     state.record_deployment(
+                         host, compose_file, compose_hash, service_name
+                     )
+                     logger.info(f"✅ Deployment state recorded for {host}")
+
+                     # Record in history
+                     from vm_tool.history import DeploymentHistory
+
+                     history = DeploymentHistory()
+                     git_commit = self._get_git_commit()
+                     deployment_id = history.record_deployment(
+                         host=host,
+                         compose_file=compose_file,
+                         compose_hash=compose_hash,
+                         git_commit=git_commit,
+                         service_name=service_name,
+                         status="success",
+                     )
+                     logger.info(f"📝 Deployment recorded in history: {deployment_id}")
+             else:
+                 error_msg = f"Deployment failed with status: {r.status}"
+                 logger.error(error_msg)
+                 if host:
+                     state.mark_failed(host, service_name, error_msg)
+                 raise RuntimeError(error_msg)
+
+         except Exception as e:
+             if host:
+                 state.mark_failed(host, service_name, str(e))
+             raise
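
For orientation, a minimal usage sketch of SetupRunnerConfig, SSHConfig, and SetupRunner as defined above. It is not part of the package diff; the repository URL, host, user, and key path are placeholders, and the three run_* calls are independent entry points rather than a required sequence.

from vm_tool.runner import SetupRunner, SetupRunnerConfig, SSHConfig

config = SetupRunnerConfig(
    github_project_url="https://github.com/example-org/example-app.git",  # placeholder
    github_branch="main",
    docker_compose_file_path="docker-compose.yml",
)
runner = SetupRunner(config)

# Local setup against the bundled vm_setup/inventory.yml
runner.run_setup()

# Cloud setup: a dynamic inventory is generated from the SSH configs
runner.run_cloud_setup([
    SSHConfig(
        ssh_username="ubuntu",              # placeholder
        ssh_hostname="203.0.113.10",        # placeholder
        ssh_identity_file="~/.ssh/id_rsa",  # or ssh_password="..."
    )
])

# Push-based Docker Compose deployment; skipped when the compose file hash is
# unchanged for the host, unless force=True
runner.run_docker_deploy(
    compose_file="docker-compose.yml",
    host="203.0.113.10",  # placeholder
    user="ubuntu",        # placeholder
)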