vm_tool-1.0.32-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- examples/README.md +5 -0
- examples/__init__.py +1 -0
- examples/cloud/README.md +3 -0
- examples/cloud/__init__.py +1 -0
- examples/cloud/ssh_identity_file.py +27 -0
- examples/cloud/ssh_password.py +27 -0
- examples/cloud/template_cloud_setup.py +36 -0
- examples/deploy_full_setup.py +44 -0
- examples/docker-compose.example.yml +47 -0
- examples/ec2-setup.sh +95 -0
- examples/github-actions-ec2.yml +245 -0
- examples/github-actions-full-setup.yml +58 -0
- examples/local/.keep +1 -0
- examples/local/README.md +3 -0
- examples/local/__init__.py +1 -0
- examples/local/template_local_setup.py +27 -0
- examples/production-deploy.sh +70 -0
- examples/rollback.sh +52 -0
- examples/setup.sh +52 -0
- examples/ssh_key_management.py +22 -0
- examples/version_check.sh +3 -0
- vm_tool/__init__.py +0 -0
- vm_tool/alerting.py +274 -0
- vm_tool/audit.py +118 -0
- vm_tool/backup.py +125 -0
- vm_tool/benchmarking.py +200 -0
- vm_tool/cli.py +761 -0
- vm_tool/cloud.py +125 -0
- vm_tool/completion.py +200 -0
- vm_tool/compliance.py +104 -0
- vm_tool/config.py +92 -0
- vm_tool/drift.py +98 -0
- vm_tool/generator.py +462 -0
- vm_tool/health.py +197 -0
- vm_tool/history.py +131 -0
- vm_tool/kubernetes.py +89 -0
- vm_tool/metrics.py +183 -0
- vm_tool/notifications.py +152 -0
- vm_tool/plugins.py +119 -0
- vm_tool/policy.py +197 -0
- vm_tool/rbac.py +140 -0
- vm_tool/recovery.py +169 -0
- vm_tool/reporting.py +218 -0
- vm_tool/runner.py +445 -0
- vm_tool/secrets.py +285 -0
- vm_tool/ssh.py +150 -0
- vm_tool/state.py +122 -0
- vm_tool/strategies/__init__.py +16 -0
- vm_tool/strategies/ab_testing.py +258 -0
- vm_tool/strategies/blue_green.py +227 -0
- vm_tool/strategies/canary.py +277 -0
- vm_tool/validation.py +267 -0
- vm_tool/vm_setup/cleanup.yml +27 -0
- vm_tool/vm_setup/docker/create_docker_service.yml +63 -0
- vm_tool/vm_setup/docker/docker_setup.yml +7 -0
- vm_tool/vm_setup/docker/install_docker_and_compose.yml +92 -0
- vm_tool/vm_setup/docker/login_to_docker_hub.yml +6 -0
- vm_tool/vm_setup/github/git_configuration.yml +68 -0
- vm_tool/vm_setup/inventory.yml +1 -0
- vm_tool/vm_setup/k8s.yml +15 -0
- vm_tool/vm_setup/main.yml +27 -0
- vm_tool/vm_setup/monitoring.yml +42 -0
- vm_tool/vm_setup/project_service.yml +17 -0
- vm_tool/vm_setup/push_code.yml +40 -0
- vm_tool/vm_setup/setup.yml +17 -0
- vm_tool/vm_setup/setup_project_env.yml +7 -0
- vm_tool/webhooks.py +83 -0
- vm_tool-1.0.32.dist-info/METADATA +213 -0
- vm_tool-1.0.32.dist-info/RECORD +73 -0
- vm_tool-1.0.32.dist-info/WHEEL +5 -0
- vm_tool-1.0.32.dist-info/entry_points.txt +2 -0
- vm_tool-1.0.32.dist-info/licenses/LICENSE +21 -0
- vm_tool-1.0.32.dist-info/top_level.txt +2 -0
vm_tool/generator.py
ADDED
@@ -0,0 +1,462 @@
"""Dynamic CI/CD pipeline generator with all vm_tool features."""

import os
from pathlib import Path
from typing import Optional


class PipelineGenerator:
    """Generate CI/CD pipelines with vm_tool features."""

    def __init__(
        self,
        platform: str = "github",
        strategy: str = "docker",  # docker, registry
        enable_monitoring: bool = False,
        enable_health_checks: bool = True,
        enable_backup: bool = True,
        enable_rollback: bool = True,
        enable_drift_detection: bool = False,
        enable_dry_run: bool = True,
        health_port: Optional[int] = 8000,
        health_url: Optional[str] = None,
        backup_paths: Optional[list] = None,
        app_port: int = 8000,
    ):
        self.platform = platform
        self.strategy = strategy
        self.enable_monitoring = enable_monitoring
        self.enable_health_checks = enable_health_checks
        self.enable_backup = enable_backup
        self.enable_rollback = enable_rollback
        self.enable_drift_detection = enable_drift_detection
        self.enable_dry_run = enable_dry_run
        self.health_port = health_port
        self.health_url = (
            health_url or f"http://${{{{ secrets.EC2_HOST }}}}:{app_port}/health"
        )
        self.backup_paths = backup_paths or ["/app", "/etc/nginx"]
        self.app_port = app_port

        # New options
        self.run_linting = False
        self.run_tests = False
        self.python_version = "3.11"
        self.branch = "main"

    def set_options(
        self,
        run_linting: bool = False,
        run_tests: bool = False,
        python_version: str = "3.11",
        branch: str = "main",
    ):
        """Set additional options for the pipeline."""
        self.run_linting = run_linting
        self.run_tests = run_tests
        self.python_version = python_version
        self.branch = branch

    def generate(self) -> str:
        """Generate pipeline based on platform."""
        if self.platform == "github":
            return self._generate_github_actions()
        elif self.platform == "gitlab":
            raise NotImplementedError("GitLab CI support coming soon")
        else:
            raise ValueError(f"Unsupported platform: {self.platform}")

    def _generate_github_actions(self) -> str:
        """Generate GitHub Actions workflow with all features."""

        # Build steps dynamically
        steps = []

        # Basic setup steps
        steps.extend(
            [
                self._step_checkout(),
                self._step_validate_secrets(),
                self._step_setup_python(),
                self._step_install_vm_tool(),
            ]
        )

        if self.run_linting:
            steps.append(self._step_run_linting())

        if self.run_tests:
            steps.append(self._step_run_tests())

        # Build and Push (Registry Strategy)
        if self.strategy == "registry":
            steps.append(self._step_login_ghcr())
            steps.append(self._step_build_push())

        steps.extend(
            [
                self._step_setup_ssh(),
                self._step_validate_ssh(),
            ]
        )

        # Copy files (only if NOT registry strategy, or just config for registry)
        if self.strategy == "registry":
            # For registry, we only need docker-compose and .env, not the full source
            steps.append(self._step_copy_compose_only())
        else:
            steps.append(self._step_copy_files())

        # Backup step
        if self.enable_backup:
            steps.append(self._step_create_backup())

        # Drift detection (pre-deployment)
        if self.enable_drift_detection:
            steps.append(self._step_drift_check())

        # Dry-run step
        if self.enable_dry_run:
            steps.append(self._step_dry_run())

        # Main deployment
        steps.append(self._step_deploy())

        # Health checks
        if self.enable_health_checks:
            steps.append(self._step_health_check())

        # Verification
        steps.append(self._step_verify())

        # Rollback on failure
        if self.enable_rollback:
            steps.append(self._step_rollback())

        # Cleanup
        steps.append(self._step_cleanup())

        # Notification
        steps.append(self._step_notification())

        # Combine all steps
        steps_yaml = "\n".join(steps)

        return f"""name: Deploy to EC2 with vm_tool

on:
  push:
    branches: [ {self.branch} ]
  pull_request:
    branches: [ {self.branch} ]
  workflow_dispatch:

env:
  EC2_HOST: ${{{{ secrets.EC2_HOST }}}}
  EC2_USER: ${{{{ secrets.EC2_USER }}}}
  APP_PORT: {self.app_port}

jobs:
  deploy:
    runs-on: ubuntu-latest

    steps:
{steps_yaml}
"""

    def _step_checkout(self) -> str:
        return """      - name: Checkout code
        uses: actions/checkout@v4"""

    def _step_validate_secrets(self) -> str:
        return """
      - name: Validate Required Secrets
        run: |
          echo "🔐 Validating GitHub Secrets..."
          MISSING_SECRETS=()

          if [ -z "${{ secrets.EC2_HOST }}" ]; then
            MISSING_SECRETS+=("EC2_HOST")
          fi

          if [ -z "${{ secrets.EC2_USER }}" ]; then
            MISSING_SECRETS+=("EC2_USER")
          fi

          if [ -z "${{ secrets.EC2_SSH_KEY }}" ]; then
            MISSING_SECRETS+=("EC2_SSH_KEY")
          fi

          if [ ${#MISSING_SECRETS[@]} -ne 0 ]; then
            echo ""
            echo "❌ ERROR: Missing required GitHub Secrets!"
            echo ""
            echo "Missing: ${MISSING_SECRETS[*]}"
            echo ""
            echo "📝 How to add secrets:"
            echo "1. Go to: Repository → Settings → Secrets → Actions"
            echo "2. Add each secret:"
            echo ""

            if [[ " ${MISSING_SECRETS[*]} " =~ " EC2_HOST " ]]; then
              echo " EC2_HOST: Your EC2 IP (e.g., 54.123.45.67)"
            fi

            if [[ " ${MISSING_SECRETS[*]} " =~ " EC2_USER " ]]; then
              echo " EC2_USER: SSH username (e.g., ubuntu)"
            fi

            if [[ " ${MISSING_SECRETS[*]} " =~ " EC2_SSH_KEY " ]]; then
              echo " EC2_SSH_KEY: Run 'cat ~/.ssh/id_rsa' and copy output"
            fi

            echo ""
            echo "📚 See: docs/ssh-key-setup.md"
            exit 1
          fi

          echo "✅ All secrets configured"
"""

    def _step_setup_python(self) -> str:
        return f"""
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '{self.python_version}'"""

    def _step_install_vm_tool(self) -> str:
        return """
      - name: Install vm_tool
        run: pip install vm-tool"""

    def _step_run_linting(self) -> str:
        return """
      - name: Lint with flake8
        run: |
          pip install flake8
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings.
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics"""

    def _step_run_tests(self) -> str:
        return """
      - name: Test with pytest
        run: |
          pip install pytest
          pytest"""

    def _step_login_ghcr(self) -> str:
        return """
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}"""

    def _step_build_push(self) -> str:
        return """
      - name: Build and push Docker images
        env:
          GITHUB_REPOSITORY_OWNER: ${{ github.repository_owner }}
        run: |
          # Create .env file for build context if needed
          if [ -f .env.production ]; then
            cp .env.production .env
          fi

          # Build and push using docker-compose
          docker-compose build
          docker-compose push"""

    def _step_copy_compose_only(self) -> str:
        return """
      - name: Copy docker-compose to EC2
        run: |
          ssh -i ~/.ssh/deploy_key ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} \\
            'mkdir -p ~/app'

          scp -i ~/.ssh/deploy_key docker-compose.yml \\
            ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }}:~/app/

          # Copy .env file
          if [ -f .env.production ]; then
            scp -i ~/.ssh/deploy_key .env.production \\
              ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }}:~/app/.env
          fi"""

    def _step_setup_ssh(self) -> str:
        return """
      - name: Set up SSH
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.EC2_SSH_KEY }}" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          ssh-keyscan -H ${{ secrets.EC2_HOST }} >> ~/.ssh/known_hosts"""

    def _step_validate_ssh(self) -> str:
        return """
      - name: Validate SSH Connection
        run: |
          echo "✅ Testing SSH connection..."
          ssh -i ~/.ssh/deploy_key -o StrictHostKeyChecking=no \\
            ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} "echo 'Connected'" || {
            echo "❌ SSH failed! Check docs/ssh-key-setup.md"
            exit 1
          }"""

    def _step_copy_files(self) -> str:
        return """
      - name: Copy docker-compose to EC2
        run: |
          ssh -i ~/.ssh/deploy_key ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} \\
            'mkdir -p ~/app'

          scp -i ~/.ssh/deploy_key docker-compose.yml \\
            ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }}:~/app/

          # Copy any .env files if they exist
          if [ -f .env.production ]; then
            scp -i ~/.ssh/deploy_key .env.production \\
              ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }}:~/app/.env
          fi"""

    def _step_create_backup(self) -> str:
        return """
      - name: Create backup
        run: |
          ssh -i ~/.ssh/deploy_key ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} << 'EOF'
          mkdir -p ~/backups
          if [ -d ~/app ]; then
            tar -czf ~/backups/backup-$(date +%Y%m%d-%H%M%S).tar.gz -C ~/app . 2>/dev/null || true
            echo "✅ Backup created"
          fi
          EOF"""

    def _step_drift_check(self) -> str:
        return """
      - name: Check drift
        continue-on-error: true
        run: |
          echo "🔍 Checking for configuration drift..."
          # Add drift detection logic"""

    def _step_dry_run(self) -> str:
        return """
      - name: Dry-run
        run: |
          echo "🔍 DRY-RUN: Previewing deployment"
          ssh -i ~/.ssh/deploy_key ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} << 'EOF'
          cd ~/app && docker-compose config
          EOF"""

    def _step_deploy(self) -> str:
        return """
      - name: Deploy with vm_tool (Ansible-based)
        run: |
          # Create inventory file for Ansible
          cat > inventory.yml << EOF
          all:
            hosts:
              production:
                ansible_host: ${{ secrets.EC2_HOST }}
                ansible_user: ${{ secrets.EC2_USER }}
                ansible_ssh_private_key_file: ~/.ssh/deploy_key
          EOF

          # Deploy using vm_tool (uses Ansible under the hood)
          export GITHUB_REPOSITORY_OWNER=${{ github.repository_owner }}
          vm_tool deploy-docker \\
            --host ${{ secrets.EC2_HOST }} \\
            --user ${{ secrets.EC2_USER }} \\
            --compose-file ~/app/docker-compose.yml \\
            --inventory inventory.yml \\
            --force"""

    def _step_health_check(self) -> str:
        return f"""
      - name: Health check
        run: |
          for i in {{1..30}}; do
            if curl -f {self.health_url} 2>/dev/null; then
              echo "✅ Health check passed"
              exit 0
            fi
            sleep 2
          done
          echo "❌ Health check failed"
          exit 1"""

    def _step_verify(self) -> str:
        return """
      - name: Verify
        run: |
          ssh -i ~/.ssh/deploy_key ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} << 'EOF'
          cd ~/app
          docker-compose ps
          docker-compose logs --tail=20
          EOF"""

    def _step_rollback(self) -> str:
        return """
      - name: Rollback on failure
        if: failure()
        run: |
          echo "⚠️ Rolling back..."
          ssh -i ~/.ssh/deploy_key ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} << 'EOF'
          BACKUP=$(ls -t ~/backups/*.tar.gz 2>/dev/null | head -1)
          if [ -n "$BACKUP" ]; then
            cd ~/app && tar -xzf $BACKUP
            docker-compose up -d
            echo "✅ Rolled back"
          fi
          EOF"""

    def _step_cleanup(self) -> str:
        return """
      - name: Cleanup
        if: success()
        run: |
          ssh -i ~/.ssh/deploy_key ${{ secrets.EC2_USER }}@${{ secrets.EC2_HOST }} << 'EOF'
          cd ~/backups 2>/dev/null || exit 0
          ls -t *.tar.gz 2>/dev/null | tail -n +6 | xargs rm -f || true
          EOF"""

    def _step_notification(self) -> str:
        return """
      - name: Notify
        if: always()
        run: |
          if [ "${{ job.status }}" == "success" ]; then
            echo "✅ Deployed to ${{ secrets.EC2_HOST }}:${{ env.APP_PORT }}"
          else
            echo "❌ Deployment failed"
          fi"""

    def _generate_gitlab_ci(self) -> str:
        """Generate GitLab CI pipeline."""
        return """# GitLab CI (Coming Soon)
# Use GitHub Actions for now
"""

    def save(self, output_path: Optional[str] = None) -> str:
        """Save generated pipeline to file."""
        content = self.generate()

        if output_path is None:
            if self.platform == "github":
                output_path = ".github/workflows/deploy.yml"
            elif self.platform == "gitlab":
                output_path = ".gitlab-ci.yml"

        # Create directory if it doesn't exist ("." when the path has no directory part)
        os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True)

        # Write file
        with open(output_path, "w") as f:
            f.write(content)

        return output_path
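PipelineGenerator is driven entirely by its constructor arguments plus set_options(), and save() writes the rendered workflow to .github/workflows/deploy.yml by default. A brief usage sketch, not part of the packaged files, assuming vm-tool is installed and the module path shown above:

from vm_tool.generator import PipelineGenerator

# Generate a GitHub Actions workflow that uses the registry strategy,
# runs linting and tests, and keeps backup/rollback/dry-run steps enabled.
generator = PipelineGenerator(
    platform="github",
    strategy="registry",
    enable_backup=True,
    enable_rollback=True,
    app_port=8000,
)
generator.set_options(run_linting=True, run_tests=True, python_version="3.11", branch="main")

# save() returns the path it wrote to (.github/workflows/deploy.yml here).
path = generator.save()
print(f"Workflow written to {path}")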
vm_tool/health.py
ADDED
@@ -0,0 +1,197 @@
"""Health check and smoke test functionality."""

import logging
import socket
import time
from typing import Dict, List, Optional
from urllib.parse import urlparse

logger = logging.getLogger(__name__)


class HealthCheck:
    """Performs health checks on deployed services."""

    def __init__(self, host: str, timeout: int = 30):
        self.host = host
        self.timeout = timeout

    def check_port(self, port: int) -> bool:
        """Check if a port is open and accepting connections."""
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(5)
            result = sock.connect_ex((self.host, port))
            sock.close()
            return result == 0
        except socket.error as e:
            logger.warning(f"Port check failed for {self.host}:{port} - {e}")
            return False

    def check_http(self, url: str, expected_status: int = 200) -> bool:
        """Check if HTTP endpoint returns expected status."""
        try:
            import requests

            response = requests.get(url, timeout=5)
            return response.status_code == expected_status
        except ImportError:
            logger.warning("requests library not installed, skipping HTTP check")
            return True  # Don't fail if requests not available
        except Exception as e:
            logger.warning(f"HTTP check failed for {url} - {e}")
            return False

    def wait_for_port(self, port: int, max_attempts: int = None) -> bool:
        """Wait for a port to become available."""
        if max_attempts is None:
            max_attempts = self.timeout

        logger.info(f"Waiting for {self.host}:{port} to be available...")

        for attempt in range(max_attempts):
            if self.check_port(port):
                logger.info(f"✅ Port {port} is now available")
                return True

            if attempt < max_attempts - 1:
                time.sleep(1)

        logger.error(f"❌ Port {port} did not become available after {max_attempts}s")
        return False

    def wait_for_http(
        self, url: str, expected_status: int = 200, max_attempts: int = None
    ) -> bool:
        """Wait for HTTP endpoint to return expected status."""
        if max_attempts is None:
            max_attempts = self.timeout

        logger.info(f"Waiting for {url} to respond with status {expected_status}...")

        for attempt in range(max_attempts):
            if self.check_http(url, expected_status):
                logger.info(f"✅ {url} is now responding correctly")
                return True

            if attempt < max_attempts - 1:
                time.sleep(1)

        logger.error(f"❌ {url} did not respond correctly after {max_attempts}s")
        return False

    def run_custom_check(self, command: str) -> bool:
        """Run a custom health check command via SSH."""
        import subprocess

        try:
            # Execute command via SSH
            ssh_command = [
                "ssh",
                "-o",
                "StrictHostKeyChecking=no",
                self.host,
                command,
            ]

            result = subprocess.run(
                ssh_command, capture_output=True, text=True, timeout=10
            )

            if result.returncode == 0:
                logger.info(f"✅ Custom check passed: {command}")
                return True
            else:
                logger.error(
                    f"❌ Custom check failed: {command}\n"
                    f" stdout: {result.stdout}\n"
                    f" stderr: {result.stderr}"
                )
                return False

        except subprocess.TimeoutExpired:
            logger.error(f"❌ Custom check timed out: {command}")
            return False
        except Exception as e:
            logger.error(f"❌ Custom check error: {command} - {e}")
            return False


class SmokeTestSuite:
    """Manages a suite of smoke tests."""

    def __init__(self, host: str):
        self.host = host
        self.health_check = HealthCheck(host)
        self.tests: List[Dict] = []

    def add_port_check(self, port: int, name: str = None):
        """Add a port availability check."""
        if name is None:
            name = f"Port {port}"

        self.tests.append({"type": "port", "port": port, "name": name})

    def add_http_check(self, url: str, expected_status: int = 200, name: str = None):
        """Add an HTTP endpoint check."""
        if name is None:
            name = f"HTTP {url}"

        self.tests.append(
            {
                "type": "http",
                "url": url,
                "expected_status": expected_status,
                "name": name,
            }
        )

    def add_custom_check(self, command: str, name: str):
        """Add a custom command check."""
        self.tests.append({"type": "custom", "command": command, "name": name})

    def run_all(self) -> bool:
        """Run all smoke tests and return overall result."""
        if not self.tests:
            logger.info("No smoke tests configured")
            return True

        logger.info(f"Running {len(self.tests)} smoke tests...")
        print(f"\n🧪 Running Smoke Tests ({len(self.tests)} tests)...")

        passed = 0
        failed = 0

        for test in self.tests:
            test_type = test["type"]
            name = test["name"]

            print(f" • {name}...", end=" ", flush=True)

            if test_type == "port":
                result = self.health_check.wait_for_port(test["port"])
            elif test_type == "http":
                result = self.health_check.wait_for_http(
                    test["url"], test["expected_status"]
                )
            elif test_type == "custom":
                result = self.health_check.run_custom_check(test["command"])
            else:
                logger.error(f"Unknown test type: {test_type}")
                result = False

            if result:
                print("✅ PASS")
                passed += 1
            else:
                print("❌ FAIL")
                failed += 1

        print(f"\n📊 Results: {passed} passed, {failed} failed")

        if failed > 0:
            logger.error(f"Smoke tests failed: {failed}/{len(self.tests)}")
            return False

        logger.info(f"All smoke tests passed: {passed}/{len(self.tests)}")
        return True
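HealthCheck waits on raw TCP ports and HTTP endpoints (degrading gracefully when requests is not installed), while SmokeTestSuite aggregates several such checks and reports a pass/fail summary. A brief usage sketch, not part of the packaged files; the host and URL below are placeholders:

from vm_tool.health import SmokeTestSuite

# Aggregate a few post-deployment checks against an example host.
suite = SmokeTestSuite(host="203.0.113.10")
suite.add_port_check(22, name="SSH")
suite.add_port_check(8000, name="App port")
suite.add_http_check("http://203.0.113.10:8000/health", expected_status=200)
suite.add_custom_check("docker ps --format '{{.Names}}'", name="Containers running")

# run_all() returns True only if every configured check eventually passes.
if not suite.run_all():
    raise SystemExit(1)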