deployvm 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deployvm/__init__.py +38 -0
- deployvm/__main__.py +4 -0
- deployvm/apps.py +626 -0
- deployvm/cli.py +823 -0
- deployvm/providers.py +1136 -0
- deployvm/server.py +773 -0
- deployvm/types.py +45 -0
- deployvm/utils.py +75 -0
- deployvm-0.1.3.dist-info/METADATA +363 -0
- deployvm-0.1.3.dist-info/RECORD +12 -0
- deployvm-0.1.3.dist-info/WHEEL +4 -0
- deployvm-0.1.3.dist-info/entry_points.txt +2 -0
deployvm/__init__.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""Deploy VM - Cloud deployment tool for DigitalOcean and AWS."""
|
|
2
|
+
|
|
3
|
+
from .cli import app
|
|
4
|
+
from .providers import (
|
|
5
|
+
AWSProvider,
|
|
6
|
+
DigitalOceanProvider,
|
|
7
|
+
Provider,
|
|
8
|
+
get_provider,
|
|
9
|
+
)
|
|
10
|
+
from .types import (
|
|
11
|
+
AppInfo,
|
|
12
|
+
AppType,
|
|
13
|
+
InstanceData,
|
|
14
|
+
InstanceListItem,
|
|
15
|
+
InstanceResult,
|
|
16
|
+
ProviderName,
|
|
17
|
+
)
|
|
18
|
+
from .utils import error, get_ssh_user, log, run_cmd, run_cmd_json, warn
|
|
19
|
+
|
|
20
|
+
__all__ = [
|
|
21
|
+
"AWSProvider",
|
|
22
|
+
"DigitalOceanProvider",
|
|
23
|
+
"Provider",
|
|
24
|
+
"get_provider",
|
|
25
|
+
"app",
|
|
26
|
+
"log",
|
|
27
|
+
"warn",
|
|
28
|
+
"error",
|
|
29
|
+
"get_ssh_user",
|
|
30
|
+
"run_cmd",
|
|
31
|
+
"run_cmd_json",
|
|
32
|
+
"AppInfo",
|
|
33
|
+
"AppType",
|
|
34
|
+
"InstanceData",
|
|
35
|
+
"InstanceListItem",
|
|
36
|
+
"InstanceResult",
|
|
37
|
+
"ProviderName",
|
|
38
|
+
]
|
deployvm/__main__.py
ADDED
deployvm/apps.py
ADDED
|
@@ -0,0 +1,626 @@
|
|
|
1
|
+
"""App deployment logic for Nuxt and FastAPI applications."""
|
|
2
|
+
|
|
3
|
+
import hashlib
|
|
4
|
+
import json
|
|
5
|
+
import subprocess
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from textwrap import dedent
|
|
8
|
+
|
|
9
|
+
from .server import (
|
|
10
|
+
rsync,
|
|
11
|
+
ssh,
|
|
12
|
+
ssh_as_user,
|
|
13
|
+
ssh_script,
|
|
14
|
+
ssh_write_file,
|
|
15
|
+
)
|
|
16
|
+
from .utils import error, log, warn
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def filter_aws_credentials_from_env(
    source_dir: str,
    provider_name: str,
    ip: str,
    remote_path: str,
    ssh_user: str,
) -> bool:
    """Strip static AWS credentials out of .env and upload the filtered copy.

    Drops AWS_PROFILE, AWS_ACCESS_KEY_ID, and AWS_SECRET_ACCESS_KEY lines
    because EC2 instances authenticate through IAM roles. When AWS_PROFILE
    is dropped and no AWS_REGION line exists, a region derived from the
    local AWS profile configuration is appended so Bedrock and other
    region-bound services keep working.

    :param source_dir: Local source directory path
    :param provider_name: Cloud provider name
    :param ip: Remote instance IP
    :param remote_path: Remote path for filtered .env file
    :param ssh_user: SSH user for rsync
    :return: True if .env should be excluded from main rsync, False otherwise
    """
    env_path = Path(source_dir) / ".env"

    # Filtering only applies to AWS targets that actually ship a .env file.
    if provider_name != "aws" or not env_path.exists():
        return False

    log("Filtering AWS credentials from .env (EC2 instances use IAM roles)")

    credential_prefixes = ("AWS_PROFILE=", "AWS_ACCESS_KEY_ID=", "AWS_SECRET_ACCESS_KEY=")

    kept: list[str] = []
    removed: list[str] = []
    saw_region = False
    dropped_profile = False

    for raw in env_path.read_text().splitlines():
        entry = raw.strip()
        if entry.startswith("AWS_REGION="):
            # Region is safe to keep; remember we saw it.
            saw_region = True
            kept.append(raw)
            continue
        if entry.startswith(credential_prefixes):
            name = entry.split("=")[0]
            removed.append(name)
            if name == "AWS_PROFILE":
                dropped_profile = True
            continue
        kept.append(raw)

    if removed:
        log(f" Removed: {', '.join(removed)}")
    else:
        log(" No AWS credentials found in .env")

    # Dropping AWS_PROFILE can silently lose the region it implied; recover
    # it from the local AWS configuration when the .env has no AWS_REGION.
    if dropped_profile and not saw_region:
        # Local import avoids a circular dependency with providers.py.
        from .providers import AWSProvider

        aws_config = AWSProvider.get_aws_config(is_raise_exception=False)
        region = aws_config.get("region_name")

        if region:
            kept.append(f"AWS_REGION={region}")
            log(f" Added AWS_REGION={region} (from AWS profile configuration)")
        else:
            warn(
                "AWS_PROFILE removed but AWS_REGION not found!\n"
                " Bedrock and other AWS services require AWS_REGION.\n"
                " Please add AWS_REGION to your .env file."
            )

    filtered_content = "\n".join(kept) + "\n"

    # Clear any pre-existing .env first (could be a directory left behind
    # by a failed previous run), then write the filtered file directly.
    ssh(ip, f"sudo rm -rf {remote_path}", user=ssh_user)
    ssh_write_file(ip, remote_path, filtered_content, user=ssh_user)

    return True  # Signal that .env should be excluded from main rsync
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class BaseApp:
|
|
106
|
+
"""Base class for app deployment."""
|
|
107
|
+
|
|
108
|
+
def __init__(self, instance_data: dict, provider_name: str):
|
|
109
|
+
"""Initialize app.
|
|
110
|
+
|
|
111
|
+
:param instance_data: Instance data from .instance.json
|
|
112
|
+
:param provider_name: Cloud provider name
|
|
113
|
+
"""
|
|
114
|
+
self.instance = instance_data
|
|
115
|
+
self.ip = instance_data["ip"]
|
|
116
|
+
self.provider_name = provider_name
|
|
117
|
+
self.ssh_user = "deploy"
|
|
118
|
+
|
|
119
|
+
def compute_source_hash(
|
|
120
|
+
self, local_path: str, exclude: list[str] | None = None
|
|
121
|
+
) -> str:
|
|
122
|
+
"""Compute hash of local source directory."""
|
|
123
|
+
source_path = Path(local_path)
|
|
124
|
+
if exclude is None:
|
|
125
|
+
exclude = [".git"]
|
|
126
|
+
|
|
127
|
+
hasher = hashlib.md5()
|
|
128
|
+
for f in sorted(source_path.rglob("*")):
|
|
129
|
+
if f.is_file() and not any(ex in str(f) for ex in exclude):
|
|
130
|
+
hasher.update(str(f.relative_to(source_path)).encode())
|
|
131
|
+
hasher.update(f.read_bytes())
|
|
132
|
+
return hasher.hexdigest()
|
|
133
|
+
|
|
134
|
+
def select_app(self, app_type: str, app_name: str | None = None) -> dict:
|
|
135
|
+
"""Select app from instance data.
|
|
136
|
+
|
|
137
|
+
:param app_type: App type (nuxt or fastapi)
|
|
138
|
+
:param app_name: App name (if multiple apps exist)
|
|
139
|
+
:return: App data dict
|
|
140
|
+
"""
|
|
141
|
+
apps = [app for app in self.instance.get("apps", []) if app["type"] == app_type]
|
|
142
|
+
|
|
143
|
+
if app_name is None:
|
|
144
|
+
if len(apps) == 1:
|
|
145
|
+
return apps[0]
|
|
146
|
+
elif len(apps) > 1:
|
|
147
|
+
app_names = ", ".join(app["name"] for app in apps)
|
|
148
|
+
error(
|
|
149
|
+
f"Multiple {app_type} apps found: {app_names}. Use --app-name to specify."
|
|
150
|
+
)
|
|
151
|
+
else:
|
|
152
|
+
# Fallback for old single-app instances
|
|
153
|
+
return {"name": "app", "type": app_type}
|
|
154
|
+
|
|
155
|
+
for app in apps:
|
|
156
|
+
if app["name"] == app_name:
|
|
157
|
+
return app
|
|
158
|
+
|
|
159
|
+
error(f"App '{app_name}' not found in instance")
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
class NuxtApp(BaseApp):
    """Nuxt app deployment via Node.js and PM2 (cluster mode)."""

    def __init__(
        self,
        instance_data: dict,
        provider_name: str,
        *,
        user: str,
        app_name: str = "nuxt",
        port: int = 3000,
        node_version: int = 20,
    ):
        """Initialize Nuxt deployer.

        :param instance_data: Instance data from .instance.json
        :param provider_name: Cloud provider name
        :param user: Unix user that owns and runs the app
        :param app_name: PM2 process name and app directory name
        :param port: Port the Nuxt server listens on
        :param node_version: Node.js major version (may be overridden by
            ``detect_node_version`` during ``sync``)
        """
        super().__init__(instance_data, provider_name)
        self.user = user
        self.app_name = app_name
        self.port = port
        self.node_version = node_version
        self.app_dir = f"/home/{user}/{app_name}"

    def detect_node_version(self, source: str) -> int | None:
        """Detect Node.js version from project files.

        Checks ``.nvmrc`` / ``.node-version`` first, then the
        ``engines.node`` constraint in ``package.json``.

        :param source: Path to source directory
        :return: Major Node.js version number, or None if not declared
        """
        import re

        source_path = Path(source)

        for filename in [".nvmrc", ".node-version"]:
            version_file = source_path / filename
            if version_file.exists():
                content = version_file.read_text().strip().lstrip("v")
                try:
                    return int(content.split(".")[0])
                except ValueError:
                    pass

        package_json = source_path / "package.json"
        if package_json.exists():
            try:
                data = json.loads(package_json.read_text())
                node_constraint = data.get("engines", {}).get("node", "")
                # First number in the constraint, e.g. ">=20.1" -> 20
                match = re.search(r"(\d+)", node_constraint)
                if match:
                    return int(match.group(1))
            except (json.JSONDecodeError, ValueError):
                pass

        return None

    def generate_pm2_config(self) -> str:
        """Generate PM2 ecosystem config.

        The config parses the app's .env at PM2 start time and merges it into
        the process environment (quotes stripped), on top of NODE_ENV/PORT.
        """
        return dedent(f"""
            const fs = require('fs');
            const path = require('path');

            // Load .env file if it exists
            const envPath = path.join(__dirname, '.env');
            const envVars = {{}};
            if (fs.existsSync(envPath)) {{
                const content = fs.readFileSync(envPath, 'utf-8');
                content.split('\\n').forEach(line => {{
                    const trimmed = line.trim();
                    if (trimmed && !trimmed.startsWith('#')) {{
                        const [key, ...valueParts] = trimmed.split('=');
                        if (key && valueParts.length) {{
                            let value = valueParts.join('=').trim();
                            if ((value.startsWith('"') && value.endsWith('"')) ||
                                (value.startsWith("'") && value.endsWith("'"))) {{
                                value = value.slice(1, -1);
                            }}
                            envVars[key.trim()] = value;
                        }}
                    }}
                }});
            }}

            module.exports = {{
                apps: [{{
                    name: '{self.app_name}',
                    script: './.output/server/index.mjs',
                    cwd: '{self.app_dir}',
                    instances: 'max',
                    exec_mode: 'cluster',
                    env: {{
                        NODE_ENV: 'production',
                        PORT: {self.port},
                        ...envVars
                    }}
                }}]
            }};
        """).strip()

    def sync(
        self,
        source: str,
        *,
        local_build: bool = True,
        force: bool = False,
    ):
        """Sync Nuxt app to server.

        Installs Node.js/PM2 if missing, uploads the source (or a local
        build), builds on the server when requested, and (re)starts PM2.
        Skips upload/build entirely when the source hash is unchanged.

        :param source: Local source directory
        :param local_build: Build locally instead of on server
        :param force: Force rebuild even if source unchanged
        """
        source = str(Path(source).resolve())

        if not Path(source).exists():
            error(f"Source directory not found: {source}")

        detected_version = self.detect_node_version(source)
        if detected_version:
            log(f"Detected Node.js version {detected_version} from project config")
            self.node_version = detected_version

        log(f"Deploying to {self.ip}...")

        log(f"Installing Node.js {self.node_version} and PM2...")
        node_script = dedent(f"""
            set -e
            if ! command -v node &> /dev/null; then
                curl -fsSL https://deb.nodesource.com/setup_{self.node_version}.x | sudo bash -
                sudo apt-get install -y nodejs
            fi
            node --version
            if ! command -v pm2 &> /dev/null; then
                sudo npm install -g pm2
            fi
            sudo mkdir -p {self.app_dir}
            sudo chown -R {self.user}:{self.user} {self.app_dir}
        """).strip()
        ssh_script(self.ip, node_script, user=self.ssh_user, show_output=True)

        log("Generating PM2 ecosystem config...")
        ecosystem_config = self.generate_pm2_config()
        ssh_write_file(
            self.ip,
            f"{self.app_dir}/ecosystem.config.cjs",
            ecosystem_config,
            user=self.ssh_user,
        )
        ssh(
            self.ip,
            f"sudo chown {self.user}:{self.user} {self.app_dir}/ecosystem.config.cjs",
            user=self.ssh_user,
        )

        nuxt_exclude = [
            "node_modules",
            ".git",
            ".output",
            ".nuxt",
            "public/projects",
            "data/scripts/models",
            "json/projects",
        ]
        local_hash = self.compute_source_hash(source, nuxt_exclude)
        try:
            remote_hash = ssh(
                self.ip,
                f"cat {self.app_dir}/.source_hash 2>/dev/null || echo ''",
                user=self.ssh_user,
            ).strip()
        except Exception:
            # Any SSH failure just means "no known hash" — do a full sync.
            remote_hash = ""

        if not force and local_hash == remote_hash and remote_hash:
            log("Source unchanged, restarting app...")
            # Prefer a zero-downtime reload; if the PM2 daemon is wedged,
            # wipe its state and cold-start from the ecosystem config.
            restart_script = dedent(f"""
                if ! su - {self.user} -c "pm2 reload {self.app_name}" 2>/dev/null; then
                    pkill -u {self.user} -f pm2 || true
                    rm -rf /home/{self.user}/.pm2 || true
                    rm -f /home/{self.user}/.pm2/*.sock /home/{self.user}/.pm2/pm2.pid 2>/dev/null || true
                    sleep 1
                    su - {self.user} -c "cd {self.app_dir} && pm2 start ecosystem.config.cjs && pm2 save"
                fi
            """).strip()
            ssh_script(self.ip, restart_script, user=self.ssh_user)
            log("App restarted")
            return

        if local_build:
            log("Building locally...")
            subprocess.run(["npm", "install"], cwd=source, check=True)
            subprocess.run(["npm", "run", "build"], cwd=source, check=True)

            if not Path(source, ".output").exists():
                error("Build failed - no .output directory")

        log("Uploading...")
        exclude = [
            "/node_modules",
            ".nuxt",
            ".git",
            "ecosystem.config.cjs",
            ".source_hash",
            "public/projects",
            "data/scripts/models",
            "json/projects",
        ]
        if not local_build:
            exclude.append(".output")
        rsync(source, self.ip, self.app_dir, exclude=exclude, user=self.ssh_user)

        if not local_build:
            log("Building on server...")
            # BUGFIX: NODE_OPTIONS must be exported inside the `su -` command.
            # `su - user` starts a fresh login shell that resets the
            # environment, so an export in the outer root shell never reached
            # the npm build and the memory cap was silently ignored.
            build_script = dedent(f"""
                set -e
                cd {self.app_dir}
                su - {self.user} -c "cd {self.app_dir} && export NODE_OPTIONS='--max-old-space-size=1024' && rm -rf package-lock.json .nuxt && npm install && npm run build"
            """).strip()
            ssh_script(self.ip, build_script, user=self.ssh_user, show_output=True)

        log("Starting app...")
        start_script = dedent(f"""
            set -e
            echo "{local_hash}" > {self.app_dir}/.source_hash
            # Legacy fallback: fix Nitro import.meta.url resolution for PM2
            # (nginx now serves .output/public directly, so this is rarely needed)
            sed -i 's/dirname(fileURLToPath(import.meta.url))/dirname(fileURLToPath(globalThis._importMeta_.url))/g' \
                {self.app_dir}/.output/server/chunks/nitro/nitro.mjs 2>/dev/null || true
            sudo chown -R {self.user}:{self.user} {self.app_dir}
            sudo pkill -u {self.user} -f pm2 || true
            sudo pkill -u {self.user} -f "node.*index.mjs" || true
            sudo rm -rf /home/{self.user}/.pm2 || true
            sudo rm -f /home/{self.user}/.pm2/*.sock /home/{self.user}/.pm2/pm2.pid 2>/dev/null || true
            sleep 1
            su - {self.user} -c "cd {self.app_dir} && pm2 start ecosystem.config.cjs && pm2 save"
            sudo pm2 startup systemd -u {self.user} --hp /home/{self.user} 2>/dev/null || true
        """).strip()
        ssh_script(self.ip, start_script, user=self.ssh_user)
        log("App deployed!")

    def restart(self):
        """Reload the PM2 app (zero-downtime reload, not a cold restart)."""
        log(f"Restarting {self.app_name}...")
        ssh_as_user(
            self.ip, self.user, f"pm2 reload {self.app_name}", ssh_user=self.ssh_user
        )
        log("App restarted")

    def status(self):
        """Show PM2 process list for the app user."""
        return ssh_as_user(self.ip, self.user, "pm2 list", ssh_user=self.ssh_user)

    def logs(self, lines: int = 50):
        """Show the last *lines* PM2 log lines without streaming."""
        return ssh_as_user(
            self.ip,
            self.user,
            f"pm2 logs {self.app_name} --lines {lines} --nostream",
            ssh_user=self.ssh_user,
        )
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
def validate_uv_lockfile(source: str):
    """Validate that uv.lock is in sync with pyproject.toml.

    :param source: Path to source directory
    :raises SystemExit: If lockfile is out of sync or uv is not installed
    """
    source_path = Path(source)
    lockfile = source_path / "uv.lock"

    # If no lockfile exists, uv sync will create one (no --frozen flag used)
    if not lockfile.exists():
        return

    # Check if lockfile is in sync using uv lock --check
    log("Validating uv.lock is in sync with pyproject.toml...")
    try:
        result = subprocess.run(
            ["uv", "lock", "--check"],
            cwd=source,
            capture_output=True,
            text=True,
        )
    except FileNotFoundError:
        # ROBUSTNESS: previously an uncaught traceback when uv is missing
        # locally; fail with an actionable message instead.
        error(
            "The 'uv' executable was not found on this machine.\n"
            " Install uv (https://docs.astral.sh/uv/) and redeploy."
        )
        return

    if result.returncode != 0:
        error(
            "uv.lock is out of sync with pyproject.toml!\n"
            f" Location: {source}\n"
            f" Error: {result.stderr.strip()}\n\n"
            "Fix this by running:\n"
            f" cd {source}\n"
            " uv lock\n\n"
            "Then redeploy."
        )

    log("✓ uv.lock is in sync")
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
class FastAPIApp(BaseApp):
    """FastAPI app deployment via uv and supervisord."""

    def __init__(
        self,
        instance_data: dict,
        provider_name: str,
        *,
        user: str,
        app_name: str = "fastapi",
        port: int = 8000,
        command: str | None = None,
    ):
        """Initialize FastAPI deployer.

        :param instance_data: Instance data from .instance.json
        :param provider_name: Cloud provider name
        :param user: Unix user that owns and runs the app
        :param app_name: Supervisor program name and app directory name
        :param port: Port the app listens on
        :param command: Launch command (must start with 'uv'); only required
            for ``sync``, not for status/logs/restart
        """
        super().__init__(instance_data, provider_name)
        self.user = user
        self.app_name = app_name
        self.port = port
        self.command = command  # Only used by sync(), not needed for status/logs/restart
        self.app_dir = f"/home/{user}/{app_name}"

    def sync(self, source: str, *, force: bool = False) -> bool:
        """Sync FastAPI app to server using supervisord.

        Validates inputs, installs uv/supervisor if needed, uploads the
        source (with AWS credentials filtered from .env on AWS), creates the
        virtualenv via ``uv sync``, writes the supervisor program config,
        and restarts the app. Skips upload entirely when the source hash is
        unchanged.

        :param source: Local source directory
        :param force: Force rebuild even if source unchanged
        :return: True if full sync, False if source unchanged
        """
        if not self.command:
            error("Command is required for sync operation")

        # IMPROVEMENT: validate the command up front. Previously this check
        # ran only after apt install, upload, and `uv sync` on the server,
        # so a bad command wasted minutes of remote work before failing.
        if not self.command.strip().startswith("uv "):
            error(f"Command must start with 'uv': {self.command}")

        source = str(Path(source).resolve())

        if not Path(source).exists():
            error(f"Source directory not found: {source}")

        if not (Path(source) / "pyproject.toml").exists():
            error(f"pyproject.toml not found in {source}")

        validate_uv_lockfile(source)

        log(f"Deploying FastAPI to {self.ip}...")

        log("Installing uv and supervisor...")
        setup_script = dedent(f"""
            set -e
            sudo apt-get update
            sudo apt-get install -y supervisor curl

            sudo mkdir -p /home/{self.user}/{self.app_name}
            sudo mkdir -p /var/log/{self.app_name}
            sudo chown -R {self.user}:{self.user} /home/{self.user}/{self.app_name}
            sudo chown -R {self.user}:{self.user} /var/log/{self.app_name}

            sudo su - {self.user} -c "curl -LsSf https://astral.sh/uv/install.sh | sh"
        """).strip()
        ssh_script(self.ip, setup_script, user=self.ssh_user, show_output=True)

        python_exclude = [".venv", "__pycache__", ".git", "*.pyc"]
        local_hash = self.compute_source_hash(source, python_exclude)
        try:
            remote_hash = ssh(
                self.ip,
                f"cat /home/{self.user}/{self.app_name}/.source_hash 2>/dev/null || echo ''",
                user=self.ssh_user,
            ).strip()
        except Exception:
            # Any SSH failure just means "no known hash" — do a full sync.
            remote_hash = ""

        if not force and local_hash == remote_hash and remote_hash:
            log("Source unchanged, restarting app...")
            ssh_script(
                self.ip,
                f"sudo supervisorctl restart {self.app_name}",
                user=self.ssh_user,
            )
            log("App restarted")
            return False

        log("Uploading...")

        # Filter AWS credentials from .env for AWS deployments
        exclude_env = filter_aws_credentials_from_env(
            source,
            self.provider_name,
            self.ip,
            f"/home/{self.user}/{self.app_name}/.env",
            self.ssh_user,
        )

        exclude = [".venv", "__pycache__", ".git", "*.pyc", ".source_hash"]
        if exclude_env:
            # A filtered .env was already uploaded; don't overwrite it.
            exclude.append(".env")

        rsync(
            source,
            self.ip,
            f"/home/{self.user}/{self.app_name}",
            exclude=exclude,
            user=self.ssh_user,
        )

        log("Setting up Python environment...")
        venv_script = dedent(f"""
            set -e
            sudo chown -R {self.user}:{self.user} /home/{self.user}/{self.app_name}

            FROZEN=""
            if [ -f "/home/{self.user}/{self.app_name}/uv.lock" ]; then
                FROZEN="--frozen"
            fi

            sudo su - {self.user} -c "cd /home/{self.user}/{self.app_name} && ~/.local/bin/uv sync $FROZEN"
        """).strip()
        ssh_script(self.ip, venv_script, user=self.ssh_user)

        log("Configuring supervisord...")
        supervisor_config = dedent(f"""
            [program:{self.app_name}]
            directory=/home/{self.user}/{self.app_name}
            command={self.command}
            user={self.user}
            autostart=true
            autorestart=true
            stopasgroup=true
            killasgroup=true
            stderr_logfile=/var/log/{self.app_name}/error.log
            stdout_logfile=/var/log/{self.app_name}/access.log
            environment=PATH="/home/{self.user}/.local/bin:/usr/local/bin:/usr/bin:/bin"
        """).strip()
        ssh_write_file(
            self.ip,
            f"/etc/supervisor/conf.d/{self.app_name}.conf",
            supervisor_config,
            user=self.ssh_user,
        )

        hash_write_cmd = f'echo "{local_hash}" | sudo tee /home/{self.user}/{self.app_name}/.source_hash > /dev/null'

        ssh_script(
            self.ip,
            f"{hash_write_cmd} && "
            f"sudo chown {self.user}:{self.user} /home/{self.user}/{self.app_name}/.source_hash && "
            f"sudo supervisorctl reread && sudo supervisorctl update && sudo supervisorctl restart {self.app_name}",
            user=self.ssh_user,
        )
        log("FastAPI app deployed!")
        return True

    def restart(self):
        """Restart supervisord app."""
        log(f"Restarting {self.app_name}...")
        ssh(self.ip, f"sudo supervisorctl restart {self.app_name}", user=self.ssh_user)
        log("App restarted")

    def status(self):
        """Show supervisord status."""
        return ssh(self.ip, "sudo supervisorctl status", user=self.ssh_user)

    def logs(self, lines: int = 50):
        """Show the last *lines* lines of the app's supervisord logs."""
        log(f"Last {lines} lines of {self.app_name} logs:")
        return ssh(
            self.ip,
            f"tail -n {lines} /var/log/{self.app_name}/access.log /var/log/{self.app_name}/error.log 2>/dev/null || echo 'No logs found'",
            user=self.ssh_user,
        )
|