tetra-rp 0.6.0__py3-none-any.whl → 0.24.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tetra_rp/__init__.py +109 -19
- tetra_rp/cli/commands/__init__.py +1 -0
- tetra_rp/cli/commands/apps.py +143 -0
- tetra_rp/cli/commands/build.py +1082 -0
- tetra_rp/cli/commands/build_utils/__init__.py +1 -0
- tetra_rp/cli/commands/build_utils/handler_generator.py +176 -0
- tetra_rp/cli/commands/build_utils/lb_handler_generator.py +309 -0
- tetra_rp/cli/commands/build_utils/manifest.py +430 -0
- tetra_rp/cli/commands/build_utils/mothership_handler_generator.py +75 -0
- tetra_rp/cli/commands/build_utils/scanner.py +596 -0
- tetra_rp/cli/commands/deploy.py +580 -0
- tetra_rp/cli/commands/init.py +123 -0
- tetra_rp/cli/commands/resource.py +108 -0
- tetra_rp/cli/commands/run.py +296 -0
- tetra_rp/cli/commands/test_mothership.py +458 -0
- tetra_rp/cli/commands/undeploy.py +533 -0
- tetra_rp/cli/main.py +97 -0
- tetra_rp/cli/utils/__init__.py +1 -0
- tetra_rp/cli/utils/app.py +15 -0
- tetra_rp/cli/utils/conda.py +127 -0
- tetra_rp/cli/utils/deployment.py +530 -0
- tetra_rp/cli/utils/ignore.py +143 -0
- tetra_rp/cli/utils/skeleton.py +184 -0
- tetra_rp/cli/utils/skeleton_template/.env.example +4 -0
- tetra_rp/cli/utils/skeleton_template/.flashignore +40 -0
- tetra_rp/cli/utils/skeleton_template/.gitignore +44 -0
- tetra_rp/cli/utils/skeleton_template/README.md +263 -0
- tetra_rp/cli/utils/skeleton_template/main.py +44 -0
- tetra_rp/cli/utils/skeleton_template/mothership.py +55 -0
- tetra_rp/cli/utils/skeleton_template/pyproject.toml +58 -0
- tetra_rp/cli/utils/skeleton_template/requirements.txt +1 -0
- tetra_rp/cli/utils/skeleton_template/workers/__init__.py +0 -0
- tetra_rp/cli/utils/skeleton_template/workers/cpu/__init__.py +19 -0
- tetra_rp/cli/utils/skeleton_template/workers/cpu/endpoint.py +36 -0
- tetra_rp/cli/utils/skeleton_template/workers/gpu/__init__.py +19 -0
- tetra_rp/cli/utils/skeleton_template/workers/gpu/endpoint.py +61 -0
- tetra_rp/client.py +136 -33
- tetra_rp/config.py +29 -0
- tetra_rp/core/api/runpod.py +591 -39
- tetra_rp/core/deployment.py +232 -0
- tetra_rp/core/discovery.py +425 -0
- tetra_rp/core/exceptions.py +50 -0
- tetra_rp/core/resources/__init__.py +27 -9
- tetra_rp/core/resources/app.py +738 -0
- tetra_rp/core/resources/base.py +139 -4
- tetra_rp/core/resources/constants.py +21 -0
- tetra_rp/core/resources/cpu.py +115 -13
- tetra_rp/core/resources/gpu.py +182 -16
- tetra_rp/core/resources/live_serverless.py +153 -16
- tetra_rp/core/resources/load_balancer_sls_resource.py +440 -0
- tetra_rp/core/resources/network_volume.py +126 -31
- tetra_rp/core/resources/resource_manager.py +436 -35
- tetra_rp/core/resources/serverless.py +537 -120
- tetra_rp/core/resources/serverless_cpu.py +201 -0
- tetra_rp/core/resources/template.py +1 -59
- tetra_rp/core/utils/constants.py +10 -0
- tetra_rp/core/utils/file_lock.py +260 -0
- tetra_rp/core/utils/http.py +67 -0
- tetra_rp/core/utils/lru_cache.py +75 -0
- tetra_rp/core/utils/singleton.py +36 -1
- tetra_rp/core/validation.py +44 -0
- tetra_rp/execute_class.py +301 -0
- tetra_rp/protos/remote_execution.py +98 -9
- tetra_rp/runtime/__init__.py +1 -0
- tetra_rp/runtime/circuit_breaker.py +274 -0
- tetra_rp/runtime/config.py +12 -0
- tetra_rp/runtime/exceptions.py +49 -0
- tetra_rp/runtime/generic_handler.py +206 -0
- tetra_rp/runtime/lb_handler.py +189 -0
- tetra_rp/runtime/load_balancer.py +160 -0
- tetra_rp/runtime/manifest_fetcher.py +192 -0
- tetra_rp/runtime/metrics.py +325 -0
- tetra_rp/runtime/models.py +73 -0
- tetra_rp/runtime/mothership_provisioner.py +512 -0
- tetra_rp/runtime/production_wrapper.py +266 -0
- tetra_rp/runtime/reliability_config.py +149 -0
- tetra_rp/runtime/retry_manager.py +118 -0
- tetra_rp/runtime/serialization.py +124 -0
- tetra_rp/runtime/service_registry.py +346 -0
- tetra_rp/runtime/state_manager_client.py +248 -0
- tetra_rp/stubs/live_serverless.py +35 -17
- tetra_rp/stubs/load_balancer_sls.py +357 -0
- tetra_rp/stubs/registry.py +145 -19
- {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/METADATA +398 -60
- tetra_rp-0.24.0.dist-info/RECORD +99 -0
- {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/WHEEL +1 -1
- tetra_rp-0.24.0.dist-info/entry_points.txt +2 -0
- tetra_rp/core/pool/cluster_manager.py +0 -177
- tetra_rp/core/pool/dataclass.py +0 -18
- tetra_rp/core/pool/ex.py +0 -38
- tetra_rp/core/pool/job.py +0 -22
- tetra_rp/core/pool/worker.py +0 -19
- tetra_rp/core/resources/utils.py +0 -50
- tetra_rp/core/utils/json.py +0 -33
- tetra_rp-0.6.0.dist-info/RECORD +0 -39
- /tetra_rp/{core/pool → cli}/__init__.py +0 -0
- {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1082 @@
|
|
|
1
|
+
"""Flash build command - Package Flash applications for deployment."""
|
|
2
|
+
|
|
3
|
+
import ast
|
|
4
|
+
import importlib.util
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
import re
|
|
8
|
+
import shutil
|
|
9
|
+
import subprocess
|
|
10
|
+
import sys
|
|
11
|
+
import tarfile
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
import typer
|
|
16
|
+
from rich.console import Console
|
|
17
|
+
from rich.panel import Panel
|
|
18
|
+
from rich.progress import Progress, SpinnerColumn, TextColumn
|
|
19
|
+
from rich.table import Table
|
|
20
|
+
|
|
21
|
+
try:
|
|
22
|
+
import tomllib # Python 3.11+
|
|
23
|
+
except ImportError:
|
|
24
|
+
import tomli as tomllib # Python 3.9-3.10
|
|
25
|
+
|
|
26
|
+
from ..utils.ignore import get_file_tree, load_ignore_patterns
|
|
27
|
+
from .build_utils.handler_generator import HandlerGenerator
|
|
28
|
+
from .build_utils.lb_handler_generator import LBHandlerGenerator
|
|
29
|
+
from .build_utils.manifest import ManifestBuilder
|
|
30
|
+
from .build_utils.mothership_handler_generator import generate_mothership_handler
|
|
31
|
+
from .build_utils.scanner import RemoteDecoratorScanner
|
|
32
|
+
|
|
33
|
+
# Module-level logger; uses the standard per-module name convention.
logger = logging.getLogger(__name__)

# Shared Rich console for all user-facing build output in this module.
console = Console()

# Constants
# Timeout for pip install operations (large packages like torch can take 5-10 minutes)
PIP_INSTALL_TIMEOUT_SECONDS = 600
# Timeout for ensurepip (lightweight operation, typically completes in <10 seconds)
ENSUREPIP_TIMEOUT_SECONDS = 30
# Timeout for version checks (should be instant)
VERSION_CHECK_TIMEOUT_SECONDS = 5

# RunPod serverless deployment limit (hard limit enforced by platform)
# Archives larger than this trigger the size warning panel in build_command.
RUNPOD_MAX_ARCHIVE_SIZE_MB = 500

# RunPod Serverless platform specifications
# RunPod serverless runs on x86_64 Linux, regardless of build platform
# Support multiple manylinux versions (newer versions are backward compatible)
# NOTE(review): presumably consumed by the pip-install step to pin wheel
# platforms — the consumer is outside this view; confirm before changing.
RUNPOD_PLATFORMS = [
    "manylinux_2_28_x86_64",  # glibc 2.28+ (newest, for Python 3.13+)
    "manylinux_2_17_x86_64",  # glibc 2.17+ (covers most modern packages)
    "manylinux2014_x86_64",  # glibc 2.17 (legacy compatibility)
]
RUNPOD_PYTHON_IMPL = "cp"  # CPython implementation

# Pip command identifiers
UV_COMMAND = "uv"
PIP_MODULE = "pip"
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _find_local_tetra_rp() -> Optional[Path]:
|
|
64
|
+
"""Find local tetra_rp source directory if available.
|
|
65
|
+
|
|
66
|
+
Returns:
|
|
67
|
+
Path to tetra_rp package directory, or None if not found or installed from PyPI
|
|
68
|
+
"""
|
|
69
|
+
try:
|
|
70
|
+
spec = importlib.util.find_spec("tetra_rp")
|
|
71
|
+
|
|
72
|
+
if not spec or not spec.origin:
|
|
73
|
+
return None
|
|
74
|
+
|
|
75
|
+
# Get package directory (spec.origin is __init__.py path)
|
|
76
|
+
pkg_dir = Path(spec.origin).parent
|
|
77
|
+
|
|
78
|
+
# Skip if installed in site-packages (PyPI install)
|
|
79
|
+
if "site-packages" in str(pkg_dir):
|
|
80
|
+
return None
|
|
81
|
+
|
|
82
|
+
# Must be development install
|
|
83
|
+
return pkg_dir
|
|
84
|
+
|
|
85
|
+
except Exception:
|
|
86
|
+
return None
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _bundle_local_tetra_rp(build_dir: Path) -> bool:
    """Vendor the development tetra_rp package into the build tree.

    Args:
        build_dir: Target build directory

    Returns:
        True when the local source was copied in, False when no local
        checkout could be located (e.g. tetra_rp came from PyPI).
    """
    source_pkg = _find_local_tetra_rp()
    if source_pkg is None:
        console.print(
            "[yellow]⚠ Local tetra_rp not found or using PyPI install[/yellow]"
        )
        return False

    # Replace any previously bundled copy so the build stays deterministic.
    target = build_dir / "tetra_rp"
    if target.exists():
        shutil.rmtree(target)

    skip_patterns = shutil.ignore_patterns("__pycache__", "*.pyc", ".pytest_cache")
    shutil.copytree(source_pkg, target, ignore=skip_patterns)

    console.print(f"[cyan]✓ Bundled local tetra_rp from {source_pkg}[/cyan]")
    return True
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def _extract_tetra_rp_dependencies(tetra_pkg_dir: Path) -> list[str]:
    """Read tetra_rp's runtime requirements from its pyproject.toml.

    Bundling the local tetra_rp source means pip never installs it, so its
    declared dependencies must be added to the build explicitly.

    Args:
        tetra_pkg_dir: Path to tetra_rp package directory (src/tetra_rp)

    Returns:
        List of dependency strings, empty list if parsing fails
    """
    try:
        # src/tetra_rp -> src -> project root, where pyproject.toml lives.
        toml_file = tetra_pkg_dir.parent.parent / "pyproject.toml"

        if not toml_file.exists():
            console.print(
                "[yellow]⚠ tetra_rp pyproject.toml not found, "
                "dependencies may be missing[/yellow]"
            )
            return []

        with open(toml_file, "rb") as fh:
            parsed = tomllib.load(fh)

        # [project.dependencies] per PEP 621; absent key means no deps.
        deps = parsed.get("project", {}).get("dependencies", [])
        if deps:
            console.print(
                f"[dim]Found {len(deps)} tetra_rp dependencies to install[/dim]"
            )
        return deps

    except Exception as e:
        console.print(f"[yellow]⚠ Failed to parse tetra_rp dependencies: {e}[/yellow]")
        return []
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def _remove_tetra_from_requirements(build_dir: Path) -> None:
|
|
166
|
+
"""Remove tetra_rp from requirements.txt and clean up dist-info since we bundled source."""
|
|
167
|
+
req_file = build_dir / "requirements.txt"
|
|
168
|
+
|
|
169
|
+
if not req_file.exists():
|
|
170
|
+
return
|
|
171
|
+
|
|
172
|
+
lines = req_file.read_text().splitlines()
|
|
173
|
+
filtered = [
|
|
174
|
+
line
|
|
175
|
+
for line in lines
|
|
176
|
+
if not line.strip().startswith("tetra_rp")
|
|
177
|
+
and not line.strip().startswith("tetra-rp")
|
|
178
|
+
]
|
|
179
|
+
|
|
180
|
+
req_file.write_text("\n".join(filtered) + "\n")
|
|
181
|
+
|
|
182
|
+
# Remove tetra_rp dist-info directory to avoid conflicts with bundled source
|
|
183
|
+
# dist-info is created by pip install and can confuse Python's import system
|
|
184
|
+
for dist_info in build_dir.glob("tetra_rp-*.dist-info"):
|
|
185
|
+
if dist_info.is_dir():
|
|
186
|
+
shutil.rmtree(dist_info)
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def build_command(
    no_deps: bool = typer.Option(
        False, "--no-deps", help="Skip transitive dependencies during pip install"
    ),
    keep_build: bool = typer.Option(
        False, "--keep-build", help="Keep .build directory after creating archive"
    ),
    output_name: str | None = typer.Option(
        None, "--output", "-o", help="Custom archive name (default: archive.tar.gz)"
    ),
    exclude: str | None = typer.Option(
        None,
        "--exclude",
        help="Comma-separated packages to exclude (e.g., 'torch,torchvision')",
    ),
    use_local_tetra: bool = typer.Option(
        False,
        "--use-local-tetra",
        help="Bundle local tetra_rp source instead of PyPI version (for development/testing)",
    ),
):
    """
    Build Flash application for deployment.

    Packages the application code and dependencies into a self-contained tarball,
    similar to AWS Lambda packaging. All pip packages are installed as local modules.

    Pipeline: validate project -> create .flash/.build -> copy files ->
    scan for @remote decorators and write flash_manifest.json -> generate
    LB/QB/mothership handlers -> install requirements -> optionally bundle
    local tetra_rp -> strip bytecode -> tar into .flash/<archive> ->
    warn if over the RunPod size limit -> clean up.

    Raises:
        typer.Exit: on any validation, analysis, or install failure (exit 1).

    Examples:
        flash build                     # Build with all dependencies
        flash build --no-deps           # Skip transitive dependencies
        flash build --keep-build        # Keep temporary build directory
        flash build -o my-app.tar.gz    # Custom archive name
        flash build --exclude torch,torchvision  # Exclude large packages (assume in base image)
    """
    try:
        # Validate project structure
        project_dir, app_name = discover_flash_project()

        if not validate_project_structure(project_dir):
            console.print("[red]Error:[/red] Not a valid Flash project")
            console.print("Run [bold]flash init[/bold] to create a Flash project")
            raise typer.Exit(1)

        # Create build directory first to ensure clean state before collecting files
        build_dir = create_build_directory(project_dir, app_name)

        # Parse exclusions (normalized to lowercase; matching is exact-name)
        excluded_packages = []
        if exclude:
            excluded_packages = [pkg.strip().lower() for pkg in exclude.split(",")]

        # Display configuration
        _display_build_config(
            project_dir, app_name, no_deps, keep_build, output_name, excluded_packages
        )

        # Execute build
        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            console=console,
        ) as progress:
            # Load ignore patterns (.flashignore-style; see utils.ignore)
            ignore_task = progress.add_task("Loading ignore patterns...")
            spec = load_ignore_patterns(project_dir)
            progress.update(ignore_task, description="[green]✓ Loaded ignore patterns")
            progress.stop_task(ignore_task)

            # Collect files that survive the ignore patterns
            collect_task = progress.add_task("Collecting project files...")
            files = get_file_tree(project_dir, spec)
            progress.update(
                collect_task,
                description=f"[green]✓ Found {len(files)} files to package",
            )
            progress.stop_task(collect_task)

            # Note: build directory already created before progress tracking;
            # this task exists only to surface the step in the UI.
            build_task = progress.add_task("Creating build directory...")
            progress.update(
                build_task,
                description="[green]✓ Created .flash/.build/",
            )
            progress.stop_task(build_task)

            # This inner try cleans up build_dir on fatal failures during
            # copy/scan/handler generation, then re-raises.
            try:
                # Copy files
                copy_task = progress.add_task("Copying project files...")
                copy_project_files(files, project_dir, build_dir)
                progress.update(
                    copy_task, description=f"[green]✓ Copied {len(files)} files"
                )
                progress.stop_task(copy_task)

                # Generate handlers and manifest
                manifest_task = progress.add_task("Generating service manifest...")
                try:
                    scanner = RemoteDecoratorScanner(build_dir)
                    remote_functions = scanner.discover_remote_functions()

                    # Always build manifest (includes mothership even without @remote functions)
                    manifest_builder = ManifestBuilder(
                        app_name, remote_functions, scanner, build_dir=build_dir
                    )
                    manifest = manifest_builder.build()
                    manifest_path = build_dir / "flash_manifest.json"
                    manifest_path.write_text(json.dumps(manifest, indent=2))

                    # Copy manifest to .flash/ directory for deployment reference
                    # This avoids needing to extract from tarball during deploy
                    flash_dir = project_dir / ".flash"
                    deployment_manifest_path = flash_dir / "flash_manifest.json"
                    shutil.copy2(manifest_path, deployment_manifest_path)

                    # Generate handler files if there are resources
                    handler_paths = []
                    manifest_resources = manifest.get("resources", {})

                    if manifest_resources:
                        # Separate resources by type
                        # Use flag determined by isinstance() at scan time
                        lb_resources = {
                            name: data
                            for name, data in manifest_resources.items()
                            if data.get("is_load_balanced", False)
                        }
                        qb_resources = {
                            name: data
                            for name, data in manifest_resources.items()
                            if not data.get("is_load_balanced", False)
                        }

                        # Generate LB handlers
                        if lb_resources:
                            lb_gen = LBHandlerGenerator(manifest, build_dir)
                            handler_paths.extend(lb_gen.generate_handlers())

                        # Generate QB handlers
                        if qb_resources:
                            qb_gen = HandlerGenerator(manifest, build_dir)
                            handler_paths.extend(qb_gen.generate_handlers())

                        # Generate mothership handler if present in manifest
                        mothership_resources = {
                            name: data
                            for name, data in manifest_resources.items()
                            if data.get("is_mothership", False)
                        }
                        if mothership_resources:
                            # NOTE(review): the output path is constant, so
                            # multiple mothership resources would overwrite
                            # one another — presumably at most one exists;
                            # confirm against the scanner.
                            for (
                                resource_name,
                                resource_data,
                            ) in mothership_resources.items():
                                mothership_handler_path = (
                                    build_dir / "handlers" / "handler_mothership.py"
                                )
                                generate_mothership_handler(
                                    main_file=resource_data.get("main_file", "main.py"),
                                    app_variable=resource_data.get(
                                        "app_variable", "app"
                                    ),
                                    output_path=mothership_handler_path,
                                )
                                handler_paths.append(str(mothership_handler_path))

                    if handler_paths:
                        progress.update(
                            manifest_task,
                            description=f"[green]✓ Generated {len(handler_paths)} handlers and manifest",
                        )
                    elif manifest_resources:
                        progress.update(
                            manifest_task,
                            description=f"[green]✓ Generated manifest with {len(manifest_resources)} resources",
                        )
                    else:
                        progress.update(
                            manifest_task,
                            description="[yellow]⚠ No resources detected",
                        )

                except (ImportError, SyntaxError) as e:
                    # User code failed to parse/import: fatal.
                    progress.stop_task(manifest_task)
                    console.print(f"[red]Error:[/red] Code analysis failed: {e}")
                    logger.exception("Code analysis failed")
                    raise typer.Exit(1)
                except ValueError as e:
                    # Validation errors from handler generation: fatal.
                    progress.stop_task(manifest_task)
                    console.print(f"[red]Error:[/red] {e}")
                    logger.exception("Handler generation validation failed")
                    raise typer.Exit(1)
                except Exception as e:
                    # Any other handler-generation failure is non-fatal:
                    # warn and continue the build.
                    progress.stop_task(manifest_task)
                    logger.exception("Handler generation failed")
                    console.print(
                        f"[yellow]Warning:[/yellow] Handler generation failed: {e}"
                    )

                progress.stop_task(manifest_task)

            except typer.Exit:
                # Clean up on fatal errors (ImportError, SyntaxError, ValueError)
                if build_dir.exists():
                    shutil.rmtree(build_dir)
                raise
            except Exception as e:
                # Clean up on unexpected errors
                if build_dir.exists():
                    shutil.rmtree(build_dir)
                console.print(f"[red]Error:[/red] Build failed: {e}")
                logger.exception("Build failed")
                raise typer.Exit(1)

            # Extract tetra_rp dependencies if bundling local version
            tetra_deps = []
            if use_local_tetra:
                tetra_pkg = _find_local_tetra_rp()
                if tetra_pkg:
                    tetra_deps = _extract_tetra_rp_dependencies(tetra_pkg)

            # Install dependencies
            deps_task = progress.add_task("Installing dependencies...")
            requirements = collect_requirements(project_dir, build_dir)

            # Add tetra_rp dependencies if bundling local version
            # This ensures all tetra_rp runtime dependencies are available in the build
            requirements.extend(tetra_deps)

            # Filter out excluded packages
            if excluded_packages:
                original_count = len(requirements)
                matched_exclusions = set()
                filtered_requirements = []

                for req in requirements:
                    if should_exclude_package(req, excluded_packages):
                        # Extract which exclusion matched
                        pkg_name = extract_package_name(req)
                        if pkg_name in excluded_packages:
                            matched_exclusions.add(pkg_name)
                    else:
                        filtered_requirements.append(req)

                requirements = filtered_requirements
                excluded_count = original_count - len(requirements)

                if excluded_count > 0:
                    console.print(
                        f"[yellow]Excluded {excluded_count} package(s) "
                        f"(assumed in base image)[/yellow]"
                    )

                # Warn about exclusions that didn't match any packages
                unmatched = set(excluded_packages) - matched_exclusions
                if unmatched:
                    console.print(
                        f"[yellow]Warning: No packages matched exclusions: "
                        f"{', '.join(sorted(unmatched))}[/yellow]"
                    )

            if not requirements:
                progress.update(
                    deps_task,
                    description="[yellow]⚠ No dependencies found",
                )
            else:
                progress.update(
                    deps_task,
                    description=f"Installing {len(requirements)} packages...",
                )

                success = install_dependencies(build_dir, requirements, no_deps)

                if not success:
                    progress.stop_task(deps_task)
                    console.print("[red]Error:[/red] Failed to install dependencies")
                    raise typer.Exit(1)

                progress.update(
                    deps_task,
                    description=f"[green]✓ Installed {len(requirements)} packages",
                )

            progress.stop_task(deps_task)

            # Bundle local tetra_rp if requested (after pip install so the
            # bundled source wins over any pip-installed copy)
            if use_local_tetra:
                tetra_task = progress.add_task("Bundling local tetra_rp...")
                if _bundle_local_tetra_rp(build_dir):
                    _remove_tetra_from_requirements(build_dir)
                    progress.update(
                        tetra_task,
                        description="[green]✓ Bundled local tetra_rp",
                    )
                else:
                    progress.update(
                        tetra_task,
                        description="[yellow]⚠ Using PyPI tetra_rp",
                    )
                progress.stop_task(tetra_task)

            # Clean up Python bytecode before archiving
            cleanup_python_bytecode(build_dir)

            # Create archive
            archive_task = progress.add_task("Creating archive...")
            archive_name = output_name or "archive.tar.gz"
            archive_path = project_dir / ".flash" / archive_name

            create_tarball(build_dir, archive_path, app_name)

            # Get archive size
            size_mb = archive_path.stat().st_size / (1024 * 1024)

            progress.update(
                archive_task,
                description=f"[green]✓ Created {archive_name} ({size_mb:.1f} MB)",
            )
            progress.stop_task(archive_task)

            # Warning for size limit (non-fatal: the archive is still produced)
            if size_mb > RUNPOD_MAX_ARCHIVE_SIZE_MB:
                console.print()
                console.print(
                    Panel(
                        f"[yellow bold]⚠ WARNING: Archive exceeds RunPod limit[/yellow bold]\n\n"
                        f"[yellow]Archive size:[/yellow] {size_mb:.1f} MB\n"
                        f"[yellow]RunPod limit:[/yellow] {RUNPOD_MAX_ARCHIVE_SIZE_MB} MB\n"
                        f"[yellow]Over by:[/yellow] {size_mb - RUNPOD_MAX_ARCHIVE_SIZE_MB:.1f} MB\n\n"
                        f"[dim]Use --exclude to skip packages in base image:\n"
                        f"  flash build --exclude torch,torchvision,torchaudio[/dim]",
                        title="Deployment Size Warning",
                        border_style="yellow",
                    )
                )
                console.print()

            # Cleanup
            if not keep_build:
                cleanup_task = progress.add_task("Cleaning up...")
                cleanup_build_directory(build_dir)
                progress.update(
                    cleanup_task, description="[green]✓ Removed .build directory"
                )
                progress.stop_task(cleanup_task)

        # Success summary
        _display_build_summary(archive_path, app_name, len(files), len(requirements))

    except KeyboardInterrupt:
        console.print("\n[yellow]Build cancelled by user[/yellow]")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"\n[red]Build failed:[/red] {e}")
        import traceback

        console.print(traceback.format_exc())
        raise typer.Exit(1)
|
|
547
|
+
|
|
548
|
+
|
|
549
|
+
def discover_flash_project() -> tuple[Path, str]:
    """Resolve the Flash project directory and derive its app name.

    The current working directory is taken as the project root, and the
    directory's own name becomes the application name.

    Returns:
        Tuple of (project_dir, app_name)
    """
    cwd = Path.cwd()
    return cwd, cwd.name
|
|
563
|
+
|
|
564
|
+
|
|
565
|
+
def validate_project_structure(project_dir: Path) -> bool:
    """Check that *project_dir* looks like a Flash project.

    A valid project must contain a main.py. A main.py without a FastAPI
    reference only produces a warning, not a failure.

    Args:
        project_dir: Directory to validate

    Returns:
        True if valid Flash project
    """
    entrypoint = project_dir / "main.py"
    if not entrypoint.exists():
        console.print(f"[red]Error:[/red] main.py not found in {project_dir}")
        return False

    # Best-effort content check; an unreadable main.py is tolerated silently.
    try:
        source = entrypoint.read_text(encoding="utf-8")
    except Exception:
        return True

    if "FastAPI" not in source:
        console.print(
            "[yellow]Warning:[/yellow] main.py does not appear to have a FastAPI app"
        )
    return True
|
|
592
|
+
|
|
593
|
+
|
|
594
|
+
def create_build_directory(project_dir: Path, app_name: str) -> Path:
    """Create a fresh .flash/.build/ directory, clearing any previous one.

    Args:
        project_dir: Flash project directory
        app_name: Application name (used for archive naming, not directory structure)

    Returns:
        Path to build directory
    """
    staging_root = project_dir / ".flash"
    staging_root.mkdir(exist_ok=True)

    target = staging_root / ".build"
    # Start from a clean slate so stale artifacts never leak into the build.
    if target.exists():
        shutil.rmtree(target)
    target.mkdir(parents=True, exist_ok=True)

    return target
|
|
617
|
+
|
|
618
|
+
|
|
619
|
+
def copy_project_files(files: list[Path], source_dir: Path, dest_dir: Path) -> None:
    """Mirror the selected project files into the build directory.

    Args:
        files: List of files to copy
        source_dir: Source directory
        dest_dir: Destination directory
    """
    for src in files:
        # Preserve each file's position relative to the project root.
        target = dest_dir / src.relative_to(source_dir)
        target.parent.mkdir(parents=True, exist_ok=True)
        # copy2 keeps timestamps and permission metadata.
        shutil.copy2(src, target)
|
|
640
|
+
|
|
641
|
+
|
|
642
|
+
def cleanup_python_bytecode(build_dir: Path) -> None:
    """Delete __pycache__ directories and compiled Python artifacts.

    Bytecode produced while importing modules during the build is
    platform-specific and regenerated on the deployment target, so shipping
    it only inflates the archive.

    Args:
        build_dir: Build directory to clean up
    """
    # Whole cache directories first.
    for cache_dir in build_dir.rglob("__pycache__"):
        if cache_dir.is_dir():
            shutil.rmtree(cache_dir)

    # Stray compiled files can also live outside __pycache__.
    for suffix_glob in ("*.pyc", "*.pyo", "*.pyd"):
        for compiled in build_dir.rglob(suffix_glob):
            if compiled.is_file():
                compiled.unlink()
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
def collect_requirements(project_dir: Path, build_dir: Path) -> list[str]:
    """Gather requirements from requirements.txt plus @remote decorators.

    Args:
        project_dir: Flash project directory
        build_dir: Build directory (to check for workers)

    Returns:
        List of requirement strings, deduplicated with first-seen order kept
    """
    collected: list[str] = []

    # Entries from requirements.txt, minus blanks and comments.
    requirements_file = project_dir / "requirements.txt"
    if requirements_file.exists():
        try:
            raw_text = requirements_file.read_text(encoding="utf-8")
            for raw_line in raw_text.splitlines():
                entry = raw_line.strip()
                if entry and not entry.startswith("#"):
                    collected.append(entry)
        except Exception as e:
            console.print(
                f"[yellow]Warning:[/yellow] Failed to read requirements.txt: {e}"
            )

    # Dependencies declared inline on @remote decorators in worker modules.
    workers_dir = build_dir / "workers"
    if workers_dir.exists():
        collected.extend(extract_remote_dependencies(workers_dir))

    # dict.fromkeys deduplicates while preserving first-seen order.
    return list(dict.fromkeys(collected))
|
|
708
|
+
|
|
709
|
+
|
|
710
|
+
def extract_package_name(requirement: str) -> str:
    """
    Extract the package name from a requirement specification.

    Handles version specifiers (including the compatible-release operator
    ``~=``), extras, direct URL references (``pkg @ https://...``), and
    environment markers.

    Args:
        requirement: Requirement string (e.g., "torch>=2.0.0", "numpy[extra]")

    Returns:
        Package name in lowercase (e.g., "torch", "numpy")

    Examples:
        >>> extract_package_name("torch>=2.0.0")
        'torch'
        >>> extract_package_name("numpy[extra]")
        'numpy'
        >>> extract_package_name("my-package==1.0.0")
        'my-package'
        >>> extract_package_name("requests~=2.31")
        'requests'
    """
    # Split on the first delimiter that can follow a package name:
    # version operators (< > = ! ~), extras ([), markers (;), URL refs (@),
    # or any whitespace preceding those. The previous pattern missed "~=",
    # "@", and whitespace, so e.g. "pkg~=1.0" came back unsplit.
    return re.split(r"[<>=!~\[;@\s]", requirement.strip())[0].lower()
|
|
734
|
+
|
|
735
|
+
|
|
736
|
+
def should_exclude_package(requirement: str, exclusions: list[str]) -> bool:
    """
    Decide whether a requirement is on the exclusion list.

    Comparison uses the normalized (lowercase) package name only, so version
    pins and extras never cause matches, and similarly named packages stay
    distinct (e.g. "torch-vision" does not match an exclusion of "torch").

    Args:
        requirement: Requirement string (e.g., "torch>=2.0.0")
        exclusions: List of package names to exclude (lowercase)

    Returns:
        True if the package should be excluded, False otherwise

    Examples:
        >>> should_exclude_package("torch>=2.0.0", ["torch", "numpy"])
        True
        >>> should_exclude_package("torch-vision==0.15.0", ["torch"])
        False
    """
    return extract_package_name(requirement) in exclusions
|
|
757
|
+
|
|
758
|
+
|
|
759
|
+
def extract_remote_dependencies(workers_dir: Path) -> list[str]:
    """
    Extract dependencies from @remote decorators in worker files.

    Scans every .py file under workers_dir (except __init__.py), finds
    classes and functions — sync or async — decorated with ``@remote(...)``
    (bare name or attribute form like ``pkg.remote``), and collects the
    string literals passed via the ``dependencies=[...]`` keyword argument.

    Args:
        workers_dir: Path to workers directory

    Returns:
        List of dependency strings (may contain duplicates; caller de-dupes)
    """
    dependencies = []

    for py_file in workers_dir.glob("**/*.py"):
        if py_file.name == "__init__.py":
            continue

        # Keep the try minimal: only reading/parsing can realistically fail
        try:
            tree = ast.parse(py_file.read_text(encoding="utf-8"))
        except Exception as e:
            console.print(
                f"[yellow]Warning:[/yellow] Failed to parse {py_file.name}: {e}"
            )
            continue

        for node in ast.walk(tree):
            # AsyncFunctionDef included: @remote may decorate async workers,
            # which the previous (ClassDef, FunctionDef) check silently missed
            if not isinstance(
                node, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)
            ):
                continue
            for decorator in node.decorator_list:
                if not isinstance(decorator, ast.Call):
                    continue
                if isinstance(decorator.func, ast.Name):
                    func_name = decorator.func.id
                elif isinstance(decorator.func, ast.Attribute):
                    func_name = decorator.func.attr
                else:
                    func_name = None
                if func_name != "remote":
                    continue
                # Extract the dependencies keyword argument, literals only
                for keyword in decorator.keywords:
                    if keyword.arg != "dependencies":
                        continue
                    if isinstance(keyword.value, ast.List):
                        for elt in keyword.value.elts:
                            if isinstance(elt, ast.Constant):
                                dependencies.append(elt.value)

    return dependencies
|
|
803
|
+
|
|
804
|
+
|
|
805
|
+
def install_dependencies(
    build_dir: Path, requirements: list[str], no_deps: bool
) -> bool:
    """
    Install dependencies to build directory using pip or uv pip.

    Installs packages for Linux x86_64 platform to ensure compatibility with
    RunPod serverless, regardless of the build platform (macOS, Windows, Linux).

    Resolution order: standard pip -> ensurepip bootstrap -> uv pip fallback.
    All subprocess calls are bounded by the module-level *_TIMEOUT_SECONDS
    constants.

    Auto-installation behavior:
    - If standard pip is not available, it will be automatically installed via ensurepip
    - This modifies the current virtual environment (persists after build completes)
    - Standard pip is strongly preferred for cross-platform builds due to better
      manylinux compatibility (uv pip has known issues with manylinux_2_27+)

    Args:
        build_dir: Build directory (pip --target)
        requirements: List of requirements to install
        no_deps: If True, skip transitive dependencies

    Returns:
        True if successful (trivially True for an empty requirements list)
    """
    # Nothing to install; vacuous success keeps callers branch-free
    if not requirements:
        return True

    # Prefer standard pip over uv pip for cross-platform builds
    # Standard pip's --platform flag works correctly with manylinux tags
    # uv pip has known issues with manylinux_2_27/2_28 detection (uv issue #5106)
    pip_cmd = [sys.executable, "-m", PIP_MODULE]
    pip_available = False

    # Probe for standard pip; any subprocess failure just leaves the flag False
    try:
        result = subprocess.run(
            pip_cmd + ["--version"],
            capture_output=True,
            text=True,
            timeout=VERSION_CHECK_TIMEOUT_SECONDS,
        )
        if result.returncode == 0:
            pip_available = True
    except (subprocess.SubprocessError, FileNotFoundError):
        pass

    # If pip not available, install it using ensurepip
    # This modifies the current virtual environment (deliberate; see docstring)
    if not pip_available:
        console.print(
            "[yellow]Standard pip not found. Installing pip for reliable cross-platform builds...[/yellow]"
        )
        try:
            result = subprocess.run(
                [sys.executable, "-m", "ensurepip", "--upgrade"],
                capture_output=True,
                text=True,
                timeout=ENSUREPIP_TIMEOUT_SECONDS,
            )
            if result.returncode == 0:
                # Verify pip is now available (ensurepip success alone is not proof)
                result = subprocess.run(
                    pip_cmd + ["--version"],
                    capture_output=True,
                    text=True,
                    timeout=VERSION_CHECK_TIMEOUT_SECONDS,
                )
                if result.returncode == 0:
                    pip_available = True
                    console.print(
                        "[green]✓[/green] Standard pip installed successfully"
                    )
        except (subprocess.SubprocessError, FileNotFoundError) as e:
            # Non-fatal: the uv pip fallback below may still succeed
            console.print(f"[yellow]Warning:[/yellow] Failed to install pip: {e}")

    # If pip still not available, try uv pip (less reliable for cross-platform)
    if not pip_available:
        try:
            result = subprocess.run(
                [UV_COMMAND, PIP_MODULE, "--version"],
                capture_output=True,
                text=True,
                timeout=VERSION_CHECK_TIMEOUT_SECONDS,
            )
            if result.returncode == 0:
                # Switch the command prefix to uv for the rest of this call
                pip_cmd = [UV_COMMAND, PIP_MODULE]
                pip_available = True
                console.print(
                    f"[yellow]Warning:[/yellow] Using '{UV_COMMAND} {PIP_MODULE}' which has known issues "
                    f"with newer manylinux tags (manylinux_2_27+)"
                )
                console.print(
                    "[yellow]This may fail for Python 3.13+ with newer packages (e.g., numpy 2.4+)[/yellow]"
                )
        except (subprocess.SubprocessError, FileNotFoundError):
            pass

    # If neither available, error out with remediation hints
    if not pip_available:
        console.print(
            f"[red]Error:[/red] Neither {PIP_MODULE} nor {UV_COMMAND} {PIP_MODULE} found"
        )
        console.print(f"\n[yellow]Install {PIP_MODULE} with one of:[/yellow]")
        console.print("  • python -m ensurepip --upgrade")
        console.print(f"  • {UV_COMMAND} {PIP_MODULE} install {PIP_MODULE}")
        return False

    # Get current Python version for compatibility (e.g. "3.11")
    python_version = f"{sys.version_info.major}.{sys.version_info.minor}"

    # Determine if using uv pip or standard pip (different flag formats)
    is_uv_pip = pip_cmd[0] == UV_COMMAND

    # Build pip command with platform-specific flags for RunPod serverless
    cmd = pip_cmd + [
        "install",
        "--target",
        str(build_dir),
        "--python-version",
        python_version,
        "--upgrade",
    ]

    # Add platform-specific flags based on pip variant
    if is_uv_pip:
        # uv pip uses --python-platform with simpler values
        # Note: uv has known issues with manylinux_2_27+ detection (issue #5106)
        cmd.extend(
            [
                "--python-platform",
                "x86_64-unknown-linux-gnu",
                "--no-build",  # Don't build from source, use binary wheels only
            ]
        )
    else:
        # Standard pip uses --platform with manylinux tags
        # Specify multiple platforms for broader compatibility
        for platform in RUNPOD_PLATFORMS:
            cmd.extend(["--platform", platform])
        cmd.extend(
            [
                "--implementation",
                RUNPOD_PYTHON_IMPL,
                "--only-binary=:all:",  # required: cross-platform wheels can't be built locally
            ]
        )

    if no_deps:
        cmd.append("--no-deps")

    cmd.extend(requirements)

    # Log platform targeting info
    if is_uv_pip:
        platform_str = "x86_64-unknown-linux-gnu"
    else:
        platform_str = f"{len(RUNPOD_PLATFORMS)} manylinux variants"
    console.print(f"[dim]Installing for: {platform_str}, Python {python_version}[/dim]")

    # Run the actual install; stderr is surfaced on failure
    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=PIP_INSTALL_TIMEOUT_SECONDS,
        )

        if result.returncode != 0:
            console.print(f"[red]pip install failed:[/red]\n{result.stderr}")
            return False

        return True

    except subprocess.TimeoutExpired:
        console.print(
            f"[red]pip install timed out ({PIP_INSTALL_TIMEOUT_SECONDS} seconds)[/red]"
        )
        return False
    except Exception as e:
        # Broad catch is deliberate: any install failure should report and
        # return False rather than crash the build command
        console.print(f"[red]pip install error:[/red] {e}")
        return False
|
|
984
|
+
|
|
985
|
+
|
|
986
|
+
def create_tarball(build_dir: Path, output_path: Path, app_name: str) -> None:
    """
    Create a gzipped tarball of the build directory.

    The archive is rooted at "." so its contents unpack at the top level.

    Args:
        build_dir: Build directory to archive
        output_path: Output archive path
        app_name: Application name (unused, for compatibility)
    """
    # Drop any stale archive left over from a previous build
    output_path.unlink(missing_ok=True)

    with tarfile.open(output_path, "w:gz") as archive:
        archive.add(build_dir, arcname=".")
|
|
1002
|
+
|
|
1003
|
+
|
|
1004
|
+
def cleanup_build_directory(build_base: Path) -> None:
    """
    Remove the .build staging directory, if present.

    Args:
        build_base: .build directory to remove
    """
    if not build_base.exists():
        return
    shutil.rmtree(build_base)
|
|
1013
|
+
|
|
1014
|
+
|
|
1015
|
+
def _display_build_config(
    project_dir: Path,
    app_name: str,
    no_deps: bool,
    keep_build: bool,
    output_name: str | None,
    excluded_packages: list[str],
):
    """Render the build-configuration panel shown before a build starts."""
    archive_name = output_name or "archive.tar.gz"

    lines = [
        f"[bold]Project:[/bold] {app_name}",
        f"[bold]Directory:[/bold] {project_dir}",
        f"[bold]Archive:[/bold] .flash/{archive_name}",
        f"[bold]Skip transitive deps:[/bold] {no_deps}",
        f"[bold]Keep build dir:[/bold] {keep_build}",
    ]
    if excluded_packages:
        lines.append(
            f"[bold]Excluded packages:[/bold] {', '.join(excluded_packages)}"
        )

    panel = Panel(
        "\n".join(lines),
        title="Flash Build Configuration",
        expand=False,
    )
    console.print(panel)
|
|
1046
|
+
|
|
1047
|
+
|
|
1048
|
+
def _display_build_summary(
    archive_path: Path, app_name: str, file_count: int, dep_count: int
):
    """Render the post-build summary table and next-steps panel."""
    archive_rel = archive_path.relative_to(Path.cwd())
    size_mb = archive_path.stat().st_size / (1024 * 1024)

    summary = Table(show_header=False, box=None)
    summary.add_column("Item", style="bold")
    summary.add_column("Value", style="cyan")
    for label, value in (
        ("Application", app_name),
        ("Files packaged", str(file_count)),
        ("Dependencies", str(dep_count)),
        ("Archive", str(archive_rel)),
        ("Size", f"{size_mb:.1f} MB"),
    ):
        summary.add_row(label, value)

    console.print("\n")
    console.print(summary)

    next_steps = (
        f"[bold]{app_name}[/bold] built successfully!\n\n"
        f"[bold]Archive:[/bold] {archive_rel}\n\n"
        f"Next: Use [cyan]flash deploy[/cyan] to deploy to RunPod."
    )

    console.print(
        Panel(
            next_steps,
            title="✓ Build Complete",
            expand=False,
            border_style="green",
        )
    )
|