buildfunctions 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- buildfunctions/__init__.py +139 -1
- buildfunctions/client.py +282 -0
- buildfunctions/cpu_function.py +167 -0
- buildfunctions/cpu_sandbox.py +393 -0
- buildfunctions/dotdict.py +39 -0
- buildfunctions/errors.py +90 -0
- buildfunctions/framework.py +22 -0
- buildfunctions/gpu_function.py +241 -0
- buildfunctions/gpu_sandbox.py +443 -0
- buildfunctions/http_client.py +97 -0
- buildfunctions/memory.py +28 -0
- buildfunctions/py.typed +0 -0
- buildfunctions/resolve_code.py +109 -0
- buildfunctions/types.py +227 -0
- buildfunctions/uploader.py +198 -0
- buildfunctions-0.2.1.dist-info/METADATA +176 -0
- buildfunctions-0.2.1.dist-info/RECORD +18 -0
- {buildfunctions-0.2.0.dist-info → buildfunctions-0.2.1.dist-info}/WHEEL +1 -2
- buildfunctions/api.py +0 -2
- buildfunctions-0.2.0.dist-info/METADATA +0 -6
- buildfunctions-0.2.0.dist-info/RECORD +0 -6
- buildfunctions-0.2.0.dist-info/top_level.txt +0 -1
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
"""GPU Function - Deploy GPU-accelerated serverless functions to Buildfunctions."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from datetime import datetime, timezone
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import httpx
|
|
10
|
+
|
|
11
|
+
from buildfunctions.dotdict import DotDict
|
|
12
|
+
from buildfunctions.errors import ValidationError
|
|
13
|
+
from buildfunctions.framework import detect_framework
|
|
14
|
+
from buildfunctions.memory import parse_memory
|
|
15
|
+
from buildfunctions.resolve_code import resolve_code
|
|
16
|
+
from buildfunctions.types import DeployedFunction, GPUFunctionOptions
|
|
17
|
+
|
|
18
|
+
DEFAULT_GPU_BUILD_URL = "https://prod-gpu-build.buildfunctions.link"
|
|
19
|
+
DEFAULT_BASE_URL = "https://www.buildfunctions.com"
|
|
20
|
+
|
|
21
|
+
# Module-level state
|
|
22
|
+
_global_api_token: str | None = None
|
|
23
|
+
_global_gpu_build_url: str | None = None
|
|
24
|
+
_global_base_url: str | None = None
|
|
25
|
+
_global_user_id: str | None = None
|
|
26
|
+
_global_username: str | None = None
|
|
27
|
+
_global_compute_tier: str | None = None
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def set_gpu_api_token(
    api_token: str,
    gpu_build_url: str | None = None,
    base_url: str | None = None,
    user_id: str | None = None,
    username: str | None = None,
    compute_tier: str | None = None,
) -> None:
    """Record the credentials and endpoints used by subsequent GPU function calls.

    Only ``api_token`` is mandatory; every other value falls back to the
    module defaults (``DEFAULT_GPU_BUILD_URL`` / ``DEFAULT_BASE_URL``) or to
    ``None`` when not supplied.
    """
    global _global_api_token, _global_gpu_build_url, _global_base_url
    global _global_user_id, _global_username, _global_compute_tier

    _global_api_token = api_token
    _global_gpu_build_url = gpu_build_url
    _global_base_url = base_url
    _global_user_id = user_id
    _global_username = username
    _global_compute_tier = compute_tier
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _get_file_extension(language: str) -> str:
|
|
49
|
+
extensions: dict[str, str] = {
|
|
50
|
+
"javascript": ".js",
|
|
51
|
+
"typescript": ".ts",
|
|
52
|
+
"python": ".py",
|
|
53
|
+
"go": ".go",
|
|
54
|
+
"shell": ".sh",
|
|
55
|
+
}
|
|
56
|
+
return extensions.get(language, ".js")
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _get_default_runtime(language: str) -> str:
|
|
60
|
+
if language == "javascript":
|
|
61
|
+
raise ValidationError('JavaScript requires explicit runtime: "nodejs" or "deno"')
|
|
62
|
+
return language
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _format_requirements(requirements: str | list[str] | None) -> str:
|
|
66
|
+
if not requirements:
|
|
67
|
+
return ""
|
|
68
|
+
if isinstance(requirements, list):
|
|
69
|
+
return "\n".join(requirements)
|
|
70
|
+
return requirements
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _validate_options(options: GPUFunctionOptions) -> None:
|
|
74
|
+
name = options.get("name")
|
|
75
|
+
if not name or not isinstance(name, str):
|
|
76
|
+
raise ValidationError("Function name is required")
|
|
77
|
+
|
|
78
|
+
import re
|
|
79
|
+
|
|
80
|
+
if not re.match(r"^[a-z0-9-]+$", name.lower()):
|
|
81
|
+
raise ValidationError("Function name can only contain lowercase letters, numbers, and hyphens")
|
|
82
|
+
|
|
83
|
+
code = options.get("code")
|
|
84
|
+
if not code or not isinstance(code, str):
|
|
85
|
+
raise ValidationError("Function code is required")
|
|
86
|
+
|
|
87
|
+
if not options.get("language"):
|
|
88
|
+
raise ValidationError("Language is required")
|
|
89
|
+
|
|
90
|
+
if options.get("language") != "python":
|
|
91
|
+
raise ValidationError("GPU Functions currently only support Python. Additional languages coming soon.")
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def _build_request_body(options: GPUFunctionOptions) -> dict[str, Any]:
    """Assemble the JSON payload sent to the GPU build endpoint.

    Derives defaults for runtime, GPU type (T4), memory (4096), timeout (180)
    and vCPUs (10), and mirrors the code into both ``sourceWith`` and
    ``sourceWithout`` as the build service expects.
    """
    fn_name = options["name"].lower()
    lang = options["language"]
    src = options["code"]
    cfg = options.get("config", {})
    env = options.get("env_variables", {})
    reqs = _format_requirements(options.get("dependencies"))
    rt = options.get("runtime") or _get_default_runtime(lang)

    # Environment variables go over the wire as a JSON-encoded list of
    # {key, value} pairs.
    env_pairs = [{"key": key, "value": value} for key, value in env.items()] if env else []

    return {
        "name": fn_name,
        "language": lang,
        "runtime": rt,
        "sourceWith": src,
        "sourceWithout": src,
        "fileExt": _get_file_extension(lang),
        "processorType": "GPU",
        "gpu": options.get("gpu", "T4"),
        "memoryAllocated": parse_memory(cfg.get("memory", 4096)) if cfg.get("memory") else 4096,
        "timeout": cfg.get("timeout", 180) if cfg else 180,
        "cpuCores": options.get("vcpus") or 10,  # vCPUs for the GPU function VM (hotplugged at runtime)
        "envVariables": json.dumps(env_pairs),
        "requirements": reqs,
        "cronExpression": options.get("cron_schedule") or "",
        "totalVariables": len(env) if env else 0,
        "selectedFramework": options.get("framework") or detect_framework(reqs),
        "useEmptyFolder": True,
        "selectedFunction": {
            "name": fn_name,
            "sourceWith": src,
            "runtime": rt,
            "language": lang,
            "sizeInBytes": len(src.encode("utf-8")),
        },
        "selectedModel": {
            "currentModelName": None,
            "isCreatingNewModel": True,
            "gpufProjectTitleState": "test",
            "useEmptyFolder": True,
        },
    }
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
async def _create_gpu_function(options: GPUFunctionOptions) -> DeployedFunction | None:
    """Internal function to create and deploy a GPU function.

    Resolves the code (inline string or file path), validates the options,
    posts the build payload to ``{gpu_build_url}/build``, and returns a
    DotDict describing the deployed function with an async ``delete`` method
    attached.

    Returns None on request timeout or on any non-200/201 response — errors
    from the build service are swallowed rather than raised.
    NOTE(review): callers cannot distinguish timeout from a failed build;
    confirm this silent-failure contract is intentional.
    """
    if not _global_api_token:
        raise ValidationError("API key not set. Initialize Buildfunctions client first.")

    # Snapshot module-level state so the closure below is stable even if
    # set_gpu_api_token() is called again mid-flight.
    api_token = _global_api_token
    gpu_build_url = _global_gpu_build_url or DEFAULT_GPU_BUILD_URL
    base_url = _global_base_url or DEFAULT_BASE_URL
    user_id = _global_user_id
    username = _global_username
    compute_tier = _global_compute_tier

    # "code" may be an inline source string or a file path; resolve_code
    # normalizes it to source text before validation.
    resolved_code = await resolve_code(options["code"])
    resolved_options = {**options, "code": resolved_code}
    _validate_options(resolved_options)

    resolved_runtime = resolved_options.get("runtime") or _get_default_runtime(resolved_options["language"])

    body = {
        **_build_request_body(resolved_options),
        "userId": user_id,
        "username": username,
        "computeTier": compute_tier,
        "runCommand": None,
    }

    try:
        # GPU builds can take a long time; allow up to 30 minutes.
        async with httpx.AsyncClient(timeout=httpx.Timeout(1800.0)) as client:
            response = await client.post(
                f"{gpu_build_url}/build",
                headers={
                    "Content-Type": "application/json",
                    "Connection": "keep-alive",
                },
                json=body,
            )
    except httpx.TimeoutException:
        return None

    if response.status_code not in (200, 201):
        return None

    try:
        data = response.json()
    except Exception:
        # Non-JSON body: fall back to a minimal success flag keyed on 201.
        data = {"success": response.status_code == 201}

    # The site id may live at several places depending on the response shape.
    site_id = (data.get("data") or {}).get("siteId") or data.get("siteId") or data.get("id")
    func_name = options["name"].lower()
    endpoint = data.get("endpoint") or f"https://{func_name}.buildfunctions.app"

    config = options.get("config", {})
    now = datetime.now(timezone.utc).isoformat()

    async def delete_fn() -> None:
        # Deletes go through the main web app API, not the build service.
        async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client:
            await client.request(
                "DELETE",
                f"{base_url}/api/sdk/function/delete",
                headers={
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {api_token}",
                },
                json={"siteId": site_id},
            )

    return DotDict({
        "id": site_id or "",
        "name": func_name,
        "subdomain": func_name,
        "endpoint": endpoint,
        "lambdaUrl": (data.get("data") or {}).get("sslCertificateEndpoint", ""),
        "language": options["language"],
        "runtime": resolved_runtime,
        # Memory defaults to 4096 MB when not configured.
        "lambdaMemoryAllocated": parse_memory(config.get("memory", 4096)) if config.get("memory") else 4096,
        "timeoutSeconds": config.get("timeout", 180) if config else 180,
        "isGPUF": True,
        "framework": options.get("framework", ""),
        "createdAt": now,
        "updatedAt": now,
        "delete": delete_fn,
    })
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
class GPUFunction:
    """Factory for GPU functions (mirrors the TypeScript SDK's static API)."""

    @staticmethod
    async def create(options: GPUFunctionOptions) -> DeployedFunction | None:
        """Deploy a new GPU function and return its handle (None on failure)."""
        deployed = await _create_gpu_function(options)
        return deployed
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
# Alias for direct function call style
create_gpu_function = _create_gpu_function
|
|
@@ -0,0 +1,443 @@
|
|
|
1
|
+
"""GPU Sandbox - Hardware-isolated execution environment with GPU acceleration."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import re
|
|
7
|
+
import unicodedata
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any
|
|
10
|
+
|
|
11
|
+
import httpx
|
|
12
|
+
|
|
13
|
+
from buildfunctions.dotdict import DotDict
|
|
14
|
+
from buildfunctions.errors import BuildfunctionsError, ValidationError
|
|
15
|
+
from buildfunctions.framework import detect_framework
|
|
16
|
+
from buildfunctions.memory import parse_memory
|
|
17
|
+
from buildfunctions.resolve_code import resolve_code
|
|
18
|
+
from buildfunctions.types import (
|
|
19
|
+
FileMetadata,
|
|
20
|
+
GPUSandboxConfig,
|
|
21
|
+
GPUSandboxInstance,
|
|
22
|
+
GPUType,
|
|
23
|
+
RunResult,
|
|
24
|
+
UploadOptions,
|
|
25
|
+
)
|
|
26
|
+
from buildfunctions.uploader import get_files_in_directory, upload_model_files
|
|
27
|
+
|
|
28
|
+
DEFAULT_GPU_BUILD_URL = "https://prod-gpu-build.buildfunctions.link"
|
|
29
|
+
DEFAULT_BASE_URL = "https://www.buildfunctions.com"
|
|
30
|
+
|
|
31
|
+
# Module-level state
|
|
32
|
+
_global_api_token: str | None = None
|
|
33
|
+
_global_gpu_build_url: str | None = None
|
|
34
|
+
_global_base_url: str | None = None
|
|
35
|
+
_global_user_id: str | None = None
|
|
36
|
+
_global_username: str | None = None
|
|
37
|
+
_global_compute_tier: str | None = None
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def set_gpu_sandbox_api_token(
    api_token: str,
    gpu_build_url: str | None = None,
    user_id: str | None = None,
    username: str | None = None,
    compute_tier: str | None = None,
    base_url: str | None = None,
) -> None:
    """Record the credentials and endpoints used by subsequent GPU sandbox calls.

    Only ``api_token`` is mandatory; unset values fall back to the module
    defaults or ``None``.
    """
    global _global_api_token, _global_gpu_build_url, _global_user_id, _global_username, _global_compute_tier, _global_base_url

    _global_api_token = api_token
    _global_gpu_build_url = gpu_build_url
    _global_user_id = user_id
    _global_username = username
    _global_compute_tier = compute_tier
    _global_base_url = base_url
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _validate_config(config: GPUSandboxConfig) -> None:
|
|
60
|
+
name = config.get("name")
|
|
61
|
+
if not name or not isinstance(name, str):
|
|
62
|
+
raise ValidationError("Sandbox name is required")
|
|
63
|
+
|
|
64
|
+
language = config.get("language")
|
|
65
|
+
if not language or not isinstance(language, str):
|
|
66
|
+
raise ValidationError("Language is required")
|
|
67
|
+
|
|
68
|
+
if language != "python":
|
|
69
|
+
raise ValidationError("GPU Sandboxes currently only support Python. Additional languages coming soon.")
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _get_file_extension(language: str) -> str:
|
|
73
|
+
extensions: dict[str, str] = {
|
|
74
|
+
"javascript": ".js",
|
|
75
|
+
"typescript": ".ts",
|
|
76
|
+
"python": ".py",
|
|
77
|
+
"go": ".go",
|
|
78
|
+
"shell": ".sh",
|
|
79
|
+
}
|
|
80
|
+
return extensions.get(language, ".py")
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _get_default_runtime(language: str) -> str:
|
|
84
|
+
if language == "javascript":
|
|
85
|
+
raise ValidationError('JavaScript requires explicit runtime: "nodejs" or "deno"')
|
|
86
|
+
return language
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _is_local_path(path: str) -> bool:
|
|
90
|
+
if not path:
|
|
91
|
+
return False
|
|
92
|
+
return (path.startswith("/") or path.startswith("./") or path.startswith("../")) and Path(path).exists()
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _sanitize_model_name(name: str) -> str:
|
|
96
|
+
result = name.lower()
|
|
97
|
+
result = unicodedata.normalize("NFD", result)
|
|
98
|
+
result = re.sub(r"[\u0300-\u036f]", "", result)
|
|
99
|
+
result = result.strip()
|
|
100
|
+
result = result.replace("&", "-and-")
|
|
101
|
+
result = re.sub(r"[^a-z0-9 -]", "", result)
|
|
102
|
+
result = re.sub(r"\s+", "-", result)
|
|
103
|
+
result = re.sub(r"-+", "-", result)
|
|
104
|
+
return result
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def _format_requirements(requirements: str | list[str] | None) -> str:
|
|
108
|
+
if not requirements:
|
|
109
|
+
return ""
|
|
110
|
+
if isinstance(requirements, list):
|
|
111
|
+
return "\n".join(requirements)
|
|
112
|
+
return requirements
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _get_local_model_info(model_path: str, sandbox_name: str) -> dict[str, Any]:
    """Gather upload metadata for every file under a local model directory.

    Raises ValidationError when *model_path* is not a directory or contains
    no files.
    """
    model_dir = Path(model_path)
    if not model_dir.is_dir():
        raise ValidationError("Model path must be a directory")

    discovered = get_files_in_directory(model_path)
    if not discovered:
        raise ValidationError("No files found in model directory")

    # Re-shape each entry into the browser-style metadata the API expects.
    folder_entries = []
    for entry in discovered:
        folder_entries.append(
            {
                "name": entry["name"],
                "size": entry["size"],
                "type": entry["type"],
                "webkitRelativePath": entry["webkit_relative_path"],
            }
        )

    return {
        "files": discovered,
        "files_within_model_folder": folder_entries,
        "file_names_within_model_folder": [entry["name"] for entry in discovered],
        "local_upload_file_name": model_dir.name,
        "sanitized_model_name": _sanitize_model_name(sandbox_name),
    }
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def _build_request_body(config: GPUSandboxConfig, local_model_info: dict[str, Any] | None) -> dict[str, Any]:
    """Assemble the JSON payload for the GPU sandbox build endpoint.

    When *local_model_info* is provided, the payload references the model
    folder and its files; otherwise an empty-folder sandbox is requested.
    """
    sandbox_name = config["name"].lower()
    lang = config["language"]
    rt = config.get("runtime") or _get_default_runtime(lang)
    src = config.get("code", "")
    env_vars = config.get("env_variables", [])
    reqs = _format_requirements(config.get("requirements"))

    # Compute all model-dependent pieces in one branch instead of scattering
    # inline conditionals through the payload.
    if local_model_info is not None:
        use_empty_folder = False
        model_name = local_model_info["sanitized_model_name"]
        model_path = (
            f"{local_model_info['sanitized_model_name']}/mnt/storage/{local_model_info['local_upload_file_name']}"
        )
        selected_model = {
            "name": local_model_info["sanitized_model_name"],
            "modelName": local_model_info["sanitized_model_name"],
            "currentModelName": local_model_info["local_upload_file_name"],
            "isCreatingNewModel": True,
            "gpufProjectTitleState": local_model_info["sanitized_model_name"],
            "useEmptyFolder": False,
            "files": local_model_info["files_within_model_folder"],
        }
        model_files = local_model_info["files_within_model_folder"]
        model_file_names = local_model_info["file_names_within_model_folder"]
    else:
        use_empty_folder = True
        model_name = None
        model_path = None
        selected_model = {
            "currentModelName": None,
            "isCreatingNewModel": True,
            "gpufProjectTitleState": "test",
            "useEmptyFolder": True,
        }
        model_files = []
        model_file_names = []

    return {
        "name": sandbox_name,
        "language": lang,
        "runtime": rt,
        "sourceWith": src,
        "sourceWithout": src,
        "fileExt": _get_file_extension(lang),
        "processorType": "GPU",
        "sandboxType": "gpu",
        "gpu": config.get("gpu", "T4"),
        "memoryAllocated": parse_memory(config["memory"]) if config.get("memory") else 10000,
        "timeout": config.get("timeout", 300),
        "cpuCores": config.get("vcpus") or 10,  # vCPUs for the GPU sandbox VM (hotplugged at runtime)
        "envVariables": json.dumps(env_vars),
        "requirements": reqs,
        "cronExpression": "",
        "totalVariables": len(env_vars),
        "selectedFramework": detect_framework(reqs),
        "useEmptyFolder": use_empty_folder,
        "modelPath": model_path,
        "selectedFunction": {
            "name": sandbox_name,
            "sourceWith": src,
            "runtime": rt,
            "language": lang,
            "sizeInBytes": len(src.encode("utf-8")) if src else 0,
        },
        "selectedModel": selected_model,
        "filesWithinModelFolder": model_files,
        "fileNamesWithinModelFolder": model_file_names,
        "modelName": model_name,
    }
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def _create_gpu_sandbox_instance(
    sandbox_id: str,
    name: str,
    runtime: str,
    gpu: GPUType,
    endpoint: str,
    api_token: str,
    gpu_build_url: str,
    base_url: str,
) -> DotDict:
    """Create a GPU sandbox instance with run/upload/delete methods.

    Returns a DotDict handle whose async methods close over the arguments
    and over a shared ``deleted`` flag; once ``delete`` succeeds, ``run``
    and ``upload`` raise instead of hitting a dead sandbox.
    NOTE(review): ``gpu_build_url`` is accepted but never used in this
    function — confirm whether it is still needed.
    """
    # Mutable cell shared by the closures below (a plain bool could not be
    # rebound from inside them).
    deleted = {"value": False}

    async def run(code: str | None = None) -> RunResult:
        # Execute the sandbox by POSTing to its endpoint and return the
        # parsed response.
        # NOTE(review): the ``code`` parameter is accepted but never sent in
        # the request — confirm whether per-call code execution is intended.
        if deleted["value"]:
            raise BuildfunctionsError("Sandbox has been deleted", "INVALID_REQUEST")

        async with httpx.AsyncClient(timeout=httpx.Timeout(300.0)) as client:
            response = await client.post(
                endpoint,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {api_token}",
                },
            )

        response_text = response.text
        if not response_text:
            raise BuildfunctionsError("Empty response from sandbox", "UNKNOWN_ERROR", response.status_code)

        if not response.is_success:
            raise BuildfunctionsError(f"Execution failed: {response_text}", "UNKNOWN_ERROR", response.status_code)

        # Try to parse as JSON, otherwise return raw text
        try:
            data = json.loads(response_text)
        except json.JSONDecodeError:
            data = response_text

        return RunResult(
            response=data,
            status=response.status_code,
        )

    async def upload(options: UploadOptions) -> None:
        # Read a local text file and push its content into the sandbox via
        # the web app's upload API.
        if deleted["value"]:
            raise BuildfunctionsError("Sandbox has been deleted", "INVALID_REQUEST")

        local_path = options.get("local_path")
        file_path = options.get("file_path")

        if not local_path or not file_path:
            raise ValidationError("Both local_path and file_path are required")

        local = Path(local_path)
        if not local.exists():
            raise ValidationError(f"Local file not found: {local_path}")

        # Text-only upload: the file is decoded as UTF-8 before sending.
        content = local.read_text(encoding="utf-8")

        async with httpx.AsyncClient(timeout=httpx.Timeout(60.0)) as client:
            response = await client.post(
                f"{base_url}/api/sdk/sandbox/upload",
                headers={
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {api_token}",
                },
                json={
                    "sandboxId": sandbox_id,
                    "filePath": file_path,
                    "content": content,
                    "type": "gpu",
                },
            )

        if not response.is_success:
            raise BuildfunctionsError("Upload failed", "UNKNOWN_ERROR", response.status_code)

    async def delete_fn() -> None:
        # Idempotent: a second call is a no-op once deletion succeeded.
        if deleted["value"]:
            return

        # Use the same endpoint as CPU sandbox - buildfunctions web app handles the delete
        # This ensures proper HOST cleanup for occupied VMs
        async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client:
            response = await client.request(
                "DELETE",
                f"{base_url}/api/sdk/sandbox/delete",
                headers={
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {api_token}",
                },
                json={
                    "sandboxId": sandbox_id,
                    "type": "gpu",
                },
            )

        if not response.is_success:
            raise BuildfunctionsError("Delete failed", "UNKNOWN_ERROR", response.status_code)

        # Only mark as deleted after the server confirms, so a failed delete
        # can be retried.
        deleted["value"] = True

    return DotDict({
        "id": sandbox_id,
        "name": name,
        "runtime": runtime,
        "endpoint": endpoint,
        "type": "gpu",
        "gpu": gpu,
        "run": run,
        "upload": upload,
        "delete": delete_fn,
    })
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
async def _create_gpu_sandbox(config: GPUSandboxConfig) -> DotDict:
    """Create a new GPU sandbox.

    Validates the config, optionally collects a local model directory,
    posts the build payload to ``{gpu_build_url}/build``, uploads model
    files to S3 via presigned URLs when present, and returns a sandbox
    handle built by ``_create_gpu_sandbox_instance``.

    Raises ValidationError for bad input/missing token and
    BuildfunctionsError for timeout, non-2xx build responses, or a failed
    model upload.
    """
    if not _global_api_token:
        raise ValidationError("API key not set. Initialize Buildfunctions client first.")

    _validate_config(config)

    gpu_build_url = _global_gpu_build_url or DEFAULT_GPU_BUILD_URL
    base_url = _global_base_url or DEFAULT_BASE_URL
    api_token = _global_api_token

    # Check if model is a local path
    # "model" may be a plain path string or a dict with a "path" key.
    model_config = config.get("model")
    model_path = model_config if isinstance(model_config, str) else (model_config.get("path") if isinstance(model_config, dict) else None)
    local_model_info: dict[str, Any] | None = None

    if model_path and _is_local_path(model_path):
        print(f" Local model detected: {model_path}")
        local_model_info = _get_local_model_info(model_path, config["name"])
        print(f" Found {len(local_model_info['files'])} files to upload")

    # Resolve code (inline string or file path)
    resolved_code = await resolve_code(config["code"]) if config.get("code") else ""
    resolved_config = {**config, "code": resolved_code}

    request_body = _build_request_body(resolved_config, local_model_info)

    body = {
        **request_body,
        "userId": _global_user_id,
        "username": _global_username,
        "computeTier": _global_compute_tier,
        "runCommand": None,
    }

    try:
        # GPU builds can take a long time; allow up to 30 minutes.
        async with httpx.AsyncClient(timeout=httpx.Timeout(1800.0)) as client:
            response = await client.post(
                f"{gpu_build_url}/build",
                headers={
                    "Content-Type": "application/json",
                    "Connection": "keep-alive",
                },
                json=body,
            )
    except httpx.TimeoutException:
        raise BuildfunctionsError("GPU sandbox build timed out", "NETWORK_ERROR")

    if response.status_code not in (200, 201):
        raise BuildfunctionsError(
            f"Failed to create sandbox: {response.text}", "UNKNOWN_ERROR", response.status_code
        )

    try:
        data = response.json()
    except Exception:
        # Non-JSON body: fall back to a minimal success flag keyed on 201.
        data = {"success": response.status_code == 201}

    # Upload local model files if present
    if local_model_info:
        model_presigned = (data.get("modelAndFunctionPresignedUrls") or {}).get("modelPresignedUrls")
        if model_presigned:
            print(" Uploading model files to S3...")
            try:
                await upload_model_files(
                    local_model_info["files"],
                    model_presigned,
                    data.get("bucketName", ""),
                    base_url,
                )
                print(" Model files uploaded successfully")
            except Exception as e:
                # The sandbox itself was created; surface the partial failure.
                raise BuildfunctionsError(
                    f"Sandbox created but model upload failed: {e}", "UNKNOWN_ERROR"
                )

    # The site id may live at several places depending on the response shape.
    sandbox_id = (data.get("data") or {}).get("siteId") or data.get("siteId") or data.get("id")
    name = config["name"].lower()
    sandbox_runtime = config.get("runtime", config["language"])
    sandbox_endpoint = (
        data.get("endpoint")
        or (data.get("data") or {}).get("sslCertificateEndpoint")
        or f"https://{name}.buildfunctions.app"
    )

    return _create_gpu_sandbox_instance(
        sandbox_id or name,
        name,
        sandbox_runtime,
        config.get("gpu", "T4"),
        sandbox_endpoint,
        api_token,
        gpu_build_url,
        base_url,
    )
|
|
431
|
+
|
|
432
|
+
|
|
433
|
+
class GPUSandbox:
    """Factory for GPU sandboxes (mirrors the TypeScript SDK's static API)."""

    @staticmethod
    async def create(config: GPUSandboxConfig) -> DotDict:
        """Provision a new GPU sandbox and return its handle."""
        sandbox = await _create_gpu_sandbox(config)
        return sandbox
|
|
440
|
+
|
|
441
|
+
|
|
442
|
+
# Alias for direct function call style (same coroutine as GPUSandbox.create)
create_gpu_sandbox = _create_gpu_sandbox
|