hopx-ai 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hopx_ai/__init__.py +114 -0
- hopx_ai/_agent_client.py +373 -0
- hopx_ai/_async_client.py +230 -0
- hopx_ai/_client.py +230 -0
- hopx_ai/_generated/__init__.py +22 -0
- hopx_ai/_generated/models.py +502 -0
- hopx_ai/_utils.py +9 -0
- hopx_ai/_ws_client.py +141 -0
- hopx_ai/async_sandbox.py +427 -0
- hopx_ai/cache.py +97 -0
- hopx_ai/commands.py +174 -0
- hopx_ai/desktop.py +1227 -0
- hopx_ai/env_vars.py +242 -0
- hopx_ai/errors.py +249 -0
- hopx_ai/files.py +489 -0
- hopx_ai/models.py +274 -0
- hopx_ai/models_updated.py +270 -0
- hopx_ai/sandbox.py +1439 -0
- hopx_ai/template/__init__.py +47 -0
- hopx_ai/template/build_flow.py +540 -0
- hopx_ai/template/builder.py +300 -0
- hopx_ai/template/file_hasher.py +81 -0
- hopx_ai/template/ready_checks.py +106 -0
- hopx_ai/template/tar_creator.py +122 -0
- hopx_ai/template/types.py +199 -0
- hopx_ai/terminal.py +164 -0
- hopx_ai-0.1.11.dist-info/METADATA +462 -0
- hopx_ai-0.1.11.dist-info/RECORD +29 -0
- hopx_ai-0.1.11.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Template Building Module
|
|
3
|
+
|
|
4
|
+
Provides fluent API for building custom templates.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from .builder import Template, create_template
|
|
8
|
+
from .build_flow import get_logs
|
|
9
|
+
from .ready_checks import (
|
|
10
|
+
wait_for_port,
|
|
11
|
+
wait_for_url,
|
|
12
|
+
wait_for_file,
|
|
13
|
+
wait_for_process,
|
|
14
|
+
wait_for_command,
|
|
15
|
+
)
|
|
16
|
+
from .types import (
|
|
17
|
+
StepType,
|
|
18
|
+
Step,
|
|
19
|
+
ReadyCheck,
|
|
20
|
+
ReadyCheckType,
|
|
21
|
+
BuildOptions,
|
|
22
|
+
BuildResult,
|
|
23
|
+
CreateVMOptions,
|
|
24
|
+
VM,
|
|
25
|
+
LogsResponse,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
# Public API of the template package; controls `from hopx_ai.template import *`
# and documents the supported surface for external callers.
__all__ = [
    "Template",
    "create_template",
    "get_logs",
    "wait_for_port",
    "wait_for_url",
    "wait_for_file",
    "wait_for_process",
    "wait_for_command",
    "StepType",
    "Step",
    "ReadyCheck",
    "ReadyCheckType",
    "BuildOptions",
    "BuildResult",
    "CreateVMOptions",
    "VM",
    "LogsResponse",
]
|
|
47
|
+
|
|
@@ -0,0 +1,540 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Build Flow - Orchestrates the complete build process
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
import time
|
|
7
|
+
import asyncio
|
|
8
|
+
import aiohttp
|
|
9
|
+
from typing import List, Optional, Set
|
|
10
|
+
from dataclasses import dataclass, asdict
|
|
11
|
+
|
|
12
|
+
from .types import (
|
|
13
|
+
Step,
|
|
14
|
+
StepType,
|
|
15
|
+
BuildOptions,
|
|
16
|
+
BuildResult,
|
|
17
|
+
CreateVMOptions,
|
|
18
|
+
VM,
|
|
19
|
+
UploadLinkResponse,
|
|
20
|
+
BuildResponse,
|
|
21
|
+
BuildStatusResponse,
|
|
22
|
+
)
|
|
23
|
+
from .file_hasher import FileHasher
|
|
24
|
+
from .tar_creator import TarCreator
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
DEFAULT_BASE_URL = "https://api.your-domain.com"
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _validate_template(template) -> None:
    """Check that a template is buildable before any network work starts.

    Raises:
        ValueError: if the template has no steps, lacks a FROM step, or
            contains only metadata-style steps (FROM/ENV/WORKDIR/USER).
    """
    all_steps = template.get_steps()

    if not all_steps:
        raise ValueError("Template must have at least one step")

    # Every template needs a base image to build on top of.
    if all(s.type != StepType.FROM for s in all_steps):
        raise ValueError(
            "Template must start with a FROM step.\n"
            "Examples:\n"
            "  .from_ubuntu_image('22.04')\n"
            "  .from_python_image('3.12')\n"
            "  .from_node_image('20')"
        )

    # Metadata-only step kinds do not count as real build work.
    metadata_kinds = (StepType.FROM, StepType.ENV, StepType.WORKDIR, StepType.USER)
    if all(s.type in metadata_kinds for s in all_steps):
        raise ValueError(
            "Template must have at least one build step besides FROM/ENV/WORKDIR/USER.\n"
            "Environment variables can be set when creating a sandbox.\n"
            "Add at least one of:\n"
            "  .run_cmd('...') - Execute shell command\n"
            "  .apt_install(...) - Install system packages\n"
            "  .pip_install(...) - Install Python packages\n"
            "  .npm_install(...) - Install Node packages\n"
            "  .copy('src', 'dst') - Copy files"
        )
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
async def build_template(template, options: BuildOptions) -> BuildResult:
    """
    Build a template.

    Orchestrates the full pipeline, in order: validate the template,
    hash the sources of COPY steps, upload the corresponding archives,
    trigger the remote build, optionally stream logs, then poll until
    the build reaches a terminal status.

    Args:
        template: Template instance
        options: Build options

    Returns:
        BuildResult with template ID and helpers

    Raises:
        ValueError: if the template fails validation.
        Exception: if the build ends in a status other than
            "active"/"success".
    """
    base_url = options.base_url or DEFAULT_BASE_URL
    context_path = options.context_path or os.getcwd()

    # Validate template before doing any network work.
    _validate_template(template)

    # Step 1: Calculate file hashes for COPY steps (used for dedup/caching).
    steps_with_hashes = await calculate_step_hashes(
        template.get_steps(),
        context_path,
        options
    )

    # Step 2: Upload files for COPY steps
    await upload_files(steps_with_hashes, context_path, base_url, options)

    # Step 3: Trigger build
    build_response = await trigger_build(
        steps_with_hashes,
        template.get_start_cmd(),
        template.get_ready_check(),
        base_url,
        options,
    )

    # Step 4: Stream logs (if callback provided). This blocks until the
    # stream ends, so step 5 usually finds a terminal status quickly.
    if options.on_log or options.on_progress:
        await stream_logs(build_response.build_id, base_url, options)

    # Step 5: Poll status until complete
    final_status = await poll_status(build_response.build_id, base_url, options)

    # Status "active" means template is ready
    if final_status.status not in ["active", "success"]:
        raise Exception(f"Build failed: {final_status.error or 'Unknown error'}")

    # Calculate duration (milliseconds) from the server-reported start time.
    try:
        # Try parsing with timezone first; 'Z' suffix is normalized for
        # datetime.fromisoformat, which rejects it before Python 3.11.
        from datetime import datetime
        started = datetime.fromisoformat(final_status.started_at.replace('Z', '+00:00'))
        duration = int(time.time() * 1000) - int(started.timestamp() * 1000)
    except Exception:
        # Fallback: use current time
        duration = 0

    # Create VM helper function bound to the freshly built template.
    async def create_vm_helper(vm_options: Optional[CreateVMOptions] = None) -> VM:
        return await create_vm_from_template(
            final_status.template_id,
            base_url,
            options,
            vm_options or CreateVMOptions()
        )

    # Return result
    return BuildResult(
        build_id=build_response.build_id,
        template_id=final_status.template_id,
        duration=duration,
        _create_vm_func=create_vm_helper,
    )
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
async def calculate_step_hashes(
    steps: List[Step],
    context_path: str,
    options: BuildOptions
) -> List[Step]:
    """Return a copy of *steps* where every COPY step carries a content hash.

    Non-COPY steps are passed through unchanged; COPY steps are rebuilt
    with ``files_hash`` set from the hashed source files.
    """
    file_hasher = FileHasher()
    hashed_steps: List[Step] = []

    for current in steps:
        if current.type != StepType.COPY:
            hashed_steps.append(current)
            continue

        src, dest = current.args[0], current.args[1]
        # A COPY source may be a comma-separated list; hash all of them
        # together so the step has a single combined fingerprint.
        pairs = [(source, dest) for source in src.split(',')]
        digest = await file_hasher.calculate_multi_hash(pairs, context_path)

        hashed_steps.append(
            Step(
                type=current.type,
                args=current.args,
                files_hash=digest,
                skip_cache=current.skip_cache,
            )
        )

    return hashed_steps
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
async def upload_files(
    steps: List[Step],
    context_path: str,
    base_url: str,
    options: BuildOptions,
) -> None:
    """Package and upload the sources of every hashed COPY step.

    Each distinct ``files_hash`` is processed at most once, and archives
    the server already has (``present``) are not re-uploaded.
    """
    tar_creator = TarCreator()
    seen_hashes: Set[str] = set()

    async with aiohttp.ClientSession() as session:
        for step in steps:
            if step.type != StepType.COPY or not step.files_hash:
                continue
            if step.files_hash in seen_hashes:
                continue  # identical content already handled this build

            source_list = step.args[0].split(',')

            # Bundle all sources into one temporary tar.gz archive.
            archive = await tar_creator.create_multi_tar_gz(source_list, context_path)

            try:
                link = await get_upload_link(
                    step.files_hash,
                    archive.size,
                    base_url,
                    options.api_key,
                    session,
                )

                # Only transfer bytes when the server lacks this archive.
                if not link.present and link.upload_url:
                    await upload_file(link.upload_url, archive, session)

                seen_hashes.add(step.files_hash)
            finally:
                # Always remove the temporary tarball, even on failure.
                archive.cleanup()
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
async def get_upload_link(
    files_hash: str,
    content_length: int,
    base_url: str,
    api_key: str,
    session: aiohttp.ClientSession,
) -> UploadLinkResponse:
    """Request a presigned upload URL for the archive identified by *files_hash*.

    Raises:
        Exception: on a non-2xx HTTP response.
    """
    endpoint = f"{base_url}/v1/templates/files/upload-link"
    request_headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }
    payload = {
        "files_hash": files_hash,
        "content_length": content_length,
    }

    async with session.post(endpoint, headers=request_headers, json=payload) as response:
        if not response.ok:
            raise Exception(f"Failed to get upload link: {response.status}")

        return UploadLinkResponse(**(await response.json()))
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
async def upload_file(
    upload_url: str,
    tar_result,
    session: aiohttp.ClientSession,
) -> None:
    """PUT the tarball at *upload_url* (a presigned R2 URL).

    Raises:
        Exception: on a non-2xx HTTP response.
    """
    # NOTE(review): the whole archive is read into memory before the PUT —
    # fine for typical build contexts, but worth confirming for very large ones.
    with tar_result.open('rb') as handle:
        payload = handle.read()

    put_headers = {
        "Content-Type": "application/gzip",
        "Content-Length": str(tar_result.size),
    }

    async with session.put(upload_url, headers=put_headers, data=payload) as response:
        if not response.ok:
            raise Exception(f"Upload failed: {response.status}")
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
async def trigger_build(
    steps: List[Step],
    start_cmd: Optional[str],
    # NOTE(review): was annotated Optional[dict], but the body reads attributes
    # (ready_cmd.type.value, .timeout, ...) — the actual type is ReadyCheck
    # from .types (string annotation used because it is not imported here).
    ready_cmd: Optional["ReadyCheck"],
    base_url: str,
    options: BuildOptions,
) -> BuildResponse:
    """Trigger a remote template build.

    Serializes steps and the ready check into the API's camelCase wire
    format and POSTs them to ``/v1/templates/build``.

    Returns:
        BuildResponse parsed from the API's JSON reply.

    Raises:
        Exception: on a non-2xx HTTP response.
    """
    # Convert steps to dict — camelCase keys are what the API expects;
    # optional fields are only included when set.
    steps_dict = []
    for step in steps:
        step_dict = {
            "type": step.type.value,
            "args": step.args,
        }
        if step.files_hash:
            step_dict["filesHash"] = step.files_hash
        if step.skip_cache:
            step_dict["skipCache"] = True
        steps_dict.append(step_dict)

    # Convert ready check to dict; each optional field is sent only when truthy.
    ready_cmd_dict = None
    if ready_cmd:
        ready_cmd_dict = {
            "type": ready_cmd.type.value,
            "timeout": ready_cmd.timeout,
            "interval": ready_cmd.interval,
        }
        if ready_cmd.port:
            ready_cmd_dict["port"] = ready_cmd.port
        if ready_cmd.url:
            ready_cmd_dict["url"] = ready_cmd.url
        if ready_cmd.path:
            ready_cmd_dict["path"] = ready_cmd.path
        if ready_cmd.process_name:
            ready_cmd_dict["processName"] = ready_cmd.process_name
        if ready_cmd.command:
            ready_cmd_dict["command"] = ready_cmd.command

    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{base_url}/v1/templates/build",
            headers={
                "Authorization": f"Bearer {options.api_key}",
                "Content-Type": "application/json",
            },
            json={
                "alias": options.alias,
                "steps": steps_dict,
                "startCmd": start_cmd,
                "readyCmd": ready_cmd_dict,
                "cpu": options.cpu,
                "memory": options.memory,
                "diskGB": options.disk_gb,
                "skipCache": options.skip_cache,
            },
        ) as response:
            if not response.ok:
                raise Exception(f"Build trigger failed: {response.status}")

            data = await response.json()
            return BuildResponse(**data)
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
async def stream_logs(
    build_id: str,
    base_url: str,
    options: BuildOptions,
) -> None:
    """Stream build logs via offset-based polling.

    Polls the logs endpoint every 2 seconds, forwarding each new
    non-blank line to ``options.on_log`` and coarse progress estimates
    to ``options.on_progress``. Streaming is best-effort: it returns
    silently on any HTTP or network error, and stops once the build
    reports completion or a terminal status ("active"/"success"/"failed").
    """
    offset = 0
    last_progress = -1

    async with aiohttp.ClientSession() as session:
        while True:
            try:
                async with session.get(
                    f"{base_url}/v1/templates/build/{build_id}/logs",
                    params={"offset": offset},
                    headers={
                        "Authorization": f"Bearer {options.api_key}",
                    },
                ) as response:
                    if not response.ok:
                        return  # Stop streaming on error

                    data = await response.json()
                    logs = data.get("logs", "")
                    offset = data.get("offset", offset)
                    status = data.get("status", "unknown")
                    complete = data.get("complete", False)

                    # Output logs line by line, skipping blank lines.
                    if logs and options.on_log:
                        for line in logs.split('\n'):
                            if not line.strip():
                                continue
                            # Infer a log level from emoji/keyword markers.
                            level = "INFO"
                            if "❌" in line or "ERROR" in line:
                                level = "ERROR"
                            elif "✅" in line:
                                # Success markers pin the line to INFO even if
                                # it also contains "WARN" — checked on purpose
                                # before the warning branch below.
                                level = "INFO"
                            elif "⚠" in line or "WARN" in line:
                                level = "WARN"

                            options.on_log({
                                "level": level,
                                "message": line,
                                "timestamp": ""
                            })

                    # Progress is a rough estimate: 50 while building, 100 on success.
                    if options.on_progress and status == "building":
                        progress = 50  # Building phase
                        if progress != last_progress:
                            options.on_progress(progress)
                            last_progress = progress

                    # Stop on completion or any terminal status.
                    if complete or status in ["active", "success", "failed"]:
                        if options.on_progress and status in ["active", "success"]:
                            options.on_progress(100)
                        return

                    # Wait before next poll
                    await asyncio.sleep(2)

            except Exception:
                # Deliberate best-effort: log streaming must never fail the
                # build. (Fix: dropped the unused `as e` exception binding.)
                return
|
|
399
|
+
|
|
400
|
+
|
|
401
|
+
async def poll_status(
    build_id: str,
    base_url: str,
    options: BuildOptions,
    interval_ms: int = 2000,
) -> BuildStatusResponse:
    """Poll the build status endpoint until a terminal state is reached.

    Args:
        build_id: Build ID to watch.
        base_url: API base URL.
        options: Build options (supplies the API key).
        interval_ms: Delay between polls, in milliseconds.

    Raises:
        Exception: on a non-2xx HTTP response.
    """
    status_url = f"{base_url}/v1/templates/build/{build_id}/status"
    auth_headers = {
        "Authorization": f"Bearer {options.api_key}",
    }

    async with aiohttp.ClientSession() as session:
        while True:
            async with session.get(status_url, headers=auth_headers) as response:
                if not response.ok:
                    raise Exception(f"Status check failed: {response.status}")

                current = BuildStatusResponse(**(await response.json()))

            # Terminal states: "active" (success), "success", or "failed".
            if current.status in ("active", "success", "failed"):
                return current

            # Not done yet — wait before the next poll.
            await asyncio.sleep(interval_ms / 1000)
|
|
428
|
+
|
|
429
|
+
|
|
430
|
+
async def get_logs(
    build_id: str,
    api_key: str,
    offset: int = 0,
    base_url: Optional[str] = None,
) -> "LogsResponse":
    """
    Get build logs with offset-based polling.

    Args:
        build_id: Build ID
        api_key: API key
        offset: Starting offset (default: 0)
        base_url: Base URL (defaults to DEFAULT_BASE_URL)

    Returns:
        LogsResponse with logs, offset, status, complete

    Raises:
        Exception: on a non-2xx HTTP response.

    Example:
        ```python
        from hopx_ai.template import get_logs

        # Get logs from beginning
        response = await get_logs("123", "api_key")
        print(response.logs)

        # Get new logs from last offset
        response = await get_logs("123", "api_key", offset=response.offset)
        ```
    """
    # Local import: LogsResponse is not part of the module-level
    # `from .types import ...` list at the top of this file.
    from .types import LogsResponse

    if base_url is None:
        base_url = DEFAULT_BASE_URL

    async with aiohttp.ClientSession() as session:
        async with session.get(
            f"{base_url}/v1/templates/build/{build_id}/logs",
            params={"offset": offset},
            headers={
                "Authorization": f"Bearer {api_key}",
            },
        ) as response:
            if not response.ok:
                raise Exception(f"Get logs failed: {response.status}")

            data = await response.json()
            # Missing fields fall back to safe defaults rather than raising.
            return LogsResponse(
                logs=data.get("logs", ""),
                offset=data.get("offset", 0),
                status=data.get("status", "unknown"),
                complete=data.get("complete", False),
                request_id=data.get("request_id"),
            )
|
|
484
|
+
|
|
485
|
+
|
|
486
|
+
async def create_vm_from_template(
    template_id: str,
    base_url: str,
    build_options: BuildOptions,
    vm_options: CreateVMOptions,
) -> VM:
    """Create a VM from a built template.

    Returns:
        VM handle whose internal delete helper tears the VM down again.

    Raises:
        Exception: on a non-2xx HTTP response.
    """
    request_headers = {
        "Authorization": f"Bearer {build_options.api_key}",
        "Content-Type": "application/json",
    }
    request_body = {
        "templateID": template_id,
        "alias": vm_options.alias,
        "cpu": vm_options.cpu,
        "memory": vm_options.memory,
        "diskGB": vm_options.disk_gb,
        "envVars": vm_options.env_vars,
    }

    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{base_url}/v1/vms/create",
            headers=request_headers,
            json=request_body,
        ) as response:
            if not response.ok:
                raise Exception(f"VM creation failed: {response.status}")

            vm_data = await response.json()

    # Bound deleter so callers can tear the VM down without extra context.
    async def delete_func():
        await delete_vm(vm_data["vmID"], base_url, build_options.api_key)

    return VM(
        vm_id=vm_data["vmID"],
        template_id=vm_data["templateID"],
        status=vm_data["status"],
        ip=vm_data["ip"],
        agent_url=vm_data["agentUrl"],
        started_at=vm_data["startedAt"],
        _delete_func=delete_func,
    )
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
async def delete_vm(vm_id: str, base_url: str, api_key: str) -> None:
    """Delete the VM identified by *vm_id* via the REST API.

    Raises:
        Exception: on a non-2xx HTTP response.
    """
    endpoint = f"{base_url}/v1/vms/{vm_id}"
    auth_headers = {
        "Authorization": f"Bearer {api_key}",
    }

    async with aiohttp.ClientSession() as session:
        async with session.delete(endpoint, headers=auth_headers) as response:
            if not response.ok:
                raise Exception(f"VM deletion failed: {response.status}")
|
|
540
|
+
|