blaxel 0.1.9rc36__py3-none-any.whl → 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/agents/__init__.py +52 -15
- blaxel/authentication/__init__.py +11 -2
- blaxel/client/api/compute/create_sandbox_preview.py +179 -0
- blaxel/client/api/compute/create_sandbox_preview_token.py +192 -0
- blaxel/client/api/compute/delete_sandbox_preview.py +167 -0
- blaxel/client/api/compute/delete_sandbox_preview_token.py +180 -0
- blaxel/client/api/compute/get_sandbox_preview.py +167 -0
- blaxel/client/api/compute/list_sandbox_preview_tokens.py +172 -0
- blaxel/client/api/compute/list_sandbox_previews.py +159 -0
- blaxel/client/api/compute/update_sandbox_preview.py +192 -0
- blaxel/client/api/integrations/get_integration.py +64 -7
- blaxel/client/api/workspaces/check_workspace_availability.py +165 -0
- blaxel/client/models/__init__.py +32 -2
- blaxel/client/models/check_workspace_availability_body.py +60 -0
- blaxel/client/models/delete_sandbox_preview_token_response_200.py +60 -0
- blaxel/client/models/integration.py +197 -0
- blaxel/client/models/integration_additional_infos.py +45 -0
- blaxel/client/models/integration_endpoint.py +143 -0
- blaxel/client/models/integration_endpoint_token.py +79 -0
- blaxel/client/models/integration_endpoints.py +61 -0
- blaxel/client/models/integration_headers.py +45 -0
- blaxel/client/models/integration_organization.py +88 -0
- blaxel/client/models/integration_query_params.py +45 -0
- blaxel/client/models/metrics.py +9 -0
- blaxel/client/models/preview.py +96 -0
- blaxel/client/models/preview_metadata.py +133 -0
- blaxel/client/models/preview_spec.py +79 -0
- blaxel/client/models/preview_token.py +96 -0
- blaxel/client/models/preview_token_metadata.py +97 -0
- blaxel/client/models/preview_token_spec.py +88 -0
- blaxel/common/autoload.py +0 -2
- blaxel/common/internal.py +75 -0
- blaxel/common/settings.py +6 -1
- blaxel/mcp/server.py +2 -1
- blaxel/sandbox/base.py +68 -0
- blaxel/sandbox/client/__init__.py +8 -0
- blaxel/sandbox/client/api/__init__.py +1 -0
- blaxel/sandbox/client/api/filesystem/__init__.py +0 -0
- blaxel/sandbox/client/api/filesystem/delete_filesystem_path.py +184 -0
- blaxel/sandbox/client/api/filesystem/get_filesystem_path.py +184 -0
- blaxel/sandbox/client/api/filesystem/put_filesystem_path.py +189 -0
- blaxel/sandbox/client/api/network/__init__.py +0 -0
- blaxel/sandbox/client/api/network/delete_network_process_pid_monitor.py +169 -0
- blaxel/sandbox/client/api/network/get_network_process_pid_ports.py +169 -0
- blaxel/sandbox/client/api/network/post_network_process_pid_monitor.py +195 -0
- blaxel/sandbox/client/api/process/__init__.py +0 -0
- blaxel/sandbox/client/api/process/delete_process_identifier.py +163 -0
- blaxel/sandbox/client/api/process/delete_process_identifier_kill.py +189 -0
- blaxel/sandbox/client/api/process/get_process.py +135 -0
- blaxel/sandbox/client/api/process/get_process_identifier.py +159 -0
- blaxel/sandbox/client/api/process/get_process_identifier_logs.py +188 -0
- blaxel/sandbox/client/api/process/get_process_identifier_logs_stream.py +190 -0
- blaxel/sandbox/client/api/process/post_process.py +176 -0
- blaxel/sandbox/client/client.py +162 -0
- blaxel/sandbox/client/errors.py +16 -0
- blaxel/sandbox/client/models/__init__.py +41 -0
- blaxel/sandbox/client/models/delete_network_process_pid_monitor_response_200.py +45 -0
- blaxel/sandbox/client/models/directory.py +112 -0
- blaxel/sandbox/client/models/error_response.py +60 -0
- blaxel/sandbox/client/models/file.py +105 -0
- blaxel/sandbox/client/models/file_request.py +78 -0
- blaxel/sandbox/client/models/file_with_content.py +114 -0
- blaxel/sandbox/client/models/get_network_process_pid_ports_response_200.py +45 -0
- blaxel/sandbox/client/models/get_process_identifier_logs_response_200.py +45 -0
- blaxel/sandbox/client/models/get_process_identifier_logs_stream_response_200.py +45 -0
- blaxel/sandbox/client/models/port_monitor_request.py +60 -0
- blaxel/sandbox/client/models/post_network_process_pid_monitor_response_200.py +45 -0
- blaxel/sandbox/client/models/process_kill_request.py +60 -0
- blaxel/sandbox/client/models/process_request.py +118 -0
- blaxel/sandbox/client/models/process_response.py +123 -0
- blaxel/sandbox/client/models/subdirectory.py +60 -0
- blaxel/sandbox/client/models/success_response.py +69 -0
- blaxel/sandbox/client/py.typed +1 -0
- blaxel/sandbox/client/types.py +46 -0
- blaxel/sandbox/filesystem.py +104 -0
- blaxel/sandbox/process.py +57 -0
- blaxel/sandbox/sandbox.py +92 -0
- blaxel/tools/__init__.py +62 -21
- {blaxel-0.1.9rc36.dist-info → blaxel-0.1.10.dist-info}/METADATA +1 -1
- {blaxel-0.1.9rc36.dist-info → blaxel-0.1.10.dist-info}/RECORD +82 -14
- blaxel/client/models/sandboxes.py +0 -129
- {blaxel-0.1.9rc36.dist-info → blaxel-0.1.10.dist-info}/WHEEL +0 -0
- {blaxel-0.1.9rc36.dist-info → blaxel-0.1.10.dist-info}/licenses/LICENSE +0 -0
blaxel/sandbox/client/models/subdirectory.py
ADDED
@@ -0,0 +1,60 @@
+from typing import Any, TypeVar, Union
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+from ..types import UNSET, Unset
+
+T = TypeVar("T", bound="Subdirectory")
+
+
+@_attrs_define
+class Subdirectory:
+    """
+    Attributes:
+        path (Union[Unset, str]):
+    """
+
+    path: Union[Unset, str] = UNSET
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        path = self.path
+
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update({})
+        if path is not UNSET:
+            field_dict["path"] = path
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
+        if not src_dict:
+            return None
+        d = src_dict.copy()
+        path = d.pop("path", UNSET)
+
+        subdirectory = cls(
+            path=path,
+        )
+
+        subdirectory.additional_properties = d
+        return subdirectory
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
blaxel/sandbox/client/models/success_response.py
ADDED
@@ -0,0 +1,69 @@
+from typing import Any, TypeVar, Union
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+from ..types import UNSET, Unset
+
+T = TypeVar("T", bound="SuccessResponse")
+
+
+@_attrs_define
+class SuccessResponse:
+    """
+    Attributes:
+        message (Union[Unset, str]): Example: File created successfully.
+        path (Union[Unset, str]): Example: /path/to/file.
+    """
+
+    message: Union[Unset, str] = UNSET
+    path: Union[Unset, str] = UNSET
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        message = self.message
+
+        path = self.path
+
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update({})
+        if message is not UNSET:
+            field_dict["message"] = message
+        if path is not UNSET:
+            field_dict["path"] = path
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
+        if not src_dict:
+            return None
+        d = src_dict.copy()
+        message = d.pop("message", UNSET)
+
+        path = d.pop("path", UNSET)
+
+        success_response = cls(
+            message=message,
+            path=path,
+        )
+
+        success_response.additional_properties = d
+        return success_response
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
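Both new models follow the standard openapi-python-client pattern: optional fields default to the UNSET sentinel, unknown payload keys are kept in additional_properties, and to_dict/from_dict round-trip the data. A minimal sketch of that behavior (illustrative only; it relies on the models package re-exporting SuccessResponse, which the new filesystem.py below also assumes):

    from blaxel.sandbox.client.models import SuccessResponse
    from blaxel.sandbox.client.types import UNSET

    # Unknown keys from the API payload land in additional_properties.
    resp = SuccessResponse.from_dict({"message": "File created successfully", "extra": 42})
    assert resp.message == "File created successfully"
    assert resp.path is UNSET                  # never sent, so it stays UNSET
    assert resp.additional_keys == ["extra"]

    # to_dict only emits fields that were actually set, plus the extra keys.
    assert resp.to_dict() == {"extra": 42, "message": "File created successfully"}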
blaxel/sandbox/client/py.typed
ADDED
@@ -0,0 +1 @@
+# Marker file for PEP 561
blaxel/sandbox/client/types.py
ADDED
@@ -0,0 +1,46 @@
+"""Contains some shared types for properties"""
+
+from collections.abc import MutableMapping
+from http import HTTPStatus
+from typing import BinaryIO, Generic, Literal, Optional, TypeVar
+
+from attrs import define
+
+
+class Unset:
+    def __bool__(self) -> Literal[False]:
+        return False
+
+
+UNSET: Unset = Unset()
+
+FileJsonType = tuple[Optional[str], BinaryIO, Optional[str]]
+
+
+@define
+class File:
+    """Contains information for file uploads"""
+
+    payload: BinaryIO
+    file_name: Optional[str] = None
+    mime_type: Optional[str] = None
+
+    def to_tuple(self) -> FileJsonType:
+        """Return a tuple representation that httpx will accept for multipart/form-data"""
+        return self.file_name, self.payload, self.mime_type
+
+
+T = TypeVar("T")
+
+
+@define
+class Response(Generic[T]):
+    """A response from an endpoint"""
+
+    status_code: HTTPStatus
+    content: bytes
+    headers: MutableMapping[str, str]
+    parsed: Optional[T]
+
+
+__all__ = ["UNSET", "File", "FileJsonType", "Response", "Unset"]
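types.py is the usual generated support module: Unset/UNSET is a falsy sentinel that distinguishes "field not sent" from an explicit None, and Response[T] carries the raw HTTP result alongside the parsed model. A small illustration of how calling code typically branches on them (not part of the wheel):

    from http import HTTPStatus

    from blaxel.sandbox.client.types import UNSET, Response

    def describe(value) -> str:
        # UNSET is falsy, but an identity check is the unambiguous test.
        return "not provided" if value is UNSET else str(value)

    resp = Response(status_code=HTTPStatus.OK, content=b"{}", headers={}, parsed={})
    if resp.status_code == HTTPStatus.OK:
        print(describe(UNSET))  # -> "not provided"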
blaxel/sandbox/filesystem.py
ADDED
@@ -0,0 +1,104 @@
+import asyncio
+from typing import Dict
+
+from ..common.settings import settings
+from .base import SandboxHandleBase
+from .client.api.filesystem.delete_filesystem_path import (
+    asyncio_detailed as delete_filesystem_by_path,
+)
+from .client.api.filesystem.get_filesystem_path import asyncio_detailed as get_filesystem_by_path
+from .client.api.filesystem.put_filesystem_path import asyncio_detailed as put_filesystem_by_path
+from .client.client import client
+from .client.models import Directory, FileRequest, SuccessResponse
+
+
+class SandboxFileSystem(SandboxHandleBase):
+    def __init__(self, sandbox):
+        super().__init__(sandbox)
+        self.client = client.with_base_url(self.url).with_headers(settings.headers)
+
+    async def mkdir(self, path: str, permissions: str = "0755") -> SuccessResponse:
+        path = self.format_path(path)
+        body = FileRequest(is_directory=True, permissions=permissions)
+        response = await put_filesystem_by_path(path=path, client=self.client, body=body)
+        self.handle_response(response)
+        return response.parsed
+
+    async def write(self, path: str, content: str) -> SuccessResponse:
+        path = self.format_path(path)
+        body = FileRequest(content=content)
+        response = await put_filesystem_by_path(path=path, client=self.client, body=body)
+        self.handle_response(response)
+        return response.parsed
+
+    async def read(self, path: str) -> str:
+        path = self.format_path(path)
+        response = await get_filesystem_by_path(path=path, client=self.client)
+        self.handle_response(response)
+        if "content" not in response.parsed.additional_properties:
+            raise Exception('{"error": "File not found"}')
+        return response.parsed.additional_properties["content"]
+
+    async def rm(self, path: str, recursive: bool = False) -> SuccessResponse:
+        path = self.format_path(path)
+        response = await delete_filesystem_by_path(path=path, client=self.client, recursive=recursive)
+        self.handle_response(response)
+        return response.parsed
+
+    async def ls(self, path: str) -> Directory:
+        path = self.format_path(path)
+        response = await get_filesystem_by_path(path=path, client=self.client)
+        self.handle_response(response)
+        if not hasattr(response.parsed, "files") and not hasattr(response.parsed, "subdirectories"):
+            raise Exception('{"error": "Directory not found"}')
+        return response.parsed
+
+    async def cp(self, source: str, destination: str) -> Dict[str, str]:
+        source = self.format_path(source)
+        destination = self.format_path(destination)
+        response = await get_filesystem_by_path(path=source, client=self.client)
+        self.handle_response(response)
+        data = response.parsed
+        if "content" in data.additional_properties:
+            await self.write(destination, data.additional_properties["content"])
+            return {
+                "message": "File copied successfully",
+                "source": source,
+                "destination": destination,
+            }
+        elif hasattr(data, "subdirectories") or hasattr(data, "files"):
+            # Create destination directory
+            await self.mkdir(destination)
+            # Process subdirectories in batches of 5
+            subdirectories = getattr(data, "subdirectories", []) or []
+            for i in range(0, len(subdirectories), 5):
+                batch = subdirectories[i:i+5]
+                await asyncio.gather(*[
+                    self.cp(
+                        getattr(subdir, "path", f"{source}/{getattr(subdir, 'path', '')}"),
+                        f"{destination}/{getattr(subdir, 'path', '')}"
+                    ) for subdir in batch
+                ])
+            # Process files in batches of 10
+            files = getattr(data, "files", []) or []
+            for i in range(0, len(files), 10):
+                batch = files[i:i+10]
+                await asyncio.gather(*[
+                    self.write(
+                        f"{destination}/{getattr(file, 'path', '')}",
+                        await self.read(getattr(file, "path", f"{source}/{getattr(file, 'path', '')}"))
+                    ) for file in batch
+                ])
+            return {
+                "message": "Directory copied successfully",
+                "source": source,
+                "destination": destination,
+            }
+        raise Exception("Unsupported file type")
+
+    def format_path(self, path: str) -> str:
+        if path == "/":
+            return "%2F"
+        if path.startswith("/"):
+            path = path[1:]
+        return path
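Combined with the SandboxInstance wrapper added below in sandbox.py, this gives an async per-sandbox filesystem handle. A minimal usage sketch (the sandbox name and paths are invented for illustration, and the files/path attributes are assumed from the generated Directory/File models, which this diff does not show in full):

    import asyncio

    from blaxel.sandbox.sandbox import SandboxInstance

    async def main():
        sandbox = await SandboxInstance.get("my-sandbox")  # hypothetical sandbox name
        await sandbox.fs.mkdir("/app/data")
        await sandbox.fs.write("/app/data/hello.txt", "hello world")
        print(await sandbox.fs.read("/app/data/hello.txt"))
        listing = await sandbox.fs.ls("/app/data")
        print([f.path for f in (listing.files or [])])     # `files`/`path` assumed from the models
        await sandbox.fs.rm("/app/data", recursive=True)

    asyncio.run(main())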
blaxel/sandbox/process.py
ADDED
@@ -0,0 +1,57 @@
+from blaxel.sandbox.client.models.process_request import ProcessRequest
+
+from .base import SandboxHandleBase
+from .client.api.process.delete_process_identifier import (
+    asyncio_detailed as delete_process_by_identifier,
+)
+from .client.api.process.delete_process_identifier_kill import (
+    asyncio_detailed as delete_process_by_identifier_kill,
+)
+from .client.api.process.get_process import asyncio_detailed as get_process
+from .client.api.process.get_process_identifier import asyncio_detailed as get_process_by_identifier
+from .client.api.process.get_process_identifier_logs import (
+    asyncio_detailed as get_process_by_identifier_logs,
+)
+from .client.api.process.post_process import asyncio_detailed as post_process
+from .client.models import (
+    GetProcessIdentifierLogsResponse200,
+    ProcessKillRequest,
+    ProcessResponse,
+    SuccessResponse,
+)
+
+
+class SandboxProcess(SandboxHandleBase):
+    async def exec(self, process: ProcessRequest) -> ProcessResponse:
+        response = await post_process(client=self.client, body=process)
+        self.handle_response(response)
+        return response.parsed
+
+    async def get(self, identifier: str) -> ProcessResponse:
+        response = await get_process_by_identifier(identifier=identifier, client=self.client)
+        self.handle_response(response)
+        return response.parsed
+
+    async def list(self) -> list[ProcessResponse]:
+        response = await get_process(client=self.client)
+        self.handle_response(response)
+        return response.parsed
+
+    async def stop(self, identifier: str) -> SuccessResponse:
+        response = await delete_process_by_identifier(identifier=identifier, client=self.client)
+        self.handle_response(response)
+        return response.parsed
+
+    async def kill(self, identifier: str, signal: str = "SIGKILL") -> SuccessResponse:
+        kill_request = ProcessKillRequest(signal=signal)
+        response = await delete_process_by_identifier_kill(identifier=identifier, client=self.client, body=kill_request)
+        self.handle_response(response)
+        return response.parsed
+
+    async def logs(self, identifier: str, type_: str = "stdout") -> str:
+        response = await get_process_by_identifier_logs(identifier=identifier, client=self.client)
+        self.handle_response(response)
+        data: GetProcessIdentifierLogsResponse200 = response.parsed
+        if type_ in data.additional_properties:
+            return data.additional_properties[type_]
+        raise Exception("Unsupported log type")
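The process handle mirrors the filesystem one. A rough sketch of running a command and reading its stdout (the ProcessRequest/ProcessResponse field names used here, command and pid, are assumptions about the generated models, which this diff does not show):

    import asyncio

    from blaxel.sandbox.client.models.process_request import ProcessRequest
    from blaxel.sandbox.sandbox import SandboxInstance

    async def main():
        sandbox = await SandboxInstance.get("my-sandbox")                         # hypothetical name
        proc = await sandbox.process.exec(ProcessRequest(command="echo hello"))   # `command` assumed
        await asyncio.sleep(1)                                                    # let the command finish
        print(await sandbox.process.logs(proc.pid, "stdout"))                     # `pid` assumed; raises on unknown log type

    asyncio.run(main())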
blaxel/sandbox/sandbox.py
ADDED
@@ -0,0 +1,92 @@
+import asyncio
+import logging
+import time
+from typing import List
+
+from ..client.api.compute.create_sandbox import asyncio as create_sandbox
+from ..client.api.compute.delete_sandbox import asyncio as delete_sandbox
+from ..client.api.compute.get_sandbox import asyncio as get_sandbox
+from ..client.api.compute.list_sandboxes import asyncio as list_sandboxes
+from ..client.client import client
+from ..client.models import Sandbox
+from .filesystem import SandboxFileSystem
+from .process import SandboxProcess
+
+logger = logging.getLogger(__name__)
+
+class SandboxInstance:
+    def __init__(self, sandbox: Sandbox):
+        self.sandbox = sandbox
+        self.fs = SandboxFileSystem(sandbox)
+        self.process = SandboxProcess(sandbox)
+
+    @property
+    def metadata(self):
+        return self.sandbox.metadata
+
+    @property
+    def status(self):
+        return self.sandbox.status
+
+    @property
+    def events(self):
+        return self.sandbox.events
+
+    @property
+    def spec(self):
+        return self.sandbox.spec
+
+    async def wait(self, max_wait: int = 60000, interval: int = 1000) -> None:
+        start_time = time.time() * 1000  # Convert to milliseconds
+        while self.sandbox.status != "DEPLOYED":
+            await asyncio.sleep(interval / 1000)  # Convert to seconds
+            try:
+                response = await get_sandbox(
+                    self.sandbox.metadata.name,
+                    client=client,
+                )
+                logger.info(f"Waiting for sandbox to be deployed, status: {response.status}")
+                self.sandbox = response
+            except Exception as e:
+                logger.error("Could not retrieve sandbox", exc_info=e)
+
+            if self.sandbox.status == "FAILED":
+                raise Exception("Sandbox failed to deploy")
+
+            if (time.time() * 1000) - start_time > max_wait:
+                raise Exception("Sandbox did not deploy in time")
+
+    @classmethod
+    async def create(cls, sandbox: Sandbox) -> "SandboxInstance":
+        if not sandbox.spec:
+            raise Exception("Sandbox spec is required")
+        if not sandbox.spec.runtime:
+            raise Exception("Sandbox runtime is required")
+        sandbox.spec.runtime.generation = sandbox.spec.runtime.generation or "mk3"
+
+        response = await create_sandbox(
+            client=client,
+            body=sandbox,
+        )
+        return cls(response)
+
+    @classmethod
+    async def get(cls, sandbox_name: str) -> "SandboxInstance":
+        response = await get_sandbox(
+            sandbox_name,
+            client=client,
+        )
+        return cls(response)
+
+    @classmethod
+    async def list(cls) -> List["SandboxInstance"]:
+        response = await list_sandboxes()
+        return [cls(sandbox) for sandbox in response]
+
+    @classmethod
+    async def delete(cls, sandbox_name: str) -> Sandbox:
+        response = await delete_sandbox(
+            sandbox_name,
+            client=client,
+        )
+        return response
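SandboxInstance itself is a thin wrapper over the compute API plus the two handles above: create/get/list/delete call the generated client, and wait polls until the sandbox reports DEPLOYED. A minimal lifecycle sketch (the Metadata, SandboxSpec and Runtime model names and the image are assumptions, not taken from this diff):

    import asyncio

    from blaxel.client.models import Metadata, Runtime, Sandbox, SandboxSpec  # model names assumed
    from blaxel.sandbox.sandbox import SandboxInstance

    async def main():
        sandbox = await SandboxInstance.create(Sandbox(
            metadata=Metadata(name="demo-sandbox"),                          # illustrative name
            spec=SandboxSpec(runtime=Runtime(image="blaxel/base:latest")),   # illustrative image
        ))
        await sandbox.wait(max_wait=120_000)   # milliseconds, per the wait() signature
        print(sandbox.status)
        await SandboxInstance.delete("demo-sandbox")

    asyncio.run(main())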
blaxel/tools/__init__.py
CHANGED
@@ -1,6 +1,7 @@
 import asyncio
 import json
 import os
+import traceback
 from contextlib import AsyncExitStack
 from logging import getLogger
 from typing import Any, cast
@@ -10,6 +11,7 @@ from mcp.types import CallToolResult
 from mcp.types import Tool as MCPTool

 from ..common.env import env
+from ..common.internal import get_global_unique_hash
 from ..common.settings import settings
 from ..instrumentation.span import SpanManager
 from ..mcp.client import websocket_client
@@ -22,15 +24,19 @@ if os.getenv("BL_SERVER_PORT"):
 DEFAULT_TIMEOUT = 5

 class PersistentWebSocket:
-    def __init__(self, url: str, timeout: int = DEFAULT_TIMEOUT, timeout_enabled: bool = True):
+    def __init__(self, url: str, name: str, timeout: int = DEFAULT_TIMEOUT, timeout_enabled: bool = True):
         self.url = url
+        self.name = name
         self.timeout = timeout
-        self.timeout_enabled = timeout_enabled
         self.session_exit_stack = AsyncExitStack()
         self.client_exit_stack = AsyncExitStack()
         self.session: ClientSession = None
         self.timer_task = None
         self.tools_cache = []
+        if settings.bl_cloud:
+            self.timeout_enabled = False
+        else:
+            self.timeout_enabled = timeout_enabled

     def with_metas(self, metas: dict[str, Any]):
         self.metas = metas
@@ -46,19 +52,32 @@ class PersistentWebSocket:
             logger.debug(f"Tool {tool_name} returned {call_tool_result}")
         if self.timeout_enabled:
             self._reset_timer()
+        else:
+            await self._close()
         return call_tool_result

     async def list_tools(self):
+        logger.debug(f"Listing tools for {self.name}")
+        span_attributes = {
+            "tool.server": self.url,
+            "tool.server_name": self.name,
+            "span.type": "tool.list",
+        }
+        with SpanManager("blaxel-tracer").create_active_span(self.name, span_attributes) as span:
+            await self._initialize()
+            logger.debug(f"Initialized websocket for {self.name}")
+            if self.timeout_enabled:
+                self._remove_timer()
+            logger.debug("Listing tools")
+            list_tools_result = await self.session.list_tools()
+            self.tools_cache = list_tools_result.tools
+            logger.debug(f"Tools listed: {list_tools_result}")
+            if self.timeout_enabled:
+                self._reset_timer()
+            else:
+                await self._close()
+            span.set_attribute("tool.list.result", list_tools_result.model_dump_json())
+            return list_tools_result

     def get_tools(self):
         return self.tools_cache
@@ -125,6 +144,7 @@ def convert_mcp_tool_to_blaxel_tool(
             "tool.args": json.dumps(arguments),
             "tool.server": url,
             "tool.server_name": name,
+            "span.type": "tool.call",
         }
         with SpanManager("blaxel-tracer").create_active_span("blaxel-tool-call", span_attributes):
             logger.debug(f"Calling tool {tool.name} with arguments {arguments}")
@@ -163,22 +183,37 @@ class BlTools:
         self.timeout = timeout
         self.timeout_enabled = timeout_enabled

+    def _internal_url(self, name: str):
+        """Get the internal URL for the agent using a hash of workspace and agent name."""
+        hash = get_global_unique_hash(settings.workspace, "function", name)
+        return f"{settings.run_internal_protocol}://bl-{settings.env}-{hash}.{settings.run_internal_hostname}"

+    def _forced_url(self, name: str):
+        """Get the forced URL from environment variables if set."""
         env_var = name.replace("-", "_").upper()
         if env[f"BL_FUNCTION_{env_var}_URL"]:
             return env[f"BL_FUNCTION_{env_var}_URL"]
+        return None
+
+    def _external_url(self, name: str):
+        return f"{settings.run_url}/{settings.workspace}/functions/{name}"

-    def _fallback_url(self, name: str)
+    def _fallback_url(self, name: str):
         if self._external_url(name) != self._url(name):
             return self._external_url(name)
         return None

+    def _url(self, name: str):
+        logger.debug(f"Getting URL for {name}")
+        if self._forced_url(name):
+            logger.debug(f"Forced URL found for {name}: {self._forced_url(name)}")
+            return self._forced_url(name)
+        if settings.run_internal_hostname:
+            logger.debug(f"Internal hostname found for {name}: {self._internal_url(name)}")
+            return self._internal_url(name)
+        logger.debug(f"No URL found for {name}, using external URL")
+        return self._external_url(name)
+
     def get_tools(self) -> list[Tool]:
         """Get a list of all tools from all connected servers."""
         all_tools: list[Tool] = []
@@ -240,8 +275,13 @@ class BlTools:
         except Exception as e:
             if not self._fallback_url(name):
                 raise e
+            logger.warning(f"Error connecting to {name}: {e}\n{traceback.format_exc()}")
             url = self._fallback_url(name)
+            try:
+                await self.connect_with_url(name, url)
+            except Exception as e:
+                logger.error(f"Error connecting to {name} with fallback URL: {e}\n{traceback.format_exc()}")
+                raise e

     async def connect_with_url(
         self, name: str, url: str
@@ -255,7 +295,8 @@ class BlTools:
         logger.debug(f"Initializing session and loading tools from {url}")

         if not toolPersistances.get(name):
+            logger.debug(f"Creating new persistent websocket for {name}")
+            toolPersistances[name] = PersistentWebSocket(url, name, timeout=self.timeout, timeout_enabled=self.timeout_enabled)
         await toolPersistances[name].list_tools()
         logger.debug(f"Loaded {len(toolPersistances[name].get_tools())} tools from {url}")
         return toolPersistances[name].with_metas(self.metas)