blaxel 0.2.0rc6__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/core/sandbox/__init__.py +2 -0
- blaxel/core/sandbox/client/models/directory.py +37 -44
- blaxel/core/sandbox/client/models/error_response.py +9 -9
- blaxel/core/sandbox/client/models/file.py +33 -39
- blaxel/core/sandbox/client/models/file_with_content.py +37 -44
- blaxel/core/sandbox/client/models/process_logs.py +17 -19
- blaxel/core/sandbox/client/models/process_response.py +46 -51
- blaxel/core/sandbox/client/models/subdirectory.py +13 -14
- blaxel/core/sandbox/client/models/success_response.py +13 -14
- blaxel/core/sandbox/process.py +43 -5
- blaxel/core/sandbox/sandbox.py +72 -16
- blaxel/core/sandbox/types.py +21 -0
- blaxel/llamaindex/custom/cohere.py +13 -38
- blaxel/telemetry/instrumentation/blaxel_core.py +3 -2
- blaxel/telemetry/instrumentation/blaxel_langgraph.py +1 -1
- blaxel/telemetry/instrumentation/blaxel_langgraph_gemini.py +1 -1
- blaxel/telemetry/instrumentation/blaxel_llamaindex.py +1 -1
- blaxel/telemetry/instrumentation/map.py +8 -8
- {blaxel-0.2.0rc6.dist-info → blaxel-0.2.1.dist-info}/METADATA +1 -1
- {blaxel-0.2.0rc6.dist-info → blaxel-0.2.1.dist-info}/RECORD +22 -22
- {blaxel-0.2.0rc6.dist-info → blaxel-0.2.1.dist-info}/WHEEL +0 -0
- {blaxel-0.2.0rc6.dist-info → blaxel-0.2.1.dist-info}/licenses/LICENSE +0 -0
blaxel/core/sandbox/__init__.py
CHANGED
@@ -8,6 +8,7 @@ from .sandbox import (
 from .types import (
     CopyResponse,
     SandboxConfiguration,
+    SandboxCreateConfiguration,
     SandboxFilesystemFile,
     SessionCreateOptions,
     SessionWithToken,
@@ -19,6 +20,7 @@ __all__ = [
     "SessionCreateOptions",
     "SessionWithToken",
     "SandboxConfiguration",
+    "SandboxCreateConfiguration",
     "WatchEvent",
     "SandboxFilesystemFile",
     "CopyResponse",
blaxel/core/sandbox/client/models/directory.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import TYPE_CHECKING, Any, TypeVar
+from typing import TYPE_CHECKING, Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..types import UNSET, Unset
-
 if TYPE_CHECKING:
     from ..models.file import File
     from ..models.subdirectory import Subdirectory
@@ -17,54 +15,49 @@ T = TypeVar("T", bound="Directory")
 class Directory:
     """
     Attributes:
-        files (
-        name (
-        path (
-        subdirectories (
+        files (list['File']):
+        name (str):
+        path (str):
+        subdirectories (list['Subdirectory']): @name Subdirectories
     """
 
-    files:
-    name:
-    path:
-    subdirectories:
+    files: list["File"]
+    name: str
+    path: str
+    subdirectories: list["Subdirectory"]
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
-        files
-
-
-
-
-
-
-            files_item = files_item_data.to_dict()
-            files.append(files_item)
+        files = []
+        for files_item_data in self.files:
+            if type(files_item_data) is dict:
+                files_item = files_item_data
+            else:
+                files_item = files_item_data.to_dict()
+            files.append(files_item)
 
         name = self.name
 
         path = self.path
 
-        subdirectories
-
-
-
-
-
-
-            subdirectories_item = subdirectories_item_data.to_dict()
-            subdirectories.append(subdirectories_item)
+        subdirectories = []
+        for subdirectories_item_data in self.subdirectories:
+            if type(subdirectories_item_data) is dict:
+                subdirectories_item = subdirectories_item_data
+            else:
+                subdirectories_item = subdirectories_item_data.to_dict()
+            subdirectories.append(subdirectories_item)
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
-
-
-
-
-
-        field_dict["subdirectories"] = subdirectories
+        field_dict.update(
+            {
+                "files": files,
+                "name": name,
+                "path": path,
+                "subdirectories": subdirectories,
+            }
+        )
 
         return field_dict
 
@@ -77,19 +70,19 @@ class Directory:
             return None
         d = src_dict.copy()
        files = []
-        _files = d.pop("files"
-        for files_item_data in _files
+        _files = d.pop("files")
+        for files_item_data in _files:
             files_item = File.from_dict(files_item_data)
 
             files.append(files_item)
 
-        name = d.pop("name"
+        name = d.pop("name")
 
-        path = d.pop("path"
+        path = d.pop("path")
 
         subdirectories = []
-        _subdirectories = d.pop("subdirectories"
-        for subdirectories_item_data in _subdirectories
+        _subdirectories = d.pop("subdirectories")
+        for subdirectories_item_data in _subdirectories:
             subdirectories_item = Subdirectory.from_dict(subdirectories_item_data)
 
             subdirectories.append(subdirectories_item)
blaxel/core/sandbox/client/models/error_response.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import Any, TypeVar
+from typing import Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..types import UNSET, Unset
-
 T = TypeVar("T", bound="ErrorResponse")
 
 
@@ -12,10 +10,10 @@ T = TypeVar("T", bound="ErrorResponse")
 class ErrorResponse:
     """
     Attributes:
-        error (
+        error (str): Example: Error message.
     """
 
-    error:
+    error: str
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -23,9 +21,11 @@ class ErrorResponse:
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
+        field_dict.update(
+            {
+                "error": error,
+            }
+        )
 
         return field_dict
 
@@ -34,7 +34,7 @@ class ErrorResponse:
         if not src_dict:
             return None
         d = src_dict.copy()
-        error = d.pop("error"
+        error = d.pop("error")
 
         error_response = cls(
             error=error,
blaxel/core/sandbox/client/models/file.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import Any, TypeVar
+from typing import Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..types import UNSET, Unset
-
 T = TypeVar("T", bound="File")
 
 
@@ -12,22 +10,22 @@ T = TypeVar("T", bound="File")
 class File:
     """
     Attributes:
-        group (
-        last_modified (
-        name (
-        owner (
-        path (
-        permissions (
-        size (
+        group (str):
+        last_modified (str):
+        name (str):
+        owner (str):
+        path (str):
+        permissions (str):
+        size (int):
     """
 
-    group:
-    last_modified:
-    name:
-    owner:
-    path:
-    permissions:
-    size:
+    group: str
+    last_modified: str
+    name: str
+    owner: str
+    path: str
+    permissions: str
+    size: int
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -47,21 +45,17 @@ class File:
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
-
-
-
-
-
-
-
-
-        if permissions is not UNSET:
-            field_dict["permissions"] = permissions
-        if size is not UNSET:
-            field_dict["size"] = size
+        field_dict.update(
+            {
+                "group": group,
+                "lastModified": last_modified,
+                "name": name,
+                "owner": owner,
+                "path": path,
+                "permissions": permissions,
+                "size": size,
+            }
+        )
 
         return field_dict
 
@@ -70,19 +64,19 @@ class File:
         if not src_dict:
             return None
         d = src_dict.copy()
-        group = d.pop("group"
+        group = d.pop("group")
 
-        last_modified = d.pop("lastModified"
+        last_modified = d.pop("lastModified")
 
-        name = d.pop("name"
+        name = d.pop("name")
 
-        owner = d.pop("owner"
+        owner = d.pop("owner")
 
-        path = d.pop("path"
+        path = d.pop("path")
 
-        permissions = d.pop("permissions"
+        permissions = d.pop("permissions")
 
-        size = d.pop("size"
+        size = d.pop("size")
 
         file = cls(
             group=group,
blaxel/core/sandbox/client/models/file_with_content.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import Any, TypeVar
+from typing import Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..types import UNSET, Unset
-
 T = TypeVar("T", bound="FileWithContent")
 
 
@@ -12,24 +10,24 @@ T = TypeVar("T", bound="FileWithContent")
 class FileWithContent:
     """
     Attributes:
-        content (
-        group (
-        last_modified (
-        name (
-        owner (
-        path (
-        permissions (
-        size (
+        content (str):
+        group (str):
+        last_modified (str):
+        name (str):
+        owner (str):
+        path (str):
+        permissions (str):
+        size (int):
     """
 
-    content:
-    group:
-    last_modified:
-    name:
-    owner:
-    path:
-    permissions:
-    size:
+    content: str
+    group: str
+    last_modified: str
+    name: str
+    owner: str
+    path: str
+    permissions: str
+    size: int
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -51,23 +49,18 @@ class FileWithContent:
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
-
-
-
-
-
-
-
-
-
-        field_dict["path"] = path
-        if permissions is not UNSET:
-            field_dict["permissions"] = permissions
-        if size is not UNSET:
-            field_dict["size"] = size
+        field_dict.update(
+            {
+                "content": content,
+                "group": group,
+                "lastModified": last_modified,
+                "name": name,
+                "owner": owner,
+                "path": path,
+                "permissions": permissions,
+                "size": size,
+            }
+        )
 
         return field_dict
 
@@ -76,21 +69,21 @@ class FileWithContent:
         if not src_dict:
             return None
         d = src_dict.copy()
-        content = d.pop("content"
+        content = d.pop("content")
 
-        group = d.pop("group"
+        group = d.pop("group")
 
-        last_modified = d.pop("lastModified"
+        last_modified = d.pop("lastModified")
 
-        name = d.pop("name"
+        name = d.pop("name")
 
-        owner = d.pop("owner"
+        owner = d.pop("owner")
 
-        path = d.pop("path"
+        path = d.pop("path")
 
-        permissions = d.pop("permissions"
+        permissions = d.pop("permissions")
 
-        size = d.pop("size"
+        size = d.pop("size")
 
         file_with_content = cls(
             content=content,
blaxel/core/sandbox/client/models/process_logs.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import Any, TypeVar
+from typing import Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..types import UNSET, Unset
-
 T = TypeVar("T", bound="ProcessLogs")
 
 
@@ -12,14 +10,14 @@ T = TypeVar("T", bound="ProcessLogs")
 class ProcessLogs:
     """
     Attributes:
-        logs (
-        stderr (
-        stdout (
+        logs (str): Example: logs output.
+        stderr (str): Example: stderr output.
+        stdout (str): Example: stdout output.
     """
 
-    logs:
-    stderr:
-    stdout:
+    logs: str
+    stderr: str
+    stdout: str
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -31,13 +29,13 @@ class ProcessLogs:
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
-
-
-
-
+        field_dict.update(
+            {
+                "logs": logs,
+                "stderr": stderr,
+                "stdout": stdout,
+            }
+        )
 
         return field_dict
 
@@ -46,11 +44,11 @@ class ProcessLogs:
         if not src_dict:
             return None
         d = src_dict.copy()
-        logs = d.pop("logs"
+        logs = d.pop("logs")
 
-        stderr = d.pop("stderr"
+        stderr = d.pop("stderr")
 
-        stdout = d.pop("stdout"
+        stdout = d.pop("stdout")
 
         process_logs = cls(
             logs=logs,
blaxel/core/sandbox/client/models/process_response.py
CHANGED
@@ -1,10 +1,9 @@
-from typing import Any, TypeVar
+from typing import Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
 from ..models.process_response_status import ProcessResponseStatus
-from ..types import UNSET, Unset
 
 T = TypeVar("T", bound="ProcessResponse")
 
@@ -13,24 +12,26 @@ T = TypeVar("T", bound="ProcessResponse")
 class ProcessResponse:
     """
     Attributes:
-        command (
-        completed_at (
-        exit_code (
-
-
-
-
-
+        command (str): Example: ls -la.
+        completed_at (str): Example: Wed, 01 Jan 2023 12:01:00 GMT.
+        exit_code (int):
+        logs (str): Example: logs output.
+        name (str): Example: my-process.
+        pid (str): Example: 1234.
+        started_at (str): Example: Wed, 01 Jan 2023 12:00:00 GMT.
+        status (ProcessResponseStatus): Example: running.
+        working_dir (str): Example: /home/user.
     """
 
-    command:
-    completed_at:
-    exit_code:
-
-
-
-
-
+    command: str
+    completed_at: str
+    exit_code: int
+    logs: str
+    name: str
+    pid: str
+    started_at: str
+    status: ProcessResponseStatus
+    working_dir: str
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -40,37 +41,33 @@ class ProcessResponse:
 
         exit_code = self.exit_code
 
+        logs = self.logs
+
         name = self.name
 
         pid = self.pid
 
         started_at = self.started_at
 
-        status
-        if not isinstance(self.status, Unset):
-            status = self.status.value
+        status = self.status.value
 
         working_dir = self.working_dir
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
-
-
-
-
-
-
-
-
-
-
-        if status is not UNSET:
-            field_dict["status"] = status
-        if working_dir is not UNSET:
-            field_dict["workingDir"] = working_dir
+        field_dict.update(
+            {
+                "command": command,
+                "completedAt": completed_at,
+                "exitCode": exit_code,
+                "logs": logs,
+                "name": name,
+                "pid": pid,
+                "startedAt": started_at,
+                "status": status,
+                "workingDir": working_dir,
+            }
+        )
 
         return field_dict
 
@@ -79,31 +76,29 @@ class ProcessResponse:
         if not src_dict:
             return None
         d = src_dict.copy()
-        command = d.pop("command"
+        command = d.pop("command")
+
+        completed_at = d.pop("completedAt")
 
-
+        exit_code = d.pop("exitCode")
 
-
+        logs = d.pop("logs")
 
-        name = d.pop("name"
+        name = d.pop("name")
 
-        pid = d.pop("pid"
+        pid = d.pop("pid")
 
-        started_at = d.pop("startedAt"
+        started_at = d.pop("startedAt")
 
-
-        status: Union[Unset, ProcessResponseStatus]
-        if isinstance(_status, Unset):
-            status = UNSET
-        else:
-            status = ProcessResponseStatus(_status)
+        status = ProcessResponseStatus(d.pop("status"))
 
-        working_dir = d.pop("workingDir"
+        working_dir = d.pop("workingDir")
 
         process_response = cls(
             command=command,
             completed_at=completed_at,
             exit_code=exit_code,
+            logs=logs,
             name=name,
             pid=pid,
             started_at=started_at,
blaxel/core/sandbox/client/models/subdirectory.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import Any, TypeVar
+from typing import Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..types import UNSET, Unset
-
 T = TypeVar("T", bound="Subdirectory")
 
 
@@ -12,12 +10,12 @@ T = TypeVar("T", bound="Subdirectory")
 class Subdirectory:
     """
     Attributes:
-        name (
-        path (
+        name (str):
+        path (str):
     """
 
-    name:
-    path:
+    name: str
+    path: str
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -27,11 +25,12 @@ class Subdirectory:
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
-
-
+        field_dict.update(
+            {
+                "name": name,
+                "path": path,
+            }
+        )
 
         return field_dict
 
@@ -40,9 +39,9 @@ class Subdirectory:
         if not src_dict:
             return None
         d = src_dict.copy()
-        name = d.pop("name"
+        name = d.pop("name")
 
-        path = d.pop("path"
+        path = d.pop("path")
 
         subdirectory = cls(
             name=name,
blaxel/core/sandbox/client/models/success_response.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import Any, TypeVar
+from typing import Any, TypeVar
 
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..types import UNSET, Unset
-
 T = TypeVar("T", bound="SuccessResponse")
 
 
@@ -12,12 +10,12 @@ T = TypeVar("T", bound="SuccessResponse")
 class SuccessResponse:
     """
     Attributes:
-        message (
-        path (
+        message (str): Example: File created successfully.
+        path (str): Example: /path/to/file.
     """
 
-    message:
-    path:
+    message: str
+    path: str
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -27,11 +25,12 @@ class SuccessResponse:
 
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
-        field_dict.update(
-
-
-
-
+        field_dict.update(
+            {
+                "message": message,
+                "path": path,
+            }
+        )
 
         return field_dict
 
@@ -40,9 +39,9 @@ class SuccessResponse:
         if not src_dict:
             return None
         d = src_dict.copy()
-        message = d.pop("message"
+        message = d.pop("message")
 
-        path = d.pop("path"
+        path = d.pop("path")
 
         success_response = cls(
             message=message,
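Across these regenerated sandbox client models the fields are now plain required attributes, and from_dict pops each key without a default, so a missing key raises KeyError instead of producing UNSET. A minimal round-trip sketch of the new behaviour, assuming the generated models package re-exports SuccessResponse as openapi-python-client normally does (sample values are illustrative):

    from blaxel.core.sandbox.client.models import SuccessResponse

    # Required fields round-trip through from_dict()/to_dict().
    data = {"message": "File created successfully", "path": "/path/to/file"}
    resp = SuccessResponse.from_dict(data)
    assert resp.to_dict() == data

    # In 0.2.1 a missing required key raises KeyError rather than returning UNSET.
    try:
        SuccessResponse.from_dict({"message": "only a message"})
    except KeyError as exc:
        print(f"missing required field: {exc}")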
blaxel/core/sandbox/process.py
CHANGED
@@ -75,16 +75,54 @@ class SandboxProcess(SandboxAction):
 
         return {"close": close}
 
-    async def exec(
+    async def exec(
+        self,
+        process: Union[ProcessRequest, Dict[str, Any]],
+        on_log: Optional[Callable[[str], None]] = None,
+    ) -> ProcessResponse:
         if isinstance(process, dict):
             process = ProcessRequest.from_dict(process)
 
+        # Store original wait_for_completion setting
+        should_wait_for_completion = process.wait_for_completion
+
+        # Always start process without wait_for_completion to avoid server-side blocking
+        if should_wait_for_completion and on_log is not None:
+            process.wait_for_completion = False
+
         async with self.get_client() as client_instance:
             response = await client_instance.post("/process", json=process.to_dict())
-
-
-
-
+            # Parse JSON response only once, with better error handling
+            response_data = None
+            if response.content:
+                try:
+                    response_data = response.json()
+                except Exception:
+                    # If JSON parsing fails, check the response first
+                    self.handle_response_error(response, None, None)
+                    raise
+
+            self.handle_response_error(response, response_data, None)
+            result = ProcessResponse.from_dict(response_data)
+
+        # Handle wait_for_completion with parallel log streaming
+        if should_wait_for_completion:
+            stream_control = None
+            if on_log is not None:
+                stream_control = self.stream_logs(result.pid, {"on_log": on_log})
+            try:
+                # Wait for process completion
+                result = await self.wait(result.pid, interval=50)
+            finally:
+                # Clean up log streaming
+                if stream_control:
+                    stream_control["close"]()
+        else:
+            # For non-blocking execution, set up log streaming immediately if requested
+            if on_log is not None:
+                stream = self.stream_logs(result.pid, {"on_log": on_log})
+                result.additional_properties["close"] = stream["close"]
+        return result
 
     async def wait(
         self, identifier: str, max_wait: int = 60000, interval: int = 1000
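The reworked exec always starts the process without server-side blocking; when the request asks for wait_for_completion together with an on_log callback it streams logs in parallel and then polls wait() until the process finishes, otherwise it attaches the log stream and returns immediately. A usage sketch, assuming SandboxInstance exposes the process helper as sandbox.process and that the generated ProcessRequest accepts the camelCase waitForCompletion key (the sandbox name and command are illustrative):

    import asyncio

    from blaxel.core.sandbox import SandboxInstance

    async def run_command() -> None:
        sandbox = await SandboxInstance.get("my-sandbox")  # hypothetical existing sandbox
        result = await sandbox.process.exec(
            {"command": "ls -la", "waitForCompletion": True},
            on_log=lambda line: print("log:", line),  # streamed while the process runs
        )
        print(result.status, result.exit_code)

    asyncio.run(run_command())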
blaxel/core/sandbox/sandbox.py
CHANGED
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 import time
+import uuid
 from typing import Any, Dict, List, Union
 
 from ..client.api.compute.create_sandbox import asyncio as create_sandbox
@@ -8,13 +9,13 @@ from ..client.api.compute.delete_sandbox import asyncio as delete_sandbox
 from ..client.api.compute.get_sandbox import asyncio as get_sandbox
 from ..client.api.compute.list_sandboxes import asyncio as list_sandboxes
 from ..client.client import client
-from ..client.models import Metadata, Sandbox
+from ..client.models import Metadata, Runtime, Sandbox, SandboxSpec
 from .filesystem import SandboxFileSystem
 from .network import SandboxNetwork
 from .preview import SandboxPreviews
 from .process import SandboxProcess
 from .session import SandboxSessions
-from .types import SandboxConfiguration, SessionWithToken
+from .types import SandboxConfiguration, SandboxCreateConfiguration, SessionWithToken
 
 logger = logging.getLogger(__name__)
 
@@ -67,15 +68,54 @@ class SandboxInstance:
         raise Exception("Sandbox did not deploy in time")
 
     @classmethod
-    async def create(
-
-
-
-
-
-
-
-
+    async def create(
+        cls, sandbox: Union[Sandbox, SandboxCreateConfiguration, Dict[str, Any], None] = None
+    ) -> "SandboxInstance":
+        # Generate default values
+        default_name = f"sandbox-{uuid.uuid4().hex[:8]}"
+        default_image = "blaxel/prod-base:latest"
+        default_memory = 4096
+
+        # Handle SandboxCreateConfiguration or simple dict with name/image/memory keys
+        if sandbox is None or isinstance(sandbox, (SandboxCreateConfiguration, dict)) and (
+            not isinstance(sandbox, Sandbox) and (
+                sandbox is None or
+                'name' in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__) or
+                'image' in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__) or
+                'memory' in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__)
+            )
+        ):
+            if sandbox is None:
+                sandbox = SandboxCreateConfiguration()
+            elif isinstance(sandbox, dict) and not isinstance(sandbox, Sandbox):
+                sandbox = SandboxCreateConfiguration.from_dict(sandbox)
+
+            # Set defaults if not provided
+            name = sandbox.name or default_name
+            image = sandbox.image or default_image
+            memory = sandbox.memory or default_memory
+
+            # Create full Sandbox object
+            sandbox = Sandbox(
+                metadata=Metadata(name=name),
+                spec=SandboxSpec(runtime=Runtime(image=image, memory=memory, generation="mk3"))
+            )
+        else:
+            # Handle existing Sandbox object or dict conversion
+            if isinstance(sandbox, dict):
+                sandbox = Sandbox.from_dict(sandbox)
+
+            # Set defaults for missing fields
+            if not sandbox.metadata:
+                sandbox.metadata = Metadata(name=uuid.uuid4().hex.replace('-', ''))
+            if not sandbox.spec:
+                sandbox.spec = SandboxSpec(runtime=Runtime(image=default_image))
+            if not sandbox.spec.runtime:
+                sandbox.spec.runtime = Runtime(image=default_image, memory=default_memory)
+
+            sandbox.spec.runtime.image = sandbox.spec.runtime.image or default_image
+            sandbox.spec.runtime.memory = sandbox.spec.runtime.memory or default_memory
+            sandbox.spec.runtime.generation = sandbox.spec.runtime.generation or "mk3"
 
         response = await create_sandbox(
             client=client,
@@ -106,14 +146,30 @@ class SandboxInstance:
 
     @classmethod
     async def create_if_not_exists(
-        cls, sandbox: Union[Sandbox, Dict[str, Any]]
+        cls, sandbox: Union[Sandbox, SandboxCreateConfiguration, Dict[str, Any]]
    ) -> "SandboxInstance":
         """Create a sandbox if it doesn't exist, otherwise return existing."""
-        if isinstance(sandbox, dict):
-            sandbox = Sandbox.from_dict(sandbox)
-
         try:
-
+            # Extract name from different configuration types
+            if isinstance(sandbox, SandboxCreateConfiguration):
+                name = sandbox.name
+            elif isinstance(sandbox, dict):
+                if 'name' in sandbox:
+                    name = sandbox['name']
+                elif 'metadata' in sandbox and isinstance(sandbox['metadata'], dict):
+                    name = sandbox['metadata'].get('name')
+                else:
+                    # If no name provided, we can't check if it exists, so create new
+                    return await cls.create(sandbox)
+            elif isinstance(sandbox, Sandbox):
+                name = sandbox.metadata.name if sandbox.metadata else None
+            else:
+                name = None
+
+            if not name:
+                raise ValueError("Sandbox name is required")
+
+            sandbox_instance = await cls.get(name)
             return sandbox_instance
         except Exception as e:
             # Check if it's a 404 error (sandbox not found)
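create now accepts a SandboxCreateConfiguration, a plain dict with name/image/memory keys, a full Sandbox spec, or nothing at all, filling in a generated name, the blaxel/prod-base:latest image, 4096 MB of memory and the mk3 generation. create_if_not_exists resolves the sandbox name from whichever form it is given before falling back to creation. A sketch of the call styles this enables (names and sizes are illustrative):

    from blaxel.core.sandbox import SandboxCreateConfiguration, SandboxInstance

    async def provision() -> "SandboxInstance":
        # Everything defaulted: sandbox-<random>, blaxel/prod-base:latest, 4096 MB, mk3.
        await SandboxInstance.create()

        # Simplified configuration object, or the equivalent plain dict.
        await SandboxInstance.create(SandboxCreateConfiguration(name="demo-sandbox", memory=2048))
        return await SandboxInstance.create_if_not_exists({"name": "demo-sandbox"})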
blaxel/core/sandbox/types.py
CHANGED
@@ -101,3 +101,24 @@ class CopyResponse:
         self.message = message
         self.source = source
         self.destination = destination
+
+
+class SandboxCreateConfiguration:
+    """Simplified configuration for creating sandboxes with default values."""
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        image: Optional[str] = None,
+        memory: Optional[int] = None,
+    ):
+        self.name = name
+        self.image = image
+        self.memory = memory
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "SandboxCreateConfiguration":
+        return cls(
+            name=data.get("name"),
+            image=data.get("image"),
+            memory=data.get("memory"),
+        )
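The helper itself is deliberately small: three optional fields plus a from_dict constructor, so dict-based and typed callers share one path into create(). For example (values are illustrative):

    from blaxel.core.sandbox import SandboxCreateConfiguration

    cfg = SandboxCreateConfiguration.from_dict({"name": "ci-runner", "memory": 2048})
    assert cfg.name == "ci-runner"
    assert cfg.memory == 2048
    assert cfg.image is None  # create() falls back to blaxel/prod-base:latest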
blaxel/llamaindex/custom/cohere.py
CHANGED
@@ -47,7 +47,6 @@ class Cohere(FunctionCallingLLM):
 
         llm = Cohere(model="command", api_key=api_key)
         resp = llm.complete("Paul Graham is ")
-        print(resp)
         ```
     """
 
@@ -55,9 +54,7 @@ class Cohere(FunctionCallingLLM):
     temperature: Optional[float] = Field(
         description="The temperature to use for sampling.", default=None
     )
-    max_retries: int = Field(
-        default=10, description="The maximum number of API retries."
-    )
+    max_retries: int = Field(default=10, description="The maximum number of API retries.")
     additional_kwargs: Dict[str, Any] = Field(
         default_factory=dict, description="Additional kwargs for the Cohere API."
     )
@@ -167,9 +164,7 @@ class Cohere(FunctionCallingLLM):
         error_on_no_tool_call: bool = False,
     ) -> List[ToolSelection]:
         """Predict and call the tool."""
-        tool_calls: List[ToolCall] = (
-            response.message.additional_kwargs.get("tool_calls", []) or []
-        )
+        tool_calls: List[ToolCall] = response.message.additional_kwargs.get("tool_calls", []) or []
 
         if len(tool_calls) < 1 and error_on_no_tool_call:
             raise ValueError(
@@ -218,10 +213,8 @@ class Cohere(FunctionCallingLLM):
 
         messages, documents = remove_documents_from_messages(messages)
 
-        tool_results: Optional[
-
-        ] = _messages_to_cohere_tool_results_curr_chat_turn(messages) or kwargs.get(
-            "tool_results"
+        tool_results: Optional[List[Dict[str, Any]]] = (
+            _messages_to_cohere_tool_results_curr_chat_turn(messages) or kwargs.get("tool_results")
         )
         if not tool_results:
             tool_results = None
@@ -235,12 +228,8 @@ class Cohere(FunctionCallingLLM):
             if message.role == MessageRole.TOOL:
                 temp_tool_results += _message_to_cohere_tool_results(messages, i)
 
-                if (i == len(messages) - 1) or messages[
-
-                ].role != MessageRole.TOOL:
-                    cohere_message = _get_message_cohere_format(
-                        message, temp_tool_results
-                    )
+                if (i == len(messages) - 1) or messages[i + 1].role != MessageRole.TOOL:
+                    cohere_message = _get_message_cohere_format(message, temp_tool_results)
                     chat_history.append(cohere_message)
                     temp_tool_results = []
             else:
@@ -262,12 +251,8 @@ class Cohere(FunctionCallingLLM):
             if message.role == MessageRole.TOOL:
                 temp_tool_results += _message_to_cohere_tool_results(messages, i)
 
-                if (i == len(messages) - 1) or messages[
-
-                ].role != MessageRole.TOOL:
-                    cohere_message = _get_message_cohere_format(
-                        message, temp_tool_results
-                    )
+                if (i == len(messages) - 1) or messages[i + 1].role != MessageRole.TOOL:
+                    cohere_message = _get_message_cohere_format(message, temp_tool_results)
                     chat_history.append(cohere_message)
                     temp_tool_results = []
             else:
@@ -327,9 +312,7 @@ class Cohere(FunctionCallingLLM):
         )
 
     @llm_completion_callback()
-    def complete(
-        self, prompt: str, formatted: bool = False, **kwargs: Any
-    ) -> CompletionResponse:
+    def complete(self, prompt: str, formatted: bool = False, **kwargs: Any) -> CompletionResponse:
         all_kwargs = self._get_all_kwargs(**kwargs)
         if "stream" in all_kwargs:
             warnings.warn(
@@ -351,9 +334,7 @@ class Cohere(FunctionCallingLLM):
         )
 
     @llm_chat_callback()
-    def stream_chat(
-        self, messages: Sequence[ChatMessage], **kwargs: Any
-    ) -> ChatResponseGen:
+    def stream_chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponseGen:
         all_kwargs = self._get_all_kwargs(**kwargs)
         all_kwargs["stream"] = True
         if all_kwargs["model"] not in CHAT_MODELS:
@@ -402,16 +383,12 @@ class Cohere(FunctionCallingLLM):
             for r in response:
                 content_delta = r.text
                 content += content_delta
-                yield CompletionResponse(
-                    text=content, delta=content_delta, raw=r._asdict()
-                )
+                yield CompletionResponse(text=content, delta=content_delta, raw=r._asdict())
 
         return gen()
 
     @llm_chat_callback()
-    async def achat(
-        self, messages: Sequence[ChatMessage], **kwargs: Any
-    ) -> ChatResponse:
+    async def achat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
         all_kwargs = self._get_all_kwargs(**kwargs)
         if all_kwargs["model"] not in CHAT_MODELS:
             raise ValueError(f"{all_kwargs['model']} not supported for chat")
@@ -535,8 +512,6 @@ class Cohere(FunctionCallingLLM):
             async for r in response:
                 content_delta = r.text
                 content += content_delta
-                yield CompletionResponse(
-                    text=content, delta=content_delta, raw=r._asdict()
-                )
+                yield CompletionResponse(text=content, delta=content_delta, raw=r._asdict())
 
         return gen()
blaxel/telemetry/instrumentation/blaxel_core.py
CHANGED
@@ -10,9 +10,10 @@ from blaxel.telemetry.span import SpanManager
 
 logger = logging.getLogger(__name__)
 
+
 class BlaxelCoreInstrumentor(BaseInstrumentor):
     def instrumentation_dependencies(self):
-        return [
+        return []
 
     def _instrument(self, **kwargs):
         tracer_provider = kwargs.get("tracer_provider")
@@ -121,4 +122,4 @@ class BlaxelCoreInstrumentor(BaseInstrumentor):
         Tool.sync_coroutine = traced_sync_coroutine
         return Tool
 
-    blaxel.core.tools.convert_mcp_tool_to_blaxel_tool = traced_convert_mcp_tool_to_blaxel_tool
+    blaxel.core.tools.convert_mcp_tool_to_blaxel_tool = traced_convert_mcp_tool_to_blaxel_tool
blaxel/telemetry/instrumentation/blaxel_langgraph.py
CHANGED
@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
 
 class BlaxelLanggraphInstrumentor(BaseInstrumentor):
     def instrumentation_dependencies(self):
-        return ["
+        return ["langgraph"]
 
     def _instrument(self, **kwargs):
         tracer_provider = kwargs.get("tracer_provider")
blaxel/telemetry/instrumentation/blaxel_langgraph_gemini.py
CHANGED
@@ -316,7 +316,7 @@ class BlaxelLanggraphGeminiInstrumentor(BaseInstrumentor):
         Config.exception_logger = exception_logger
 
     def instrumentation_dependencies(self) -> Collection[str]:
-        return ["
+        return ["langgraph"]
 
     def _instrument(self, **kwargs):
         tracer_provider = kwargs.get("tracer_provider")
blaxel/telemetry/instrumentation/blaxel_llamaindex.py
CHANGED
@@ -30,7 +30,7 @@ TO_INSTRUMENT = [
 
 class BlaxelLlamaIndexInstrumentor(BaseInstrumentor):
     def instrumentation_dependencies(self):
-        return ["
+        return ["llama_index"]
 
     def _instrument(self, **kwargs):
         tracer_provider = kwargs.get("tracer_provider")
blaxel/telemetry/instrumentation/map.py
CHANGED
@@ -35,28 +35,28 @@ MAPPINGS: Dict[str, InstrumentationMapping] = {
         required_packages=["google-generativeai"],
         ignore_if_packages=[],
     ),
-    "
+    "blaxel_core": InstrumentationMapping(
         module_path="blaxel.telemetry.instrumentation.blaxel_core",
         class_name="BlaxelCoreInstrumentor",
-        required_packages=[
+        required_packages=[],
         ignore_if_packages=[],
     ),
-    "
+    "blaxel_langgraph": InstrumentationMapping(
         module_path="blaxel.telemetry.instrumentation.blaxel_langgraph",
         class_name="BlaxelLanggraphInstrumentor",
-        required_packages=["
+        required_packages=["langgraph"],
         ignore_if_packages=[],
     ),
-    "
+    "blaxel_langgraph_gemini": InstrumentationMapping(
         module_path="blaxel.telemetry.instrumentation.blaxel_langgraph_gemini",
         class_name="BlaxelLanggraphGeminiInstrumentor",
-        required_packages=["
+        required_packages=["langgraph"],
         ignore_if_packages=[],
     ),
-    "
+    "blaxel_llamaindex": InstrumentationMapping(
         module_path="blaxel.telemetry.instrumentation.blaxel_llamaindex",
         class_name="BlaxelLlamaIndexInstrumentor",
-        required_packages=["
+        required_packages=["llama_index"],
         ignore_if_packages=[],
     ),
 }
{blaxel-0.2.0rc6.dist-info → blaxel-0.2.1.dist-info}/RECORD
CHANGED
@@ -304,15 +304,15 @@ blaxel/core/mcp/__init__.py,sha256=5VjkiQFb1QWW5QKRgwPHARlxZJ9Xqaz0diJTpM8LLF0,1
 blaxel/core/mcp/client.py,sha256=aK3wSnsO8DmT1BZqw4eiCMF71Jwvni6Qga0DhPP806Y,5437
 blaxel/core/mcp/server.py,sha256=tXySGZKgK3IllYOzYOecp58BixKBkmAIvQp_4nSM_Ww,5919
 blaxel/core/models/__init__.py,sha256=HbRDsMnUFHkPC-MMkzPXh4mUqkVjqO6p3j7m00N_XSo,1722
-blaxel/core/sandbox/__init__.py,sha256=
+blaxel/core/sandbox/__init__.py,sha256=oF3sX5MbwSqfwhOtF5ODYWwapHffbkp2UI78jPBn78U,617
 blaxel/core/sandbox/action.py,sha256=9Zjkco7YkLzBThD3N2Hr5SpeEiqU_-Ktk8HlKpkpiAg,2802
 blaxel/core/sandbox/filesystem.py,sha256=dyIvDdlPZO0ijD6mXXX8Yl0t75VijQ6_uMz_9rJd-_4,11317
 blaxel/core/sandbox/network.py,sha256=P5jLd4AAg1zgyIK4qGWvZaDZ5BzIcxRx2ffz_JLsLMI,357
 blaxel/core/sandbox/preview.py,sha256=M6FulOxPghUBpb5fLxu1Rd3ekLeCbZ_dgt4s1X2Cneo,5354
-blaxel/core/sandbox/process.py,sha256=
-blaxel/core/sandbox/sandbox.py,sha256=
+blaxel/core/sandbox/process.py,sha256=7zEngDTs2XiNsMm9TZ4lEDUiRpS3af8dw60pEvRuHDY,8357
+blaxel/core/sandbox/sandbox.py,sha256=4sG_i_VcuGx_cKEGMY-2eUUuGmtQBYMNIToL7YDuluM,8151
 blaxel/core/sandbox/session.py,sha256=4SH1tyXcQ9UqJx4lMwxAlp7x9Te_anDrdSEG6AlNkvU,4496
-blaxel/core/sandbox/types.py,sha256=
+blaxel/core/sandbox/types.py,sha256=yaKkp5Sn0b4Ri2_ZbdEjXuA-rjo2_DfDIHYz-cueZCA,3564
 blaxel/core/sandbox/client/__init__.py,sha256=N26bD5o1jsTb48oExow6Rgivd8ylaU9jaWZfZsVilP8,128
 blaxel/core/sandbox/client/client.py,sha256=tcP8cJ4Q3dV9aB3yQ01dDXO-ekfsa3WGGFz4DQAEf8I,7079
 blaxel/core/sandbox/client/errors.py,sha256=gO8GBmKqmSNgAg-E5oT-oOyxztvp7V_6XG7OUTT15q0,546
@@ -340,21 +340,21 @@ blaxel/core/sandbox/client/api/process/get_ws_process_identifier_logs_stream.py,
 blaxel/core/sandbox/client/api/process/post_process.py,sha256=CgA1Q0vLAfeOT5nMd2ESrO8d6_0kGsrcBs1LSEVZrjo,4656
 blaxel/core/sandbox/client/models/__init__.py,sha256=eOax5cMg8O7Skpi0vApItrajTY0gx-wRHVe3YC5wBt8,1356
 blaxel/core/sandbox/client/models/delete_network_process_pid_monitor_response_200.py,sha256=9cQgKDjG98sMridjXKgeR2oZzFKcQ0G9QIojhwYFosI,1376
-blaxel/core/sandbox/client/models/directory.py,sha256=
-blaxel/core/sandbox/client/models/error_response.py,sha256=
-blaxel/core/sandbox/client/models/file.py,sha256=
+blaxel/core/sandbox/client/models/directory.py,sha256=Cufag2aJuBHXfxDnfiXL4oEiQE5GSb5FWH7Tu_hOhtc,3186
+blaxel/core/sandbox/client/models/error_response.py,sha256=4UZjR8ej0uFPl4CmkMplYebqS3U9EDodqNBltQtGvxI,1493
+blaxel/core/sandbox/client/models/file.py,sha256=MkYNSh9Rj1Gt0txfBeFEcEVg1Z-Z1Tbp31nv3C_WZaA,2446
 blaxel/core/sandbox/client/models/file_request.py,sha256=xOZSru-fae-En-_2YBgkHa_6iGbqbJsG3RLqBuajVY0,2227
-blaxel/core/sandbox/client/models/file_with_content.py,sha256=
+blaxel/core/sandbox/client/models/file_with_content.py,sha256=iDMYClAJBJK-l1Kr0yKuNoiHNG3zJVt44XLsE1ax3IA,2680
 blaxel/core/sandbox/client/models/get_network_process_pid_ports_response_200.py,sha256=x4uv80kK0GVroWO98l5sE84a6uwZ8pnUKTpGg81ipWA,1351
 blaxel/core/sandbox/client/models/port_monitor_request.py,sha256=LK7sjAK1TF1ojgG4vGytaKLVtV6-SNXxfZ3sxew1cRE,1698
 blaxel/core/sandbox/client/models/post_network_process_pid_monitor_response_200.py,sha256=Y8BvNGKU8SlzTGqhaQZk_WWIrmFpNU0LVcmLFjNvqhA,1366
-blaxel/core/sandbox/client/models/process_logs.py,sha256=
+blaxel/core/sandbox/client/models/process_logs.py,sha256=WVBgPsvUzPlBIuJzahGor-mXq7RF4qXhi5e9XcUPyOM,1847
 blaxel/core/sandbox/client/models/process_request.py,sha256=b5WoVKQtKibRw1a1xN6nv7Gj4wiAWGPL_Kj9WmjGKsE,4134
 blaxel/core/sandbox/client/models/process_request_env.py,sha256=hzVPY4mk4g4bKj_S3uXRyaXvp1V6WB2hlQM0XhB7Xgw,1294
-blaxel/core/sandbox/client/models/process_response.py,sha256=
+blaxel/core/sandbox/client/models/process_response.py,sha256=vXHK6NBnuf0KYVzpkaApC29KOKwDE-7jBxy_WszApXs,3239
 blaxel/core/sandbox/client/models/process_response_status.py,sha256=hCE1gCtheV83T6PYURG3k-rPZSauvyTFhsxLEtudgYE,246
-blaxel/core/sandbox/client/models/subdirectory.py,sha256=
-blaxel/core/sandbox/client/models/success_response.py,sha256=
+blaxel/core/sandbox/client/models/subdirectory.py,sha256=obztIgi_XjT4pPoT1-dyOX6O8k0e4ERZUTiI9qgkhJM,1593
+blaxel/core/sandbox/client/models/success_response.py,sha256=ksioQ0qNV7yhXlySY3_3EycsJ1ec7LSxBMwIq4HnfVQ,1703
 blaxel/core/tools/__init__.py,sha256=9_pgKEU5GbPfqfGN4aZTkthnaV4AVYf63jIZw44peBQ,10167
 blaxel/core/tools/common.py,sha256=JGK052v_fvwWBFYnIArlBnFFViYyFrqdDn3gdVf53EU,1332
 blaxel/core/tools/types.py,sha256=YPCGJ4vZDhqR0X2H_TWtc5chQScsC32nGTQdRKJlO8Y,707
@@ -379,7 +379,7 @@ blaxel/llamaindex/__init__.py,sha256=iZ3QbZhlwKvP91ChcqSXVkpRrzurMxJoQfKdZFzE2AA
 blaxel/llamaindex/model.py,sha256=FYUCTA80LKC4-UfESh43htoHrsoZjwwVaxXD51T0IuE,2557
 blaxel/llamaindex/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 blaxel/llamaindex/tools.py,sha256=LgrKe-o0I8_JE4DxcM2YYlHN-15LKmXuBKm08NYLV9w,907
-blaxel/llamaindex/custom/cohere.py,sha256=
+blaxel/llamaindex/custom/cohere.py,sha256=zoUv4NWwMZLZEynYTC3JuOvtinguRgtuRwS31wIm3rI,18713
 blaxel/openai/__init__.py,sha256=YkizVtcYL2m9v-z5B1EReYVu9n9V-DCxJhSB2mvqOs0,123
 blaxel/openai/model.py,sha256=lGz4zrV4sURPb6aLtroMRV5-CgfQUq15PeTgjc7QkTI,600
 blaxel/openai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -394,15 +394,15 @@ blaxel/telemetry/exporters.py,sha256=EoX3uaBVku1Rg49pSNXKFyHhgY5OV3Ih6UlqgjF5epw
 blaxel/telemetry/manager.py,sha256=3yYBxxqQKl1rCKrn0GVz9jR5jouC1nsElbAaH8tTtgA,9075
 blaxel/telemetry/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 blaxel/telemetry/span.py,sha256=b8lpWe5nqek4w9YrBLJCIYz4Z3n9Z_49H_AkqpAGOiM,3660
-blaxel/telemetry/instrumentation/blaxel_core.py,sha256=
-blaxel/telemetry/instrumentation/blaxel_langgraph.py,sha256=
-blaxel/telemetry/instrumentation/blaxel_langgraph_gemini.py,sha256=
-blaxel/telemetry/instrumentation/blaxel_llamaindex.py,sha256=
-blaxel/telemetry/instrumentation/map.py,sha256=
+blaxel/telemetry/instrumentation/blaxel_core.py,sha256=7PNseKq7bOQ4Z6a8jW7AGm0ojcdcusjoiH_5iQsR5ic,4881
+blaxel/telemetry/instrumentation/blaxel_langgraph.py,sha256=sBxt8kzRjsFepm86vaCBbGvpYI4mWPK_PB3wiLxSuOk,4235
+blaxel/telemetry/instrumentation/blaxel_langgraph_gemini.py,sha256=hQbOaaMsHte_Igq_vhPgNXYd40IwuqPB0TUP-5VAtME,11953
+blaxel/telemetry/instrumentation/blaxel_llamaindex.py,sha256=HA4YUXrRYvG5U1J8MAJnymmmj4ZCDydv-pXXhrftHOA,3101
+blaxel/telemetry/instrumentation/map.py,sha256=PCzZJj39yiYVYJrxLBNP-NW-tjjYyTijwEDeI9njuDY,2174
 blaxel/telemetry/instrumentation/utils.py,sha256=KInMYZH-mu9_wvetmf0EmgrfN3Sw8IWk2Y95v2u90_U,1901
 blaxel/telemetry/log/log.py,sha256=RvQByRjZMoP_dRaAZu8oK6DTegsHs-xV4W-UIqis6CA,2461
 blaxel/telemetry/log/logger.py,sha256=NPAS3g82ryROjvc_DEZaTIfrcehoLEZoP-JkLxADxc0,4113
-blaxel-0.2.
-blaxel-0.2.
-blaxel-0.2.
-blaxel-0.2.
+blaxel-0.2.1.dist-info/METADATA,sha256=Fz0hc9Jnt47pb6WAgAlNHECYHluqJoPtRefCqjr6tdQ,9875
+blaxel-0.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+blaxel-0.2.1.dist-info/licenses/LICENSE,sha256=p5PNQvpvyDT_0aYBDgmV1fFI_vAD2aSV0wWG7VTgRis,1069
+blaxel-0.2.1.dist-info/RECORD,,
File without changes
|
File without changes
|