blaxel 0.2.35__py3-none-any.whl → 0.2.37__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/__init__.py +2 -2
- blaxel/core/client/api/compute/create_sandbox.py +21 -1
- blaxel/core/client/api/jobs/create_job_execution.py +12 -12
- blaxel/core/client/api/volumes/update_volume.py +187 -0
- blaxel/core/client/models/__init__.py +10 -6
- blaxel/core/client/models/{create_job_execution_response.py → create_job_execution_output.py} +11 -13
- blaxel/core/client/models/{create_job_execution_response_tasks_item.py → create_job_execution_output_tasks_item.py} +5 -5
- blaxel/core/client/models/create_job_execution_request.py +31 -0
- blaxel/core/client/models/create_job_execution_request_env.py +50 -0
- blaxel/core/client/models/function_runtime.py +18 -0
- blaxel/core/client/models/{function_spec_transport.py → function_runtime_transport.py} +2 -2
- blaxel/core/client/models/function_spec.py +0 -18
- blaxel/core/client/models/job_execution_spec.py +35 -0
- blaxel/core/client/models/job_execution_spec_env_override.py +50 -0
- blaxel/core/client/models/port_protocol.py +1 -0
- blaxel/core/client/models/preview.py +48 -1
- blaxel/core/client/models/sandbox.py +10 -0
- blaxel/core/common/settings.py +5 -0
- blaxel/core/jobs/__init__.py +60 -88
- blaxel/core/sandbox/__init__.py +12 -0
- blaxel/core/{client/api/invitations/list_all_pending_invitations.py → sandbox/client/api/system/get_health.py} +26 -34
- blaxel/core/sandbox/client/api/system/post_upgrade.py +196 -0
- blaxel/core/sandbox/client/models/__init__.py +8 -0
- blaxel/core/sandbox/client/models/content_search_match.py +24 -25
- blaxel/core/sandbox/client/models/content_search_response.py +25 -29
- blaxel/core/sandbox/client/models/find_match.py +13 -14
- blaxel/core/sandbox/client/models/find_response.py +21 -24
- blaxel/core/sandbox/client/models/fuzzy_search_match.py +17 -19
- blaxel/core/sandbox/client/models/fuzzy_search_response.py +21 -24
- blaxel/core/sandbox/client/models/health_response.py +159 -0
- blaxel/core/sandbox/client/models/process_upgrade_state.py +20 -0
- blaxel/core/sandbox/client/models/upgrade_request.py +71 -0
- blaxel/core/sandbox/client/models/upgrade_status.py +125 -0
- blaxel/core/sandbox/default/__init__.py +2 -0
- blaxel/core/sandbox/default/filesystem.py +20 -6
- blaxel/core/sandbox/default/preview.py +48 -1
- blaxel/core/sandbox/default/process.py +66 -21
- blaxel/core/sandbox/default/sandbox.py +104 -6
- blaxel/core/sandbox/default/system.py +71 -0
- blaxel/core/sandbox/sync/__init__.py +2 -0
- blaxel/core/sandbox/sync/filesystem.py +19 -2
- blaxel/core/sandbox/sync/preview.py +50 -3
- blaxel/core/sandbox/sync/process.py +38 -15
- blaxel/core/sandbox/sync/sandbox.py +97 -5
- blaxel/core/sandbox/sync/system.py +71 -0
- blaxel/core/sandbox/types.py +212 -5
- blaxel/core/volume/volume.py +209 -4
- blaxel/langgraph/model.py +25 -14
- blaxel/langgraph/tools.py +15 -12
- blaxel/llamaindex/model.py +33 -24
- blaxel/llamaindex/tools.py +9 -4
- blaxel/pydantic/model.py +26 -12
- blaxel-0.2.37.dist-info/METADATA +569 -0
- {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/RECORD +57 -47
- blaxel-0.2.35.dist-info/METADATA +0 -228
- /blaxel/core/{client/api/invitations → sandbox/client/api/system}/__init__.py +0 -0
- {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/WHEEL +0 -0
- {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/licenses/LICENSE +0 -0
blaxel/core/client/models/create_job_execution_request_env.py
ADDED
@@ -0,0 +1,50 @@
+from typing import Any, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="CreateJobExecutionRequestEnv")
+
+
+@_attrs_define
+class CreateJobExecutionRequestEnv:
+    """Environment variable overrides (optional, will merge with job's environment variables)
+
+    Example:
+        {"MY_VAR": "custom_value", "BATCH_SIZE": "100"}
+
+    """
+
+    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        if not src_dict:
+            return None
+        d = src_dict.copy()
+        create_job_execution_request_env = cls()
+
+        create_job_execution_request_env.additional_properties = d
+        return create_job_execution_request_env
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> str:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: str) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
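The new env-override model is a thin dict-like wrapper around additional_properties. A minimal round-trip sketch, with the import path inferred from the file location above:

from blaxel.core.client.models.create_job_execution_request_env import (
    CreateJobExecutionRequestEnv,
)

env = CreateJobExecutionRequestEnv()
env["MY_VAR"] = "custom_value"  # dict-style writes go through __setitem__
env["BATCH_SIZE"] = "100"
assert "MY_VAR" in env  # __contains__ checks additional_properties
payload = env.to_dict()  # {"MY_VAR": "custom_value", "BATCH_SIZE": "100"}
restored = CreateJobExecutionRequestEnv.from_dict(payload)
assert restored.additional_keys == ["MY_VAR", "BATCH_SIZE"]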
blaxel/core/client/models/function_runtime.py
CHANGED
@@ -4,6 +4,7 @@ from attrs import define as _attrs_define
 from attrs import field as _attrs_field

 from ..models.function_runtime_generation import FunctionRuntimeGeneration
+from ..models.function_runtime_transport import FunctionRuntimeTransport
 from ..types import UNSET, Unset

 if TYPE_CHECKING:
@@ -29,6 +30,8 @@ class FunctionRuntime:
             in MB / 2048, e.g., 4096MB = 2 CPUs). Example: 2048.
         min_scale (Union[Unset, int]): Minimum instances to keep warm. Set to 1+ to eliminate cold starts, 0 for scale-
             to-zero.
+        transport (Union[Unset, FunctionRuntimeTransport]): Transport compatibility for the MCP, can be "websocket" or
+            "http-stream" Example: http-stream.
     """

     envs: Union[Unset, list["Env"]] = UNSET
@@ -37,6 +40,7 @@ class FunctionRuntime:
     max_scale: Union[Unset, int] = UNSET
     memory: Union[Unset, int] = UNSET
     min_scale: Union[Unset, int] = UNSET
+    transport: Union[Unset, FunctionRuntimeTransport] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

     def to_dict(self) -> dict[str, Any]:
@@ -62,6 +66,10 @@ class FunctionRuntime:

         min_scale = self.min_scale

+        transport: Union[Unset, str] = UNSET
+        if not isinstance(self.transport, Unset):
+            transport = self.transport.value
+
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         field_dict.update({})
@@ -77,6 +85,8 @@ class FunctionRuntime:
             field_dict["memory"] = memory
         if min_scale is not UNSET:
             field_dict["minScale"] = min_scale
+        if transport is not UNSET:
+            field_dict["transport"] = transport

         return field_dict

@@ -109,6 +119,13 @@ class FunctionRuntime:

         min_scale = d.pop("minScale", d.pop("min_scale", UNSET))

+        _transport = d.pop("transport", UNSET)
+        transport: Union[Unset, FunctionRuntimeTransport]
+        if isinstance(_transport, Unset):
+            transport = UNSET
+        else:
+            transport = FunctionRuntimeTransport(_transport)
+
         function_runtime = cls(
             envs=envs,
             generation=generation,
@@ -116,6 +133,7 @@ class FunctionRuntime:
             max_scale=max_scale,
             memory=memory,
             min_scale=min_scale,
+            transport=transport,
         )

         function_runtime.additional_properties = d
blaxel/core/client/models/{function_spec_transport.py → function_runtime_transport.py}
RENAMED
@@ -1,7 +1,7 @@
 from enum import Enum


-class FunctionSpecTransport(str, Enum):
+class FunctionRuntimeTransport(str, Enum):
     HTTP_STREAM = "http-stream"
     WEBSOCKET = "websocket"

@@ -9,7 +9,7 @@ class FunctionSpecTransport(str, Enum):
         return str(self.value)

     @classmethod
-    def _missing_(cls, value: object) -> "FunctionSpecTransport | None":
+    def _missing_(cls, value: object) -> "FunctionRuntimeTransport | None":
         if isinstance(value, str):
             upper_value = value.upper()
             for member in cls:
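The rename keeps the generated enum's case-insensitive fallback, but the tail of _missing_ is cut off in this diff. A self-contained sketch of the pattern, assuming the loop matches on upper-cased member values (the standalone Transport enum here is illustrative, not the library class):

from enum import Enum

class Transport(str, Enum):
    HTTP_STREAM = "http-stream"
    WEBSOCKET = "websocket"

    @classmethod
    def _missing_(cls, value: object) -> "Transport | None":
        # Called when the plain value lookup fails; assumed to compare
        # the upper-cased input against each member's upper-cased value.
        if isinstance(value, str):
            upper_value = value.upper()
            for member in cls:
                if member.value.upper() == upper_value:
                    return member
        return None

assert Transport("HTTP-Stream") is Transport.HTTP_STREAM  # case mismatch still resolves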
blaxel/core/client/models/function_spec.py
CHANGED
@@ -3,7 +3,6 @@ from typing import TYPE_CHECKING, Any, TypeVar, Union, cast
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field

-from ..models.function_spec_transport import FunctionSpecTransport
 from ..types import UNSET, Unset

 if TYPE_CHECKING:
@@ -27,8 +26,6 @@ class FunctionSpec:
         revision (Union[Unset, RevisionConfiguration]): Revision configuration
         runtime (Union[Unset, FunctionRuntime]): Runtime configuration defining how the MCP server function is deployed
             and scaled
-        transport (Union[Unset, FunctionSpecTransport]): Transport compatibility for the MCP, can be "websocket" or
-            "http-stream" Example: http-stream.
         triggers (Union[Unset, list['Trigger']]): Triggers to use your agent
     """

@@ -37,7 +34,6 @@ class FunctionSpec:
     policies: Union[Unset, list[str]] = UNSET
     revision: Union[Unset, "RevisionConfiguration"] = UNSET
     runtime: Union[Unset, "FunctionRuntime"] = UNSET
-    transport: Union[Unset, FunctionSpecTransport] = UNSET
     triggers: Union[Unset, list["Trigger"]] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

@@ -72,10 +68,6 @@ class FunctionSpec:
         elif self.runtime and isinstance(self.runtime, dict):
             runtime = self.runtime

-        transport: Union[Unset, str] = UNSET
-        if not isinstance(self.transport, Unset):
-            transport = self.transport.value
-
         triggers: Union[Unset, list[dict[str, Any]]] = UNSET
         if not isinstance(self.triggers, Unset):
             triggers = []
@@ -99,8 +91,6 @@ class FunctionSpec:
             field_dict["revision"] = revision
         if runtime is not UNSET:
             field_dict["runtime"] = runtime
-        if transport is not UNSET:
-            field_dict["transport"] = transport
         if triggers is not UNSET:
             field_dict["triggers"] = triggers

@@ -137,13 +127,6 @@ class FunctionSpec:
         else:
             runtime = FunctionRuntime.from_dict(_runtime)

-        _transport = d.pop("transport", UNSET)
-        transport: Union[Unset, FunctionSpecTransport]
-        if isinstance(_transport, Unset):
-            transport = UNSET
-        else:
-            transport = FunctionSpecTransport(_transport)
-
         triggers = []
         _triggers = d.pop("triggers", UNSET)
         for componentsschemas_triggers_item_data in _triggers or []:
@@ -159,7 +142,6 @@ class FunctionSpec:
             policies=policies,
             revision=revision,
             runtime=runtime,
-            transport=transport,
             triggers=triggers,
         )
blaxel/core/client/models/job_execution_spec.py
CHANGED
@@ -6,6 +6,7 @@ from attrs import field as _attrs_field
 from ..types import UNSET, Unset

 if TYPE_CHECKING:
+    from ..models.job_execution_spec_env_override import JobExecutionSpecEnvOverride
     from ..models.job_execution_task import JobExecutionTask


@@ -17,17 +18,35 @@ class JobExecutionSpec:
     """Job execution specification

     Attributes:
+        env_override (Union[Unset, JobExecutionSpecEnvOverride]): Environment variable overrides (if provided for this
+            execution, values are masked with ***) Example: {"MY_VAR": "***", "BATCH_SIZE": "***"}.
+        memory_override (Union[Unset, int]): Memory override in megabytes (if provided for this execution) Example:
+            2048.
         parallelism (Union[Unset, int]): Number of parallel tasks Example: 5.
         tasks (Union[Unset, list['JobExecutionTask']]): List of execution tasks
         timeout (Union[Unset, int]): Job timeout in seconds (captured at execution creation time) Example: 3600.
     """

+    env_override: Union[Unset, "JobExecutionSpecEnvOverride"] = UNSET
+    memory_override: Union[Unset, int] = UNSET
     parallelism: Union[Unset, int] = UNSET
     tasks: Union[Unset, list["JobExecutionTask"]] = UNSET
     timeout: Union[Unset, int] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

     def to_dict(self) -> dict[str, Any]:
+        env_override: Union[Unset, dict[str, Any]] = UNSET
+        if (
+            self.env_override
+            and not isinstance(self.env_override, Unset)
+            and not isinstance(self.env_override, dict)
+        ):
+            env_override = self.env_override.to_dict()
+        elif self.env_override and isinstance(self.env_override, dict):
+            env_override = self.env_override
+
+        memory_override = self.memory_override
+
         parallelism = self.parallelism

         tasks: Union[Unset, list[dict[str, Any]]] = UNSET
@@ -45,6 +64,10 @@ class JobExecutionSpec:
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         field_dict.update({})
+        if env_override is not UNSET:
+            field_dict["envOverride"] = env_override
+        if memory_override is not UNSET:
+            field_dict["memoryOverride"] = memory_override
         if parallelism is not UNSET:
             field_dict["parallelism"] = parallelism
         if tasks is not UNSET:
@@ -56,11 +79,21 @@ class JobExecutionSpec:

     @classmethod
     def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        from ..models.job_execution_spec_env_override import JobExecutionSpecEnvOverride
         from ..models.job_execution_task import JobExecutionTask

         if not src_dict:
             return None
         d = src_dict.copy()
+        _env_override = d.pop("envOverride", d.pop("env_override", UNSET))
+        env_override: Union[Unset, JobExecutionSpecEnvOverride]
+        if isinstance(_env_override, Unset):
+            env_override = UNSET
+        else:
+            env_override = JobExecutionSpecEnvOverride.from_dict(_env_override)
+
+        memory_override = d.pop("memoryOverride", d.pop("memory_override", UNSET))
+
         parallelism = d.pop("parallelism", UNSET)

         tasks = []
@@ -73,6 +106,8 @@ class JobExecutionSpec:
         timeout = d.pop("timeout", UNSET)

         job_execution_spec = cls(
+            env_override=env_override,
+            memory_override=memory_override,
             parallelism=parallelism,
             tasks=tasks,
             timeout=timeout,
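Both new fields are read with the generated dual-key pop, which accepts camelCase and snake_case payloads. A small standalone demo of how that expression resolves; note the inner pop always runs, so both keys are removed from the dict, and the camelCase key wins when both are present:

UNSET = object()  # stand-in for the client's Unset sentinel

d = {"memoryOverride": 2048, "memory_override": 1024}
# The inner pop evaluates first and removes "memory_override"; its result
# becomes the default for the outer pop, so "memoryOverride" takes priority.
memory_override = d.pop("memoryOverride", d.pop("memory_override", UNSET))
assert memory_override == 2048 and d == {}

d = {"memory_override": 1024}
memory_override = d.pop("memoryOverride", d.pop("memory_override", UNSET))
assert memory_override == 1024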
blaxel/core/client/models/job_execution_spec_env_override.py
ADDED
@@ -0,0 +1,50 @@
+from typing import Any, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="JobExecutionSpecEnvOverride")
+
+
+@_attrs_define
+class JobExecutionSpecEnvOverride:
+    """Environment variable overrides (if provided for this execution, values are masked with ***)
+
+    Example:
+        {"MY_VAR": "***", "BATCH_SIZE": "***"}
+
+    """
+
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        if not src_dict:
+            return None
+        d = src_dict.copy()
+        job_execution_spec_env_override = cls()
+
+        job_execution_spec_env_override.additional_properties = d
+        return job_execution_spec_env_override
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
blaxel/core/client/models/preview.py
CHANGED
@@ -1,9 +1,13 @@
-from typing import TYPE_CHECKING, Any, TypeVar
+from typing import TYPE_CHECKING, Any, TypeVar, Union

 from attrs import define as _attrs_define
 from attrs import field as _attrs_field

+from ..models.status import Status
+from ..types import UNSET, Unset
+
 if TYPE_CHECKING:
+    from ..models.core_event import CoreEvent
     from ..models.preview_metadata import PreviewMetadata
     from ..models.preview_spec import PreviewSpec

@@ -18,10 +22,14 @@ class Preview:
     Attributes:
         metadata (PreviewMetadata): PreviewMetadata
         spec (PreviewSpec): Preview of a Resource
+        events (Union[Unset, list['CoreEvent']]): Events happening on a resource deployed on Blaxel
+        status (Union[Unset, Status]): Deployment status of a resource deployed on Blaxel
     """

     metadata: "PreviewMetadata"
     spec: "PreviewSpec"
+    events: Union[Unset, list["CoreEvent"]] = UNSET
+    status: Union[Unset, Status] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

     def to_dict(self) -> dict[str, Any]:
@@ -35,6 +43,22 @@ class Preview:
         else:
             spec = self.spec.to_dict()

+        events: Union[Unset, list[dict[str, Any]]] = UNSET
+        if not isinstance(self.events, Unset):
+            events = []
+            for componentsschemas_core_events_item_data in self.events:
+                if type(componentsschemas_core_events_item_data) is dict:
+                    componentsschemas_core_events_item = componentsschemas_core_events_item_data
+                else:
+                    componentsschemas_core_events_item = (
+                        componentsschemas_core_events_item_data.to_dict()
+                    )
+                events.append(componentsschemas_core_events_item)
+
+        status: Union[Unset, str] = UNSET
+        if not isinstance(self.status, Unset):
+            status = self.status.value
+
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         field_dict.update(
@@ -43,11 +67,16 @@ class Preview:
                 "spec": spec,
             }
         )
+        if events is not UNSET:
+            field_dict["events"] = events
+        if status is not UNSET:
+            field_dict["status"] = status

         return field_dict

     @classmethod
     def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        from ..models.core_event import CoreEvent
         from ..models.preview_metadata import PreviewMetadata
         from ..models.preview_spec import PreviewSpec

@@ -58,9 +87,27 @@ class Preview:

         spec = PreviewSpec.from_dict(d.pop("spec"))

+        events = []
+        _events = d.pop("events", UNSET)
+        for componentsschemas_core_events_item_data in _events or []:
+            componentsschemas_core_events_item = CoreEvent.from_dict(
+                componentsschemas_core_events_item_data
+            )
+
+            events.append(componentsschemas_core_events_item)
+
+        _status = d.pop("status", UNSET)
+        status: Union[Unset, Status]
+        if isinstance(_status, Unset):
+            status = UNSET
+        else:
+            status = Status(_status)
+
         preview = cls(
             metadata=metadata,
             spec=spec,
+            events=events,
+            status=status,
         )

         preview.additional_properties = d
blaxel/core/client/models/sandbox.py
CHANGED
@@ -26,6 +26,8 @@ class Sandbox:
         spec (SandboxSpec): Configuration for a sandbox including its image, memory, ports, region, and lifecycle
             policies
         events (Union[Unset, list['CoreEvent']]): Events happening on a resource deployed on Blaxel
+        expires_in (Union[Unset, int]): Time in seconds until the sandbox is automatically deleted based on TTL and
+            lifecycle policies. Only present for sandboxes with lifecycle configured.
         last_used_at (Union[Unset, str]): Last time the sandbox was used (read-only, managed by the system)
         status (Union[Unset, Status]): Deployment status of a resource deployed on Blaxel
     """
@@ -33,6 +35,7 @@ class Sandbox:
     metadata: "Metadata"
     spec: "SandboxSpec"
     events: Union[Unset, list["CoreEvent"]] = UNSET
+    expires_in: Union[Unset, int] = UNSET
     last_used_at: Union[Unset, str] = UNSET
     status: Union[Unset, Status] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
@@ -60,6 +63,8 @@ class Sandbox:
                 )
                 events.append(componentsschemas_core_events_item)

+        expires_in = self.expires_in
+
         last_used_at = self.last_used_at

         status: Union[Unset, str] = UNSET
@@ -76,6 +81,8 @@ class Sandbox:
         )
         if events is not UNSET:
             field_dict["events"] = events
+        if expires_in is not UNSET:
+            field_dict["expiresIn"] = expires_in
         if last_used_at is not UNSET:
             field_dict["lastUsedAt"] = last_used_at
         if status is not UNSET:
@@ -105,6 +112,8 @@ class Sandbox:

             events.append(componentsschemas_core_events_item)

+        expires_in = d.pop("expiresIn", d.pop("expires_in", UNSET))
+
         last_used_at = d.pop("lastUsedAt", d.pop("last_used_at", UNSET))

         _status = d.pop("status", UNSET)
@@ -118,6 +127,7 @@ class Sandbox:
             metadata=metadata,
             spec=spec,
            events=events,
+            expires_in=expires_in,
             last_used_at=last_used_at,
             status=status,
         )
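expires_in is a relative TTL in seconds rather than an absolute timestamp, so a caller that wants a concrete deadline has to anchor it to the moment the Sandbox was fetched. A hedged sketch (only the field itself comes from this diff; the anchoring is the caller's responsibility):

from datetime import datetime, timedelta, timezone

expires_in = 3600  # as returned on a sandbox with lifecycle policies configured
deletes_at = datetime.now(timezone.utc) + timedelta(seconds=expires_in)
print(f"Sandbox scheduled for deletion around {deletes_at.isoformat()}")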
blaxel/core/common/settings.py
CHANGED
blaxel/core/jobs/__init__.py
CHANGED
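The diff below replaces BlJob's hand-rolled HTTP helpers with typed CreateJobExecutionRequest objects and a pair of run/arun methods. A minimal usage sketch of the new surface, assuming a deployed job named "my-batch-job" (the task payload and override values are illustrative):

from blaxel.core.jobs import BlJob

job = BlJob("my-batch-job")
execution_id = job.run(
    tasks=[{"index": str(i)} for i in range(5)],  # one parameter dict per parallel task
    env={"BATCH_SIZE": "100"},  # merged with the job's configured environment variables
    memory=1024,  # MB; must not exceed the job's configured memory
)
print(f"Started execution {execution_id}")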
@@ -18,8 +18,6 @@ from ..client.models.create_job_execution_request import (
     CreateJobExecutionRequest,
 )
 from ..client.models.job_execution import JobExecution
-from ..common.internal import get_forced_url, get_global_unique_hash
-from ..common.settings import settings


 class BlJobWrapper:
@@ -74,99 +72,73 @@ class BlJob:
     def __init__(self, name: str):
         self.name = name

-
-
-
-
-
+    def run(
+        self,
+        tasks: List[Dict[str, Any]],
+        env: Dict[str, str] | None = None,
+        memory: int | None = None,
+        execution_id: str | None = None,
+    ) -> str:
+        """
+        Run the job with the provided tasks and optional overrides.

-
-
-
-
+        Args:
+            tasks: List of task parameters for parallel execution
+            env: Optional environment variable overrides (merged with job's environment)
+            memory: Optional memory override in megabytes (must be <= job's configured memory)
+            execution_id: Optional custom execution ID

-
-
-        return f"{settings.run_url}/{settings.workspace}/jobs/{self.name}"
+        Returns:
+            str: The execution ID

-
-
-
-
-            return None
+        Raises:
+            Exception: If the job execution fails
+        """
+        logger.debug(f"Job Calling: {self.name}")

-
-
-
-
-
-
-
-
-    def call(self, url, input_data, headers: dict = {}, params: dict = {}):
-        body = {"tasks": input_data}
-
-        # Merge settings headers with provided headers
-        merged_headers = {
-            **settings.headers,
-            "Content-Type": "application/json",
-            **headers,
-        }
-
-        return client.get_httpx_client().post(
-            url + "/executions",
-            headers=merged_headers,
-            json=body,
-            params=params,
-        )
+        request = CreateJobExecutionRequest(tasks=tasks)
+        if env is not None:
+            request.env = env
+        if memory is not None:
+            request.memory = memory
+        if execution_id is not None:
+            request.execution_id = execution_id

-
-        logger.debug(f"Job Calling: {self.name}")
-        body = {"tasks": input_data}
-
-        # Merge settings headers with provided headers
-        merged_headers = {
-            **settings.headers,
-            "Content-Type": "application/json",
-            **headers,
-        }
-
-        return await client.get_async_httpx_client().post(
-            url + "/executions",
-            headers=merged_headers,
-            json=body,
-            params=params,
-        )
+        return self.create_execution(request)

-    def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    async def arun(
+        self,
+        tasks: List[Dict[str, Any]],
+        env: Dict[str, str] | None = None,
+        memory: int | None = None,
+        execution_id: str | None = None,
+    ) -> str:
+        """
+        Run the job with the provided tasks and optional overrides (async).
+
+        Args:
+            tasks: List of task parameters for parallel execution
+            env: Optional environment variable overrides (merged with job's environment)
+            memory: Optional memory override in megabytes (must be <= job's configured memory)
+            execution_id: Optional custom execution ID
+
+        Returns:
+            str: The execution ID
+
+        Raises:
+            Exception: If the job execution fails
+        """
         logger.debug(f"Job Calling: {self.name}")
-
-
-
-
-
-
-
-
-
-
-        )
-        return response.text
+
+        request = CreateJobExecutionRequest(tasks=tasks)
+        if env is not None:
+            request.env = env
+        if memory is not None:
+            request.memory = memory
+        if execution_id is not None:
+            request.execution_id = execution_id
+
+        return await self.acreate_execution(request)

     def create_execution(self, request: CreateJobExecutionRequest) -> str:
|