blaxel 0.2.35__py3-none-any.whl → 0.2.36__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/__init__.py +2 -2
- blaxel/core/client/api/compute/create_sandbox.py +21 -1
- blaxel/core/client/api/jobs/create_job_execution.py +12 -12
- blaxel/core/client/api/volumes/update_volume.py +187 -0
- blaxel/core/client/models/__init__.py +10 -6
- blaxel/core/client/models/{create_job_execution_response.py → create_job_execution_output.py} +11 -13
- blaxel/core/client/models/{create_job_execution_response_tasks_item.py → create_job_execution_output_tasks_item.py} +5 -5
- blaxel/core/client/models/create_job_execution_request.py +31 -0
- blaxel/core/client/models/create_job_execution_request_env.py +50 -0
- blaxel/core/client/models/function_runtime.py +18 -0
- blaxel/core/client/models/{function_spec_transport.py → function_runtime_transport.py} +2 -2
- blaxel/core/client/models/function_spec.py +0 -18
- blaxel/core/client/models/job_execution_spec.py +35 -0
- blaxel/core/client/models/job_execution_spec_env_override.py +50 -0
- blaxel/core/client/models/port_protocol.py +1 -0
- blaxel/core/common/settings.py +5 -0
- blaxel/core/jobs/__init__.py +60 -88
- blaxel/core/sandbox/default/sandbox.py +69 -2
- blaxel/core/sandbox/sync/sandbox.py +69 -2
- blaxel/core/volume/volume.py +203 -4
- blaxel/langgraph/model.py +25 -14
- blaxel/langgraph/tools.py +16 -12
- blaxel/llamaindex/model.py +33 -24
- blaxel/llamaindex/tools.py +9 -4
- blaxel/pydantic/model.py +26 -12
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/METADATA +1 -1
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/RECORD +29 -28
- blaxel/core/client/api/invitations/__init__.py +0 -0
- blaxel/core/client/api/invitations/list_all_pending_invitations.py +0 -142
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/WHEEL +0 -0
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/licenses/LICENSE +0 -0
blaxel/core/client/models/create_job_execution_request_env.py
ADDED

@@ -0,0 +1,50 @@
+from typing import Any, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="CreateJobExecutionRequestEnv")
+
+
+@_attrs_define
+class CreateJobExecutionRequestEnv:
+    """Environment variable overrides (optional, will merge with job's environment variables)
+
+    Example:
+        {"MY_VAR": "custom_value", "BATCH_SIZE": "100"}
+
+    """
+
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        if not src_dict:
+            return None
+        d = src_dict.copy()
+        create_job_execution_request_env = cls()
+
+        create_job_execution_request_env.additional_properties = d
+        return create_job_execution_request_env
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
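As the generated code above shows, the new model is a thin dict-like wrapper whose keys land in `additional_properties`. A minimal sketch of how it behaves, inferred from the added file rather than from blaxel documentation:

```python
# Sketch based on the generated model above; not an official blaxel example.
from blaxel.core.client.models.create_job_execution_request_env import (
    CreateJobExecutionRequestEnv,
)

env = CreateJobExecutionRequestEnv()
env["MY_VAR"] = "custom_value"  # stored in additional_properties
env["BATCH_SIZE"] = "100"

assert "MY_VAR" in env
assert env.to_dict() == {"MY_VAR": "custom_value", "BATCH_SIZE": "100"}

# from_dict returns None for an empty payload, otherwise a populated instance
restored = CreateJobExecutionRequestEnv.from_dict({"MY_VAR": "custom_value"})
assert restored is not None
assert restored["MY_VAR"] == "custom_value"
```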
blaxel/core/client/models/function_runtime.py
CHANGED

@@ -4,6 +4,7 @@ from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
 from ..models.function_runtime_generation import FunctionRuntimeGeneration
+from ..models.function_runtime_transport import FunctionRuntimeTransport
 from ..types import UNSET, Unset
 
 if TYPE_CHECKING:
@@ -29,6 +30,8 @@ class FunctionRuntime:
            in MB / 2048, e.g., 4096MB = 2 CPUs). Example: 2048.
        min_scale (Union[Unset, int]): Minimum instances to keep warm. Set to 1+ to eliminate cold starts, 0 for scale-
            to-zero.
+        transport (Union[Unset, FunctionRuntimeTransport]): Transport compatibility for the MCP, can be "websocket" or
+            "http-stream" Example: http-stream.
     """
 
     envs: Union[Unset, list["Env"]] = UNSET
@@ -37,6 +40,7 @@ class FunctionRuntime:
     max_scale: Union[Unset, int] = UNSET
     memory: Union[Unset, int] = UNSET
     min_scale: Union[Unset, int] = UNSET
+    transport: Union[Unset, FunctionRuntimeTransport] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
@@ -62,6 +66,10 @@ class FunctionRuntime:
 
         min_scale = self.min_scale
 
+        transport: Union[Unset, str] = UNSET
+        if not isinstance(self.transport, Unset):
+            transport = self.transport.value
+
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         field_dict.update({})
@@ -77,6 +85,8 @@ class FunctionRuntime:
             field_dict["memory"] = memory
         if min_scale is not UNSET:
             field_dict["minScale"] = min_scale
+        if transport is not UNSET:
+            field_dict["transport"] = transport
 
         return field_dict
 
@@ -109,6 +119,13 @@ class FunctionRuntime:
 
         min_scale = d.pop("minScale", d.pop("min_scale", UNSET))
 
+        _transport = d.pop("transport", UNSET)
+        transport: Union[Unset, FunctionRuntimeTransport]
+        if isinstance(_transport, Unset):
+            transport = UNSET
+        else:
+            transport = FunctionRuntimeTransport(_transport)
+
         function_runtime = cls(
             envs=envs,
             generation=generation,
@@ -116,6 +133,7 @@ class FunctionRuntime:
             max_scale=max_scale,
             memory=memory,
             min_scale=min_scale,
+            transport=transport,
         )
 
         function_runtime.additional_properties = d
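The net effect is that `transport` now round-trips through `FunctionRuntime` serialization as the enum's string value. A small sketch of that behavior, following the to_dict/from_dict logic in the hunks above (keyword construction assumed from the generated attrs class):

```python
# Sketch of the new transport field's round-trip, per the diff above.
from blaxel.core.client.models.function_runtime import FunctionRuntime
from blaxel.core.client.models.function_runtime_transport import (
    FunctionRuntimeTransport,
)

runtime = FunctionRuntime(memory=2048, transport=FunctionRuntimeTransport.HTTP_STREAM)

payload = runtime.to_dict()
assert payload["transport"] == "http-stream"  # serialized as the enum value

restored = FunctionRuntime.from_dict(payload)
assert restored is not None
assert restored.transport is FunctionRuntimeTransport.HTTP_STREAM
```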
blaxel/core/client/models/{function_spec_transport.py → function_runtime_transport.py}
RENAMED

@@ -1,7 +1,7 @@
 from enum import Enum
 
 
-class FunctionSpecTransport(str, Enum):
+class FunctionRuntimeTransport(str, Enum):
     HTTP_STREAM = "http-stream"
     WEBSOCKET = "websocket"
 
@@ -9,7 +9,7 @@ class FunctionSpecTransport(str, Enum):
         return str(self.value)
 
     @classmethod
-    def _missing_(cls, value: object) -> "FunctionSpecTransport | None":
+    def _missing_(cls, value: object) -> "FunctionRuntimeTransport | None":
         if isinstance(value, str):
             upper_value = value.upper()
             for member in cls:
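The rename keeps the case-insensitive `_missing_` lookup. Its loop body is cut off in this diff, so the exact matching rule is an assumption based on the visible `upper_value` lines:

```python
# Assumes _missing_ matches members case-insensitively, as the visible
# upper_value/for-member lines suggest; the loop body is truncated in the diff.
from blaxel.core.client.models.function_runtime_transport import (
    FunctionRuntimeTransport,
)

assert FunctionRuntimeTransport("http-stream") is FunctionRuntimeTransport.HTTP_STREAM
assert FunctionRuntimeTransport("HTTP-STREAM") is FunctionRuntimeTransport.HTTP_STREAM
```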
blaxel/core/client/models/function_spec.py
CHANGED

@@ -3,7 +3,6 @@ from typing import TYPE_CHECKING, Any, TypeVar, Union, cast
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 
-from ..models.function_spec_transport import FunctionSpecTransport
 from ..types import UNSET, Unset
 
 if TYPE_CHECKING:
@@ -27,8 +26,6 @@ class FunctionSpec:
        revision (Union[Unset, RevisionConfiguration]): Revision configuration
        runtime (Union[Unset, FunctionRuntime]): Runtime configuration defining how the MCP server function is deployed
            and scaled
-        transport (Union[Unset, FunctionSpecTransport]): Transport compatibility for the MCP, can be "websocket" or
-            "http-stream" Example: http-stream.
        triggers (Union[Unset, list['Trigger']]): Triggers to use your agent
     """
 
@@ -37,7 +34,6 @@ class FunctionSpec:
     policies: Union[Unset, list[str]] = UNSET
     revision: Union[Unset, "RevisionConfiguration"] = UNSET
     runtime: Union[Unset, "FunctionRuntime"] = UNSET
-    transport: Union[Unset, FunctionSpecTransport] = UNSET
     triggers: Union[Unset, list["Trigger"]] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
@@ -72,10 +68,6 @@ class FunctionSpec:
         elif self.runtime and isinstance(self.runtime, dict):
             runtime = self.runtime
 
-        transport: Union[Unset, str] = UNSET
-        if not isinstance(self.transport, Unset):
-            transport = self.transport.value
-
         triggers: Union[Unset, list[dict[str, Any]]] = UNSET
         if not isinstance(self.triggers, Unset):
             triggers = []
@@ -99,8 +91,6 @@ class FunctionSpec:
             field_dict["revision"] = revision
         if runtime is not UNSET:
             field_dict["runtime"] = runtime
-        if transport is not UNSET:
-            field_dict["transport"] = transport
         if triggers is not UNSET:
             field_dict["triggers"] = triggers
 
@@ -137,13 +127,6 @@ class FunctionSpec:
         else:
             runtime = FunctionRuntime.from_dict(_runtime)
 
-        _transport = d.pop("transport", UNSET)
-        transport: Union[Unset, FunctionSpecTransport]
-        if isinstance(_transport, Unset):
-            transport = UNSET
-        else:
-            transport = FunctionSpecTransport(_transport)
-
         triggers = []
         _triggers = d.pop("triggers", UNSET)
         for componentsschemas_triggers_item_data in _triggers or []:
@@ -159,7 +142,6 @@ class FunctionSpec:
             policies=policies,
             revision=revision,
             runtime=runtime,
-            transport=transport,
             triggers=triggers,
         )
 
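Taken together with the `FunctionRuntime` diff, this is a field relocation: `transport` moves from `FunctionSpec` to `FunctionSpec.runtime`. A migration sketch (constructor usage assumed from the generated attrs classes, not from blaxel docs):

```python
# 0.2.35: FunctionSpec(transport=...) — that field no longer exists in 0.2.36.
from blaxel.core.client.models.function_runtime import FunctionRuntime
from blaxel.core.client.models.function_runtime_transport import (
    FunctionRuntimeTransport,
)
from blaxel.core.client.models.function_spec import FunctionSpec

spec = FunctionSpec(
    runtime=FunctionRuntime(transport=FunctionRuntimeTransport.WEBSOCKET),
)
assert spec.to_dict()["runtime"]["transport"] == "websocket"
```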
blaxel/core/client/models/job_execution_spec.py
CHANGED

@@ -6,6 +6,7 @@ from attrs import field as _attrs_field
 from ..types import UNSET, Unset
 
 if TYPE_CHECKING:
+    from ..models.job_execution_spec_env_override import JobExecutionSpecEnvOverride
     from ..models.job_execution_task import JobExecutionTask
 
 
@@ -17,17 +18,35 @@ class JobExecutionSpec:
     """Job execution specification
 
     Attributes:
+        env_override (Union[Unset, JobExecutionSpecEnvOverride]): Environment variable overrides (if provided for this
+            execution, values are masked with ***) Example: {"MY_VAR": "***", "BATCH_SIZE": "***"}.
+        memory_override (Union[Unset, int]): Memory override in megabytes (if provided for this execution) Example:
+            2048.
        parallelism (Union[Unset, int]): Number of parallel tasks Example: 5.
        tasks (Union[Unset, list['JobExecutionTask']]): List of execution tasks
        timeout (Union[Unset, int]): Job timeout in seconds (captured at execution creation time) Example: 3600.
     """
 
+    env_override: Union[Unset, "JobExecutionSpecEnvOverride"] = UNSET
+    memory_override: Union[Unset, int] = UNSET
     parallelism: Union[Unset, int] = UNSET
     tasks: Union[Unset, list["JobExecutionTask"]] = UNSET
     timeout: Union[Unset, int] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
+        env_override: Union[Unset, dict[str, Any]] = UNSET
+        if (
+            self.env_override
+            and not isinstance(self.env_override, Unset)
+            and not isinstance(self.env_override, dict)
+        ):
+            env_override = self.env_override.to_dict()
+        elif self.env_override and isinstance(self.env_override, dict):
+            env_override = self.env_override
+
+        memory_override = self.memory_override
+
         parallelism = self.parallelism
 
         tasks: Union[Unset, list[dict[str, Any]]] = UNSET
@@ -45,6 +64,10 @@ class JobExecutionSpec:
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         field_dict.update({})
+        if env_override is not UNSET:
+            field_dict["envOverride"] = env_override
+        if memory_override is not UNSET:
+            field_dict["memoryOverride"] = memory_override
         if parallelism is not UNSET:
             field_dict["parallelism"] = parallelism
         if tasks is not UNSET:
@@ -56,11 +79,21 @@ class JobExecutionSpec:
 
     @classmethod
     def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        from ..models.job_execution_spec_env_override import JobExecutionSpecEnvOverride
         from ..models.job_execution_task import JobExecutionTask
 
         if not src_dict:
             return None
         d = src_dict.copy()
+        _env_override = d.pop("envOverride", d.pop("env_override", UNSET))
+        env_override: Union[Unset, JobExecutionSpecEnvOverride]
+        if isinstance(_env_override, Unset):
+            env_override = UNSET
+        else:
+            env_override = JobExecutionSpecEnvOverride.from_dict(_env_override)
+
+        memory_override = d.pop("memoryOverride", d.pop("memory_override", UNSET))
+
         parallelism = d.pop("parallelism", UNSET)
 
         tasks = []
@@ -73,6 +106,8 @@ class JobExecutionSpec:
         timeout = d.pop("timeout", UNSET)
 
         job_execution_spec = cls(
+            env_override=env_override,
+            memory_override=memory_override,
             parallelism=parallelism,
             tasks=tasks,
             timeout=timeout,
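Parsing an execution spec that carries the new override fields, following the `from_dict` logic above (values come back masked, per the docstring in the diff):

```python
# Sketch based on the from_dict logic in the diff above.
from blaxel.core.client.models.job_execution_spec import JobExecutionSpec

spec = JobExecutionSpec.from_dict(
    {
        "envOverride": {"MY_VAR": "***", "BATCH_SIZE": "***"},
        "memoryOverride": 2048,
        "parallelism": 5,
    }
)
assert spec is not None
assert spec.memory_override == 2048
assert spec.env_override["MY_VAR"] == "***"  # masked by the API
assert spec.parallelism == 5
```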
blaxel/core/client/models/job_execution_spec_env_override.py
ADDED

@@ -0,0 +1,50 @@
+from typing import Any, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="JobExecutionSpecEnvOverride")
+
+
+@_attrs_define
+class JobExecutionSpecEnvOverride:
+    """Environment variable overrides (if provided for this execution, values are masked with ***)
+
+    Example:
+        {"MY_VAR": "***", "BATCH_SIZE": "***"}
+
+    """
+
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        if not src_dict:
+            return None
+        d = src_dict.copy()
+        job_execution_spec_env_override = cls()
+
+        job_execution_spec_env_override.additional_properties = d
+        return job_execution_spec_env_override
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
blaxel/core/common/settings.py
CHANGED
blaxel/core/jobs/__init__.py
CHANGED
@@ -18,8 +18,6 @@ from ..client.models.create_job_execution_request import (
     CreateJobExecutionRequest,
 )
 from ..client.models.job_execution import JobExecution
-from ..common.internal import get_forced_url, get_global_unique_hash
-from ..common.settings import settings
 
 
 class BlJobWrapper:
@@ -74,99 +72,73 @@ class BlJob:
     def __init__(self, name: str):
         self.name = name
 
-
-
-
-
-
+    def run(
+        self,
+        tasks: List[Dict[str, Any]],
+        env: Dict[str, Any] | None = None,
+        memory: int | None = None,
+        execution_id: str | None = None,
+    ) -> str:
+        """
+        Run the job with the provided tasks and optional overrides.
 
-
-
-
-
+        Args:
+            tasks: List of task parameters for parallel execution
+            env: Optional environment variable overrides (merged with job's environment)
+            memory: Optional memory override in megabytes (must be <= job's configured memory)
+            execution_id: Optional custom execution ID
 
-
-
-        return f"{settings.run_url}/{settings.workspace}/jobs/{self.name}"
+        Returns:
+            str: The execution ID
 
-
-
-
-
-        return None
+        Raises:
+            Exception: If the job execution fails
+        """
+        logger.debug(f"Job Calling: {self.name}")
 
-
-
-
-
-
-
-
-
-    def call(self, url, input_data, headers: dict = {}, params: dict = {}):
-        body = {"tasks": input_data}
-
-        # Merge settings headers with provided headers
-        merged_headers = {
-            **settings.headers,
-            "Content-Type": "application/json",
-            **headers,
-        }
-
-        return client.get_httpx_client().post(
-            url + "/executions",
-            headers=merged_headers,
-            json=body,
-            params=params,
-        )
+        request = CreateJobExecutionRequest(tasks=tasks)
+        if env is not None:
+            request.env = env
+        if memory is not None:
+            request.memory = memory
+        if execution_id is not None:
+            request.execution_id = execution_id
 
-
-        logger.debug(f"Job Calling: {self.name}")
-        body = {"tasks": input_data}
-
-        # Merge settings headers with provided headers
-        merged_headers = {
-            **settings.headers,
-            "Content-Type": "application/json",
-            **headers,
-        }
-
-        return await client.get_async_httpx_client().post(
-            url + "/executions",
-            headers=merged_headers,
-            json=body,
-            params=params,
-        )
+        return self.create_execution(request)
 
-    def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    async def arun(
+        self,
+        tasks: List[Dict[str, Any]],
+        env: Dict[str, Any] | None = None,
+        memory: int | None = None,
+        execution_id: str | None = None,
+    ) -> str:
+        """
+        Run the job with the provided tasks and optional overrides (async).
+
+        Args:
+            tasks: List of task parameters for parallel execution
+            env: Optional environment variable overrides (merged with job's environment)
+            memory: Optional memory override in megabytes (must be <= job's configured memory)
+            execution_id: Optional custom execution ID
+
+        Returns:
+            str: The execution ID
+
+        Raises:
+            Exception: If the job execution fails
+        """
         logger.debug(f"Job Calling: {self.name}")
-
-
-
-
-
-
-
-
-
-        )
-        return response.text
+
+        request = CreateJobExecutionRequest(tasks=tasks)
+        if env is not None:
+            request.env = env
+        if memory is not None:
+            request.memory = memory
+        if execution_id is not None:
+            request.execution_id = execution_id
+
+        return await self.acreate_execution(request)
 
     def create_execution(self, request: CreateJobExecutionRequest) -> str:
         """
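The raw httpx `call` plumbing is gone; `run` and `arun` now build a typed `CreateJobExecutionRequest` and delegate to `create_execution`/`acreate_execution`. A hedged usage sketch (the job name is a placeholder, not from blaxel docs):

```python
# Hypothetical usage of the reworked BlJob API; "my-batch-job" is a placeholder.
from blaxel.core.jobs import BlJob

job = BlJob("my-batch-job")

# Synchronous: returns the execution ID
execution_id = job.run(
    tasks=[{"index": i} for i in range(5)],
    env={"BATCH_SIZE": "100"},  # merged with the job's configured env vars
    memory=1024,                # MB; must not exceed the job's configured memory
)

# Async variant:
#     execution_id = await job.arun(tasks=[{"index": 0}])
```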
blaxel/core/sandbox/default/sandbox.py
CHANGED

@@ -8,10 +8,11 @@ from ...client.api.compute.get_sandbox import asyncio as get_sandbox
 from ...client.api.compute.list_sandboxes import asyncio as list_sandboxes
 from ...client.api.compute.update_sandbox import asyncio as update_sandbox
 from ...client.client import client
-from ...client.models import Metadata, Sandbox, SandboxRuntime, SandboxSpec
+from ...client.models import Metadata, Sandbox, SandboxLifecycle, SandboxRuntime, SandboxSpec
 from ...client.models.error import Error
 from ...client.models.sandbox_error import SandboxError
 from ...client.types import UNSET
+from ...common.settings import settings
 from ..types import (
     SandboxConfiguration,
     SandboxCreateConfiguration,
@@ -162,7 +163,7 @@ class SandboxInstance:
         volumes = config._normalize_volumes() or UNSET
         ttl = config.ttl
         expires = config.expires
-        region = config.region
+        region = config.region or settings.region
         lifecycle = config.lifecycle
         # snapshot_enabled = sandbox.snapshot_enabled
 
@@ -304,6 +305,72 @@ class SandboxInstance:
         # Return new instance with updated sandbox
         return cls(response)
 
+    @classmethod
+    async def update_ttl(cls, sandbox_name: str, ttl: str) -> "SandboxInstance":
+        """Update sandbox TTL without recreating it.
+
+        Args:
+            sandbox_name: The name of the sandbox to update
+            ttl: The new TTL value (e.g., "5m", "1h", "30s")
+
+        Returns:
+            A new SandboxInstance with updated TTL
+        """
+        # Get the existing sandbox
+        sandbox_instance = await cls.get(sandbox_name)
+        sandbox = sandbox_instance.sandbox
+
+        # Prepare the updated sandbox object
+        updated_sandbox = Sandbox.from_dict(sandbox.to_dict())
+        if updated_sandbox.spec is None or updated_sandbox.spec.runtime is None:
+            raise ValueError(f"Sandbox {sandbox_name} has invalid spec")
+
+        # Update TTL
+        updated_sandbox.spec.runtime.ttl = ttl
+
+        # Call the update API
+        response = await update_sandbox(
+            sandbox_name=sandbox_name,
+            client=client,
+            body=updated_sandbox,
+        )
+
+        return cls(response)
+
+    @classmethod
+    async def update_lifecycle(
+        cls, sandbox_name: str, lifecycle: SandboxLifecycle
+    ) -> "SandboxInstance":
+        """Update sandbox lifecycle configuration without recreating it.
+
+        Args:
+            sandbox_name: The name of the sandbox to update
+            lifecycle: The new lifecycle configuration
+
+        Returns:
+            A new SandboxInstance with updated lifecycle
+        """
+        # Get the existing sandbox
+        sandbox_instance = await cls.get(sandbox_name)
+        sandbox = sandbox_instance.sandbox
+
+        # Prepare the updated sandbox object
+        updated_sandbox = Sandbox.from_dict(sandbox.to_dict())
+        if updated_sandbox.spec is None:
+            raise ValueError(f"Sandbox {sandbox_name} has invalid spec")
+
+        # Update lifecycle
+        updated_sandbox.spec.lifecycle = lifecycle
+
+        # Call the update API
+        response = await update_sandbox(
+            sandbox_name=sandbox_name,
+            client=client,
+            body=updated_sandbox,
+        )
+
+        return cls(response)
+
     @classmethod
     async def create_if_not_exists(
         cls, sandbox: Union[Sandbox, SandboxCreateConfiguration, Dict[str, Any]]
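The two new classmethods share a fetch-copy-patch-update flow against the update API. A hedged usage sketch (the sandbox name is a placeholder; the import path is taken from this file's location):

```python
# Hypothetical usage of the new update helpers; "my-sandbox" is a placeholder.
import asyncio

from blaxel.core.sandbox.default.sandbox import SandboxInstance


async def main() -> None:
    # Extend the TTL without recreating the sandbox; raises ValueError if the
    # fetched sandbox has no spec/runtime to patch.
    instance = await SandboxInstance.update_ttl("my-sandbox", "1h")
    print(instance)
    # update_lifecycle works the same way, taking a SandboxLifecycle model.


asyncio.run(main())
```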
|