blaxel 0.2.35__py3-none-any.whl → 0.2.36__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/__init__.py +2 -2
- blaxel/core/client/api/compute/create_sandbox.py +21 -1
- blaxel/core/client/api/jobs/create_job_execution.py +12 -12
- blaxel/core/client/api/volumes/update_volume.py +187 -0
- blaxel/core/client/models/__init__.py +10 -6
- blaxel/core/client/models/{create_job_execution_response.py → create_job_execution_output.py} +11 -13
- blaxel/core/client/models/{create_job_execution_response_tasks_item.py → create_job_execution_output_tasks_item.py} +5 -5
- blaxel/core/client/models/create_job_execution_request.py +31 -0
- blaxel/core/client/models/create_job_execution_request_env.py +50 -0
- blaxel/core/client/models/function_runtime.py +18 -0
- blaxel/core/client/models/{function_spec_transport.py → function_runtime_transport.py} +2 -2
- blaxel/core/client/models/function_spec.py +0 -18
- blaxel/core/client/models/job_execution_spec.py +35 -0
- blaxel/core/client/models/job_execution_spec_env_override.py +50 -0
- blaxel/core/client/models/port_protocol.py +1 -0
- blaxel/core/common/settings.py +5 -0
- blaxel/core/jobs/__init__.py +60 -88
- blaxel/core/sandbox/default/sandbox.py +69 -2
- blaxel/core/sandbox/sync/sandbox.py +69 -2
- blaxel/core/volume/volume.py +203 -4
- blaxel/langgraph/model.py +25 -14
- blaxel/langgraph/tools.py +16 -12
- blaxel/llamaindex/model.py +33 -24
- blaxel/llamaindex/tools.py +9 -4
- blaxel/pydantic/model.py +26 -12
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/METADATA +1 -1
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/RECORD +29 -28
- blaxel/core/client/api/invitations/__init__.py +0 -0
- blaxel/core/client/api/invitations/list_all_pending_invitations.py +0 -142
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/WHEEL +0 -0
- {blaxel-0.2.35.dist-info → blaxel-0.2.36.dist-info}/licenses/LICENSE +0 -0
blaxel/__init__.py
CHANGED
@@ -4,8 +4,8 @@ from .core.common.autoload import autoload
 from .core.common.env import env
 from .core.common.settings import settings
 
-__version__ = "0.2.35"
-__commit__ = "
+__version__ = "0.2.36"
+__commit__ = "70f38e31dd50b26cbb54665420e165b89f3fa4b9"
 __sentry_dsn__ = "https://9711de13cd02b285ca4378c01de8dc30@o4508714045276160.ingest.us.sentry.io/4510461121462272"
 __all__ = ["autoload", "settings", "env"]
 
blaxel/core/client/api/compute/create_sandbox.py
CHANGED

@@ -7,18 +7,26 @@ from ... import errors
 from ...client import Client
 from ...models.sandbox import Sandbox
 from ...models.sandbox_error import SandboxError
-from ...types import Response
+from ...types import UNSET, Response, Unset


 def _get_kwargs(
     *,
     body: Sandbox,
+    create_if_not_exist: Union[Unset, bool] = False,
 ) -> dict[str, Any]:
     headers: dict[str, Any] = {}

+    params: dict[str, Any] = {}
+
+    params["createIfNotExist"] = create_if_not_exist
+
+    params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+
     _kwargs: dict[str, Any] = {
         "method": "post",
         "url": "/sandboxes",
+        "params": params,
     }

     if type(body) is dict:
@@ -81,6 +89,7 @@ def sync_detailed(
     *,
     client: Client,
     body: Sandbox,
+    create_if_not_exist: Union[Unset, bool] = False,
 ) -> Response[Union[Sandbox, SandboxError]]:
     """Create sandbox

@@ -88,6 +97,7 @@ def sync_detailed(
     idle and resume instantly, preserving memory state including running processes and filesystem.

     Args:
+        create_if_not_exist (Union[Unset, bool]): Default: False.
         body (Sandbox): Lightweight virtual machine for secure AI code execution. Sandboxes resume
             from standby in under 25ms and automatically scale to zero after inactivity, preserving
             memory state including running processes and filesystem.
@@ -102,6 +112,7 @@ def sync_detailed(

     kwargs = _get_kwargs(
         body=body,
+        create_if_not_exist=create_if_not_exist,
     )

     response = client.get_httpx_client().request(
@@ -115,6 +126,7 @@ def sync(
     *,
     client: Client,
     body: Sandbox,
+    create_if_not_exist: Union[Unset, bool] = False,
 ) -> Union[Sandbox, SandboxError] | None:
     """Create sandbox

@@ -122,6 +134,7 @@ def sync(
     idle and resume instantly, preserving memory state including running processes and filesystem.

     Args:
+        create_if_not_exist (Union[Unset, bool]): Default: False.
         body (Sandbox): Lightweight virtual machine for secure AI code execution. Sandboxes resume
             from standby in under 25ms and automatically scale to zero after inactivity, preserving
             memory state including running processes and filesystem.
@@ -137,6 +150,7 @@ def sync(
     return sync_detailed(
         client=client,
         body=body,
+        create_if_not_exist=create_if_not_exist,
     ).parsed


@@ -144,6 +158,7 @@ async def asyncio_detailed(
     *,
     client: Client,
     body: Sandbox,
+    create_if_not_exist: Union[Unset, bool] = False,
 ) -> Response[Union[Sandbox, SandboxError]]:
     """Create sandbox

@@ -151,6 +166,7 @@ async def asyncio_detailed(
     idle and resume instantly, preserving memory state including running processes and filesystem.

     Args:
+        create_if_not_exist (Union[Unset, bool]): Default: False.
         body (Sandbox): Lightweight virtual machine for secure AI code execution. Sandboxes resume
             from standby in under 25ms and automatically scale to zero after inactivity, preserving
             memory state including running processes and filesystem.
@@ -165,6 +181,7 @@ async def asyncio_detailed(

     kwargs = _get_kwargs(
         body=body,
+        create_if_not_exist=create_if_not_exist,
     )

     response = await client.get_async_httpx_client().request(**kwargs)
@@ -176,6 +193,7 @@ async def asyncio(
     *,
     client: Client,
     body: Sandbox,
+    create_if_not_exist: Union[Unset, bool] = False,
 ) -> Union[Sandbox, SandboxError] | None:
     """Create sandbox

@@ -183,6 +201,7 @@ async def asyncio(
     idle and resume instantly, preserving memory state including running processes and filesystem.

     Args:
+        create_if_not_exist (Union[Unset, bool]): Default: False.
         body (Sandbox): Lightweight virtual machine for secure AI code execution. Sandboxes resume
             from standby in under 25ms and automatically scale to zero after inactivity, preserving
             memory state including running processes and filesystem.
@@ -199,5 +218,6 @@ async def asyncio(
         await asyncio_detailed(
             client=client,
             body=body,
+            create_if_not_exist=create_if_not_exist,
         )
     ).parsed
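For context, the new parameter is sent as the `createIfNotExist` query string on `POST /sandboxes`. A minimal, hedged usage sketch (not part of the diff; it assumes an already-configured blaxel `Client`, and the body fields are illustrative):

```python
from blaxel.core.client.api.compute import create_sandbox


def ensure_sandbox(client) -> None:
    # A plain dict body is accepted because _get_kwargs special-cases dict bodies.
    body = {"metadata": {"name": "my-sandbox"}}  # hypothetical field names
    sandbox = create_sandbox.sync(
        client=client,
        body=body,
        # New in 0.2.36: serialized as the ?createIfNotExist query parameter (default False).
        create_if_not_exist=True,
    )
    print(sandbox)
```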
blaxel/core/client/api/jobs/create_job_execution.py
CHANGED

@@ -5,8 +5,8 @@ import httpx

 from ... import errors
 from ...client import Client
+from ...models.create_job_execution_output import CreateJobExecutionOutput
 from ...models.create_job_execution_request import CreateJobExecutionRequest
-from ...models.create_job_execution_response import CreateJobExecutionResponse
 from ...types import Response


@@ -36,9 +36,9 @@

 def _parse_response(
     *, client: Client, response: httpx.Response
-) -> Union[Any, CreateJobExecutionResponse] | None:
+) -> Union[Any, CreateJobExecutionOutput] | None:
     if response.status_code == 200:
-        response_200 = CreateJobExecutionResponse.from_dict(response.json())
+        response_200 = CreateJobExecutionOutput.from_dict(response.json())

         return response_200
     if response.status_code == 400:
@@ -55,7 +55,7 @@ def _parse_response(

 def _build_response(
     *, client: Client, response: httpx.Response
-) -> Response[Union[Any, CreateJobExecutionResponse]]:
+) -> Response[Union[Any, CreateJobExecutionOutput]]:
     return Response(
         status_code=HTTPStatus(response.status_code),
         content=response.content,
@@ -69,7 +69,7 @@ def sync_detailed(
     *,
     client: Client,
     body: CreateJobExecutionRequest,
-) -> Response[Union[Any, CreateJobExecutionResponse]]:
+) -> Response[Union[Any, CreateJobExecutionOutput]]:
     """Create job execution

     Triggers a new execution of the batch job. Each execution runs multiple tasks in parallel according
@@ -84,7 +84,7 @@ def sync_detailed(
         httpx.TimeoutException: If the request takes longer than Client.timeout.

     Returns:
-        Response[Union[Any, CreateJobExecutionResponse]]
+        Response[Union[Any, CreateJobExecutionOutput]]
     """

     kwargs = _get_kwargs(
@@ -104,7 +104,7 @@ def sync(
     *,
     client: Client,
     body: CreateJobExecutionRequest,
-) -> Union[Any, CreateJobExecutionResponse] | None:
+) -> Union[Any, CreateJobExecutionOutput] | None:
     """Create job execution

     Triggers a new execution of the batch job. Each execution runs multiple tasks in parallel according
@@ -119,7 +119,7 @@ def sync(
         httpx.TimeoutException: If the request takes longer than Client.timeout.

     Returns:
-        Union[Any, CreateJobExecutionResponse]
+        Union[Any, CreateJobExecutionOutput]
     """

     return sync_detailed(
@@ -134,7 +134,7 @@ async def asyncio_detailed(
     *,
     client: Client,
     body: CreateJobExecutionRequest,
-) -> Response[Union[Any, CreateJobExecutionResponse]]:
+) -> Response[Union[Any, CreateJobExecutionOutput]]:
     """Create job execution

     Triggers a new execution of the batch job. Each execution runs multiple tasks in parallel according
@@ -149,7 +149,7 @@ async def asyncio_detailed(
         httpx.TimeoutException: If the request takes longer than Client.timeout.

     Returns:
-        Response[Union[Any, CreateJobExecutionResponse]]
+        Response[Union[Any, CreateJobExecutionOutput]]
     """

     kwargs = _get_kwargs(
@@ -167,7 +167,7 @@ async def asyncio(
     *,
     client: Client,
     body: CreateJobExecutionRequest,
-) -> Union[Any, CreateJobExecutionResponse] | None:
+) -> Union[Any, CreateJobExecutionOutput] | None:
     """Create job execution

     Triggers a new execution of the batch job. Each execution runs multiple tasks in parallel according
@@ -182,7 +182,7 @@ async def asyncio(
         httpx.TimeoutException: If the request takes longer than Client.timeout.

     Returns:
-        Union[Any, CreateJobExecutionResponse]
+        Union[Any, CreateJobExecutionOutput]
     """

     return (
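The caller-visible change in this module is the renamed success model. A hedged sketch of how a call site's result handling might look after upgrading (the old name appears only in the comment):

```python
from blaxel.core.client.models import CreateJobExecutionOutput


def handle_result(result) -> None:
    # `result` is whatever create_job_execution.sync(...) returned.
    # 0.2.35 returned CreateJobExecutionResponse; 0.2.36 returns CreateJobExecutionOutput.
    if isinstance(result, CreateJobExecutionOutput):
        print(result.execution_id, result.job_id, result.workspace_id)
```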
blaxel/core/client/api/volumes/update_volume.py
ADDED

@@ -0,0 +1,187 @@
+from http import HTTPStatus
+from typing import Any
+
+import httpx
+
+from ... import errors
+from ...client import Client
+from ...models.volume import Volume
+from ...types import Response
+
+
+def _get_kwargs(
+    volume_name: str,
+    *,
+    body: Volume,
+) -> dict[str, Any]:
+    headers: dict[str, Any] = {}
+
+    _kwargs: dict[str, Any] = {
+        "method": "put",
+        "url": f"/volumes/{volume_name}",
+    }
+
+    if type(body) is dict:
+        _body = body
+    else:
+        _body = body.to_dict()
+
+    _kwargs["json"] = _body
+    headers["Content-Type"] = "application/json"
+
+    _kwargs["headers"] = headers
+    return _kwargs
+
+
+def _parse_response(*, client: Client, response: httpx.Response) -> Volume | None:
+    if response.status_code == 200:
+        response_200 = Volume.from_dict(response.json())
+
+        return response_200
+    if client.raise_on_unexpected_status:
+        raise errors.UnexpectedStatus(response.status_code, response.content)
+    else:
+        return None
+
+
+def _build_response(*, client: Client, response: httpx.Response) -> Response[Volume]:
+    return Response(
+        status_code=HTTPStatus(response.status_code),
+        content=response.content,
+        headers=response.headers,
+        parsed=_parse_response(client=client, response=response),
+    )
+
+
+def sync_detailed(
+    volume_name: str,
+    *,
+    client: Client,
+    body: Volume,
+) -> Response[Volume]:
+    """Update volume
+
+    Updates a volume.
+
+    Args:
+        volume_name (str):
+        body (Volume): Persistent storage volume that can be attached to sandboxes for durable
+            file storage across sessions. Volumes survive sandbox deletion and can be reattached to
+            new sandboxes.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Response[Volume]
+    """
+
+    kwargs = _get_kwargs(
+        volume_name=volume_name,
+        body=body,
+    )
+
+    response = client.get_httpx_client().request(
+        **kwargs,
+    )
+
+    return _build_response(client=client, response=response)
+
+
+def sync(
+    volume_name: str,
+    *,
+    client: Client,
+    body: Volume,
+) -> Volume | None:
+    """Update volume
+
+    Updates a volume.
+
+    Args:
+        volume_name (str):
+        body (Volume): Persistent storage volume that can be attached to sandboxes for durable
+            file storage across sessions. Volumes survive sandbox deletion and can be reattached to
+            new sandboxes.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Volume
+    """
+
+    return sync_detailed(
+        volume_name=volume_name,
+        client=client,
+        body=body,
+    ).parsed
+
+
+async def asyncio_detailed(
+    volume_name: str,
+    *,
+    client: Client,
+    body: Volume,
+) -> Response[Volume]:
+    """Update volume
+
+    Updates a volume.
+
+    Args:
+        volume_name (str):
+        body (Volume): Persistent storage volume that can be attached to sandboxes for durable
+            file storage across sessions. Volumes survive sandbox deletion and can be reattached to
+            new sandboxes.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Response[Volume]
+    """
+
+    kwargs = _get_kwargs(
+        volume_name=volume_name,
+        body=body,
+    )
+
+    response = await client.get_async_httpx_client().request(**kwargs)
+
+    return _build_response(client=client, response=response)
+
+
+async def asyncio(
+    volume_name: str,
+    *,
+    client: Client,
+    body: Volume,
+) -> Volume | None:
+    """Update volume
+
+    Updates a volume.
+
+    Args:
+        volume_name (str):
+        body (Volume): Persistent storage volume that can be attached to sandboxes for durable
+            file storage across sessions. Volumes survive sandbox deletion and can be reattached to
+            new sandboxes.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Volume
+    """
+
+    return (
+        await asyncio_detailed(
+            volume_name=volume_name,
+            client=client,
+            body=body,
+        )
+    ).parsed
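A hedged usage sketch for the new `PUT /volumes/{volume_name}` endpoint; it assumes an already-configured blaxel `Client`, and the body fields are illustrative rather than taken from the diff:

```python
from blaxel.core.client.api.volumes import update_volume


def resize_volume(client) -> None:
    # A plain dict body is accepted because _get_kwargs special-cases dict bodies.
    updated = update_volume.sync(
        "my-volume",                    # volume_name path parameter
        client=client,
        body={"spec": {"size": 2048}},  # hypothetical Volume fields
    )
    print(updated)
```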
blaxel/core/client/models/__init__.py
CHANGED

@@ -12,10 +12,11 @@ from .continent import Continent
 from .core_event import CoreEvent
 from .country import Country
 from .create_api_key_for_service_account_body import CreateApiKeyForServiceAccountBody
+from .create_job_execution_output import CreateJobExecutionOutput
+from .create_job_execution_output_tasks_item import CreateJobExecutionOutputTasksItem
 from .create_job_execution_request import CreateJobExecutionRequest
+from .create_job_execution_request_env import CreateJobExecutionRequestEnv
 from .create_job_execution_request_tasks_item import CreateJobExecutionRequestTasksItem
-from .create_job_execution_response import CreateJobExecutionResponse
-from .create_job_execution_response_tasks_item import CreateJobExecutionResponseTasksItem
 from .create_workspace_service_account_body import CreateWorkspaceServiceAccountBody
 from .create_workspace_service_account_response_200 import CreateWorkspaceServiceAccountResponse200
 from .custom_domain import CustomDomain
@@ -43,8 +44,8 @@ from .form_secrets import FormSecrets
 from .function import Function
 from .function_runtime import FunctionRuntime
 from .function_runtime_generation import FunctionRuntimeGeneration
+from .function_runtime_transport import FunctionRuntimeTransport
 from .function_spec import FunctionSpec
-from .function_spec_transport import FunctionSpecTransport
 from .get_workspace_service_accounts_response_200_item import (
     GetWorkspaceServiceAccountsResponse200Item,
 )
@@ -72,6 +73,7 @@ from .job import Job
 from .job_execution import JobExecution
 from .job_execution_metadata import JobExecutionMetadata
 from .job_execution_spec import JobExecutionSpec
+from .job_execution_spec_env_override import JobExecutionSpecEnvOverride
 from .job_execution_stats import JobExecutionStats
 from .job_execution_status import JobExecutionStatus
 from .job_execution_task import JobExecutionTask
@@ -174,10 +176,11 @@ __all__ = (
     "CoreEvent",
     "Country",
     "CreateApiKeyForServiceAccountBody",
+    "CreateJobExecutionOutput",
+    "CreateJobExecutionOutputTasksItem",
     "CreateJobExecutionRequest",
+    "CreateJobExecutionRequestEnv",
     "CreateJobExecutionRequestTasksItem",
-    "CreateJobExecutionResponse",
-    "CreateJobExecutionResponseTasksItem",
     "CreateWorkspaceServiceAccountBody",
     "CreateWorkspaceServiceAccountResponse200",
     "CustomDomain",
@@ -205,8 +208,8 @@ __all__ = (
     "Function",
     "FunctionRuntime",
     "FunctionRuntimeGeneration",
+    "FunctionRuntimeTransport",
     "FunctionSpec",
-    "FunctionSpecTransport",
     "GetWorkspaceServiceAccountsResponse200Item",
     "Image",
     "ImageMetadata",
@@ -232,6 +235,7 @@ __all__ = (
     "JobExecution",
     "JobExecutionMetadata",
     "JobExecutionSpec",
+    "JobExecutionSpecEnvOverride",
     "JobExecutionStats",
     "JobExecutionStatus",
     "JobExecutionTask",
blaxel/core/client/models/{create_job_execution_response.py → create_job_execution_output.py}
RENAMED
@@ -6,16 +6,14 @@ from attrs import field as _attrs_field
 from ..types import UNSET, Unset

 if TYPE_CHECKING:
-    from ..models.create_job_execution_response_tasks_item import (
-        CreateJobExecutionResponseTasksItem,
-    )
+    from ..models.create_job_execution_output_tasks_item import CreateJobExecutionOutputTasksItem


-T = TypeVar("T", bound="CreateJobExecutionResponse")
+T = TypeVar("T", bound="CreateJobExecutionOutput")


 @_attrs_define
-class CreateJobExecutionResponse:
+class CreateJobExecutionOutput:
     """Response returned when a job execution is successfully created. Contains identifiers and the tasks that will be
     executed.

@@ -25,7 +23,7 @@ class CreateJobExecutionResponse:
         id (Union[Unset, str]): Unique identifier for this request, used for idempotency and tracking. Auto-generated if
             not provided in the request. Example: 550e8400-e29b-41d4-a716-446655440000.
         job_id (Union[Unset, str]): Name of the job that this execution belongs to Example: data-processing-job.
-        tasks (Union[Unset, list['CreateJobExecutionResponseTasksItem']]): Array of task configurations that will be
+        tasks (Union[Unset, list['CreateJobExecutionOutputTasksItem']]): Array of task configurations that will be
             executed in parallel according to the job's concurrency settings. Each task can have custom parameters.
         workspace_id (Union[Unset, str]): Name of the workspace where the job execution was created Example: my-
             workspace.
@@ -34,7 +32,7 @@ class CreateJobExecutionResponse:
     execution_id: Union[Unset, str] = UNSET
     id: Union[Unset, str] = UNSET
     job_id: Union[Unset, str] = UNSET
-    tasks: Union[Unset, list["CreateJobExecutionResponseTasksItem"]] = UNSET
+    tasks: Union[Unset, list["CreateJobExecutionOutputTasksItem"]] = UNSET
     workspace_id: Union[Unset, str] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

@@ -75,8 +73,8 @@ class CreateJobExecutionResponse:

     @classmethod
     def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
-        from ..models.create_job_execution_response_tasks_item import (
-            CreateJobExecutionResponseTasksItem,
+        from ..models.create_job_execution_output_tasks_item import (
+            CreateJobExecutionOutputTasksItem,
         )

         if not src_dict:
@@ -91,13 +89,13 @@ class CreateJobExecutionResponse:
         tasks = []
         _tasks = d.pop("tasks", UNSET)
         for tasks_item_data in _tasks or []:
-            tasks_item = CreateJobExecutionResponseTasksItem.from_dict(tasks_item_data)
+            tasks_item = CreateJobExecutionOutputTasksItem.from_dict(tasks_item_data)

             tasks.append(tasks_item)

         workspace_id = d.pop("workspaceId", d.pop("workspace_id", UNSET))

-        create_job_execution_response = cls(
+        create_job_execution_output = cls(
             execution_id=execution_id,
             id=id,
             job_id=job_id,
@@ -105,8 +103,8 @@ class CreateJobExecutionResponse:
             workspace_id=workspace_id,
         )

-        create_job_execution_response.additional_properties = d
-        return create_job_execution_response
+        create_job_execution_output.additional_properties = d
+        return create_job_execution_output

     @property
     def additional_keys(self) -> list[str]:
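Behaviour of the renamed model is unchanged; only the class and module names differ. An illustrative round-trip through `from_dict`, with example values (the camelCase keys mirror the pattern shown for the request model, so treat them as an assumption):

```python
from blaxel.core.client.models import CreateJobExecutionOutput

payload = {  # example values only
    "executionId": "exec-123",
    "jobId": "data-processing-job",
    "workspaceId": "my-workspace",
    "tasks": [{"index": "0"}],
}
output = CreateJobExecutionOutput.from_dict(payload)
if output is not None:
    print(output.execution_id, output.tasks)
```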
blaxel/core/client/models/{create_job_execution_response_tasks_item.py → create_job_execution_output_tasks_item.py}
RENAMED

@@ -3,11 +3,11 @@ from typing import Any, TypeVar
 from attrs import define as _attrs_define
 from attrs import field as _attrs_field

-T = TypeVar("T", bound="CreateJobExecutionResponseTasksItem")
+T = TypeVar("T", bound="CreateJobExecutionOutputTasksItem")


 @_attrs_define
-class CreateJobExecutionResponseTasksItem:
+class CreateJobExecutionOutputTasksItem:
     """ """

     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
@@ -23,10 +23,10 @@ class CreateJobExecutionResponseTasksItem:
         if not src_dict:
             return None
         d = src_dict.copy()
-        create_job_execution_response_tasks_item = cls()
+        create_job_execution_output_tasks_item = cls()

-        create_job_execution_response_tasks_item.additional_properties = d
-        return create_job_execution_response_tasks_item
+        create_job_execution_output_tasks_item.additional_properties = d
+        return create_job_execution_output_tasks_item

     @property
     def additional_keys(self) -> list[str]:
blaxel/core/client/models/create_job_execution_request.py
CHANGED

@@ -6,6 +6,7 @@ from attrs import field as _attrs_field
 from ..types import UNSET, Unset

 if TYPE_CHECKING:
+    from ..models.create_job_execution_request_env import CreateJobExecutionRequestEnv
     from ..models.create_job_execution_request_tasks_item import CreateJobExecutionRequestTasksItem


@@ -17,28 +18,42 @@ class CreateJobExecutionRequest:
     """Request to create a job execution

     Attributes:
+        env (Union[Unset, CreateJobExecutionRequestEnv]): Environment variable overrides (optional, will merge with
+            job's environment variables) Example: {"MY_VAR": "custom_value", "BATCH_SIZE": "100"}.
         execution_id (Union[Unset, str]): Execution ID (optional, will be generated if not provided)
         id (Union[Unset, str]): Unique message ID
        job_id (Union[Unset, str]): Job ID Example: data-processing-job.
+        memory (Union[Unset, int]): Memory override in megabytes (optional, must be lower than or equal to job's
+            configured memory) Example: 2048.
         tasks (Union[Unset, list['CreateJobExecutionRequestTasksItem']]): Array of task parameters for parallel
             execution
         workspace_id (Union[Unset, str]): Workspace ID
     """

+    env: Union[Unset, "CreateJobExecutionRequestEnv"] = UNSET
     execution_id: Union[Unset, str] = UNSET
     id: Union[Unset, str] = UNSET
     job_id: Union[Unset, str] = UNSET
+    memory: Union[Unset, int] = UNSET
     tasks: Union[Unset, list["CreateJobExecutionRequestTasksItem"]] = UNSET
     workspace_id: Union[Unset, str] = UNSET
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

     def to_dict(self) -> dict[str, Any]:
+        env: Union[Unset, dict[str, Any]] = UNSET
+        if self.env and not isinstance(self.env, Unset) and not isinstance(self.env, dict):
+            env = self.env.to_dict()
+        elif self.env and isinstance(self.env, dict):
+            env = self.env
+
         execution_id = self.execution_id

         id = self.id

         job_id = self.job_id

+        memory = self.memory
+
         tasks: Union[Unset, list[dict[str, Any]]] = UNSET
         if not isinstance(self.tasks, Unset):
             tasks = []
@@ -54,12 +69,16 @@ class CreateJobExecutionRequest:
         field_dict: dict[str, Any] = {}
         field_dict.update(self.additional_properties)
         field_dict.update({})
+        if env is not UNSET:
+            field_dict["env"] = env
         if execution_id is not UNSET:
             field_dict["executionId"] = execution_id
         if id is not UNSET:
             field_dict["id"] = id
         if job_id is not UNSET:
             field_dict["jobId"] = job_id
+        if memory is not UNSET:
+            field_dict["memory"] = memory
         if tasks is not UNSET:
             field_dict["tasks"] = tasks
         if workspace_id is not UNSET:
@@ -69,6 +88,7 @@ class CreateJobExecutionRequest:

     @classmethod
     def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T | None:
+        from ..models.create_job_execution_request_env import CreateJobExecutionRequestEnv
         from ..models.create_job_execution_request_tasks_item import (
             CreateJobExecutionRequestTasksItem,
         )
@@ -76,12 +96,21 @@ class CreateJobExecutionRequest:
         if not src_dict:
             return None
         d = src_dict.copy()
+        _env = d.pop("env", UNSET)
+        env: Union[Unset, CreateJobExecutionRequestEnv]
+        if isinstance(_env, Unset):
+            env = UNSET
+        else:
+            env = CreateJobExecutionRequestEnv.from_dict(_env)
+
         execution_id = d.pop("executionId", d.pop("execution_id", UNSET))

         id = d.pop("id", UNSET)

         job_id = d.pop("jobId", d.pop("job_id", UNSET))

+        memory = d.pop("memory", UNSET)
+
         tasks = []
         _tasks = d.pop("tasks", UNSET)
         for tasks_item_data in _tasks or []:
@@ -92,9 +121,11 @@ class CreateJobExecutionRequest:
         workspace_id = d.pop("workspaceId", d.pop("workspace_id", UNSET))

         create_job_execution_request = cls(
+            env=env,
             execution_id=execution_id,
             id=id,
             job_id=job_id,
+            memory=memory,
             tasks=tasks,
             workspace_id=workspace_id,
         )
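A hedged sketch of the two new request fields; the values are examples, and the env mapping is built through `CreateJobExecutionRequestEnv.from_dict`, matching the deserialization path shown above:

```python
from blaxel.core.client.models import CreateJobExecutionRequest, CreateJobExecutionRequestEnv

request = CreateJobExecutionRequest(
    job_id="data-processing-job",
    # New in 0.2.36: per-execution env overrides, merged with the job's own env.
    env=CreateJobExecutionRequestEnv.from_dict({"MY_VAR": "custom_value", "BATCH_SIZE": "100"}),
    # New in 0.2.36: memory override in MB; must not exceed the job's configured memory.
    memory=2048,
)
print(request.to_dict())
```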