blaxel 0.2.36__py3-none-any.whl → 0.2.38rc122__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/__init__.py +2 -2
- blaxel/core/client/models/create_job_execution_request_env.py +3 -3
- blaxel/core/client/models/preview.py +48 -1
- blaxel/core/client/models/sandbox.py +10 -0
- blaxel/core/jobs/__init__.py +2 -2
- blaxel/core/sandbox/__init__.py +12 -0
- blaxel/core/sandbox/client/api/system/__init__.py +0 -0
- blaxel/core/sandbox/client/api/system/get_health.py +134 -0
- blaxel/core/sandbox/client/api/system/post_upgrade.py +196 -0
- blaxel/core/sandbox/client/models/__init__.py +8 -0
- blaxel/core/sandbox/client/models/content_search_match.py +24 -25
- blaxel/core/sandbox/client/models/content_search_response.py +25 -29
- blaxel/core/sandbox/client/models/find_match.py +13 -14
- blaxel/core/sandbox/client/models/find_response.py +21 -24
- blaxel/core/sandbox/client/models/fuzzy_search_match.py +17 -19
- blaxel/core/sandbox/client/models/fuzzy_search_response.py +21 -24
- blaxel/core/sandbox/client/models/health_response.py +159 -0
- blaxel/core/sandbox/client/models/process_upgrade_state.py +20 -0
- blaxel/core/sandbox/client/models/upgrade_request.py +71 -0
- blaxel/core/sandbox/client/models/upgrade_status.py +125 -0
- blaxel/core/sandbox/default/__init__.py +2 -0
- blaxel/core/sandbox/default/filesystem.py +20 -6
- blaxel/core/sandbox/default/preview.py +48 -1
- blaxel/core/sandbox/default/process.py +66 -21
- blaxel/core/sandbox/default/sandbox.py +36 -5
- blaxel/core/sandbox/default/system.py +71 -0
- blaxel/core/sandbox/sync/__init__.py +2 -0
- blaxel/core/sandbox/sync/filesystem.py +19 -2
- blaxel/core/sandbox/sync/preview.py +50 -3
- blaxel/core/sandbox/sync/process.py +38 -15
- blaxel/core/sandbox/sync/sandbox.py +29 -4
- blaxel/core/sandbox/sync/system.py +71 -0
- blaxel/core/sandbox/types.py +212 -5
- blaxel/core/volume/volume.py +6 -0
- blaxel/langgraph/tools.py +0 -1
- blaxel/llamaindex/model.py +119 -74
- blaxel-0.2.38rc122.dist-info/METADATA +569 -0
- {blaxel-0.2.36.dist-info → blaxel-0.2.38rc122.dist-info}/RECORD +40 -31
- blaxel-0.2.36.dist-info/METADATA +0 -228
- {blaxel-0.2.36.dist-info → blaxel-0.2.38rc122.dist-info}/WHEEL +0 -0
- {blaxel-0.2.36.dist-info → blaxel-0.2.38rc122.dist-info}/licenses/LICENSE +0 -0
blaxel-0.2.38rc122.dist-info/METADATA (new file)

@@ -0,0 +1,569 @@
Metadata-Version: 2.4
Name: blaxel
Version: 0.2.38rc122
Summary: Blaxel - AI development platform SDK
Project-URL: Homepage, https://blaxel.ai
Project-URL: Documentation, https://docs.blaxel.ai
Project-URL: Repository, https://github.com/blaxel-ai/sdk-python
Project-URL: Changelog, https://docs.blaxel.ai/changelog
Author-email: cploujoux <cploujoux@blaxel.ai>
License-File: LICENSE
Requires-Python: <3.14,>=3.10
Requires-Dist: attrs>=21.3.0
Requires-Dist: httpx>=0.27.0
Requires-Dist: mcp>=1.9.4
Requires-Dist: pydantic>=2.0.0
Requires-Dist: pyjwt>=2.0.0
Requires-Dist: python-dateutil>=2.8.0
Requires-Dist: pyyaml>=6.0.0
Requires-Dist: requests>=2.32.3
Requires-Dist: tomli>=2.2.1
Requires-Dist: websockets<16.0.0
Provides-Extra: all
Requires-Dist: crewai==0.159.0; extra == 'all'
Requires-Dist: google-adk>=1.4.0; extra == 'all'
Requires-Dist: langchain-anthropic>=0.3.10; extra == 'all'
Requires-Dist: langchain-cerebras<0.6.0,>=0.5.0; extra == 'all'
Requires-Dist: langchain-cohere>=0.4.3; extra == 'all'
Requires-Dist: langchain-community<0.4.0,>=0.3.3; extra == 'all'
Requires-Dist: langchain-core<0.4.0,>=0.3.13; extra == 'all'
Requires-Dist: langchain-deepseek-official>=0.1.0.post1; extra == 'all'
Requires-Dist: langchain-openai>=0.3.10; extra == 'all'
Requires-Dist: langchain-xai>=0.2.2; extra == 'all'
Requires-Dist: langgraph<0.3.0,>=0.2.40; extra == 'all'
Requires-Dist: litellm==1.74.9; extra == 'all'
Requires-Dist: litellm>=1.63.11; extra == 'all'
Requires-Dist: livekit-agents[anthropic,cartesia,deepgram,elevenlabs,groq,openai,silero,turn-detector]~=1.0; extra == 'all'
Requires-Dist: livekit-plugins-noise-cancellation~=0.2; extra == 'all'
Requires-Dist: llama-index-llms-anthropic>=0.6.14; extra == 'all'
Requires-Dist: llama-index-llms-cerebras>=0.2.2; extra == 'all'
Requires-Dist: llama-index-llms-cohere>=0.4.1; extra == 'all'
Requires-Dist: llama-index-llms-deepseek>=0.1.1; extra == 'all'
Requires-Dist: llama-index-llms-google-genai>=0.1.13; extra == 'all'
Requires-Dist: llama-index-llms-groq>=0.3.1; extra == 'all'
Requires-Dist: llama-index-llms-mistralai>=0.4.0; extra == 'all'
Requires-Dist: llama-index-llms-openai>=0.3.42; extra == 'all'
Requires-Dist: llama-index>=0.12.46; extra == 'all'
Requires-Dist: openai-agents>=0.0.19; extra == 'all'
Requires-Dist: openai==1.99.9; extra == 'all'
Requires-Dist: opentelemetry-exporter-otlp>=1.28.0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-anthropic==0.41.0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-cohere==0.41.0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-crewai==0.41.0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-fastapi==0.56b0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-google-generativeai==0.41.0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-langchain>=0.35.0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-llamaindex>=0.40.7; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-ollama==0.41.0; extra == 'all'
Requires-Dist: opentelemetry-instrumentation-openai==0.41.0; extra == 'all'
Requires-Dist: pillow>=10.0.0; extra == 'all'
Requires-Dist: pydantic-ai>=0.0.48; extra == 'all'
Provides-Extra: core
Provides-Extra: crewai
Requires-Dist: crewai==0.159.0; extra == 'crewai'
Requires-Dist: litellm==1.74.9; extra == 'crewai'
Requires-Dist: openai==1.99.9; extra == 'crewai'
Requires-Dist: opentelemetry-instrumentation-crewai==0.41.0; extra == 'crewai'
Provides-Extra: dev
Requires-Dist: pyright; extra == 'dev'
Requires-Dist: pytest; extra == 'dev'
Requires-Dist: ruff; extra == 'dev'
Requires-Dist: uv; extra == 'dev'
Provides-Extra: googleadk
Requires-Dist: google-adk>=1.4.0; extra == 'googleadk'
Requires-Dist: litellm>=1.63.11; extra == 'googleadk'
Provides-Extra: langgraph
Requires-Dist: langchain-anthropic>=0.3.10; extra == 'langgraph'
Requires-Dist: langchain-cerebras<0.6.0,>=0.5.0; extra == 'langgraph'
Requires-Dist: langchain-cohere>=0.4.3; extra == 'langgraph'
Requires-Dist: langchain-community<0.4.0,>=0.3.3; extra == 'langgraph'
Requires-Dist: langchain-core<0.4.0,>=0.3.13; extra == 'langgraph'
Requires-Dist: langchain-deepseek-official>=0.1.0.post1; extra == 'langgraph'
Requires-Dist: langchain-openai>=0.3.10; extra == 'langgraph'
Requires-Dist: langchain-xai>=0.2.2; extra == 'langgraph'
Requires-Dist: langgraph<0.3.0,>=0.2.40; extra == 'langgraph'
Requires-Dist: opentelemetry-instrumentation-langchain>=0.35.0; extra == 'langgraph'
Requires-Dist: pillow>=10.0.0; extra == 'langgraph'
Provides-Extra: livekit
Requires-Dist: livekit-agents[anthropic,cartesia,deepgram,elevenlabs,groq,openai,silero,turn-detector]~=1.0; extra == 'livekit'
Requires-Dist: livekit-plugins-noise-cancellation~=0.2; extra == 'livekit'
Provides-Extra: llamaindex
Requires-Dist: llama-index-llms-anthropic>=0.6.14; extra == 'llamaindex'
Requires-Dist: llama-index-llms-cerebras>=0.2.2; extra == 'llamaindex'
Requires-Dist: llama-index-llms-cohere>=0.4.1; extra == 'llamaindex'
Requires-Dist: llama-index-llms-deepseek>=0.1.1; extra == 'llamaindex'
Requires-Dist: llama-index-llms-google-genai>=0.1.13; extra == 'llamaindex'
Requires-Dist: llama-index-llms-groq>=0.3.1; extra == 'llamaindex'
Requires-Dist: llama-index-llms-mistralai>=0.4.0; extra == 'llamaindex'
Requires-Dist: llama-index-llms-openai>=0.3.42; extra == 'llamaindex'
Requires-Dist: llama-index>=0.12.46; extra == 'llamaindex'
Requires-Dist: opentelemetry-instrumentation-llamaindex>=0.40.7; extra == 'llamaindex'
Provides-Extra: openai
Requires-Dist: openai-agents>=0.0.19; extra == 'openai'
Provides-Extra: pydantic
Requires-Dist: pydantic-ai>=0.0.48; extra == 'pydantic'
Provides-Extra: telemetry
Requires-Dist: opentelemetry-exporter-otlp>=1.28.0; extra == 'telemetry'
Requires-Dist: opentelemetry-instrumentation-anthropic==0.41.0; extra == 'telemetry'
Requires-Dist: opentelemetry-instrumentation-cohere==0.41.0; extra == 'telemetry'
Requires-Dist: opentelemetry-instrumentation-fastapi==0.56b0; extra == 'telemetry'
Requires-Dist: opentelemetry-instrumentation-google-generativeai==0.41.0; extra == 'telemetry'
Requires-Dist: opentelemetry-instrumentation-ollama==0.41.0; extra == 'telemetry'
Requires-Dist: opentelemetry-instrumentation-openai==0.41.0; extra == 'telemetry'
Description-Content-Type: text/markdown

# Blaxel Python SDK

[Blaxel](https://blaxel.ai) is a perpetual sandbox platform that achieves near-instant latency by keeping infinite secure sandboxes on automatic standby, while co-hosting your agent logic to cut network overhead.

This repository contains Blaxel's Python SDK, which lets you create and manage sandboxes and other resources on Blaxel.

## Installation

```bash
pip install blaxel
```

## Authentication

The SDK authenticates with your Blaxel workspace using these sources (in priority order):

1. Blaxel CLI, when logged in
2. Environment variables in a `.env` file (`BL_WORKSPACE`, `BL_API_KEY`)
3. System environment variables
4. Blaxel configuration file (`~/.blaxel/config.yaml`)

When developing locally, the recommended method is to log in to your workspace with the Blaxel CLI:

```bash
bl login YOUR-WORKSPACE
```

This lets you run Blaxel SDK functions that automatically connect to your workspace without additional setup. When you deploy on Blaxel, this connection persists automatically.

When running the Blaxel SDK from a remote server that is not Blaxel-hosted, we recommend using environment variables as described in the third option above.
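
For example, on such a server you can set these variables before the SDK is imported; a minimal sketch with placeholder values:

```python
import os

# Placeholder credentials: when there is no CLI session, the SDK reads
# BL_WORKSPACE and BL_API_KEY from the environment (option 3 above).
os.environ["BL_WORKSPACE"] = "YOUR-WORKSPACE"
os.environ["BL_API_KEY"] = "YOUR-API-KEY"

# Import the SDK after the credentials are in place.
from blaxel.core import SandboxInstance
```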

## Usage

### Sandboxes

Sandboxes are secure, instant-launching compute environments that scale to zero after inactivity and resume in under 25ms.

```python
import asyncio
from blaxel.core import SandboxInstance

async def main():

    # Create a new sandbox
    sandbox = await SandboxInstance.create_if_not_exists({
        "name": "my-sandbox",
        "image": "blaxel/base-image:latest",
        "memory": 4096,
        "region": "us-pdx-1",
        "ports": [{"target": 3000, "protocol": "HTTP"}],
        "labels": {"env": "dev", "project": "my-project"},
        "ttl": "24h"
    })

    # Get existing sandbox
    existing = await SandboxInstance.get("my-sandbox")

    # Delete sandbox (using class)
    await SandboxInstance.delete("my-sandbox")

    # Delete sandbox (using instance)
    await existing.delete()

if __name__ == "__main__":
    asyncio.run(main())
```

#### Preview URLs

Generate public preview URLs to access services running in your sandbox:

```python
import asyncio
from blaxel.core import SandboxInstance

async def main():

    # Get existing sandbox
    sandbox = await SandboxInstance.get("my-sandbox")

    # Start a web server in the sandbox
    await sandbox.process.exec({
        "command": "python -m http.server 3000",
        "working_dir": "/app",
        "wait_for_ports": [3000]
    })

    # Create a public preview URL
    preview = await sandbox.previews.create_if_not_exists({
        "metadata": {"name": "app-preview"},
        "spec": {
            "port": 3000,
            "public": True
        }
    })

    print(preview.spec.url)  # https://xyz.preview.bl.run

if __name__ == "__main__":
    asyncio.run(main())
```

Previews can also be private, with or without a custom prefix. When you create a private preview URL, a [token](https://docs.blaxel.ai/Sandboxes/Preview-url#private-preview-urls) is required to access the URL, passed as a request parameter or request header.

```python
# ...

# Create a private preview URL
private_preview = await sandbox.previews.create_if_not_exists({
    "metadata": {"name": "private-app-preview"},
    "spec": {
        "port": 3000,
        "public": False
    }
})

# Create a public preview URL with a custom prefix
custom_preview = await sandbox.previews.create_if_not_exists({
    "metadata": {"name": "custom-app-preview"},
    "spec": {
        "port": 3000,
        "prefix_url": "my-app",
        "public": True
    }
})
```
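
To call a private preview URL, send the token with the request. A minimal sketch using `httpx` (already a dependency of the SDK); the parameter name below is a placeholder, so check the linked documentation for the exact query parameter or header and for how to obtain the token:

```python
import httpx

# Placeholder values: use the URL of the private preview created above
# (e.g. private_preview.spec.url) and a token obtained as described in the
# linked documentation.
preview_url = "https://xyz.preview.bl.run"
token = "YOUR-PREVIEW-TOKEN"

# "bl_preview_token" is an illustrative parameter name, not the documented one;
# the token can also be sent as a request header instead.
response = httpx.get(preview_url, params={"bl_preview_token": token})
print(response.status_code)
```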

#### Process execution

Execute and manage processes in your sandbox:

```python
import asyncio
from blaxel.core import SandboxInstance

async def main():

    # Get existing sandbox
    sandbox = await SandboxInstance.get("my-sandbox")

    # Execute a command
    process = await sandbox.process.exec({
        "name": "build-process",
        "command": "npm run build",
        "working_dir": "/app",
        "wait_for_completion": True,
        "timeout": 60000  # 60 seconds
    })

    # Kill a running process
    await sandbox.process.kill("build-process")

if __name__ == "__main__":
    asyncio.run(main())
```

Restart a process if it fails, up to a maximum number of restart attempts:

```python
# ...

# Run with auto-restart on failure
process = await sandbox.process.exec({
    "name": "web-server",
    "command": "python -m http.server 3000 --bind 0.0.0.0",
    "restart_on_failure": True,
    "max_restarts": 5
})
```

#### Filesystem operations

Manage files and directories within your sandbox:

```python
import asyncio
from blaxel.core import SandboxInstance

async def main():

    # Get existing sandbox
    sandbox = await SandboxInstance.get("my-sandbox")

    # Write and read text files
    await sandbox.fs.write("/app/config.json", '{"key": "value"}')
    content = await sandbox.fs.read("/app/config.json")

    # Write and read binary files
    with open("./image.png", "rb") as f:
        binary_data = f.read()
    await sandbox.fs.write_binary("/app/image.png", binary_data)
    blob = await sandbox.fs.read_binary("/app/image.png")

    # Create directories
    await sandbox.fs.mkdir("/app/uploads")

    # List files
    listing = await sandbox.fs.ls("/app")
    subdirectories = listing.subdirectories
    files = listing.files

    # Search for text within files
    matches = await sandbox.fs.grep("pattern", "/app", case_sensitive=True, context_lines=2, max_results=5, file_pattern="*.py", exclude_dirs=["__pycache__"])

    # Find files and directories matching specified patterns
    results = await sandbox.fs.find("/app", type="file", patterns=["*.md", "*.html"], max_results=1000)

    # Watch for file changes
    def on_change(event):
        print(event.op, event.path)

    handle = sandbox.fs.watch("/app", on_change, {
        "with_content": True,
        "ignore": ["node_modules", ".git"]
    })

    # Close watcher
    handle["close"]()

if __name__ == "__main__":
    asyncio.run(main())
```

#### Volumes

Persist data by attaching and using volumes:

```python
import asyncio
from blaxel.core import VolumeInstance, SandboxInstance

async def main():

    # Create a volume
    volume = await VolumeInstance.create_if_not_exists({
        "name": "my-volume",
        "size": 1024,  # MB
        "region": "us-pdx-1",
        "labels": {"env": "test", "project": "12345"}
    })

    # Attach volume to sandbox
    sandbox = await SandboxInstance.create_if_not_exists({
        "name": "my-sandbox",
        "image": "blaxel/base-image:latest",
        "volumes": [
            {"name": "my-volume", "mount_path": "/data", "read_only": False}
        ]
    })

    # List volumes
    volumes = await VolumeInstance.list()

    # Delete volume (using class)
    await VolumeInstance.delete("my-volume")

    # Delete volume (using instance)
    await volume.delete()

if __name__ == "__main__":
    asyncio.run(main())
```

### Batch jobs

Blaxel lets you support agentic workflows by offloading asynchronous batch processing tasks to its scalable infrastructure, where they can run in parallel. Jobs can run multiple times within a single execution and accept optional input parameters.

```python
import asyncio
from blaxel.core.jobs import bl_job
from blaxel.core.client.models import CreateJobExecutionRequest

async def main():
    # Create and run a job execution
    job = bl_job("job-name")

    execution_id = await job.acreate_execution(CreateJobExecutionRequest(
        tasks=[
            {"name": "John"},
            {"name": "Jane"},
            {"name": "Bob"}
        ]
    ))

    # Get execution status
    # Returns: "pending" | "running" | "completed" | "failed"
    status = await job.aget_execution_status(execution_id)

    # Get execution details
    execution = await job.aget_execution(execution_id)
    print(execution.status, execution.metadata)

    # Wait for completion
    try:
        result = await job.await_for_execution(
            execution_id,
            max_wait=300,  # 5 minutes (seconds)
            interval=2  # Poll every 2 seconds
        )
        print(f"Completed: {result.status}")
    except Exception as error:
        print(f"Timeout: {error}")

    # List all executions
    executions = await job.alist_executions()

    # Delete an execution
    await job.acancel_execution(execution_id)

if __name__ == "__main__":
    asyncio.run(main())
```

Synchronous calls are [also available](https://docs.blaxel.ai/Jobs/Manage-job-execution-py).

### Framework integrations

Blaxel provides additional packages for framework-specific integrations and telemetry:

```bash
# With specific integrations
pip install "blaxel[telemetry]"
pip install "blaxel[crewai]"
pip install "blaxel[openai]"
pip install "blaxel[langgraph]"
pip install "blaxel[livekit]"
pip install "blaxel[llamaindex]"
pip install "blaxel[pydantic]"
pip install "blaxel[googleadk]"

# Everything
pip install "blaxel[all]"
```

#### Model use

Blaxel acts as a unified gateway for model APIs, centralizing access credentials, tracing and telemetry. You can integrate with any model API provider, or deploy your own custom model. When a model is deployed on Blaxel, a global API endpoint is also created to call it.

The SDK includes a helper function that creates a reference to a model deployed on Blaxel and returns a framework-specific model client that routes API calls through Blaxel's unified gateway.

```python
from blaxel.core import bl_model

# With OpenAI
from blaxel.openai import bl_model
model = await bl_model("gpt-5-mini")

# With LangChain
from blaxel.langgraph import bl_model
model = await bl_model("gpt-5-mini")

# With LlamaIndex
from blaxel.llamaindex import bl_model
model = await bl_model("gpt-5-mini")

# With Pydantic AI
from blaxel.pydantic import bl_model
model = await bl_model("gpt-5-mini")

# With CrewAI
from blaxel.crewai import bl_model
model = await bl_model("gpt-5-mini")

# With Google ADK
from blaxel.googleadk import bl_model
model = await bl_model("gpt-5-mini")

# With LiveKit
from blaxel.livekit import bl_model
model = await bl_model("gpt-5-mini")
```
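
The returned object is the framework's native model client, so you use it with that framework's normal calling conventions. A minimal sketch with the LangGraph/LangChain variant, assuming the client implements LangChain's standard chat-model interface (`ainvoke`):

```python
import asyncio
from blaxel.langgraph import bl_model

async def main():
    # Reference a model deployed on Blaxel; calls are routed through the gateway.
    model = await bl_model("gpt-5-mini")

    # Standard LangChain usage (assumed interface).
    response = await model.ainvoke("Say hello in one short sentence.")
    print(response.content)

if __name__ == "__main__":
    asyncio.run(main())
```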

#### MCP tool use

Blaxel lets you deploy and host Model Context Protocol (MCP) servers, accessible at a global endpoint over streamable HTTP.

The SDK includes a helper function that retrieves and returns tool definitions from a Blaxel-hosted MCP server in the format required by specific frameworks.

```python
# With OpenAI
from blaxel.openai import bl_tools
tools = await bl_tools(["sandbox/my-sandbox"])

# With Pydantic AI
from blaxel.pydantic import bl_tools
tools = await bl_tools(["sandbox/my-sandbox"])

# With LlamaIndex
from blaxel.llamaindex import bl_tools
tools = await bl_tools(["sandbox/my-sandbox"])

# With LangChain
from blaxel.langgraph import bl_tools
tools = await bl_tools(["sandbox/my-sandbox"])

# With CrewAI
from blaxel.crewai import bl_tools
tools = await bl_tools(["sandbox/my-sandbox"])

# With Google ADK
from blaxel.googleadk import bl_tools
tools = await bl_tools(["sandbox/my-sandbox"])

# With LiveKit
from blaxel.livekit import bl_tools
tools = await bl_tools(["sandbox/my-sandbox"])
```

Here is an example of retrieving tool definitions from a Blaxel sandbox's MCP server for use with the OpenAI SDK:

```python
import asyncio
from blaxel.core import SandboxInstance
from blaxel.openai import bl_tools

async def main():

    # Create a new sandbox
    sandbox = await SandboxInstance.create_if_not_exists({
        "name": "my-sandbox",
        "image": "blaxel/base-image:latest",
        "memory": 4096,
        "region": "us-pdx-1",
        "ports": [{"target": 3000, "protocol": "HTTP"}],
        "ttl": "24h"
    })

    # Get sandbox MCP tools
    tools = await bl_tools(["sandbox/my-sandbox"])

if __name__ == "__main__":
    asyncio.run(main())
```
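
From there, the tools can be handed to the framework as usual. A minimal sketch with the `openai-agents` package (installed by the `blaxel[openai]` extra); the agent name, instructions, and prompt are illustrative:

```python
import asyncio
from agents import Agent, Runner
from blaxel.openai import bl_tools

async def main():
    # Tools from the sandbox MCP server, as in the example above.
    tools = await bl_tools(["sandbox/my-sandbox"])

    # Hand the tools to an OpenAI Agents SDK agent (name, instructions, and
    # prompt below are illustrative).
    agent = Agent(
        name="sandbox-agent",
        instructions="Use the sandbox tools to inspect and modify files.",
        tools=tools,
    )

    result = await Runner.run(agent, "List the files in /app")
    print(result.final_output)

if __name__ == "__main__":
    asyncio.run(main())
```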

### Telemetry

Instrumentation happens automatically when workloads run on Blaxel.

Enable automatic telemetry by importing the `blaxel.telemetry` package:

```python
import blaxel.telemetry
```
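
A minimal sketch of where the import typically sits — at the top of your application's entry point, before the rest of your code runs:

```python
# Importing the package enables instrumentation as a side effect.
import blaxel.telemetry  # noqa: F401

from blaxel.core import SandboxInstance

# ... the rest of your application
```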

## Requirements

- Python 3.10 or later (below 3.14)

## Contributing

Contributions are welcome! Please feel free to [submit a pull request](https://github.com/blaxel-ai/sdk-python/pulls).

## License

This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.