blaxel 0.2.36__py3-none-any.whl → 0.2.38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. blaxel/__init__.py +2 -2
  2. blaxel/core/client/models/create_job_execution_request_env.py +3 -3
  3. blaxel/core/client/models/preview.py +48 -1
  4. blaxel/core/client/models/sandbox.py +10 -0
  5. blaxel/core/jobs/__init__.py +2 -2
  6. blaxel/core/sandbox/__init__.py +12 -0
  7. blaxel/core/sandbox/client/api/system/__init__.py +0 -0
  8. blaxel/core/sandbox/client/api/system/get_health.py +134 -0
  9. blaxel/core/sandbox/client/api/system/post_upgrade.py +196 -0
  10. blaxel/core/sandbox/client/models/__init__.py +8 -0
  11. blaxel/core/sandbox/client/models/content_search_match.py +24 -25
  12. blaxel/core/sandbox/client/models/content_search_response.py +25 -29
  13. blaxel/core/sandbox/client/models/find_match.py +13 -14
  14. blaxel/core/sandbox/client/models/find_response.py +21 -24
  15. blaxel/core/sandbox/client/models/fuzzy_search_match.py +17 -19
  16. blaxel/core/sandbox/client/models/fuzzy_search_response.py +21 -24
  17. blaxel/core/sandbox/client/models/health_response.py +159 -0
  18. blaxel/core/sandbox/client/models/process_upgrade_state.py +20 -0
  19. blaxel/core/sandbox/client/models/upgrade_request.py +71 -0
  20. blaxel/core/sandbox/client/models/upgrade_status.py +125 -0
  21. blaxel/core/sandbox/default/__init__.py +2 -0
  22. blaxel/core/sandbox/default/filesystem.py +20 -6
  23. blaxel/core/sandbox/default/preview.py +48 -1
  24. blaxel/core/sandbox/default/process.py +66 -21
  25. blaxel/core/sandbox/default/sandbox.py +36 -5
  26. blaxel/core/sandbox/default/system.py +71 -0
  27. blaxel/core/sandbox/sync/__init__.py +2 -0
  28. blaxel/core/sandbox/sync/filesystem.py +19 -2
  29. blaxel/core/sandbox/sync/preview.py +50 -3
  30. blaxel/core/sandbox/sync/process.py +38 -15
  31. blaxel/core/sandbox/sync/sandbox.py +29 -4
  32. blaxel/core/sandbox/sync/system.py +71 -0
  33. blaxel/core/sandbox/types.py +212 -5
  34. blaxel/core/tools/__init__.py +4 -0
  35. blaxel/core/volume/volume.py +10 -0
  36. blaxel/crewai/model.py +81 -44
  37. blaxel/crewai/tools.py +85 -2
  38. blaxel/googleadk/model.py +22 -3
  39. blaxel/googleadk/tools.py +25 -6
  40. blaxel/langgraph/custom/gemini.py +19 -12
  41. blaxel/langgraph/model.py +26 -18
  42. blaxel/langgraph/tools.py +6 -12
  43. blaxel/livekit/model.py +7 -2
  44. blaxel/livekit/tools.py +3 -1
  45. blaxel/llamaindex/model.py +145 -84
  46. blaxel/llamaindex/tools.py +6 -4
  47. blaxel/openai/model.py +7 -1
  48. blaxel/openai/tools.py +13 -3
  49. blaxel/pydantic/model.py +38 -24
  50. blaxel/pydantic/tools.py +37 -4
  51. blaxel-0.2.38.dist-info/METADATA +528 -0
  52. {blaxel-0.2.36.dist-info → blaxel-0.2.38.dist-info}/RECORD +54 -45
  53. blaxel-0.2.36.dist-info/METADATA +0 -228
  54. {blaxel-0.2.36.dist-info → blaxel-0.2.38.dist-info}/WHEEL +0 -0
  55. {blaxel-0.2.36.dist-info → blaxel-0.2.38.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,528 @@
1
+ Metadata-Version: 2.4
2
+ Name: blaxel
3
+ Version: 0.2.38
4
+ Summary: Blaxel - AI development platform SDK
5
+ Project-URL: Homepage, https://blaxel.ai
6
+ Project-URL: Documentation, https://docs.blaxel.ai
7
+ Project-URL: Repository, https://github.com/blaxel-ai/sdk-python
8
+ Project-URL: Changelog, https://docs.blaxel.ai/changelog
9
+ Author-email: cploujoux <cploujoux@blaxel.ai>
10
+ License-File: LICENSE
11
+ Requires-Python: <3.14,>=3.10
12
+ Requires-Dist: attrs>=21.3.0
13
+ Requires-Dist: httpx>=0.27.0
14
+ Requires-Dist: mcp>=1.9.4
15
+ Requires-Dist: pydantic>=2.0.0
16
+ Requires-Dist: pyjwt>=2.0.0
17
+ Requires-Dist: python-dateutil>=2.8.0
18
+ Requires-Dist: pyyaml>=6.0.0
19
+ Requires-Dist: requests>=2.32.3
20
+ Requires-Dist: tomli>=2.0.2
21
+ Requires-Dist: websockets<16.0.0
22
+ Provides-Extra: core
23
+ Provides-Extra: crewai
24
+ Requires-Dist: crewai[litellm]==1.9.3; extra == 'crewai'
25
+ Requires-Dist: opentelemetry-instrumentation-crewai==0.41.0; extra == 'crewai'
26
+ Provides-Extra: dev
27
+ Requires-Dist: pyright; extra == 'dev'
28
+ Requires-Dist: pytest; extra == 'dev'
29
+ Requires-Dist: ruff; extra == 'dev'
30
+ Requires-Dist: uv; extra == 'dev'
31
+ Provides-Extra: googleadk
32
+ Requires-Dist: google-adk>=1.4.0; extra == 'googleadk'
33
+ Requires-Dist: litellm>=1.63.11; extra == 'googleadk'
34
+ Provides-Extra: langgraph
35
+ Requires-Dist: langchain-anthropic>=0.3.10; extra == 'langgraph'
36
+ Requires-Dist: langchain-cerebras<0.6.0,>=0.5.0; extra == 'langgraph'
37
+ Requires-Dist: langchain-cohere>=0.4.3; extra == 'langgraph'
38
+ Requires-Dist: langchain-community<0.4.0,>=0.3.3; extra == 'langgraph'
39
+ Requires-Dist: langchain-core<0.4.0,>=0.3.13; extra == 'langgraph'
40
+ Requires-Dist: langchain-deepseek-official>=0.1.0.post1; extra == 'langgraph'
41
+ Requires-Dist: langchain-openai>=0.3.10; extra == 'langgraph'
42
+ Requires-Dist: langchain-xai>=0.2.2; extra == 'langgraph'
43
+ Requires-Dist: langgraph<0.3.0,>=0.2.40; extra == 'langgraph'
44
+ Requires-Dist: opentelemetry-instrumentation-langchain>=0.35.0; extra == 'langgraph'
45
+ Requires-Dist: pillow>=10.0.0; extra == 'langgraph'
46
+ Provides-Extra: livekit
47
+ Requires-Dist: livekit-agents[anthropic,cartesia,deepgram,elevenlabs,groq,openai,silero,turn-detector]~=1.0; extra == 'livekit'
48
+ Requires-Dist: livekit-plugins-noise-cancellation~=0.2; extra == 'livekit'
49
+ Provides-Extra: llamaindex
50
+ Requires-Dist: llama-index-llms-anthropic>=0.6.14; extra == 'llamaindex'
51
+ Requires-Dist: llama-index-llms-cerebras>=0.2.2; extra == 'llamaindex'
52
+ Requires-Dist: llama-index-llms-cohere>=0.4.1; extra == 'llamaindex'
53
+ Requires-Dist: llama-index-llms-deepseek>=0.1.1; extra == 'llamaindex'
54
+ Requires-Dist: llama-index-llms-google-genai>=0.1.13; extra == 'llamaindex'
55
+ Requires-Dist: llama-index-llms-groq>=0.3.1; extra == 'llamaindex'
56
+ Requires-Dist: llama-index-llms-mistralai>=0.4.0; extra == 'llamaindex'
57
+ Requires-Dist: llama-index-llms-openai>=0.3.42; extra == 'llamaindex'
58
+ Requires-Dist: llama-index>=0.14.13; extra == 'llamaindex'
59
+ Requires-Dist: opentelemetry-instrumentation-llamaindex>=0.40.7; extra == 'llamaindex'
60
+ Provides-Extra: openai
61
+ Requires-Dist: openai-agents>=0.0.19; extra == 'openai'
62
+ Provides-Extra: pydantic
63
+ Requires-Dist: pydantic-ai>=0.0.48; extra == 'pydantic'
64
+ Provides-Extra: telemetry
65
+ Requires-Dist: opentelemetry-exporter-otlp>=1.28.0; extra == 'telemetry'
66
+ Requires-Dist: opentelemetry-instrumentation-anthropic==0.41.0; extra == 'telemetry'
67
+ Requires-Dist: opentelemetry-instrumentation-cohere==0.41.0; extra == 'telemetry'
68
+ Requires-Dist: opentelemetry-instrumentation-fastapi==0.55b0; extra == 'telemetry'
69
+ Requires-Dist: opentelemetry-instrumentation-google-generativeai==0.41.0; extra == 'telemetry'
70
+ Requires-Dist: opentelemetry-instrumentation-ollama==0.41.0; extra == 'telemetry'
71
+ Requires-Dist: opentelemetry-instrumentation-openai==0.41.0; extra == 'telemetry'
72
+ Description-Content-Type: text/markdown
73
+
74
+ # Blaxel Python SDK
75
+
76
+ [Blaxel](https://blaxel.ai) is a perpetual sandbox platform that achieves near-instant latency by keeping infinite secure sandboxes on automatic standby, while co-hosting your agent logic to cut network overhead.
77
+
78
+ This repository contains Blaxel's Python SDK, which lets you create and manage sandboxes and other resources on Blaxel.
79
+
80
+ ## Installation
81
+
82
+ ```bash
83
+ pip install blaxel
84
+ ```
85
+
86
+ ## Authentication
87
+
88
+ The SDK authenticates with your Blaxel workspace using these sources (in priority order):
89
+
90
+ 1. Blaxel CLI, when logged in
91
+ 2. Environment variables in a `.env` file (`BL_WORKSPACE`, `BL_API_KEY`)
92
+ 3. System environment variables
93
+ 4. Blaxel configuration file (`~/.blaxel/config.yaml`)
94
+
95
+ When developing locally, the recommended method is to just log in to your workspace with the Blaxel CLI:
96
+
97
+ ```bash
98
+ bl login YOUR-WORKSPACE
99
+ ```
100
+
101
+ This allows you to run Blaxel SDK functions that will automatically connect to your workspace without additional setup. When you deploy on Blaxel, this connection persists automatically.
102
+
103
+ When running Blaxel SDK from a remote server that is not Blaxel-hosted, we recommend using environment variables as described in the third option above.
104
+
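+ For example, on a remote server or CI runner you can set the two variables before using the SDK. This is only an illustrative sketch reusing the variable names listed above; replace the placeholder values with your own workspace and API key:
+
+ ```python
+ import os
+
+ # Make the credentials available as system environment variables (source 3 above)
+ os.environ["BL_WORKSPACE"] = "my-workspace"   # your workspace name
+ os.environ["BL_API_KEY"] = "<your-api-key>"   # an API key for that workspace
+
+ # The SDK picks the credentials up from the environment when it is used
+ from blaxel.core import SandboxInstance
+ ```
+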
105
+ ## Usage
106
+
107
+ ### Sandboxes
108
+
109
+ Sandboxes are secure, instant-launching compute environments that scale to zero after inactivity and resume in under 25ms.
110
+
111
+ ```python
+ import asyncio
+ from blaxel.core import SandboxInstance
+
+ async def main():
+     # Create a new sandbox
+     sandbox = await SandboxInstance.create_if_not_exists({
+         "name": "my-sandbox",
+         "image": "blaxel/base-image:latest",
+         "memory": 4096,
+         "region": "us-pdx-1",
+         "ports": [{"target": 3000, "protocol": "HTTP"}],
+         "labels": {"env": "dev", "project": "my-project"},
+         "ttl": "24h"
+     })
+
+     # Get existing sandbox
+     existing = await SandboxInstance.get("my-sandbox")
+
+     # Delete sandbox (using class)
+     await SandboxInstance.delete("my-sandbox")
+
+     # Delete sandbox (using instance)
+     await existing.delete()
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
140
+
141
+ #### Preview URLs
142
+
143
+ Generate public preview URLs to access services running in your sandbox:
144
+
145
+ ```python
+ import asyncio
+ from blaxel.core import SandboxInstance
+
+ async def main():
+     # Get existing sandbox
+     sandbox = await SandboxInstance.get("my-sandbox")
+
+     # Start a web server in the sandbox
+     await sandbox.process.exec({
+         "command": "python -m http.server 3000",
+         "working_dir": "/app",
+         "wait_for_ports": [3000]
+     })
+
+     # Create a public preview URL
+     preview = await sandbox.previews.create_if_not_exists({
+         "metadata": {"name": "app-preview"},
+         "spec": {
+             "port": 3000,
+             "public": True
+         }
+     })
+
+     print(preview.spec.url)  # https://xyz.preview.bl.run
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
175
+
176
+ Previews can also be private, with or without a custom prefix. When you create a private preview URL, a [token](https://docs.blaxel.ai/Sandboxes/Preview-url#private-preview-urls) is required to access the URL, passed as a request parameter or request header.
177
+
178
+ ```python
+ # ...
+
+ # Create a private preview URL
+ private_preview = await sandbox.previews.create_if_not_exists({
+     "metadata": {"name": "private-app-preview"},
+     "spec": {
+         "port": 3000,
+         "public": False
+     }
+ })
+
+ # Create a public preview URL with a custom prefix
+ custom_preview = await sandbox.previews.create_if_not_exists({
+     "metadata": {"name": "custom-app-preview"},
+     "spec": {
+         "port": 3000,
+         "prefix_url": "my-app",
+         "public": True
+     }
+ })
+ ```
200
+
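+ To sanity-check a public preview from your own machine, you can simply request its URL; `httpx` is already installed as a dependency of `blaxel`. A minimal sketch continuing from the example above:
+
+ ```python
+ # ...
+
+ import httpx
+
+ # Call the public preview URL created above to confirm the sandboxed server responds
+ response = httpx.get(preview.spec.url)
+ print(response.status_code)
+
+ # For a private preview, the request must also carry the access token described
+ # in the documentation linked above (as a request parameter or header).
+ ```
+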
201
+ #### Process execution
202
+
203
+ Execute and manage processes in your sandbox:
204
+
205
+ ```python
+ import asyncio
+ from blaxel.core import SandboxInstance
+
+ async def main():
+     # Get existing sandbox
+     sandbox = await SandboxInstance.get("my-sandbox")
+
+     # Execute a command
+     process = await sandbox.process.exec({
+         "name": "build-process",
+         "command": "npm run build",
+         "working_dir": "/app",
+         "wait_for_completion": True,
+         "timeout": 60000  # 60 seconds
+     })
+
+     # Kill a running process
+     await sandbox.process.kill("build-process")
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
229
+
230
+ Restart a process if it fails, up to a maximum number of restart attempts:
231
+
232
+ ```python
+ # ...
+
+ # Run with auto-restart on failure
+ process = await sandbox.process.exec({
+     "name": "web-server",
+     "command": "python -m http.server 3000 --bind 0.0.0.0",
+     "restart_on_failure": True,
+     "max_restarts": 5
+ })
+ ```
243
+
244
+ #### Filesystem operations
245
+
246
+ Manage files and directories within your sandbox:
247
+
248
+ ```python
+ import asyncio
+ from blaxel.core import SandboxInstance
+
+ async def main():
+     # Get existing sandbox
+     sandbox = await SandboxInstance.get("my-sandbox")
+
+     # Write and read text files
+     await sandbox.fs.write("/app/config.json", '{"key": "value"}')
+     content = await sandbox.fs.read("/app/config.json")
+
+     # Write and read binary files
+     with open("./image.png", "rb") as f:
+         binary_data = f.read()
+     await sandbox.fs.write_binary("/app/image.png", binary_data)
+     blob = await sandbox.fs.read_binary("/app/image.png")
+
+     # Create directories
+     await sandbox.fs.mkdir("/app/uploads")
+
+     # List files
+     listing = await sandbox.fs.ls("/app")
+     subdirectories = listing.subdirectories
+     files = listing.files
+
+     # Search for text within files
+     matches = await sandbox.fs.grep(
+         "pattern",
+         "/app",
+         case_sensitive=True,
+         context_lines=2,
+         max_results=5,
+         file_pattern="*.py",
+         exclude_dirs=["__pycache__"]
+     )
+
+     # Find files and directories matching specified patterns
+     results = await sandbox.fs.find("/app", type="file", patterns=["*.md", "*.html"], max_results=1000)
+
+     # Watch for file changes
+     def on_change(event):
+         print(event.op, event.path)
+
+     handle = sandbox.fs.watch("/app", on_change, {
+         "with_content": True,
+         "ignore": ["node_modules", ".git"]
+     })
+
+     # Close watcher
+     handle["close"]()
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
296
+
297
+ #### Volumes
298
+
299
+ Persist data by attaching and using volumes:
300
+
301
+ ```python
+ import asyncio
+ from blaxel.core import VolumeInstance, SandboxInstance
+
+ async def main():
+     # Create a volume
+     volume = await VolumeInstance.create_if_not_exists({
+         "name": "my-volume",
+         "size": 1024,  # MB
+         "region": "us-pdx-1",
+         "labels": {"env": "test", "project": "12345"}
+     })
+
+     # Attach volume to sandbox
+     sandbox = await SandboxInstance.create_if_not_exists({
+         "name": "my-sandbox",
+         "image": "blaxel/base-image:latest",
+         "volumes": [
+             {"name": "my-volume", "mount_path": "/data", "read_only": False}
+         ]
+     })
+
+     # List volumes
+     volumes = await VolumeInstance.list()
+
+     # Delete volume (using class)
+     await VolumeInstance.delete("my-volume")
+
+     # Delete volume (using instance)
+     await volume.delete()
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
336
+
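+ Anything written under the volume's mount path is persisted independently of the sandbox. A short sketch reusing the filesystem API shown above (the file name is illustrative):
+
+ ```python
+ # ...
+
+ # Write to the mounted volume: files under /data live on "my-volume"
+ await sandbox.fs.write("/data/notes.txt", "persisted on the volume")
+
+ # Read the file back later, e.g. from another sandbox that mounts the same volume
+ content = await sandbox.fs.read("/data/notes.txt")
+ print(content)
+ ```
+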
337
+ ### Batch jobs
338
+
339
+ Blaxel lets you support agentic workflows by offloading asynchronous batch-processing tasks to its scalable infrastructure, where they run in parallel. A single job execution runs the job once per task, and each task accepts optional input parameters.
340
+
341
+ ```python
+ import asyncio
+ from blaxel.core.jobs import bl_job
+ from blaxel.core.client.models import CreateJobExecutionRequest
+
+ async def main():
+     # Create and run a job execution
+     job = bl_job("job-name")
+
+     execution_id = await job.acreate_execution(CreateJobExecutionRequest(
+         tasks=[
+             {"name": "John"},
+             {"name": "Jane"},
+             {"name": "Bob"}
+         ]
+     ))
+
+     # Get execution status
+     # Returns: "pending" | "running" | "completed" | "failed"
+     status = await job.aget_execution_status(execution_id)
+
+     # Get execution details
+     execution = await job.aget_execution(execution_id)
+     print(execution.status, execution.metadata)
+
+     # Wait for completion
+     try:
+         result = await job.await_for_execution(
+             execution_id,
+             max_wait=300,  # 5 minutes
+             interval=2  # Poll every 2 seconds
+         )
+         print(f"Completed: {result.status}")
+     except Exception as error:
+         print(f"Timeout: {error}")
+
+     # List all executions
+     executions = await job.alist_executions()
+
+     # Cancel an execution
+     await job.acancel_execution(execution_id)
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
386
+
387
+ Synchronous calls are [also available](https://docs.blaxel.ai/Jobs/Manage-job-execution-py).
388
+
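+ As a rough sketch, the synchronous variants mirror the asynchronous methods without the `a` prefix; check the linked page for the exact names, as the ones below are assumptions based on that convention:
+
+ ```python
+ from blaxel.core.jobs import bl_job
+ from blaxel.core.client.models import CreateJobExecutionRequest
+
+ job = bl_job("job-name")
+
+ # Assumed synchronous counterparts of the async methods shown above
+ execution_id = job.create_execution(CreateJobExecutionRequest(tasks=[{"name": "John"}]))
+ status = job.get_execution_status(execution_id)
+ ```
+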
389
+ ### Framework integrations
390
+
391
+ Blaxel provides additional packages for framework-specific integrations and telemetry:
392
+
393
+ ```bash
394
+ # With specific integrations
395
+ pip install "blaxel[telemetry]"
396
+ pip install "blaxel[crewai]"
397
+ pip install "blaxel[openai]"
398
+ pip install "blaxel[langgraph]"
399
+ pip install "blaxel[livekit]"
400
+ pip install "blaxel[llamaindex]"
401
+ pip install "blaxel[pydantic]"
402
+ pip install "blaxel[googleadk]"
403
+
404
+ # Everything
405
+ pip install "blaxel[all]"
406
+ ```
407
+
408
+ #### Model use
409
+
410
+ Blaxel acts as a unified gateway for model APIs, centralizing access credentials, tracing and telemetry. You can integrate with any model API provider, or deploy your own custom model. When a model is deployed on Blaxel, a global API endpoint is also created to call it.
411
+
412
+ The SDK includes a helper function that creates a reference to a model deployed on Blaxel and returns a framework-specific model client that routes API calls through Blaxel's unified gateway.
413
+
414
+ ```python
415
+ from blaxel.core import bl_model
416
+
417
+ # With OpenAI
418
+ from blaxel.openai import bl_model
419
+ model = await bl_model("gpt-5-mini")
420
+
421
+ # With LangChain
422
+ from blaxel.langgraph import bl_model
423
+ model = await bl_model("gpt-5-mini")
424
+
425
+ # With LlamaIndex
426
+ from blaxel.llamaindex import bl_model
427
+ model = await bl_model("gpt-5-mini")
428
+
429
+ # With Pydantic AI
430
+ from blaxel.pydantic import bl_model
431
+ model = await bl_model("gpt-5-mini")
432
+
433
+ # With CrewAI
434
+ from blaxel.crewai import bl_model
435
+ model = await bl_model("gpt-5-mini")
436
+
437
+ # With Google ADK
438
+ from blaxel.googleadk import bl_model
439
+ model = await bl_model("gpt-5-mini")
440
+
441
+ # With LiveKit
442
+ from blaxel.livekit import bl_model
443
+ model = await bl_model("gpt-5-mini")
444
+ ```
445
+
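+ The returned object is a native client for the chosen framework, so you call it with that framework's own API. For instance, with the LangChain integration the model can be invoked directly; a minimal sketch:
+
+ ```python
+ import asyncio
+ from blaxel.langgraph import bl_model
+
+ async def main():
+     model = await bl_model("gpt-5-mini")
+     # LangChain chat models accept a plain string prompt via ainvoke
+     response = await model.ainvoke("Say hello in one short sentence.")
+     print(response.content)
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
+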
446
+ #### MCP tool use
447
+
448
+ Blaxel lets you deploy and host Model Context Protocol (MCP) servers, accessible at a global endpoint over streamable HTTP.
449
+
450
+ The SDK includes a helper function that retrieves and returns tool definitions from a Blaxel-hosted MCP server in the format required by specific frameworks.
451
+
452
+ ```python
453
+ # With OpenAI
454
+ from blaxel.openai import bl_tools
455
+ tools = await bl_tools(["sandbox/my-sandbox"])
456
+
457
+ # With Pydantic AI
458
+ from blaxel.pydantic import bl_tools
459
+ tools = await bl_tools(["sandbox/my-sandbox"])
460
+
461
+ # With LlamaIndex
462
+ from blaxel.llamaindex import bl_tools
463
+ tools = await bl_tools(["sandbox/my-sandbox"])
464
+
465
+ # With LangChain
466
+ from blaxel.langgraph import bl_tools
467
+ tools = await bl_tools(["sandbox/my-sandbox"])
468
+
469
+ # With CrewAI
470
+ from blaxel.crewai import bl_tools
471
+ tools = await bl_tools(["sandbox/my-sandbox"])
472
+
473
+ # With Google ADK
474
+ from blaxel.googleadk import bl_tools
475
+ tools = await bl_tools(["sandbox/my-sandbox"])
476
+
477
+ # With LiveKit
478
+ from blaxel.livekit import bl_tools
479
+ tools = await bl_tools(["sandbox/my-sandbox"])
480
+ ```
481
+
482
+ Here is an example of retrieving tool definitions from a Blaxel sandbox's MCP server for use with the OpenAI SDK:
483
+
484
+ ```python
+ import asyncio
+ from blaxel.core import SandboxInstance
+ from blaxel.openai import bl_tools
+
+ async def main():
+     # Create a new sandbox
+     sandbox = await SandboxInstance.create_if_not_exists({
+         "name": "my-sandbox",
+         "image": "blaxel/base-image:latest",
+         "memory": 4096,
+         "region": "us-pdx-1",
+         "ports": [{"target": 3000, "protocol": "HTTP"}],
+         "ttl": "24h"
+     })
+
+     # Get sandbox MCP tools
+     tools = await bl_tools(["sandbox/my-sandbox"])
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+ ```
507
+
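+ From there, the tools can be handed to an agent. The wiring below uses the OpenAI Agents SDK (`openai-agents`, installed via the `openai` extra) and is only a sketch; the agent name, instructions, and prompt are placeholders:
+
+ ```python
+ # ...
+
+ from agents import Agent, Runner
+ from blaxel.openai import bl_model, bl_tools
+
+ # Give an agent access to the sandbox's MCP tools
+ agent = Agent(
+     name="sandbox-agent",
+     instructions="Use the sandbox tools to answer the user.",
+     model=await bl_model("gpt-5-mini"),
+     tools=await bl_tools(["sandbox/my-sandbox"]),
+ )
+ result = await Runner.run(agent, "List the files in /app")
+ print(result.final_output)
+ ```
+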
508
+ ### Telemetry
509
+
510
+ Instrumentation happens automatically when workloads run on Blaxel.
511
+
512
+ Enable automatic telemetry by importing the `blaxel.telemetry` package:
513
+
514
+ ```python
515
+ import blaxel.telemetry
516
+ ```
517
+
518
+ ## Requirements
519
+
520
+ - Python 3.10 or later (Python 3.14 is not yet supported)
521
+
522
+ ## Contributing
523
+
524
+ Contributions are welcome! Please feel free to [submit a pull request](https://github.com/blaxel-ai/sdk-python/pulls).
525
+
526
+ ## License
527
+
528
+ This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.