blaxel 0.2.32__py3-none-any.whl → 0.2.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -184,55 +184,170 @@ class SandboxProcess(SandboxAction):
     ) -> Union[ProcessResponse, ProcessResponseWithLog]:
         """Execute a process in the sandbox."""
         on_log = None
+        on_stdout = None
+        on_stderr = None
+
         if isinstance(process, ProcessRequestWithLog):
             on_log = process.on_log
+            on_stdout = process.on_stdout
+            on_stderr = process.on_stderr
             process = process.to_dict()
 
         if isinstance(process, dict):
             if "on_log" in process:
                 on_log = process["on_log"]
                 del process["on_log"]
+            if "on_stdout" in process:
+                on_stdout = process["on_stdout"]
+                del process["on_stdout"]
+            if "on_stderr" in process:
+                on_stderr = process["on_stderr"]
+                del process["on_stderr"]
             process = ProcessRequest.from_dict(process)
 
         # Store original wait_for_completion setting
         should_wait_for_completion = process.wait_for_completion
 
-        # Always start process without wait_for_completion to avoid server-side blocking
-        if should_wait_for_completion and on_log is not None:
-            process.wait_for_completion = False
-
-        client = self.get_client()
-        response = await client.post("/process", json=process.to_dict())
-        try:
-            content_bytes = await response.aread()
-            self.handle_response_error(response)
-            import json
-
-            response_data = json.loads(content_bytes) if content_bytes else None
-            result = ProcessResponse.from_dict(response_data)
-        finally:
-            await response.aclose()
-
-        # Handle wait_for_completion with parallel log streaming
-        if should_wait_for_completion and on_log is not None:
-            stream_control = self._stream_logs(result.pid, {"on_log": on_log})
+        # When waiting for completion with streaming callbacks, use streaming endpoint
+        if should_wait_for_completion and (on_log or on_stdout or on_stderr):
+            return await self._exec_with_streaming(
+                process, on_log=on_log, on_stdout=on_stdout, on_stderr=on_stderr
+            )
+        else:
+            client = self.get_client()
+            response = await client.post("/process", json=process.to_dict())
             try:
-                # Wait for process completion
-                result = await self.wait(result.pid, interval=500, max_wait=1000 * 60 * 60)
+                content_bytes = await response.aread()
+                self.handle_response_error(response)
+                import json
+
+                response_data = json.loads(content_bytes) if content_bytes else None
+                result = ProcessResponse.from_dict(response_data)
             finally:
-                # Clean up log streaming
-                if stream_control:
-                    stream_control["close"]()
-        else:
-            # For non-blocking execution, set up log streaming immediately if requested
-            if on_log is not None:
-                stream_control = self._stream_logs(result.pid, {"on_log": on_log})
+                await response.aclose()
+
+            if on_log or on_stdout or on_stderr:
+                stream_control = self._stream_logs(
+                    result.pid, {"on_log": on_log, "on_stdout": on_stdout, "on_stderr": on_stderr}
+                )
                 return ProcessResponseWithLog(
                     result,
                     lambda: stream_control["close"]() if stream_control else None,
                 )
 
-        return result
+            return result
+
+    async def _exec_with_streaming(
+        self,
+        process_request: ProcessRequest,
+        on_log: Callable[[str], None] | None = None,
+        on_stdout: Callable[[str], None] | None = None,
+        on_stderr: Callable[[str], None] | None = None,
+    ) -> ProcessResponseWithLog:
+        """Execute a process with streaming response handling for NDJSON."""
+        import json
+
+        headers = (
+            self.sandbox_config.headers
+            if self.sandbox_config.force_url
+            else {**settings.headers, **self.sandbox_config.headers}
+        )
+
+        async with httpx.AsyncClient() as client_instance:
+            async with client_instance.stream(
+                "POST",
+                f"{self.url}/process",
+                headers={
+                    **headers,
+                    "Content-Type": "application/json",
+                    "Accept": "text/event-stream",
+                },
+                json=process_request.to_dict(),
+                timeout=None,
+            ) as response:
+                if response.status_code >= 400:
+                    error_text = await response.aread()
+                    raise Exception(f"Failed to execute process: {error_text}")
+
+                content_type = response.headers.get("Content-Type", "")
+                is_streaming = "application/x-ndjson" in content_type
+
+                # Fallback: server doesn't support streaming, use legacy approach
+                if not is_streaming:
+                    content = await response.aread()
+                    data = json.loads(content)
+                    result = ProcessResponse.from_dict(data)
+
+                    # If process already completed (server waited), emit logs through callbacks
+                    if result.status == "completed" or result.status == "failed":
+                        if result.stdout:
+                            for line in result.stdout.split("\n"):
+                                if line:
+                                    if on_stdout:
+                                        on_stdout(line)
+                        if result.stderr:
+                            for line in result.stderr.split("\n"):
+                                if line:
+                                    if on_stderr:
+                                        on_stderr(line)
+                        if result.logs:
+                            for line in result.logs.split("\n"):
+                                if line:
+                                    if on_log:
+                                        on_log(line)
+
+                    return ProcessResponseWithLog(result, lambda: None)
+
+                # Streaming response handling
+                buffer = ""
+                result = None
+
+                async for chunk in response.aiter_text():
+                    buffer += chunk
+                    lines = buffer.split("\n")
+                    buffer = lines.pop()
+
+                    for line in lines:
+                        if not line.strip():
+                            continue
+                        try:
+                            parsed = json.loads(line)
+                            parsed_type = parsed.get("type", "")
+                            parsed_data = parsed.get("data", "")
+
+                            if parsed_type == "stdout":
+                                if parsed_data:
+                                    if on_stdout:
+                                        on_stdout(parsed_data)
+                                    if on_log:
+                                        on_log(parsed_data)
+                            elif parsed_type == "stderr":
+                                if parsed_data:
+                                    if on_stderr:
+                                        on_stderr(parsed_data)
+                                    if on_log:
+                                        on_log(parsed_data)
+                            elif parsed_type == "result":
+                                try:
+                                    result = ProcessResponse.from_dict(json.loads(parsed_data))
+                                except Exception:
+                                    raise Exception(f"Failed to parse result JSON: {parsed_data}")
+                        except json.JSONDecodeError:
+                            continue
+
+                # Process any remaining buffer
+                if buffer.strip():
+                    if buffer.startswith("result:"):
+                        json_str = buffer[7:]
+                        try:
+                            result = ProcessResponse.from_dict(json.loads(json_str))
+                        except Exception:
+                            raise Exception(f"Failed to parse result JSON: {json_str}")
+
+                if not result:
+                    raise Exception("No result received from streaming response")
+
+                return ProcessResponseWithLog(result, lambda: None)
 
     async def wait(
         self, identifier: str, max_wait: int = 60000, interval: int = 1000
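
A minimal usage sketch for the new callback-based exec path. Only on_stdout/on_stderr/wait_for_completion come from the diff above; the import paths, the get() lookup, the sandbox name, and the command are assumptions for illustration:

    import asyncio

    from blaxel.core.sandbox import SandboxInstance              # import path assumed
    from blaxel.core.sandbox.types import ProcessRequestWithLog  # import path assumed

    async def main() -> None:
        # Hypothetical pre-existing sandbox; get() is assumed to return a SandboxInstance
        sandbox = await SandboxInstance.get("my-sandbox")
        request = ProcessRequestWithLog(
            command="echo out && echo err >&2",      # illustrative command
            wait_for_completion=True,                # routes through _exec_with_streaming above
            on_stdout=lambda line: print("[stdout]", line),
            on_stderr=lambda line: print("[stderr]", line),
        )
        # Returns a ProcessResponseWithLog wrapping the final ProcessResponse
        await sandbox.process.exec(request)

    asyncio.run(main())
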
@@ -1,6 +1,6 @@
 import logging
 import uuid
-from typing import Any, Dict, List, Union
+from typing import Any, Callable, Dict, List, Union
 
 from ...client.api.compute.create_sandbox import asyncio as create_sandbox
 from ...client.api.compute.delete_sandbox import asyncio as delete_sandbox
@@ -26,6 +26,24 @@ from .session import SandboxSessions
 logger = logging.getLogger(__name__)
 
 
+class _AsyncDeleteDescriptor:
+    """Descriptor that provides both class-level and instance-level delete functionality."""
+
+    def __init__(self, delete_func: Callable):
+        self._delete_func = delete_func
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            # Called on the class: SandboxInstance.delete("name")
+            return self._delete_func
+        else:
+            # Called on an instance: instance.delete()
+            async def instance_delete() -> Sandbox:
+                return await self._delete_func(instance.metadata.name)
+
+            return instance_delete
+
+
 class SandboxInstance:
     def __init__(
         self,
@@ -107,6 +125,7 @@ class SandboxInstance:
                     or "lifecycle" in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__)
                     or "snapshot_enabled"
                     in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__)
+                    or "labels" in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__)
                 )
             )
         ):
@@ -135,7 +154,7 @@ class SandboxInstance:
 
         # Create full Sandbox object
         sandbox = Sandbox(
-            metadata=Metadata(name=name),
+            metadata=Metadata(name=name, labels=config.labels),
             spec=SandboxSpec(
                 runtime=Runtime(
                     image=image,
@@ -202,14 +221,6 @@ class SandboxInstance:
         response = await list_sandboxes()
         return [cls(sandbox) for sandbox in response]
 
-    @classmethod
-    async def delete(cls, sandbox_name: str) -> Sandbox:
-        response = await delete_sandbox(
-            sandbox_name,
-            client=client,
-        )
-        return response
-
     @classmethod
     async def update_metadata(
         cls, sandbox_name: str, metadata: SandboxUpdateMetadata
@@ -319,3 +330,16 @@ class SandboxInstance:
             headers={"X-Blaxel-Preview-Token": session.token},
             params={"bl_preview_token": session.token},
         )
+
+
+async def _delete_sandbox_by_name(sandbox_name: str) -> Sandbox:
+    """Delete a sandbox by name."""
+    response = await delete_sandbox(
+        sandbox_name,
+        client=client,
+    )
+    return response
+
+
+# Assign the delete descriptor to support both class-level and instance-level calls
+SandboxInstance.delete = _AsyncDeleteDescriptor(_delete_sandbox_by_name)
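
The descriptor assignment above replaces the removed classmethod, so delete resolves for both call shapes; a short sketch, reusing the assumed imports from the previous example:

    # Inside an async function:
    await SandboxInstance.delete("my-sandbox")        # class-level, same shape as before

    sandbox = await SandboxInstance.get("my-sandbox")
    await sandbox.delete()                            # instance-level, name taken from sandbox.metadata
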
@@ -143,43 +143,169 @@ class SyncSandboxProcess(SyncSandboxAction):
         process: Union[ProcessRequest, ProcessRequestWithLog, Dict[str, Any]],
     ) -> Union[ProcessResponse, ProcessResponseWithLog]:
         on_log = None
+        on_stdout = None
+        on_stderr = None
+
         if isinstance(process, ProcessRequestWithLog):
             on_log = process.on_log
+            on_stdout = process.on_stdout
+            on_stderr = process.on_stderr
             process = process.to_dict()
+
         if isinstance(process, dict):
             if "on_log" in process:
                 on_log = process["on_log"]
                 del process["on_log"]
+            if "on_stdout" in process:
+                on_stdout = process["on_stdout"]
+                del process["on_stdout"]
+            if "on_stderr" in process:
+                on_stderr = process["on_stderr"]
+                del process["on_stderr"]
             process = ProcessRequest.from_dict(process)
+
         should_wait_for_completion = process.wait_for_completion
-        if should_wait_for_completion and on_log is not None:
-            process.wait_for_completion = False
-        with self.get_client() as client_instance:
-            response = client_instance.post("/process", json=process.to_dict())
-            response_data = None
-            if response.content:
-                try:
-                    response_data = response.json()
-                except Exception:
-                    self.handle_response_error(response)
-                    raise
-            self.handle_response_error(response)
-            result = ProcessResponse.from_dict(response_data)
-        if should_wait_for_completion and on_log is not None:
-            stream_control = self._stream_logs(result.pid, {"on_log": on_log})
-            try:
-                result = self.wait(result.pid, interval=500, max_wait=1000 * 60 * 60)
-            finally:
-                if stream_control:
-                    stream_control["close"]()
-        else:
-            if on_log is not None:
-                stream_control = self._stream_logs(result.pid, {"on_log": on_log})
+
+        # When waiting for completion with streaming callbacks, use streaming endpoint
+        if should_wait_for_completion and (on_log or on_stdout or on_stderr):
+            return self._exec_with_streaming(
+                process, on_log=on_log, on_stdout=on_stdout, on_stderr=on_stderr
+            )
+        else:
+            with self.get_client() as client_instance:
+                response = client_instance.post("/process", json=process.to_dict())
+                response_data = None
+                if response.content:
+                    try:
+                        response_data = response.json()
+                    except Exception:
+                        self.handle_response_error(response)
+                        raise
+                self.handle_response_error(response)
+                result = ProcessResponse.from_dict(response_data)
+
+            if on_log or on_stdout or on_stderr:
+                stream_control = self._stream_logs(
+                    result.pid, {"on_log": on_log, "on_stdout": on_stdout, "on_stderr": on_stderr}
+                )
                 return ProcessResponseWithLog(
                     result,
                     lambda: stream_control["close"]() if stream_control else None,
                 )
-        return result
+
+            return result
+
+    def _exec_with_streaming(
+        self,
+        process_request: ProcessRequest,
+        on_log: Callable[[str], None] | None = None,
+        on_stdout: Callable[[str], None] | None = None,
+        on_stderr: Callable[[str], None] | None = None,
+    ) -> ProcessResponseWithLog:
+        """Execute a process with streaming response handling for NDJSON."""
+        import json
+
+        headers = (
+            self.sandbox_config.headers
+            if self.sandbox_config.force_url
+            else {**settings.headers, **self.sandbox_config.headers}
+        )
+
+        with httpx.Client() as client_instance:
+            with client_instance.stream(
+                "POST",
+                f"{self.url}/process",
+                headers={
+                    **headers,
+                    "Content-Type": "application/json",
+                    "Accept": "text/event-stream",
+                },
+                json=process_request.to_dict(),
+                timeout=None,
+            ) as response:
+                if response.status_code >= 400:
+                    error_text = response.read()
+                    raise Exception(f"Failed to execute process: {error_text}")
+
+                content_type = response.headers.get("Content-Type", "")
+                is_streaming = "application/x-ndjson" in content_type
+
+                # Fallback: server doesn't support streaming, use legacy approach
+                if not is_streaming:
+                    content = response.read()
+                    data = json.loads(content)
+                    result = ProcessResponse.from_dict(data)
+
+                    # If process already completed (server waited), emit logs through callbacks
+                    if result.status == "completed" or result.status == "failed":
+                        if result.stdout:
+                            for line in result.stdout.split("\n"):
+                                if line:
+                                    if on_stdout:
+                                        on_stdout(line)
+                        if result.stderr:
+                            for line in result.stderr.split("\n"):
+                                if line:
+                                    if on_stderr:
+                                        on_stderr(line)
+                        if result.logs:
+                            for line in result.logs.split("\n"):
+                                if line:
+                                    if on_log:
+                                        on_log(line)
+
+                    return ProcessResponseWithLog(result, lambda: None)
+
+                # Streaming response handling
+                buffer = ""
+                result = None
+
+                for chunk in response.iter_text():
+                    buffer += chunk
+                    lines = buffer.split("\n")
+                    buffer = lines.pop()
+
+                    for line in lines:
+                        if not line.strip():
+                            continue
+                        try:
+                            parsed = json.loads(line)
+                            parsed_type = parsed.get("type", "")
+                            parsed_data = parsed.get("data", "")
+
+                            if parsed_type == "stdout":
+                                if parsed_data:
+                                    if on_stdout:
+                                        on_stdout(parsed_data)
+                                    if on_log:
+                                        on_log(parsed_data)
+                            elif parsed_type == "stderr":
+                                if parsed_data:
+                                    if on_stderr:
+                                        on_stderr(parsed_data)
+                                    if on_log:
+                                        on_log(parsed_data)
+                            elif parsed_type == "result":
+                                try:
+                                    result = ProcessResponse.from_dict(json.loads(parsed_data))
+                                except Exception:
+                                    raise Exception(f"Failed to parse result JSON: {parsed_data}")
+                        except json.JSONDecodeError:
+                            continue
+
+                # Process any remaining buffer
+                if buffer.strip():
+                    if buffer.startswith("result:"):
+                        json_str = buffer[7:]
+                        try:
+                            result = ProcessResponse.from_dict(json.loads(json_str))
+                        except Exception:
+                            raise Exception(f"Failed to parse result JSON: {json_str}")
+
+                if not result:
+                    raise Exception("No result received from streaming response")
+
+                return ProcessResponseWithLog(result, lambda: None)
 
     def wait(self, identifier: str, max_wait: int = 60000, interval: int = 1000) -> ProcessResponse:
         start_time = time.monotonic() * 1000
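
The synchronous exec mirrors the async flow without an event loop; a sketch under the same caveats (import paths, the get() lookup, the process accessor, and the command are assumptions):

    from blaxel.core.sandbox.sync import SyncSandboxInstance     # import path assumed
    from blaxel.core.sandbox.types import ProcessRequestWithLog  # import path assumed

    sandbox = SyncSandboxInstance.get("my-sandbox")               # hypothetical existing sandbox
    sandbox.process.exec(
        ProcessRequestWithLog(
            command="make test",                                  # illustrative command
            wait_for_completion=True,
            on_log=lambda line: print(line),                      # combined stdout/stderr stream
        )
    )
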
@@ -1,6 +1,6 @@
 import logging
 import uuid
-from typing import Any, Dict, List, Union
+from typing import Any, Callable, Dict, List, Union
 
 from ...client.api.compute.create_sandbox import sync as create_sandbox
 from ...client.api.compute.delete_sandbox import sync as delete_sandbox
@@ -26,6 +26,24 @@ from .session import SyncSandboxSessions
 logger = logging.getLogger(__name__)
 
 
+class _SyncDeleteDescriptor:
+    """Descriptor that provides both class-level and instance-level delete functionality."""
+
+    def __init__(self, delete_func: Callable):
+        self._delete_func = delete_func
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            # Called on the class: SyncSandboxInstance.delete("name")
+            return self._delete_func
+        else:
+            # Called on an instance: instance.delete()
+            def instance_delete() -> Sandbox:
+                return self._delete_func(instance.metadata.name)
+
+            return instance_delete
+
+
 class SyncSandboxInstance:
     def __init__(
         self,
@@ -102,6 +120,7 @@ class SyncSandboxInstance:
                     or "lifecycle" in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__)
                     or "snapshot_enabled"
                     in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__)
+                    or "labels" in (sandbox if isinstance(sandbox, dict) else sandbox.__dict__)
                 )
             )
         ):
@@ -125,7 +144,7 @@ class SyncSandboxInstance:
         region = config.region
         lifecycle = config.lifecycle
         sandbox = Sandbox(
-            metadata=Metadata(name=name),
+            metadata=Metadata(name=name, labels=config.labels),
             spec=SandboxSpec(
                 runtime=Runtime(
                     image=image,
@@ -184,14 +203,6 @@ class SyncSandboxInstance:
         response = list_sandboxes(client=client)
         return [cls(sandbox) for sandbox in response]
 
-    @classmethod
-    def delete(cls, sandbox_name: str) -> Sandbox:
-        response = delete_sandbox(
-            sandbox_name,
-            client=client,
-        )
-        return response
-
     @classmethod
     def update_metadata(
         cls, sandbox_name: str, metadata: SandboxUpdateMetadata
@@ -261,3 +272,16 @@ class SyncSandboxInstance:
             headers={"X-Blaxel-Preview-Token": session.token},
             params={"bl_preview_token": session.token},
         )
+
+
+def _delete_sandbox_by_name(sandbox_name: str) -> Sandbox:
+    """Delete a sandbox by name."""
+    response = delete_sandbox(
+        sandbox_name,
+        client=client,
+    )
+    return response
+
+
+# Assign the delete descriptor to support both class-level and instance-level calls
+SyncSandboxInstance.delete = _SyncDeleteDescriptor(_delete_sandbox_by_name)
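
As in the async module, the sync delete now works at class and instance level; a brief sketch with the same assumed imports:

    SyncSandboxInstance.delete("my-sandbox")          # class-level, by name

    sandbox = SyncSandboxInstance.get("my-sandbox")
    sandbox.delete()                                  # instance-level, name resolved from metadata
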
@@ -155,6 +155,7 @@ class SandboxCreateConfiguration:
         region: str | None = None,
         lifecycle: Union[SandboxLifecycle, Dict[str, Any]] | None = None,
         snapshot_enabled: bool | None = None,
+        labels: Dict[str, str] | None = None,
     ):
         self.name = name
         self.image = image
@@ -167,6 +168,7 @@ class SandboxCreateConfiguration:
         self.region = region
         self.lifecycle = lifecycle
         self.snapshot_enabled = snapshot_enabled
+        self.labels = labels
 
     @classmethod
     def from_dict(cls, data: Dict[str, Any]) -> "SandboxCreateConfiguration":
@@ -190,6 +192,7 @@ class SandboxCreateConfiguration:
             region=data.get("region"),
             lifecycle=lifecycle,
             snapshot_enabled=data.get("snapshot_enabled"),
+            labels=data.get("labels"),
         )
 
     def _normalize_ports(self) -> List[Port] | None:
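
With the labels field threaded through the configuration, a sandbox can be created with metadata labels; a minimal sketch (image reference and label values are illustrative, import path assumed):

    from blaxel.core.sandbox import SandboxCreateConfiguration, SandboxInstance  # path assumed

    config = SandboxCreateConfiguration.from_dict(
        {
            "name": "labelled-sandbox",
            "image": "my-registry/app:latest",             # illustrative image reference
            "labels": {"team": "data", "env": "staging"},
        }
    )
    sandbox = await SandboxInstance.create(config)         # inside an async function
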
@@ -282,6 +285,8 @@ class SandboxCreateConfiguration:
 @_attrs_define
 class ProcessRequestWithLog(ProcessRequest):
     on_log: Callable[[str], None] | None = None
+    on_stdout: Callable[[str], None] | None = None
+    on_stderr: Callable[[str], None] | None = None
 
 
 class ProcessResponseWithLog:
@@ -1,5 +1,5 @@
 """Volume module for persistent storage management."""
 
-from .volume import VolumeCreateConfiguration, VolumeInstance
+from .volume import SyncVolumeInstance, VolumeCreateConfiguration, VolumeInstance
 
-__all__ = ["VolumeInstance", "VolumeCreateConfiguration"]
+__all__ = ["VolumeInstance", "SyncVolumeInstance", "VolumeCreateConfiguration"]
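
The package root now also re-exports the synchronous volume type alongside the async one; an import sketch (exact package path assumed):

    from blaxel.core.sandbox.volume import (  # package path assumed
        SyncVolumeInstance,
        VolumeCreateConfiguration,
        VolumeInstance,
    )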