blaxel 0.2.30__py3-none-any.whl → 0.2.31rc120__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25) hide show
  1. blaxel/__init__.py +2 -2
  2. blaxel/core/client/client.py +18 -2
  3. blaxel/core/sandbox/client/api/filesystem/delete_filesystem_tree_path.py +188 -0
  4. blaxel/core/sandbox/client/api/filesystem/get_filesystem_content_search_path.py +250 -0
  5. blaxel/core/sandbox/client/api/filesystem/get_filesystem_find_path.py +248 -0
  6. blaxel/core/sandbox/client/api/filesystem/get_filesystem_search_path.py +237 -0
  7. blaxel/core/sandbox/client/api/filesystem/get_filesystem_tree_path.py +197 -0
  8. blaxel/core/sandbox/client/api/filesystem/put_filesystem_tree_path.py +223 -0
  9. blaxel/core/sandbox/client/models/__init__.py +16 -0
  10. blaxel/core/sandbox/client/models/content_search_match.py +98 -0
  11. blaxel/core/sandbox/client/models/content_search_response.py +97 -0
  12. blaxel/core/sandbox/client/models/find_match.py +69 -0
  13. blaxel/core/sandbox/client/models/find_response.py +88 -0
  14. blaxel/core/sandbox/client/models/fuzzy_search_match.py +78 -0
  15. blaxel/core/sandbox/client/models/fuzzy_search_response.py +88 -0
  16. blaxel/core/sandbox/client/models/tree_request.py +76 -0
  17. blaxel/core/sandbox/client/models/tree_request_files.py +49 -0
  18. blaxel/core/sandbox/default/action.py +12 -8
  19. blaxel/core/sandbox/default/filesystem.py +238 -48
  20. blaxel/core/sandbox/default/interpreter.py +62 -55
  21. blaxel/core/sandbox/default/process.py +66 -46
  22. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/METADATA +1 -1
  23. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/RECORD +25 -11
  24. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/WHEEL +0 -0
  25. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,88 @@
1
from typing import TYPE_CHECKING, Any, TypeVar, Union

from attrs import define as _attrs_define
from attrs import field as _attrs_field

from ..types import UNSET, Unset

if TYPE_CHECKING:
    from ..models.fuzzy_search_match import FuzzySearchMatch


T = TypeVar("T", bound="FuzzySearchResponse")


@_attrs_define
class FuzzySearchResponse:
    """Response payload of a fuzzy filesystem search.

    Attributes:
        matches (Union[Unset, list['FuzzySearchMatch']]): Matching entries.
        total (Union[Unset, int]): Total number of matches. Example: 5.
    """

    matches: Union[Unset, list["FuzzySearchMatch"]] = UNSET
    total: Union[Unset, int] = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, omitting unset fields."""
        serialized_matches: Union[Unset, list[dict[str, Any]]] = UNSET
        if not isinstance(self.matches, Unset):
            # Items may already be raw dicts; only call to_dict on model objects.
            serialized_matches = [
                item if type(item) is dict else item.to_dict() for item in self.matches
            ]

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        if serialized_matches is not UNSET:
            field_dict["matches"] = serialized_matches
        if self.total is not UNSET:
            field_dict["total"] = self.total

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
        """Build an instance from a plain dict.

        Returns None when *src_dict* is empty/falsy (generated-client convention).
        Unrecognized keys are preserved in ``additional_properties``.
        """
        from ..models.fuzzy_search_match import FuzzySearchMatch

        if not src_dict:
            return None
        d = src_dict.copy()

        # Missing/None "matches" yields an empty list, mirroring the generator's output.
        matches = [
            FuzzySearchMatch.from_dict(item) for item in (d.pop("matches", UNSET) or [])
        ]
        total = d.pop("total", UNSET)

        fuzzy_search_response = cls(
            matches=matches,
            total=total,
        )
        fuzzy_search_response.additional_properties = d
        return fuzzy_search_response

    @property
    def additional_keys(self) -> list[str]:
        """Keys stored beyond the declared attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@@ -0,0 +1,76 @@
1
from typing import TYPE_CHECKING, Any, TypeVar, Union

from attrs import define as _attrs_define
from attrs import field as _attrs_field

from ..types import UNSET, Unset

if TYPE_CHECKING:
    from ..models.tree_request_files import TreeRequestFiles


T = TypeVar("T", bound="TreeRequest")


@_attrs_define
class TreeRequest:
    """Request body for writing a file tree into the sandbox filesystem.

    Attributes:
        files (Union[Unset, TreeRequestFiles]): Example: {'"dir/file2.txt"': '"content2"}', '{"file1.txt"':
            '"content1"'}.
    """

    files: Union[Unset, "TreeRequestFiles"] = UNSET
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, omitting unset fields."""
        serialized_files: Union[Unset, dict[str, Any]] = UNSET
        # "files" may be a model object or already a raw dict; falsy values stay unset.
        if self.files and not isinstance(self.files, Unset) and not isinstance(self.files, dict):
            serialized_files = self.files.to_dict()
        elif self.files and isinstance(self.files, dict):
            serialized_files = self.files

        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        if serialized_files is not UNSET:
            field_dict["files"] = serialized_files

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
        """Build an instance from a plain dict.

        Returns None when *src_dict* is empty/falsy (generated-client convention).
        Unrecognized keys are preserved in ``additional_properties``.
        """
        from ..models.tree_request_files import TreeRequestFiles

        if not src_dict:
            return None
        d = src_dict.copy()

        raw_files = d.pop("files", UNSET)
        files: Union[Unset, TreeRequestFiles]
        if isinstance(raw_files, Unset):
            files = UNSET
        else:
            files = TreeRequestFiles.from_dict(raw_files)

        tree_request = cls(
            files=files,
        )
        tree_request.additional_properties = d
        return tree_request

    @property
    def additional_keys(self) -> list[str]:
        """Keys stored beyond the declared attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@@ -0,0 +1,49 @@
1
from typing import Any, TypeVar

from attrs import define as _attrs_define
from attrs import field as _attrs_field

T = TypeVar("T", bound="TreeRequestFiles")


@_attrs_define
class TreeRequestFiles:
    """Free-form path-to-content mapping used by TreeRequest.

    Example:
        {'"dir/file2.txt"': '"content2"}', '{"file1.txt"': '"content1"'}

    """

    # All entries live in additional_properties; the model declares no fixed fields.
    additional_properties: dict[str, str] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict (a shallow copy of the stored entries)."""
        field_dict: dict[str, Any] = {}
        field_dict.update(self.additional_properties)

        return field_dict

    @classmethod
    def from_dict(cls: type[T], src_dict: dict[str, Any]) -> T:
        """Build an instance from a plain dict.

        Returns None when *src_dict* is empty/falsy (generated-client convention).
        """
        if not src_dict:
            return None
        tree_request_files = cls()
        tree_request_files.additional_properties = src_dict.copy()
        return tree_request_files

    @property
    def additional_keys(self) -> list[str]:
        """Keys currently stored in the mapping."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> str:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: str) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@@ -1,5 +1,6 @@
1
1
 
2
2
  import httpx
3
+ from contextlib import asynccontextmanager
3
4
 
4
5
  from ...common.internal import get_forced_url, get_global_unique_hash
5
6
  from ...common.settings import settings
@@ -9,6 +10,7 @@ from ..types import ResponseError, SandboxConfiguration
9
10
  class SandboxAction:
10
11
  def __init__(self, sandbox_config: SandboxConfiguration):
11
12
  self.sandbox_config = sandbox_config
13
+ self._client: httpx.AsyncClient | None = None
12
14
 
13
15
  @property
14
16
  def name(self) -> str:
@@ -54,15 +56,17 @@ class SandboxAction:
54
56
  return None
55
57
 
56
58
  def get_client(self) -> httpx.AsyncClient:
57
- if self.sandbox_config.force_url:
58
- return httpx.AsyncClient(
59
- base_url=self.sandbox_config.force_url, headers=self.sandbox_config.headers
59
+ """Get persistent HTTP client for this sandbox instance."""
60
+ if self._client is None:
61
+ base_url = self.sandbox_config.force_url or self.url
62
+ self._client = httpx.AsyncClient(
63
+ base_url=base_url,
64
+ headers=self.sandbox_config.headers if self.sandbox_config.force_url else {**settings.headers, **self.sandbox_config.headers},
65
+ http2=False,
66
+ limits=httpx.Limits(max_connections=100, max_keepalive_connections=20),
67
+ timeout=httpx.Timeout(300.0, connect=10.0),
60
68
  )
61
- # Create a new client instance each time to avoid "Cannot open a client instance more than once" error
62
- return httpx.AsyncClient(
63
- base_url=self.url,
64
- headers={**settings.headers, **self.sandbox_config.headers},
65
- )
69
+ return self._client
66
70
 
67
71
  def handle_response_error(self, response: httpx.Response):
68
72
  if not response.is_success:
@@ -34,10 +34,14 @@ class SandboxFileSystem(SandboxAction):
34
34
  path = self.format_path(path)
35
35
  body = FileRequest(is_directory=True, permissions=permissions)
36
36
 
37
- async with self.get_client() as client_instance:
38
- response = await client_instance.put(f"/filesystem/{path}", json=body.to_dict())
37
+ client = self.get_client()
38
+ response = await client.put(f"/filesystem/{path}", json=body.to_dict())
39
+ try:
40
+ data = json.loads(await response.aread())
39
41
  self.handle_response_error(response)
40
- return SuccessResponse.from_dict(response.json())
42
+ return SuccessResponse.from_dict(data)
43
+ finally:
44
+ await response.aclose()
41
45
 
42
46
  async def write(self, path: str, content: str) -> SuccessResponse:
43
47
  path = self.format_path(path)
@@ -53,10 +57,14 @@ class SandboxFileSystem(SandboxAction):
53
57
  # Use regular upload for small files
54
58
  body = FileRequest(content=content)
55
59
 
56
- async with self.get_client() as client_instance:
57
- response = await client_instance.put(f"/filesystem/{path}", json=body.to_dict())
60
+ client = self.get_client()
61
+ response = await client.put(f"/filesystem/{path}", json=body.to_dict())
62
+ try:
63
+ data = json.loads(await response.aread())
58
64
  self.handle_response_error(response)
59
- return SuccessResponse.from_dict(response.json())
65
+ return SuccessResponse.from_dict(data)
66
+ finally:
67
+ await response.aclose()
60
68
 
61
69
  async def write_binary(self, path: str, content: Union[bytes, bytearray, str]) -> SuccessResponse:
62
70
  """Write binary content to a file.
@@ -96,13 +104,16 @@ class SandboxFileSystem(SandboxAction):
96
104
  url = f"{self.url}/filesystem/{path}"
97
105
  headers = {**settings.headers, **self.sandbox_config.headers}
98
106
 
99
- async with self.get_client() as client_instance:
100
- response = await client_instance.put(url, files=files, data=data, headers=headers)
101
-
107
+ client = self.get_client()
108
+ response = await client.put(url, files=files, data=data, headers=headers)
109
+ try:
110
+ content_bytes = await response.aread()
102
111
  if not response.is_success:
103
- raise Exception(f"Failed to write binary: {response.status_code} {response.text}")
104
-
105
- return SuccessResponse.from_dict(response.json())
112
+ error_text = content_bytes.decode('utf-8', errors='ignore')
113
+ raise Exception(f"Failed to write binary: {response.status_code} {error_text}")
114
+ return SuccessResponse.from_dict(json.loads(content_bytes))
115
+ finally:
116
+ await response.aclose()
106
117
 
107
118
  async def write_tree(
108
119
  self,
@@ -118,26 +129,32 @@ class SandboxFileSystem(SandboxAction):
118
129
 
119
130
  path = destination_path or ""
120
131
 
121
- async with self.get_client() as client_instance:
122
- response = await client_instance.put(
123
- f"/filesystem/tree/{path}",
124
- json={"files": files_dict},
125
- headers={"Content-Type": "application/json"},
126
- )
132
+ client = self.get_client()
133
+ response = await client.put(
134
+ f"/filesystem/tree/{path}",
135
+ json={"files": files_dict},
136
+ headers={"Content-Type": "application/json"},
137
+ )
138
+ try:
139
+ data = json.loads(await response.aread())
127
140
  self.handle_response_error(response)
128
- return Directory.from_dict(response.json())
141
+ return Directory.from_dict(data)
142
+ finally:
143
+ await response.aclose()
129
144
 
130
145
  async def read(self, path: str) -> str:
131
146
  path = self.format_path(path)
132
147
 
133
- async with self.get_client() as client_instance:
134
- response = await client_instance.get(f"/filesystem/{path}")
148
+ client = self.get_client()
149
+ response = await client.get(f"/filesystem/{path}")
150
+ try:
151
+ data = json.loads(await response.aread())
135
152
  self.handle_response_error(response)
136
-
137
- data = response.json()
138
153
  if "content" in data:
139
154
  return data["content"]
140
155
  raise Exception("Unsupported file type")
156
+ finally:
157
+ await response.aclose()
141
158
 
142
159
  async def read_binary(self, path: str) -> bytes:
143
160
  """Read binary content from a file.
@@ -157,10 +174,14 @@ class SandboxFileSystem(SandboxAction):
157
174
  "Accept": "application/octet-stream",
158
175
  }
159
176
 
160
- async with self.get_client() as client_instance:
161
- response = await client_instance.get(url, headers=headers)
177
+ client = self.get_client()
178
+ response = await client.get(url, headers=headers)
179
+ try:
180
+ content = await response.aread()
162
181
  self.handle_response_error(response)
163
- return response.content
182
+ return content
183
+ finally:
184
+ await response.aclose()
164
185
 
165
186
  async def download(self, src: str, destination_path: str, mode: int = 0o644) -> None:
166
187
  """Download a file from the sandbox to the local filesystem.
@@ -178,23 +199,177 @@ class SandboxFileSystem(SandboxAction):
178
199
  async def rm(self, path: str, recursive: bool = False) -> SuccessResponse:
179
200
  path = self.format_path(path)
180
201
 
181
- async with self.get_client() as client_instance:
182
- params = {"recursive": "true"} if recursive else {}
183
- response = await client_instance.delete(f"/filesystem/{path}", params=params)
202
+ client = self.get_client()
203
+ params = {"recursive": "true"} if recursive else {}
204
+ response = await client.delete(f"/filesystem/{path}", params=params)
205
+ try:
206
+ data = json.loads(await response.aread())
184
207
  self.handle_response_error(response)
185
- return SuccessResponse.from_dict(response.json())
208
+ return SuccessResponse.from_dict(data)
209
+ finally:
210
+ await response.aclose()
186
211
 
187
212
  async def ls(self, path: str) -> Directory:
188
213
  path = self.format_path(path)
189
214
 
190
- async with self.get_client() as client_instance:
191
- response = await client_instance.get(f"/filesystem/{path}")
215
+ client = self.get_client()
216
+ response = await client.get(f"/filesystem/{path}")
217
+ try:
218
+ data = json.loads(await response.aread())
192
219
  self.handle_response_error(response)
193
-
194
- data = response.json()
195
220
  if not ("files" in data or "subdirectories" in data):
196
221
  raise Exception('{"error": "Directory not found"}')
197
222
  return Directory.from_dict(data)
223
+ finally:
224
+ await response.aclose()
225
+
226
+ async def find(
227
+ self,
228
+ path: str,
229
+ type: str | None = None,
230
+ patterns: List[str] | None = None,
231
+ max_results: int | None = None,
232
+ exclude_dirs: List[str] | None = None,
233
+ exclude_hidden: bool | None = None,
234
+ ):
235
+ """Find files and directories.
236
+
237
+ Args:
238
+ path: Path to search in
239
+ type: Type of search ('file' or 'directory')
240
+ patterns: File patterns to include (e.g., ['*.py', '*.json'])
241
+ max_results: Maximum number of results to return
242
+ exclude_dirs: Directory names to skip
243
+ exclude_hidden: Exclude hidden files and directories
244
+
245
+ Returns:
246
+ FindResponse with matching files/directories
247
+ """
248
+ path = self.format_path(path)
249
+
250
+ params = {}
251
+ if type is not None:
252
+ params['type'] = type
253
+ if patterns is not None and len(patterns) > 0:
254
+ params['patterns'] = ','.join(patterns)
255
+ if max_results is not None:
256
+ params['maxResults'] = max_results
257
+ if exclude_dirs is not None and len(exclude_dirs) > 0:
258
+ params['excludeDirs'] = ','.join(exclude_dirs)
259
+ if exclude_hidden is not None:
260
+ params['excludeHidden'] = exclude_hidden
261
+
262
+ url = f"{self.url}/filesystem-find/{path}"
263
+ headers = {**settings.headers, **self.sandbox_config.headers}
264
+
265
+ client = self.get_client()
266
+ response = await client.get(url, params=params, headers=headers)
267
+ try:
268
+ data = json.loads(await response.aread())
269
+ self.handle_response_error(response)
270
+
271
+ from ..client.models.find_response import FindResponse
272
+ return FindResponse.from_dict(data)
273
+ finally:
274
+ await response.aclose()
275
+
276
+ async def grep(
277
+ self,
278
+ query: str,
279
+ path: str = "/",
280
+ case_sensitive: bool | None = None,
281
+ context_lines: int | None = None,
282
+ max_results: int | None = None,
283
+ file_pattern: str | None = None,
284
+ exclude_dirs: List[str] | None = None,
285
+ ):
286
+ """Search for text content inside files using ripgrep.
287
+
288
+ Args:
289
+ query: Text to search for
290
+ path: Directory path to search in
291
+ case_sensitive: Case sensitive search (default: false)
292
+ context_lines: Number of context lines to include (default: 0)
293
+ max_results: Maximum number of results to return (default: 100)
294
+ file_pattern: File pattern to include (e.g., '*.py')
295
+ exclude_dirs: Directory names to skip
296
+
297
+ Returns:
298
+ ContentSearchResponse with matching lines
299
+ """
300
+ path = self.format_path(path)
301
+
302
+ params = {'query': query}
303
+ if case_sensitive is not None:
304
+ params['caseSensitive'] = case_sensitive
305
+ if context_lines is not None:
306
+ params['contextLines'] = context_lines
307
+ if max_results is not None:
308
+ params['maxResults'] = max_results
309
+ if file_pattern is not None:
310
+ params['filePattern'] = file_pattern
311
+ if exclude_dirs is not None and len(exclude_dirs) > 0:
312
+ params['excludeDirs'] = ','.join(exclude_dirs)
313
+
314
+ url = f"{self.url}/filesystem-content-search/{path}"
315
+ headers = {**settings.headers, **self.sandbox_config.headers}
316
+
317
+ client = self.get_client()
318
+ response = await client.get(url, params=params, headers=headers)
319
+ try:
320
+ data = json.loads(await response.aread())
321
+ self.handle_response_error(response)
322
+
323
+ from ..client.models.content_search_response import ContentSearchResponse
324
+ return ContentSearchResponse.from_dict(data)
325
+ finally:
326
+ await response.aclose()
327
+
328
# NOTE(review): this is a duplicate of the `find` method already defined earlier
# in this class (diff lines 226-274); Python keeps the later definition, so this
# one wins. The stale body used `async with self.get_client()`, which would
# close the persistent shared AsyncClient introduced in this release and break
# every subsequent request. Rewritten to the persistent-client pattern to match
# the first definition; ideally one of the two copies should be deleted.
async def find(
    self,
    path: str,
    type: str | None = None,
    patterns: List[str] | None = None,
    max_results: int | None = None,
    exclude_dirs: List[str] | None = None,
    exclude_hidden: bool | None = None,
):
    """Find files and directories.

    Args:
        path: Path to search in
        type: Type of search ('file' or 'directory')
        patterns: File patterns to include (e.g., ['*.py', '*.json'])
        max_results: Maximum number of results to return
        exclude_dirs: Directory names to skip
        exclude_hidden: Exclude hidden files and directories

    Returns:
        FindResponse with matching files/directories
    """
    path = self.format_path(path)

    params = {}
    if type is not None:
        params['type'] = type
    if patterns is not None and len(patterns) > 0:
        params['patterns'] = ','.join(patterns)
    if max_results is not None:
        params['maxResults'] = max_results
    if exclude_dirs is not None and len(exclude_dirs) > 0:
        params['excludeDirs'] = ','.join(exclude_dirs)
    if exclude_hidden is not None:
        params['excludeHidden'] = exclude_hidden

    url = f"{self.url}/filesystem-find/{path}"
    headers = {**settings.headers, **self.sandbox_config.headers}

    # Use the persistent client; read the body then close the response so the
    # underlying connection is returned to the pool without closing the client.
    client = self.get_client()
    response = await client.get(url, params=params, headers=headers)
    try:
        data = json.loads(await response.aread())
        self.handle_response_error(response)

        from ..client.models.find_response import FindResponse
        return FindResponse.from_dict(data)
    finally:
        await response.aclose()
198
373
 
199
374
  async def cp(self, source: str, destination: str, max_wait: int = 180000) -> CopyResponse:
200
375
  """Copy files or directories using the cp command.
@@ -328,10 +503,14 @@ class SandboxFileSystem(SandboxAction):
328
503
  headers = {**settings.headers, **self.sandbox_config.headers}
329
504
  body = {"permissions": permissions}
330
505
 
331
- async with self.get_client() as client_instance:
332
- response = await client_instance.post(url, json=body, headers=headers)
506
+ client = self.get_client()
507
+ response = await client.post(url, json=body, headers=headers)
508
+ try:
509
+ data = json.loads(await response.aread())
333
510
  self.handle_response_error(response)
334
- return response.json()
511
+ return data
512
+ finally:
513
+ await response.aclose()
335
514
 
336
515
  async def _upload_part(
337
516
  self, upload_id: str, part_number: int, data: bytes
@@ -344,12 +523,16 @@ class SandboxFileSystem(SandboxAction):
344
523
  # Prepare multipart form data with the file chunk
345
524
  files = {"file": ("part", io.BytesIO(data), "application/octet-stream")}
346
525
 
347
- async with self.get_client() as client_instance:
348
- response = await client_instance.put(
349
- url, files=files, params=params, headers=headers
350
- )
526
+ client = self.get_client()
527
+ response = await client.put(
528
+ url, files=files, params=params, headers=headers
529
+ )
530
+ try:
531
+ data = json.loads(await response.aread())
351
532
  self.handle_response_error(response)
352
- return response.json()
533
+ return data
534
+ finally:
535
+ await response.aclose()
353
536
 
354
537
  async def _complete_multipart_upload(
355
538
  self, upload_id: str, parts: List[Dict[str, Any]]
@@ -359,21 +542,28 @@ class SandboxFileSystem(SandboxAction):
359
542
  headers = {**settings.headers, **self.sandbox_config.headers}
360
543
  body = {"parts": parts}
361
544
 
362
- async with self.get_client() as client_instance:
363
- response = await client_instance.post(url, json=body, headers=headers)
545
+ client = self.get_client()
546
+ response = await client.post(url, json=body, headers=headers)
547
+ try:
548
+ data = json.loads(await response.aread())
364
549
  self.handle_response_error(response)
365
- return SuccessResponse.from_dict(response.json())
550
+ return SuccessResponse.from_dict(data)
551
+ finally:
552
+ await response.aclose()
366
553
 
367
async def _abort_multipart_upload(self, upload_id: str) -> None:
    """Abort a multipart upload and clean up all parts.

    Args:
        upload_id: Identifier of the multipart upload to abort.

    Failures are logged, not raised: abort runs while unwinding from an
    earlier error, and the original exception must propagate instead.
    """
    url = f"{self.url}/filesystem-multipart/{upload_id}/abort"
    headers = {**settings.headers, **self.sandbox_config.headers}

    client = self.get_client()
    response = await client.delete(url, headers=headers)
    try:
        # Don't raise error if abort fails - we want to throw the original error
        if not response.is_success:
            # Fixed: the 0.2.31 change regressed this to print(); restore the
            # module logger used by the previous release.
            logger.warning(f"Warning: Failed to abort multipart upload: {response.status_code}")
    finally:
        await response.aclose()
377
567
 
378
568
  async def _upload_with_multipart(
379
569
  self, path: str, data: bytes, permissions: str = "0644"