blaxel 0.2.30__py3-none-any.whl → 0.2.31rc120__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. blaxel/__init__.py +2 -2
  2. blaxel/core/client/client.py +18 -2
  3. blaxel/core/sandbox/client/api/filesystem/delete_filesystem_tree_path.py +188 -0
  4. blaxel/core/sandbox/client/api/filesystem/get_filesystem_content_search_path.py +250 -0
  5. blaxel/core/sandbox/client/api/filesystem/get_filesystem_find_path.py +248 -0
  6. blaxel/core/sandbox/client/api/filesystem/get_filesystem_search_path.py +237 -0
  7. blaxel/core/sandbox/client/api/filesystem/get_filesystem_tree_path.py +197 -0
  8. blaxel/core/sandbox/client/api/filesystem/put_filesystem_tree_path.py +223 -0
  9. blaxel/core/sandbox/client/models/__init__.py +16 -0
  10. blaxel/core/sandbox/client/models/content_search_match.py +98 -0
  11. blaxel/core/sandbox/client/models/content_search_response.py +97 -0
  12. blaxel/core/sandbox/client/models/find_match.py +69 -0
  13. blaxel/core/sandbox/client/models/find_response.py +88 -0
  14. blaxel/core/sandbox/client/models/fuzzy_search_match.py +78 -0
  15. blaxel/core/sandbox/client/models/fuzzy_search_response.py +88 -0
  16. blaxel/core/sandbox/client/models/tree_request.py +76 -0
  17. blaxel/core/sandbox/client/models/tree_request_files.py +49 -0
  18. blaxel/core/sandbox/default/action.py +12 -8
  19. blaxel/core/sandbox/default/filesystem.py +238 -48
  20. blaxel/core/sandbox/default/interpreter.py +62 -55
  21. blaxel/core/sandbox/default/process.py +66 -46
  22. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/METADATA +1 -1
  23. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/RECORD +25 -11
  24. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/WHEEL +0 -0
  25. {blaxel-0.2.30.dist-info → blaxel-0.2.31rc120.dist-info}/licenses/LICENSE +0 -0
blaxel/__init__.py CHANGED
@@ -4,8 +4,8 @@ from .core.common.autoload import autoload
  from .core.common.env import env
  from .core.common.settings import settings
 
- __version__ = "0.2.30"
- __commit__ = "65a88e27efd0b99c49c23a5cc339865d3bafca2e"
+ __version__ = "0.2.31.pre-120"
+ __commit__ = "a23256023f1d955467e2ee297a96b3d321172acf"
  __sentry_dsn__ = "https://9711de13cd02b285ca4378c01de8dc30@o4508714045276160.ingest.us.sentry.io/4510461121462272"
  __all__ = ["autoload", "settings", "env"]
 
blaxel/core/client/client.py CHANGED
@@ -105,6 +105,12 @@ class Client:
  def get_httpx_client(self) -> httpx.Client:
  """Get the underlying httpx.Client, constructing a new one if not previously set"""
  if self._client is None:
+ # Set default limits if not provided in httpx_args to prevent TLS handshake issues
+ limits = self._httpx_args.get("limits")
+ if limits is None:
+ limits = httpx.Limits(max_connections=100, max_keepalive_connections=20)
+ # Disable HTTP/2 to prevent TLS handshake issues in certain environments
+ http2 = self._httpx_args.get("http2", False)
  self._client = httpx.Client(
  base_url=self._base_url,
  cookies=self._cookies,
@@ -113,7 +119,9 @@ class Client:
  verify=self._verify_ssl,
  follow_redirects=self._follow_redirects,
  auth=self._auth,
- **self._httpx_args,
+ limits=limits,
+ http2=http2,
+ **{k: v for k, v in self._httpx_args.items() if k not in ("limits", "http2")},
  )
  return self._client
 
@@ -137,6 +145,12 @@ class Client:
  def get_async_httpx_client(self) -> httpx.AsyncClient:
  """Get the underlying httpx.AsyncClient, constructing a new one if not previously set"""
  if self._async_client is None:
+ # Set default limits if not provided in httpx_args to prevent TLS handshake issues
+ limits = self._httpx_args.get("limits")
+ if limits is None:
+ limits = httpx.Limits(max_connections=100, max_keepalive_connections=20)
+ # Disable HTTP/2 to prevent TLS handshake issues in certain environments
+ http2 = self._httpx_args.get("http2", False)
  self._async_client = httpx.AsyncClient(
  base_url=self._base_url,
  cookies=self._cookies,
@@ -145,7 +159,9 @@ class Client:
  verify=self._verify_ssl,
  follow_redirects=self._follow_redirects,
  auth=self._auth,
- **self._httpx_args,
+ limits=limits,
+ http2=http2,
+ **{k: v for k, v in self._httpx_args.items() if k not in ("limits", "http2")},
  )
  return self._async_client
 
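The change above gives the shared sync and async httpx clients explicit connection limits and disables HTTP/2 by default, while still letting callers override both through httpx_args. A minimal usage sketch (not part of the diff), assuming the generated Client accepts base_url and httpx_args keyword arguments as openapi-python-client output usually does; the URL is hypothetical:

import httpx
from blaxel.core.client.client import Client

client = Client(
    base_url="https://api.blaxel.example",
    httpx_args={
        # Passing limits or http2 explicitly overrides the new defaults
        # (Limits(max_connections=100, max_keepalive_connections=20), http2=False).
        "limits": httpx.Limits(max_connections=10, max_keepalive_connections=5),
        "http2": True,  # enabling HTTP/2 requires httpx's optional h2 dependency
    },
)
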
blaxel/core/sandbox/client/api/filesystem/delete_filesystem_tree_path.py ADDED
@@ -0,0 +1,188 @@
+ from http import HTTPStatus
+ from typing import Any, Union
+
+ import httpx
+
+ from ... import errors
+ from ...client import Client
+ from ...models.error_response import ErrorResponse
+ from ...models.success_response import SuccessResponse
+ from ...types import UNSET, Response, Unset
+
+
+ def _get_kwargs(
+ path: str,
+ *,
+ recursive: Union[Unset, bool] = UNSET,
+ ) -> dict[str, Any]:
+ params: dict[str, Any] = {}
+
+ params["recursive"] = recursive
+
+ params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+
+ _kwargs: dict[str, Any] = {
+ "method": "delete",
+ "url": f"/filesystem/tree/{path}",
+ "params": params,
+ }
+
+ return _kwargs
+
+
+ def _parse_response(*, client: Client, response: httpx.Response) -> Union[ErrorResponse, SuccessResponse] | None:
+ if response.status_code == 200:
+ response_200 = SuccessResponse.from_dict(response.json())
+
+ return response_200
+ if response.status_code == 400:
+ response_400 = ErrorResponse.from_dict(response.json())
+
+ return response_400
+ if response.status_code == 422:
+ response_422 = ErrorResponse.from_dict(response.json())
+
+ return response_422
+ if response.status_code == 500:
+ response_500 = ErrorResponse.from_dict(response.json())
+
+ return response_500
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(response.status_code, response.content)
+ else:
+ return None
+
+
+ def _build_response(*, client: Client, response: httpx.Response) -> Response[Union[ErrorResponse, SuccessResponse]]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+ def sync_detailed(
+ path: str,
+ *,
+ client: Union[Client],
+ recursive: Union[Unset, bool] = UNSET,
+ ) -> Response[Union[ErrorResponse, SuccessResponse]]:
+ """Delete directory tree
+
+ Delete a directory tree recursively
+
+ Args:
+ path (str):
+ recursive (Union[Unset, bool]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[ErrorResponse, SuccessResponse]]
+ """
+
+ kwargs = _get_kwargs(
+ path=path,
+ recursive=recursive,
+ )
+
+ response = client.get_httpx_client().request(
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+ def sync(
+ path: str,
+ *,
+ client: Union[Client],
+ recursive: Union[Unset, bool] = UNSET,
+ ) -> Union[ErrorResponse, SuccessResponse] | None:
+ """Delete directory tree
+
+ Delete a directory tree recursively
+
+ Args:
+ path (str):
+ recursive (Union[Unset, bool]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Union[ErrorResponse, SuccessResponse]
+ """
+
+ return sync_detailed(
+ path=path,
+ client=client,
+ recursive=recursive,
+ ).parsed
+
+
+ async def asyncio_detailed(
+ path: str,
+ *,
+ client: Union[Client],
+ recursive: Union[Unset, bool] = UNSET,
+ ) -> Response[Union[ErrorResponse, SuccessResponse]]:
+ """Delete directory tree
+
+ Delete a directory tree recursively
+
+ Args:
+ path (str):
+ recursive (Union[Unset, bool]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[ErrorResponse, SuccessResponse]]
+ """
+
+ kwargs = _get_kwargs(
+ path=path,
+ recursive=recursive,
+ )
+
+ response = await client.get_async_httpx_client().request(**kwargs)
+
+ return _build_response(client=client, response=response)
+
+
+ async def asyncio(
+ path: str,
+ *,
+ client: Union[Client],
+ recursive: Union[Unset, bool] = UNSET,
+ ) -> Union[ErrorResponse, SuccessResponse] | None:
+ """Delete directory tree
+
+ Delete a directory tree recursively
+
+ Args:
+ path (str):
+ recursive (Union[Unset, bool]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Union[ErrorResponse, SuccessResponse]
+ """
+
+ return (
+ await asyncio_detailed(
+ path=path,
+ client=client,
+ recursive=recursive,
+ )
+ ).parsed
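
This new endpoint wrapper follows the usual generated-client layout: sync/sync_detailed and asyncio/asyncio_detailed all route through _get_kwargs, which issues DELETE /filesystem/tree/{path} with an optional recursive query parameter. A minimal usage sketch, assuming the module import path mirrors the file path above and that `client` is an already configured sandbox Client; the directory path is hypothetical:

from blaxel.core.sandbox.client.api.filesystem import delete_filesystem_tree_path

result = delete_filesystem_tree_path.sync(
    path="tmp/build-artifacts",
    client=client,
    recursive=True,  # sent as the ?recursive= query parameter
)
# On 200 `result` is a SuccessResponse; on 400/422/500 it is an ErrorResponse;
# for undocumented statuses it is None unless raise_on_unexpected_status is set.
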
blaxel/core/sandbox/client/api/filesystem/get_filesystem_content_search_path.py ADDED
@@ -0,0 +1,250 @@
+ from http import HTTPStatus
+ from typing import Any, Union
+
+ import httpx
+
+ from ... import errors
+ from ...client import Client
+ from ...models.content_search_response import ContentSearchResponse
+ from ...models.error_response import ErrorResponse
+ from ...types import UNSET, Response, Unset
+
+
+ def _get_kwargs(
+ path: str,
+ *,
+ query: str,
+ case_sensitive: Union[Unset, bool] = UNSET,
+ max_results: Union[Unset, int] = UNSET,
+ file_pattern: Union[Unset, str] = UNSET,
+ exclude_dirs: Union[Unset, str] = UNSET,
+ ) -> dict[str, Any]:
+ params: dict[str, Any] = {}
+
+ params["query"] = query
+
+ params["caseSensitive"] = case_sensitive
+
+ params["maxResults"] = max_results
+
+ params["filePattern"] = file_pattern
+
+ params["excludeDirs"] = exclude_dirs
+
+ params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+
+ _kwargs: dict[str, Any] = {
+ "method": "get",
+ "url": f"/filesystem-content-search/{path}",
+ "params": params,
+ }
+
+ return _kwargs
+
+
+ def _parse_response(*, client: Client, response: httpx.Response) -> Union[ContentSearchResponse, ErrorResponse] | None:
+ if response.status_code == 200:
+ response_200 = ContentSearchResponse.from_dict(response.json())
+
+ return response_200
+ if response.status_code == 400:
+ response_400 = ErrorResponse.from_dict(response.json())
+
+ return response_400
+ if response.status_code == 422:
+ response_422 = ErrorResponse.from_dict(response.json())
+
+ return response_422
+ if response.status_code == 500:
+ response_500 = ErrorResponse.from_dict(response.json())
+
+ return response_500
+ if client.raise_on_unexpected_status:
+ raise errors.UnexpectedStatus(response.status_code, response.content)
+ else:
+ return None
+
+
+ def _build_response(
+ *, client: Client, response: httpx.Response
+ ) -> Response[Union[ContentSearchResponse, ErrorResponse]]:
+ return Response(
+ status_code=HTTPStatus(response.status_code),
+ content=response.content,
+ headers=response.headers,
+ parsed=_parse_response(client=client, response=response),
+ )
+
+
+ def sync_detailed(
+ path: str,
+ *,
+ client: Union[Client],
+ query: str,
+ case_sensitive: Union[Unset, bool] = UNSET,
+ max_results: Union[Unset, int] = UNSET,
+ file_pattern: Union[Unset, str] = UNSET,
+ exclude_dirs: Union[Unset, str] = UNSET,
+ ) -> Response[Union[ContentSearchResponse, ErrorResponse]]:
+ """Search for text content in files
+
+ Searches for text content inside files using ripgrep. Returns matching lines with context.
+
+ Args:
+ path (str):
+ query (str):
+ case_sensitive (Union[Unset, bool]):
+ max_results (Union[Unset, int]):
+ file_pattern (Union[Unset, str]):
+ exclude_dirs (Union[Unset, str]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[ContentSearchResponse, ErrorResponse]]
+ """
+
+ kwargs = _get_kwargs(
+ path=path,
+ query=query,
+ case_sensitive=case_sensitive,
+ max_results=max_results,
+ file_pattern=file_pattern,
+ exclude_dirs=exclude_dirs,
+ )
+
+ response = client.get_httpx_client().request(
+ **kwargs,
+ )
+
+ return _build_response(client=client, response=response)
+
+
+ def sync(
+ path: str,
+ *,
+ client: Union[Client],
+ query: str,
+ case_sensitive: Union[Unset, bool] = UNSET,
+ max_results: Union[Unset, int] = UNSET,
+ file_pattern: Union[Unset, str] = UNSET,
+ exclude_dirs: Union[Unset, str] = UNSET,
+ ) -> Union[ContentSearchResponse, ErrorResponse] | None:
+ """Search for text content in files
+
+ Searches for text content inside files using ripgrep. Returns matching lines with context.
+
+ Args:
+ path (str):
+ query (str):
+ case_sensitive (Union[Unset, bool]):
+ max_results (Union[Unset, int]):
+ file_pattern (Union[Unset, str]):
+ exclude_dirs (Union[Unset, str]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Union[ContentSearchResponse, ErrorResponse]
+ """
+
+ return sync_detailed(
+ path=path,
+ client=client,
+ query=query,
+ case_sensitive=case_sensitive,
+ max_results=max_results,
+ file_pattern=file_pattern,
+ exclude_dirs=exclude_dirs,
+ ).parsed
+
+
+ async def asyncio_detailed(
+ path: str,
+ *,
+ client: Union[Client],
+ query: str,
+ case_sensitive: Union[Unset, bool] = UNSET,
+ max_results: Union[Unset, int] = UNSET,
+ file_pattern: Union[Unset, str] = UNSET,
+ exclude_dirs: Union[Unset, str] = UNSET,
+ ) -> Response[Union[ContentSearchResponse, ErrorResponse]]:
+ """Search for text content in files
+
+ Searches for text content inside files using ripgrep. Returns matching lines with context.
+
+ Args:
+ path (str):
+ query (str):
+ case_sensitive (Union[Unset, bool]):
+ max_results (Union[Unset, int]):
+ file_pattern (Union[Unset, str]):
+ exclude_dirs (Union[Unset, str]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Response[Union[ContentSearchResponse, ErrorResponse]]
+ """
+
+ kwargs = _get_kwargs(
+ path=path,
+ query=query,
+ case_sensitive=case_sensitive,
+ max_results=max_results,
+ file_pattern=file_pattern,
+ exclude_dirs=exclude_dirs,
+ )
+
+ response = await client.get_async_httpx_client().request(**kwargs)
+
+ return _build_response(client=client, response=response)
+
+
+ async def asyncio(
+ path: str,
+ *,
+ client: Union[Client],
+ query: str,
+ case_sensitive: Union[Unset, bool] = UNSET,
+ max_results: Union[Unset, int] = UNSET,
+ file_pattern: Union[Unset, str] = UNSET,
+ exclude_dirs: Union[Unset, str] = UNSET,
+ ) -> Union[ContentSearchResponse, ErrorResponse] | None:
+ """Search for text content in files
+
+ Searches for text content inside files using ripgrep. Returns matching lines with context.
+
+ Args:
+ path (str):
+ query (str):
+ case_sensitive (Union[Unset, bool]):
+ max_results (Union[Unset, int]):
+ file_pattern (Union[Unset, str]):
+ exclude_dirs (Union[Unset, str]):
+
+ Raises:
+ errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+ httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+ Returns:
+ Union[ContentSearchResponse, ErrorResponse]
+ """
+
+ return (
+ await asyncio_detailed(
+ path=path,
+ client=client,
+ query=query,
+ case_sensitive=case_sensitive,
+ max_results=max_results,
+ file_pattern=file_pattern,
+ exclude_dirs=exclude_dirs,
+ )
+ ).parsed
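
The content-search wrapper has the same shape but issues GET /filesystem-content-search/{path}, translating the snake_case keyword arguments into camelCase query parameters (caseSensitive, maxResults, filePattern, excludeDirs). A minimal sketch of the async variant, again assuming the import path mirrors the file path above and that `client` is an already configured sandbox Client; the search values are hypothetical:

from blaxel.core.sandbox.client.api.filesystem import get_filesystem_content_search_path

# Must run inside an async function.
matches = await get_filesystem_content_search_path.asyncio(
    path="app/src",
    client=client,
    query="TODO",
    case_sensitive=False,         # -> caseSensitive
    max_results=50,               # -> maxResults
    file_pattern="*.py",          # -> filePattern
    exclude_dirs="node_modules",  # -> excludeDirs
)
# Returns ContentSearchResponse on 200, ErrorResponse on 400/422/500, or None
# for undocumented statuses when raise_on_unexpected_status is False.
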