deepagents 0.2.7__tar.gz → 0.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {deepagents-0.2.7 → deepagents-0.3.0}/PKG-INFO +5 -8
- {deepagents-0.2.7 → deepagents-0.3.0}/README.md +1 -1
- deepagents-0.3.0/deepagents/backends/composite.py +561 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/backends/filesystem.py +90 -23
- deepagents-0.3.0/deepagents/backends/protocol.py +458 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/backends/sandbox.py +24 -5
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/backends/state.py +6 -10
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/backends/store.py +72 -8
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/graph.py +18 -4
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/middleware/filesystem.py +209 -27
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/middleware/subagents.py +4 -2
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents.egg-info/PKG-INFO +5 -8
- deepagents-0.3.0/deepagents.egg-info/requires.txt +4 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/pyproject.toml +8 -7
- deepagents-0.2.7/deepagents/backends/composite.py +0 -249
- deepagents-0.2.7/deepagents/backends/protocol.py +0 -196
- deepagents-0.2.7/deepagents.egg-info/requires.txt +0 -7
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/__init__.py +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/backends/__init__.py +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/backends/utils.py +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/middleware/__init__.py +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents/middleware/patch_tool_calls.py +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents.egg-info/SOURCES.txt +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents.egg-info/dependency_links.txt +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/deepagents.egg-info/top_level.txt +0 -0
- {deepagents-0.2.7 → deepagents-0.3.0}/setup.cfg +0 -0
--- deepagents-0.2.7/PKG-INFO
+++ deepagents-0.3.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: deepagents
-Version: 0.2.7
+Version: 0.3.0
 Summary: General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph.
 License: MIT
 Project-URL: Homepage, https://docs.langchain.com/oss/python/deepagents/overview
@@ -11,13 +11,10 @@ Project-URL: Slack, https://www.langchain.com/join-community
 Project-URL: Reddit, https://www.reddit.com/r/LangChain/
 Requires-Python: <4.0,>=3.11
 Description-Content-Type: text/markdown
-Requires-Dist: langchain-anthropic<2.0.0,>=1.
-Requires-Dist: langchain<2.0.0,>=1.0
-Requires-Dist: langchain-core<2.0.0,>=1.
+Requires-Dist: langchain-anthropic<2.0.0,>=1.2.0
+Requires-Dist: langchain<2.0.0,>=1.1.0
+Requires-Dist: langchain-core<2.0.0,>=1.1.0
 Requires-Dist: wcmatch
-Requires-Dist: daytona>=0.113.0
-Requires-Dist: runloop-api-client>=0.66.1
-Requires-Dist: tavily>=1.1.0
 
 # 🧠🤖Deep Agents
 
@@ -27,7 +24,7 @@ This architecture, however, can yield agents that are “shallow” and fail to
 Applications like “Deep Research”, "Manus", and “Claude Code” have gotten around this limitation by implementing a combination of four things:
 a **planning tool**, **sub agents**, access to a **file system**, and a **detailed prompt**.
 
-<img src="deep_agents.png" alt="deep agent" width="600"/>
+<img src="../../deep_agents.png" alt="deep agent" width="600"/>
 
 `deepagents` is a Python package that implements these in a general purpose way so that you can easily create a Deep Agent for your application. For a full overview and quickstart of `deepagents`, the best resource is our [docs](https://docs.langchain.com/oss/python/deepagents/overview).
 
--- deepagents-0.2.7/README.md
+++ deepagents-0.3.0/README.md
@@ -6,7 +6,7 @@ This architecture, however, can yield agents that are “shallow” and fail to
 Applications like “Deep Research”, "Manus", and “Claude Code” have gotten around this limitation by implementing a combination of four things:
 a **planning tool**, **sub agents**, access to a **file system**, and a **detailed prompt**.
 
-<img src="deep_agents.png" alt="deep agent" width="600"/>
+<img src="../../deep_agents.png" alt="deep agent" width="600"/>
 
 `deepagents` is a Python package that implements these in a general purpose way so that you can easily create a Deep Agent for your application. For a full overview and quickstart of `deepagents`, the best resource is our [docs](https://docs.langchain.com/oss/python/deepagents/overview).
 
--- /dev/null
+++ deepagents-0.3.0/deepagents/backends/composite.py
@@ -0,0 +1,561 @@
+"""CompositeBackend: Route operations to different backends based on path prefix."""
+
+from collections import defaultdict
+
+from deepagents.backends.protocol import (
+    BackendProtocol,
+    EditResult,
+    ExecuteResponse,
+    FileDownloadResponse,
+    FileInfo,
+    FileUploadResponse,
+    GrepMatch,
+    SandboxBackendProtocol,
+    WriteResult,
+)
+from deepagents.backends.state import StateBackend
+
+
+class CompositeBackend:
+    def __init__(
+        self,
+        default: BackendProtocol | StateBackend,
+        routes: dict[str, BackendProtocol],
+    ) -> None:
+        # Default backend
+        self.default = default
+
+        # Virtual routes
+        self.routes = routes
+
+        # Sort routes by length (longest first) for correct prefix matching
+        self.sorted_routes = sorted(routes.items(), key=lambda x: len(x[0]), reverse=True)
+
+    def _get_backend_and_key(self, key: str) -> tuple[BackendProtocol, str]:
+        """Determine which backend handles this key and strip prefix.
+
+        Args:
+            key: Original file path
+
+        Returns:
+            Tuple of (backend, stripped_key) where stripped_key has the route
+            prefix removed (but keeps leading slash).
+        """
+        # Check routes in order of length (longest first)
+        for prefix, backend in self.sorted_routes:
+            if key.startswith(prefix):
+                # Strip full prefix and ensure a leading slash remains
+                # e.g., "/memories/notes.txt" → "/notes.txt"; "/memories/" → "/"
+                suffix = key[len(prefix) :]
+                stripped_key = f"/{suffix}" if suffix else "/"
+                return backend, stripped_key
+
+        return self.default, key
+
+    def ls_info(self, path: str) -> list[FileInfo]:
+        """List files and directories in the specified directory (non-recursive).
+
+        Args:
+            path: Absolute path to directory.
+
+        Returns:
+            List of FileInfo-like dicts with route prefixes added, for files and directories directly in the directory.
+            Directories have a trailing / in their path and is_dir=True.
+        """
+        # Check if path matches a specific route
+        for route_prefix, backend in self.sorted_routes:
+            if path.startswith(route_prefix.rstrip("/")):
+                # Query only the matching routed backend
+                suffix = path[len(route_prefix) :]
+                search_path = f"/{suffix}" if suffix else "/"
+                infos = backend.ls_info(search_path)
+                prefixed: list[FileInfo] = []
+                for fi in infos:
+                    fi = dict(fi)
+                    fi["path"] = f"{route_prefix[:-1]}{fi['path']}"
+                    prefixed.append(fi)
+                return prefixed
+
+        # At root, aggregate default and all routed backends
+        if path == "/":
+            results: list[FileInfo] = []
+            results.extend(self.default.ls_info(path))
+            for route_prefix, backend in self.sorted_routes:
+                # Add the route itself as a directory (e.g., /memories/)
+                results.append(
+                    {
+                        "path": route_prefix,
+                        "is_dir": True,
+                        "size": 0,
+                        "modified_at": "",
+                    }
+                )
+
+            results.sort(key=lambda x: x.get("path", ""))
+            return results
+
+        # Path doesn't match a route: query only default backend
+        return self.default.ls_info(path)
+
+    async def als_info(self, path: str) -> list[FileInfo]:
+        """Async version of ls_info."""
+        # Check if path matches a specific route
+        for route_prefix, backend in self.sorted_routes:
+            if path.startswith(route_prefix.rstrip("/")):
+                # Query only the matching routed backend
+                suffix = path[len(route_prefix) :]
+                search_path = f"/{suffix}" if suffix else "/"
+                infos = await backend.als_info(search_path)
+                prefixed: list[FileInfo] = []
+                for fi in infos:
+                    fi = dict(fi)
+                    fi["path"] = f"{route_prefix[:-1]}{fi['path']}"
+                    prefixed.append(fi)
+                return prefixed
+
+        # At root, aggregate default and all routed backends
+        if path == "/":
+            results: list[FileInfo] = []
+            results.extend(await self.default.als_info(path))
+            for route_prefix, backend in self.sorted_routes:
+                # Add the route itself as a directory (e.g., /memories/)
+                results.append(
+                    {
+                        "path": route_prefix,
+                        "is_dir": True,
+                        "size": 0,
+                        "modified_at": "",
+                    }
+                )
+
+            results.sort(key=lambda x: x.get("path", ""))
+            return results
+
+        # Path doesn't match a route: query only default backend
+        return await self.default.als_info(path)
+
+    def read(
+        self,
+        file_path: str,
+        offset: int = 0,
+        limit: int = 2000,
+    ) -> str:
+        """Read file content, routing to appropriate backend.
+
+        Args:
+            file_path: Absolute file path.
+            offset: Line offset to start reading from (0-indexed).
+            limit: Maximum number of lines to read.
+
+        Returns:
+            Formatted file content with line numbers, or error message.
+        """
+        backend, stripped_key = self._get_backend_and_key(file_path)
+        return backend.read(stripped_key, offset=offset, limit=limit)
+
+    async def aread(
+        self,
+        file_path: str,
+        offset: int = 0,
+        limit: int = 2000,
+    ) -> str:
+        """Async version of read."""
+        backend, stripped_key = self._get_backend_and_key(file_path)
+        return await backend.aread(stripped_key, offset=offset, limit=limit)
+
+    def grep_raw(
+        self,
+        pattern: str,
+        path: str | None = None,
+        glob: str | None = None,
+    ) -> list[GrepMatch] | str:
+        # If path targets a specific route, search only that backend
+        for route_prefix, backend in self.sorted_routes:
+            if path is not None and path.startswith(route_prefix.rstrip("/")):
+                search_path = path[len(route_prefix) - 1 :]
+                raw = backend.grep_raw(pattern, search_path if search_path else "/", glob)
+                if isinstance(raw, str):
+                    return raw
+                return [{**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw]
+
+        # Otherwise, search default and all routed backends and merge
+        all_matches: list[GrepMatch] = []
+        raw_default = self.default.grep_raw(pattern, path, glob)  # type: ignore[attr-defined]
+        if isinstance(raw_default, str):
+            # This happens if error occurs
+            return raw_default
+        all_matches.extend(raw_default)
+
+        for route_prefix, backend in self.routes.items():
+            raw = backend.grep_raw(pattern, "/", glob)
+            if isinstance(raw, str):
+                # This happens if error occurs
+                return raw
+            all_matches.extend({**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw)
+
+        return all_matches
+
+    async def agrep_raw(
+        self,
+        pattern: str,
+        path: str | None = None,
+        glob: str | None = None,
+    ) -> list[GrepMatch] | str:
+        """Async version of grep_raw."""
+        # If path targets a specific route, search only that backend
+        for route_prefix, backend in self.sorted_routes:
+            if path is not None and path.startswith(route_prefix.rstrip("/")):
+                search_path = path[len(route_prefix) - 1 :]
+                raw = await backend.agrep_raw(pattern, search_path if search_path else "/", glob)
+                if isinstance(raw, str):
+                    return raw
+                return [{**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw]
+
+        # Otherwise, search default and all routed backends and merge
+        all_matches: list[GrepMatch] = []
+        raw_default = await self.default.agrep_raw(pattern, path, glob)  # type: ignore[attr-defined]
+        if isinstance(raw_default, str):
+            # This happens if error occurs
+            return raw_default
+        all_matches.extend(raw_default)
+
+        for route_prefix, backend in self.routes.items():
+            raw = await backend.agrep_raw(pattern, "/", glob)
+            if isinstance(raw, str):
+                # This happens if error occurs
+                return raw
+            all_matches.extend({**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw)
+
+        return all_matches
+
+    def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+        results: list[FileInfo] = []
+
+        # Route based on path, not pattern
+        for route_prefix, backend in self.sorted_routes:
+            if path.startswith(route_prefix.rstrip("/")):
+                search_path = path[len(route_prefix) - 1 :]
+                infos = backend.glob_info(pattern, search_path if search_path else "/")
+                return [{**fi, "path": f"{route_prefix[:-1]}{fi['path']}"} for fi in infos]
+
+        # Path doesn't match any specific route - search default backend AND all routed backends
+        results.extend(self.default.glob_info(pattern, path))
+
+        for route_prefix, backend in self.routes.items():
+            infos = backend.glob_info(pattern, "/")
+            results.extend({**fi, "path": f"{route_prefix[:-1]}{fi['path']}"} for fi in infos)
+
+        # Deterministic ordering
+        results.sort(key=lambda x: x.get("path", ""))
+        return results
+
+    async def aglob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+        """Async version of glob_info."""
+        results: list[FileInfo] = []
+
+        # Route based on path, not pattern
+        for route_prefix, backend in self.sorted_routes:
+            if path.startswith(route_prefix.rstrip("/")):
+                search_path = path[len(route_prefix) - 1 :]
+                infos = await backend.aglob_info(pattern, search_path if search_path else "/")
+                return [{**fi, "path": f"{route_prefix[:-1]}{fi['path']}"} for fi in infos]
+
+        # Path doesn't match any specific route - search default backend AND all routed backends
+        results.extend(await self.default.aglob_info(pattern, path))
+
+        for route_prefix, backend in self.routes.items():
+            infos = await backend.aglob_info(pattern, "/")
+            results.extend({**fi, "path": f"{route_prefix[:-1]}{fi['path']}"} for fi in infos)
+
+        # Deterministic ordering
+        results.sort(key=lambda x: x.get("path", ""))
+        return results
+
+    def write(
+        self,
+        file_path: str,
+        content: str,
+    ) -> WriteResult:
+        """Create a new file, routing to appropriate backend.
+
+        Args:
+            file_path: Absolute file path.
+            content: File content as a string.
+
+        Returns:
+            Success message or Command object, or error if file already exists.
+        """
+        backend, stripped_key = self._get_backend_and_key(file_path)
+        res = backend.write(stripped_key, content)
+        # If this is a state-backed update and default has state, merge so listings reflect changes
+        if res.files_update:
+            try:
+                runtime = getattr(self.default, "runtime", None)
+                if runtime is not None:
+                    state = runtime.state
+                    files = state.get("files", {})
+                    files.update(res.files_update)
+                    state["files"] = files
+            except Exception:
+                pass
+        return res
+
+    async def awrite(
+        self,
+        file_path: str,
+        content: str,
+    ) -> WriteResult:
+        """Async version of write."""
+        backend, stripped_key = self._get_backend_and_key(file_path)
+        res = await backend.awrite(stripped_key, content)
+        # If this is a state-backed update and default has state, merge so listings reflect changes
+        if res.files_update:
+            try:
+                runtime = getattr(self.default, "runtime", None)
+                if runtime is not None:
+                    state = runtime.state
+                    files = state.get("files", {})
+                    files.update(res.files_update)
+                    state["files"] = files
+            except Exception:
+                pass
+        return res
+
+    def edit(
+        self,
+        file_path: str,
+        old_string: str,
+        new_string: str,
+        replace_all: bool = False,
+    ) -> EditResult:
+        """Edit a file, routing to appropriate backend.
+
+        Args:
+            file_path: Absolute file path.
+            old_string: String to find and replace.
+            new_string: Replacement string.
+            replace_all: If True, replace all occurrences.
+
+        Returns:
+            Success message or Command object, or error message on failure.
+        """
+        backend, stripped_key = self._get_backend_and_key(file_path)
+        res = backend.edit(stripped_key, old_string, new_string, replace_all=replace_all)
+        if res.files_update:
+            try:
+                runtime = getattr(self.default, "runtime", None)
+                if runtime is not None:
+                    state = runtime.state
+                    files = state.get("files", {})
+                    files.update(res.files_update)
+                    state["files"] = files
+            except Exception:
+                pass
+        return res
+
+    async def aedit(
+        self,
+        file_path: str,
+        old_string: str,
+        new_string: str,
+        replace_all: bool = False,
+    ) -> EditResult:
+        """Async version of edit."""
+        backend, stripped_key = self._get_backend_and_key(file_path)
+        res = await backend.aedit(stripped_key, old_string, new_string, replace_all=replace_all)
+        if res.files_update:
+            try:
+                runtime = getattr(self.default, "runtime", None)
+                if runtime is not None:
+                    state = runtime.state
+                    files = state.get("files", {})
+                    files.update(res.files_update)
+                    state["files"] = files
+            except Exception:
+                pass
+        return res
+
+    def execute(
+        self,
+        command: str,
+    ) -> ExecuteResponse:
+        """Execute a command via the default backend.
+
+        Execution is not path-specific, so it always delegates to the default backend.
+        The default backend must implement SandboxBackendProtocol for this to work.
+
+        Args:
+            command: Full shell command string to execute.
+
+        Returns:
+            ExecuteResponse with combined output, exit code, and truncation flag.
+
+        Raises:
+            NotImplementedError: If default backend doesn't support execution.
+        """
+        if isinstance(self.default, SandboxBackendProtocol):
+            return self.default.execute(command)
+
+        # This shouldn't be reached if the runtime check in the execute tool works correctly,
+        # but we include it as a safety fallback.
+        raise NotImplementedError(
+            "Default backend doesn't support command execution (SandboxBackendProtocol). "
+            "To enable execution, provide a default backend that implements SandboxBackendProtocol."
+        )
+
+    async def aexecute(
+        self,
+        command: str,
+    ) -> ExecuteResponse:
+        """Async version of execute."""
+        if isinstance(self.default, SandboxBackendProtocol):
+            return await self.default.aexecute(command)
+
+        # This shouldn't be reached if the runtime check in the execute tool works correctly,
+        # but we include it as a safety fallback.
+        raise NotImplementedError(
+            "Default backend doesn't support command execution (SandboxBackendProtocol). "
+            "To enable execution, provide a default backend that implements SandboxBackendProtocol."
+        )
+
+    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Upload multiple files, batching by backend for efficiency.
+
+        Groups files by their target backend, calls each backend's upload_files
+        once with all files for that backend, then merges results in original order.
+
+        Args:
+            files: List of (path, content) tuples to upload.
+
+        Returns:
+            List of FileUploadResponse objects, one per input file.
+            Response order matches input order.
+        """
+        # Pre-allocate result list
+        results: list[FileUploadResponse | None] = [None] * len(files)
+
+        # Group files by backend, tracking original indices
+        from collections import defaultdict
+
+        backend_batches: dict[BackendProtocol, list[tuple[int, str, bytes]]] = defaultdict(list)
+
+        for idx, (path, content) in enumerate(files):
+            backend, stripped_path = self._get_backend_and_key(path)
+            backend_batches[backend].append((idx, stripped_path, content))
+
+        # Process each backend's batch
+        for backend, batch in backend_batches.items():
+            # Extract data for backend call
+            indices, stripped_paths, contents = zip(*batch, strict=False)
+            batch_files = list(zip(stripped_paths, contents, strict=False))
+
+            # Call backend once with all its files
+            batch_responses = backend.upload_files(batch_files)
+
+            # Place responses at original indices with original paths
+            for i, orig_idx in enumerate(indices):
+                results[orig_idx] = FileUploadResponse(
+                    path=files[orig_idx][0],  # Original path
+                    error=batch_responses[i].error if i < len(batch_responses) else None,
+                )
+
+        return results  # type: ignore[return-value]
+
+    async def aupload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Async version of upload_files."""
+        # Pre-allocate result list
+        results: list[FileUploadResponse | None] = [None] * len(files)
+
+        # Group files by backend, tracking original indices
+        backend_batches: dict[BackendProtocol, list[tuple[int, str, bytes]]] = defaultdict(list)
+
+        for idx, (path, content) in enumerate(files):
+            backend, stripped_path = self._get_backend_and_key(path)
+            backend_batches[backend].append((idx, stripped_path, content))
+
+        # Process each backend's batch
+        for backend, batch in backend_batches.items():
+            # Extract data for backend call
+            indices, stripped_paths, contents = zip(*batch, strict=False)
+            batch_files = list(zip(stripped_paths, contents, strict=False))
+
+            # Call backend once with all its files
+            batch_responses = await backend.aupload_files(batch_files)
+
+            # Place responses at original indices with original paths
+            for i, orig_idx in enumerate(indices):
+                results[orig_idx] = FileUploadResponse(
+                    path=files[orig_idx][0],  # Original path
+                    error=batch_responses[i].error if i < len(batch_responses) else None,
+                )
+
+        return results  # type: ignore[return-value]
+
+    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Download multiple files, batching by backend for efficiency.
+
+        Groups paths by their target backend, calls each backend's download_files
+        once with all paths for that backend, then merges results in original order.
+
+        Args:
+            paths: List of file paths to download.
+
+        Returns:
+            List of FileDownloadResponse objects, one per input path.
+            Response order matches input order.
+        """
+        # Pre-allocate result list
+        results: list[FileDownloadResponse | None] = [None] * len(paths)
+
+        backend_batches: dict[BackendProtocol, list[tuple[int, str]]] = defaultdict(list)
+
+        for idx, path in enumerate(paths):
+            backend, stripped_path = self._get_backend_and_key(path)
+            backend_batches[backend].append((idx, stripped_path))
+
+        # Process each backend's batch
+        for backend, batch in backend_batches.items():
+            # Extract data for backend call
+            indices, stripped_paths = zip(*batch, strict=False)
+
+            # Call backend once with all its paths
+            batch_responses = backend.download_files(list(stripped_paths))
+
+            # Place responses at original indices with original paths
+            for i, orig_idx in enumerate(indices):
+                results[orig_idx] = FileDownloadResponse(
+                    path=paths[orig_idx],  # Original path
+                    content=batch_responses[i].content if i < len(batch_responses) else None,
+                    error=batch_responses[i].error if i < len(batch_responses) else None,
+                )
+
+        return results  # type: ignore[return-value]
+
+    async def adownload_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Async version of download_files."""
+        # Pre-allocate result list
+        results: list[FileDownloadResponse | None] = [None] * len(paths)
+
+        backend_batches: dict[BackendProtocol, list[tuple[int, str]]] = defaultdict(list)
+
+        for idx, path in enumerate(paths):
+            backend, stripped_path = self._get_backend_and_key(path)
+            backend_batches[backend].append((idx, stripped_path))
+
+        # Process each backend's batch
+        for backend, batch in backend_batches.items():
+            # Extract data for backend call
+            indices, stripped_paths = zip(*batch, strict=False)
+
+            # Call backend once with all its paths
+            batch_responses = await backend.adownload_files(list(stripped_paths))
+
+            # Place responses at original indices with original paths
+            for i, orig_idx in enumerate(indices):
+                results[orig_idx] = FileDownloadResponse(
+                    path=paths[orig_idx],  # Original path
+                    content=batch_responses[i].content if i < len(batch_responses) else None,
+                    error=batch_responses[i].error if i < len(batch_responses) else None,
+                )
+
+        return results  # type: ignore[return-value]
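To make the new routing layer concrete, here is a minimal usage sketch. It is not part of the package diff: `FakeBackend` and the `/memories/` mount are hypothetical, and in real use the default and routed backends would be instances of the backends shipped under `deepagents/backends/` (state, filesystem, store, sandbox), whose `write` returns a real `WriteResult`.

```python
from types import SimpleNamespace

from deepagents.backends.composite import CompositeBackend


class FakeBackend:
    """Hypothetical in-memory backend; implements only what this sketch calls."""

    def __init__(self) -> None:
        self.files: dict[str, str] = {}

    def read(self, file_path: str, offset: int = 0, limit: int = 2000) -> str:
        return self.files.get(file_path, f"Error: file {file_path} not found")

    def write(self, file_path: str, content: str) -> SimpleNamespace:
        self.files[file_path] = content
        # Stand-in for WriteResult; files_update=None means CompositeBackend
        # skips its agent-state merge step.
        return SimpleNamespace(files_update=None)


scratch = FakeBackend()   # default backend: owns all unprefixed paths
memories = FakeBackend()  # routed backend: owns everything under /memories/

fs = CompositeBackend(default=scratch, routes={"/memories/": memories})

# A routed write: the "/memories/" prefix is stripped before delegation,
# so the routed backend stores the file under "/notes.txt".
fs.write("/memories/notes.txt", "remember this")
assert memories.files == {"/notes.txt": "remember this"}

# An unrouted write falls through to the default backend, path unchanged.
fs.write("/draft.txt", "work in progress")
assert scratch.files == {"/draft.txt": "work in progress"}

# Reads are routed the same way; offset/limit pass through untouched.
print(fs.read("/memories/notes.txt"))  # -> remember this
```

Because `__init__` sorts routes longest-first and `_get_backend_and_key` returns the first match, a more specific mount such as a hypothetical `/memories/archive/` would shadow `/memories/` for paths that fall under both.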