deepagents 0.2.7__tar.gz → 0.2.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {deepagents-0.2.7 → deepagents-0.2.8}/PKG-INFO +2 -5
- {deepagents-0.2.7 → deepagents-0.2.8}/README.md +1 -1
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/composite.py +87 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/filesystem.py +85 -18
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/protocol.py +122 -9
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/sandbox.py +19 -1
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/state.py +1 -7
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/store.py +65 -3
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/middleware/filesystem.py +24 -7
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/middleware/subagents.py +4 -2
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents.egg-info/PKG-INFO +2 -5
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents.egg-info/requires.txt +0 -3
- {deepagents-0.2.7 → deepagents-0.2.8}/pyproject.toml +1 -4
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/__init__.py +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/__init__.py +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/utils.py +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/graph.py +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/middleware/__init__.py +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents/middleware/patch_tool_calls.py +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents.egg-info/SOURCES.txt +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents.egg-info/dependency_links.txt +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/deepagents.egg-info/top_level.txt +0 -0
- {deepagents-0.2.7 → deepagents-0.2.8}/setup.cfg +0 -0
{deepagents-0.2.7 → deepagents-0.2.8}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: deepagents
-Version: 0.2.7
+Version: 0.2.8
 Summary: General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph.
 License: MIT
 Project-URL: Homepage, https://docs.langchain.com/oss/python/deepagents/overview
@@ -15,9 +15,6 @@ Requires-Dist: langchain-anthropic<2.0.0,>=1.0.0
 Requires-Dist: langchain<2.0.0,>=1.0.2
 Requires-Dist: langchain-core<2.0.0,>=1.0.0
 Requires-Dist: wcmatch
-Requires-Dist: daytona>=0.113.0
-Requires-Dist: runloop-api-client>=0.66.1
-Requires-Dist: tavily>=1.1.0
 
 # 🧠🤖Deep Agents
 
@@ -27,7 +24,7 @@ This architecture, however, can yield agents that are “shallow” and fail to
 Applications like “Deep Research”, "Manus", and “Claude Code” have gotten around this limitation by implementing a combination of four things:
 a **planning tool**, **sub agents**, access to a **file system**, and a **detailed prompt**.
 
-<img src="deep_agents.png" alt="deep agent" width="600"/>
+<img src="../../deep_agents.png" alt="deep agent" width="600"/>
 
 `deepagents` is a Python package that implements these in a general purpose way so that you can easily create a Deep Agent for your application. For a full overview and quickstart of `deepagents`, the best resource is our [docs](https://docs.langchain.com/oss/python/deepagents/overview).
 
```

{deepagents-0.2.7 → deepagents-0.2.8}/README.md

```diff
@@ -6,7 +6,7 @@ This architecture, however, can yield agents that are “shallow” and fail to
 Applications like “Deep Research”, "Manus", and “Claude Code” have gotten around this limitation by implementing a combination of four things:
 a **planning tool**, **sub agents**, access to a **file system**, and a **detailed prompt**.
 
-<img src="deep_agents.png" alt="deep agent" width="600"/>
+<img src="../../deep_agents.png" alt="deep agent" width="600"/>
 
 `deepagents` is a Python package that implements these in a general purpose way so that you can easily create a Deep Agent for your application. For a full overview and quickstart of `deepagents`, the best resource is our [docs](https://docs.langchain.com/oss/python/deepagents/overview).
 
```

{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/composite.py

```diff
@@ -1,10 +1,14 @@
 """CompositeBackend: Route operations to different backends based on path prefix."""
 
+from collections import defaultdict
+
 from deepagents.backends.protocol import (
     BackendProtocol,
     EditResult,
     ExecuteResponse,
+    FileDownloadResponse,
     FileInfo,
+    FileUploadResponse,
     GrepMatch,
     SandboxBackendProtocol,
     WriteResult,
@@ -247,3 +251,86 @@ class CompositeBackend:
             "Default backend doesn't support command execution (SandboxBackendProtocol). "
             "To enable execution, provide a default backend that implements SandboxBackendProtocol."
         )
+
+    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Upload multiple files, batching by backend for efficiency.
+
+        Groups files by their target backend, calls each backend's upload_files
+        once with all files for that backend, then merges results in original order.
+
+        Args:
+            files: List of (path, content) tuples to upload.
+
+        Returns:
+            List of FileUploadResponse objects, one per input file.
+            Response order matches input order.
+        """
+        # Pre-allocate result list
+        results: list[FileUploadResponse | None] = [None] * len(files)
+
+        # Group files by backend, tracking original indices
+        from collections import defaultdict
+
+        backend_batches: dict[BackendProtocol, list[tuple[int, str, bytes]]] = defaultdict(list)
+
+        for idx, (path, content) in enumerate(files):
+            backend, stripped_path = self._get_backend_and_key(path)
+            backend_batches[backend].append((idx, stripped_path, content))
+
+        # Process each backend's batch
+        for backend, batch in backend_batches.items():
+            # Extract data for backend call
+            indices, stripped_paths, contents = zip(*batch, strict=False)
+            batch_files = list(zip(stripped_paths, contents, strict=False))
+
+            # Call backend once with all its files
+            batch_responses = backend.upload_files(batch_files)
+
+            # Place responses at original indices with original paths
+            for i, orig_idx in enumerate(indices):
+                results[orig_idx] = FileUploadResponse(
+                    path=files[orig_idx][0],  # Original path
+                    error=batch_responses[i].error if i < len(batch_responses) else None,
+                )
+
+        return results  # type: ignore[return-value]
+
+    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Download multiple files, batching by backend for efficiency.
+
+        Groups paths by their target backend, calls each backend's download_files
+        once with all paths for that backend, then merges results in original order.
+
+        Args:
+            paths: List of file paths to download.
+
+        Returns:
+            List of FileDownloadResponse objects, one per input path.
+            Response order matches input order.
+        """
+        # Pre-allocate result list
+        results: list[FileDownloadResponse | None] = [None] * len(paths)
+
+        backend_batches: dict[BackendProtocol, list[tuple[int, str]]] = defaultdict(list)
+
+        for idx, path in enumerate(paths):
+            backend, stripped_path = self._get_backend_and_key(path)
+            backend_batches[backend].append((idx, stripped_path))
+
+        # Process each backend's batch
+        for backend, batch in backend_batches.items():
+            # Extract data for backend call
+            indices, stripped_paths = zip(*batch, strict=False)
+
+            # Call backend once with all its paths
+            batch_responses = backend.download_files(list(stripped_paths))
+
+            # Place responses at original indices with original paths
+            for i, orig_idx in enumerate(indices):
+                results[orig_idx] = FileDownloadResponse(
+                    path=paths[orig_idx],  # Original path
+                    content=batch_responses[i].content if i < len(batch_responses) else None,
+                    error=batch_responses[i].error if i < len(batch_responses) else None,
+                )
+
+        return results  # type: ignore[return-value]
```

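The two new `CompositeBackend` methods follow a group-then-restore pattern: inputs are bucketed by the backend that owns their path prefix, each backend is called once with its whole batch, and responses are written back at their original indices so callers can still pair requests with results. A standalone sketch of that pattern (the routing prefix and backend labels here are made-up placeholders, not the package's configuration):

```python
from collections import defaultdict


def process_in_batches(paths: list[str], route_of) -> list[str]:
    """Group inputs by route, handle each group once, and restore input order."""
    results: list[str | None] = [None] * len(paths)
    batches: dict[str, list[tuple[int, str]]] = defaultdict(list)

    for idx, path in enumerate(paths):
        batches[route_of(path)].append((idx, path))

    for route, batch in batches.items():
        indices, members = zip(*batch)
        # One call per route, mirroring one backend.download_files() call per backend.
        handled = [f"{route}:{member}" for member in members]
        for i, orig_idx in enumerate(indices):
            results[orig_idx] = handled[i]

    return results  # same order as the input list


# Hypothetical routing: paths under /memories/ go to a store backend,
# everything else to the default backend.
print(process_in_batches(
    ["/a.txt", "/memories/b.txt", "/c.txt"],
    lambda p: "store" if p.startswith("/memories/") else "default",
))
```
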
{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/filesystem.py

```diff
@@ -16,7 +16,15 @@ from pathlib import Path
 
 import wcmatch.glob as wcglob
 
-from deepagents.backends.protocol import
+from deepagents.backends.protocol import (
+    BackendProtocol,
+    EditResult,
+    FileDownloadResponse,
+    FileInfo,
+    FileUploadResponse,
+    GrepMatch,
+    WriteResult,
+)
 from deepagents.backends.utils import (
     check_empty_content,
     format_content_with_line_numbers,
@@ -185,8 +193,6 @@ class FilesystemBackend(BackendProtocol):
         results.sort(key=lambda x: x.get("path", ""))
         return results
 
-    # Removed legacy ls() convenience to keep lean surface
-
     def read(
         self,
         file_path: str,
@@ -208,14 +214,9 @@ class FilesystemBackend(BackendProtocol):
 
         try:
             # Open with O_NOFOLLOW where available to avoid symlink traversal
-
-
-
-            content = f.read()
-        except OSError:
-            # Fallback to normal open if O_NOFOLLOW unsupported or fails
-            with open(resolved_path, encoding="utf-8") as f:
-                content = f.read()
+            fd = os.open(resolved_path, os.O_RDONLY | getattr(os, "O_NOFOLLOW", 0))
+            with os.fdopen(fd, "r", encoding="utf-8") as f:
+                content = f.read()
 
         empty_msg = check_empty_content(content)
         if empty_msg:
@@ -279,13 +280,9 @@ class FilesystemBackend(BackendProtocol):
 
         try:
             # Read securely
-
-
-
-            content = f.read()
-        except OSError:
-            with open(resolved_path, encoding="utf-8") as f:
-                content = f.read()
+            fd = os.open(resolved_path, os.O_RDONLY | getattr(os, "O_NOFOLLOW", 0))
+            with os.fdopen(fd, "r", encoding="utf-8") as f:
+                content = f.read()
 
         result = perform_string_replacement(content, old_string, new_string, replace_all)
 
@@ -481,3 +478,73 @@ class FilesystemBackend(BackendProtocol):
 
         results.sort(key=lambda x: x.get("path", ""))
         return results
+
+    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Upload multiple files to the filesystem.
+
+        Args:
+            files: List of (path, content) tuples where content is bytes.
+
+        Returns:
+            List of FileUploadResponse objects, one per input file.
+            Response order matches input order.
+        """
+        responses: list[FileUploadResponse] = []
+        for path, content in files:
+            try:
+                resolved_path = self._resolve_path(path)
+
+                # Create parent directories if needed
+                resolved_path.parent.mkdir(parents=True, exist_ok=True)
+
+                flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
+                if hasattr(os, "O_NOFOLLOW"):
+                    flags |= os.O_NOFOLLOW
+                fd = os.open(resolved_path, flags, 0o644)
+                with os.fdopen(fd, "wb") as f:
+                    f.write(content)
+
+                responses.append(FileUploadResponse(path=path, error=None))
+            except FileNotFoundError:
+                responses.append(FileUploadResponse(path=path, error="file_not_found"))
+            except PermissionError:
+                responses.append(FileUploadResponse(path=path, error="permission_denied"))
+            except (ValueError, OSError) as e:
+                # ValueError from _resolve_path for path traversal, OSError for other file errors
+                if isinstance(e, ValueError) or "invalid" in str(e).lower():
+                    responses.append(FileUploadResponse(path=path, error="invalid_path"))
+                else:
+                    # Generic error fallback
+                    responses.append(FileUploadResponse(path=path, error="invalid_path"))
+
+        return responses
+
+    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Download multiple files from the filesystem.
+
+        Args:
+            paths: List of file paths to download.
+
+        Returns:
+            List of FileDownloadResponse objects, one per input path.
+        """
+        responses: list[FileDownloadResponse] = []
+        for path in paths:
+            try:
+                resolved_path = self._resolve_path(path)
+                # Use flags to optionally prevent symlink following if
+                # supported by the OS
+                fd = os.open(resolved_path, os.O_RDONLY | getattr(os, "O_NOFOLLOW", 0))
+                with os.fdopen(fd, "rb") as f:
+                    content = f.read()
+                responses.append(FileDownloadResponse(path=path, content=content, error=None))
+            except FileNotFoundError:
+                responses.append(FileDownloadResponse(path=path, content=None, error="file_not_found"))
+            except PermissionError:
+                responses.append(FileDownloadResponse(path=path, content=None, error="permission_denied"))
+            except IsADirectoryError:
+                responses.append(FileDownloadResponse(path=path, content=None, error="is_directory"))
+            except ValueError:
+                responses.append(FileDownloadResponse(path=path, content=None, error="invalid_path"))
+            # Let other errors propagate
+        return responses
```

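Across the read, edit, and new download paths, `FilesystemBackend` now opens files through `os.open` with `O_NOFOLLOW` OR'd in via `getattr(os, "O_NOFOLLOW", 0)`, so the flag silently becomes a no-op on platforms that don't define it instead of falling back to a plain `open()`. A minimal sketch of the same pattern in isolation (the path is a placeholder):

```python
import os


def read_text_refusing_symlinks(path: str) -> str:
    # getattr(..., 0) degrades to "no extra flag" where O_NOFOLLOW is unavailable;
    # where it is available, opening a symlink raises OSError (ELOOP) instead of
    # silently following it.
    fd = os.open(path, os.O_RDONLY | getattr(os, "O_NOFOLLOW", 0))
    with os.fdopen(fd, "r", encoding="utf-8") as f:
        return f.read()


# Example (placeholder path):
# print(read_text_refusing_symlinks("/tmp/example.txt"))
```
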
{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/protocol.py

```diff
@@ -7,12 +7,83 @@ database, etc.) and provide a uniform interface for file operations.
 
 from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Any, Protocol, TypeAlias,
+from typing import Any, Literal, NotRequired, Protocol, TypeAlias, runtime_checkable
 
 from langchain.tools import ToolRuntime
+from typing_extensions import TypedDict
+
+FileOperationError = Literal[
+    "file_not_found",  # Download: file doesn't exist
+    "permission_denied",  # Both: access denied
+    "is_directory",  # Download: tried to download directory as file
+    "invalid_path",  # Both: path syntax malformed (parent dir missing, invalid chars)
+]
+"""Standardized error codes for file upload/download operations.
+
+These represent common, recoverable errors that an LLM can understand and potentially fix:
+- file_not_found: The requested file doesn't exist (download)
+- parent_not_found: The parent directory doesn't exist (upload)
+- permission_denied: Access denied for the operation
+- is_directory: Attempted to download a directory as a file
+- invalid_path: Path syntax is malformed or contains invalid characters
+"""
+
+
+@dataclass
+class FileDownloadResponse:
+    """Result of a single file download operation.
+
+    The response is designed to allow partial success in batch operations.
+    The errors are standardized using FileOperationError literals
+    for certain recoverable conditions for use cases that involve
+    LLMs performing file operations.
+
+    Attributes:
+        path: The file path that was requested. Included for easy correlation
+            when processing batch results, especially useful for error messages.
+        content: File contents as bytes on success, None on failure.
+        error: Standardized error code on failure, None on success.
+            Uses FileOperationError literal for structured, LLM-actionable error reporting.
+
+    Examples:
+        >>> # Success
+        >>> FileDownloadResponse(path="/app/config.json", content=b"{...}", error=None)
+        >>> # Failure
+        >>> FileDownloadResponse(path="/wrong/path.txt", content=None, error="file_not_found")
+    """
 
+    path: str
+    content: bytes | None = None
+    error: FileOperationError | None = None
+
+
+@dataclass
+class FileUploadResponse:
+    """Result of a single file upload operation.
+
+    The response is designed to allow partial success in batch operations.
+    The errors are standardized using FileOperationError literals
+    for certain recoverable conditions for use cases that involve
+    LLMs performing file operations.
+
+    Attributes:
+        path: The file path that was requested. Included for easy correlation
+            when processing batch results and for clear error messages.
+        error: Standardized error code on failure, None on success.
+            Uses FileOperationError literal for structured, LLM-actionable error reporting.
+
+    Examples:
+        >>> # Success
+        >>> FileUploadResponse(path="/app/data.txt", error=None)
+        >>> # Failure
+        >>> FileUploadResponse(path="/readonly/file.txt", error="permission_denied")
+    """
 
-class FileInfo(TypedDict, total=False):
+    path: str
+    error: FileOperationError | None = None
+
+
+class FileInfo(TypedDict):
     """Structured file listing info.
 
     Minimal contract used across backends. Only "path" is required.
@@ -20,9 +91,9 @@ class FileInfo(TypedDict, total=False):
     """
 
     path: str
-    is_dir: bool
-    size: int  # bytes (approx)
-    modified_at: str  # ISO timestamp if known
+    is_dir: NotRequired[bool]
+    size: NotRequired[int]  # bytes (approx)
+    modified_at: NotRequired[str]  # ISO timestamp if known
 
 
 class GrepMatch(TypedDict):
```

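With the switch from `total=False` to explicit `NotRequired` markers, `FileInfo` keeps `path` mandatory while the metadata keys stay optional, which type checkers can now enforce. For illustration (the values are made up):

```python
from deepagents.backends.protocol import FileInfo

minimal: FileInfo = {"path": "/notes/todo.md"}  # only "path" is required
detailed: FileInfo = {
    "path": "/notes/todo.md",
    "is_dir": False,
    "size": 128,                           # bytes (approx)
    "modified_at": "2025-01-01T00:00:00",  # ISO timestamp if known
}
```

The remaining protocol changes annotate the file-data docstring and declare the new batch methods on `BackendProtocol`:
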
```diff
@@ -94,9 +165,9 @@ class BackendProtocol(Protocol):
 
     All file data is represented as dicts with the following structure:
     {
-        "content": list[str],
-        "created_at": str,
-        "modified_at": str,
+        "content": list[str],  # Lines of text content
+        "created_at": str,  # ISO format timestamp
+        "modified_at": str,  # ISO format timestamp
     }
     """
 
@@ -144,6 +215,48 @@ class BackendProtocol(Protocol):
         """Edit a file by replacing string occurrences. Returns EditResult."""
         ...
 
+    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Upload multiple files to the sandbox.
+
+        This API is designed to allow developers to use it either directly or
+        by exposing it to LLMs via custom tools.
+
+        Args:
+            files: List of (path, content) tuples to upload.
+
+        Returns:
+            List of FileUploadResponse objects, one per input file.
+            Response order matches input order (response[i] for files[i]).
+            Check the error field to determine success/failure per file.
+
+        Examples:
+            ```python
+            responses = sandbox.upload_files(
+                [
+                    ("/app/config.json", b"{...}"),
+                    ("/app/data.txt", b"content"),
+                ]
+            )
+            ```
+        """
+        ...
+
+    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Download multiple files from the sandbox.
+
+        This API is designed to allow developers to use it either directly or
+        by exposing it to LLMs via custom tools.
+
+        Args:
+            paths: List of file paths to download.
+
+        Returns:
+            List of FileDownloadResponse objects, one per input path.
+            Response order matches input order (response[i] for paths[i]).
+            Check the error field to determine success/failure per file.
+        """
+        ...
+
 
 @dataclass
 class ExecuteResponse:
@@ -188,7 +301,7 @@ class SandboxBackendProtocol(BackendProtocol, Protocol):
 
     @property
     def id(self) -> str:
-        """Unique identifier for the sandbox backend."""
+        """Unique identifier for the sandbox backend instance."""
         ...
 
 
```

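Because every batch method returns exactly one response per input, in input order, callers can pair requests with results using `zip` and branch on the `error` field, which is always one of the `FileOperationError` codes. A small consumption sketch using responses constructed by hand (no backend involved):

```python
from deepagents.backends.protocol import FileDownloadResponse

requested = ["/app/config.json", "/wrong/path.txt"]
# Shaped like what a backend's download_files(requested) would return.
responses = [
    FileDownloadResponse(path="/app/config.json", content=b"{}", error=None),
    FileDownloadResponse(path="/wrong/path.txt", content=None, error="file_not_found"),
]

for path, resp in zip(requested, responses):
    if resp.error is None:
        print(f"{path}: {len(resp.content or b'')} bytes")
    else:
        # resp.error is an LLM-readable code such as "file_not_found".
        print(f"{path}: failed ({resp.error})")
```
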
{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/sandbox.py

```diff
@@ -14,7 +14,9 @@ from abc import ABC, abstractmethod
 from deepagents.backends.protocol import (
     EditResult,
     ExecuteResponse,
+    FileDownloadResponse,
     FileInfo,
+    FileUploadResponse,
     GrepMatch,
     SandboxBackendProtocol,
     WriteResult,
@@ -338,4 +340,20 @@ except PermissionError:
     @property
     @abstractmethod
     def id(self) -> str:
-        """Unique identifier for
+        """Unique identifier for the sandbox backend."""
+
+    @abstractmethod
+    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Upload multiple files to the sandbox.
+
+        Implementations must support partial success - catch exceptions per-file
+        and return errors in FileUploadResponse objects rather than raising.
+        """
+
+    @abstractmethod
+    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Download multiple files from the sandbox.
+
+        Implementations must support partial success - catch exceptions per-file
+        and return errors in FileDownloadResponse objects rather than raising.
+        """
```

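The abstract methods pin down a partial-success contract for concrete sandbox backends: errors are caught per file and surfaced through the response objects rather than raised, so one bad path never sinks the whole batch. A toy, purely hypothetical in-memory implementation of just that contract (not part of the package):

```python
from deepagents.backends.protocol import FileDownloadResponse, FileUploadResponse


class InMemorySandboxFiles:
    """Illustrative stand-in: per-file errors are reported, never raised."""

    def __init__(self) -> None:
        self._files: dict[str, bytes] = {}

    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
        responses: list[FileUploadResponse] = []
        for path, content in files:
            if not path.startswith("/"):
                responses.append(FileUploadResponse(path=path, error="invalid_path"))
                continue
            self._files[path] = content
            responses.append(FileUploadResponse(path=path, error=None))
        return responses

    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
        responses: list[FileDownloadResponse] = []
        for path in paths:
            content = self._files.get(path)
            if content is None:
                responses.append(FileDownloadResponse(path=path, content=None, error="file_not_found"))
            else:
                responses.append(FileDownloadResponse(path=path, content=content, error=None))
        return responses
```
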
{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/state.py

```diff
@@ -90,8 +90,6 @@ class StateBackend(BackendProtocol):
         infos.sort(key=lambda x: x.get("path", ""))
         return infos
 
-    # Removed legacy ls() convenience to keep lean surface
-
     def read(
         self,
         file_path: str,
@@ -156,8 +154,6 @@ class StateBackend(BackendProtocol):
         new_file_data = update_file_data(file_data, new_content)
         return EditResult(path=file_path, files_update={file_path: new_file_data}, occurrences=int(occurrences))
 
-    # Removed legacy grep() convenience to keep lean surface
-
     def grep_raw(
         self,
         pattern: str,
@@ -168,6 +164,7 @@ class StateBackend(BackendProtocol):
         return grep_matches_from_files(files, pattern, path, glob)
 
     def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+        """Get FileInfo for files matching glob pattern."""
         files = self.runtime.state.get("files", {})
         result = _glob_search_files(files, pattern, path)
         if result == "No files found":
@@ -186,6 +183,3 @@ class StateBackend(BackendProtocol):
             }
         )
         return infos
-
-
-# Provider classes removed: prefer callables like `lambda rt: StateBackend(rt)`
```

{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/backends/store.py

```diff
@@ -5,7 +5,15 @@ from typing import Any
 from langgraph.config import get_config
 from langgraph.store.base import BaseStore, Item
 
-from deepagents.backends.protocol import
+from deepagents.backends.protocol import (
+    BackendProtocol,
+    EditResult,
+    FileDownloadResponse,
+    FileInfo,
+    FileUploadResponse,
+    GrepMatch,
+    WriteResult,
+)
 from deepagents.backends.utils import (
     _glob_search_files,
     create_file_data,
@@ -240,8 +248,6 @@ class StoreBackend(BackendProtocol):
         infos.sort(key=lambda x: x.get("path", ""))
         return infos
 
-    # Removed legacy ls() convenience to keep lean surface
-
     def read(
         self,
         file_path: str,
@@ -376,3 +382,59 @@ class StoreBackend(BackendProtocol):
             }
         )
         return infos
+
+    def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]:
+        """Upload multiple files to the store.
+
+        Args:
+            files: List of (path, content) tuples where content is bytes.
+
+        Returns:
+            List of FileUploadResponse objects, one per input file.
+            Response order matches input order.
+        """
+        store = self._get_store()
+        namespace = self._get_namespace()
+        responses: list[FileUploadResponse] = []
+
+        for path, content in files:
+            content_str = content.decode("utf-8")
+            # Create file data
+            file_data = create_file_data(content_str)
+            store_value = self._convert_file_data_to_store_value(file_data)
+
+            # Store the file
+            store.put(namespace, path, store_value)
+            responses.append(FileUploadResponse(path=path, error=None))
+
+        return responses
+
+    def download_files(self, paths: list[str]) -> list[FileDownloadResponse]:
+        """Download multiple files from the store.
+
+        Args:
+            paths: List of file paths to download.
+
+        Returns:
+            List of FileDownloadResponse objects, one per input path.
+            Response order matches input order.
+        """
+        store = self._get_store()
+        namespace = self._get_namespace()
+        responses: list[FileDownloadResponse] = []
+
+        for path in paths:
+            item = store.get(namespace, path)
+
+            if item is None:
+                responses.append(FileDownloadResponse(path=path, content=None, error="file_not_found"))
+                continue
+
+            file_data = self._convert_store_item_to_file_data(item)
+            # Convert file data to bytes
+            content_str = file_data_to_string(file_data)
+            content_bytes = content_str.encode("utf-8")
+
+            responses.append(FileDownloadResponse(path=path, content=content_bytes, error=None))
+
+        return responses
```

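One detail worth noting in `StoreBackend`: uploads are decoded with `content.decode("utf-8")` and downloads re-encoded with `encode("utf-8")`, so file contents round-trip through text. As written, a payload that is not valid UTF-8 would raise `UnicodeDecodeError` rather than come back as an error response. A quick standalone illustration of that constraint:

```python
text_payload = "hello, store".encode("utf-8")  # decodes and re-encodes cleanly
binary_payload = b"\x89PNG\r\n\x1a\n"          # PNG magic bytes, not valid UTF-8

print(text_payload.decode("utf-8"))            # -> "hello, store"

try:
    binary_payload.decode("utf-8")             # what upload_files() does internally
except UnicodeDecodeError as exc:
    print(f"binary upload would fail here: {exc}")
```
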
{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/middleware/filesystem.py

```diff
@@ -2,6 +2,7 @@
 # ruff: noqa: E501
 
 import os
+import re
 from collections.abc import Awaitable, Callable, Sequence
 from typing import Annotated, Literal, NotRequired
 
@@ -92,12 +93,16 @@ def _file_data_reducer(left: dict[str, FileData] | None, right: dict[str, FileDa
 
 
 def _validate_path(path: str, *, allowed_prefixes: Sequence[str] | None = None) -> str:
-    """Validate and normalize file path for security.
+    r"""Validate and normalize file path for security.
 
     Ensures paths are safe to use by preventing directory traversal attacks
     and enforcing consistent formatting. All paths are normalized to use
     forward slashes and start with a leading slash.
 
+    This function is designed for virtual filesystem paths and rejects
+    Windows absolute paths (e.g., C:/..., F:/...) to maintain consistency
+    and prevent path format ambiguity.
+
     Args:
         path: The path to validate and normalize.
         allowed_prefixes: Optional list of allowed path prefixes. If provided,
@@ -107,14 +112,16 @@ def _validate_path(path: str, *, allowed_prefixes: Sequence[str] | None = None)
         Normalized canonical path starting with `/` and using forward slashes.
 
     Raises:
-        ValueError: If path contains traversal sequences (`..` or `~`)
-
+        ValueError: If path contains traversal sequences (`..` or `~`), is a
+            Windows absolute path (e.g., C:/...), or does not start with an
+            allowed prefix when `allowed_prefixes` is specified.
 
     Example:
         ```python
         validate_path("foo/bar")  # Returns: "/foo/bar"
         validate_path("/./foo//bar")  # Returns: "/foo/bar"
         validate_path("../etc/passwd")  # Raises ValueError
+        validate_path(r"C:\\Users\\file.txt")  # Raises ValueError
         validate_path("/data/file.txt", allowed_prefixes=["/data/"])  # OK
         validate_path("/etc/file.txt", allowed_prefixes=["/data/"])  # Raises ValueError
         ```
@@ -123,6 +130,12 @@ def _validate_path(path: str, *, allowed_prefixes: Sequence[str] | None = None)
         msg = f"Path traversal not allowed: {path}"
         raise ValueError(msg)
 
+    # Reject Windows absolute paths (e.g., C:\..., D:/...)
+    # This maintains consistency in virtual filesystem paths
+    if re.match(r"^[a-zA-Z]:", path):
+        msg = f"Windows absolute paths are not supported: {path}. Please use virtual paths starting with / (e.g., /workspace/file.txt)"
+        raise ValueError(msg)
+
     normalized = os.path.normpath(path)
     normalized = normalized.replace("\\", "/")
 
@@ -313,11 +326,13 @@ def _ls_tool_generator(
     tool_description = custom_description or LIST_FILES_TOOL_DESCRIPTION
 
     @tool(description=tool_description)
-    def ls(runtime: ToolRuntime[None, FilesystemState], path: str) ->
+    def ls(runtime: ToolRuntime[None, FilesystemState], path: str) -> str:
         resolved_backend = _get_backend(backend, runtime)
         validated_path = _validate_path(path)
         infos = resolved_backend.ls_info(validated_path)
-
+        paths = [fi.get("path", "") for fi in infos]
+        result = truncate_if_too_long(paths)
+        return str(result)
 
     return ls
 
@@ -457,10 +472,12 @@ def _glob_tool_generator(
     tool_description = custom_description or GLOB_TOOL_DESCRIPTION
 
     @tool(description=tool_description)
-    def glob(pattern: str, runtime: ToolRuntime[None, FilesystemState], path: str = "/") ->
+    def glob(pattern: str, runtime: ToolRuntime[None, FilesystemState], path: str = "/") -> str:
         resolved_backend = _get_backend(backend, runtime)
         infos = resolved_backend.glob_info(pattern, path=path)
-
+        paths = [fi.get("path", "") for fi in infos]
+        result = truncate_if_too_long(paths)
+        return str(result)
 
     return glob
 
```

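Taken together with the existing traversal check, `_validate_path` now normalizes virtual paths and rejects both `..`/`~` sequences and Windows drive-letter paths. A simplified stand-in (illustration only, not the package's internal helper) that reproduces the behaviour described in the docstring examples:

```python
import os
import re


def validate_virtual_path(path: str) -> str:
    """Simplified illustration of the documented checks."""
    if ".." in path or path.startswith("~"):
        raise ValueError(f"Path traversal not allowed: {path}")
    if re.match(r"^[a-zA-Z]:", path):  # e.g. C:\..., D:/...
        raise ValueError(f"Windows absolute paths are not supported: {path}")
    normalized = os.path.normpath(path).replace("\\", "/")
    return normalized if normalized.startswith("/") else "/" + normalized


print(validate_virtual_path("foo/bar"))      # /foo/bar
print(validate_virtual_path("/./foo//bar"))  # /foo/bar
try:
    validate_virtual_path(r"C:\Users\file.txt")
except ValueError as exc:
    print(exc)
```
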
{deepagents-0.2.7 → deepagents-0.2.8}/deepagents/middleware/subagents.py

```diff
@@ -468,7 +468,8 @@ class SubAgentMiddleware(AgentMiddleware):
     ) -> ModelResponse:
         """Update the system prompt to include instructions on using subagents."""
         if self.system_prompt is not None:
-
+            system_prompt = request.system_prompt + "\n\n" + self.system_prompt if request.system_prompt else self.system_prompt
+            return handler(request.override(system_prompt=system_prompt))
         return handler(request)
 
     async def awrap_model_call(
@@ -478,5 +479,6 @@ class SubAgentMiddleware(AgentMiddleware):
     ) -> ModelResponse:
         """(async) Update the system prompt to include instructions on using subagents."""
         if self.system_prompt is not None:
-
+            system_prompt = request.system_prompt + "\n\n" + self.system_prompt if request.system_prompt else self.system_prompt
+            return await handler(request.override(system_prompt=system_prompt))
         return await handler(request)
```

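The middleware change makes the subagent instructions additive: when the request already carries a system prompt, the subagent prompt is appended after a blank line; otherwise it is used on its own. The combination rule in isolation (the prompt strings are placeholders):

```python
def combine_system_prompts(existing: str | None, subagent_prompt: str) -> str:
    # Mirrors: request.system_prompt + "\n\n" + self.system_prompt
    #          if request.system_prompt else self.system_prompt
    return existing + "\n\n" + subagent_prompt if existing else subagent_prompt


print(combine_system_prompts("You are a research agent.", "Delegate large sub-tasks via the task tool."))
print(combine_system_prompts(None, "Delegate large sub-tasks via the task tool."))
```
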
{deepagents-0.2.7 → deepagents-0.2.8}/deepagents.egg-info/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: deepagents
-Version: 0.2.7
+Version: 0.2.8
 Summary: General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph.
 License: MIT
 Project-URL: Homepage, https://docs.langchain.com/oss/python/deepagents/overview
@@ -15,9 +15,6 @@ Requires-Dist: langchain-anthropic<2.0.0,>=1.0.0
 Requires-Dist: langchain<2.0.0,>=1.0.2
 Requires-Dist: langchain-core<2.0.0,>=1.0.0
 Requires-Dist: wcmatch
-Requires-Dist: daytona>=0.113.0
-Requires-Dist: runloop-api-client>=0.66.1
-Requires-Dist: tavily>=1.1.0
 
 # 🧠🤖Deep Agents
 
@@ -27,7 +24,7 @@ This architecture, however, can yield agents that are “shallow” and fail to
 Applications like “Deep Research”, "Manus", and “Claude Code” have gotten around this limitation by implementing a combination of four things:
 a **planning tool**, **sub agents**, access to a **file system**, and a **detailed prompt**.
 
-<img src="deep_agents.png" alt="deep agent" width="600"/>
+<img src="../../deep_agents.png" alt="deep agent" width="600"/>
 
 `deepagents` is a Python package that implements these in a general purpose way so that you can easily create a Deep Agent for your application. For a full overview and quickstart of `deepagents`, the best resource is our [docs](https://docs.langchain.com/oss/python/deepagents/overview).
 
```

{deepagents-0.2.7 → deepagents-0.2.8}/pyproject.toml

```diff
@@ -1,6 +1,6 @@
 [project]
 name = "deepagents"
-version = "0.2.7"
+version = "0.2.8"
 description = "General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph."
 readme = "README.md"
 license = { text = "MIT" }
@@ -10,9 +10,6 @@ dependencies = [
     "langchain>=1.0.2,<2.0.0",
     "langchain-core>=1.0.0,<2.0.0",
     "wcmatch",
-    "daytona>=0.113.0",
-    "runloop-api-client>=0.66.1",
-    "tavily>=1.1.0",
 ]
 
 
```
