deepagents-cli 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of deepagents-cli might be problematic. Click here for more details.

@@ -62,30 +62,3 @@ When referencing code, use format: `file_path:line_number`
62
62
  - Do NOT create excessive markdown summary/documentation files after completing work
63
63
  - Focus on the work itself, not documenting what you did
64
64
  - Only create documentation when explicitly requested
65
-
66
- ## Long-term Memory
67
- You have access to a long-term memory system using the `/memories/` path prefix.
68
- Files stored in `/memories/` persist across sessions and are stored in your agent directory.
69
-
70
- Your system prompt is loaded from `/memories/agent.md` at startup. You can update your own instructions by editing this file.
71
-
72
- **When to update memories:**
73
- - **IMMEDIATELY when the user describes your role or how you should behave** (e.g., "you are a web researcher", "you are an expert in X")
74
- - **IMMEDIATELY when the user gives feedback on your work** - Before continuing, update memories to capture what was wrong and how to do it better
75
- - When the user explicitly asks you to remember something
76
- - When patterns or preferences emerge (coding styles, conventions, workflows)
77
- - After significant work where context would help in future sessions
78
-
79
- **Learning from feedback:**
80
- - When user says something is better/worse, capture WHY and encode it as a pattern
81
- - Each correction is a chance to improve permanently - don't just fix the immediate issue, update your instructions
82
-
83
- **What to store where:**
84
- - **`/memories/agent.md`**: Update this to modify your core instructions and behavioral patterns
85
- - **Other `/memories/` files**: Use for project-specific context, reference information, or structured notes
86
- - If you create additional memory files, add references to them in `/memories/agent.md` so you remember to consult them
87
-
88
- The portion of your system prompt that comes from `/memories/agent.md` is marked with `<agent_memory>` tags so you can identify what instructions come from your persistent memory.
89
-
90
- Example: `edit_file('/memories/agent.md', ...)` to update your instructions
91
- Example: `write_file('/memories/project_context.md', ...)` for project-specific notes, then reference it in agent.md
deepagents/graph.py CHANGED
@@ -2,7 +2,6 @@
2
2
 
3
3
  from collections.abc import Callable, Sequence
4
4
  from typing import Any
5
- import os
6
5
 
7
6
  from langchain.agents import create_agent
8
7
  from langchain.agents.middleware import HumanInTheLoopMiddleware, InterruptOnConfig, TodoListMiddleware
@@ -11,7 +10,6 @@ from langchain.agents.middleware.types import AgentMiddleware
11
10
  from langchain.agents.structured_output import ResponseFormat
12
11
  from langchain_anthropic import ChatAnthropic
13
12
  from langchain_anthropic.middleware import AnthropicPromptCachingMiddleware
14
- from langchain.agents.middleware import ShellToolMiddleware, HostExecutionPolicy
15
13
  from langchain_core.language_models import BaseChatModel
16
14
  from langchain_core.tools import BaseTool
17
15
  from langgraph.cache.base import BaseCache
@@ -19,8 +17,9 @@ from langgraph.graph.state import CompiledStateGraph
19
17
  from langgraph.store.base import BaseStore
20
18
  from langgraph.types import Checkpointer
21
19
 
20
+ from deepagents.memory.protocol import MemoryBackend
22
21
  from deepagents.middleware.filesystem import FilesystemMiddleware
23
- from deepagents.middleware.local_filesystem import LocalFilesystemMiddleware
22
+ from deepagents.middleware.patch_tool_calls import PatchToolCallsMiddleware
24
23
  from deepagents.middleware.subagents import CompiledSubAgent, SubAgent, SubAgentMiddleware
25
24
 
26
25
  BASE_AGENT_PROMPT = "In order to complete the objective that the user asks of you, you have access to a number of standard tools."
@@ -49,10 +48,7 @@ def create_deep_agent(
49
48
  context_schema: type[Any] | None = None,
50
49
  checkpointer: Checkpointer | None = None,
51
50
  store: BaseStore | None = None,
52
- use_longterm_memory: bool = False,
53
- use_local_filesystem: bool = False,
54
- long_term_memory: bool = False,
55
- skills: list[dict[str, Any]] | None = None,
51
+ memory_backend: MemoryBackend | None = None,
56
52
  interrupt_on: dict[str, bool | InterruptOnConfig] | None = None,
57
53
  debug: bool = False,
58
54
  name: str | None = None,
@@ -61,15 +57,15 @@ def create_deep_agent(
61
57
  """Create a deep agent.
62
58
 
63
59
  This agent will by default have access to a tool to write todos (write_todos),
64
- four file editing tools: write_file, ls, read_file, edit_file, and a tool to call
65
- subagents.
60
+ six file editing tools: write_file, ls, read_file, edit_file, glob_search, grep_search,
61
+ and a tool to call subagents.
66
62
 
67
63
  Args:
64
+ model: The model to use. Defaults to Claude Sonnet 4.
68
65
  tools: The tools the agent should have access to.
69
66
  system_prompt: The additional instructions the agent should have. Will go in
70
67
  the system prompt.
71
68
  middleware: Additional middleware to apply after standard middleware.
72
- model: The model to use.
73
69
  subagents: The subagents to use. Each subagent should be a dictionary with the
74
70
  following keys:
75
71
  - `name`
@@ -83,17 +79,10 @@ def create_deep_agent(
83
79
  response_format: A structured output response format to use for the agent.
84
80
  context_schema: The schema of the deep agent.
85
81
  checkpointer: Optional checkpointer for persisting agent state between runs.
86
- store: Optional store for persisting longterm memories.
87
- use_longterm_memory: Whether to use longterm memory - you must provide a store
88
- in order to use longterm memory.
89
- use_local_filesystem: If True, injects LocalFilesystemMiddleware (tools operate on disk).
90
- When True, longterm memory is not supported and `use_longterm_memory` must be False.
91
- Skills are automatically discovered from ~/.deepagents/skills/ and ./.deepagents/skills/.
92
- The agent_name for memory storage can be passed via config: {"configurable": {"agent_name": "myagent"}}.
93
- long_term_memory: If True, enables long-term memory features like agent.md persistence
94
- and memories folder. Only applies when use_local_filesystem=True.
95
- skills: Optional list of SkillDefinition for virtual filesystem mode. Only valid when
96
- use_local_filesystem=False. Skills are loaded into /skills/<name>/ in virtual filesystem.
82
+ store: Optional store for persistent storage (required if memory_backend uses StoreBackend).
83
+ memory_backend: Optional backend for file storage. Defaults to StateBackend (ephemeral
84
+ storage in agent state). For persistent or hybrid storage, use CompositeBackend.
85
+ Example: CompositeBackend(default=StateBackend(), routes={"/memories/": StoreBackend()})
97
86
  interrupt_on: Optional Dict[str, bool | InterruptOnConfig] mapping tool names to
98
87
  interrupt configs.
99
88
  debug: Whether to enable debug mode. Passed through to create_agent.
@@ -106,38 +95,24 @@ def create_deep_agent(
106
95
  if model is None:
107
96
  model = get_default_model()
108
97
 
109
- if use_local_filesystem and skills is not None:
110
- raise ValueError(
111
- "Cannot provide skill definitions with use_local_filesystem=True. "
112
- "Skills are automatically discovered from ~/.deepagents/skills/ and ./.deepagents/skills/. "
113
- "To use custom skills, set use_local_filesystem=False."
114
- )
115
-
116
- # Choose filesystem middleware kind
117
- def _fs_middleware() -> list[AgentMiddleware]:
118
- if use_local_filesystem:
119
- shell_middleware = ShellToolMiddleware(
120
- workspace_root=os.getcwd(),
121
- execution_policy=HostExecutionPolicy()
122
- )
123
- return [LocalFilesystemMiddleware(long_term_memory=long_term_memory), shell_middleware]
124
- return [FilesystemMiddleware(long_term_memory=use_longterm_memory, skills=skills)]
125
-
126
98
  deepagent_middleware = [
127
99
  TodoListMiddleware(),
100
+ FilesystemMiddleware(memory_backend=memory_backend),
128
101
  SubAgentMiddleware(
129
102
  default_model=model,
130
103
  default_tools=tools,
131
104
  subagents=subagents if subagents is not None else [],
132
105
  default_middleware=[
133
106
  TodoListMiddleware(),
107
+ FilesystemMiddleware(memory_backend=memory_backend),
134
108
  SummarizationMiddleware(
135
109
  model=model,
136
110
  max_tokens_before_summary=170000,
137
111
  messages_to_keep=6,
138
112
  ),
139
113
  AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore"),
140
- ] + _fs_middleware(),
114
+ PatchToolCallsMiddleware(),
115
+ ],
141
116
  default_interrupt_on=interrupt_on,
142
117
  general_purpose_agent=True,
143
118
  ),
@@ -147,7 +122,8 @@ def create_deep_agent(
147
122
  messages_to_keep=6,
148
123
  ),
149
124
  AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore"),
150
- ] + _fs_middleware()
125
+ PatchToolCallsMiddleware(),
126
+ ]
151
127
  if interrupt_on is not None:
152
128
  deepagent_middleware.append(HumanInTheLoopMiddleware(interrupt_on=interrupt_on))
153
129
  if middleware is not None:
@@ -0,0 +1,17 @@
1
+ """Memory backends for pluggable file storage."""
2
+
3
+ from deepagents.memory.backends import (
4
+ CompositeBackend,
5
+ FilesystemBackend,
6
+ StateBackend,
7
+ StoreBackend,
8
+ )
9
+ from deepagents.memory.protocol import MemoryBackend
10
+
11
+ __all__ = [
12
+ "MemoryBackend",
13
+ "CompositeBackend",
14
+ "FilesystemBackend",
15
+ "StateBackend",
16
+ "StoreBackend",
17
+ ]
@@ -0,0 +1,15 @@
1
+ """Backend implementations for pluggable memory storage."""
2
+
3
+ from deepagents.memory.backends.composite import CompositeBackend
4
+ from deepagents.memory.backends.filesystem import FilesystemBackend
5
+ from deepagents.memory.backends.state import StateBackend
6
+ from deepagents.memory.backends.store import StoreBackend
7
+ from deepagents.memory.backends import utils
8
+
9
+ __all__ = [
10
+ "CompositeBackend",
11
+ "FilesystemBackend",
12
+ "StateBackend",
13
+ "StoreBackend",
14
+ "utils",
15
+ ]
@@ -0,0 +1,250 @@
1
+ """CompositeBackend: Route operations to different backends based on path prefix."""
2
+
3
+ from typing import Any, Literal, Optional, TYPE_CHECKING
4
+
5
+ if TYPE_CHECKING:
6
+ from langchain.tools import ToolRuntime
7
+
8
+ from deepagents.memory.protocol import MemoryBackend
9
+ from langgraph.types import Command
10
+
11
+
12
+ class CompositeBackend:
13
+ """Backend that routes operations to different backends based on path prefix.
14
+
15
+ This backend enables hybrid storage strategies, such as:
16
+ - Short-term files (/*) → StateBackend (ephemeral)
17
+ - Long-term files (/memories/*) → StoreBackend or FilesystemBackend (persistent)
18
+
19
+ The routing is transparent to tools - they just call backend.get(path) and
20
+ CompositeBackend handles the routing internally.
21
+
22
+ Example:
23
+ ```python
24
+ # Create a factory function that returns CompositeBackend with resolved backends
25
+ backend_factory = lambda runtime: CompositeBackend(
26
+ default=StateBackend(runtime), # StateBackend needs runtime
27
+ routes={"/memories/": FilesystemBackend("/data/memories")} # FilesystemBackend doesn't
28
+ )
29
+
30
+ # Then pass this factory to the middleware
31
+ middleware = FilesystemMiddleware(memory_backend=backend_factory)
32
+ ```
33
+ """
34
+
35
+ def __init__(
36
+ self,
37
+ default: MemoryBackend,
38
+ routes: dict[str, MemoryBackend],
39
+ ) -> None:
40
+ """Initialize composite backend with routing rules.
41
+
42
+ Args:
43
+ default: Default backend for paths that don't match any route (must be resolved backend instance).
44
+ routes: Dict mapping path prefixes to backends (must be resolved backend instances).
45
+ Keys should include trailing slash (e.g., "/memories/").
46
+
47
+ Note: If you need backends that require runtime (like StateBackend), wrap the CompositeBackend
48
+ itself in a factory function:
49
+ lambda runtime: CompositeBackend(
50
+ default=StateBackend(runtime),
51
+ routes={"/memories/": FilesystemBackend("./data")}
52
+ )
53
+ """
54
+ self.default = default
55
+ self.routes = routes
56
+
57
+ # Sort routes by length (longest first) for correct prefix matching
58
+ self.sorted_routes = sorted(routes.items(), key=lambda x: len(x[0]), reverse=True)
59
+
60
+ def _get_backend_and_key(self, key: str) -> tuple[MemoryBackend, str]:
61
+ """Determine which backend handles this key and strip prefix.
62
+
63
+ Args:
64
+ key: Original file path
65
+
66
+ Returns:
67
+ Tuple of (backend, stripped_key) where stripped_key has the route
68
+ prefix removed (but keeps leading slash).
69
+ """
70
+ # Check routes in order of length (longest first)
71
+ for prefix, backend in self.sorted_routes:
72
+ if key.startswith(prefix):
73
+ # Strip prefix but keep leading slash
74
+ # e.g., "/memories/notes.txt" → "/notes.txt"
75
+ stripped_key = key[len(prefix) - 1:] if key[len(prefix) - 1:] else "/"
76
+ return backend, stripped_key
77
+
78
+ return self.default, key
79
+
80
+ def ls(self, path: str) -> list[str]:
81
+ """List files from backends, with appropriate prefixes.
82
+
83
+ Args:
84
+ path: Absolute path to directory.
85
+
86
+ Returns:
87
+ List of file paths with route prefixes added.
88
+ """
89
+ # Check if path matches a specific route
90
+ for route_prefix, backend in self.sorted_routes:
91
+ if path.startswith(route_prefix.rstrip("/")):
92
+ # Query only the matching routed backend
93
+ search_path = path[len(route_prefix) - 1:]
94
+ keys = backend.ls(search_path if search_path else "/")
95
+ return [f"{route_prefix[:-1]}{key}" for key in keys]
96
+
97
+ # Path doesn't match a route: query only default backend
98
+ return self.default.ls(path)
99
+
100
+ def read(
101
+ self,
102
+ file_path: str,
103
+ offset: int = 0,
104
+ limit: int = 2000,
105
+ ) -> str:
106
+ """Read file content, routing to appropriate backend.
107
+
108
+ Args:
109
+ file_path: Absolute file path
110
+ offset: Line offset to start reading from (0-indexed)
111
+ limit: Maximum number of lines to read
+ Returns:
112
+ Formatted file content with line numbers, or error message.
113
+ """
114
+ backend, stripped_key = self._get_backend_and_key(file_path)
115
+ return backend.read(stripped_key, offset=offset, limit=limit)
116
+
117
+ def write(
118
+ self,
119
+ file_path: str,
120
+ content: str,
121
+ ) -> Command | str:
122
+ """Create a new file, routing to appropriate backend.
123
+
124
+ Args:
125
+ file_path: Absolute file path
126
+ content: File content as a string
+ Returns:
127
+ Success message or Command object, or error if file already exists.
128
+ """
129
+ backend, stripped_key = self._get_backend_and_key(file_path)
130
+ return backend.write(stripped_key, content)
131
+
132
+ def edit(
133
+ self,
134
+ file_path: str,
135
+ old_string: str,
136
+ new_string: str,
137
+ replace_all: bool = False,
138
+ ) -> Command | str:
139
+ """Edit a file, routing to appropriate backend.
140
+
141
+ Args:
142
+ file_path: Absolute file path
143
+ old_string: String to find and replace
144
+ new_string: Replacement string
145
+ replace_all: If True, replace all occurrences
+ Returns:
146
+ Success message or Command object, or error message on failure.
147
+ """
148
+ backend, stripped_key = self._get_backend_and_key(file_path)
149
+ return backend.edit(stripped_key, old_string, new_string, replace_all=replace_all)
150
+
151
+ def delete(self, file_path: str) -> Command | None:
152
+ """Delete file, routing to appropriate backend.
153
+
154
+ Args:
155
+ file_path: File path to delete
+ Returns:
156
+ Return value from backend (None or Command).
157
+ """
158
+ backend, stripped_key = self._get_backend_and_key(file_path)
159
+ return backend.delete(stripped_key)
160
+
161
+ def grep(
162
+ self,
163
+ pattern: str,
164
+ path: str = "/",
165
+ glob: Optional[str] = None,
166
+ output_mode: str = "files_with_matches",
167
+ ) -> str:
168
+ """Search for a pattern in files, routing to appropriate backend(s).
169
+
170
+ Args:
171
+ pattern: String pattern to search for
172
+ path: Path to search in (default "/")
173
+ glob: Optional glob pattern to filter files (e.g., "*.py")
174
+ output_mode: Output format - "files_with_matches", "content", or "count"
+ Returns:
175
+ Formatted search results based on output_mode.
176
+ """
177
+ for route_prefix, backend in self.sorted_routes:
178
+ if path.startswith(route_prefix.rstrip("/")):
179
+ search_path = path[len(route_prefix) - 1:]
180
+ result = backend.grep(pattern, search_path if search_path else "/", glob, output_mode)
181
+ if result.startswith("No matches found"):
182
+ return result
183
+
184
+ lines = result.split("\n")
185
+ prefixed_lines = []
186
+ for line in lines:
187
+ if output_mode == "files_with_matches" or line.endswith(":") or ": " in line.split(":", 1)[0]:
188
+ if line and not line.startswith(" "):
189
+ prefixed_lines.append(f"{route_prefix[:-1]}{line}")
190
+ else:
191
+ prefixed_lines.append(line)
192
+ else:
193
+ prefixed_lines.append(line)
194
+ return "\n".join(prefixed_lines)
195
+
196
+ all_results = []
197
+
198
+ default_result = self.default.grep(pattern, path, glob, output_mode)
199
+ if not default_result.startswith("No matches found"):
200
+ all_results.append(default_result)
201
+
202
+ for route_prefix, backend in self.routes.items():
203
+ result = backend.grep(pattern, "/", glob, output_mode)
204
+ if not result.startswith("No matches found"):
205
+ lines = result.split("\n")
206
+ prefixed_lines = []
207
+ for line in lines:
208
+ if output_mode == "files_with_matches" or line.endswith(":") or (": " in line and not line.startswith(" ")):
209
+ if line and not line.startswith(" "):
210
+ prefixed_lines.append(f"{route_prefix[:-1]}{line}")
211
+ else:
212
+ prefixed_lines.append(line)
213
+ else:
214
+ prefixed_lines.append(line)
215
+ all_results.append("\n".join(prefixed_lines))
216
+
217
+ if not all_results:
218
+ return f"No matches found for pattern: '{pattern}'"
219
+
220
+ return "\n".join(all_results)
221
+
222
+ def glob(self, pattern: str, path: str = "/") -> list[str]:
223
+ """Find files matching a glob pattern across all backends.
224
+
225
+ Args:
226
+ pattern: Glob pattern (e.g., "**/*.py", "*.txt", "/subdir/**/*.md")
227
+ path: Base path to search from (default "/")
+ Returns:
228
+ List of absolute file paths matching the pattern.
229
+ """
230
+ results = []
231
+
232
+ # Route based on path, not pattern
233
+ for route_prefix, backend in self.sorted_routes:
234
+ if path.startswith(route_prefix.rstrip("/")):
235
+ # Path matches a specific route - search only that backend
236
+ search_path = path[len(route_prefix) - 1:]
237
+ matches = backend.glob(pattern, search_path if search_path else "/")
238
+ results.extend(f"{route_prefix[:-1]}{match}" for match in matches)
239
+ return sorted(results)
240
+
241
+ # Path doesn't match any specific route - search default backend AND all routed backends
242
+ default_matches = self.default.glob(pattern, path)
243
+ results.extend(default_matches)
244
+
245
+ # Also search in all routed backends and prefix results
246
+ for route_prefix, backend in self.routes.items():
247
+ matches = backend.glob(pattern, "/")
248
+ results.extend(f"{route_prefix[:-1]}{match}" for match in matches)
249
+
250
+ return sorted(results)