deepagents 0.1.3__tar.gz → 0.1.5rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. {deepagents-0.1.3 → deepagents-0.1.5rc1}/PKG-INFO +31 -11
  2. deepagents-0.1.3/src/deepagents.egg-info/PKG-INFO → deepagents-0.1.5rc1/README.md +29 -29
  3. {deepagents-0.1.3 → deepagents-0.1.5rc1}/pyproject.toml +3 -2
  4. deepagents-0.1.5rc1/src/deepagents/backends/__init__.py +16 -0
  5. deepagents-0.1.5rc1/src/deepagents/backends/composite.py +235 -0
  6. deepagents-0.1.5rc1/src/deepagents/backends/filesystem.py +452 -0
  7. deepagents-0.1.5rc1/src/deepagents/backends/protocol.py +122 -0
  8. deepagents-0.1.5rc1/src/deepagents/backends/state.py +161 -0
  9. deepagents-0.1.5rc1/src/deepagents/backends/store.py +350 -0
  10. deepagents-0.1.5rc1/src/deepagents/backends/utils.py +424 -0
  11. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents/graph.py +10 -13
  12. deepagents-0.1.5rc1/src/deepagents/middleware/filesystem.py +666 -0
  13. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents/middleware/subagents.py +1 -1
  14. deepagents-0.1.3/README.md → deepagents-0.1.5rc1/src/deepagents.egg-info/PKG-INFO +50 -11
  15. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents.egg-info/SOURCES.txt +7 -0
  16. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents.egg-info/requires.txt +1 -0
  17. deepagents-0.1.5rc1/tests/test_middleware.py +870 -0
  18. deepagents-0.1.3/src/deepagents/middleware/filesystem.py +0 -1125
  19. deepagents-0.1.3/tests/test_middleware.py +0 -304
  20. {deepagents-0.1.3 → deepagents-0.1.5rc1}/LICENSE +0 -0
  21. {deepagents-0.1.3 → deepagents-0.1.5rc1}/setup.cfg +0 -0
  22. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents/__init__.py +0 -0
  23. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents/middleware/__init__.py +0 -0
  24. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents/middleware/patch_tool_calls.py +0 -0
  25. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents.egg-info/dependency_links.txt +0 -0
  26. {deepagents-0.1.3 → deepagents-0.1.5rc1}/src/deepagents.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: deepagents
- Version: 0.1.3
+ Version: 0.1.5rc1
  Summary: General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph.
  License: MIT
  Requires-Python: <4.0,>=3.11
@@ -15,6 +15,7 @@ Requires-Dist: pytest-cov; extra == "dev"
  Requires-Dist: build; extra == "dev"
  Requires-Dist: twine; extra == "dev"
  Requires-Dist: langchain-openai; extra == "dev"
+ Requires-Dist: wcmatch; extra == "dev"
  Dynamic: license-file

  # 🧠🤖Deep Agents
@@ -128,9 +129,7 @@ By default, `deepagents` uses `"claude-sonnet-4-5-20250929"`. You can customize
  from langchain.chat_models import init_chat_model
  from deepagents import create_deep_agent

- model = init_chat_model(
-     model="openai:gpt-5",
- )
+ model = init_chat_model("openai:gpt-4o")
  agent = create_deep_agent(
      model=model,
  )
@@ -315,19 +314,30 @@ agent = create_deep_agent(
  )
  ```

- ### `use_longterm_memory`
- Deep agents come with a local filesystem to offload memory to. This filesystem is stored in state, and is therefore transient to a single thread.
+ ### `backend`
+ Deep agents come with a local filesystem to offload memory to. By default, this filesystem is stored in state (ephemeral, transient to a single thread).

- You can extend deep agents with long-term memory by providing a Store and setting use_longterm_memory=True.
+ You can configure persistent long-term memory using a composite backend that routes a path prefix (for example, `/memories/`) to a persistent store.

  ```python
  from deepagents import create_deep_agent
+ from deepagents.backends import build_composite_state_backend, StoreBackend
  from langgraph.store.memory import InMemoryStore

- store = InMemoryStore()  # Or any other Store object
+ store = InMemoryStore()  # Or any other Store implementation
+
+ # Provide a backend factory to the agent/middleware.
+ # This builds a state-backed composite at runtime and routes /memories/ to StoreBackend.
+ backend_factory = lambda rt: build_composite_state_backend(
+     rt,
+     routes={
+         "/memories/": (lambda r: StoreBackend(r)),
+     },
+ )
+
  agent = create_deep_agent(
+     backend=backend_factory,
      store=store,
-     use_longterm_memory=True
  )
  ```

@@ -403,6 +413,11 @@ Context engineering is one of the main challenges in building effective agents.
  ```python
  from langchain.agents import create_agent
  from deepagents.middleware.filesystem import FilesystemMiddleware
+ from deepagents.backends import (
+     StateBackend,
+     CompositeBackend,
+     StoreBackend,
+ )

  # FilesystemMiddleware is included by default in create_deep_agent
  # You can customize it if building a custom agent
@@ -410,8 +425,13 @@ agent = create_agent(
      model="anthropic:claude-sonnet-4-20250514",
      middleware=[
          FilesystemMiddleware(
-             long_term_memory=False,  # Enables access to long-term memory, defaults to False. You must attach a store to use long-term memory.
-             system_prompt="Write to the filesystem when...",  # Optional custom addition to the system prompt
+             backend=lambda rt: StateBackend(rt),  # Optional: customize storage backend (defaults to lambda rt: )
+             # For persistent memory, use CompositeBackend:
+             # backend=CompositeBackend(
+             #     default=lambda rt: StateBackend(rt)
+             #     routes={"/memories/": lambda rt: StoreBackend(rt)}
+             # )
+             system_prompt="Write to the filesystem when...",  # Optional custom system prompt override
              custom_tool_descriptions={
                  "ls": "Use the ls tool when...",
                  "read_file": "Use the read_file tool to..."
@@ -1,22 +1,3 @@
- Metadata-Version: 2.4
- Name: deepagents
- Version: 0.1.3
- Summary: General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph.
- License: MIT
- Requires-Python: <4.0,>=3.11
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: langchain-anthropic<2.0.0,>=1.0.0
- Requires-Dist: langchain<2.0.0,>=1.0.0
- Requires-Dist: langchain-core<2.0.0,>=1.0.0
- Provides-Extra: dev
- Requires-Dist: pytest; extra == "dev"
- Requires-Dist: pytest-cov; extra == "dev"
- Requires-Dist: build; extra == "dev"
- Requires-Dist: twine; extra == "dev"
- Requires-Dist: langchain-openai; extra == "dev"
- Dynamic: license-file
-
  # 🧠🤖Deep Agents

  Using an LLM to call tools in a loop is the simplest form of an agent.
@@ -128,9 +109,7 @@ By default, `deepagents` uses `"claude-sonnet-4-5-20250929"`. You can customize
  from langchain.chat_models import init_chat_model
  from deepagents import create_deep_agent

- model = init_chat_model(
-     model="openai:gpt-5",
- )
+ model = init_chat_model("openai:gpt-4o")
  agent = create_deep_agent(
      model=model,
  )
@@ -315,19 +294,30 @@ agent = create_deep_agent(
  )
  ```

- ### `use_longterm_memory`
- Deep agents come with a local filesystem to offload memory to. This filesystem is stored in state, and is therefore transient to a single thread.
+ ### `backend`
+ Deep agents come with a local filesystem to offload memory to. By default, this filesystem is stored in state (ephemeral, transient to a single thread).

- You can extend deep agents with long-term memory by providing a Store and setting use_longterm_memory=True.
+ You can configure persistent long-term memory using a composite backend that routes a path prefix (for example, `/memories/`) to a persistent store.

  ```python
  from deepagents import create_deep_agent
+ from deepagents.backends import build_composite_state_backend, StoreBackend
  from langgraph.store.memory import InMemoryStore

- store = InMemoryStore()  # Or any other Store object
+ store = InMemoryStore()  # Or any other Store implementation
+
+ # Provide a backend factory to the agent/middleware.
+ # This builds a state-backed composite at runtime and routes /memories/ to StoreBackend.
+ backend_factory = lambda rt: build_composite_state_backend(
+     rt,
+     routes={
+         "/memories/": (lambda r: StoreBackend(r)),
+     },
+ )
+
  agent = create_deep_agent(
+     backend=backend_factory,
      store=store,
-     use_longterm_memory=True
  )
  ```

@@ -403,6 +393,11 @@ Context engineering is one of the main challenges in building effective agents.
  ```python
  from langchain.agents import create_agent
  from deepagents.middleware.filesystem import FilesystemMiddleware
+ from deepagents.backends import (
+     StateBackend,
+     CompositeBackend,
+     StoreBackend,
+ )

  # FilesystemMiddleware is included by default in create_deep_agent
  # You can customize it if building a custom agent
@@ -410,8 +405,13 @@ agent = create_agent(
      model="anthropic:claude-sonnet-4-20250514",
      middleware=[
          FilesystemMiddleware(
-             long_term_memory=False,  # Enables access to long-term memory, defaults to False. You must attach a store to use long-term memory.
-             system_prompt="Write to the filesystem when...",  # Optional custom addition to the system prompt
+             backend=lambda rt: StateBackend(rt),  # Optional: customize storage backend (defaults to lambda rt: )
+             # For persistent memory, use CompositeBackend:
+             # backend=CompositeBackend(
+             #     default=lambda rt: StateBackend(rt)
+             #     routes={"/memories/": lambda rt: StoreBackend(rt)}
+             # )
+             system_prompt="Write to the filesystem when...",  # Optional custom system prompt override
              custom_tool_descriptions={
                  "ls": "Use the ls tool when...",
                  "read_file": "Use the read_file tool to..."
@@ -1,6 +1,6 @@
  [project]
  name = "deepagents"
- version = "0.1.3"
+ version = "0.1.5rc1"
  description = "General purpose 'deep agent' with sub-agent spawning, todo list capabilities, and mock file system. Built on LangGraph."
  readme = "README.md"
  license = { text = "MIT" }
@@ -17,7 +17,8 @@ dev = [
      "pytest-cov",
      "build",
      "twine",
-     "langchain-openai"
+     "langchain-openai",
+     "wcmatch"
  ]

  [dependency-groups]
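
The only dependency change is the new `wcmatch` dev extra, presumably supporting the glob/grep features of the new backends. As a rough illustration of the library itself (not code from this package), `wcmatch.glob.globmatch` tests paths against extended glob patterns:

```python
from wcmatch import glob

# GLOBSTAR enables ** for recursive matching.
print(glob.globmatch("memories/notes/today.txt", "**/*.txt", flags=glob.GLOBSTAR))  # True
print(glob.globmatch("memories/notes/today.txt", "*.md", flags=glob.GLOBSTAR))      # False
```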
@@ -0,0 +1,16 @@
+ """Memory backends for pluggable file storage."""
+
+ from deepagents.backends.composite import CompositeBackend, build_composite_state_backend
+ from deepagents.backends.filesystem import FilesystemBackend
+ from deepagents.backends.state import StateBackend
+ from deepagents.backends.store import StoreBackend
+ from deepagents.backends.protocol import BackendProtocol
+
+ __all__ = [
+     "BackendProtocol",
+     "CompositeBackend",
+     "build_composite_state_backend",
+     "FilesystemBackend",
+     "StateBackend",
+     "StoreBackend",
+ ]
@@ -0,0 +1,235 @@
+ """CompositeBackend: Route operations to different backends based on path prefix."""
+
+ from typing import Any, Literal, Optional, TYPE_CHECKING
+
+ from langchain.tools import ToolRuntime
+
+ from deepagents.backends.protocol import BackendProtocol, BackendFactory, WriteResult, EditResult
+ from deepagents.backends.state import StateBackend
+ from deepagents.backends.utils import FileInfo, GrepMatch
+ from deepagents.backends.protocol import BackendFactory
+
+
+ class CompositeBackend:
+
+     def __init__(
+         self,
+         default: BackendProtocol | StateBackend,
+         routes: dict[str, BackendProtocol],
+     ) -> None:
+         # Default backend
+         self.default = default
+
+         # Virtual routes
+         self.routes = routes
+
+         # Sort routes by length (longest first) for correct prefix matching
+         self.sorted_routes = sorted(routes.items(), key=lambda x: len(x[0]), reverse=True)
+
+     def _get_backend_and_key(self, key: str) -> tuple[BackendProtocol, str]:
+         """Determine which backend handles this key and strip prefix.
+
+         Args:
+             key: Original file path
+
+         Returns:
+             Tuple of (backend, stripped_key) where stripped_key has the route
+             prefix removed (but keeps leading slash).
+         """
+         # Check routes in order of length (longest first)
+         for prefix, backend in self.sorted_routes:
+             if key.startswith(prefix):
+                 # Strip full prefix and ensure a leading slash remains
+                 # e.g., "/memories/notes.txt" → "/notes.txt"; "/memories/" → "/"
+                 suffix = key[len(prefix):]
+                 stripped_key = f"/{suffix}" if suffix else "/"
+                 return backend, stripped_key
+
+         return self.default, key
+
+     def ls_info(self, path: str) -> list[FileInfo]:
+         """List files from backends, with appropriate prefixes.
+
+         Args:
+             path: Absolute path to directory.
+
+         Returns:
+             List of FileInfo-like dicts with route prefixes added.
+         """
+         # Check if path matches a specific route
+         for route_prefix, backend in self.sorted_routes:
+             if path.startswith(route_prefix.rstrip("/")):
+                 # Query only the matching routed backend
+                 suffix = path[len(route_prefix):]
+                 search_path = f"/{suffix}" if suffix else "/"
+                 infos = backend.ls_info(search_path)
+                 prefixed: list[FileInfo] = []
+                 for fi in infos:
+                     fi = dict(fi)
+                     fi["path"] = f"{route_prefix[:-1]}{fi['path']}"
+                     prefixed.append(fi)
+                 return prefixed
+
+         # At root, aggregate default and all routed backends
+         if path == "/":
+             results: list[FileInfo] = []
+             results.extend(self.default.ls_info(path))
+             for route_prefix, backend in self.sorted_routes:
+                 infos = backend.ls_info("/")
+                 for fi in infos:
+                     fi = dict(fi)
+                     fi["path"] = f"{route_prefix[:-1]}{fi['path']}"
+                     results.append(fi)
+             results.sort(key=lambda x: x.get("path", ""))
+             return results
+
+         # Path doesn't match a route: query only default backend
+         return self.default.ls_info(path)
+
+
+     def read(
+         self,
+         file_path: str,
+         offset: int = 0,
+         limit: int = 2000,
+     ) -> str:
+         """Read file content, routing to appropriate backend.
+
+         Args:
+             file_path: Absolute file path
+             offset: Line offset to start reading from (0-indexed)
+             limit: Maximum number of lines to read
+
+         Returns:
+             Formatted file content with line numbers, or error message.
+         """
+         backend, stripped_key = self._get_backend_and_key(file_path)
+         return backend.read(stripped_key, offset=offset, limit=limit)
+
+
+     def grep_raw(
+         self,
+         pattern: str,
+         path: Optional[str] = None,
+         glob: Optional[str] = None,
+     ) -> list[GrepMatch] | str:
+         # If path targets a specific route, search only that backend
+         for route_prefix, backend in self.sorted_routes:
+             if path is not None and path.startswith(route_prefix.rstrip("/")):
+                 search_path = path[len(route_prefix) - 1:]
+                 raw = backend.grep_raw(pattern, search_path if search_path else "/", glob)
+                 if isinstance(raw, str):
+                     return raw
+                 return [{**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw]
+
+         # Otherwise, search default and all routed backends and merge
+         all_matches: list[GrepMatch] = []
+         raw_default = self.default.grep_raw(pattern, path, glob)  # type: ignore[attr-defined]
+         if isinstance(raw_default, str):
+             # This happens if error occurs
+             return raw_default
+         all_matches.extend(raw_default)
+
+         for route_prefix, backend in self.routes.items():
+             raw = backend.grep_raw(pattern, "/", glob)
+             if isinstance(raw, str):
+                 # This happens if error occurs
+                 return raw
+             all_matches.extend({**m, "path": f"{route_prefix[:-1]}{m['path']}"} for m in raw)
+
+         return all_matches
+
+     def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+         results: list[FileInfo] = []
+
+         # Route based on path, not pattern
+         for route_prefix, backend in self.sorted_routes:
+             if path.startswith(route_prefix.rstrip("/")):
+                 search_path = path[len(route_prefix) - 1:]
+                 infos = backend.glob_info(pattern, search_path if search_path else "/")
+                 return [
+                     {**fi, "path": f"{route_prefix[:-1]}{fi['path']}"}
+                     for fi in infos
+                 ]
+
+         # Path doesn't match any specific route - search default backend AND all routed backends
+         results.extend(self.default.glob_info(pattern, path))
+
+         for route_prefix, backend in self.routes.items():
+             infos = backend.glob_info(pattern, "/")
+             results.extend({**fi, "path": f"{route_prefix[:-1]}{fi['path']}"} for fi in infos)
+
+         # Deterministic ordering
+         results.sort(key=lambda x: x.get("path", ""))
+         return results
+
+
+     def write(
+         self,
+         file_path: str,
+         content: str,
+     ) -> WriteResult:
+         """Create a new file, routing to appropriate backend.
+
+         Args:
+             file_path: Absolute file path
+             content: File content as a string
+
+         Returns:
+             Success message or Command object, or error if file already exists.
+         """
+         backend, stripped_key = self._get_backend_and_key(file_path)
+         res = backend.write(stripped_key, content)
+         # If this is a state-backed update and default has state, merge so listings reflect changes
+         if res.files_update:
+             try:
+                 runtime = getattr(self.default, "runtime", None)
+                 if runtime is not None:
+                     state = runtime.state
+                     files = state.get("files", {})
+                     files.update(res.files_update)
+                     state["files"] = files
+             except Exception:
+                 pass
+         return res
+
+     def edit(
+         self,
+         file_path: str,
+         old_string: str,
+         new_string: str,
+         replace_all: bool = False,
+     ) -> EditResult:
+         """Edit a file, routing to appropriate backend.
+
+         Args:
+             file_path: Absolute file path
+             old_string: String to find and replace
+             new_string: Replacement string
+             replace_all: If True, replace all occurrences
+
+         Returns:
+             Success message or Command object, or error message on failure.
+         """
+         backend, stripped_key = self._get_backend_and_key(file_path)
+         res = backend.edit(stripped_key, old_string, new_string, replace_all=replace_all)
+         if res.files_update:
+             try:
+                 runtime = getattr(self.default, "runtime", None)
+                 if runtime is not None:
+                     state = runtime.state
+                     files = state.get("files", {})
+                     files.update(res.files_update)
+                     state["files"] = files
+             except Exception:
+                 pass
+         return res
+
+
+ def build_composite_state_backend(
+     runtime: ToolRuntime,
+     *,
+     routes: dict[str, BackendProtocol | BackendFactory],
+ ) -> BackendProtocol:
+     built_routes: dict[str, BackendProtocol] = {}
+     for k, v in routes.items():
+         if isinstance(v, BackendProtocol):
+             built_routes[k] = v
+         else:
+             built_routes[k] = v(runtime)
+     default_state = StateBackend(runtime)
+     return CompositeBackend(default=default_state, routes=built_routes)
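
A toy illustration of the prefix routing implemented by `_get_backend_and_key` above; the "backends" here are plain placeholders rather than the real `StateBackend`/`StoreBackend`, since the composite only does string work in this path:

```python
from deepagents.backends import CompositeBackend

class _Stub:
    """Placeholder standing in for a real backend; routing never calls it here."""
    def __init__(self, name: str) -> None:
        self.name = name

composite = CompositeBackend(default=_Stub("state"), routes={"/memories/": _Stub("store")})

backend, key = composite._get_backend_and_key("/memories/notes.txt")
print(backend.name, key)  # store /notes.txt  -- prefix stripped, leading slash kept

backend, key = composite._get_backend_and_key("/scratch/plan.md")
print(backend.name, key)  # state /scratch/plan.md  -- falls through to the default
```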