deepagents 0.2.3__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff covers publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions.
Files changed (37)
  1. deepagents/backends/__init__.py +1 -1
  2. deepagents/backends/composite.py +32 -42
  3. deepagents/backends/filesystem.py +92 -86
  4. deepagents/backends/protocol.py +39 -13
  5. deepagents/backends/state.py +59 -58
  6. deepagents/backends/store.py +74 -67
  7. deepagents/backends/utils.py +7 -21
  8. deepagents/graph.py +1 -1
  9. deepagents/middleware/filesystem.py +49 -47
  10. deepagents/middleware/resumable_shell.py +5 -4
  11. deepagents/middleware/subagents.py +1 -2
  12. {deepagents-0.2.3.dist-info → deepagents-0.2.5.dist-info}/METADATA +7 -7
  13. deepagents-0.2.5.dist-info/RECORD +38 -0
  14. deepagents-0.2.5.dist-info/top_level.txt +2 -0
  15. deepagents-cli/README.md +3 -0
  16. deepagents-cli/deepagents_cli/README.md +196 -0
  17. deepagents-cli/deepagents_cli/__init__.py +5 -0
  18. deepagents-cli/deepagents_cli/__main__.py +6 -0
  19. deepagents-cli/deepagents_cli/agent.py +278 -0
  20. {deepagents/middleware → deepagents-cli/deepagents_cli}/agent_memory.py +16 -12
  21. deepagents-cli/deepagents_cli/commands.py +89 -0
  22. deepagents-cli/deepagents_cli/config.py +118 -0
  23. deepagents-cli/deepagents_cli/execution.py +636 -0
  24. deepagents-cli/deepagents_cli/file_ops.py +347 -0
  25. deepagents-cli/deepagents_cli/input.py +270 -0
  26. deepagents-cli/deepagents_cli/main.py +226 -0
  27. deepagents-cli/deepagents_cli/py.typed +0 -0
  28. deepagents-cli/deepagents_cli/token_utils.py +63 -0
  29. deepagents-cli/deepagents_cli/tools.py +140 -0
  30. deepagents-cli/deepagents_cli/ui.py +489 -0
  31. deepagents-cli/tests/test_file_ops.py +119 -0
  32. deepagents-cli/tests/test_placeholder.py +5 -0
  33. deepagents-0.2.3.dist-info/RECORD +0 -21
  34. deepagents-0.2.3.dist-info/top_level.txt +0 -1
  35. {deepagents-0.2.3.dist-info → deepagents-0.2.5.dist-info}/WHEEL +0 -0
  36. {deepagents-0.2.3.dist-info → deepagents-0.2.5.dist-info}/licenses/LICENSE +0 -0
  37. {deepagents → deepagents-cli/deepagents_cli}/default_agent_prompt.md +0 -0
deepagents/backends/state.py CHANGED
@@ -1,44 +1,38 @@
  """StateBackend: Store files in LangGraph agent state (ephemeral)."""

- import re
- from typing import Any, Literal, Optional, TYPE_CHECKING
+ from typing import TYPE_CHECKING

- from langchain.tools import ToolRuntime
-
- from langchain_core.messages import ToolMessage
- from langgraph.types import Command
-
- from .utils import (
+ from deepagents.backends.protocol import BackendProtocol, EditResult, FileInfo, GrepMatch, WriteResult
+ from deepagents.backends.utils import (
+     _glob_search_files,
      create_file_data,
-     update_file_data,
      file_data_to_string,
      format_read_response,
-     perform_string_replacement,
-     _glob_search_files,
      grep_matches_from_files,
+     perform_string_replacement,
+     update_file_data,
  )
- from deepagents.backends.utils import FileInfo, GrepMatch
- from deepagents.backends.protocol import WriteResult, EditResult

+ if TYPE_CHECKING:
+     from langchain.tools import ToolRuntime

- class StateBackend:
+
+ class StateBackend(BackendProtocol):
      """Backend that stores files in agent state (ephemeral).
-
+
      Uses LangGraph's state management and checkpointing. Files persist within
      a conversation thread but not across threads. State is automatically
      checkpointed after each agent step.
-
+
      Special handling: Since LangGraph state must be updated via Command objects
      (not direct mutation), operations return Command objects instead of None.
      This is indicated by the uses_state=True flag.
      """
-
+
      def __init__(self, runtime: "ToolRuntime"):
-         """Initialize StateBackend with runtime.
-
-         Args:"""
+         """Initialize StateBackend with runtime."""
          self.runtime = runtime
-
+
      def ls_info(self, path: str) -> list[FileInfo]:
          """List files and directories in the specified directory (non-recursive).

@@ -62,7 +56,7 @@ class StateBackend:
                  continue

              # Get the relative path after the directory
-             relative = k[len(normalized_path):]
+             relative = k[len(normalized_path) :]

              # If relative path contains '/', it's in a subdirectory
              if "/" in relative:
@@ -73,35 +67,39 @@ class StateBackend:

              # This is a file directly in the current directory
              size = len("\n".join(fd.get("content", [])))
-             infos.append({
-                 "path": k,
-                 "is_dir": False,
-                 "size": int(size),
-                 "modified_at": fd.get("modified_at", ""),
-             })
+             infos.append(
+                 {
+                     "path": k,
+                     "is_dir": False,
+                     "size": int(size),
+                     "modified_at": fd.get("modified_at", ""),
+                 }
+             )

          # Add directories to the results
          for subdir in sorted(subdirs):
-             infos.append({
-                 "path": subdir,
-                 "is_dir": True,
-                 "size": 0,
-                 "modified_at": "",
-             })
+             infos.append(
+                 {
+                     "path": subdir,
+                     "is_dir": True,
+                     "size": 0,
+                     "modified_at": "",
+                 }
+             )

          infos.sort(key=lambda x: x.get("path", ""))
          return infos

      # Removed legacy ls() convenience to keep lean surface
-
+
      def read(
-         self,
+         self,
          file_path: str,
          offset: int = 0,
          limit: int = 2000,
      ) -> str:
          """Read file content with line numbers.
-
+
          Args:
              file_path: Absolute file path
              offset: Line offset to start reading from (0-indexed)
@@ -110,14 +108,14 @@ class StateBackend:
          """
          files = self.runtime.state.get("files", {})
          file_data = files.get(file_path)
-
+
          if file_data is None:
              return f"Error: File '{file_path}' not found"
-
+
          return format_read_response(file_data, offset, limit)
-
+
      def write(
-         self,
+         self,
          file_path: str,
          content: str,
      ) -> WriteResult:
@@ -125,15 +123,15 @@ class StateBackend:
          Returns WriteResult with files_update to update LangGraph state.
          """
          files = self.runtime.state.get("files", {})
-
+
          if file_path in files:
              return WriteResult(error=f"Cannot write to {file_path} because it already exists. Read and then make an edit, or write to a new path.")
-
+
          new_file_data = create_file_data(content)
          return WriteResult(path=file_path, files_update={file_path: new_file_data})
-
+
      def edit(
-         self,
+         self,
          file_path: str,
          old_string: str,
@@ -144,31 +142,31 @@ class StateBackend:
          """
          files = self.runtime.state.get("files", {})
          file_data = files.get(file_path)
-
+
          if file_data is None:
              return EditResult(error=f"Error: File '{file_path}' not found")
-
+
          content = file_data_to_string(file_data)
          result = perform_string_replacement(content, old_string, new_string, replace_all)
-
+
          if isinstance(result, str):
              return EditResult(error=result)
-
+
          new_content, occurrences = result
          new_file_data = update_file_data(file_data, new_content)
          return EditResult(path=file_path, files_update={file_path: new_file_data}, occurrences=int(occurrences))
-
+
      # Removed legacy grep() convenience to keep lean surface

      def grep_raw(
          self,
          pattern: str,
          path: str = "/",
-         glob: Optional[str] = None,
+         glob: str | None = None,
      ) -> list[GrepMatch] | str:
          files = self.runtime.state.get("files", {})
          return grep_matches_from_files(files, pattern, path, glob)
-
+
      def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
          files = self.runtime.state.get("files", {})
          result = _glob_search_files(files, pattern, path)
@@ -179,12 +177,15 @@ class StateBackend:
          for p in paths:
              fd = files.get(p)
              size = len("\n".join(fd.get("content", []))) if fd else 0
-             infos.append({
-                 "path": p,
-                 "is_dir": False,
-                 "size": int(size),
-                 "modified_at": fd.get("modified_at", "") if fd else "",
-             })
+             infos.append(
+                 {
+                     "path": p,
+                     "is_dir": False,
+                     "size": int(size),
+                     "modified_at": fd.get("modified_at", "") if fd else "",
+                 }
+             )
          return infos

+
  # Provider classes removed: prefer callables like `lambda rt: StateBackend(rt)`
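Note: StateBackend no longer imports `Command` or `ToolMessage` directly; as the class docstring above describes, write/edit results carry a `files_update` that the calling middleware is expected to turn into a LangGraph state update. The sketch below is illustrative only: it assumes `WriteResult` exposes `error`, `path`, and `files_update` attributes and that the agent state key is `"files"` (assumptions, not verified against the package).

```python
from langgraph.types import Command


def apply_write(backend, file_path: str, content: str) -> Command | str:
    """Illustrative caller-side glue: convert a WriteResult into a state update."""
    result = backend.write(file_path, content)
    if result.error:
        # Error strings are surfaced back to the model instead of raising.
        return result.error
    if result.files_update is None:
        # Persistent backends (e.g. StoreBackend) already wrote the data themselves.
        return f"Wrote {result.path}"
    # Ephemeral StateBackend: merge the new FileData into the "files" channel
    # so LangGraph checkpoints it with the rest of the agent state.
    return Command(update={"files": result.files_update})
```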
deepagents/backends/store.py CHANGED
@@ -1,48 +1,49 @@
  """StoreBackend: Adapter for LangGraph's BaseStore (persistent, cross-thread)."""

- import re
- from typing import Any, Optional, TYPE_CHECKING
+ from typing import TYPE_CHECKING, Any

  if TYPE_CHECKING:
      from langchain.tools import ToolRuntime

  from langgraph.config import get_config
  from langgraph.store.base import BaseStore, Item
- from deepagents.backends.protocol import WriteResult, EditResult

+ from deepagents.backends.protocol import EditResult, WriteResult
  from deepagents.backends.utils import (
+     FileInfo,
+     GrepMatch,
+     _glob_search_files,
      create_file_data,
-     update_file_data,
      file_data_to_string,
      format_read_response,
-     perform_string_replacement,
-     _glob_search_files,
      grep_matches_from_files,
+     perform_string_replacement,
+     update_file_data,
  )
- from deepagents.backends.utils import FileInfo, GrepMatch


  class StoreBackend:
      """Backend that stores files in LangGraph's BaseStore (persistent).
-
+
      Uses LangGraph's Store for persistent, cross-conversation storage.
      Files are organized via namespaces and persist across all threads.
-
+
      The namespace can include an optional assistant_id for multi-agent isolation.
      """
+
      def __init__(self, runtime: "ToolRuntime"):
          """Initialize StoreBackend with runtime.
-
-         Args:"""
-         self.runtime = runtime

+         Args:
+         """
+         self.runtime = runtime

      def _get_store(self) -> BaseStore:
          """Get the store instance.
-
+
          Args:Returns:
              BaseStore instance
-
+
          Raises:
              ValueError: If no store is available or runtime not provided
          """
@@ -51,15 +52,15 @@ class StoreBackend:
              msg = "Store is required but not available in runtime"
              raise ValueError(msg)
          return store
-
+
      def _get_namespace(self) -> tuple[str, ...]:
          """Get the namespace for store operations.
-
+
          Preference order:
          1) Use `self.runtime.config` if present (tests pass this explicitly).
          2) Fallback to `langgraph.config.get_config()` if available.
          3) Default to ("filesystem",).
-
+
          If an assistant_id is available in the config metadata, return
          (assistant_id, "filesystem") to provide per-assistant isolation.
          """
@@ -88,16 +89,16 @@ class StoreBackend:
          if assistant_id:
              return (assistant_id, namespace)
          return (namespace,)
-
+
      def _convert_store_item_to_file_data(self, store_item: Item) -> dict[str, Any]:
          """Convert a store Item to FileData format.
-
+
          Args:
              store_item: The store Item containing file data.
-
+
          Returns:
              FileData dict with content, created_at, and modified_at fields.
-
+
          Raises:
              ValueError: If required fields are missing or have incorrect types.
          """
@@ -115,13 +116,13 @@ class StoreBackend:
              "created_at": store_item.value["created_at"],
              "modified_at": store_item.value["modified_at"],
          }
-
+
      def _convert_file_data_to_store_value(self, file_data: dict[str, Any]) -> dict[str, Any]:
          """Convert FileData to a dict suitable for store.put().
-
+
          Args:
              file_data: The FileData to convert.
-
+
          Returns:
              Dictionary with content, created_at, and modified_at fields.
          """
@@ -177,7 +178,7 @@ class StoreBackend:
              offset += page_size

          return all_items
-
+
      def ls_info(self, path: str) -> list[FileInfo]:
          """List files and directories in the specified directory (non-recursive).

@@ -206,7 +207,7 @@ class StoreBackend:
                  continue

              # Get the relative path after the directory
-             relative = str(item.key)[len(normalized_path):]
+             relative = str(item.key)[len(normalized_path) :]

              # If relative path contains '/', it's in a subdirectory
              if "/" in relative:
@@ -221,58 +222,62 @@ class StoreBackend:
              except ValueError:
                  continue
              size = len("\n".join(fd.get("content", [])))
-             infos.append({
-                 "path": item.key,
-                 "is_dir": False,
-                 "size": int(size),
-                 "modified_at": fd.get("modified_at", ""),
-             })
+             infos.append(
+                 {
+                     "path": item.key,
+                     "is_dir": False,
+                     "size": int(size),
+                     "modified_at": fd.get("modified_at", ""),
+                 }
+             )

          # Add directories to the results
          for subdir in sorted(subdirs):
-             infos.append({
-                 "path": subdir,
-                 "is_dir": True,
-                 "size": 0,
-                 "modified_at": "",
-             })
+             infos.append(
+                 {
+                     "path": subdir,
+                     "is_dir": True,
+                     "size": 0,
+                     "modified_at": "",
+                 }
+             )

          infos.sort(key=lambda x: x.get("path", ""))
          return infos

      # Removed legacy ls() convenience to keep lean surface
-
+
      def read(
-         self,
+         self,
          file_path: str,
          offset: int = 0,
          limit: int = 2000,
      ) -> str:
          """Read file content with line numbers.
-
+
          Args:
              file_path: Absolute file path
              offset: Line offset to start reading from (0-indexed)
              limit: Maximum number of lines to read
-
+
          Returns:
              Formatted file content with line numbers, or error message.
          """
          store = self._get_store()
          namespace = self._get_namespace()
-         item: Optional[Item] = store.get(namespace, file_path)
-
+         item: Item | None = store.get(namespace, file_path)
+
          if item is None:
              return f"Error: File '{file_path}' not found"
-
+
          try:
              file_data = self._convert_store_item_to_file_data(item)
          except ValueError as e:
              return f"Error: {e}"
-
+
          return format_read_response(file_data, offset, limit)
-
+
      def write(
-         self,
+         self,
          file_path: str,
          content: str,
      ) -> WriteResult:
281
286
  """
282
287
  store = self._get_store()
283
288
  namespace = self._get_namespace()
284
-
289
+
285
290
  # Check if file exists
286
291
  existing = store.get(namespace, file_path)
287
292
  if existing is not None:
288
293
  return WriteResult(error=f"Cannot write to {file_path} because it already exists. Read and then make an edit, or write to a new path.")
289
-
294
+
290
295
  # Create new file
291
296
  file_data = create_file_data(content)
292
297
  store_value = self._convert_file_data_to_store_value(file_data)
293
298
  store.put(namespace, file_path, store_value)
294
299
  return WriteResult(path=file_path, files_update=None)
295
-
300
+
296
301
  def edit(
297
- self,
302
+ self,
298
303
  file_path: str,
299
304
  old_string: str,
300
305
  new_string: str,
@@ -305,38 +310,38 @@ class StoreBackend:
305
310
  """
306
311
  store = self._get_store()
307
312
  namespace = self._get_namespace()
308
-
313
+
309
314
  # Get existing file
310
315
  item = store.get(namespace, file_path)
311
316
  if item is None:
312
317
  return EditResult(error=f"Error: File '{file_path}' not found")
313
-
318
+
314
319
  try:
315
320
  file_data = self._convert_store_item_to_file_data(item)
316
321
  except ValueError as e:
317
322
  return EditResult(error=f"Error: {e}")
318
-
323
+
319
324
  content = file_data_to_string(file_data)
320
325
  result = perform_string_replacement(content, old_string, new_string, replace_all)
321
-
326
+
322
327
  if isinstance(result, str):
323
328
  return EditResult(error=result)
324
-
329
+
325
330
  new_content, occurrences = result
326
331
  new_file_data = update_file_data(file_data, new_content)
327
-
332
+
328
333
  # Update file in store
329
334
  store_value = self._convert_file_data_to_store_value(new_file_data)
330
335
  store.put(namespace, file_path, store_value)
331
336
  return EditResult(path=file_path, files_update=None, occurrences=int(occurrences))
332
-
337
+
333
338
  # Removed legacy grep() convenience to keep lean surface
334
339
 
335
340
  def grep_raw(
336
341
  self,
337
342
  pattern: str,
338
343
  path: str = "/",
339
- glob: Optional[str] = None,
344
+ glob: str | None = None,
340
345
  ) -> list[GrepMatch] | str:
341
346
  store = self._get_store()
342
347
  namespace = self._get_namespace()
@@ -348,7 +353,7 @@ class StoreBackend:
348
353
  except ValueError:
349
354
  continue
350
355
  return grep_matches_from_files(files, pattern, path, glob)
351
-
356
+
352
357
  def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
353
358
  store = self._get_store()
354
359
  namespace = self._get_namespace()
@@ -367,12 +372,14 @@ class StoreBackend:
367
372
  for p in paths:
368
373
  fd = files.get(p)
369
374
  size = len("\n".join(fd.get("content", []))) if fd else 0
370
- infos.append({
371
- "path": p,
372
- "is_dir": False,
373
- "size": int(size),
374
- "modified_at": fd.get("modified_at", "") if fd else "",
375
- })
375
+ infos.append(
376
+ {
377
+ "path": p,
378
+ "is_dir": False,
379
+ "size": int(size),
380
+ "modified_at": fd.get("modified_at", "") if fd else "",
381
+ }
382
+ )
376
383
  return infos
377
384
 
378
385
 
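Note: the `_get_namespace` docstring above spells out a three-step resolution order plus optional per-assistant isolation. A simplified, illustrative version of that logic follows; the `metadata["assistant_id"]` location and the helper name are assumptions, and the real method also consults `self.runtime.config` first.

```python
from langgraph.config import get_config


def resolve_namespace(config: dict | None = None) -> tuple[str, ...]:
    """Sketch of StoreBackend's documented namespace preference order."""
    if config is None:
        try:
            # Only available when running inside a LangGraph runnable context.
            config = get_config()
        except Exception:
            config = None

    assistant_id = ""
    if config:
        assistant_id = (config.get("metadata") or {}).get("assistant_id", "")

    # Per-assistant isolation: prefix the shared "filesystem" namespace.
    return (assistant_id, "filesystem") if assistant_id else ("filesystem",)


# resolve_namespace({"metadata": {"assistant_id": "agent-123"}})
# -> ("agent-123", "filesystem")
```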
deepagents/backends/utils.py CHANGED
@@ -8,36 +8,22 @@ enable composition without fragile string parsing.
  import re
  from datetime import UTC, datetime
  from pathlib import Path
- from typing import Any, Literal, TypedDict
+ from typing import Any, Literal

  import wcmatch.glob as wcglob

+ from deepagents.backends.protocol import FileInfo as _FileInfo
+ from deepagents.backends.protocol import GrepMatch as _GrepMatch
+
  EMPTY_CONTENT_WARNING = "System reminder: File exists but has empty contents"
  MAX_LINE_LENGTH = 10000
  LINE_NUMBER_WIDTH = 6
  TOOL_RESULT_TOKEN_LIMIT = 20000  # Same threshold as eviction
  TRUNCATION_GUIDANCE = "... [results truncated, try being more specific with your parameters]"

-
- class FileInfo(TypedDict, total=False):
-     """Structured file listing info.
-
-     Minimal contract used across backends. Only "path" is required.
-     Other fields are best-effort and may be absent depending on backend.
-     """
-
-     path: str
-     is_dir: bool
-     size: int  # bytes (approx)
-     modified_at: str  # ISO timestamp if known
-
-
- class GrepMatch(TypedDict):
-     """Structured grep match entry."""
-
-     path: str
-     line: int
-     text: str
+ # Re-export protocol types for backwards compatibility
+ FileInfo = _FileInfo
+ GrepMatch = _GrepMatch


  def sanitize_tool_call_id(tool_call_id: str) -> str:
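Note: `FileInfo` and `GrepMatch` are now defined in `deepagents.backends.protocol` and only re-bound in `utils`, so imports written against 0.2.3 should keep resolving. A quick, illustrative check of what the re-export implies (field names taken from the removed TypedDict shown above):

```python
from deepagents.backends import protocol, utils

# The aliases point at the same classes, so annotations keep working either way.
assert utils.FileInfo is protocol.FileInfo
assert utils.GrepMatch is protocol.GrepMatch

info: utils.FileInfo = {"path": "/notes.md", "is_dir": False, "size": 42}
match: utils.GrepMatch = {"path": "/notes.md", "line": 3, "text": "TODO"}
```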
deepagents/graph.py CHANGED
@@ -17,7 +17,7 @@ from langgraph.graph.state import CompiledStateGraph
  from langgraph.store.base import BaseStore
  from langgraph.types import Checkpointer

- from deepagents.backends.protocol import BackendProtocol, BackendFactory
+ from deepagents.backends.protocol import BackendFactory, BackendProtocol
  from deepagents.middleware.filesystem import FilesystemMiddleware
  from deepagents.middleware.patch_tool_calls import PatchToolCallsMiddleware
  from deepagents.middleware.subagents import CompiledSubAgent, SubAgent, SubAgentMiddleware