deepagents 0.1.4__py3-none-any.whl → 0.1.5rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,161 @@
+ """StateBackend: Store files in LangGraph agent state (ephemeral)."""
+
+ import re
+ from typing import Any, Literal, Optional, TYPE_CHECKING
+
+ from langchain.tools import ToolRuntime
+
+ from langchain_core.messages import ToolMessage
+ from langgraph.types import Command
+
+ from .utils import (
+     create_file_data,
+     update_file_data,
+     file_data_to_string,
+     format_read_response,
+     perform_string_replacement,
+     _glob_search_files,
+     grep_matches_from_files,
+ )
+ from deepagents.backends.utils import FileInfo, GrepMatch
+ from deepagents.backends.protocol import WriteResult, EditResult
+
+
+ class StateBackend:
+     """Backend that stores files in agent state (ephemeral).
+
+     Uses LangGraph's state management and checkpointing. Files persist within
+     a conversation thread but not across threads. State is automatically
+     checkpointed after each agent step.
+
+     Special handling: Since LangGraph state must be updated via Command objects
+     (not direct mutation), operations return Command objects instead of None.
+     This is indicated by the uses_state=True flag.
+     """
+
+     def __init__(self, runtime: "ToolRuntime"):
+         """Initialize StateBackend with runtime.
+
+         Args:
+             runtime: ToolRuntime providing access to the agent's state.
+         """
+         self.runtime = runtime
+
+     def ls_info(self, path: str) -> list[FileInfo]:
+         """List files from state.
+
+         Args:
+             path: Absolute path to directory.
+
+         Returns:
+             List of FileInfo-like dicts.
+         """
+         files = self.runtime.state.get("files", {})
+         infos: list[FileInfo] = []
+         for k, fd in files.items():
+             if not k.startswith(path):
+                 continue
+             size = len("\n".join(fd.get("content", [])))
+             infos.append({
+                 "path": k,
+                 "is_dir": False,
+                 "size": int(size),
+                 "modified_at": fd.get("modified_at", ""),
+             })
+         infos.sort(key=lambda x: x.get("path", ""))
+         return infos
+
+     # Removed legacy ls() convenience to keep lean surface
+
+     def read(
+         self,
+         file_path: str,
+         offset: int = 0,
+         limit: int = 2000,
+     ) -> str:
+         """Read file content with line numbers.
+
+         Args:
+             file_path: Absolute file path
+             offset: Line offset to start reading from (0-indexed)
+             limit: Maximum number of lines to read
+
+         Returns:
+             Formatted file content with line numbers, or error message.
+         """
+         files = self.runtime.state.get("files", {})
+         file_data = files.get(file_path)
+
+         if file_data is None:
+             return f"Error: File '{file_path}' not found"
+
+         return format_read_response(file_data, offset, limit)
+
+     def write(
+         self,
+         file_path: str,
+         content: str,
+     ) -> WriteResult:
+         """Create a new file with content.
+         Returns WriteResult with files_update to update LangGraph state.
+         """
+         files = self.runtime.state.get("files", {})
+
+         if file_path in files:
+             return WriteResult(error=f"Cannot write to {file_path} because it already exists. Read and then make an edit, or write to a new path.")
+
+         new_file_data = create_file_data(content)
+         return WriteResult(path=file_path, files_update={file_path: new_file_data})
+
+     def edit(
+         self,
+         file_path: str,
+         old_string: str,
+         new_string: str,
+         replace_all: bool = False,
+     ) -> EditResult:
+         """Edit a file by replacing string occurrences.
+         Returns EditResult with files_update and occurrences.
+         """
+         files = self.runtime.state.get("files", {})
+         file_data = files.get(file_path)
+
+         if file_data is None:
+             return EditResult(error=f"Error: File '{file_path}' not found")
+
+         content = file_data_to_string(file_data)
+         result = perform_string_replacement(content, old_string, new_string, replace_all)
+
+         if isinstance(result, str):
+             return EditResult(error=result)
+
+         new_content, occurrences = result
+         new_file_data = update_file_data(file_data, new_content)
+         return EditResult(path=file_path, files_update={file_path: new_file_data}, occurrences=int(occurrences))
+
+     # Removed legacy grep() convenience to keep lean surface
+
+     def grep_raw(
+         self,
+         pattern: str,
+         path: str = "/",
+         glob: Optional[str] = None,
+     ) -> list[GrepMatch] | str:
+         files = self.runtime.state.get("files", {})
+         return grep_matches_from_files(files, pattern, path, glob)
+
+     def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+         files = self.runtime.state.get("files", {})
+         result = _glob_search_files(files, pattern, path)
+         if result == "No files found":
+             return []
+         paths = result.split("\n")
+         infos: list[FileInfo] = []
+         for p in paths:
+             fd = files.get(p)
+             size = len("\n".join(fd.get("content", []))) if fd else 0
+             infos.append({
+                 "path": p,
+                 "is_dir": False,
+                 "size": int(size),
+                 "modified_at": fd.get("modified_at", "") if fd else "",
+             })
+         return infos
+
+ # Provider classes removed: prefer callables like `lambda rt: StateBackend(rt)`
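
The closing comment points at the intended wiring: hand the constructor to the agent as a callable that receives the ToolRuntime. As a minimal sketch of the state-backed flow above (not part of the package), the snippet below drives a StateBackend against a stand-in runtime; the `deepagents.backends.state` import path, the SimpleNamespace stub, and attribute access on WriteResult are assumptions, since the diff shows neither the module file names nor the protocol definitions.

```python
from types import SimpleNamespace

from deepagents.backends.state import StateBackend  # assumed module path

# Stand-in for ToolRuntime: StateBackend only touches runtime.state.
runtime = SimpleNamespace(state={"files": {}})
backend = StateBackend(runtime)

# write() never mutates state directly; it hands back files_update, which the
# caller (normally LangGraph, via a Command/state update) merges into "files".
result = backend.write("/notes/todo.txt", "buy milk\ncall bob")
if result.files_update:  # assumes WriteResult exposes its fields as attributes
    runtime.state["files"].update(result.files_update)

print(backend.ls_info("/notes"))        # FileInfo dicts under the /notes prefix
print(backend.read("/notes/todo.txt"))  # content rendered with line numbers
```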
@@ -0,0 +1,350 @@
+ """StoreBackend: Adapter for LangGraph's BaseStore (persistent, cross-thread)."""
+
+ import re
+ from typing import Any, Optional, TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from langchain.tools import ToolRuntime
+
+ from langgraph.config import get_config
+ from langgraph.store.base import BaseStore, Item
+ from deepagents.backends.protocol import WriteResult, EditResult
+
+ from deepagents.backends.utils import (
+     create_file_data,
+     update_file_data,
+     file_data_to_string,
+     format_read_response,
+     perform_string_replacement,
+     _glob_search_files,
+     grep_matches_from_files,
+ )
+ from deepagents.backends.utils import FileInfo, GrepMatch
+
+
+ class StoreBackend:
+     """Backend that stores files in LangGraph's BaseStore (persistent).
+
+     Uses LangGraph's Store for persistent, cross-conversation storage.
+     Files are organized via namespaces and persist across all threads.
+
+     The namespace can include an optional assistant_id for multi-agent isolation.
+     """
+     def __init__(self, runtime: "ToolRuntime"):
+         """Initialize StoreBackend with runtime.
+
+         Args:
+             runtime: ToolRuntime providing access to the store and config.
+         """
+         self.runtime = runtime
+
+
+     def _get_store(self) -> BaseStore:
+         """Get the store instance.
+
+         Returns:
+             BaseStore instance.
+
+         Raises:
+             ValueError: If no store is available or runtime not provided.
+         """
+         store = self.runtime.store
+         if store is None:
+             msg = "Store is required but not available in runtime"
+             raise ValueError(msg)
+         return store
+
+     def _get_namespace(self) -> tuple[str, ...]:
+         """Get the namespace for store operations.
+
+         Preference order:
+         1) Use `self.runtime.config` if present (tests pass this explicitly).
+         2) Fallback to `langgraph.config.get_config()` if available.
+         3) Default to ("filesystem",).
+
+         If an assistant_id is available in the config metadata, return
+         (assistant_id, "filesystem") to provide per-assistant isolation.
+         """
+         namespace = "filesystem"
+
+         # Prefer the runtime-provided config when present
+         runtime_cfg = getattr(self.runtime, "config", None)
+         if isinstance(runtime_cfg, dict):
+             assistant_id = runtime_cfg.get("metadata", {}).get("assistant_id")
+             if assistant_id:
+                 return (assistant_id, namespace)
+             return (namespace,)
+
+         # Fallback to langgraph's context, but guard against errors when
+         # called outside of a runnable context
+         try:
+             cfg = get_config()
+         except Exception:
+             return (namespace,)
+
+         try:
+             assistant_id = cfg.get("metadata", {}).get("assistant_id")  # type: ignore[assignment]
+         except Exception:
+             assistant_id = None
+
+         if assistant_id:
+             return (assistant_id, namespace)
+         return (namespace,)
+
+     def _convert_store_item_to_file_data(self, store_item: Item) -> dict[str, Any]:
+         """Convert a store Item to FileData format.
+
+         Args:
+             store_item: The store Item containing file data.
+
+         Returns:
+             FileData dict with content, created_at, and modified_at fields.
+
+         Raises:
+             ValueError: If required fields are missing or have incorrect types.
+         """
+         if "content" not in store_item.value or not isinstance(store_item.value["content"], list):
+             msg = f"Store item does not contain valid content field. Got: {store_item.value.keys()}"
+             raise ValueError(msg)
+         if "created_at" not in store_item.value or not isinstance(store_item.value["created_at"], str):
+             msg = f"Store item does not contain valid created_at field. Got: {store_item.value.keys()}"
+             raise ValueError(msg)
+         if "modified_at" not in store_item.value or not isinstance(store_item.value["modified_at"], str):
+             msg = f"Store item does not contain valid modified_at field. Got: {store_item.value.keys()}"
+             raise ValueError(msg)
+         return {
+             "content": store_item.value["content"],
+             "created_at": store_item.value["created_at"],
+             "modified_at": store_item.value["modified_at"],
+         }
+
+     def _convert_file_data_to_store_value(self, file_data: dict[str, Any]) -> dict[str, Any]:
+         """Convert FileData to a dict suitable for store.put().
+
+         Args:
+             file_data: The FileData to convert.
+
+         Returns:
+             Dictionary with content, created_at, and modified_at fields.
+         """
+         return {
+             "content": file_data["content"],
+             "created_at": file_data["created_at"],
+             "modified_at": file_data["modified_at"],
+         }
+
+     def _search_store_paginated(
+         self,
+         store: BaseStore,
+         namespace: tuple[str, ...],
+         *,
+         query: str | None = None,
+         filter: dict[str, Any] | None = None,
+         page_size: int = 100,
+     ) -> list[Item]:
+         """Search store with automatic pagination to retrieve all results.
+
+         Args:
+             store: The store to search.
+             namespace: Hierarchical path prefix to search within.
+             query: Optional query for natural language search.
+             filter: Key-value pairs to filter results.
+             page_size: Number of items to fetch per page (default: 100).
+
+         Returns:
+             List of all items matching the search criteria.
+
+         Example:
+             ```python
+             store = self._get_store()
+             namespace = self._get_namespace()
+             all_items = self._search_store_paginated(store, namespace)
+             ```
+         """
+         all_items: list[Item] = []
+         offset = 0
+         while True:
+             page_items = store.search(
+                 namespace,
+                 query=query,
+                 filter=filter,
+                 limit=page_size,
+                 offset=offset,
+             )
+             if not page_items:
+                 break
+             all_items.extend(page_items)
+             if len(page_items) < page_size:
+                 break
+             offset += page_size
+
+         return all_items
+
+     def ls_info(self, path: str) -> list[FileInfo]:
+         """List files from store.
+
+         Args:
+             path: Absolute path to directory.
+
+         Returns:
+             List of FileInfo-like dicts.
+         """
+         store = self._get_store()
+         namespace = self._get_namespace()
+
+         # Retrieve all items and filter by path prefix locally to avoid
+         # coupling to store-specific filter semantics
+         items = self._search_store_paginated(store, namespace)
+         infos: list[FileInfo] = []
+         for item in items:
+             if not str(item.key).startswith(path):
+                 continue
+             try:
+                 fd = self._convert_store_item_to_file_data(item)
+             except ValueError:
+                 continue
+             size = len("\n".join(fd.get("content", [])))
+             infos.append({
+                 "path": item.key,
+                 "is_dir": False,
+                 "size": int(size),
+                 "modified_at": fd.get("modified_at", ""),
+             })
+         infos.sort(key=lambda x: x.get("path", ""))
+         return infos
+
+     # Removed legacy ls() convenience to keep lean surface
+
+     def read(
+         self,
+         file_path: str,
+         offset: int = 0,
+         limit: int = 2000,
+     ) -> str:
+         """Read file content with line numbers.
+
+         Args:
+             file_path: Absolute file path
+             offset: Line offset to start reading from (0-indexed)
+             limit: Maximum number of lines to read
+
+         Returns:
+             Formatted file content with line numbers, or error message.
+         """
+         store = self._get_store()
+         namespace = self._get_namespace()
+         item: Optional[Item] = store.get(namespace, file_path)
+
+         if item is None:
+             return f"Error: File '{file_path}' not found"
+
+         try:
+             file_data = self._convert_store_item_to_file_data(item)
+         except ValueError as e:
+             return f"Error: {e}"
+
+         return format_read_response(file_data, offset, limit)
+
+     def write(
+         self,
+         file_path: str,
+         content: str,
+     ) -> WriteResult:
+         """Create a new file with content.
+         Returns WriteResult. External storage sets files_update=None.
+         """
+         store = self._get_store()
+         namespace = self._get_namespace()
+
+         # Check if file exists
+         existing = store.get(namespace, file_path)
+         if existing is not None:
+             return WriteResult(error=f"Cannot write to {file_path} because it already exists. Read and then make an edit, or write to a new path.")
+
+         # Create new file
+         file_data = create_file_data(content)
+         store_value = self._convert_file_data_to_store_value(file_data)
+         store.put(namespace, file_path, store_value)
+         return WriteResult(path=file_path, files_update=None)
+
+     def edit(
+         self,
+         file_path: str,
+         old_string: str,
+         new_string: str,
+         replace_all: bool = False,
+     ) -> EditResult:
+         """Edit a file by replacing string occurrences.
+         Returns EditResult. External storage sets files_update=None.
+         """
+         store = self._get_store()
+         namespace = self._get_namespace()
+
+         # Get existing file
+         item = store.get(namespace, file_path)
+         if item is None:
+             return EditResult(error=f"Error: File '{file_path}' not found")
+
+         try:
+             file_data = self._convert_store_item_to_file_data(item)
+         except ValueError as e:
+             return EditResult(error=f"Error: {e}")
+
+         content = file_data_to_string(file_data)
+         result = perform_string_replacement(content, old_string, new_string, replace_all)
+
+         if isinstance(result, str):
+             return EditResult(error=result)
+
+         new_content, occurrences = result
+         new_file_data = update_file_data(file_data, new_content)
+
+         # Update file in store
+         store_value = self._convert_file_data_to_store_value(new_file_data)
+         store.put(namespace, file_path, store_value)
+         return EditResult(path=file_path, files_update=None, occurrences=int(occurrences))
+
+     # Removed legacy grep() convenience to keep lean surface
+
+     def grep_raw(
+         self,
+         pattern: str,
+         path: str = "/",
+         glob: Optional[str] = None,
+     ) -> list[GrepMatch] | str:
+         store = self._get_store()
+         namespace = self._get_namespace()
+         items = self._search_store_paginated(store, namespace)
+         files: dict[str, Any] = {}
+         for item in items:
+             try:
+                 files[item.key] = self._convert_store_item_to_file_data(item)
+             except ValueError:
+                 continue
+         return grep_matches_from_files(files, pattern, path, glob)
+
+     def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]:
+         store = self._get_store()
+         namespace = self._get_namespace()
+         items = self._search_store_paginated(store, namespace)
+         files: dict[str, Any] = {}
+         for item in items:
+             try:
+                 files[item.key] = self._convert_store_item_to_file_data(item)
+             except ValueError:
+                 continue
+         result = _glob_search_files(files, pattern, path)
+         if result == "No files found":
+             return []
+         paths = result.split("\n")
+         infos: list[FileInfo] = []
+         for p in paths:
+             fd = files.get(p)
+             size = len("\n".join(fd.get("content", []))) if fd else 0
+             infos.append({
+                 "path": p,
+                 "is_dir": False,
+                 "size": int(size),
+                 "modified_at": fd.get("modified_at", "") if fd else "",
+             })
+         return infos
+
+
+ # Provider classes removed: prefer callables like `lambda rt: StoreBackend(rt)`
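
The same callable pattern applies to the persistent variant. Below is a rough sketch (again not part of the package) pairing StoreBackend with LangGraph's InMemoryStore so files outlive any single thread; the `deepagents.backends.store` import path and the SimpleNamespace runtime stub are assumptions rather than documented API.

```python
from types import SimpleNamespace

from langgraph.store.memory import InMemoryStore

from deepagents.backends.store import StoreBackend  # assumed module path

# StoreBackend reads runtime.store for persistence and runtime.config for the
# optional assistant_id; with one set, files land under ("assistant-1", "filesystem").
runtime = SimpleNamespace(
    store=InMemoryStore(),
    config={"metadata": {"assistant_id": "assistant-1"}},
)
backend = StoreBackend(runtime)

backend.write("/docs/readme.md", "# Hello\npersisted across threads")
print(backend.read("/docs/readme.md"))                      # numbered file content
print([info["path"] for info in backend.ls_info("/docs")])  # ["/docs/readme.md"]
print(backend.grep_raw("Hello", path="/docs"))              # GrepMatch entries
```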