deepagents-cli 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of deepagents-cli might be problematic.
- deepagents/__init__.py +1 -12
- deepagents/cli.py +257 -272
- deepagents/default_agent_prompt.md +0 -27
- deepagents/graph.py +16 -40
- deepagents/memory/__init__.py +17 -0
- deepagents/memory/backends/__init__.py +15 -0
- deepagents/memory/backends/composite.py +250 -0
- deepagents/memory/backends/filesystem.py +330 -0
- deepagents/memory/backends/state.py +206 -0
- deepagents/memory/backends/store.py +351 -0
- deepagents/memory/backends/utils.py +319 -0
- deepagents/memory/protocol.py +164 -0
- deepagents/middleware/__init__.py +3 -3
- deepagents/middleware/agent_memory.py +207 -0
- deepagents/middleware/filesystem.py +229 -773
- deepagents/middleware/patch_tool_calls.py +44 -0
- deepagents/middleware/subagents.py +7 -6
- deepagents/pretty_cli.py +289 -0
- {deepagents_cli-0.0.1.dist-info → deepagents_cli-0.0.3.dist-info}/METADATA +26 -30
- deepagents_cli-0.0.3.dist-info/RECORD +24 -0
- deepagents/middleware/common.py +0 -16
- deepagents/middleware/local_filesystem.py +0 -741
- deepagents/prompts.py +0 -327
- deepagents/skills.py +0 -85
- deepagents_cli-0.0.1.dist-info/RECORD +0 -17
- {deepagents_cli-0.0.1.dist-info → deepagents_cli-0.0.3.dist-info}/WHEEL +0 -0
- {deepagents_cli-0.0.1.dist-info → deepagents_cli-0.0.3.dist-info}/entry_points.txt +0 -0
- {deepagents_cli-0.0.1.dist-info → deepagents_cli-0.0.3.dist-info}/licenses/LICENSE +0 -0
- {deepagents_cli-0.0.1.dist-info → deepagents_cli-0.0.3.dist-info}/top_level.txt +0 -0
deepagents/memory/backends/store.py
@@ -0,0 +1,351 @@
"""StoreBackend: Adapter for LangGraph's BaseStore (persistent, cross-thread)."""

import re
from typing import Any, Optional, TYPE_CHECKING

if TYPE_CHECKING:
    from langchain.tools import ToolRuntime

from langgraph.config import get_config
from langgraph.store.base import BaseStore, Item
from langgraph.types import Command

from deepagents.memory.backends.utils import (
    create_file_data,
    update_file_data,
    file_data_to_string,
    format_read_response,
    perform_string_replacement,
    _glob_search_files,
    _grep_search_files,
)


class StoreBackend:
    """Backend that stores files in LangGraph's BaseStore (persistent).

    Uses LangGraph's Store for persistent, cross-conversation storage.
    Files are organized via namespaces and persist across all threads.

    The namespace can include an optional assistant_id for multi-agent isolation.
    """

    def __init__(self, runtime: "ToolRuntime"):
        """Initialize StoreBackend with runtime.

        Args:
            runtime: The ToolRuntime providing access to the underlying store.
        """
        self.runtime = runtime

    def _get_store(self) -> BaseStore:
        """Get the store instance.

        Returns:
            BaseStore instance

        Raises:
            ValueError: If no store is available or runtime not provided
        """
        store = self.runtime.store
        if store is None:
            msg = "Store is required but not available in runtime"
            raise ValueError(msg)
        return store

    def _get_namespace(self) -> tuple[str, ...]:
        """Get the namespace for store operations.

        Returns a tuple for organizing files in the store. If an assistant_id is
        available in the config metadata, returns (assistant_id, "filesystem") to
        provide per-assistant isolation. Otherwise, returns ("filesystem",).

        Returns:
            Namespace tuple for store operations.
        """
        namespace = "filesystem"
        config = get_config()
        if config is None:
            return (namespace,)
        assistant_id = config.get("metadata", {}).get("assistant_id")
        if assistant_id is None:
            return (namespace,)
        return (assistant_id, namespace)

    def _convert_store_item_to_file_data(self, store_item: Item) -> dict[str, Any]:
        """Convert a store Item to FileData format.

        Args:
            store_item: The store Item containing file data.

        Returns:
            FileData dict with content, created_at, and modified_at fields.

        Raises:
            ValueError: If required fields are missing or have incorrect types.
        """
        if "content" not in store_item.value or not isinstance(store_item.value["content"], list):
            msg = f"Store item does not contain valid content field. Got: {store_item.value.keys()}"
            raise ValueError(msg)
        if "created_at" not in store_item.value or not isinstance(store_item.value["created_at"], str):
            msg = f"Store item does not contain valid created_at field. Got: {store_item.value.keys()}"
            raise ValueError(msg)
        if "modified_at" not in store_item.value or not isinstance(store_item.value["modified_at"], str):
            msg = f"Store item does not contain valid modified_at field. Got: {store_item.value.keys()}"
            raise ValueError(msg)
        return {
            "content": store_item.value["content"],
            "created_at": store_item.value["created_at"],
            "modified_at": store_item.value["modified_at"],
        }

    def _convert_file_data_to_store_value(self, file_data: dict[str, Any]) -> dict[str, Any]:
        """Convert FileData to a dict suitable for store.put().

        Args:
            file_data: The FileData to convert.

        Returns:
            Dictionary with content, created_at, and modified_at fields.
        """
        return {
            "content": file_data["content"],
            "created_at": file_data["created_at"],
            "modified_at": file_data["modified_at"],
        }

    def _search_store_paginated(
        self,
        store: BaseStore,
        namespace: tuple[str, ...],
        *,
        query: str | None = None,
        filter: dict[str, Any] | None = None,
        page_size: int = 100,
    ) -> list[Item]:
        """Search store with automatic pagination to retrieve all results.

        Args:
            store: The store to search.
            namespace: Hierarchical path prefix to search within.
            query: Optional query for natural language search.
            filter: Key-value pairs to filter results.
            page_size: Number of items to fetch per page (default: 100).

        Returns:
            List of all items matching the search criteria.

        Example:
            ```python
            store = self._get_store()
            namespace = self._get_namespace()
            all_items = self._search_store_paginated(store, namespace)
            ```
        """
        all_items: list[Item] = []
        offset = 0
        while True:
            page_items = store.search(
                namespace,
                query=query,
                filter=filter,
                limit=page_size,
                offset=offset,
            )
            if not page_items:
                break
            all_items.extend(page_items)
            if len(page_items) < page_size:
                break
            offset += page_size

        return all_items

    def ls(self, path: str) -> list[str]:
        """List files from store.

        Args:
            path: Absolute path to directory.

        Returns:
            List of file paths.
        """
        store = self._get_store()
        namespace = self._get_namespace()

        # Search store with path filter
        items = self._search_store_paginated(store, namespace, filter={"prefix": path})

        return [item.key for item in items]

    def read(
        self,
        file_path: str,
        offset: int = 0,
        limit: int = 2000,
    ) -> str:
        """Read file content with line numbers.

        Args:
            file_path: Absolute file path
            offset: Line offset to start reading from (0-indexed)
            limit: Maximum number of lines to read

        Returns:
            Formatted file content with line numbers, or error message.
        """
        store = self._get_store()
        namespace = self._get_namespace()
        item: Optional[Item] = store.get(namespace, file_path)

        if item is None:
            return f"Error: File '{file_path}' not found"

        try:
            file_data = self._convert_store_item_to_file_data(item)
        except ValueError as e:
            return f"Error: {e}"

        return format_read_response(file_data, offset, limit)

    def write(
        self,
        file_path: str,
        content: str,
    ) -> Command | str:
        """Create a new file with content.

        Args:
            file_path: Absolute file path
            content: File content as a string

        Returns:
            Success message or error if file already exists.
        """
        store = self._get_store()
        namespace = self._get_namespace()

        # Check if file exists
        existing = store.get(namespace, file_path)
        if existing is not None:
            return f"Cannot write to {file_path} because it already exists. Read and then make an edit, or write to a new path."

        # Create new file
        file_data = create_file_data(content)
        store_value = self._convert_file_data_to_store_value(file_data)
        store.put(namespace, file_path, store_value)

        return f"Updated file {file_path}"

    def edit(
        self,
        file_path: str,
        old_string: str,
        new_string: str,
        replace_all: bool = False,
    ) -> Command | str:
        """Edit a file by replacing string occurrences.

        Args:
            file_path: Absolute file path
            old_string: String to find and replace
            new_string: Replacement string
            replace_all: If True, replace all occurrences

        Returns:
            Success message or error message on failure.
        """
        store = self._get_store()
        namespace = self._get_namespace()

        # Get existing file
        item = store.get(namespace, file_path)
        if item is None:
            return f"Error: File '{file_path}' not found"

        try:
            file_data = self._convert_store_item_to_file_data(item)
        except ValueError as e:
            return f"Error: {e}"

        content = file_data_to_string(file_data)
        result = perform_string_replacement(content, old_string, new_string, replace_all)

        if isinstance(result, str):
            return result

        new_content, occurrences = result
        new_file_data = update_file_data(file_data, new_content)

        # Update file in store
        store_value = self._convert_file_data_to_store_value(new_file_data)
        store.put(namespace, file_path, store_value)

        return f"Successfully replaced {occurrences} instance(s) of the string in '{file_path}'"

    def delete(self, file_path: str) -> Command | None:
        """Delete file from store.

        Args:
            file_path: File path to delete

        Returns:
            None (direct store modification)
        """
        store = self._get_store()
        namespace = self._get_namespace()
        store.delete(namespace, file_path)

        return None

    def grep(
        self,
        pattern: str,
        path: str = "/",
        glob: Optional[str] = None,
        output_mode: str = "files_with_matches",
    ) -> str:
        """Search for a pattern in files.

        Args:
            pattern: String pattern to search for
            path: Path to search in (default "/")
            glob: Optional glob pattern to filter files (e.g., "*.py")
            output_mode: Output format - "files_with_matches", "content", or "count"

        Returns:
            Formatted search results based on output_mode.
        """
        store = self._get_store()
        namespace = self._get_namespace()

        items = self._search_store_paginated(store, namespace)

        files = {}
        for item in items:
            if item is None:
                continue
            try:
                file_data = self._convert_store_item_to_file_data(item)
                files[item.key] = file_data
            except ValueError:
                continue

        return _grep_search_files(files, pattern, path, glob, output_mode)

    def glob(self, pattern: str, path: str = "/") -> list[str]:
        """Find files matching a glob pattern.

        Args:
            pattern: Glob pattern (e.g., "**/*.py", "*.txt", "/subdir/**/*.md")
            path: Base path to search from (default "/")

        Returns:
            List of absolute file paths matching the pattern.
        """
        store = self._get_store()
        namespace = self._get_namespace()

        items = self._search_store_paginated(store, namespace)

        files = {}
        for item in items:
            if item is None:
                continue
            try:
                file_data = self._convert_store_item_to_file_data(item)
                files[item.key] = file_data
            except ValueError:
                continue

        result = _glob_search_files(files, pattern, path)
        if result == "No files found":
            return []
        return result.split("\n")
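
For orientation, here is a minimal sketch (not part of the package) of the item layout StoreBackend reads and writes, using LangGraph's InMemoryStore as a stand-in store. The ("filesystem",) namespace and the content/created_at/modified_at value shape follow the code above; the file path and contents are illustrative only.

```python
# Sketch only: InMemoryStore stands in for whatever BaseStore the runtime provides.
from datetime import UTC, datetime

from langgraph.store.memory import InMemoryStore

store = InMemoryStore()
namespace = ("filesystem",)  # or (assistant_id, "filesystem") when config metadata carries one
now = datetime.now(UTC).isoformat()

# A "file" is a single store item keyed by its absolute path.
store.put(
    namespace,
    "/notes/todo.md",
    {"content": ["- write docs", "- cut a release"], "created_at": now, "modified_at": now},
)

item = store.get(namespace, "/notes/todo.md")
print(item.value["content"])                       # ['- write docs', '- cut a release']
print([it.key for it in store.search(namespace)])  # ['/notes/todo.md']
```

Any BaseStore implementation should slot in the same way, since the backend only relies on the get/put/search/delete surface shown above.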
deepagents/memory/backends/utils.py
@@ -0,0 +1,319 @@
"""Shared utility functions for memory backend implementations."""

import re
import wcmatch.glob as wcglob
from datetime import UTC, datetime
from pathlib import Path
from typing import Any, Literal

EMPTY_CONTENT_WARNING = "System reminder: File exists but has empty contents"
MAX_LINE_LENGTH = 2000
LINE_NUMBER_WIDTH = 6


def format_content_with_line_numbers(
    content: str | list[str],
    start_line: int = 1,
) -> str:
    """Format file content with line numbers (cat -n style).

    Args:
        content: File content as string or list of lines
        start_line: Starting line number (default: 1)

    Returns:
        Formatted content with line numbers
    """
    if isinstance(content, str):
        lines = content.split("\n")
        if lines and lines[-1] == "":
            lines = lines[:-1]
    else:
        lines = content

    return "\n".join(
        f"{i + start_line:{LINE_NUMBER_WIDTH}d}\t{line[:MAX_LINE_LENGTH]}"
        for i, line in enumerate(lines)
    )


def check_empty_content(content: str) -> str | None:
    """Check if content is empty and return warning message.

    Args:
        content: Content to check

    Returns:
        Warning message if empty, None otherwise
    """
    if not content or content.strip() == "":
        return EMPTY_CONTENT_WARNING
    return None


def file_data_to_string(file_data: dict[str, Any]) -> str:
    """Convert FileData to plain string content.

    Args:
        file_data: FileData dict with 'content' key

    Returns:
        Content as string with lines joined by newlines
    """
    return "\n".join(file_data["content"])


def create_file_data(content: str, created_at: str | None = None) -> dict[str, Any]:
    """Create a FileData object with timestamps.

    Args:
        content: File content as string
        created_at: Optional creation timestamp (ISO format)

    Returns:
        FileData dict with content and timestamps
    """
    lines = content.split("\n") if isinstance(content, str) else content
    lines = [line[i:i+MAX_LINE_LENGTH] for line in lines for i in range(0, len(line) or 1, MAX_LINE_LENGTH)]
    now = datetime.now(UTC).isoformat()

    return {
        "content": lines,
        "created_at": created_at or now,
        "modified_at": now,
    }


def update_file_data(file_data: dict[str, Any], content: str) -> dict[str, Any]:
    """Update FileData with new content, preserving creation timestamp.

    Args:
        file_data: Existing FileData dict
        content: New content as string

    Returns:
        Updated FileData dict
    """
    lines = content.split("\n") if isinstance(content, str) else content
    lines = [line[i:i+MAX_LINE_LENGTH] for line in lines for i in range(0, len(line) or 1, MAX_LINE_LENGTH)]
    now = datetime.now(UTC).isoformat()

    return {
        "content": lines,
        "created_at": file_data["created_at"],
        "modified_at": now,
    }


def format_read_response(
    file_data: dict[str, Any],
    offset: int,
    limit: int,
) -> str:
    """Format file data for read response with line numbers.

    Args:
        file_data: FileData dict
        offset: Line offset (0-indexed)
        limit: Maximum number of lines

    Returns:
        Formatted content or error message
    """
    content = file_data_to_string(file_data)
    empty_msg = check_empty_content(content)
    if empty_msg:
        return empty_msg

    lines = content.splitlines()
    start_idx = offset
    end_idx = min(start_idx + limit, len(lines))

    if start_idx >= len(lines):
        return f"Error: Line offset {offset} exceeds file length ({len(lines)} lines)"

    selected_lines = lines[start_idx:end_idx]
    return format_content_with_line_numbers(selected_lines, start_line=start_idx + 1)


def perform_string_replacement(
    content: str,
    old_string: str,
    new_string: str,
    replace_all: bool,
) -> tuple[str, int] | str:
    """Perform string replacement with occurrence validation.

    Args:
        content: Original content
        old_string: String to replace
        new_string: Replacement string
        replace_all: Whether to replace all occurrences

    Returns:
        Tuple of (new_content, occurrences) on success, or error message string
    """
    occurrences = content.count(old_string)

    if occurrences == 0:
        return f"Error: String not found in file: '{old_string}'"

    if occurrences > 1 and not replace_all:
        return f"Error: String '{old_string}' appears {occurrences} times in file. Use replace_all=True to replace all instances, or provide a more specific string with surrounding context."

    new_content = content.replace(old_string, new_string)
    return new_content, occurrences


def _validate_path(path: str) -> str:
    """Validate and normalize a path.

    Args:
        path: Path to validate

    Returns:
        Normalized path starting with /

    Raises:
        ValueError: If path is invalid
    """
    if not path or path.strip() == "":
        raise ValueError("Path cannot be empty")

    normalized = path if path.startswith("/") else "/" + path

    if not normalized.endswith("/"):
        normalized += "/"

    return normalized


def _glob_search_files(
    files: dict[str, Any],
    pattern: str,
    path: str = "/",
) -> str:
    """Search files dict for paths matching glob pattern.

    Args:
        files: Dictionary of file paths to FileData.
        pattern: Glob pattern (e.g., "*.py", "**/*.ts").
        path: Base path to search from.

    Returns:
        Newline-separated file paths, sorted by modification time (most recent first).
        Returns "No files found" if no matches.

    Example:
        ```python
        files = {"/src/main.py": FileData(...), "/test.py": FileData(...)}
        _glob_search_files(files, "*.py", "/")
        # Returns: "/test.py\n/src/main.py" (sorted by modified_at)
        ```
    """
    try:
        normalized_path = _validate_path(path)
    except ValueError:
        return "No files found"

    filtered = {fp: fd for fp, fd in files.items() if fp.startswith(normalized_path)}

    matches = []
    for file_path, file_data in filtered.items():
        relative = file_path[len(normalized_path) :].lstrip("/")
        if not relative:
            relative = file_path.split("/")[-1]

        if wcglob.globmatch(relative, pattern, flags=wcglob.BRACE | wcglob.GLOBSTAR):
            matches.append((file_path, file_data["modified_at"]))

    matches.sort(key=lambda x: x[1], reverse=True)

    if not matches:
        return "No files found"

    return "\n".join(fp for fp, _ in matches)


def _format_grep_results(
    results: dict[str, list[tuple[int, str]]],
    output_mode: Literal["files_with_matches", "content", "count"],
) -> str:
    """Format grep search results based on output mode.

    Args:
        results: Dictionary mapping file paths to list of (line_num, line_content) tuples
        output_mode: Output format - "files_with_matches", "content", or "count"

    Returns:
        Formatted string output
    """
    if output_mode == "files_with_matches":
        return "\n".join(sorted(results.keys()))
    elif output_mode == "count":
        lines = []
        for file_path in sorted(results.keys()):
            count = len(results[file_path])
            lines.append(f"{file_path}: {count}")
        return "\n".join(lines)
    else:
        lines = []
        for file_path in sorted(results.keys()):
            lines.append(f"{file_path}:")
            for line_num, line in results[file_path]:
                lines.append(f"  {line_num}: {line}")
        return "\n".join(lines)


def _grep_search_files(
    files: dict[str, Any],
    pattern: str,
    path: str = "/",
    glob: str | None = None,
    output_mode: Literal["files_with_matches", "content", "count"] = "files_with_matches",
) -> str:
    """Search file contents for regex pattern.

    Args:
        files: Dictionary of file paths to FileData.
        pattern: Regex pattern to search for.
        path: Base path to search from.
        glob: Optional glob pattern to filter files (e.g., "*.py").
        output_mode: Output format - "files_with_matches", "content", or "count".

    Returns:
        Formatted search results. Returns "No matches found" if no results.

    Example:
        ```python
        files = {"/file.py": FileData(content=["import os", "print('hi')"], ...)}
        _grep_search_files(files, "import", "/")
        # Returns: "/file.py" (with output_mode="files_with_matches")
        ```
    """
    try:
        regex = re.compile(pattern)
    except re.error as e:
        return f"Invalid regex pattern: {e}"

    try:
        normalized_path = _validate_path(path)
    except ValueError:
        return "No matches found"

    filtered = {fp: fd for fp, fd in files.items() if fp.startswith(normalized_path)}

    if glob:
        filtered = {fp: fd for fp, fd in filtered.items() if wcglob.globmatch(Path(fp).name, glob, flags=wcglob.BRACE)}

    results: dict[str, list[tuple[int, str]]] = {}
    for file_path, file_data in filtered.items():
        for line_num, line in enumerate(file_data["content"], 1):
            if regex.search(line):
                if file_path not in results:
                    results[file_path] = []
                results[file_path].append((line_num, line))

    if not results:
        return "No matches found"
    return _format_grep_results(results, output_mode)
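
These helpers are pure functions over an in-memory {path: FileData} mapping, so they can be exercised directly. Below is a small illustrative round trip (not taken from the package); it assumes deepagents 0.0.3 is installed so the module path from the file list resolves, and note that the underscore-prefixed helpers are module-internal.

```python
# Illustrative round trip through the helpers in deepagents/memory/backends/utils.py.
from deepagents.memory.backends.utils import (
    _grep_search_files,
    create_file_data,
    file_data_to_string,
    perform_string_replacement,
    update_file_data,
)

file_data = create_file_data("import os\nprint('hi')\n")
result = perform_string_replacement(file_data_to_string(file_data), "hi", "hello", replace_all=False)
if not isinstance(result, str):  # a plain string would be an error message
    new_content, occurrences = result
    file_data = update_file_data(file_data, new_content)

print(_grep_search_files({"/demo.py": file_data}, r"print\(", "/", None, "content"))
# Expected output:
# /demo.py:
#   2: print('hello')
```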