noesium 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- noesium/agents/askura_agent/__init__.py +22 -0
- noesium/agents/askura_agent/askura_agent.py +480 -0
- noesium/agents/askura_agent/conversation.py +164 -0
- noesium/agents/askura_agent/extractor.py +175 -0
- noesium/agents/askura_agent/memory.py +14 -0
- noesium/agents/askura_agent/models.py +239 -0
- noesium/agents/askura_agent/prompts.py +202 -0
- noesium/agents/askura_agent/reflection.py +234 -0
- noesium/agents/askura_agent/summarizer.py +30 -0
- noesium/agents/askura_agent/utils.py +6 -0
- noesium/agents/deep_research/__init__.py +13 -0
- noesium/agents/deep_research/agent.py +398 -0
- noesium/agents/deep_research/prompts.py +84 -0
- noesium/agents/deep_research/schemas.py +42 -0
- noesium/agents/deep_research/state.py +54 -0
- noesium/agents/search/__init__.py +5 -0
- noesium/agents/search/agent.py +474 -0
- noesium/agents/search/state.py +28 -0
- noesium/core/__init__.py +1 -1
- noesium/core/agent/base.py +10 -2
- noesium/core/goalith/decomposer/llm_decomposer.py +1 -1
- noesium/core/llm/__init__.py +1 -1
- noesium/core/llm/base.py +2 -2
- noesium/core/llm/litellm.py +42 -21
- noesium/core/llm/llamacpp.py +25 -4
- noesium/core/llm/ollama.py +43 -22
- noesium/core/llm/openai.py +25 -5
- noesium/core/llm/openrouter.py +1 -1
- noesium/core/toolify/base.py +9 -2
- noesium/core/toolify/config.py +2 -2
- noesium/core/toolify/registry.py +21 -5
- noesium/core/tracing/opik_tracing.py +7 -7
- noesium/core/vector_store/__init__.py +2 -2
- noesium/core/vector_store/base.py +1 -1
- noesium/core/vector_store/pgvector.py +10 -13
- noesium/core/vector_store/weaviate.py +2 -1
- noesium/toolkits/__init__.py +1 -0
- noesium/toolkits/arxiv_toolkit.py +310 -0
- noesium/toolkits/audio_aliyun_toolkit.py +441 -0
- noesium/toolkits/audio_toolkit.py +370 -0
- noesium/toolkits/bash_toolkit.py +332 -0
- noesium/toolkits/document_toolkit.py +454 -0
- noesium/toolkits/file_edit_toolkit.py +552 -0
- noesium/toolkits/github_toolkit.py +395 -0
- noesium/toolkits/gmail_toolkit.py +575 -0
- noesium/toolkits/image_toolkit.py +425 -0
- noesium/toolkits/memory_toolkit.py +398 -0
- noesium/toolkits/python_executor_toolkit.py +334 -0
- noesium/toolkits/search_toolkit.py +451 -0
- noesium/toolkits/serper_toolkit.py +623 -0
- noesium/toolkits/tabular_data_toolkit.py +537 -0
- noesium/toolkits/user_interaction_toolkit.py +365 -0
- noesium/toolkits/video_toolkit.py +168 -0
- noesium/toolkits/wikipedia_toolkit.py +420 -0
- {noesium-0.1.0.dist-info → noesium-0.2.0.dist-info}/METADATA +56 -48
- {noesium-0.1.0.dist-info → noesium-0.2.0.dist-info}/RECORD +59 -23
- {noesium-0.1.0.dist-info → noesium-0.2.0.dist-info}/licenses/LICENSE +1 -1
- {noesium-0.1.0.dist-info → noesium-0.2.0.dist-info}/WHEEL +0 -0
- {noesium-0.1.0.dist-info → noesium-0.2.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,552 @@
|
|
|
1
|
+
"""
|
|
2
|
+
File editing toolkit for file operations and content management.
|
|
3
|
+
|
|
4
|
+
Provides tools for creating, reading, writing, and managing files with
|
|
5
|
+
safety features, backup capabilities, and comprehensive error handling.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import re
|
|
9
|
+
import shutil
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Callable, Dict, Optional
|
|
13
|
+
|
|
14
|
+
from noesium.core.toolify.base import AsyncBaseToolkit
|
|
15
|
+
from noesium.core.toolify.config import ToolkitConfig
|
|
16
|
+
from noesium.core.toolify.registry import register_toolkit
|
|
17
|
+
from noesium.core.utils.logging import get_logger
|
|
18
|
+
|
|
19
|
+
logger = get_logger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@register_toolkit("file_edit")
|
|
23
|
+
class FileEditToolkit(AsyncBaseToolkit):
|
|
24
|
+
"""
|
|
25
|
+
Toolkit for file operations and content management.
|
|
26
|
+
|
|
27
|
+
This toolkit provides comprehensive file management capabilities including:
|
|
28
|
+
- File creation, reading, and writing
|
|
29
|
+
- Directory operations
|
|
30
|
+
- File backup and versioning
|
|
31
|
+
- Safe filename handling
|
|
32
|
+
- Content search and replacement
|
|
33
|
+
- File metadata operations
|
|
34
|
+
|
|
35
|
+
Features:
|
|
36
|
+
- Automatic filename sanitization
|
|
37
|
+
- Backup creation before modifications
|
|
38
|
+
- Configurable working directory
|
|
39
|
+
- Multiple encoding support
|
|
40
|
+
- Path resolution and validation
|
|
41
|
+
- Comprehensive error handling
|
|
42
|
+
|
|
43
|
+
Safety features:
|
|
44
|
+
- Prevents overwriting without backup
|
|
45
|
+
- Validates file paths and permissions
|
|
46
|
+
- Sanitizes filenames to prevent security issues
|
|
47
|
+
- Configurable file size limits
|
|
48
|
+
"""
|
|
49
|
+
|
|
50
|
+
def __init__(self, config: ToolkitConfig = None):
|
|
51
|
+
"""
|
|
52
|
+
Initialize the file edit toolkit.
|
|
53
|
+
|
|
54
|
+
Args:
|
|
55
|
+
config: Toolkit configuration containing directory and settings
|
|
56
|
+
"""
|
|
57
|
+
super().__init__(config)
|
|
58
|
+
|
|
59
|
+
# Configuration
|
|
60
|
+
self.work_dir = Path(self.config.config.get("work_dir", "./file_workspace")).resolve()
|
|
61
|
+
self.default_encoding = self.config.config.get("default_encoding", "utf-8")
|
|
62
|
+
self.backup_enabled = self.config.config.get("backup_enabled", True)
|
|
63
|
+
self.max_file_size = self.config.config.get("max_file_size", 10 * 1024 * 1024) # 10MB
|
|
64
|
+
self.allowed_extensions = self.config.config.get("allowed_extensions", None) # None = all allowed
|
|
65
|
+
|
|
66
|
+
# Create working directory
|
|
67
|
+
self.work_dir.mkdir(parents=True, exist_ok=True)
|
|
68
|
+
|
|
69
|
+
# Backup directory
|
|
70
|
+
self.backup_dir = self.work_dir / ".backups"
|
|
71
|
+
if self.backup_enabled:
|
|
72
|
+
self.backup_dir.mkdir(parents=True, exist_ok=True)
|
|
73
|
+
|
|
74
|
+
self.logger.info(f"FileEditToolkit initialized with work directory: {self.work_dir}")
|
|
75
|
+
|
|
76
|
+
def _sanitize_filename(self, filename: str) -> str:
|
|
77
|
+
"""
|
|
78
|
+
Sanitize a filename by replacing unsafe characters.
|
|
79
|
+
|
|
80
|
+
Args:
|
|
81
|
+
filename: Original filename
|
|
82
|
+
|
|
83
|
+
Returns:
|
|
84
|
+
Sanitized filename safe for filesystem use
|
|
85
|
+
"""
|
|
86
|
+
# Replace unsafe characters with underscores
|
|
87
|
+
safe = re.sub(r"[^\w\-_.]", "_", filename)
|
|
88
|
+
|
|
89
|
+
# Remove multiple consecutive underscores
|
|
90
|
+
safe = re.sub(r"_+", "_", safe)
|
|
91
|
+
|
|
92
|
+
# Remove leading/trailing underscores and dots
|
|
93
|
+
safe = safe.strip("_.")
|
|
94
|
+
|
|
95
|
+
# Ensure filename is not empty
|
|
96
|
+
if not safe:
|
|
97
|
+
safe = "unnamed_file"
|
|
98
|
+
|
|
99
|
+
return safe
|
|
100
|
+
|
|
101
|
+
def _resolve_filepath(self, file_path: str, create_dirs: bool = False) -> Path:
|
|
102
|
+
"""
|
|
103
|
+
Resolve and validate a file path within the working directory.
|
|
104
|
+
|
|
105
|
+
Args:
|
|
106
|
+
file_path: File path to resolve
|
|
107
|
+
create_dirs: Whether to create parent directories
|
|
108
|
+
|
|
109
|
+
Returns:
|
|
110
|
+
Resolved Path object
|
|
111
|
+
|
|
112
|
+
Raises:
|
|
113
|
+
ValueError: If path is invalid or outside working directory
|
|
114
|
+
"""
|
|
115
|
+
# Convert to Path object
|
|
116
|
+
path = Path(file_path)
|
|
117
|
+
|
|
118
|
+
# If not absolute, make it relative to work_dir
|
|
119
|
+
if not path.is_absolute():
|
|
120
|
+
path = self.work_dir / path
|
|
121
|
+
|
|
122
|
+
# Resolve to absolute path
|
|
123
|
+
path = path.resolve()
|
|
124
|
+
|
|
125
|
+
# Security check: ensure path is within work_dir
|
|
126
|
+
try:
|
|
127
|
+
path.relative_to(self.work_dir)
|
|
128
|
+
except ValueError:
|
|
129
|
+
raise ValueError(f"Path outside working directory: {path}")
|
|
130
|
+
|
|
131
|
+
# Sanitize filename
|
|
132
|
+
if path.name:
|
|
133
|
+
sanitized_name = self._sanitize_filename(path.name)
|
|
134
|
+
path = path.parent / sanitized_name
|
|
135
|
+
|
|
136
|
+
# Create parent directories if requested
|
|
137
|
+
if create_dirs and path.parent != path:
|
|
138
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
139
|
+
|
|
140
|
+
return path
|
|
141
|
+
|
|
142
|
+
def _check_file_extension(self, file_path: Path) -> bool:
|
|
143
|
+
"""
|
|
144
|
+
Check if file extension is allowed.
|
|
145
|
+
|
|
146
|
+
Args:
|
|
147
|
+
file_path: Path to check
|
|
148
|
+
|
|
149
|
+
Returns:
|
|
150
|
+
True if extension is allowed
|
|
151
|
+
"""
|
|
152
|
+
if self.allowed_extensions is None:
|
|
153
|
+
return True
|
|
154
|
+
|
|
155
|
+
extension = file_path.suffix.lower()
|
|
156
|
+
return extension in [ext.lower() for ext in self.allowed_extensions]
|
|
157
|
+
|
|
158
|
+
def _create_backup(self, file_path: Path) -> Optional[Path]:
|
|
159
|
+
"""
|
|
160
|
+
Create a backup of an existing file.
|
|
161
|
+
|
|
162
|
+
Args:
|
|
163
|
+
file_path: Path to file to backup
|
|
164
|
+
|
|
165
|
+
Returns:
|
|
166
|
+
Path to backup file, or None if backup failed
|
|
167
|
+
"""
|
|
168
|
+
if not self.backup_enabled or not file_path.exists():
|
|
169
|
+
return None
|
|
170
|
+
|
|
171
|
+
try:
|
|
172
|
+
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
|
173
|
+
backup_name = f"{file_path.stem}_{timestamp}{file_path.suffix}"
|
|
174
|
+
backup_path = self.backup_dir / backup_name
|
|
175
|
+
|
|
176
|
+
shutil.copy2(file_path, backup_path)
|
|
177
|
+
self.logger.info(f"Created backup: {backup_path}")
|
|
178
|
+
return backup_path
|
|
179
|
+
|
|
180
|
+
except Exception as e:
|
|
181
|
+
self.logger.warning(f"Failed to create backup: {e}")
|
|
182
|
+
return None
|
|
183
|
+
|
|
184
|
+
async def create_file(
    self, file_path: str, content: str = "", encoding: Optional[str] = None, overwrite: bool = False
) -> str:
    """Create a new file with the specified content.

    Sanitizes the target name, refuses disallowed extensions, enforces
    the configured size limit, and backs up any file it replaces.

    Args:
        file_path: Path for the new file (relative to working directory).
        content: Initial content for the file (default: empty string).
        encoding: Text encoding to use (default: utf-8).
        overwrite: Whether to overwrite existing files (default: False).

    Returns:
        Success message with file path or error description.
    """
    enc = encoding or self.default_encoding

    try:
        target = self._resolve_filepath(file_path, create_dirs=True)

        if not self._check_file_extension(target):
            return f"Error: File extension not allowed: {target.suffix}"

        if target.exists() and not overwrite:
            return f"Error: File already exists: {target}. Use overwrite=True to replace."

        # Snapshot any existing file before replacing it.
        backup = self._create_backup(target)

        size = len(content.encode(enc))
        if size > self.max_file_size:
            return f"Error: Content too large ({size} bytes, max: {self.max_file_size})"

        with open(target, "w", encoding=enc) as fh:
            fh.write(content)

        message = f"Successfully created file: {target}"
        if backup:
            message += f" (backup created: {backup.name})"
        self.logger.info(message)
        return message

    except Exception as e:
        error_msg = f"Failed to create file '{file_path}': {str(e)}"
        self.logger.error(error_msg)
        return error_msg
|
|
245
|
+
|
|
246
|
+
async def read_file(self, file_path: str, encoding: Optional[str] = None) -> str:
    """Read the contents of a file inside the working directory.

    Args:
        file_path: Path to the file to read.
        encoding: Text encoding to use (default: utf-8).

    Returns:
        File contents or error message.
    """
    enc = encoding or self.default_encoding

    try:
        target = self._resolve_filepath(file_path)

        if not target.exists():
            return f"Error: File not found: {target}"
        if not target.is_file():
            return f"Error: Path is not a file: {target}"

        # Refuse files above the configured size limit.
        size = target.stat().st_size
        if size > self.max_file_size:
            return f"Error: File too large ({size} bytes, max: {self.max_file_size})"

        with open(target, "r", encoding=enc) as fh:
            text = fh.read()

        self.logger.info(f"Read file: {target} ({len(text)} characters)")
        return text

    except UnicodeDecodeError as e:
        return f"Error: Unable to decode file with {enc} encoding: {str(e)}"
    except Exception as e:
        error_msg = f"Failed to read file '{file_path}': {str(e)}"
        self.logger.error(error_msg)
        return error_msg
|
|
285
|
+
|
|
286
|
+
async def write_file(
    self, file_path: str, content: str, encoding: Optional[str] = None, append: bool = False
) -> str:
    """Write (or append) content to a file, backing up any existing version.

    Args:
        file_path: Path to the file to write.
        content: Content to write to the file.
        encoding: Text encoding to use (default: utf-8).
        append: Whether to append to existing content (default: False).

    Returns:
        Success message or error description.
    """
    enc = encoding or self.default_encoding

    try:
        target = self._resolve_filepath(file_path, create_dirs=True)

        if not self._check_file_extension(target):
            return f"Error: File extension not allowed: {target.suffix}"

        # Preserve the current contents before any modification.
        backup = self._create_backup(target)

        # The size limit applies to the final file, so appends count the
        # existing bytes as well.
        total = len(content.encode(enc))
        if append and target.exists():
            total += target.stat().st_size
        if total > self.max_file_size:
            return f"Error: Total content too large ({total} bytes, max: {self.max_file_size})"

        with open(target, "a" if append else "w", encoding=enc) as fh:
            fh.write(content)

        action = "appended to" if append else "written to"
        message = f"Successfully {action} file: {target}"
        if backup:
            message += f" (backup created: {backup.name})"
        self.logger.info(message)
        return message

    except Exception as e:
        error_msg = f"Failed to write file '{file_path}': {str(e)}"
        self.logger.error(error_msg)
        return error_msg
|
|
344
|
+
|
|
345
|
+
async def delete_file(self, file_path: str, create_backup: bool = True) -> str:
    """Delete a file, optionally keeping a timestamped backup copy first.

    Args:
        file_path: Path to the file to delete.
        create_backup: Whether to create a backup before deletion.

    Returns:
        Success message or error description.
    """
    try:
        target = self._resolve_filepath(file_path)

        if not target.exists():
            return f"Error: File not found: {target}"
        if not target.is_file():
            return f"Error: Path is not a file: {target}"

        # Optional safety net before removal (honors backup_enabled).
        backup = self._create_backup(target) if create_backup else None

        target.unlink()

        message = f"Successfully deleted file: {target}"
        if backup:
            message += f" (backup created: {backup.name})"
        self.logger.info(message)
        return message

    except Exception as e:
        error_msg = f"Failed to delete file '{file_path}': {str(e)}"
        self.logger.error(error_msg)
        return error_msg
|
|
384
|
+
|
|
385
|
+
async def list_files(self, directory: str = ".", pattern: str = "*") -> str:
    """List files and directories matching a glob pattern.

    The toolkit's internal ``.backups`` store is excluded from the
    listing, consistent with ``search_in_files`` which also skips it.

    Args:
        directory: Directory to list (relative to working directory).
        pattern: Glob pattern to filter files (default: "*" for all files).

    Returns:
        Formatted list of files and directories.
    """
    try:
        dir_path = self.work_dir if directory == "." else self._resolve_filepath(directory)

        if not dir_path.exists():
            return f"Error: Directory not found: {dir_path}"
        if not dir_path.is_dir():
            return f"Error: Path is not a directory: {dir_path}"

        # Hide the internal backup directory from listings.
        files = sorted(p for p in dir_path.glob(pattern) if p.name != ".backups")

        if not files:
            return f"No files found matching pattern '{pattern}' in {dir_path}"

        result_lines = [f"Files in {dir_path}:"]
        for file_path in files:
            if file_path.is_file():
                stat = file_path.stat()
                modified = datetime.fromtimestamp(stat.st_mtime)
                result_lines.append(
                    f" 📄 {file_path.name} ({stat.st_size} bytes, {modified.strftime('%Y-%m-%d %H:%M')})"
                )
            elif file_path.is_dir():
                result_lines.append(f" 📁 {file_path.name}/")

        return "\n".join(result_lines)

    except Exception as e:
        error_msg = f"Failed to list files in '{directory}': {str(e)}"
        self.logger.error(error_msg)
        return error_msg
|
|
432
|
+
|
|
433
|
+
async def search_in_files(self, pattern: str, directory: str = ".", file_pattern: str = "*") -> str:
    """Search file contents for a regex pattern (case-insensitive).

    Known-binary extensions and the internal backup directory are
    skipped; files that cannot be read are silently ignored.

    Args:
        pattern: Text pattern to search for (supports regex).
        directory: Directory to search in (default: current).
        file_pattern: File pattern to include in search (default: all files).

    Returns:
        Search results with file names and line numbers.
    """
    binary_suffixes = {".exe", ".bin", ".jpg", ".png", ".pdf"}

    try:
        dir_path = self.work_dir if directory == "." else self._resolve_filepath(directory)

        if not (dir_path.exists() and dir_path.is_dir()):
            return f"Error: Invalid directory: {dir_path}"

        try:
            regex = re.compile(pattern, re.IGNORECASE)
        except re.error as e:
            return f"Error: Invalid regex pattern: {e}"

        results = []
        files_searched = 0

        for candidate in dir_path.rglob(file_pattern):
            if not candidate.is_file():
                continue
            # Skip binary files and backup copies.
            if candidate.suffix.lower() in binary_suffixes:
                continue
            if candidate.parent.name == ".backups":
                continue

            try:
                with open(candidate, "r", encoding=self.default_encoding, errors="ignore") as fh:
                    results.extend(
                        f"{candidate.name}:{line_num}: {line.strip()}"
                        for line_num, line in enumerate(fh, 1)
                        if regex.search(line)
                    )
                files_searched += 1
            except Exception:
                continue  # Skip files that can't be read

        if not results:
            return f"No matches found for pattern '{pattern}' in {files_searched} files"

        result_text = f"Found {len(results)} matches in {files_searched} files:\n\n"
        result_text += "\n".join(results[:50])  # Limit to first 50 results
        if len(results) > 50:
            result_text += f"\n\n... and {len(results) - 50} more matches"

        return result_text

    except Exception as e:
        error_msg = f"Search failed: {str(e)}"
        self.logger.error(error_msg)
        return error_msg
|
|
500
|
+
|
|
501
|
+
async def get_file_info(self, file_path: str) -> str:
    """Report size, timestamps, type, extension and permissions of a file.

    Args:
        file_path: Path to the file.

    Returns:
        Formatted file information.
    """
    try:
        target = self._resolve_filepath(file_path)

        if not target.exists():
            return f"Error: File not found: {target}"

        stat = target.stat()

        def ts(epoch: float) -> str:
            # Render a POSIX timestamp in local time.
            return datetime.fromtimestamp(epoch).strftime("%Y-%m-%d %H:%M:%S")

        info_lines = [
            f"File Information: {target}",
            f"Size: {stat.st_size} bytes ({stat.st_size / 1024:.1f} KB)",
            f"Created: {ts(stat.st_ctime)}",
            f"Modified: {ts(stat.st_mtime)}",
            f"Accessed: {ts(stat.st_atime)}",
            f"Type: {'File' if target.is_file() else 'Directory'}",
            f"Extension: {target.suffix or 'None'}",
            f"Permissions: {oct(stat.st_mode)[-3:]}",
        ]
        return "\n".join(info_lines)

    except Exception as e:
        error_msg = f"Failed to get file info for '{file_path}': {str(e)}"
        self.logger.error(error_msg)
        return error_msg
|
|
536
|
+
|
|
537
|
+
async def get_tools_map(self) -> Dict[str, Callable]:
    """Expose this toolkit's tools by name.

    Returns:
        Dictionary mapping tool names to callable functions.
    """
    tool_names = (
        "create_file",
        "read_file",
        "write_file",
        "delete_file",
        "list_files",
        "search_in_files",
        "get_file_info",
    )
    return {name: getattr(self, name) for name in tool_names}
|