alita-sdk 0.3.465__py3-none-any.whl → 0.3.486__py3-none-any.whl
This diff compares the contents of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/cli/agent/__init__.py +5 -0
- alita_sdk/cli/agent/default.py +83 -1
- alita_sdk/cli/agent_loader.py +6 -9
- alita_sdk/cli/agent_ui.py +13 -3
- alita_sdk/cli/agents.py +1866 -185
- alita_sdk/cli/callbacks.py +96 -25
- alita_sdk/cli/cli.py +10 -1
- alita_sdk/cli/config.py +151 -9
- alita_sdk/cli/context/__init__.py +30 -0
- alita_sdk/cli/context/cleanup.py +198 -0
- alita_sdk/cli/context/manager.py +731 -0
- alita_sdk/cli/context/message.py +285 -0
- alita_sdk/cli/context/strategies.py +289 -0
- alita_sdk/cli/context/token_estimation.py +127 -0
- alita_sdk/cli/input_handler.py +167 -4
- alita_sdk/cli/inventory.py +1256 -0
- alita_sdk/cli/toolkit.py +14 -17
- alita_sdk/cli/toolkit_loader.py +35 -5
- alita_sdk/cli/tools/__init__.py +8 -1
- alita_sdk/cli/tools/filesystem.py +815 -55
- alita_sdk/cli/tools/planning.py +143 -157
- alita_sdk/cli/tools/terminal.py +154 -20
- alita_sdk/community/__init__.py +64 -8
- alita_sdk/community/inventory/__init__.py +224 -0
- alita_sdk/community/inventory/config.py +257 -0
- alita_sdk/community/inventory/enrichment.py +2137 -0
- alita_sdk/community/inventory/extractors.py +1469 -0
- alita_sdk/community/inventory/ingestion.py +3172 -0
- alita_sdk/community/inventory/knowledge_graph.py +1457 -0
- alita_sdk/community/inventory/parsers/__init__.py +218 -0
- alita_sdk/community/inventory/parsers/base.py +295 -0
- alita_sdk/community/inventory/parsers/csharp_parser.py +907 -0
- alita_sdk/community/inventory/parsers/go_parser.py +851 -0
- alita_sdk/community/inventory/parsers/html_parser.py +389 -0
- alita_sdk/community/inventory/parsers/java_parser.py +593 -0
- alita_sdk/community/inventory/parsers/javascript_parser.py +629 -0
- alita_sdk/community/inventory/parsers/kotlin_parser.py +768 -0
- alita_sdk/community/inventory/parsers/markdown_parser.py +362 -0
- alita_sdk/community/inventory/parsers/python_parser.py +604 -0
- alita_sdk/community/inventory/parsers/rust_parser.py +858 -0
- alita_sdk/community/inventory/parsers/swift_parser.py +832 -0
- alita_sdk/community/inventory/parsers/text_parser.py +322 -0
- alita_sdk/community/inventory/parsers/yaml_parser.py +370 -0
- alita_sdk/community/inventory/patterns/__init__.py +61 -0
- alita_sdk/community/inventory/patterns/ast_adapter.py +380 -0
- alita_sdk/community/inventory/patterns/loader.py +348 -0
- alita_sdk/community/inventory/patterns/registry.py +198 -0
- alita_sdk/community/inventory/presets.py +535 -0
- alita_sdk/community/inventory/retrieval.py +1403 -0
- alita_sdk/community/inventory/toolkit.py +169 -0
- alita_sdk/community/inventory/visualize.py +1370 -0
- alita_sdk/configurations/bitbucket.py +0 -3
- alita_sdk/runtime/clients/client.py +84 -26
- alita_sdk/runtime/langchain/assistant.py +4 -2
- alita_sdk/runtime/langchain/langraph_agent.py +122 -31
- alita_sdk/runtime/llms/preloaded.py +2 -6
- alita_sdk/runtime/toolkits/__init__.py +2 -0
- alita_sdk/runtime/toolkits/application.py +1 -1
- alita_sdk/runtime/toolkits/mcp.py +46 -36
- alita_sdk/runtime/toolkits/planning.py +171 -0
- alita_sdk/runtime/toolkits/tools.py +39 -6
- alita_sdk/runtime/tools/llm.py +185 -8
- alita_sdk/runtime/tools/planning/__init__.py +36 -0
- alita_sdk/runtime/tools/planning/models.py +246 -0
- alita_sdk/runtime/tools/planning/wrapper.py +607 -0
- alita_sdk/runtime/tools/vectorstore_base.py +41 -6
- alita_sdk/runtime/utils/mcp_oauth.py +80 -0
- alita_sdk/runtime/utils/streamlit.py +6 -10
- alita_sdk/runtime/utils/toolkit_utils.py +19 -4
- alita_sdk/tools/__init__.py +54 -27
- alita_sdk/tools/ado/repos/repos_wrapper.py +1 -2
- alita_sdk/tools/base_indexer_toolkit.py +98 -19
- alita_sdk/tools/bitbucket/__init__.py +2 -2
- alita_sdk/tools/chunkers/__init__.py +3 -1
- alita_sdk/tools/chunkers/sematic/markdown_chunker.py +95 -6
- alita_sdk/tools/chunkers/universal_chunker.py +269 -0
- alita_sdk/tools/code_indexer_toolkit.py +55 -22
- alita_sdk/tools/elitea_base.py +86 -21
- alita_sdk/tools/jira/__init__.py +1 -1
- alita_sdk/tools/jira/api_wrapper.py +91 -40
- alita_sdk/tools/non_code_indexer_toolkit.py +1 -0
- alita_sdk/tools/qtest/__init__.py +1 -1
- alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +8 -2
- alita_sdk/tools/zephyr_essential/api_wrapper.py +12 -13
- {alita_sdk-0.3.465.dist-info → alita_sdk-0.3.486.dist-info}/METADATA +2 -1
- {alita_sdk-0.3.465.dist-info → alita_sdk-0.3.486.dist-info}/RECORD +90 -50
- {alita_sdk-0.3.465.dist-info → alita_sdk-0.3.486.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.465.dist-info → alita_sdk-0.3.486.dist-info}/entry_points.txt +0 -0
- {alita_sdk-0.3.465.dist-info → alita_sdk-0.3.486.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.465.dist-info → alita_sdk-0.3.486.dist-info}/top_level.txt +0 -0
alita_sdk/cli/tools/filesystem.py

@@ -3,14 +3,56 @@ Filesystem tools for CLI agents.
 
 Provides comprehensive file system operations restricted to specific directories.
 Inspired by MCP filesystem server implementation.
+
+Also provides a FilesystemApiWrapper for integration with the inventory ingestion
+pipeline, enabling local document loading and chunking.
 """
 
+import base64
+import fnmatch
+import hashlib
+import logging
 import os
 from pathlib import Path
-from typing import Optional, List, Dict, Any
+from typing import Optional, List, Dict, Any, Generator, ClassVar
 from datetime import datetime
-from langchain_core.tools import BaseTool
-from pydantic import BaseModel, Field
+from langchain_core.tools import BaseTool, ToolException
+from langchain_core.documents import Document
+from pydantic import BaseModel, Field, model_validator
+
+logger = logging.getLogger(__name__)
+
+
+# Maximum recommended content size for single write operations (in characters)
+MAX_RECOMMENDED_CONTENT_SIZE = 5000  # ~5KB, roughly 1,200-1,500 tokens
+
+# Helpful error message for truncated content
+CONTENT_TRUNCATED_ERROR = """
+⚠️ CONTENT FIELD MISSING - OUTPUT TRUNCATED
+
+Your tool call was cut off because the content was too large for the context window.
+The JSON was truncated, leaving the 'content' field incomplete or missing.
+
+🔧 HOW TO FIX THIS:
+
+1. **Use incremental writes** - Don't write large files in one call:
+   - First: filesystem_write_file(path, "# Header\\nimport x\\n\\n")
+   - Then: filesystem_append_file(path, "def func1():\\n    ...\\n\\n")
+   - Then: filesystem_append_file(path, "def func2():\\n    ...\\n\\n")
+
+2. **Keep each chunk small** - Under 2000 characters per call
+
+3. **Structure first, details later**:
+   - Write skeleton/structure first
+   - Add implementations section by section
+
+4. **For documentation/reports**:
+   - Write one section at a time
+   - Use append_file for each new section
+
+❌ DON'T: Try to write the entire file content again
+✅ DO: Break it into 3-5 smaller append_file calls
+"""
 
 
 class ReadFileInput(BaseModel):
@@ -47,7 +89,38 @@ class ReadMultipleFilesInput(BaseModel):
 class WriteFileInput(BaseModel):
     """Input for writing to a file."""
     path: str = Field(description="Relative path to the file to write")
-    content: str = Field(
+    content: Optional[str] = Field(
+        default=None,
+        description="Content to write to the file. REQUIRED - this field cannot be empty or omitted."
+    )
+
+    @model_validator(mode='after')
+    def validate_content_required(self):
+        """Provide helpful error message when content is missing or truncated."""
+        if self.content is None:
+            raise ToolException(CONTENT_TRUNCATED_ERROR)
+        if len(self.content) > MAX_RECOMMENDED_CONTENT_SIZE:
+            logger.warning(
+                f"Content is very large ({len(self.content)} chars). Consider using append_file "
+                "for incremental writes to avoid truncation issues."
+            )
+        return self
+
+
+class AppendFileInput(BaseModel):
+    """Input for appending to a file."""
+    path: str = Field(description="Relative path to the file to append to")
+    content: Optional[str] = Field(
+        default=None,
+        description="Content to append to the end of the file. REQUIRED - this field cannot be empty or omitted."
+    )
+
+    @model_validator(mode='after')
+    def validate_content_required(self):
+        """Provide helpful error message when content is missing or truncated."""
+        if self.content is None:
+            raise ToolException(CONTENT_TRUNCATED_ERROR)
+        return self
 
 
 class EditFileInput(BaseModel):
@@ -67,13 +140,15 @@ class ListDirectoryInput(BaseModel):
 class DirectoryTreeInput(BaseModel):
     """Input for getting a directory tree."""
     path: str = Field(default=".", description="Relative path to the directory")
-    max_depth: Optional[int] = Field(
+    max_depth: Optional[int] = Field(default=3, description="Maximum depth to traverse. Default is 3 to prevent excessive output. Use None for unlimited (caution: may exceed context limits).")
+    max_items: Optional[int] = Field(default=200, description="Maximum number of files/directories to include. Default is 200 to prevent context window overflow. Use None for unlimited (caution: large directories may exceed context limits).")
 
 
 class SearchFilesInput(BaseModel):
     """Input for searching files."""
     path: str = Field(default=".", description="Relative path to search from")
     pattern: str = Field(description="Glob pattern to match (e.g., '*.py', '**/*.txt')")
+    max_results: Optional[int] = Field(default=100, description="Maximum number of results to return. Default is 100 to prevent context overflow. Use None for unlimited.")
 
 
 class DeleteFileInput(BaseModel):
@@ -104,29 +179,58 @@ class EmptyInput(BaseModel):
 
 class FileSystemTool(BaseTool):
     """Base class for filesystem tools with directory restriction."""
-    base_directory: str
+    base_directory: str  # Primary directory (for backward compatibility)
+    allowed_directories: List[str] = []  # Additional allowed directories
+
+    def _get_all_allowed_directories(self) -> List[Path]:
+        """Get all allowed directories as resolved Paths."""
+        dirs = [Path(self.base_directory).resolve()]
+        for d in self.allowed_directories:
+            resolved = Path(d).resolve()
+            if resolved not in dirs:
+                dirs.append(resolved)
+        return dirs
 
     def _resolve_path(self, relative_path: str) -> Path:
         """
-        Resolve and validate a path within the
+        Resolve and validate a path within any of the allowed directories.
 
-        Security: Ensures resolved path is within allowed
+        Security: Ensures resolved path is within one of the allowed directories.
         """
-
+        allowed_dirs = self._get_all_allowed_directories()
 
-        # Handle
+        # Handle absolute paths - check if within any allowed directory
         if Path(relative_path).is_absolute():
             target = Path(relative_path).resolve()
-
-
+            for base in allowed_dirs:
+                try:
+                    target.relative_to(base)
+                    return target
+                except ValueError:
+                    continue
+            raise ValueError(f"Access denied: path '{relative_path}' is outside allowed directories")
 
-        #
-
-
-
-
+        # For relative paths, try to resolve against each allowed directory
+        # First check primary base_directory
+        primary_base = allowed_dirs[0]
+        target = (primary_base / relative_path).resolve()
+
+        # Check if target is within any allowed directory
+        for base in allowed_dirs:
+            try:
+                target.relative_to(base)
+                return target
+            except ValueError:
+                continue
+
+        # If relative path doesn't work from primary, try finding the file in other directories
+        for base in allowed_dirs[1:]:
+            candidate = (base / relative_path).resolve()
+            if candidate.exists():
+                return candidate
 
-
+        # Default to primary base directory resolution
+        raise ValueError(f"Access denied: path '{relative_path}' is outside allowed directories")
 
     def _format_size(self, size: int) -> str:
        """Format file size in human-readable format."""
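
With this change, a relative path is resolved against the primary base_directory first and then checked against every allowed root, while absolute paths are accepted only inside an allowed root. A minimal sketch of the resulting behavior (the /work and /shared paths are hypothetical, and _resolve_path is called directly here only for illustration):

    tool = ReadFileTool(base_directory="/work", allowed_directories=["/shared"])

    tool._resolve_path("notes.md")         # resolves to /work/notes.md
    tool._resolve_path("/shared/a.txt")    # accepted: inside an extra allowed root
    tool._resolve_path("../etc/passwd")    # raises ValueError: outside all allowed roots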
@@ -147,6 +251,11 @@ class ReadFileTool(FileSystemTool):
         "Only works within allowed directories."
     )
     args_schema: type[BaseModel] = ReadFileInput
+    truncation_suggestions: ClassVar[List[str]] = [
+        "Use head=100 to read only the first 100 lines",
+        "Use tail=100 to read only the last 100 lines",
+        "Use filesystem_read_file_chunk with start_line and end_line for specific sections",
+    ]
 
     def _run(self, path: str, head: Optional[int] = None, tail: Optional[int] = None) -> str:
         """Read a file with optional head/tail."""
@@ -196,6 +305,10 @@ class ReadFileChunkTool(FileSystemTool):
         "Only works within allowed directories."
     )
     args_schema: type[BaseModel] = ReadFileChunkInput
+    truncation_suggestions: ClassVar[List[str]] = [
+        "Reduce the line range (end_line - start_line) to read fewer lines at once",
+        "Read smaller chunks sequentially if you need to process the entire file",
+    ]
 
     def _run(self, path: str, start_line: int = 1, end_line: Optional[int] = None) -> str:
         """Read a chunk of a file by line range."""
@@ -246,6 +359,10 @@ class ReadMultipleFilesTool(FileSystemTool):
         "Only works within allowed directories."
     )
     args_schema: type[BaseModel] = ReadMultipleFilesInput
+    truncation_suggestions: ClassVar[List[str]] = [
+        "Read fewer files at once - split into multiple smaller batches",
+        "Use filesystem_read_file with head parameter on individual large files instead",
+    ]
 
     def _run(self, paths: List[str]) -> str:
         """Read multiple files."""
@@ -291,6 +408,43 @@ class WriteFileTool(FileSystemTool):
             return f"Error writing to file '{path}': {str(e)}"
 
 
+class AppendFileTool(FileSystemTool):
+    """Append content to the end of a file."""
+    name: str = "filesystem_append_file"
+    description: str = (
+        "Append content to the end of an existing file. Creates the file if it doesn't exist. "
+        "Use this for incremental file creation - write initial structure with write_file, "
+        "then add sections progressively with append_file. This is safer than rewriting "
+        "entire files and prevents context overflow. Only works within allowed directories."
+    )
+    args_schema: type[BaseModel] = AppendFileInput
+
+    def _run(self, path: str, content: str) -> str:
+        """Append to a file."""
+        try:
+            target = self._resolve_path(path)
+
+            # Create parent directories if they don't exist
+            target.parent.mkdir(parents=True, exist_ok=True)
+
+            # Check current file size if it exists
+            existed = target.exists()
+            original_size = target.stat().st_size if existed else 0
+
+            with open(target, 'a', encoding='utf-8') as f:
+                f.write(content)
+
+            appended_size = len(content.encode('utf-8'))
+            new_size = original_size + appended_size
+
+            if existed:
+                return f"Successfully appended {self._format_size(appended_size)} to '{path}' (total: {self._format_size(new_size)})"
+            else:
+                return f"Created '{path}' and wrote {self._format_size(appended_size)}"
+        except Exception as e:
+            return f"Error appending to file '{path}': {str(e)}"
+
+
 class EditFileTool(FileSystemTool):
     """Edit file with precise text replacement."""
     name: str = "filesystem_edit_file"
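
Together with the content validators above, the new tool enables the incremental pattern recommended by CONTENT_TRUNCATED_ERROR: one write_file call for the skeleton, then small append_file calls per section. A short sketch (the directory, file name, and section contents are illustrative; _run is called directly here for brevity):

    write_tool = WriteFileTool(base_directory="/work")
    append_tool = AppendFileTool(base_directory="/work")

    write_tool._run("report.md", "# Report\n\n")                # skeleton first
    append_tool._run("report.md", "## Findings\n...\n\n")       # then one section
    append_tool._run("report.md", "## Recommendations\n...\n")  # per call, kept small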
@@ -443,6 +597,10 @@ class ListDirectoryTool(FileSystemTool):
         "Only works within allowed directories."
     )
     args_schema: type[BaseModel] = ListDirectoryInput
+    truncation_suggestions: ClassVar[List[str]] = [
+        "List a specific subdirectory instead of the root directory",
+        "Consider using filesystem_directory_tree with max_depth=1 for hierarchical overview",
+    ]
 
     def _run(self, path: str = ".", include_sizes: bool = False, sort_by: str = "name") -> str:
         """List directory contents."""
@@ -494,13 +652,23 @@ class ListDirectoryTool(FileSystemTool):
 
             result = "\n".join(lines)
 
+            # Add header showing the listing context
+            if path in (".", "", "./"):
+                header = "Contents of working directory (./):\n\n"
+            else:
+                header = f"Contents of {path}/:\n\n"
+            result = header + result
+
             if include_sizes:
                 summary = f"\n\nTotal: {total_files} files, {total_dirs} directories"
                 if total_files > 0:
                     summary += f"\nCombined size: {self._format_size(total_size)}"
                 result += summary
 
-
+            # Add note about how to access files
+            result += "\n\nNote: Access files using paths shown above (e.g., 'agents/file.md' for items in agents/ directory)"
+
+            return result if lines else "Directory is empty"
         except Exception as e:
             return f"Error listing directory '{path}': {str(e)}"
 
@@ -511,25 +679,51 @@ class DirectoryTreeTool(FileSystemTool):
     description: str = (
         "Get a recursive tree view of files and directories. "
         "Shows the complete structure in an easy-to-read tree format. "
-        "
+        "IMPORTANT: For large directories, use max_depth (default: 3) and max_items (default: 200) "
+        "to prevent context window overflow. Increase these only if needed for smaller directories. "
         "Only works within allowed directories."
     )
     args_schema: type[BaseModel] = DirectoryTreeInput
+    truncation_suggestions: ClassVar[List[str]] = [
+        "Use max_depth=2 to limit directory traversal depth",
+        "Use max_items=50 to limit total items returned",
+        "Target a specific subdirectory instead of the root",
+    ]
+
+    # Track item count during tree building
+    _item_count: int = 0
+    _max_items: Optional[int] = None
+    _truncated: bool = False
 
     def _build_tree(self, directory: Path, prefix: str = "", depth: int = 0, max_depth: Optional[int] = None) -> List[str]:
-        """Recursively build directory tree."""
+        """Recursively build directory tree with item limit."""
+        # Check depth limit
         if max_depth is not None and depth >= max_depth:
             return []
 
+        # Check item limit
+        if self._max_items is not None and self._item_count >= self._max_items:
+            if not self._truncated:
+                self._truncated = True
+            return []
+
         lines = []
         try:
             entries = sorted(directory.iterdir(), key=lambda x: (not x.is_dir(), x.name.lower()))
 
             for i, entry in enumerate(entries):
+                # Check item limit before adding each entry
+                if self._max_items is not None and self._item_count >= self._max_items:
+                    if not self._truncated:
+                        self._truncated = True
+                    break
+
                 is_last = i == len(entries) - 1
                 current_prefix = "└── " if is_last else "├── "
                 next_prefix = "    " if is_last else "│   "
 
+                self._item_count += 1
+
                 if entry.is_dir():
                     lines.append(f"{prefix}{current_prefix}📁 {entry.name}/")
                     lines.extend(self._build_tree(entry, prefix + next_prefix, depth + 1, max_depth))
@@ -541,8 +735,8 @@ class DirectoryTreeTool(FileSystemTool):
 
         return lines
 
-    def _run(self, path: str = ".", max_depth: Optional[int] =
-        """Get directory tree."""
+    def _run(self, path: str = ".", max_depth: Optional[int] = 3, max_items: Optional[int] = 200) -> str:
+        """Get directory tree with size limits to prevent context overflow."""
         try:
             target = self._resolve_path(path)
 
@@ -552,9 +746,31 @@ class DirectoryTreeTool(FileSystemTool):
             if not target.is_dir():
                 return f"Error: '{path}' is not a directory"
 
-
+            # Reset counters for this run
+            self._item_count = 0
+            self._max_items = max_items
+            self._truncated = False
+
+            # Show relative path from base directory, use '.' for root
+            # This prevents confusion - files should be accessed relative to working directory
+            if path in (".", "", "./"):
+                display_root = "."  # Root of working directory
+            else:
+                display_root = path.rstrip('/')
+
+            lines = [f"📁 {display_root}/"]
             lines.extend(self._build_tree(target, "", 0, max_depth))
 
+            # Add truncation warning if limit was reached
+            if self._truncated:
+                lines.append("")
+                lines.append(f"⚠️ OUTPUT TRUNCATED: Showing {self._item_count} of more items (max_items={max_items}, max_depth={max_depth})")
+                lines.append(f"   To see more: increase max_items or max_depth, or use filesystem_list_directory on specific subdirectories")
+
+            # Add note about file paths
+            lines.append("")
+            lines.append("Note: Use paths relative to working directory (e.g., 'agents/file.md', not including the root directory name)")
+
             return "\n".join(lines)
         except Exception as e:
             return f"Error building directory tree for '{path}': {str(e)}"
@@ -566,13 +782,18 @@ class SearchFilesTool(FileSystemTool):
     description: str = (
         "Recursively search for files and directories matching a glob pattern. "
         "Use patterns like '*.py' for Python files in current dir, or '**/*.py' for all Python files recursively. "
-        "Returns
+        "Returns paths to matching items (default limit: 100 results to prevent context overflow). "
         "Only searches within allowed directories."
     )
     args_schema: type[BaseModel] = SearchFilesInput
+    truncation_suggestions: ClassVar[List[str]] = [
+        "Use max_results=50 to limit number of results",
+        "Use a more specific glob pattern (e.g., 'src/**/*.py' instead of '**/*.py')",
+        "Search in a specific subdirectory instead of the root",
+    ]
 
-    def _run(self, path: str = ".", pattern: str = "*") -> str:
-        """Search for files."""
+    def _run(self, path: str = ".", pattern: str = "*", max_results: Optional[int] = 100) -> str:
+        """Search for files with result limit."""
         try:
             target = self._resolve_path(path)
 
@@ -583,19 +804,25 @@ class SearchFilesTool(FileSystemTool):
                 return f"Error: '{path}' is not a directory"
 
             # Use glob to find matching files
-
-
-            else:
-                matches = list(target.glob(pattern))
+            all_matches = list(target.glob(pattern))
+            total_count = len(all_matches)
 
-            if not
+            if not all_matches:
                 return f"No files matching '{pattern}' found in '{path}'"
 
+            # Apply limit
+            truncated = False
+            if max_results is not None and total_count > max_results:
+                matches = sorted(all_matches)[:max_results]
+                truncated = True
+            else:
+                matches = sorted(all_matches)
+
             # Format results
             base = Path(self.base_directory).resolve()
             results = []
 
-            for match in
+            for match in matches:
                 rel_path = match.relative_to(base)
                 if match.is_dir():
                     results.append(f"📁 {rel_path}/")
@@ -603,8 +830,14 @@ class SearchFilesTool(FileSystemTool):
                     size = self._format_size(match.stat().st_size)
                     results.append(f"📄 {rel_path} ({size})")
 
-            header = f"Found {
-
+            header = f"Found {total_count} matches for '{pattern}':\n\n"
+            output = header + "\n".join(results)
+
+            if truncated:
+                output += f"\n\n⚠️ OUTPUT TRUNCATED: Showing {max_results} of {total_count} results (max_results={max_results})"
+                output += "\n   To see more: increase max_results or use a more specific pattern"
+
+            return output
         except Exception as e:
             return f"Error searching files in '{path}': {str(e)}"
 
@@ -753,7 +986,524 @@ class ListAllowedDirectoriesTool(FileSystemTool):
 
     def _run(self) -> str:
         """List allowed directories."""
-
+        dirs = self._get_all_allowed_directories()
+        if len(dirs) == 1:
+            return f"Allowed directory:\n{dirs[0]}\n\nAll subdirectories within this path are accessible."
+        else:
+            dir_list = "\n".join(f"  - {d}" for d in dirs)
+            return f"Allowed directories:\n{dir_list}\n\nAll subdirectories within these paths are accessible."
+
+
+# ========== Filesystem API Wrapper for Inventory Ingestion ==========
+
+class FilesystemApiWrapper:
+    """
+    API Wrapper for filesystem operations compatible with inventory ingestion pipeline.
+
+    Supports both text and non-text files:
+    - Text files: .py, .md, .txt, .json, .yaml, etc.
+    - Documents: .pdf, .docx, .pptx, .xlsx, .xls (converted to markdown)
+    - Images: .png, .jpg, .gif, .webp (base64 encoded or described via LLM)
+
+    Usage:
+        # Create wrapper for a directory
+        wrapper = FilesystemApiWrapper(base_directory="/path/to/docs")
+
+        # Load documents (uses inherited loader())
+        for doc in wrapper.loader(whitelist=["*.md", "*.pdf"]):
+            print(doc.page_content[:100])
+
+        # For image description, provide an LLM
+        wrapper = FilesystemApiWrapper(base_directory="/path/to/docs", llm=my_llm)
+        for doc in wrapper.loader(whitelist=["*.png"]):
+            print(doc.page_content)  # LLM-generated description
+
+        # Use with inventory ingestion
+        pipeline = IngestionPipeline(llm=llm, graph_path="./graph.json")
+        pipeline.register_toolkit("local_docs", wrapper)
+        result = pipeline.run(source="local_docs", whitelist=["*.md", "*.pdf"])
+    """
+
+    # Filesystem-specific settings
+    base_directory: str = ""
+    recursive: bool = True
+    follow_symlinks: bool = False
+    llm: Any = None  # Optional LLM for image processing
+
+    # File type categories
+    BINARY_EXTENSIONS = {'.pdf', '.docx', '.doc', '.pptx', '.ppt', '.xlsx', '.xls'}
+    IMAGE_EXTENSIONS = {'.png', '.jpg', '.jpeg', '.gif', '.webp', '.bmp', '.svg'}
+
+    def __init__(
+        self,
+        base_directory: str,
+        recursive: bool = True,
+        follow_symlinks: bool = False,
+        llm: Any = None,
+        **kwargs
+    ):
+        """
+        Initialize filesystem wrapper.
+
+        Args:
+            base_directory: Root directory for file operations
+            recursive: If True, search subdirectories recursively
+            follow_symlinks: If True, follow symbolic links
+            llm: Optional LLM for image description (if not provided, images are base64 encoded)
+            **kwargs: Additional arguments (ignored, for compatibility)
+        """
+        self.base_directory = str(Path(base_directory).resolve())
+        self.recursive = recursive
+        self.follow_symlinks = follow_symlinks
+        self.llm = llm
+
+        # For compatibility with BaseCodeToolApiWrapper.loader()
+        self.active_branch = None
+
+        # Validate directory
+        if not Path(self.base_directory).exists():
+            raise ValueError(f"Directory does not exist: {self.base_directory}")
+        if not Path(self.base_directory).is_dir():
+            raise ValueError(f"Path is not a directory: {self.base_directory}")
+
+        # Optional RunnableConfig for CLI/standalone usage
+        self._runnable_config = None
+
+    def set_runnable_config(self, config: Optional[Dict[str, Any]]) -> None:
+        """
+        Set the RunnableConfig for dispatching custom events.
+
+        This is required when running outside of a LangChain agent context
+        (e.g., from CLI). Without a config containing a run_id,
+        dispatch_custom_event will fail with "Unable to dispatch an adhoc event
+        without a parent run id".
+
+        Args:
+            config: A RunnableConfig dict with at least {'run_id': uuid}
+        """
+        self._runnable_config = config
+
+    def _log_tool_event(self, message: str, tool_name: str = None, config: Optional[Dict[str, Any]] = None):
+        """Log progress events (mirrors BaseToolApiWrapper).
+
+        Args:
+            message: The message to log
+            tool_name: Name of the tool (defaults to 'filesystem')
+            config: Optional RunnableConfig. If not provided, uses self._runnable_config.
+                Required when running outside a LangChain agent context.
+        """
+        logger.info(f"[{tool_name or 'filesystem'}] {message}")
+        try:
+            from langchain_core.callbacks import dispatch_custom_event
+
+            # Use provided config, fall back to instance config
+            effective_config = config or getattr(self, '_runnable_config', None)
+
+            dispatch_custom_event(
+                name="thinking_step",
+                data={
+                    "message": message,
+                    "tool_name": tool_name or "filesystem",
+                    "toolkit": "FilesystemApiWrapper",
+                },
+                config=effective_config,
+            )
+        except Exception:
+            pass
+
+    def _get_files(self, path: str = "", branch: str = None) -> List[str]:
+        """
+        Get list of files in the directory.
+
+        Implements BaseCodeToolApiWrapper._get_files() for filesystem.
+
+        Args:
+            path: Subdirectory path (relative to base_directory)
+            branch: Ignored for filesystem (compatibility with git-based toolkits)
+
+        Returns:
+            List of file paths relative to base_directory
+        """
+        base = Path(self.base_directory)
+        search_path = base / path if path else base
+
+        if not search_path.exists():
+            return []
+
+        files = []
+
+        if self.recursive:
+            for root, dirs, filenames in os.walk(search_path, followlinks=self.follow_symlinks):
+                # Skip hidden directories
+                dirs[:] = [d for d in dirs if not d.startswith('.')]
+
+                for filename in filenames:
+                    if filename.startswith('.'):
+                        continue
+
+                    full_path = Path(root) / filename
+                    try:
+                        rel_path = str(full_path.relative_to(base))
+                        files.append(rel_path)
+                    except ValueError:
+                        continue
+        else:
+            for entry in search_path.iterdir():
+                if entry.is_file() and not entry.name.startswith('.'):
+                    try:
+                        rel_path = str(entry.relative_to(base))
+                        files.append(rel_path)
+                    except ValueError:
+                        continue
+
+        return sorted(files)
+
+    def _is_binary_file(self, file_path: str) -> bool:
+        """Check if file is a binary document (PDF, DOCX, etc.)."""
+        ext = Path(file_path).suffix.lower()
+        return ext in self.BINARY_EXTENSIONS
+
+    def _is_image_file(self, file_path: str) -> bool:
+        """Check if file is an image."""
+        ext = Path(file_path).suffix.lower()
+        return ext in self.IMAGE_EXTENSIONS
+
+    def _read_binary_file(self, file_path: str) -> Optional[str]:
+        """
+        Read binary file (PDF, DOCX, PPTX, Excel) and convert to text/markdown.
+
+        Uses the SDK's content_parser for document conversion.
+
+        Args:
+            file_path: Path relative to base_directory
+
+        Returns:
+            Converted text content, or None if conversion fails
+        """
+        full_path = Path(self.base_directory) / file_path
+
+        try:
+            from alita_sdk.tools.utils.content_parser import parse_file_content
+
+            result = parse_file_content(
+                file_path=str(full_path),
+                is_capture_image=bool(self.llm),  # Capture images if LLM available
+                llm=self.llm
+            )
+
+            if isinstance(result, Exception):
+                logger.warning(f"Failed to parse {file_path}: {result}")
+                return None
+
+            return result
+
+        except ImportError:
+            logger.warning("content_parser not available, skipping binary file")
+            return None
+        except Exception as e:
+            logger.warning(f"Error parsing {file_path}: {e}")
+            return None
+
+    def _read_image_file(self, file_path: str) -> Optional[str]:
+        """
+        Read image file and convert to text representation.
+
+        If LLM is available, uses it to describe the image.
+        Otherwise, returns base64-encoded data URI.
+
+        Args:
+            file_path: Path relative to base_directory
+
+        Returns:
+            Image description or base64 data URI
+        """
+        full_path = Path(self.base_directory) / file_path
+
+        if not full_path.exists():
+            return None
+
+        ext = full_path.suffix.lower()
+
+        try:
+            # Read image bytes
+            image_bytes = full_path.read_bytes()
+
+            if self.llm:
+                # Use content_parser with LLM for image description
+                try:
+                    from alita_sdk.tools.utils.content_parser import parse_file_content
+
+                    result = parse_file_content(
+                        file_path=str(full_path),
+                        is_capture_image=True,
+                        llm=self.llm
+                    )
+
+                    if isinstance(result, Exception):
+                        logger.warning(f"Failed to describe image {file_path}: {result}")
+                    else:
+                        return f"[Image: {Path(file_path).name}]\n\n{result}"
+
+                except ImportError:
+                    pass
+
+            # Fallback: return base64 data URI
+            mime_types = {
+                '.png': 'image/png',
+                '.jpg': 'image/jpeg',
+                '.jpeg': 'image/jpeg',
+                '.gif': 'image/gif',
+                '.webp': 'image/webp',
+                '.bmp': 'image/bmp',
+                '.svg': 'image/svg+xml',
+            }
+            mime_type = mime_types.get(ext, 'application/octet-stream')
+            b64_data = base64.b64encode(image_bytes).decode('utf-8')
+
+            return f"[Image: {Path(file_path).name}]\ndata:{mime_type};base64,{b64_data}"
+
+        except Exception as e:
+            logger.warning(f"Error reading image {file_path}: {e}")
+            return None
+
+    def _read_file(
+        self,
+        file_path: str,
+        branch: str = None,
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+        head: Optional[int] = None,
+        tail: Optional[int] = None,
+    ) -> Optional[str]:
+        """
+        Read file content, handling text, binary documents, and images.
+
+        Supports:
+        - Text files: Read directly with encoding detection
+        - Binary documents (PDF, DOCX, PPTX, Excel): Convert to markdown
+        - Images: Return LLM description or base64 data URI
+
+        Args:
+            file_path: Path relative to base_directory
+            branch: Ignored for filesystem (compatibility with git-based toolkits)
+            offset: Start line number (1-indexed). If None, start from beginning.
+            limit: Maximum number of lines to read. If None, read to end.
+            head: Read only first N lines (alternative to offset/limit)
+            tail: Read only last N lines (alternative to offset/limit)
+
+        Returns:
+            File content as string, or None if unreadable
+        """
+        full_path = Path(self.base_directory) / file_path
+
+        # Security check - prevent path traversal
+        try:
+            full_path.resolve().relative_to(Path(self.base_directory).resolve())
+        except ValueError:
+            logger.warning(f"Access denied: {file_path} is outside base directory")
+            return None
+
+        if not full_path.exists() or not full_path.is_file():
+            return None
+
+        # Route to appropriate reader based on file type
+        # Note: offset/limit only apply to text files
+        if self._is_binary_file(file_path):
+            return self._read_binary_file(file_path)
+
+        if self._is_image_file(file_path):
+            return self._read_image_file(file_path)
+
+        # Default: read as text with encoding detection
+        encodings = ['utf-8', 'utf-8-sig', 'latin-1', 'cp1252']
+
+        for encoding in encodings:
+            try:
+                content = full_path.read_text(encoding=encoding)
+
+                # Apply line filtering if specified
+                if offset is not None or limit is not None or head is not None or tail is not None:
+                    lines = content.splitlines(keepends=True)
+
+                    if head is not None:
+                        # Read first N lines
+                        lines = lines[:head]
+                    elif tail is not None:
+                        # Read last N lines
+                        lines = lines[-tail:] if tail > 0 else []
+                    else:
+                        # Use offset/limit
+                        start_idx = (offset - 1) if offset and offset > 0 else 0
+                        if limit is not None:
+                            end_idx = start_idx + limit
+                            lines = lines[start_idx:end_idx]
+                        else:
+                            lines = lines[start_idx:]
+
+                    content = ''.join(lines)
+
+                return content
+
+            except UnicodeDecodeError:
+                continue
+            except Exception as e:
+                logger.warning(f"Failed to read {file_path}: {e}")
+                return None
+
+        logger.warning(f"Could not decode {file_path} with any known encoding")
+        return None
+
+    def read_file(
+        self,
+        file_path: str,
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+        head: Optional[int] = None,
+        tail: Optional[int] = None,
+    ) -> Optional[str]:
+        """
+        Public method to read file content with optional line range.
+
+        Args:
+            file_path: Path relative to base_directory
+            offset: Start line number (1-indexed)
+            limit: Maximum number of lines to read
+            head: Read only first N lines
+            tail: Read only last N lines
+
+        Returns:
+            File content as string
+        """
+        return self._read_file(file_path, offset=offset, limit=limit, head=head, tail=tail)
+
+    def loader(
+        self,
+        branch: Optional[str] = None,
+        whitelist: Optional[List[str]] = None,
+        blacklist: Optional[List[str]] = None,
+        chunked: bool = True,
+    ) -> Generator[Document, None, None]:
+        """
+        Load documents from the filesystem.
+
+        Mirrors BaseCodeToolApiWrapper.loader() interface for compatibility.
+
+        Args:
+            branch: Ignored (kept for API compatibility with git-based loaders)
+            whitelist: File patterns to include (e.g., ['*.py', 'src/**/*.js'])
+            blacklist: File patterns to exclude (e.g., ['*test*', 'node_modules/**'])
+            chunked: If True, applies universal chunker based on file type
+
+        Yields:
+            Document objects with page_content and metadata
+        """
+        import glob as glob_module
+
+        base = Path(self.base_directory)
+
+        def is_blacklisted(file_path: str) -> bool:
+            if not blacklist:
+                return False
+            return (
+                any(fnmatch.fnmatch(file_path, p) for p in blacklist) or
+                any(fnmatch.fnmatch(Path(file_path).name, p) for p in blacklist)
+            )
+
+        # Optimization: Use glob directly when whitelist has path patterns
+        # This avoids scanning 100K+ files in node_modules etc.
+        def get_files_via_glob() -> Generator[str, None, None]:
+            """Use glob patterns directly - much faster than scanning all files."""
+            seen = set()
+            for pattern in whitelist:
+                # Handle glob patterns
+                full_pattern = str(base / pattern)
+                for match in glob_module.glob(full_pattern, recursive=True):
+                    match_path = Path(match)
+                    if match_path.is_file():
+                        try:
+                            rel_path = str(match_path.relative_to(base))
+                            if rel_path not in seen and not is_blacklisted(rel_path):
+                                seen.add(rel_path)
+                                yield rel_path
+                        except ValueError:
+                            continue
+
+        def get_files_via_scan() -> Generator[str, None, None]:
+            """Fall back to scanning all files when no whitelist or simple extension patterns."""
+            _files = self._get_files()
+            self._log_tool_event(f"Found {len(_files)} files in {self.base_directory}", "loader")
+
+            def is_whitelisted(file_path: str) -> bool:
+                if not whitelist:
+                    return True
+                return (
+                    any(fnmatch.fnmatch(file_path, p) for p in whitelist) or
+                    any(fnmatch.fnmatch(Path(file_path).name, p) for p in whitelist) or
+                    any(file_path.endswith(f'.{p.lstrip("*.")}') for p in whitelist if p.startswith('*.'))
+                )
+
+            for file_path in _files:
+                if is_whitelisted(file_path) and not is_blacklisted(file_path):
+                    yield file_path
+
+        # Decide strategy: use glob if whitelist has path patterns (contains / or **)
+        use_glob = whitelist and any('/' in p or '**' in p for p in whitelist)
+
+        if use_glob:
+            self._log_tool_event(f"Using glob patterns: {whitelist}", "loader")
+            file_iterator = get_files_via_glob()
+        else:
+            file_iterator = get_files_via_scan()
+
+        def raw_document_generator() -> Generator[Document, None, None]:
+            self._log_tool_event("Reading files...", "loader")
+            processed = 0
+
+            for file_path in file_iterator:
+                content = self._read_file(file_path)
+                if not content:
+                    continue
+
+                content_hash = hashlib.sha256(content.encode('utf-8')).hexdigest()
+                processed += 1
+
+                yield Document(
+                    page_content=content,
+                    metadata={
+                        'file_path': file_path,
+                        'file_name': Path(file_path).name,
+                        'source': file_path,
+                        'commit_hash': content_hash,
+                    }
+                )
+
+                # Log progress every 100 files
+                if processed % 100 == 0:
+                    logger.debug(f"[loader] Read {processed} files...")
+
+            self._log_tool_event(f"Loaded {processed} files", "loader")
+
+        if not chunked:
+            return raw_document_generator()
+
+        try:
+            from alita_sdk.tools.chunkers.universal_chunker import universal_chunker
+            return universal_chunker(raw_document_generator())
+        except ImportError:
+            logger.warning("Universal chunker not available, returning raw documents")
+            return raw_document_generator()
+
+    def chunker(self, documents: Generator[Document, None, None]) -> Generator[Document, None, None]:
+        """Apply universal chunker to documents."""
+        try:
+            from alita_sdk.tools.chunkers.universal_chunker import universal_chunker
+            return universal_chunker(documents)
+        except ImportError:
+            return documents
+
+    def get_files_content(self, file_path: str) -> Optional[str]:
+        """Get file content (compatibility alias for retrieval toolkit)."""
+        return self._read_file(file_path)
 
 
 # Predefined tool presets for common use cases
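
Beyond the usage shown in the class docstring, the wrapper can run standalone (e.g., from the CLI) if it is given a RunnableConfig carrying a run_id, which dispatch_custom_event requires. A sketch under that assumption (the ./docs path and glob patterns are illustrative):

    import uuid

    wrapper = FilesystemApiWrapper(base_directory="./docs")
    wrapper.set_runnable_config({"run_id": uuid.uuid4()})  # enables progress events outside an agent

    # Chunked Document stream filtered by glob patterns
    for doc in wrapper.loader(whitelist=["**/*.md"], blacklist=["node_modules/**"]):
        print(doc.metadata["file_path"], len(doc.page_content))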
@@ -761,6 +1511,7 @@ FILESYSTEM_TOOL_PRESETS = {
     'read_only': {
         'exclude_tools': [
             'filesystem_write_file',
+            'filesystem_append_file',
             'filesystem_edit_file',
             'filesystem_apply_patch',
             'filesystem_delete_file',
@@ -775,6 +1526,7 @@ FILESYSTEM_TOOL_PRESETS = {
         'include_tools': [
             'filesystem_read_file',
             'filesystem_write_file',
+            'filesystem_append_file',
             'filesystem_list_directory',
             'filesystem_create_directory',
         ]
@@ -792,20 +1544,21 @@ def get_filesystem_tools(
     base_directory: str,
     include_tools: Optional[List[str]] = None,
     exclude_tools: Optional[List[str]] = None,
-    preset: Optional[str] = None
+    preset: Optional[str] = None,
+    allowed_directories: Optional[List[str]] = None
 ) -> List[BaseTool]:
     """
-    Get filesystem tools for the specified
+    Get filesystem tools for the specified directories.
 
     Args:
-        base_directory: Absolute or relative path to the directory to restrict access to
+        base_directory: Absolute or relative path to the primary directory to restrict access to
         include_tools: Optional list of tool names to include. If provided, only these tools are returned.
            If None, all tools are included (unless excluded).
        exclude_tools: Optional list of tool names to exclude. Applied after include_tools.
        preset: Optional preset name to use predefined tool sets. Presets:
            - 'read_only': Excludes all write/modify operations
            - 'no_delete': All tools except delete
-            - 'basic': Read, write, list, create directory
+            - 'basic': Read, write, append, list, create directory
            - 'minimal': Only read and list
            Note: If preset is used with include_tools or exclude_tools,
            preset is applied first, then custom filters.
@@ -818,6 +1571,7 @@ def get_filesystem_tools(
     - filesystem_read_file_chunk
     - filesystem_read_multiple_files
     - filesystem_write_file
+    - filesystem_append_file (for incremental file creation)
     - filesystem_edit_file
     - filesystem_apply_patch
     - filesystem_list_directory
@@ -847,6 +1601,10 @@ def get_filesystem_tools(
         # Use preset and add custom exclusions
         get_filesystem_tools('/path/to/dir', preset='read_only',
                              exclude_tools=['filesystem_search_files'])
+
+        # Multiple allowed directories
+        get_filesystem_tools('/path/to/primary',
+                             allowed_directories=['/path/to/other1', '/path/to/other2'])
     """
     # Apply preset if specified
     preset_include = None
@@ -870,25 +1628,27 @@ def get_filesystem_tools(
         final_exclude.extend(exclude_tools)
     final_exclude = list(set(final_exclude)) if final_exclude else None
 
-    # Resolve to absolute
+    # Resolve to absolute paths
     base_dir = str(Path(base_directory).resolve())
+    extra_dirs = [str(Path(d).resolve()) for d in (allowed_directories or [])]
 
     # Define all available tools with their names
     all_tools = {
-        'filesystem_read_file': ReadFileTool(base_directory=base_dir),
-        'filesystem_read_file_chunk': ReadFileChunkTool(base_directory=base_dir),
-        'filesystem_read_multiple_files': ReadMultipleFilesTool(base_directory=base_dir),
-        'filesystem_write_file': WriteFileTool(base_directory=base_dir),
-        'filesystem_edit_file': EditFileTool(base_directory=base_dir),
-        'filesystem_apply_patch': ApplyPatchTool(base_directory=base_dir),
-        'filesystem_list_directory': ListDirectoryTool(base_directory=base_dir),
-        'filesystem_directory_tree': DirectoryTreeTool(base_directory=base_dir),
-        'filesystem_search_files': SearchFilesTool(base_directory=base_dir),
-        'filesystem_delete_file': DeleteFileTool(base_directory=base_dir),
-        'filesystem_move_file': MoveFileTool(base_directory=base_dir),
-        'filesystem_create_directory': CreateDirectoryTool(base_directory=base_dir),
-        'filesystem_get_file_info': GetFileInfoTool(base_directory=base_dir),
-        'filesystem_list_allowed_directories': ListAllowedDirectoriesTool(base_directory=base_dir),
+        'filesystem_read_file': ReadFileTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_read_file_chunk': ReadFileChunkTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_read_multiple_files': ReadMultipleFilesTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_write_file': WriteFileTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_append_file': AppendFileTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_edit_file': EditFileTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_apply_patch': ApplyPatchTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_list_directory': ListDirectoryTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_directory_tree': DirectoryTreeTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_search_files': SearchFilesTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_delete_file': DeleteFileTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_move_file': MoveFileTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_create_directory': CreateDirectoryTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_get_file_info': GetFileInfoTool(base_directory=base_dir, allowed_directories=extra_dirs),
+        'filesystem_list_allowed_directories': ListAllowedDirectoriesTool(base_directory=base_dir, allowed_directories=extra_dirs),
     }
 
     # Start with all tools or only included ones