alita-sdk 0.3.497__py3-none-any.whl → 0.3.515__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of alita-sdk might be problematic. See the registry's advisory page for more details.

Files changed (108)
  1. alita_sdk/cli/inventory.py +12 -195
  2. alita_sdk/community/inventory/__init__.py +12 -0
  3. alita_sdk/community/inventory/toolkit.py +9 -5
  4. alita_sdk/community/inventory/toolkit_utils.py +176 -0
  5. alita_sdk/configurations/ado.py +144 -0
  6. alita_sdk/configurations/confluence.py +76 -42
  7. alita_sdk/configurations/figma.py +76 -0
  8. alita_sdk/configurations/gitlab.py +2 -0
  9. alita_sdk/configurations/qtest.py +72 -1
  10. alita_sdk/configurations/report_portal.py +96 -0
  11. alita_sdk/configurations/sharepoint.py +148 -0
  12. alita_sdk/configurations/testio.py +83 -0
  13. alita_sdk/runtime/clients/artifact.py +2 -2
  14. alita_sdk/runtime/clients/client.py +24 -19
  15. alita_sdk/runtime/clients/sandbox_client.py +14 -0
  16. alita_sdk/runtime/langchain/assistant.py +48 -2
  17. alita_sdk/runtime/langchain/document_loaders/AlitaJSONLinesLoader.py +77 -0
  18. alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +2 -1
  19. alita_sdk/runtime/langchain/document_loaders/constants.py +2 -1
  20. alita_sdk/runtime/langchain/langraph_agent.py +8 -9
  21. alita_sdk/runtime/langchain/utils.py +6 -1
  22. alita_sdk/runtime/toolkits/artifact.py +14 -5
  23. alita_sdk/runtime/toolkits/datasource.py +13 -6
  24. alita_sdk/runtime/toolkits/mcp.py +26 -157
  25. alita_sdk/runtime/toolkits/planning.py +10 -5
  26. alita_sdk/runtime/toolkits/tools.py +23 -7
  27. alita_sdk/runtime/toolkits/vectorstore.py +11 -5
  28. alita_sdk/runtime/tools/artifact.py +139 -6
  29. alita_sdk/runtime/tools/llm.py +20 -10
  30. alita_sdk/runtime/tools/mcp_remote_tool.py +2 -3
  31. alita_sdk/runtime/tools/mcp_server_tool.py +2 -4
  32. alita_sdk/runtime/utils/AlitaCallback.py +30 -1
  33. alita_sdk/runtime/utils/mcp_client.py +33 -6
  34. alita_sdk/runtime/utils/mcp_oauth.py +125 -8
  35. alita_sdk/runtime/utils/mcp_sse_client.py +35 -6
  36. alita_sdk/runtime/utils/utils.py +2 -0
  37. alita_sdk/tools/__init__.py +15 -0
  38. alita_sdk/tools/ado/repos/__init__.py +10 -12
  39. alita_sdk/tools/ado/test_plan/__init__.py +23 -8
  40. alita_sdk/tools/ado/wiki/__init__.py +24 -8
  41. alita_sdk/tools/ado/wiki/ado_wrapper.py +21 -7
  42. alita_sdk/tools/ado/work_item/__init__.py +24 -8
  43. alita_sdk/tools/advanced_jira_mining/__init__.py +10 -8
  44. alita_sdk/tools/aws/delta_lake/__init__.py +12 -9
  45. alita_sdk/tools/aws/delta_lake/tool.py +5 -1
  46. alita_sdk/tools/azure_ai/search/__init__.py +9 -7
  47. alita_sdk/tools/base/tool.py +5 -1
  48. alita_sdk/tools/base_indexer_toolkit.py +25 -0
  49. alita_sdk/tools/bitbucket/__init__.py +14 -10
  50. alita_sdk/tools/bitbucket/api_wrapper.py +50 -2
  51. alita_sdk/tools/browser/__init__.py +5 -4
  52. alita_sdk/tools/carrier/__init__.py +5 -6
  53. alita_sdk/tools/cloud/aws/__init__.py +9 -7
  54. alita_sdk/tools/cloud/azure/__init__.py +9 -7
  55. alita_sdk/tools/cloud/gcp/__init__.py +9 -7
  56. alita_sdk/tools/cloud/k8s/__init__.py +9 -7
  57. alita_sdk/tools/code/linter/__init__.py +9 -8
  58. alita_sdk/tools/code/sonar/__init__.py +9 -7
  59. alita_sdk/tools/confluence/__init__.py +15 -10
  60. alita_sdk/tools/custom_open_api/__init__.py +11 -5
  61. alita_sdk/tools/elastic/__init__.py +10 -8
  62. alita_sdk/tools/elitea_base.py +387 -9
  63. alita_sdk/tools/figma/__init__.py +8 -7
  64. alita_sdk/tools/github/__init__.py +12 -14
  65. alita_sdk/tools/github/github_client.py +68 -2
  66. alita_sdk/tools/github/tool.py +5 -1
  67. alita_sdk/tools/gitlab/__init__.py +14 -11
  68. alita_sdk/tools/gitlab/api_wrapper.py +81 -1
  69. alita_sdk/tools/gitlab_org/__init__.py +9 -8
  70. alita_sdk/tools/google/bigquery/__init__.py +12 -12
  71. alita_sdk/tools/google/bigquery/tool.py +5 -1
  72. alita_sdk/tools/google_places/__init__.py +9 -8
  73. alita_sdk/tools/jira/__init__.py +15 -10
  74. alita_sdk/tools/keycloak/__init__.py +10 -8
  75. alita_sdk/tools/localgit/__init__.py +8 -3
  76. alita_sdk/tools/localgit/local_git.py +62 -54
  77. alita_sdk/tools/localgit/tool.py +5 -1
  78. alita_sdk/tools/memory/__init__.py +11 -3
  79. alita_sdk/tools/ocr/__init__.py +10 -8
  80. alita_sdk/tools/openapi/__init__.py +6 -2
  81. alita_sdk/tools/pandas/__init__.py +9 -7
  82. alita_sdk/tools/postman/__init__.py +10 -11
  83. alita_sdk/tools/pptx/__init__.py +9 -9
  84. alita_sdk/tools/qtest/__init__.py +9 -8
  85. alita_sdk/tools/rally/__init__.py +9 -8
  86. alita_sdk/tools/report_portal/__init__.py +11 -9
  87. alita_sdk/tools/salesforce/__init__.py +9 -9
  88. alita_sdk/tools/servicenow/__init__.py +10 -8
  89. alita_sdk/tools/sharepoint/__init__.py +9 -8
  90. alita_sdk/tools/slack/__init__.py +8 -7
  91. alita_sdk/tools/sql/__init__.py +9 -8
  92. alita_sdk/tools/testio/__init__.py +9 -8
  93. alita_sdk/tools/testrail/__init__.py +10 -8
  94. alita_sdk/tools/utils/__init__.py +9 -4
  95. alita_sdk/tools/utils/text_operations.py +254 -0
  96. alita_sdk/tools/xray/__init__.py +10 -8
  97. alita_sdk/tools/yagmail/__init__.py +8 -3
  98. alita_sdk/tools/zephyr/__init__.py +8 -7
  99. alita_sdk/tools/zephyr_enterprise/__init__.py +10 -8
  100. alita_sdk/tools/zephyr_essential/__init__.py +9 -8
  101. alita_sdk/tools/zephyr_scale/__init__.py +9 -8
  102. alita_sdk/tools/zephyr_squad/__init__.py +9 -8
  103. {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/METADATA +1 -1
  104. {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/RECORD +108 -105
  105. {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/WHEEL +0 -0
  106. {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/entry_points.txt +0 -0
  107. {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/licenses/LICENSE +0 -0
  108. {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/top_level.txt +0 -0
@@ -11,7 +11,6 @@ from pydantic import BaseModel, create_model, Field, SecretStr
11
11
 
12
12
  # from alita_sdk.runtime.langchain.interfaces.llm_processor import get_embeddings
13
13
  from .chunkers import markdown_chunker
14
- from .utils import TOOLKIT_SPLITTER
15
14
  from .vector_adapters.VectorStoreAdapter import VectorStoreAdapterFactory
16
15
  from ..runtime.utils.utils import IndexerKeywords
17
16
 
@@ -126,6 +125,58 @@ BaseIndexDataParams = create_model(
126
125
  chunking_config=(Optional[dict], Field(description="Chunking tool configuration", default_factory=dict)),
127
126
  )
128
127
 
128
+ # File Operations Schema Models
129
+ ReadFileInput = create_model(
130
+ "ReadFileInput",
131
+ file_path=(str, Field(description="Path to the file to read")),
132
+ branch=(Optional[str], Field(description="Branch name. If None, uses active branch.", default=None)),
133
+ offset=(Optional[int], Field(description="Starting line number (1-indexed, inclusive). Read from this line onwards.", default=None, ge=1)),
134
+ limit=(Optional[int], Field(description="Number of lines to read from offset. If None, reads to end.", default=None, ge=1)),
135
+ head=(Optional[int], Field(description="Read only the first N lines. Alternative to offset/limit.", default=None, ge=1)),
136
+ tail=(Optional[int], Field(description="Read only the last N lines. Alternative to offset/limit.", default=None, ge=1)),
137
+ )
138
+
139
+ ReadFileChunkInput = create_model(
140
+ "ReadFileChunkInput",
141
+ file_path=(str, Field(description="Path to the file to read")),
142
+ branch=(Optional[str], Field(description="Branch name. If None, uses active branch.", default=None)),
143
+ start_line=(int, Field(description="Starting line number (1-indexed, inclusive)", ge=1)),
144
+ end_line=(Optional[int], Field(description="Ending line number (1-indexed, inclusive). If None, reads to end.", default=None, ge=1)),
145
+ )
146
+
147
+ ReadMultipleFilesInput = create_model(
148
+ "ReadMultipleFilesInput",
149
+ file_paths=(List[str], Field(description="List of file paths to read", min_length=1)),
150
+ branch=(Optional[str], Field(description="Branch name. If None, uses active branch.", default=None)),
151
+ offset=(Optional[int], Field(description="Starting line number for all files (1-indexed)", default=None, ge=1)),
152
+ limit=(Optional[int], Field(description="Number of lines to read from offset for all files", default=None, ge=1)),
153
+ )
154
+
155
+ EditFileInput = create_model(
156
+ "EditFileInput",
157
+ file_path=(str, Field(description="Path to the file to edit. Must be a text file (markdown, txt, csv, json, xml, html, yaml, etc.)")),
158
+ file_query=(str, Field(description="""Edit instructions with OLD/NEW markers. Format:
159
+ OLD <<<<
160
+ old content to replace
161
+ >>>> OLD
162
+ NEW <<<<
163
+ new content
164
+ >>>> NEW
165
+
166
+ Multiple OLD/NEW pairs can be provided for multiple edits.""")),
167
+ branch=(Optional[str], Field(description="Branch name. If None, uses active branch.", default=None)),
168
+ commit_message=(Optional[str], Field(description="Commit message for the change (VCS toolkits only)", default=None)),
169
+ )
170
+
171
+ SearchFileInput = create_model(
172
+ "SearchFileInput",
173
+ file_path=(str, Field(description="Path to the file to search")),
174
+ pattern=(str, Field(description="Search pattern. Treated as regex by default unless is_regex=False.")),
175
+ branch=(Optional[str], Field(description="Branch name. If None, uses active branch.", default=None)),
176
+ is_regex=(bool, Field(description="Whether pattern is a regex. Default is True for flexible matching.", default=True)),
177
+ context_lines=(int, Field(description="Number of lines before/after match to include for context", default=2, ge=0)),
178
+ )
179
+
129
180
 
130
181
  class BaseToolApiWrapper(BaseModel):
131
182
 
@@ -185,8 +236,7 @@ class BaseToolApiWrapper(BaseModel):
185
236
 
186
237
 
187
238
  def run(self, mode: str, *args: Any, **kwargs: Any):
188
- if TOOLKIT_SPLITTER in mode:
189
- mode = mode.rsplit(TOOLKIT_SPLITTER, maxsplit=1)[1]
239
+ # Mode is now the clean tool name (no prefix to remove)
190
240
  for tool in self.get_available_tools():
191
241
  if tool["name"] == mode:
192
242
  try:
@@ -589,11 +639,281 @@ class BaseCodeToolApiWrapper(BaseVectorStoreToolApiWrapper):
589
639
  def _get_files(self):
590
640
  raise NotImplementedError("Subclasses should implement this method")
591
641
 
592
- def _read_file(self, file_path: str, branch: str):
642
+ def _read_file(
643
+ self,
644
+ file_path: str,
645
+ branch: str = None,
646
+ offset: Optional[int] = None,
647
+ limit: Optional[int] = None,
648
+ head: Optional[int] = None,
649
+ tail: Optional[int] = None,
650
+ **kwargs # Allow subclasses to have additional parameters
651
+ ) -> str:
652
+ """
653
+ Read file content with optional partial read support.
654
+
655
+ Subclasses should implement this method. If they don't support partial reads,
656
+ they can accept **kwargs and ignore offset/limit/head/tail parameters - the base
657
+ class high-level methods will apply slicing client-side.
658
+
659
+ Args:
660
+ file_path: Path to the file
661
+ branch: Branch name (None for active branch)
662
+ offset: Starting line number (1-indexed)
663
+ limit: Number of lines to read from offset
664
+ head: Read only first N lines
665
+ tail: Read only last N lines
666
+ **kwargs: Additional toolkit-specific parameters (e.g., repo_name for GitHub)
667
+
668
+ Returns:
669
+ File content as string
670
+ """
593
671
  raise NotImplementedError("Subclasses should implement this method")
672
+
673
+ def _write_file(
674
+ self,
675
+ file_path: str,
676
+ content: str,
677
+ branch: str = None,
678
+ commit_message: str = None
679
+ ) -> str:
680
+ """
681
+ Write content to a file.
682
+
683
+ Subclasses should implement this method to enable edit_file functionality.
684
+ For VCS toolkits, this may involve creating or updating files with commits.
685
+
686
+ Args:
687
+ file_path: Path to the file
688
+ content: New file content
689
+ branch: Branch name (None for active branch)
690
+ commit_message: Commit message (VCS toolkits only)
691
+
692
+ Returns:
693
+ Success message
694
+ """
695
+ raise NotImplementedError("Subclasses should implement _write_file to enable editing")
594
696
 
595
697
  def _file_commit_hash(self, file_path: str, branch: str):
596
698
  pass
699
+
700
+ def read_file_chunk(
701
+ self,
702
+ file_path: str,
703
+ start_line: int,
704
+ end_line: Optional[int] = None,
705
+ branch: str = None
706
+ ) -> str:
707
+ """
708
+ Read a specific range of lines from a file.
709
+
710
+ Args:
711
+ file_path: Path to the file
712
+ start_line: Starting line number (1-indexed, inclusive)
713
+ end_line: Ending line number (1-indexed, inclusive). If None, reads to end.
714
+ branch: Branch name (None for active branch)
715
+
716
+ Returns:
717
+ File content for the specified line range
718
+ """
719
+ from .utils.text_operations import apply_line_slice
720
+
721
+ # Calculate offset and limit from start_line and end_line
722
+ offset = start_line
723
+ limit = (end_line - start_line + 1) if end_line is not None else None
724
+
725
+ # Read the file with offset/limit
726
+ content = self._read_file(file_path, branch, offset=offset, limit=limit)
727
+
728
+ # Apply client-side slicing if toolkit doesn't support partial reads
729
+ # (toolkit's _read_file will return full content if it ignores offset/limit)
730
+ return apply_line_slice(content, offset=offset, limit=limit)
731
+
732
+ def read_multiple_files(
733
+ self,
734
+ file_paths: List[str],
735
+ branch: str = None,
736
+ offset: Optional[int] = None,
737
+ limit: Optional[int] = None
738
+ ) -> Dict[str, str]:
739
+ """
740
+ Read multiple files in batch.
741
+
742
+ Args:
743
+ file_paths: List of file paths to read
744
+ branch: Branch name (None for active branch)
745
+ offset: Starting line number for all files (1-indexed)
746
+ limit: Number of lines to read from offset for all files
747
+
748
+ Returns:
749
+ Dictionary mapping file paths to their content (or error messages)
750
+ """
751
+ results = {}
752
+
753
+ for file_path in file_paths:
754
+ try:
755
+ content = self._read_file(
756
+ file_path,
757
+ branch,
758
+ offset=offset,
759
+ limit=limit
760
+ )
761
+ results[file_path] = content
762
+ except Exception as e:
763
+ results[file_path] = f"Error reading file: {str(e)}"
764
+ logger.error(f"Failed to read {file_path}: {e}")
765
+
766
+ return results
767
+
768
+ def search_file(
769
+ self,
770
+ file_path: str,
771
+ pattern: str,
772
+ branch: str = None,
773
+ is_regex: bool = True,
774
+ context_lines: int = 2
775
+ ) -> str:
776
+ """
777
+ Search for pattern in file content with context.
778
+
779
+ Args:
780
+ file_path: Path to the file
781
+ pattern: Search pattern (regex if is_regex=True, else literal)
782
+ branch: Branch name (None for active branch)
783
+ is_regex: Whether pattern is regex (default True)
784
+ context_lines: Lines of context before/after matches (default 2)
785
+
786
+ Returns:
787
+ Formatted string with search results and context
788
+ """
789
+ from .utils.text_operations import search_in_content
790
+
791
+ # Read full file content
792
+ content = self._read_file(file_path, branch)
793
+
794
+ # Search for pattern
795
+ matches = search_in_content(content, pattern, is_regex, context_lines)
796
+
797
+ if not matches:
798
+ return f"No matches found for pattern '{pattern}' in {file_path}"
799
+
800
+ # Format results
801
+ result_lines = [f"Found {len(matches)} match(es) for pattern '{pattern}' in {file_path}:\n"]
802
+
803
+ for i, match in enumerate(matches, 1):
804
+ result_lines.append(f"\n--- Match {i} at line {match['line_number']} ---")
805
+
806
+ # Context before
807
+ if match['context_before']:
808
+ for line in match['context_before']:
809
+ result_lines.append(f" {line}")
810
+
811
+ # Matching line (highlighted)
812
+ result_lines.append(f"> {match['line_content']}")
813
+
814
+ # Context after
815
+ if match['context_after']:
816
+ for line in match['context_after']:
817
+ result_lines.append(f" {line}")
818
+
819
+ return "\n".join(result_lines)
820
+
821
+ def edit_file(
822
+ self,
823
+ file_path: str,
824
+ file_query: str,
825
+ branch: str = None,
826
+ commit_message: str = None
827
+ ) -> str:
828
+ """
829
+ Edit file using OLD/NEW markers for precise replacements.
830
+
831
+ Only works with text files (markdown, txt, csv, json, xml, html, yaml, code files).
832
+
833
+ Args:
834
+ file_path: Path to the file to edit
835
+ file_query: Edit instructions with OLD/NEW markers
836
+ branch: Branch name (None for active branch)
837
+ commit_message: Commit message (VCS toolkits only)
838
+
839
+ Returns:
840
+ Success message or error
841
+
842
+ Raises:
843
+ ToolException: If file is not text-editable or edit fails
844
+ """
845
+ from .utils.text_operations import parse_old_new_markers, is_text_editable
846
+ from langchain_core.callbacks import dispatch_custom_event
847
+
848
+ # Validate file is text-editable
849
+ if not is_text_editable(file_path):
850
+ raise ToolException(
851
+ f"Cannot edit binary/document file '{file_path}'. "
852
+ f"Supported text formats: markdown, txt, csv, json, xml, html, yaml, code files."
853
+ )
854
+
855
+ # Parse OLD/NEW markers
856
+ edits = parse_old_new_markers(file_query)
857
+ if not edits:
858
+ raise ToolException(
859
+ "No OLD/NEW marker pairs found in file_query. "
860
+ "Format: OLD <<<< old text >>>> OLD NEW <<<< new text >>>> NEW"
861
+ )
862
+
863
+ # Read current file content
864
+ try:
865
+ current_content = self._read_file(file_path, branch)
866
+ except Exception as e:
867
+ raise ToolException(f"Failed to read file {file_path}: {e}")
868
+
869
+ # Apply all edits
870
+ updated_content = current_content
871
+ for old_text, new_text in edits:
872
+ if not old_text.strip():
873
+ continue
874
+
875
+ if old_text not in updated_content:
876
+ logger.warning(
877
+ f"Old content not found in {file_path}. "
878
+ f"Looking for: {old_text[:100]}..."
879
+ )
880
+ continue
881
+
882
+ updated_content = updated_content.replace(old_text, new_text)
883
+
884
+ # Check if any changes were made
885
+ if current_content == updated_content:
886
+ return (
887
+ f"No changes made to {file_path}. "
888
+ "Old content was not found or is empty. "
889
+ "Use read_file or search_file to verify current content."
890
+ )
891
+
892
+ # Write updated content
893
+ try:
894
+ result = self._write_file(file_path, updated_content, branch, commit_message)
895
+ except NotImplementedError:
896
+ raise ToolException(
897
+ f"Editing not supported for this toolkit. "
898
+ f"The _write_file method is not implemented."
899
+ )
900
+ except Exception as e:
901
+ raise ToolException(f"Failed to write file {file_path}: {e}")
902
+
903
+ # Dispatch file modification event
904
+ try:
905
+ dispatch_custom_event("file_modified", {
906
+ "message": f"File '{file_path}' edited successfully",
907
+ "filename": file_path,
908
+ "tool_name": "edit_file",
909
+ "toolkit": self.__class__.__name__,
910
+ "operation_type": "modify",
911
+ "edits_applied": len(edits)
912
+ })
913
+ except Exception as e:
914
+ logger.warning(f"Failed to dispatch file_modified event: {e}")
915
+
916
+ return result
597
917
 
598
918
  def __handle_get_files(self, path: str, branch: str):
599
919
  """
@@ -773,20 +1093,78 @@ def extend_with_vector_tools(method):
773
1093
  return wrapper
774
1094
 
775
1095
 
1096
+ def extend_with_file_operations(method):
1097
+ """
1098
+ Decorator to automatically add file operation tools to toolkits that implement
1099
+ _read_file and _write_file methods.
1100
+
1101
+ Adds:
1102
+ - read_file_chunk: Read specific line ranges
1103
+ - read_multiple_files: Batch read files
1104
+ - search_file: Search for patterns in files
1105
+ - edit_file: Edit files using OLD/NEW markers
1106
+ """
1107
+ def wrapper(self, *args, **kwargs):
1108
+ tools = method(self, *args, **kwargs)
1109
+
1110
+ # Only add file operations if toolkit has implemented the required methods
1111
+ # Check for both _read_file and _write_file methods
1112
+ has_file_ops = (hasattr(self, '_read_file') and callable(getattr(self, '_read_file')) and
1113
+ hasattr(self, '_write_file') and callable(getattr(self, '_write_file')))
1114
+
1115
+ if has_file_ops:
1116
+ # Import schemas from elitea_base
1117
+ from . import elitea_base
1118
+
1119
+ file_operation_tools = [
1120
+ {
1121
+ "name": "read_file_chunk",
1122
+ "mode": "read_file_chunk",
1123
+ "ref": self.read_file_chunk,
1124
+ "description": self.read_file_chunk.__doc__,
1125
+ "args_schema": elitea_base.ReadFileChunkInput
1126
+ },
1127
+ {
1128
+ "name": "read_multiple_files",
1129
+ "mode": "read_multiple_files",
1130
+ "ref": self.read_multiple_files,
1131
+ "description": self.read_multiple_files.__doc__,
1132
+ "args_schema": elitea_base.ReadMultipleFilesInput
1133
+ },
1134
+ {
1135
+ "name": "search_file",
1136
+ "mode": "search_file",
1137
+ "ref": self.search_file,
1138
+ "description": self.search_file.__doc__,
1139
+ "args_schema": elitea_base.SearchFileInput
1140
+ },
1141
+ {
1142
+ "name": "edit_file",
1143
+ "mode": "edit_file",
1144
+ "ref": self.edit_file,
1145
+ "description": self.edit_file.__doc__,
1146
+ "args_schema": elitea_base.EditFileInput
1147
+ },
1148
+ ]
1149
+
1150
+ tools.extend(file_operation_tools)
1151
+
1152
+ return tools
1153
+
1154
+ return wrapper
1155
+
1156
+
776
1157
  def filter_missconfigured_index_tools(method):
777
1158
  def wrapper(self, *args, **kwargs):
778
1159
  toolkit = method(self, *args, **kwargs)
779
1160
 
780
1161
  # Validate index tools misconfiguration and exclude them if necessary
781
- is_index_toolkit = any(tool.name.rsplit(TOOLKIT_SPLITTER)[1]
782
- if TOOLKIT_SPLITTER in tool.name else tool.name
783
- in INDEX_TOOL_NAMES for tool in toolkit.tools)
1162
+ is_index_toolkit = any(tool.name in INDEX_TOOL_NAMES for tool in toolkit.tools)
784
1163
  is_index_configuration_missing = not (kwargs.get('embedding_model')
785
1164
  and kwargs.get('pgvector_configuration'))
786
1165
 
787
1166
  if is_index_toolkit and is_index_configuration_missing:
788
- toolkit.tools = [tool for tool in toolkit.tools if (tool.name.rsplit(TOOLKIT_SPLITTER, 1)[
789
- 1] if TOOLKIT_SPLITTER in tool.name else tool.name) not in INDEX_TOOL_NAMES]
1167
+ toolkit.tools = [tool for tool in toolkit.tools if tool.name not in INDEX_TOOL_NAMES]
790
1168
 
791
1169
  return toolkit
792
1170
 
@@ -6,7 +6,7 @@ from pydantic import BaseModel, ConfigDict, Field, create_model
6
6
  from ..base.tool import BaseAction
7
7
  from .api_wrapper import FigmaApiWrapper, GLOBAL_LIMIT
8
8
  from ..elitea_base import filter_missconfigured_index_tools
9
- from ..utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length
9
+ from ..utils import clean_string, get_max_toolkit_length
10
10
  from ...configurations.figma import FigmaConfiguration
11
11
  from ...configurations.pgvector import PgVectorConfiguration
12
12
 
@@ -36,7 +36,6 @@ def get_tools(tool):
36
36
 
37
37
  class FigmaToolkit(BaseToolkit):
38
38
  tools: List[BaseTool] = []
39
- toolkit_max_length: int = 0
40
39
 
41
40
  @staticmethod
42
41
  def toolkit_config_schema() -> BaseModel:
@@ -44,7 +43,6 @@ class FigmaToolkit(BaseToolkit):
44
43
  x["name"]: x["args_schema"].schema()
45
44
  for x in FigmaApiWrapper.model_construct().get_available_tools()
46
45
  }
47
- FigmaToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
48
46
  return create_model(
49
47
  name,
50
48
  global_limit=(Optional[int], Field(description="Global limit", default=GLOBAL_LIMIT)),
@@ -66,7 +64,6 @@ class FigmaToolkit(BaseToolkit):
66
64
  "metadata": {
67
65
  "label": "Figma",
68
66
  "icon_url": "figma-icon.svg",
69
- "max_length": FigmaToolkit.toolkit_max_length,
70
67
  "categories": ["other"],
71
68
  "extra_categories": ["figma", "design", "ui/ux", "prototyping", "collaboration"],
72
69
  }
@@ -85,19 +82,23 @@ class FigmaToolkit(BaseToolkit):
85
82
  **(kwargs.get('pgvector_configuration') or {}),
86
83
  }
87
84
  figma_api_wrapper = FigmaApiWrapper(**wrapper_payload)
88
- prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
89
85
  available_tools = figma_api_wrapper.get_available_tools()
90
86
  tools = []
91
87
  for tool in available_tools:
92
88
  if selected_tools:
93
89
  if tool["name"] not in selected_tools:
94
90
  continue
91
+ description = tool["description"]
92
+ if toolkit_name:
93
+ description = f"Toolkit: {toolkit_name}\n{description}"
94
+ description = description[:1000]
95
95
  tools.append(
96
96
  BaseAction(
97
97
  api_wrapper=figma_api_wrapper,
98
- name=prefix + tool["name"],
99
- description=tool["description"],
98
+ name=tool["name"],
99
+ description=description,
100
100
  args_schema=tool["args_schema"],
101
+ metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
101
102
  )
102
103
  )
103
104
  return cls(tools=tools)
@@ -7,13 +7,13 @@ from .api_wrapper import AlitaGitHubAPIWrapper
7
7
  from .tool import GitHubAction
8
8
  from ..elitea_base import filter_missconfigured_index_tools
9
9
 
10
- from ..utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length
10
+ from ..utils import clean_string, get_max_toolkit_length
11
11
  from ...configurations.github import GithubConfiguration
12
12
  from ...configurations.pgvector import PgVectorConfiguration
13
13
 
14
14
  name = "github"
15
15
 
16
- def _get_toolkit(tool) -> BaseToolkit:
16
+ def get_toolkit(tool) -> BaseToolkit:
17
17
  return AlitaGitHubToolkit().get_toolkit(
18
18
  selected_tools=tool['settings'].get('selected_tools', []),
19
19
  github_base_url=tool['settings'].get('base_url', ''),
@@ -31,21 +31,16 @@ def _get_toolkit(tool) -> BaseToolkit:
31
31
  toolkit_name=tool.get('toolkit_name')
32
32
  )
33
33
 
34
- def get_toolkit():
35
- return AlitaGitHubToolkit.toolkit_config_schema()
36
-
37
34
  def get_tools(tool):
38
- return _get_toolkit(tool).get_tools()
35
+ return get_toolkit(tool).get_tools()
39
36
 
40
37
  class AlitaGitHubToolkit(BaseToolkit):
41
38
  tools: List[BaseTool] = []
42
- toolkit_max_length: int = 0
43
39
 
44
40
  @staticmethod
45
41
  def toolkit_config_schema() -> BaseModel:
46
42
  selected_tools = {x['name']: x['args_schema'].schema() for x in
47
43
  AlitaGitHubAPIWrapper.model_construct().get_available_tools()}
48
- AlitaGitHubToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
49
44
  return create_model(
50
45
  name,
51
46
  __config__=ConfigDict(
@@ -53,7 +48,6 @@ class AlitaGitHubToolkit(BaseToolkit):
53
48
  'metadata': {
54
49
  "label": "GitHub",
55
50
  "icon_url": None,
56
- "max_length": AlitaGitHubToolkit.toolkit_max_length,
57
51
  "categories": ["code repositories"],
58
52
  "extra_categories": ["github", "git", "repository", "code", "version control"],
59
53
  },
@@ -87,18 +81,22 @@ class AlitaGitHubToolkit(BaseToolkit):
87
81
  github_api_wrapper = AlitaGitHubAPIWrapper(**wrapper_payload)
88
82
  available_tools: List[Dict] = github_api_wrapper.get_available_tools()
89
83
  tools = []
90
- prefix = clean_string(toolkit_name, AlitaGitHubToolkit.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
91
84
  for tool in available_tools:
92
85
  if selected_tools:
93
86
  if tool["name"] not in selected_tools:
94
87
  continue
88
+ description = tool["description"]
89
+ if toolkit_name:
90
+ description = f"Toolkit: {toolkit_name}\n{description}"
91
+ description = f"Repository: {github_api_wrapper.github_repository}\n{description}"
92
+ description = description[:1000]
95
93
  tools.append(GitHubAction(
96
94
  api_wrapper=github_api_wrapper,
97
- name=prefix + tool["name"],
95
+ name=tool["name"],
98
96
  mode=tool["mode"],
99
- # set unique description for declared tools to differentiate the same methods for different toolkits
100
- description=f"Repository: {github_api_wrapper.github_repository}\n" + tool["description"],
101
- args_schema=tool["args_schema"]
97
+ description=description,
98
+ args_schema=tool["args_schema"],
99
+ metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
102
100
  ))
103
101
  return cls(tools=tools)
104
102
 
@@ -11,6 +11,7 @@ from github import Auth, Github, GithubIntegration, Repository
11
11
  from github.Consts import DEFAULT_BASE_URL
12
12
  from langchain_core.tools import ToolException
13
13
 
14
+ from ..elitea_base import extend_with_file_operations, BaseCodeToolApiWrapper
14
15
  from .schemas import (
15
16
  GitHubAuthConfig,
16
17
  GitHubRepoConfig,
@@ -94,6 +95,12 @@ class GitHubClient(BaseModel):
94
95
 
95
96
  # Alita instance
96
97
  alita: Optional[Any] = Field(default=None, exclude=True)
98
+
99
+ # Import file operation methods from BaseCodeToolApiWrapper
100
+ read_file_chunk = BaseCodeToolApiWrapper.read_file_chunk
101
+ read_multiple_files = BaseCodeToolApiWrapper.read_multiple_files
102
+ search_file = BaseCodeToolApiWrapper.search_file
103
+ edit_file = BaseCodeToolApiWrapper.edit_file
97
104
 
98
105
  @property
99
106
  def github_repo_instance(self) -> Optional[Repository.Repository]:
@@ -1414,13 +1421,16 @@ class GitHubClient(BaseModel):
1414
1421
  except Exception as e:
1415
1422
  return f"File not found `{file_path}` on branch `{branch}`. Error: {str(e)}"
1416
1423
 
1417
- def _read_file(self, file_path: str, branch: str, repo_name: Optional[str] = None) -> str:
1424
+ def _read_file(self, file_path: str, branch: str, repo_name: Optional[str] = None, **kwargs) -> str:
1418
1425
  """
1419
- Read a file from specified branch
1426
+ Read a file from specified branch with optional partial read support.
1427
+
1420
1428
  Parameters:
1421
1429
  file_path(str): the file path
1422
1430
  branch(str): the branch to read the file from
1423
1431
  repo_name (Optional[str]): Name of the repository in format 'owner/repo'
1432
+ **kwargs: Additional parameters (offset, limit, head, tail) - currently ignored,
1433
+ partial read handled client-side by base class methods
1424
1434
 
1425
1435
  Returns:
1426
1436
  str: The file decoded as a string, or an error message if not found
@@ -1445,6 +1455,61 @@ class GitHubClient(BaseModel):
1445
1455
  str: The file contents as a string
1446
1456
  """
1447
1457
  return self._read_file(file_path, branch if branch else self.active_branch, repo_name)
1458
+
1459
+ def _write_file(
1460
+ self,
1461
+ file_path: str,
1462
+ content: str,
1463
+ branch: str = None,
1464
+ commit_message: str = None,
1465
+ repo_name: Optional[str] = None
1466
+ ) -> str:
1467
+ """
1468
+ Write content to a file (create or update).
1469
+
1470
+ Parameters:
1471
+ file_path: Path to the file
1472
+ content: New file content
1473
+ branch: Branch name (uses active branch if None)
1474
+ commit_message: Commit message
1475
+ repo_name: Name of the repository in format 'owner/repo'
1476
+
1477
+ Returns:
1478
+ Success message
1479
+ """
1480
+ try:
1481
+ repo = self.github_api.get_repo(repo_name) if repo_name else self.github_repo_instance
1482
+ branch = branch or self.active_branch
1483
+
1484
+ if branch == self.github_base_branch:
1485
+ raise ToolException(
1486
+ f"Cannot commit directly to the {self.github_base_branch} branch. "
1487
+ "Please create a new branch and try again."
1488
+ )
1489
+
1490
+ # Check if file exists
1491
+ try:
1492
+ existing_file = repo.get_contents(file_path, ref=branch)
1493
+ # File exists, update it
1494
+ repo.update_file(
1495
+ path=file_path,
1496
+ message=commit_message or f"Update {file_path}",
1497
+ content=content,
1498
+ branch=branch,
1499
+ sha=existing_file.sha,
1500
+ )
1501
+ return f"Updated file {file_path}"
1502
+ except:
1503
+ # File doesn't exist, create it
1504
+ repo.create_file(
1505
+ path=file_path,
1506
+ message=commit_message or f"Create {file_path}",
1507
+ content=content,
1508
+ branch=branch,
1509
+ )
1510
+ return f"Created file {file_path}"
1511
+ except Exception as e:
1512
+ raise ToolException(f"Unable to write file {file_path}: {str(e)}")
1448
1513
 
1449
1514
  def loader(self,
1450
1515
  branch: Optional[str] = None,
@@ -1877,6 +1942,7 @@ class GitHubClient(BaseModel):
1877
1942
  import traceback
1878
1943
  return f"API call failed: {traceback.format_exc()}"
1879
1944
 
1945
+ @extend_with_file_operations
1880
1946
  def get_available_tools(self) -> List[Dict[str, Any]]:
1881
1947
  return [
1882
1948
  {