alita-sdk 0.3.497__py3-none-any.whl → 0.3.515__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/cli/inventory.py +12 -195
- alita_sdk/community/inventory/__init__.py +12 -0
- alita_sdk/community/inventory/toolkit.py +9 -5
- alita_sdk/community/inventory/toolkit_utils.py +176 -0
- alita_sdk/configurations/ado.py +144 -0
- alita_sdk/configurations/confluence.py +76 -42
- alita_sdk/configurations/figma.py +76 -0
- alita_sdk/configurations/gitlab.py +2 -0
- alita_sdk/configurations/qtest.py +72 -1
- alita_sdk/configurations/report_portal.py +96 -0
- alita_sdk/configurations/sharepoint.py +148 -0
- alita_sdk/configurations/testio.py +83 -0
- alita_sdk/runtime/clients/artifact.py +2 -2
- alita_sdk/runtime/clients/client.py +24 -19
- alita_sdk/runtime/clients/sandbox_client.py +14 -0
- alita_sdk/runtime/langchain/assistant.py +48 -2
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLinesLoader.py +77 -0
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +2 -1
- alita_sdk/runtime/langchain/document_loaders/constants.py +2 -1
- alita_sdk/runtime/langchain/langraph_agent.py +8 -9
- alita_sdk/runtime/langchain/utils.py +6 -1
- alita_sdk/runtime/toolkits/artifact.py +14 -5
- alita_sdk/runtime/toolkits/datasource.py +13 -6
- alita_sdk/runtime/toolkits/mcp.py +26 -157
- alita_sdk/runtime/toolkits/planning.py +10 -5
- alita_sdk/runtime/toolkits/tools.py +23 -7
- alita_sdk/runtime/toolkits/vectorstore.py +11 -5
- alita_sdk/runtime/tools/artifact.py +139 -6
- alita_sdk/runtime/tools/llm.py +20 -10
- alita_sdk/runtime/tools/mcp_remote_tool.py +2 -3
- alita_sdk/runtime/tools/mcp_server_tool.py +2 -4
- alita_sdk/runtime/utils/AlitaCallback.py +30 -1
- alita_sdk/runtime/utils/mcp_client.py +33 -6
- alita_sdk/runtime/utils/mcp_oauth.py +125 -8
- alita_sdk/runtime/utils/mcp_sse_client.py +35 -6
- alita_sdk/runtime/utils/utils.py +2 -0
- alita_sdk/tools/__init__.py +15 -0
- alita_sdk/tools/ado/repos/__init__.py +10 -12
- alita_sdk/tools/ado/test_plan/__init__.py +23 -8
- alita_sdk/tools/ado/wiki/__init__.py +24 -8
- alita_sdk/tools/ado/wiki/ado_wrapper.py +21 -7
- alita_sdk/tools/ado/work_item/__init__.py +24 -8
- alita_sdk/tools/advanced_jira_mining/__init__.py +10 -8
- alita_sdk/tools/aws/delta_lake/__init__.py +12 -9
- alita_sdk/tools/aws/delta_lake/tool.py +5 -1
- alita_sdk/tools/azure_ai/search/__init__.py +9 -7
- alita_sdk/tools/base/tool.py +5 -1
- alita_sdk/tools/base_indexer_toolkit.py +25 -0
- alita_sdk/tools/bitbucket/__init__.py +14 -10
- alita_sdk/tools/bitbucket/api_wrapper.py +50 -2
- alita_sdk/tools/browser/__init__.py +5 -4
- alita_sdk/tools/carrier/__init__.py +5 -6
- alita_sdk/tools/cloud/aws/__init__.py +9 -7
- alita_sdk/tools/cloud/azure/__init__.py +9 -7
- alita_sdk/tools/cloud/gcp/__init__.py +9 -7
- alita_sdk/tools/cloud/k8s/__init__.py +9 -7
- alita_sdk/tools/code/linter/__init__.py +9 -8
- alita_sdk/tools/code/sonar/__init__.py +9 -7
- alita_sdk/tools/confluence/__init__.py +15 -10
- alita_sdk/tools/custom_open_api/__init__.py +11 -5
- alita_sdk/tools/elastic/__init__.py +10 -8
- alita_sdk/tools/elitea_base.py +387 -9
- alita_sdk/tools/figma/__init__.py +8 -7
- alita_sdk/tools/github/__init__.py +12 -14
- alita_sdk/tools/github/github_client.py +68 -2
- alita_sdk/tools/github/tool.py +5 -1
- alita_sdk/tools/gitlab/__init__.py +14 -11
- alita_sdk/tools/gitlab/api_wrapper.py +81 -1
- alita_sdk/tools/gitlab_org/__init__.py +9 -8
- alita_sdk/tools/google/bigquery/__init__.py +12 -12
- alita_sdk/tools/google/bigquery/tool.py +5 -1
- alita_sdk/tools/google_places/__init__.py +9 -8
- alita_sdk/tools/jira/__init__.py +15 -10
- alita_sdk/tools/keycloak/__init__.py +10 -8
- alita_sdk/tools/localgit/__init__.py +8 -3
- alita_sdk/tools/localgit/local_git.py +62 -54
- alita_sdk/tools/localgit/tool.py +5 -1
- alita_sdk/tools/memory/__init__.py +11 -3
- alita_sdk/tools/ocr/__init__.py +10 -8
- alita_sdk/tools/openapi/__init__.py +6 -2
- alita_sdk/tools/pandas/__init__.py +9 -7
- alita_sdk/tools/postman/__init__.py +10 -11
- alita_sdk/tools/pptx/__init__.py +9 -9
- alita_sdk/tools/qtest/__init__.py +9 -8
- alita_sdk/tools/rally/__init__.py +9 -8
- alita_sdk/tools/report_portal/__init__.py +11 -9
- alita_sdk/tools/salesforce/__init__.py +9 -9
- alita_sdk/tools/servicenow/__init__.py +10 -8
- alita_sdk/tools/sharepoint/__init__.py +9 -8
- alita_sdk/tools/slack/__init__.py +8 -7
- alita_sdk/tools/sql/__init__.py +9 -8
- alita_sdk/tools/testio/__init__.py +9 -8
- alita_sdk/tools/testrail/__init__.py +10 -8
- alita_sdk/tools/utils/__init__.py +9 -4
- alita_sdk/tools/utils/text_operations.py +254 -0
- alita_sdk/tools/xray/__init__.py +10 -8
- alita_sdk/tools/yagmail/__init__.py +8 -3
- alita_sdk/tools/zephyr/__init__.py +8 -7
- alita_sdk/tools/zephyr_enterprise/__init__.py +10 -8
- alita_sdk/tools/zephyr_essential/__init__.py +9 -8
- alita_sdk/tools/zephyr_scale/__init__.py +9 -8
- alita_sdk/tools/zephyr_squad/__init__.py +9 -8
- {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/METADATA +1 -1
- {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/RECORD +108 -105
- {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/entry_points.txt +0 -0
- {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.497.dist-info → alita_sdk-0.3.515.dist-info}/top_level.txt +0 -0
alita_sdk/tools/ado/wiki/__init__.py
CHANGED
@@ -9,19 +9,32 @@ from ...elitea_base import filter_missconfigured_index_tools
 from ....configurations.ado import AdoConfiguration
 from ....configurations.pgvector import PgVectorConfiguration
 from ...base.tool import BaseAction
-from ...utils import clean_string,
+from ...utils import clean_string, get_max_toolkit_length, check_connection_response
 
 name = "azure_devops_wiki"
 name_alias = 'ado_wiki'
 
+def get_toolkit(tool):
+    return AzureDevOpsWikiToolkit().get_toolkit(
+        selected_tools=tool['settings'].get('selected_tools', []),
+        ado_configuration=tool['settings']['ado_configuration'],
+        limit=tool['settings'].get('limit', 5),
+        toolkit_name=tool.get('toolkit_name', ''),
+        alita=tool['settings'].get('alita', None),
+        llm=tool['settings'].get('llm', None),
+        pgvector_configuration=tool['settings'].get('pgvector_configuration', {}),
+        collection_name=tool['toolkit_name'],
+        doctype='doc',
+        embedding_model=tool['settings'].get('embedding_model'),
+        vectorstore_type="PGVector"
+    )
+
 class AzureDevOpsWikiToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in AzureDevOpsApiWrapper.model_construct().get_available_tools()}
-        AzureDevOpsWikiToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         m = create_model(
             name_alias,
             ado_configuration=(AdoConfiguration, Field(description="Ado configuration", json_schema_extra={'configuration_types': ['ado']})),
@@ -37,7 +50,6 @@ class AzureDevOpsWikiToolkit(BaseToolkit):
             'metadata': {
                 "label": "ADO wiki",
                 "icon_url": "ado-wiki-icon.svg",
-                "max_length": AzureDevOpsWikiToolkit.toolkit_max_length,
                 "categories": ["documentation"],
                 "extra_categories": ["knowledge base", "documentation management", "wiki"],
                 "sections": {
@@ -91,16 +103,20 @@ class AzureDevOpsWikiToolkit(BaseToolkit):
         azure_devops_api_wrapper = AzureDevOpsApiWrapper(**wrapper_payload)
         available_tools = azure_devops_api_wrapper.get_available_tools()
         tools = []
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"] + f"\nADO instance: {azure_devops_api_wrapper.organization_url}/{azure_devops_api_wrapper.project}"
+            if toolkit_name:
+                description = f"{description}\nToolkit: {toolkit_name}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=azure_devops_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
             ))
         return cls(tools=tools)
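The new module-level get_toolkit(tool) gives callers a single entry point that unpacks a flat tool-configuration dict (the same shape get_tools already used). A minimal caller-side sketch, assuming the ado_configuration payload takes organization_url/project/token keys and that "get_wiki_page_by_path" is a valid tool name; those specifics are not shown in this diff and are illustrative only:

# Hypothetical usage sketch; ado_configuration field names and the tool name are assumptions.
from alita_sdk.tools.ado.wiki import get_toolkit

tool = {
    "toolkit_name": "my_wiki",
    "settings": {
        "selected_tools": ["get_wiki_page_by_path"],   # assumed tool name
        "ado_configuration": {                          # assumed field names
            "organization_url": "https://dev.azure.com/org",
            "project": "MyProject",
            "token": "<PAT>",
        },
        "limit": 5,
        "embedding_model": None,
        "pgvector_configuration": {},
    },
}

toolkit = get_toolkit(tool)            # builds an AzureDevOpsWikiToolkit
for t in toolkit.get_tools():          # LangChain BaseTool instances
    print(t.name, "-", t.description[:60])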
alita_sdk/tools/ado/wiki/ado_wrapper.py
CHANGED
@@ -52,8 +52,9 @@ ModifyPageInput = create_model(
     wiki_identified=(str, Field(description="Wiki ID or wiki name")),
     page_name=(str, Field(description="Wiki page name")),
     page_content=(str, Field(description="Wiki page content")),
-    version_identifier=(str, Field(description="Version string identifier (name of tag/branch, SHA1 of commit)")),
-    version_type=(Optional[str], Field(description="Version type (branch, tag, or commit). Determines how Id is interpreted", default="branch"))
+    version_identifier=(str, Field(description="Version string identifier (name of tag/branch, SHA1 of commit). Usually for wiki the branch is 'wikiMaster'")),
+    version_type=(Optional[str], Field(description="Version type (branch, tag, or commit). Determines how Id is interpreted", default="branch")),
+    expanded=(Optional[bool], Field(description="Whether to return the full page object or just its simplified version.", default=False))
 )
 
 RenamePageInput = create_model(
@@ -66,6 +67,19 @@ RenamePageInput = create_model(
 )
 
 
+def _format_wiki_page_response(wiki_page_response, expanded: bool = False):
+    """Format wiki page response."""
+    try:
+        return {
+            'eTag': wiki_page_response.eTag,
+            'page': wiki_page_response.page.__dict__ if wiki_page_response.page else None
+        } if expanded else {"eTag": wiki_page_response.eTag, "id": wiki_page_response.page.id,
+                            "page": wiki_page_response.page.url}
+    except:
+        logger.error(f"Unable to format wiki page response: {wiki_page_response}")
+        return wiki_page_response
+
+
 class AzureDevOpsApiWrapper(NonCodeIndexerToolkit):
     # TODO use ado_configuration instead of organization_url, project and token
     organization_url: str
@@ -226,7 +240,7 @@ class AzureDevOpsApiWrapper(NonCodeIndexerToolkit):
             logger.error(f"Unable to rename wiki page: {str(e)}")
             return ToolException(f"Unable to rename wiki page: {str(e)}")
 
-    def modify_wiki_page(self, wiki_identified: str, page_name: str, page_content: str, version_identifier: str, version_type: str = "branch"):
+    def modify_wiki_page(self, wiki_identified: str, page_name: str, page_content: str, version_identifier: str, version_type: str = "branch", expanded: Optional[bool] = False):
         """Create or Update ADO wiki page content."""
         try:
             all_wikis = [wiki.name for wiki in self._client.get_all_wikis(project=self.project)]
@@ -257,24 +271,24 @@ class AzureDevOpsApiWrapper(NonCodeIndexerToolkit):
                 return ToolException(f"Unable to extract page by path {page_name}: {str(get_page_e)}")
 
             try:
-                return self._client.create_or_update_page(
+                return _format_wiki_page_response(self._client.create_or_update_page(
                     project=self.project,
                     wiki_identifier=wiki_identified,
                     path=page_name,
                     parameters=WikiPageCreateOrUpdateParameters(content=page_content),
                     version=version,
                     version_descriptor=GitVersionDescriptor(version=version_identifier, version_type=version_type)
-                )
+                ), expanded=expanded)
             except AzureDevOpsServiceError as e:
                 if "The version '{0}' either is invalid or does not exist." in str(e):
                     # Retry the request without version_descriptor
-                    return self._client.create_or_update_page(
+                    return _format_wiki_page_response(wiki_page_response=self._client.create_or_update_page(
                         project=self.project,
                         wiki_identifier=wiki_identified,
                         path=page_name,
                         parameters=WikiPageCreateOrUpdateParameters(content=page_content),
                         version=version
-                    )
+                    ), expanded=expanded)
                 else:
                     raise
             except Exception as e:
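The new expanded flag controls whether modify_wiki_page returns the full page object or a compact summary. A self-contained sketch of the formatting logic from this diff, using a SimpleNamespace stand-in for the Azure DevOps response (the eTag, page.id and page.url attributes match the diff; the sample values are illustrative):

from types import SimpleNamespace

def format_wiki_page_response(resp, expanded=False):
    # Mirrors _format_wiki_page_response above: full page.__dict__ when expanded,
    # otherwise just eTag, page id and page URL.
    if expanded:
        return {"eTag": resp.eTag, "page": resp.page.__dict__ if resp.page else None}
    return {"eTag": resp.eTag, "id": resp.page.id, "page": resp.page.url}

fake = SimpleNamespace(
    eTag="abc123",
    page=SimpleNamespace(id=42, url="https://dev.azure.com/org/project/_wiki/wikis/wiki/42"),
)
print(format_wiki_page_response(fake))                 # compact: eTag, id, page URL
print(format_wiki_page_response(fake, expanded=True))  # full page.__dict__ payload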
alita_sdk/tools/ado/work_item/__init__.py
CHANGED
@@ -9,18 +9,31 @@ from ...elitea_base import filter_missconfigured_index_tools
 from ....configurations.ado import AdoConfiguration
 from ....configurations.pgvector import PgVectorConfiguration
 from ...base.tool import BaseAction
-from ...utils import clean_string,
+from ...utils import clean_string, get_max_toolkit_length, check_connection_response
 
 name = "ado_boards"
 
+def get_toolkit(tool):
+    return AzureDevOpsWorkItemsToolkit().get_toolkit(
+        selected_tools=tool['settings'].get('selected_tools', []),
+        ado_configuration=tool['settings']['ado_configuration'],
+        limit=tool['settings'].get('limit', 5),
+        toolkit_name=tool.get('toolkit_name', ''),
+        alita=tool['settings'].get('alita', None),
+        llm=tool['settings'].get('llm', None),
+        pgvector_configuration=tool['settings'].get('pgvector_configuration', {}),
+        collection_name=tool['toolkit_name'],
+        doctype='doc',
+        embedding_model=tool['settings'].get('embedding_model'),
+        vectorstore_type="PGVector"
+    )
+
 class AzureDevOpsWorkItemsToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in AzureDevOpsApiWrapper.model_construct().get_available_tools()}
-        AzureDevOpsWorkItemsToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         m = create_model(
             name,
             ado_configuration=(AdoConfiguration, Field(description="Ado Work Item configuration", json_schema_extra={'configuration_types': ['ado']})),
@@ -37,7 +50,6 @@ class AzureDevOpsWorkItemsToolkit(BaseToolkit):
             'metadata': {
                 "label": "ADO boards",
                 "icon_url": "ado-boards-icon.svg",
-                "max_length": AzureDevOpsWorkItemsToolkit.toolkit_max_length,
                 "categories": ["project management"],
                 "extra_categories": ["work item management", "issue tracking", "agile boards"],
                 "sections": {
@@ -92,16 +104,20 @@ class AzureDevOpsWorkItemsToolkit(BaseToolkit):
         azure_devops_api_wrapper = AzureDevOpsApiWrapper(**wrapper_payload)
         available_tools = azure_devops_api_wrapper.get_available_tools()
         tools = []
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"] + f"\nADO instance: {azure_devops_api_wrapper.organization_url}/{azure_devops_api_wrapper.project}"
+            if toolkit_name:
+                description = f"{description}\nToolkit: {toolkit_name}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=azure_devops_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
             ))
         return cls(tools=tools)
alita_sdk/tools/advanced_jira_mining/__init__.py
CHANGED
@@ -6,7 +6,7 @@ from pydantic import create_model, BaseModel, Field, SecretStr
 from .data_mining_wrapper import AdvancedJiraMiningWrapper
 from ..base.tool import BaseAction
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string,
+from ..utils import clean_string, get_max_toolkit_length
 
 name = "advanced_jira_mining"
 
@@ -28,15 +28,13 @@ def get_tools(tool):
 
 class AdvancedJiraMiningToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in AdvancedJiraMiningWrapper.model_construct().get_available_tools()}
-        AdvancedJiraMiningToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            jira_base_url=(str, Field(default="", title="Jira URL", description="Jira URL", json_schema_extra={'toolkit_name': True
+            jira_base_url=(str, Field(default="", title="Jira URL", description="Jira URL", json_schema_extra={'toolkit_name': True})),
             confluence_base_url=(str, Field(default="", title="Confluence URL", description="Confluence URL")),
             model_type=(str, Field(default="", title="Model type", description="Model type")),
             summarization_prompt=(Optional[str], Field(default=None, title="Summarization prompt", description="Summarization prompt")),
@@ -66,17 +64,21 @@ class AdvancedJiraMiningToolkit(BaseToolkit):
         selected_tools = []
         jira_mining_wrapper = AdvancedJiraMiningWrapper(**kwargs)
         available_tools = jira_mining_wrapper.get_available_tools()
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=jira_mining_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
             ))
         return cls(tools=tools)
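Across these toolkits the old clean_string/TOOLKIT_SPLITTER name prefix is replaced by one shared convention: keep the original tool name, fold the toolkit name into the description, cap the description at 1000 characters, and carry the toolkit name in metadata. A small sketch of that convention in isolation (the helper name below is illustrative, not part of the SDK):

from typing import Optional

def build_tool_description(base_description: str, toolkit_name: Optional[str]) -> tuple:
    # Same steps the updated toolkits perform inline before creating BaseAction.
    description = base_description
    if toolkit_name:
        description = f"Toolkit: {toolkit_name}\n{description}"
    description = description[:1000]          # hard cap applied in every toolkit
    metadata = {"toolkit_name": toolkit_name} if toolkit_name else {}
    return description, metadata

desc, meta = build_tool_description("Search Jira issues linked to a Confluence page.", "jira_mining_prod")
print(desc.splitlines()[0])   # Toolkit: jira_mining_prod
print(meta)                   # {'toolkit_name': 'jira_mining_prod'}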
alita_sdk/tools/aws/delta_lake/__init__.py
CHANGED
@@ -6,7 +6,7 @@ from langchain_core.tools import BaseTool, BaseToolkit
 from pydantic import BaseModel, Field, computed_field, field_validator
 
 from alita_sdk.configurations.delta_lake import DeltaLakeConfiguration
-from ...utils import
+from ...utils import clean_string, get_max_toolkit_length
 from .api_wrapper import DeltaLakeApiWrapper
 from .tool import DeltaLakeAction
 
@@ -21,10 +21,6 @@ def get_available_tools() -> dict[str, dict]:
     }
     return available_tools
 
-toolkit_max_length = lru_cache(maxsize=1)(
-    lambda: get_max_toolkit_length(get_available_tools())
-)
-
 class DeltaLakeToolkitConfig(BaseModel):
     class Config:
         title = name
@@ -87,9 +83,10 @@ class DeltaLakeToolkit(BaseToolkit):
 
     @computed_field
     @property
-    def
+    def toolkit_context(self) -> str:
+        """Returns toolkit context for descriptions (max 1000 chars)."""
         return (
-            clean_string(self.toolkit_name,
+            f" [Toolkit: {clean_string(self.toolkit_name, 0)}]"
             if self.toolkit_name
             else ""
         )
@@ -118,12 +115,18 @@ class DeltaLakeToolkit(BaseToolkit):
         selected_tools = set(selected_tools)
         for t in instance.available_tools:
             if t["name"] in selected_tools:
+                description = t["description"]
+                if toolkit_name:
+                    description = f"Toolkit: {toolkit_name}\n{description}"
+                description = f"S3 Path: {getattr(instance.api_wrapper, 's3_path', '')} Table Path: {getattr(instance.api_wrapper, 'table_path', '')}\n{description}"
+                description = description[:1000]
                 instance.tools.append(
                     DeltaLakeAction(
                         api_wrapper=instance.api_wrapper,
-                        name=
-                        description=
+                        name=t["name"],
+                        description=description,
                         args_schema=t["args_schema"],
+                        metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
                     )
                 )
         return instance
alita_sdk/tools/aws/delta_lake/tool.py
CHANGED
@@ -29,7 +29,11 @@ class DeltaLakeAction(BaseTool):
     ) -> str:
         """Use the Delta Lake API to run an operation."""
         try:
+            # Strip numeric suffix added for deduplication (_2, _3, etc.)
+            # to get the original tool name that exists in the wrapper
+            import re
+            tool_name = re.sub(r'_\d+$', '', self.name)
             # Use the tool name to dispatch to the correct API wrapper method
-            return self.api_wrapper.run(
+            return self.api_wrapper.run(tool_name, *args, **kwargs)
         except Exception as e:
             return f"Error: {format_exc()}"
alita_sdk/tools/azure_ai/search/__init__.py
CHANGED
@@ -6,7 +6,7 @@ from langchain_core.tools import BaseToolkit, BaseTool
 from pydantic import create_model, BaseModel, ConfigDict, Field
 
 from ...elitea_base import filter_missconfigured_index_tools
-from ...utils import clean_string,
+from ...utils import clean_string, get_max_toolkit_length, check_connection_response
 from ....configurations.azure_search import AzureSearchConfiguration
 import requests
 
@@ -31,12 +31,10 @@ def get_toolkit():
 
 class AzureSearchToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in AzureSearchApiWrapper.model_construct().get_available_tools()}
-        AzureSearchToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         m = create_model(
             name,
             index_name=(str, Field(description="Azure Search index name")),
@@ -79,17 +77,21 @@ class AzureSearchToolkit(BaseToolkit):
         }
         azure_search_api_wrapper = AzureSearchApiWrapper(**wrapper_payload)
         available_tools = azure_search_api_wrapper.get_available_tools()
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=azure_search_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
             ))
         return cls(tools=tools)
alita_sdk/tools/base/tool.py
CHANGED
@@ -23,6 +23,10 @@ class BaseAction(BaseTool):
     ) -> ToolException | str:
         """Use the Confluence API to run an operation."""
         try:
-
+            # Strip numeric suffix added for deduplication (_2, _3, etc.)
+            # to get the original tool name that exists in the wrapper
+            import re
+            tool_name = re.sub(r'_\d+$', '', self.name)
+            return self.api_wrapper.run(tool_name, *args, **kwargs)
         except Exception as e:
             return ToolException(f"An exception occurred: {e}")
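BaseAction (and DeltaLakeAction above) now strips a trailing _<number> from the tool name before dispatching to the wrapper, so tools renamed by deduplication (e.g. create_page_2) still resolve to the original wrapper method. The regex only removes a purely numeric suffix, as this sketch shows:

import re

def original_tool_name(name: str) -> str:
    # Same substitution BaseAction applies before calling api_wrapper.run(...)
    return re.sub(r'_\d+$', '', name)

print(original_tool_name("create_page_2"))   # create_page
print(original_tool_name("create_page"))     # create_page (unchanged)
print(original_tool_name("search_v2"))       # search_v2 (suffix must be digits only)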
alita_sdk/tools/base_indexer_toolkit.py
CHANGED
@@ -387,6 +387,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
     def remove_index(self, index_name: str = ""):
         """Cleans the indexed data in the collection."""
         super()._clean_collection(index_name=index_name, including_index_meta=True)
+        self._emit_index_data_removed_event(index_name)
         return (f"Collection '{index_name}' has been removed from the vector store.\n"
                 f"Available collections: {self.list_collections()}") if index_name \
             else "All collections have been removed from the vector store."
@@ -641,6 +642,30 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
         except Exception as e:
             logger.warning(f"Failed to emit index_data_status event: {e}")
 
+    def _emit_index_data_removed_event(self, index_name: str):
+        """
+        Emit custom event for index data removing.
+
+        Args:
+            index_name: The name of the index
+            toolkit_id: The toolkit identifier
+        """
+        # Build event message
+        event_data = {
+            "index_name": index_name,
+            "toolkit_id": self.toolkit_id,
+            "project_id": self.alita.project_id,
+        }
+        # Emit the event
+        try:
+            dispatch_custom_event("index_data_removed", event_data)
+            logger.debug(
+                f"Emitted index_data_removed event for index "
+                f"'{index_name}': {event_data}"
+            )
+        except Exception as e:
+            logger.warning(f"Failed to emit index_data_removed event: {e}")
+
     def get_available_tools(self):
         """
         Returns the standardized vector search tools (search operations only).
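remove_index now emits an index_data_removed custom event alongside the existing index_data_status one. A hedged sketch of how a consumer might observe it with a LangChain custom-event handler; the payload keys are taken from the diff, while the handler wiring assumes the standard on_custom_event hook and that dispatch is happening inside a runnable with callbacks attached:

from langchain_core.callbacks import BaseCallbackHandler

class IndexRemovalLogger(BaseCallbackHandler):
    """Logs index_data_removed events emitted by BaseIndexerToolkit.remove_index."""

    def on_custom_event(self, name, data, *, run_id, tags=None, metadata=None, **kwargs):
        if name == "index_data_removed":
            print(f"index '{data['index_name']}' removed "
                  f"(toolkit={data['toolkit_id']}, project={data['project_id']})")

# The handler would be passed via config={"callbacks": [IndexRemovalLogger()]}
# on the chain/agent invocation that ends up calling remove_index.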
alita_sdk/tools/bitbucket/__init__.py
CHANGED
@@ -9,7 +9,7 @@ from pydantic import BaseModel, Field, ConfigDict, create_model
 
 from ..base.tool import BaseAction
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string,
+from ..utils import clean_string, get_max_toolkit_length, check_connection_response
 from ...configurations.bitbucket import BitbucketConfiguration
 from ...configurations.pgvector import PgVectorConfiguration
 import requests
@@ -18,7 +18,7 @@ import requests
 name = "bitbucket"
 
 
-def
+def get_toolkit(tool):
     return AlitaBitbucketToolkit.get_toolkit(
         selected_tools=tool['settings'].get('selected_tools', []),
         project=tool['settings']['project'],
@@ -33,18 +33,19 @@ def get_tools(tool):
         doctype='code',
         embedding_model=tool['settings'].get('embedding_model'),
         toolkit_name=tool.get('toolkit_name')
-)
+    )
+
+def get_tools(tool):
+    return get_toolkit(tool).get_tools()
 
 
 class AlitaBitbucketToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in
                           BitbucketAPIWrapper.model_construct().get_available_tools()}
-        AlitaBitbucketToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         m = create_model(
             name,
             project=(str, Field(description="Project/Workspace")),
@@ -61,7 +62,6 @@ class AlitaBitbucketToolkit(BaseToolkit):
             'metadata':
                 {
                     "label": "Bitbucket", "icon_url": "bitbucket-icon.svg",
-                    "max_length": AlitaBitbucketToolkit.toolkit_max_length,
                     "categories": ["code repositories"],
                     "extra_categories": ["bitbucket", "git", "repository", "code", "version control"],
                 }
@@ -100,17 +100,21 @@ class AlitaBitbucketToolkit(BaseToolkit):
         }
         bitbucket_api_wrapper = BitbucketAPIWrapper(**wrapper_payload)
         available_tools: List[Dict] = bitbucket_api_wrapper.get_available_tools()
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool['name'] not in selected_tools:
                     continue
+            description = tool["description"] + f"\nrepo: {bitbucket_api_wrapper.repository}"
+            if toolkit_name:
+                description = f"{description}\nToolkit: {toolkit_name}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=bitbucket_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={"toolkit_name": toolkit_name} if toolkit_name else {}
             ))
         return cls(tools=tools)
alita_sdk/tools/bitbucket/api_wrapper.py
CHANGED
@@ -13,6 +13,7 @@ from pydantic.fields import PrivateAttr
 
 from ..code_indexer_toolkit import CodeIndexerToolkit
 from ..utils.available_tools_decorator import extend_with_parent_available_tools
+from ..elitea_base import extend_with_file_operations, BaseCodeToolApiWrapper
 
 logger = logging.getLogger(__name__)
 
@@ -123,6 +124,12 @@ class BitbucketAPIWrapper(CodeIndexerToolkit):
 
     _bitbucket: Any = PrivateAttr()
     _active_branch: Any = PrivateAttr()
+
+    # Import file operation methods from BaseCodeToolApiWrapper
+    read_file_chunk = BaseCodeToolApiWrapper.read_file_chunk
+    read_multiple_files = BaseCodeToolApiWrapper.read_multiple_files
+    search_file = BaseCodeToolApiWrapper.search_file
+    edit_file = BaseCodeToolApiWrapper.edit_file
     url: str = ''
     project: str = ''
     """The key of the project this repo belongs to"""
@@ -360,12 +367,15 @@ class BitbucketAPIWrapper(CodeIndexerToolkit):
         # except Exception as e:
         #     raise ToolException(f"Can't extract file commit hash (`{file_path}`) due to error:\n{str(e)}")
 
-    def _read_file(self, file_path: str, branch: str) -> str:
+    def _read_file(self, file_path: str, branch: str, **kwargs) -> str:
         """
-        Reads a file from the
+        Reads a file from the bitbucket repo with optional partial read support.
+
         Parameters:
             file_path(str): the file path
             branch(str): branch name (by default: active_branch)
+            **kwargs: Additional parameters (offset, limit, head, tail) - currently ignored,
+                      partial read handled client-side by base class methods
         Returns:
             str: The file decoded as a string
         """
@@ -399,8 +409,46 @@ class BitbucketAPIWrapper(CodeIndexerToolkit):
             return self._read_file(file_path, branch)
         except Exception as e:
             return f"Failed to read file {file_path}: {str(e)}"
+
+    def _write_file(
+        self,
+        file_path: str,
+        content: str,
+        branch: str = None,
+        commit_message: str = None
+    ) -> str:
+        """
+        Write content to a file (create or update).
+
+        Parameters:
+            file_path: Path to the file
+            content: New file content
+            branch: Branch name (uses active branch if None)
+            commit_message: Commit message (not used by Bitbucket API)
+
+        Returns:
+            Success message
+        """
+        try:
+            branch = branch or self._active_branch
+
+            # Check if file exists by attempting to read it
+            try:
+                self._read_file(file_path, branch)
+                # File exists, update it using OLD/NEW format
+                old_content = self._read_file(file_path, branch)
+                update_query = f"OLD <<<<\n{old_content}\n>>>> OLD\nNEW <<<<\n{content}\n>>>> NEW"
+                self._bitbucket.update_file(file_path=file_path, update_query=update_query, branch=branch)
+                return f"Updated file {file_path}"
+            except:
+                # File doesn't exist, create it
+                self._bitbucket.create_file(file_path=file_path, file_contents=content, branch=branch)
+                return f"Created file {file_path}"
+        except Exception as e:
+            raise ToolException(f"Unable to write file {file_path}: {str(e)}")
 
     @extend_with_parent_available_tools
+    @extend_with_file_operations
     def get_available_tools(self):
         return [
             {
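The new _write_file helper updates an existing file by wrapping the previous and new contents in the OLD <<<< / NEW <<<< markers that the underlying update_file call expects, and falls back to creating the file when the read fails. A minimal sketch of just the query construction (the file contents are illustrative):

def build_update_query(old_content: str, new_content: str) -> str:
    # Same OLD/NEW block format _write_file passes to self._bitbucket.update_file(...)
    return f"OLD <<<<\n{old_content}\n>>>> OLD\nNEW <<<<\n{new_content}\n>>>> NEW"

print(build_update_query("version = 1", "version = 2"))
# OLD <<<<
# version = 1
# >>>> OLD
# NEW <<<<
# version = 2
# >>>> NEW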
alita_sdk/tools/browser/__init__.py
CHANGED
@@ -8,7 +8,7 @@ from langchain_community.utilities.wikipedia import WikipediaAPIWrapper
 from .google_search_rag import GoogleSearchResults
 from .crawler import SingleURLCrawler, MultiURLCrawler, GetHTMLContent, GetPDFContent
 from .wiki import WikipediaQueryRun
-from ..utils import get_max_toolkit_length, clean_string
+from ..utils import get_max_toolkit_length, clean_string
 from ...configurations.browser import BrowserConfiguration
 from logging import getLogger
 
@@ -42,7 +42,6 @@ class BrowserToolkit(BaseToolkit):
             'google': GoogleSearchResults.__pydantic_fields__['args_schema'].default.schema(),
             'wiki': WikipediaQueryRun.__pydantic_fields__['args_schema'].default.schema()
         }
-        BrowserToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
 
     def validate_google_fields(cls, values):
         if 'google' in values.get('selected_tools', []):
@@ -90,7 +89,6 @@ class BrowserToolkit(BaseToolkit):
         }
 
         tools = []
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         if not selected_tools:
             selected_tools = [
                 'single_url_crawler',
@@ -127,7 +125,10 @@ class BrowserToolkit(BaseToolkit):
 
             # Only add the tool if it was successfully created
             if tool_entry is not None:
-
+                if toolkit_name:
+                    tool_entry.description = f"{tool_entry.description}\nToolkit: {toolkit_name}"
+                tool_entry.description = tool_entry.description[:1000]
+                tool_entry.metadata = {"toolkit_name": toolkit_name}
                 tools.append(tool_entry)
         return cls(tools=tools)