alita-sdk 0.3.379__py3-none-any.whl → 0.3.627__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alita_sdk/cli/__init__.py +10 -0
- alita_sdk/cli/__main__.py +17 -0
- alita_sdk/cli/agent/__init__.py +5 -0
- alita_sdk/cli/agent/default.py +258 -0
- alita_sdk/cli/agent_executor.py +156 -0
- alita_sdk/cli/agent_loader.py +245 -0
- alita_sdk/cli/agent_ui.py +228 -0
- alita_sdk/cli/agents.py +3113 -0
- alita_sdk/cli/callbacks.py +647 -0
- alita_sdk/cli/cli.py +168 -0
- alita_sdk/cli/config.py +306 -0
- alita_sdk/cli/context/__init__.py +30 -0
- alita_sdk/cli/context/cleanup.py +198 -0
- alita_sdk/cli/context/manager.py +731 -0
- alita_sdk/cli/context/message.py +285 -0
- alita_sdk/cli/context/strategies.py +289 -0
- alita_sdk/cli/context/token_estimation.py +127 -0
- alita_sdk/cli/formatting.py +182 -0
- alita_sdk/cli/input_handler.py +419 -0
- alita_sdk/cli/inventory.py +1073 -0
- alita_sdk/cli/mcp_loader.py +315 -0
- alita_sdk/cli/testcases/__init__.py +94 -0
- alita_sdk/cli/testcases/data_generation.py +119 -0
- alita_sdk/cli/testcases/discovery.py +96 -0
- alita_sdk/cli/testcases/executor.py +84 -0
- alita_sdk/cli/testcases/logger.py +85 -0
- alita_sdk/cli/testcases/parser.py +172 -0
- alita_sdk/cli/testcases/prompts.py +91 -0
- alita_sdk/cli/testcases/reporting.py +125 -0
- alita_sdk/cli/testcases/setup.py +108 -0
- alita_sdk/cli/testcases/test_runner.py +282 -0
- alita_sdk/cli/testcases/utils.py +39 -0
- alita_sdk/cli/testcases/validation.py +90 -0
- alita_sdk/cli/testcases/workflow.py +196 -0
- alita_sdk/cli/toolkit.py +327 -0
- alita_sdk/cli/toolkit_loader.py +85 -0
- alita_sdk/cli/tools/__init__.py +43 -0
- alita_sdk/cli/tools/approval.py +224 -0
- alita_sdk/cli/tools/filesystem.py +1751 -0
- alita_sdk/cli/tools/planning.py +389 -0
- alita_sdk/cli/tools/terminal.py +414 -0
- alita_sdk/community/__init__.py +72 -12
- alita_sdk/community/inventory/__init__.py +236 -0
- alita_sdk/community/inventory/config.py +257 -0
- alita_sdk/community/inventory/enrichment.py +2137 -0
- alita_sdk/community/inventory/extractors.py +1469 -0
- alita_sdk/community/inventory/ingestion.py +3172 -0
- alita_sdk/community/inventory/knowledge_graph.py +1457 -0
- alita_sdk/community/inventory/parsers/__init__.py +218 -0
- alita_sdk/community/inventory/parsers/base.py +295 -0
- alita_sdk/community/inventory/parsers/csharp_parser.py +907 -0
- alita_sdk/community/inventory/parsers/go_parser.py +851 -0
- alita_sdk/community/inventory/parsers/html_parser.py +389 -0
- alita_sdk/community/inventory/parsers/java_parser.py +593 -0
- alita_sdk/community/inventory/parsers/javascript_parser.py +629 -0
- alita_sdk/community/inventory/parsers/kotlin_parser.py +768 -0
- alita_sdk/community/inventory/parsers/markdown_parser.py +362 -0
- alita_sdk/community/inventory/parsers/python_parser.py +604 -0
- alita_sdk/community/inventory/parsers/rust_parser.py +858 -0
- alita_sdk/community/inventory/parsers/swift_parser.py +832 -0
- alita_sdk/community/inventory/parsers/text_parser.py +322 -0
- alita_sdk/community/inventory/parsers/yaml_parser.py +370 -0
- alita_sdk/community/inventory/patterns/__init__.py +61 -0
- alita_sdk/community/inventory/patterns/ast_adapter.py +380 -0
- alita_sdk/community/inventory/patterns/loader.py +348 -0
- alita_sdk/community/inventory/patterns/registry.py +198 -0
- alita_sdk/community/inventory/presets.py +535 -0
- alita_sdk/community/inventory/retrieval.py +1403 -0
- alita_sdk/community/inventory/toolkit.py +173 -0
- alita_sdk/community/inventory/toolkit_utils.py +176 -0
- alita_sdk/community/inventory/visualize.py +1370 -0
- alita_sdk/configurations/__init__.py +1 -1
- alita_sdk/configurations/ado.py +141 -20
- alita_sdk/configurations/bitbucket.py +94 -2
- alita_sdk/configurations/confluence.py +130 -1
- alita_sdk/configurations/figma.py +76 -0
- alita_sdk/configurations/gitlab.py +91 -0
- alita_sdk/configurations/jira.py +103 -0
- alita_sdk/configurations/openapi.py +329 -0
- alita_sdk/configurations/qtest.py +72 -1
- alita_sdk/configurations/report_portal.py +96 -0
- alita_sdk/configurations/sharepoint.py +148 -0
- alita_sdk/configurations/testio.py +83 -0
- alita_sdk/configurations/testrail.py +88 -0
- alita_sdk/configurations/xray.py +93 -0
- alita_sdk/configurations/zephyr_enterprise.py +93 -0
- alita_sdk/configurations/zephyr_essential.py +75 -0
- alita_sdk/runtime/clients/artifact.py +3 -3
- alita_sdk/runtime/clients/client.py +388 -46
- alita_sdk/runtime/clients/mcp_discovery.py +342 -0
- alita_sdk/runtime/clients/mcp_manager.py +262 -0
- alita_sdk/runtime/clients/sandbox_client.py +8 -21
- alita_sdk/runtime/langchain/_constants_bkup.py +1318 -0
- alita_sdk/runtime/langchain/assistant.py +157 -39
- alita_sdk/runtime/langchain/constants.py +647 -1
- alita_sdk/runtime/langchain/document_loaders/AlitaDocxMammothLoader.py +315 -3
- alita_sdk/runtime/langchain/document_loaders/AlitaExcelLoader.py +103 -60
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLinesLoader.py +77 -0
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +10 -4
- alita_sdk/runtime/langchain/document_loaders/AlitaPowerPointLoader.py +226 -7
- alita_sdk/runtime/langchain/document_loaders/AlitaTextLoader.py +5 -2
- alita_sdk/runtime/langchain/document_loaders/constants.py +40 -19
- alita_sdk/runtime/langchain/langraph_agent.py +405 -84
- alita_sdk/runtime/langchain/utils.py +106 -7
- alita_sdk/runtime/llms/preloaded.py +2 -6
- alita_sdk/runtime/models/mcp_models.py +61 -0
- alita_sdk/runtime/skills/__init__.py +91 -0
- alita_sdk/runtime/skills/callbacks.py +498 -0
- alita_sdk/runtime/skills/discovery.py +540 -0
- alita_sdk/runtime/skills/executor.py +610 -0
- alita_sdk/runtime/skills/input_builder.py +371 -0
- alita_sdk/runtime/skills/models.py +330 -0
- alita_sdk/runtime/skills/registry.py +355 -0
- alita_sdk/runtime/skills/skill_runner.py +330 -0
- alita_sdk/runtime/toolkits/__init__.py +31 -0
- alita_sdk/runtime/toolkits/application.py +29 -10
- alita_sdk/runtime/toolkits/artifact.py +20 -11
- alita_sdk/runtime/toolkits/datasource.py +13 -6
- alita_sdk/runtime/toolkits/mcp.py +783 -0
- alita_sdk/runtime/toolkits/mcp_config.py +1048 -0
- alita_sdk/runtime/toolkits/planning.py +178 -0
- alita_sdk/runtime/toolkits/skill_router.py +238 -0
- alita_sdk/runtime/toolkits/subgraph.py +251 -6
- alita_sdk/runtime/toolkits/tools.py +356 -69
- alita_sdk/runtime/toolkits/vectorstore.py +11 -5
- alita_sdk/runtime/tools/__init__.py +10 -3
- alita_sdk/runtime/tools/application.py +27 -6
- alita_sdk/runtime/tools/artifact.py +511 -28
- alita_sdk/runtime/tools/data_analysis.py +183 -0
- alita_sdk/runtime/tools/function.py +67 -35
- alita_sdk/runtime/tools/graph.py +10 -4
- alita_sdk/runtime/tools/image_generation.py +148 -46
- alita_sdk/runtime/tools/llm.py +1003 -128
- alita_sdk/runtime/tools/loop.py +3 -1
- alita_sdk/runtime/tools/loop_output.py +3 -1
- alita_sdk/runtime/tools/mcp_inspect_tool.py +284 -0
- alita_sdk/runtime/tools/mcp_remote_tool.py +181 -0
- alita_sdk/runtime/tools/mcp_server_tool.py +8 -5
- alita_sdk/runtime/tools/planning/__init__.py +36 -0
- alita_sdk/runtime/tools/planning/models.py +246 -0
- alita_sdk/runtime/tools/planning/wrapper.py +607 -0
- alita_sdk/runtime/tools/router.py +2 -4
- alita_sdk/runtime/tools/sandbox.py +65 -48
- alita_sdk/runtime/tools/skill_router.py +776 -0
- alita_sdk/runtime/tools/tool.py +3 -1
- alita_sdk/runtime/tools/vectorstore.py +9 -3
- alita_sdk/runtime/tools/vectorstore_base.py +70 -14
- alita_sdk/runtime/utils/AlitaCallback.py +137 -21
- alita_sdk/runtime/utils/constants.py +5 -1
- alita_sdk/runtime/utils/mcp_client.py +492 -0
- alita_sdk/runtime/utils/mcp_oauth.py +361 -0
- alita_sdk/runtime/utils/mcp_sse_client.py +434 -0
- alita_sdk/runtime/utils/mcp_tools_discovery.py +124 -0
- alita_sdk/runtime/utils/serialization.py +155 -0
- alita_sdk/runtime/utils/streamlit.py +40 -13
- alita_sdk/runtime/utils/toolkit_utils.py +30 -9
- alita_sdk/runtime/utils/utils.py +36 -0
- alita_sdk/tools/__init__.py +134 -35
- alita_sdk/tools/ado/repos/__init__.py +51 -32
- alita_sdk/tools/ado/repos/repos_wrapper.py +148 -89
- alita_sdk/tools/ado/test_plan/__init__.py +25 -9
- alita_sdk/tools/ado/test_plan/test_plan_wrapper.py +23 -1
- alita_sdk/tools/ado/utils.py +1 -18
- alita_sdk/tools/ado/wiki/__init__.py +25 -12
- alita_sdk/tools/ado/wiki/ado_wrapper.py +291 -22
- alita_sdk/tools/ado/work_item/__init__.py +26 -13
- alita_sdk/tools/ado/work_item/ado_wrapper.py +73 -11
- alita_sdk/tools/advanced_jira_mining/__init__.py +11 -8
- alita_sdk/tools/aws/delta_lake/__init__.py +13 -9
- alita_sdk/tools/aws/delta_lake/tool.py +5 -1
- alita_sdk/tools/azure_ai/search/__init__.py +11 -8
- alita_sdk/tools/azure_ai/search/api_wrapper.py +1 -1
- alita_sdk/tools/base/tool.py +5 -1
- alita_sdk/tools/base_indexer_toolkit.py +271 -84
- alita_sdk/tools/bitbucket/__init__.py +17 -11
- alita_sdk/tools/bitbucket/api_wrapper.py +59 -11
- alita_sdk/tools/bitbucket/cloud_api_wrapper.py +49 -35
- alita_sdk/tools/browser/__init__.py +5 -4
- alita_sdk/tools/carrier/__init__.py +5 -6
- alita_sdk/tools/carrier/backend_reports_tool.py +6 -6
- alita_sdk/tools/carrier/run_ui_test_tool.py +6 -6
- alita_sdk/tools/carrier/ui_reports_tool.py +5 -5
- alita_sdk/tools/chunkers/__init__.py +3 -1
- alita_sdk/tools/chunkers/code/treesitter/treesitter.py +37 -13
- alita_sdk/tools/chunkers/sematic/json_chunker.py +1 -0
- alita_sdk/tools/chunkers/sematic/markdown_chunker.py +97 -6
- alita_sdk/tools/chunkers/sematic/proposal_chunker.py +1 -1
- alita_sdk/tools/chunkers/universal_chunker.py +270 -0
- alita_sdk/tools/cloud/aws/__init__.py +10 -7
- alita_sdk/tools/cloud/azure/__init__.py +10 -7
- alita_sdk/tools/cloud/gcp/__init__.py +10 -7
- alita_sdk/tools/cloud/k8s/__init__.py +10 -7
- alita_sdk/tools/code/linter/__init__.py +10 -8
- alita_sdk/tools/code/loaders/codesearcher.py +3 -2
- alita_sdk/tools/code/sonar/__init__.py +11 -8
- alita_sdk/tools/code_indexer_toolkit.py +82 -22
- alita_sdk/tools/confluence/__init__.py +22 -16
- alita_sdk/tools/confluence/api_wrapper.py +107 -30
- alita_sdk/tools/confluence/loader.py +14 -2
- alita_sdk/tools/custom_open_api/__init__.py +12 -5
- alita_sdk/tools/elastic/__init__.py +11 -8
- alita_sdk/tools/elitea_base.py +493 -30
- alita_sdk/tools/figma/__init__.py +58 -11
- alita_sdk/tools/figma/api_wrapper.py +1235 -143
- alita_sdk/tools/figma/figma_client.py +73 -0
- alita_sdk/tools/figma/toon_tools.py +2748 -0
- alita_sdk/tools/github/__init__.py +14 -15
- alita_sdk/tools/github/github_client.py +224 -100
- alita_sdk/tools/github/graphql_client_wrapper.py +119 -33
- alita_sdk/tools/github/schemas.py +14 -5
- alita_sdk/tools/github/tool.py +5 -1
- alita_sdk/tools/github/tool_prompts.py +9 -22
- alita_sdk/tools/gitlab/__init__.py +16 -11
- alita_sdk/tools/gitlab/api_wrapper.py +218 -48
- alita_sdk/tools/gitlab_org/__init__.py +10 -9
- alita_sdk/tools/gitlab_org/api_wrapper.py +63 -64
- alita_sdk/tools/google/bigquery/__init__.py +13 -12
- alita_sdk/tools/google/bigquery/tool.py +5 -1
- alita_sdk/tools/google_places/__init__.py +11 -8
- alita_sdk/tools/google_places/api_wrapper.py +1 -1
- alita_sdk/tools/jira/__init__.py +17 -10
- alita_sdk/tools/jira/api_wrapper.py +92 -41
- alita_sdk/tools/keycloak/__init__.py +11 -8
- alita_sdk/tools/localgit/__init__.py +9 -3
- alita_sdk/tools/localgit/local_git.py +62 -54
- alita_sdk/tools/localgit/tool.py +5 -1
- alita_sdk/tools/memory/__init__.py +12 -4
- alita_sdk/tools/non_code_indexer_toolkit.py +1 -0
- alita_sdk/tools/ocr/__init__.py +11 -8
- alita_sdk/tools/openapi/__init__.py +491 -106
- alita_sdk/tools/openapi/api_wrapper.py +1368 -0
- alita_sdk/tools/openapi/tool.py +20 -0
- alita_sdk/tools/pandas/__init__.py +20 -12
- alita_sdk/tools/pandas/api_wrapper.py +38 -25
- alita_sdk/tools/pandas/dataframe/generator/base.py +3 -1
- alita_sdk/tools/postman/__init__.py +10 -9
- alita_sdk/tools/pptx/__init__.py +11 -10
- alita_sdk/tools/pptx/pptx_wrapper.py +1 -1
- alita_sdk/tools/qtest/__init__.py +31 -11
- alita_sdk/tools/qtest/api_wrapper.py +2135 -86
- alita_sdk/tools/rally/__init__.py +10 -9
- alita_sdk/tools/rally/api_wrapper.py +1 -1
- alita_sdk/tools/report_portal/__init__.py +12 -8
- alita_sdk/tools/salesforce/__init__.py +10 -8
- alita_sdk/tools/servicenow/__init__.py +17 -15
- alita_sdk/tools/servicenow/api_wrapper.py +1 -1
- alita_sdk/tools/sharepoint/__init__.py +10 -7
- alita_sdk/tools/sharepoint/api_wrapper.py +129 -38
- alita_sdk/tools/sharepoint/authorization_helper.py +191 -1
- alita_sdk/tools/sharepoint/utils.py +8 -2
- alita_sdk/tools/slack/__init__.py +10 -7
- alita_sdk/tools/slack/api_wrapper.py +2 -2
- alita_sdk/tools/sql/__init__.py +12 -9
- alita_sdk/tools/testio/__init__.py +10 -7
- alita_sdk/tools/testrail/__init__.py +11 -10
- alita_sdk/tools/testrail/api_wrapper.py +1 -1
- alita_sdk/tools/utils/__init__.py +9 -4
- alita_sdk/tools/utils/content_parser.py +103 -18
- alita_sdk/tools/utils/text_operations.py +410 -0
- alita_sdk/tools/utils/tool_prompts.py +79 -0
- alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +30 -13
- alita_sdk/tools/xray/__init__.py +13 -9
- alita_sdk/tools/yagmail/__init__.py +9 -3
- alita_sdk/tools/zephyr/__init__.py +10 -7
- alita_sdk/tools/zephyr_enterprise/__init__.py +11 -7
- alita_sdk/tools/zephyr_essential/__init__.py +10 -7
- alita_sdk/tools/zephyr_essential/api_wrapper.py +30 -13
- alita_sdk/tools/zephyr_essential/client.py +2 -2
- alita_sdk/tools/zephyr_scale/__init__.py +11 -8
- alita_sdk/tools/zephyr_scale/api_wrapper.py +2 -2
- alita_sdk/tools/zephyr_squad/__init__.py +10 -7
- {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/METADATA +154 -8
- alita_sdk-0.3.627.dist-info/RECORD +468 -0
- alita_sdk-0.3.627.dist-info/entry_points.txt +2 -0
- alita_sdk-0.3.379.dist-info/RECORD +0 -360
- {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/top_level.txt +0 -0

alita_sdk/tools/google/bigquery/__init__.py CHANGED

@@ -5,9 +5,10 @@ from langchain_core.tools import BaseTool, BaseToolkit
 from pydantic import BaseModel, Field, computed_field, field_validator
 
 from ....configurations.bigquery import BigQueryConfiguration
-from ...utils import
+from ...utils import clean_string, get_max_toolkit_length
 from .api_wrapper import BigQueryApiWrapper
 from .tool import BigQueryAction
+from ....runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META
 
 name = "bigquery"
 
@@ -22,11 +23,6 @@ def get_available_tools() -> dict[str, dict]:
     return available_tools
 
 
-toolkit_max_length = lru_cache(maxsize=1)(
-    lambda: get_max_toolkit_length(get_available_tools())
-)
-
-
 class BigQueryToolkitConfig(BaseModel):
     class Config:
         title = name
@@ -86,9 +82,10 @@ class BigQueryToolkit(BaseToolkit):
 
     @computed_field
     @property
-    def
+    def toolkit_context(self) -> str:
+        """Returns toolkit context for descriptions (max 1000 chars)."""
         return (
-            clean_string(self.toolkit_name,
+            f" [Toolkit: {clean_string(self.toolkit_name, 0)}]"
            if self.toolkit_name
            else ""
        )
@@ -122,14 +119,18 @@ class BigQueryToolkit(BaseToolkit):
         selected_tools = set(selected_tools)
         for t in instance.available_tools:
             if t["name"] in selected_tools:
+                description = t["description"]
+                if toolkit_name:
+                    description = f"Toolkit: {toolkit_name}\n{description}"
+                description = f"Project: {getattr(instance.api_wrapper, 'project', '')}\n{description}"
+                description = description[:1000]
                 instance.tools.append(
                     BigQueryAction(
                         api_wrapper=instance.api_wrapper,
-                        name=
-
-                        description=f"Project: {getattr(instance.api_wrapper, 'project', '')}\n"
-                                    + t["description"],
+                        name=t["name"],
+                        description=description,
                         args_schema=t["args_schema"],
+                        metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: t["name"]} if toolkit_name else {TOOL_NAME_META: t["name"]}
                     )
                 )
         return instance
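
This hunk is the template for the toolkit changes that follow (google_places, jira, keycloak, localgit): the lru_cache / toolkit_max_length name-prefixing machinery is dropped, descriptions gain a toolkit-context prefix and are truncated to 1000 characters, and each tool now carries a metadata dict keyed by TOOLKIT_NAME_META, TOOLKIT_TYPE_META and TOOL_NAME_META. A minimal sketch of how a consumer might read that metadata back from the bound tools; only the key names appear in this diff, the string values used below are assumptions:

# Sketch only: groups LangChain tools by originating toolkit via the new metadata.
# The constant values are placeholders; the real ones live in
# alita_sdk/runtime/utils/constants.py, which is not shown in this diff.
from collections import defaultdict

TOOLKIT_NAME_META = "toolkit_name"  # assumed value
TOOL_NAME_META = "tool_name"        # assumed value

def group_tools_by_toolkit(tools) -> dict:
    grouped = defaultdict(list)
    for tool in tools:
        meta = tool.metadata or {}  # BaseTool.metadata, populated by the toolkit factory
        grouped[meta.get(TOOLKIT_NAME_META, "<no toolkit>")].append(
            meta.get(TOOL_NAME_META, tool.name)
        )
    return dict(grouped)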

alita_sdk/tools/google/bigquery/tool.py CHANGED

@@ -29,6 +29,10 @@ class BigQueryAction(BaseTool):
     ) -> str:
         """Use the GitHub API to run an operation."""
         try:
-
+            # Strip numeric suffix added for deduplication (_2, _3, etc.)
+            # to get the original tool name that exists in the wrapper
+            import re
+            mode = re.sub(r'_\d+$', '', self.mode) if self.mode else self.mode
+            return self.api_wrapper.run(mode, *args, **kwargs)
         except Exception as e:
             return f"Error: {format_exc()}"
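
The new `_run` body strips the numeric suffix that name deduplication appends (`_2`, `_3`, ...) before dispatching to the wrapper. The regex is easy to check in isolation (standalone snippet, tool names here are illustrative, not SDK code):

import re

def strip_dedup_suffix(mode: str) -> str:
    # Removes a trailing "_<digits>" added when duplicate tool names are disambiguated.
    return re.sub(r'_\d+$', '', mode) if mode else mode

assert strip_dedup_suffix("get_documents_2") == "get_documents"
assert strip_dedup_suffix("get_documents") == "get_documents"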

alita_sdk/tools/google_places/__init__.py CHANGED

@@ -6,8 +6,9 @@ from pydantic.fields import Field
 from .api_wrapper import GooglePlacesAPIWrapper
 from ..base.tool import BaseAction
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string,
+from ..utils import clean_string, get_max_toolkit_length
 from ...configurations.google_places import GooglePlacesConfiguration
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META
 
 name = "google_places"
 
@@ -22,15 +23,13 @@ def get_tools(tool):
 
 class GooglePlacesToolkit(BaseToolkit):
     tools: list[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in GooglePlacesAPIWrapper.model_construct().get_available_tools()}
-        GooglePlacesToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            results_count=(Optional[int], Field(description="Results number to show", default=None
+            results_count=(Optional[int], Field(description="Results number to show", default=None)),
             google_places_configuration=(GooglePlacesConfiguration, Field(description="Google Places Configuration", json_schema_extra={'configuration_types': ['google_places']})),
             selected_tools=(List[Literal[tuple(selected_tools)]], Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
             __config__=ConfigDict(json_schema_extra=
@@ -55,17 +54,21 @@ class GooglePlacesToolkit(BaseToolkit):
             **kwargs.get('google_places_configuration', {}),
         }
         google_places_api_wrapper = GooglePlacesAPIWrapper(**wrapper_payload)
-        prefix = clean_string(toolkit_name, GooglePlacesToolkit.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = google_places_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools and tool["name"] not in selected_tools:
                 continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=google_places_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)
 

alita_sdk/tools/google_places/api_wrapper.py CHANGED

@@ -104,7 +104,7 @@ class GooglePlacesAPIWrapper(BaseToolApiWrapper):
             "GooglePlacesFindNearSchema",
             current_location_query=(
                 str, Field(description="Detailed user query of current user location or where to start from")),
-            target=(str, Field(description="The target location or query which user wants to find", default=None)),
+            target=(Optional[str], Field(description="The target location or query which user wants to find", default=None)),
             radius=(Optional[int], Field(description="The radius of the search. This is optional field", default=3000))
         ),
     }

alita_sdk/tools/jira/__init__.py CHANGED

@@ -6,13 +6,14 @@ from pydantic import create_model, BaseModel, ConfigDict, Field
 import requests
 
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string,
+from ..utils import clean_string, get_max_toolkit_length, parse_list, check_connection_response
 from ...configurations.jira import JiraConfiguration
 from ...configurations.pgvector import PgVectorConfiguration
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOLKIT_TYPE_META, TOOL_NAME_META
 
 name = "jira"
 
-def
+def get_toolkit(tool):
     return JiraToolkit().get_toolkit(
         selected_tools=tool['settings'].get('selected_tools', []),
         base_url=tool['settings'].get('base_url'),
@@ -32,17 +33,18 @@ def get_tools(tool):
         embedding_model=tool['settings'].get('embedding_model'),
         vectorstore_type="PGVector",
         toolkit_name=tool.get('toolkit_name')
-    )
+    )
+
+def get_tools(tool):
+    return get_toolkit(tool).get_tools()
 
 
 class JiraToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in JiraApiWrapper.model_construct().get_available_tools()}
-        JiraToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
 
         @check_connection_response
         def check_connection(self):
@@ -68,7 +70,7 @@ class JiraToolkit(BaseToolkit):
             name,
             cloud=(bool, Field(description="Hosting Option", json_schema_extra={'configuration': True})),
             limit=(int, Field(description="Limit issues. Default is 5", gt=0, default=5)),
-            api_version=(
+            api_version=(Literal['2', '3'], Field(description="Rest API version: optional. Default is 2", default="3")),
             labels=(Optional[str], Field(
                 description="List of comma separated labels used for labeling of agent's created or updated entities",
                 default=None,
@@ -109,18 +111,23 @@ class JiraToolkit(BaseToolkit):
             **(kwargs.get('pgvector_configuration') or {}),
         }
         jira_api_wrapper = JiraApiWrapper(**wrapper_payload)
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = jira_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = f"Jira instance: {jira_api_wrapper.base_url}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=jira_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)
 
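
With get_tools now delegating to get_toolkit, callers that only need the configured JiraToolkit can stop at the first step. A hedged sketch of the settings payload both functions consume, built only from keys visible in these hunks (the full file accepts more; all values below are placeholders):

# Hypothetical configuration; key names mirror the hunks above.
jira_tool_config = {
    "toolkit_name": "team_jira",
    "settings": {
        "selected_tools": [],                     # empty list -> expose every tool
        "base_url": "https://jira.example.com",
        "embedding_model": None,
    },
}

toolkit = get_toolkit(jira_tool_config)  # JiraToolkit with BaseAction tools bound
tools = get_tools(jira_tool_config)      # shorthand for get_toolkit(...).get_tools()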

alita_sdk/tools/jira/api_wrapper.py CHANGED

@@ -453,41 +453,63 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
         return super().validate_toolkit(values)
 
     def _parse_issues(self, issues: Dict) -> List[dict]:
-        parsed = []
-
-
+        parsed: List[dict] = []
+        issues_list = issues.get("issues") if isinstance(issues, dict) else None
+        if not isinstance(issues_list, list):
+            return parsed
+
+        for issue in issues_list:
+            if self.limit and len(parsed) >= self.limit:
                 break
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+            issue_fields = issue.get("fields") or {}
+            key = issue.get("key", "")
+            issue_id = issue.get("id", "")
+
+            summary = issue_fields.get("summary") or ""
+            description = issue_fields.get("description") or ""
+            created_raw = issue_fields.get("created") or ""
+            created = created_raw[:10] if created_raw else ""
+            updated = issue_fields.get("updated") or ""
+            duedate = issue_fields.get("duedate")
+
+            priority_info = issue_fields.get("priority") or {}
+            priority = priority_info.get("name") or "None"
+
+            status_info = issue_fields.get("status") or {}
+            status = status_info.get("name") or "Unknown"
+
+            project_info = issue_fields.get("project") or {}
+            project_id = project_info.get("id") or ""
+
+            issue_url = f"{self._client.url}browse/{key}" if key else self._client.url
+
+            assignee_info = issue_fields.get("assignee") or {}
+            assignee = assignee_info.get("displayName") or "None"
+
             rel_issues = {}
-            for related_issue in issue_fields
-
-
-
+            for related_issue in issue_fields.get("issuelinks") or []:
+                rel_type = None
+                rel_key = None
+                if related_issue.get("inwardIssue"):
+                    rel_type = related_issue.get("type", {}).get("inward")
+                    rel_key = related_issue["inwardIssue"].get("key")
                     # rel_summary = related_issue["inwardIssue"]["fields"]["summary"]
-
-                rel_type = related_issue
-                rel_key = related_issue["outwardIssue"]
+                elif related_issue.get("outwardIssue"):
+                    rel_type = related_issue.get("type", {}).get("outward")
+                    rel_key = related_issue["outwardIssue"].get("key")
                     # rel_summary = related_issue["outwardIssue"]["fields"]["summary"]
-
+
+                if rel_type and rel_key:
+                    rel_issues = {
+                        "type": rel_type,
+                        "key": rel_key,
+                        "url": f"{self._client.url}browse/{rel_key}",
+                    }
 
             parsed_issue = {
                 "key": key,
-                "id":
+                "id": issue_id,
                 "projectId": project_id,
                 "summary": summary,
                 "description": description,
@@ -500,10 +522,13 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
                 "url": issue_url,
                 "related_issues": rel_issues,
             }
-
-
+
+            for field in (self.additional_fields or []):
+                field_value = issue_fields.get(field)
                 parsed_issue[field] = field_value
+
             parsed.append(parsed_issue)
+
         return parsed
 
     @staticmethod
@@ -563,7 +588,7 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
         Use the appropriate issue link type (e.g., "Test", "Relates", "Blocks").
         If we use "Test" linktype, the test is inward issue, the story/other issue is outward issue.."""
 
-        comment = "
+        comment = f"Issue {inward_issue_key} was linked to {outward_issue_key}."
         comment_body = {"content": [{"content": [{"text": comment,"type": "text"}],"type": "paragraph"}],"type": "doc","version": 1} if self.api_version == "3" else comment
         link_data = {
             "type": {"name": f"{linktype}"},
@@ -749,13 +774,24 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
 
         attachment_data = []
         attachments = self._client.get_attachments_ids_from_issue(issue=jira_issue_key)
+        api_version = str(getattr(self._client, "api_version", "2"))
         for attachment in attachments:
             if attachment_pattern and not re.search(attachment_pattern, attachment['filename']):
                 logger.info(f"Skipping attachment {attachment['filename']} as it does not match pattern {attachment_pattern}")
                 continue
             logger.info(f"Processing attachment {attachment['filename']} with ID {attachment['attachment_id']}")
             try:
-                attachment_content =
+                attachment_content = None
+
+                # Cloud (REST v3) attachments require signed URLs returned from metadata
+                if api_version in {"3", "latest"} or self.cloud:
+                    attachment_content = self._download_attachment_v3(
+                        attachment['attachment_id'],
+                        attachment['filename']
+                    )
+
+                if attachment_content is None:
+                    attachment_content = self._client.get_attachment_content(attachment['attachment_id'])
             except Exception as e:
                 logger.error(
                     f"Failed to download attachment {attachment['filename']} for issue {jira_issue_key}: {str(e)}")
@@ -797,15 +833,6 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
         logger.debug(response_string)
         return response_string
 
-    def _extract_attachment_content(self, attachment):
-        """Extract attachment's content if possible (used for api v.2)"""
-
-        try:
-            content = self._client.get(attachment['content'].replace(self.base_url, ''))
-        except Exception as e:
-            content = f"Unable to parse content of '{attachment['filename']}' due to: {str(e)}"
-        return f"filename: {attachment['filename']}\ncontent: {content}"
-
     # Helper functions for image processing
     @staticmethod
     def _collect_context_for_image(content: str, image_marker: str, context_radius: int = 500) -> str:
@@ -1038,6 +1065,30 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
             logger.error(f"Error downloading attachment: {str(e)}")
             return None
 
+    def _download_attachment_v3(self, attachment_id: str, filename: str | None = None) -> Optional[bytes]:
+        """Download Jira attachment using metadata content URL (required for REST v3 / Cloud)."""
+        try:
+            metadata = self._client.get_attachment(attachment_id)
+        except Exception as e:
+            logger.error(f"Failed to retrieve metadata for attachment {attachment_id}: {str(e)}")
+            return None
+
+        download_url = metadata.get('content') or metadata.get('_links', {}).get('content')
+
+        if not download_url:
+            logger.warning(
+                f"Attachment {attachment_id} ({filename}) metadata does not include a content URL; falling back.")
+            return None
+
+        logger.info(f"Downloading attachment {attachment_id} via metadata content URL (v3).")
+        content = self._download_attachment(download_url)
+
+        if content is None:
+            logger.error(
+                f"Failed to download attachment {attachment_id} ({filename}) from v3 content URL: {download_url}")
+
+        return content
+
     def _extract_image_data(self, field_data):
         """
         Extracts image data from general JSON response.
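
The attachment changes replace the removed `_extract_attachment_content` helper with a v3-first download order: try the metadata content URL (`_download_attachment_v3`), then fall back to `get_attachment_content`. A compact sketch of that ordering with the client calls stubbed out (the stubs are placeholders, not SDK APIs):

from typing import Callable, Optional

def fetch_attachment(attachment_id: str,
                     api_version: str,
                     cloud: bool,
                     download_v3: Callable[[str], Optional[bytes]],
                     download_v2: Callable[[str], Optional[bytes]]) -> Optional[bytes]:
    # Mirrors the download order in the hunk above: Cloud / REST v3 goes through the
    # signed metadata content URL; anything else, or a v3 miss, falls back to the
    # classic attachment-content endpoint.
    content: Optional[bytes] = None
    if api_version in {"3", "latest"} or cloud:
        content = download_v3(attachment_id)
    if content is None:
        content = download_v2(attachment_id)
    return content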

alita_sdk/tools/keycloak/__init__.py CHANGED

@@ -5,7 +5,8 @@ from pydantic import BaseModel, ConfigDict, create_model, Field, SecretStr
 
 from .api_wrapper import KeycloakApiWrapper
 from ..base.tool import BaseAction
-from ..utils import clean_string,
+from ..utils import clean_string, get_max_toolkit_length
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META
 
 name = "keycloak"
 
@@ -21,15 +22,13 @@ def get_tools(tool):
 
 class KeycloakToolkit(BaseToolkit):
     tools: list[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in KeycloakApiWrapper.model_construct().get_available_tools()}
-        KeycloakToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            base_url=(str, Field(default="", title="Server URL", description="Keycloak server URL", json_schema_extra={'toolkit_name': True
+            base_url=(str, Field(default="", title="Server URL", description="Keycloak server URL", json_schema_extra={'toolkit_name': True})),
             realm=(str, Field(default="", title="Realm", description="Keycloak realm")),
             client_id=(str, Field(default="", title="Client ID", description="Keycloak client ID")),
             client_secret=(SecretStr, Field(default="", title="Client sercet", description="Keycloak client secret", json_schema_extra={'secret': True})),
@@ -42,17 +41,21 @@ class KeycloakToolkit(BaseToolkit):
         if selected_tools is None:
             selected_tools = []
         keycloak_api_wrapper = KeycloakApiWrapper(**kwargs)
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = keycloak_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools and tool["name"] not in selected_tools:
                 continue
+            description = f"{tool['description']}\nUrl: {keycloak_api_wrapper.base_url}"
+            if toolkit_name:
+                description = f"{description}\nToolkit: {toolkit_name}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=keycloak_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)
 

alita_sdk/tools/localgit/__init__.py CHANGED

@@ -5,6 +5,7 @@ from pydantic import BaseModel, ConfigDict, create_model, Field
 
 from .local_git import LocalGit
 from .tool import LocalGitAction
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META
 
 name = "localgit"
 
@@ -34,7 +35,7 @@ class AlitaLocalGitToolkit(BaseToolkit):
         )
 
     @classmethod
-    def get_toolkit(cls, selected_tools: list[str] | None = None, **kwargs):
+    def get_toolkit(cls, selected_tools: list[str] | None = None, toolkit_name: Optional[str] = None, **kwargs):
         if selected_tools is None:
             selected_tools = []
         local_git_tool = LocalGit(**kwargs)
@@ -45,12 +46,17 @@ class AlitaLocalGitToolkit(BaseToolkit):
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(LocalGitAction(
                 api_wrapper=local_git_tool,
                 name=repo + "_" + tool["name"],
                 mode=tool["mode"],
-                description=
-                args_schema=tool["args_schema"]
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)
 

alita_sdk/tools/localgit/local_git.py CHANGED

@@ -8,7 +8,8 @@ from git import Repo
 from pydantic import BaseModel, Field, create_model, model_validator
 from langchain_core.tools import ToolException
 
-from ..elitea_base import BaseToolApiWrapper
+from ..elitea_base import BaseToolApiWrapper, extend_with_file_operations, BaseCodeToolApiWrapper
+from ..utils.text_operations import parse_old_new_markers
 
 logger = logging.getLogger(__name__)
 CREATE_FILE_PROMPT = """Create new file in your local repository."""
@@ -110,6 +111,12 @@ class LocalGit(BaseToolApiWrapper):
     repo_url: str = None
     commit_sha: str = None
     path_pattern: str = '**/*.py'
+
+    # Import file operation methods from BaseCodeToolApiWrapper
+    read_file_chunk = BaseCodeToolApiWrapper.read_file_chunk
+    read_multiple_files = BaseCodeToolApiWrapper.read_multiple_files
+    search_file = BaseCodeToolApiWrapper.search_file
+    edit_file = BaseCodeToolApiWrapper.edit_file
 
     @model_validator(mode='before')
     @classmethod
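
The hunk above grafts the shared file-operation implementations onto LocalGit as plain class attributes rather than via inheritance. A tiny illustration of why that works; the class and method names below are made up, only read_file_chunk, read_multiple_files, search_file and edit_file come from the diff:

class SharedFileOps:
    def describe(self, path: str) -> str:
        # 'self' is whatever class the attribute ends up on, not SharedFileOps.
        return f"{type(self).__name__} reads {path}"

class LocalWrapper:
    # Assigning the function object makes it a regular method of LocalWrapper.
    describe = SharedFileOps.describe

print(LocalWrapper().describe("README.md"))  # -> "LocalWrapper reads README.md"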
@@ -128,58 +135,6 @@ class LocalGit(BaseToolApiWrapper):
             repo.head.reset(commit=commit_sha, working_tree=True)
         return values
 
-    def extract_old_new_pairs(self, file_query):
-        # Split the file content by lines
-        code_lines = file_query.split("\n")
-
-        # Initialize lists to hold the contents of OLD and NEW sections
-        old_contents = []
-        new_contents = []
-
-        # Initialize variables to track whether the current line is within an OLD or NEW section
-        in_old_section = False
-        in_new_section = False
-
-        # Temporary storage for the current section's content
-        current_section_content = []
-
-        # Iterate through each line in the file content
-        for line in code_lines:
-            # Check for OLD section start
-            if "OLD <<<" in line:
-                in_old_section = True
-                current_section_content = [] # Reset current section content
-                continue # Skip the line with the marker
-
-            # Check for OLD section end
-            if ">>>> OLD" in line:
-                in_old_section = False
-                old_contents.append("\n".join(current_section_content).strip()) # Add the captured content
-                current_section_content = [] # Reset current section content
-                continue # Skip the line with the marker
-
-            # Check for NEW section start
-            if "NEW <<<" in line:
-                in_new_section = True
-                current_section_content = [] # Reset current section content
-                continue # Skip the line with the marker
-
-            # Check for NEW section end
-            if ">>>> NEW" in line:
-                in_new_section = False
-                new_contents.append("\n".join(current_section_content).strip()) # Add the captured content
-                current_section_content = [] # Reset current section content
-                continue # Skip the line with the marker
-
-            # If currently in an OLD or NEW section, add the line to the current section content
-            if in_old_section or in_new_section:
-                current_section_content.append(line)
-
-        # Pair the OLD and NEW contents
-        paired_contents = list(zip(old_contents, new_contents))
-
-        return paired_contents
-
     def checkout_commit(self, commit_sha: str) -> str:
         """ Checkout specific commit from repository """
         try:
@@ -233,6 +188,58 @@ class LocalGit(BaseToolApiWrapper):
                 return f.read()
         else:
             return "File '{}' cannot be read because it is not existed".format(file_path)
+
+    def _read_file(self, file_path: str, branch: str = None, **kwargs) -> str:
+        """
+        Read a file from the repository with optional partial read support.
+
+        Parameters:
+            file_path: the file path (relative to repo root)
+            branch: branch name (not used for local git, always reads from working dir)
+            **kwargs: Additional parameters (offset, limit, head, tail) - currently ignored,
+                      partial read handled client-side by base class methods
+
+        Returns:
+            File content as string
+        """
+        return self.read_file(file_path)
+
+    def _write_file(
+        self,
+        file_path: str,
+        content: str,
+        branch: str = None,
+        commit_message: str = None
+    ) -> str:
+        """
+        Write content to a file (create or update).
+
+        Parameters:
+            file_path: Path to the file (relative to repo root)
+            content: New file content
+            branch: Branch name (not used for local git)
+            commit_message: Commit message (not used - files are written without commit)
+
+        Returns:
+            Success message
+        """
+        try:
+            full_path = os.path.normpath(os.path.join(self.repo.working_dir, file_path))
+
+            # Ensure directory exists
+            os.makedirs(os.path.dirname(full_path), exist_ok=True)
+
+            # Write the file
+            with open(full_path, 'w') as f:
+                f.write(content)
+
+            # Determine if file was created or updated
+            if os.path.exists(full_path):
+                return f"Updated file {file_path}"
+            else:
+                return f"Created file {file_path}"
+        except Exception as e:
+            raise ToolException(f"Unable to write file {file_path}: {str(e)}")
 
     def update_file_content_by_lines(self, file_path: str, start_line_index: int, end_line_index: int,
                                      new_content: str) -> str:
@@ -314,7 +321,7 @@ class LocalGit(BaseToolApiWrapper):
         file_path = os.path.normpath(os.path.join(self.repo.working_dir, file_path))
         file_content = self.read_file(file_path)
         updated_file_content = file_content
-        for old, new in
+        for old, new in parse_old_new_markers(file_query):  # Use shared utility
             if not old.strip():
                 continue
             updated_file_content = updated_file_content.replace(old, new)
@@ -332,6 +339,7 @@ class LocalGit(BaseToolApiWrapper):
         except Exception as e:
             return "Unable to update file due to error:\n" + str(e)
 
+    @extend_with_file_operations
     def get_available_tools(self):
        return [
            {