alita-sdk 0.3.257-py3-none-any.whl → 0.3.584-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/cli/__init__.py +10 -0
- alita_sdk/cli/__main__.py +17 -0
- alita_sdk/cli/agent/__init__.py +5 -0
- alita_sdk/cli/agent/default.py +258 -0
- alita_sdk/cli/agent_executor.py +155 -0
- alita_sdk/cli/agent_loader.py +215 -0
- alita_sdk/cli/agent_ui.py +228 -0
- alita_sdk/cli/agents.py +3794 -0
- alita_sdk/cli/callbacks.py +647 -0
- alita_sdk/cli/cli.py +168 -0
- alita_sdk/cli/config.py +306 -0
- alita_sdk/cli/context/__init__.py +30 -0
- alita_sdk/cli/context/cleanup.py +198 -0
- alita_sdk/cli/context/manager.py +731 -0
- alita_sdk/cli/context/message.py +285 -0
- alita_sdk/cli/context/strategies.py +289 -0
- alita_sdk/cli/context/token_estimation.py +127 -0
- alita_sdk/cli/formatting.py +182 -0
- alita_sdk/cli/input_handler.py +419 -0
- alita_sdk/cli/inventory.py +1073 -0
- alita_sdk/cli/mcp_loader.py +315 -0
- alita_sdk/cli/toolkit.py +327 -0
- alita_sdk/cli/toolkit_loader.py +85 -0
- alita_sdk/cli/tools/__init__.py +43 -0
- alita_sdk/cli/tools/approval.py +224 -0
- alita_sdk/cli/tools/filesystem.py +1751 -0
- alita_sdk/cli/tools/planning.py +389 -0
- alita_sdk/cli/tools/terminal.py +414 -0
- alita_sdk/community/__init__.py +72 -12
- alita_sdk/community/inventory/__init__.py +236 -0
- alita_sdk/community/inventory/config.py +257 -0
- alita_sdk/community/inventory/enrichment.py +2137 -0
- alita_sdk/community/inventory/extractors.py +1469 -0
- alita_sdk/community/inventory/ingestion.py +3172 -0
- alita_sdk/community/inventory/knowledge_graph.py +1457 -0
- alita_sdk/community/inventory/parsers/__init__.py +218 -0
- alita_sdk/community/inventory/parsers/base.py +295 -0
- alita_sdk/community/inventory/parsers/csharp_parser.py +907 -0
- alita_sdk/community/inventory/parsers/go_parser.py +851 -0
- alita_sdk/community/inventory/parsers/html_parser.py +389 -0
- alita_sdk/community/inventory/parsers/java_parser.py +593 -0
- alita_sdk/community/inventory/parsers/javascript_parser.py +629 -0
- alita_sdk/community/inventory/parsers/kotlin_parser.py +768 -0
- alita_sdk/community/inventory/parsers/markdown_parser.py +362 -0
- alita_sdk/community/inventory/parsers/python_parser.py +604 -0
- alita_sdk/community/inventory/parsers/rust_parser.py +858 -0
- alita_sdk/community/inventory/parsers/swift_parser.py +832 -0
- alita_sdk/community/inventory/parsers/text_parser.py +322 -0
- alita_sdk/community/inventory/parsers/yaml_parser.py +370 -0
- alita_sdk/community/inventory/patterns/__init__.py +61 -0
- alita_sdk/community/inventory/patterns/ast_adapter.py +380 -0
- alita_sdk/community/inventory/patterns/loader.py +348 -0
- alita_sdk/community/inventory/patterns/registry.py +198 -0
- alita_sdk/community/inventory/presets.py +535 -0
- alita_sdk/community/inventory/retrieval.py +1403 -0
- alita_sdk/community/inventory/toolkit.py +173 -0
- alita_sdk/community/inventory/toolkit_utils.py +176 -0
- alita_sdk/community/inventory/visualize.py +1370 -0
- alita_sdk/configurations/__init__.py +11 -0
- alita_sdk/configurations/ado.py +148 -2
- alita_sdk/configurations/azure_search.py +1 -1
- alita_sdk/configurations/bigquery.py +1 -1
- alita_sdk/configurations/bitbucket.py +94 -2
- alita_sdk/configurations/browser.py +18 -0
- alita_sdk/configurations/carrier.py +19 -0
- alita_sdk/configurations/confluence.py +130 -1
- alita_sdk/configurations/delta_lake.py +1 -1
- alita_sdk/configurations/figma.py +76 -5
- alita_sdk/configurations/github.py +65 -1
- alita_sdk/configurations/gitlab.py +81 -0
- alita_sdk/configurations/google_places.py +17 -0
- alita_sdk/configurations/jira.py +103 -0
- alita_sdk/configurations/openapi.py +323 -0
- alita_sdk/configurations/postman.py +1 -1
- alita_sdk/configurations/qtest.py +72 -3
- alita_sdk/configurations/report_portal.py +115 -0
- alita_sdk/configurations/salesforce.py +19 -0
- alita_sdk/configurations/service_now.py +1 -12
- alita_sdk/configurations/sharepoint.py +167 -0
- alita_sdk/configurations/sonar.py +18 -0
- alita_sdk/configurations/sql.py +20 -0
- alita_sdk/configurations/testio.py +101 -0
- alita_sdk/configurations/testrail.py +88 -0
- alita_sdk/configurations/xray.py +94 -1
- alita_sdk/configurations/zephyr_enterprise.py +94 -1
- alita_sdk/configurations/zephyr_essential.py +95 -0
- alita_sdk/runtime/clients/artifact.py +21 -4
- alita_sdk/runtime/clients/client.py +458 -67
- alita_sdk/runtime/clients/mcp_discovery.py +342 -0
- alita_sdk/runtime/clients/mcp_manager.py +262 -0
- alita_sdk/runtime/clients/sandbox_client.py +352 -0
- alita_sdk/runtime/langchain/_constants_bkup.py +1318 -0
- alita_sdk/runtime/langchain/assistant.py +183 -43
- alita_sdk/runtime/langchain/constants.py +647 -1
- alita_sdk/runtime/langchain/document_loaders/AlitaDocxMammothLoader.py +315 -3
- alita_sdk/runtime/langchain/document_loaders/AlitaExcelLoader.py +209 -31
- alita_sdk/runtime/langchain/document_loaders/AlitaImageLoader.py +1 -1
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLinesLoader.py +77 -0
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +10 -3
- alita_sdk/runtime/langchain/document_loaders/AlitaMarkdownLoader.py +66 -0
- alita_sdk/runtime/langchain/document_loaders/AlitaPDFLoader.py +79 -10
- alita_sdk/runtime/langchain/document_loaders/AlitaPowerPointLoader.py +52 -15
- alita_sdk/runtime/langchain/document_loaders/AlitaPythonLoader.py +9 -0
- alita_sdk/runtime/langchain/document_loaders/AlitaTableLoader.py +1 -4
- alita_sdk/runtime/langchain/document_loaders/AlitaTextLoader.py +15 -2
- alita_sdk/runtime/langchain/document_loaders/ImageParser.py +30 -0
- alita_sdk/runtime/langchain/document_loaders/constants.py +189 -41
- alita_sdk/runtime/langchain/interfaces/llm_processor.py +4 -2
- alita_sdk/runtime/langchain/langraph_agent.py +493 -105
- alita_sdk/runtime/langchain/utils.py +118 -8
- alita_sdk/runtime/llms/preloaded.py +2 -6
- alita_sdk/runtime/models/mcp_models.py +61 -0
- alita_sdk/runtime/skills/__init__.py +91 -0
- alita_sdk/runtime/skills/callbacks.py +498 -0
- alita_sdk/runtime/skills/discovery.py +540 -0
- alita_sdk/runtime/skills/executor.py +610 -0
- alita_sdk/runtime/skills/input_builder.py +371 -0
- alita_sdk/runtime/skills/models.py +330 -0
- alita_sdk/runtime/skills/registry.py +355 -0
- alita_sdk/runtime/skills/skill_runner.py +330 -0
- alita_sdk/runtime/toolkits/__init__.py +28 -0
- alita_sdk/runtime/toolkits/application.py +14 -4
- alita_sdk/runtime/toolkits/artifact.py +25 -9
- alita_sdk/runtime/toolkits/datasource.py +13 -6
- alita_sdk/runtime/toolkits/mcp.py +782 -0
- alita_sdk/runtime/toolkits/planning.py +178 -0
- alita_sdk/runtime/toolkits/skill_router.py +238 -0
- alita_sdk/runtime/toolkits/subgraph.py +11 -6
- alita_sdk/runtime/toolkits/tools.py +314 -70
- alita_sdk/runtime/toolkits/vectorstore.py +11 -5
- alita_sdk/runtime/tools/__init__.py +24 -0
- alita_sdk/runtime/tools/application.py +16 -4
- alita_sdk/runtime/tools/artifact.py +367 -33
- alita_sdk/runtime/tools/data_analysis.py +183 -0
- alita_sdk/runtime/tools/function.py +100 -4
- alita_sdk/runtime/tools/graph.py +81 -0
- alita_sdk/runtime/tools/image_generation.py +218 -0
- alita_sdk/runtime/tools/llm.py +1032 -177
- alita_sdk/runtime/tools/loop.py +3 -1
- alita_sdk/runtime/tools/loop_output.py +3 -1
- alita_sdk/runtime/tools/mcp_inspect_tool.py +284 -0
- alita_sdk/runtime/tools/mcp_remote_tool.py +181 -0
- alita_sdk/runtime/tools/mcp_server_tool.py +3 -1
- alita_sdk/runtime/tools/planning/__init__.py +36 -0
- alita_sdk/runtime/tools/planning/models.py +246 -0
- alita_sdk/runtime/tools/planning/wrapper.py +607 -0
- alita_sdk/runtime/tools/router.py +2 -1
- alita_sdk/runtime/tools/sandbox.py +375 -0
- alita_sdk/runtime/tools/skill_router.py +776 -0
- alita_sdk/runtime/tools/tool.py +3 -1
- alita_sdk/runtime/tools/vectorstore.py +69 -65
- alita_sdk/runtime/tools/vectorstore_base.py +163 -90
- alita_sdk/runtime/utils/AlitaCallback.py +137 -21
- alita_sdk/runtime/utils/constants.py +5 -1
- alita_sdk/runtime/utils/mcp_client.py +492 -0
- alita_sdk/runtime/utils/mcp_oauth.py +361 -0
- alita_sdk/runtime/utils/mcp_sse_client.py +434 -0
- alita_sdk/runtime/utils/mcp_tools_discovery.py +124 -0
- alita_sdk/runtime/utils/streamlit.py +41 -14
- alita_sdk/runtime/utils/toolkit_utils.py +28 -9
- alita_sdk/runtime/utils/utils.py +48 -0
- alita_sdk/tools/__init__.py +135 -37
- alita_sdk/tools/ado/__init__.py +2 -2
- alita_sdk/tools/ado/repos/__init__.py +16 -19
- alita_sdk/tools/ado/repos/repos_wrapper.py +12 -20
- alita_sdk/tools/ado/test_plan/__init__.py +27 -8
- alita_sdk/tools/ado/test_plan/test_plan_wrapper.py +56 -28
- alita_sdk/tools/ado/wiki/__init__.py +28 -12
- alita_sdk/tools/ado/wiki/ado_wrapper.py +114 -40
- alita_sdk/tools/ado/work_item/__init__.py +28 -12
- alita_sdk/tools/ado/work_item/ado_wrapper.py +95 -11
- alita_sdk/tools/advanced_jira_mining/__init__.py +13 -8
- alita_sdk/tools/aws/delta_lake/__init__.py +15 -11
- alita_sdk/tools/aws/delta_lake/tool.py +5 -1
- alita_sdk/tools/azure_ai/search/__init__.py +14 -8
- alita_sdk/tools/base/tool.py +5 -1
- alita_sdk/tools/base_indexer_toolkit.py +454 -110
- alita_sdk/tools/bitbucket/__init__.py +28 -19
- alita_sdk/tools/bitbucket/api_wrapper.py +285 -27
- alita_sdk/tools/bitbucket/cloud_api_wrapper.py +5 -5
- alita_sdk/tools/browser/__init__.py +41 -16
- alita_sdk/tools/browser/crawler.py +3 -1
- alita_sdk/tools/browser/utils.py +15 -6
- alita_sdk/tools/carrier/__init__.py +18 -17
- alita_sdk/tools/carrier/backend_reports_tool.py +8 -4
- alita_sdk/tools/carrier/excel_reporter.py +8 -4
- alita_sdk/tools/chunkers/__init__.py +3 -1
- alita_sdk/tools/chunkers/code/codeparser.py +1 -1
- alita_sdk/tools/chunkers/sematic/json_chunker.py +2 -1
- alita_sdk/tools/chunkers/sematic/markdown_chunker.py +97 -6
- alita_sdk/tools/chunkers/sematic/proposal_chunker.py +1 -1
- alita_sdk/tools/chunkers/universal_chunker.py +270 -0
- alita_sdk/tools/cloud/aws/__init__.py +12 -7
- alita_sdk/tools/cloud/azure/__init__.py +12 -7
- alita_sdk/tools/cloud/gcp/__init__.py +12 -7
- alita_sdk/tools/cloud/k8s/__init__.py +12 -7
- alita_sdk/tools/code/linter/__init__.py +10 -8
- alita_sdk/tools/code/loaders/codesearcher.py +3 -2
- alita_sdk/tools/code/sonar/__init__.py +21 -13
- alita_sdk/tools/code_indexer_toolkit.py +199 -0
- alita_sdk/tools/confluence/__init__.py +22 -14
- alita_sdk/tools/confluence/api_wrapper.py +197 -58
- alita_sdk/tools/confluence/loader.py +14 -2
- alita_sdk/tools/custom_open_api/__init__.py +12 -5
- alita_sdk/tools/elastic/__init__.py +11 -8
- alita_sdk/tools/elitea_base.py +546 -64
- alita_sdk/tools/figma/__init__.py +60 -11
- alita_sdk/tools/figma/api_wrapper.py +1400 -167
- alita_sdk/tools/figma/figma_client.py +73 -0
- alita_sdk/tools/figma/toon_tools.py +2748 -0
- alita_sdk/tools/github/__init__.py +18 -17
- alita_sdk/tools/github/api_wrapper.py +9 -26
- alita_sdk/tools/github/github_client.py +81 -12
- alita_sdk/tools/github/schemas.py +2 -1
- alita_sdk/tools/github/tool.py +5 -1
- alita_sdk/tools/gitlab/__init__.py +19 -13
- alita_sdk/tools/gitlab/api_wrapper.py +256 -80
- alita_sdk/tools/gitlab_org/__init__.py +14 -10
- alita_sdk/tools/google/bigquery/__init__.py +14 -13
- alita_sdk/tools/google/bigquery/tool.py +5 -1
- alita_sdk/tools/google_places/__init__.py +21 -11
- alita_sdk/tools/jira/__init__.py +22 -11
- alita_sdk/tools/jira/api_wrapper.py +315 -168
- alita_sdk/tools/keycloak/__init__.py +11 -8
- alita_sdk/tools/localgit/__init__.py +9 -3
- alita_sdk/tools/localgit/local_git.py +62 -54
- alita_sdk/tools/localgit/tool.py +5 -1
- alita_sdk/tools/memory/__init__.py +38 -14
- alita_sdk/tools/non_code_indexer_toolkit.py +7 -2
- alita_sdk/tools/ocr/__init__.py +11 -8
- alita_sdk/tools/openapi/__init__.py +491 -106
- alita_sdk/tools/openapi/api_wrapper.py +1357 -0
- alita_sdk/tools/openapi/tool.py +20 -0
- alita_sdk/tools/pandas/__init__.py +20 -12
- alita_sdk/tools/pandas/api_wrapper.py +40 -45
- alita_sdk/tools/pandas/dataframe/generator/base.py +3 -1
- alita_sdk/tools/postman/__init__.py +11 -11
- alita_sdk/tools/postman/api_wrapper.py +19 -8
- alita_sdk/tools/postman/postman_analysis.py +8 -1
- alita_sdk/tools/pptx/__init__.py +11 -10
- alita_sdk/tools/qtest/__init__.py +22 -14
- alita_sdk/tools/qtest/api_wrapper.py +1784 -88
- alita_sdk/tools/rally/__init__.py +13 -10
- alita_sdk/tools/report_portal/__init__.py +23 -16
- alita_sdk/tools/salesforce/__init__.py +22 -16
- alita_sdk/tools/servicenow/__init__.py +21 -16
- alita_sdk/tools/servicenow/api_wrapper.py +1 -1
- alita_sdk/tools/sharepoint/__init__.py +17 -14
- alita_sdk/tools/sharepoint/api_wrapper.py +179 -39
- alita_sdk/tools/sharepoint/authorization_helper.py +191 -1
- alita_sdk/tools/sharepoint/utils.py +8 -2
- alita_sdk/tools/slack/__init__.py +13 -8
- alita_sdk/tools/sql/__init__.py +22 -19
- alita_sdk/tools/sql/api_wrapper.py +71 -23
- alita_sdk/tools/testio/__init__.py +21 -13
- alita_sdk/tools/testrail/__init__.py +13 -11
- alita_sdk/tools/testrail/api_wrapper.py +214 -46
- alita_sdk/tools/utils/__init__.py +28 -4
- alita_sdk/tools/utils/content_parser.py +241 -55
- alita_sdk/tools/utils/text_operations.py +254 -0
- alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +83 -27
- alita_sdk/tools/xray/__init__.py +18 -14
- alita_sdk/tools/xray/api_wrapper.py +58 -113
- alita_sdk/tools/yagmail/__init__.py +9 -3
- alita_sdk/tools/zephyr/__init__.py +12 -7
- alita_sdk/tools/zephyr_enterprise/__init__.py +16 -9
- alita_sdk/tools/zephyr_enterprise/api_wrapper.py +30 -15
- alita_sdk/tools/zephyr_essential/__init__.py +16 -10
- alita_sdk/tools/zephyr_essential/api_wrapper.py +297 -54
- alita_sdk/tools/zephyr_essential/client.py +6 -4
- alita_sdk/tools/zephyr_scale/__init__.py +13 -8
- alita_sdk/tools/zephyr_scale/api_wrapper.py +39 -31
- alita_sdk/tools/zephyr_squad/__init__.py +12 -7
- {alita_sdk-0.3.257.dist-info → alita_sdk-0.3.584.dist-info}/METADATA +184 -37
- alita_sdk-0.3.584.dist-info/RECORD +452 -0
- alita_sdk-0.3.584.dist-info/entry_points.txt +2 -0
- alita_sdk/tools/bitbucket/tools.py +0 -304
- alita_sdk-0.3.257.dist-info/RECORD +0 -343
- {alita_sdk-0.3.257.dist-info → alita_sdk-0.3.584.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.257.dist-info → alita_sdk-0.3.584.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.257.dist-info → alita_sdk-0.3.584.dist-info}/top_level.txt +0 -0
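
The file-level summary above can be reproduced locally: a wheel is a zip archive, so the two releases can be downloaded and compared member by member. The following is a minimal sketch using only the standard library; it assumes both wheel files have already been downloaded into the working directory (for example with pip download alita-sdk==0.3.257 --no-deps), and the file names shown are the published wheel names.

import difflib
import zipfile
from pathlib import Path

# Assumed local copies of the two published wheels (download them first,
# e.g. with `pip download alita-sdk==0.3.257 --no-deps`).
OLD_WHEEL = Path("alita_sdk-0.3.257-py3-none-any.whl")
NEW_WHEEL = Path("alita_sdk-0.3.584-py3-none-any.whl")

def read_text_members(wheel: Path) -> dict[str, list[str]]:
    """Map archive member name -> decoded lines for the text-like files in a wheel."""
    members: dict[str, list[str]] = {}
    with zipfile.ZipFile(wheel) as zf:
        for name in zf.namelist():
            if name.endswith((".py", ".txt", "METADATA", "RECORD", "WHEEL")):
                text = zf.read(name).decode("utf-8", errors="replace")
                members[name] = text.splitlines(keepends=True)
    return members

old, new = read_text_members(OLD_WHEEL), read_text_members(NEW_WHEEL)
for name in sorted(set(old) | set(new)):
    for line in difflib.unified_diff(old.get(name, []), new.get(name, []),
                                     fromfile=f"0.3.257/{name}", tofile=f"0.3.584/{name}"):
        print(line, end="")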
alita_sdk/tools/xray/api_wrapper.py

@@ -1,7 +1,7 @@
 import json
 import logging
 import hashlib
-from typing import Any, Dict, Generator, List, Optional
+from typing import Any, Dict, Generator, List, Optional, Literal

 import requests
 from langchain_core.documents import Document
@@ -9,12 +9,9 @@ from langchain_core.tools import ToolException
 from pydantic import PrivateAttr, SecretStr, create_model, model_validator, Field
 from python_graphql_client import GraphqlClient

-from ..
-
-    extend_with_vector_tools,
-)
+from ..non_code_indexer_toolkit import NonCodeIndexerToolkit
+from ..utils.available_tools_decorator import extend_with_parent_available_tools
 from ...runtime.utils.utils import IndexerKeywords
-from ..utils.content_parser import parse_file_content, load_content_from_bytes

 try:
     from alita_sdk.runtime.langchain.interfaces.llm_processor import get_embeddings
@@ -31,7 +28,7 @@ _get_tests_query = """query GetTests($jql: String!, $limit:Int!, $start: Int)
        limit
        results {
            issueId
-           jira(fields: ["key", "summary", "created", "updated", "assignee.displayName", "reporter.displayName"])
+           jira(fields: ["key", "summary", "description", "created", "updated", "assignee.displayName", "reporter.displayName"])
            projectId
            testType {
                name
@@ -107,7 +104,7 @@ XrayCreateTest = create_model(

 XrayCreateTests = create_model(
     "XrayCreateTests",
-    graphql_mutations=(
+    graphql_mutations=(List[str], Field(description="list of GraphQL mutations:\n" + _graphql_mutation_description))
 )

 def _parse_tests(test_results) -> List[Any]:
@@ -120,7 +117,7 @@ def _parse_tests(test_results) -> List[Any]:
     return test_results


-class XrayApiWrapper(
+class XrayApiWrapper(NonCodeIndexerToolkit):
     _default_base_url: str = 'https://xray.cloud.getxray.app'
     base_url: str = ""
     client_id: str = None
@@ -147,7 +144,7 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
         client_id = values['client_id']
         client_secret = values['client_secret']
         # Authenticate to get the token
-        values['base_url'] = values.get('base_url', '') or cls._default_base_url
+        values['base_url'] = values.get('base_url', '') or cls._default_base_url.default
         auth_url = f"{values['base_url']}/api/v1/authenticate"
         auth_data = {
             "client_id": client_id,
@@ -168,7 +165,7 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
                 return ToolException(f"Please, check you credentials ({values['client_id']} / {masked_secret}). Unable")
             else:
                 return ToolException(f"Authentication failed: {str(e)}")
-        return values
+        return super().validate_toolkit(values)

     def __init__(self, **data):
         super().__init__(**data)
@@ -333,6 +330,7 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):

         for test in tests_data:
             page_content = ""
+            content_structure = {}
             test_type_name = test.get("testType", {}).get("name", "").lower()

             attachment_ids = []
@@ -359,19 +357,16 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
                 content_structure = {"steps": steps_content}
                 if attachment_ids:
                     content_structure["attachment_ids"] = sorted(attachment_ids)
-                page_content = json.dumps(content_structure, indent=2)

             elif test_type_name == "cucumber" and test.get("gherkin"):
                 content_structure = {"gherkin": test["gherkin"]}
                 if attachment_ids:
                     content_structure["attachment_ids"] = sorted(attachment_ids)
-                page_content = json.dumps(content_structure, indent=2)

             elif test.get("unstructured"):
                 content_structure = {"unstructured": test["unstructured"]}
                 if attachment_ids:
                     content_structure["attachment_ids"] = sorted(attachment_ids)
-                page_content = json.dumps(content_structure, indent=2)

             metadata = {"doctype": self.doctype}

@@ -382,7 +377,12 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):

             if "created" in jira_data:
                 metadata["created_on"] = jira_data["created"]
-
+
+            if jira_data.get("description"):
+                content_structure["description"] = jira_data.get("description")
+
+            page_content = json.dumps(content_structure if content_structure.items() else "", indent=2)
+
             content_hash = hashlib.sha256(page_content.encode('utf-8')).hexdigest()[:16]
             metadata["updated_on"] = content_hash

@@ -407,11 +407,13 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
             if "attachments" in step and step["attachments"]:
                 for attachment in step["attachments"]:
                     if attachment and "id" in attachment and "filename" in attachment:
+                        attachment['step_id'] = step['id']
                         attachments_data.append(attachment)
         if attachments_data:
             metadata["_attachments_data"] = attachments_data

-
+        metadata[IndexerKeywords.CONTENT_IN_BYTES.value] = page_content.encode('utf-8')
+        yield Document(page_content='', metadata=metadata)

         except Exception as e:
             logger.error(f"Error processing test data: {e}")
@@ -430,14 +432,7 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
             Generator[Document, None, None]: A generator yielding processed Document objects with metadata.
         """
         try:
-            if not getattr(self, '_include_attachments', False):
-                yield document
-                return
-
             attachments_data = document.metadata.get("_attachments_data", [])
-            if not attachments_data:
-                yield document
-                return

             issue_id = document.metadata.get("id")

@@ -458,44 +453,33 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
                     ).append(attachment_id)

                 try:
-
-
-
-
+                    attachment_metadata = {
+                        'id': str(attachment_id),
+                        'issue_key': document.metadata.get('key', ''),
+                        'issueId': str(issue_id),
+                        'projectId': document.metadata.get('projectId', ''),
+                        'source': f"xray_test_{issue_id}",
+                        'filename': filename,
+                        'download_link': attachment.get('downloadLink', ''),
+                        'entity_type': 'test_case_attachment',
+                        'step_id': attachment.get('step_id', ''),
+                        'key': document.metadata.get('key', ''),
+                        IndexerKeywords.PARENT.value: document.metadata.get('id', str(issue_id)),
+                        'type': 'attachment',
+                        'doctype': self.doctype,
+                    }
+                    yield from self._process_attachment(attachment, attachment_metadata)
                 except Exception as e:
                     logger.error(f"Failed to process attachment {filename}: {str(e)}")
                     continue
-
-                attachment_metadata = {
-                    'id': str(attachment_id),
-                    'issue_key': document.metadata.get('key', ''),
-                    'issueId': str(issue_id),
-                    'projectId': document.metadata.get('projectId', ''),
-                    'source': f"xray_test_{issue_id}",
-                    'filename': filename,
-                    'download_link': attachment.get('downloadLink', ''),
-                    'entity_type': 'test_case_attachment',
-                    'key': document.metadata.get('key', ''),
-                    IndexerKeywords.PARENT.value: document.metadata.get('id', str(issue_id)),
-                    'type': 'attachment',
-                    'doctype': self.doctype,
-                }
-
-                yield Document(
-                    page_content=content,
-                    metadata=attachment_metadata
-                )

             if "_attachments_data" in document.metadata:
                 del document.metadata["_attachments_data"]

-            yield document
-
         except Exception as e:
             logger.error(f"Error processing document for attachments: {e}")
-            yield document

-    def _process_attachment(self, attachment: Dict[str, Any]) ->
+    def _process_attachment(self, attachment: Dict[str, Any], attachment_metadata) -> Generator[Document, None, None]:
         """
         Processes an attachment to extract its content.

@@ -508,38 +492,17 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
         try:
             download_link = attachment.get('downloadLink')
             filename = attachment.get('filename', '')
-
-            if not download_link:
-                return f"Attachment: {filename} (no download link available)"

             try:
                 auth_token = self._ensure_auth_token()
                 headers = {'Authorization': f'Bearer {auth_token}'}
                 response = requests.get(download_link, headers=headers, timeout=30)
                 response.raise_for_status()
-
-
-
-
-
-                        file_content=response.content,
-                        file_name=filename,
-                        llm=self.llm,
-                        is_capture_image=True
-                    )
-                else:
-                    content = load_content_from_bytes(
-                        response.content,
-                        ext,
-                        llm=self.llm
-                    )
-
-                if content:
-                    return f"filename: {filename}\ncontent: {content}"
-                else:
-                    logger.warning(f"No content extracted from attachment {filename}")
-                    return f"filename: {filename}\ncontent: [No extractable content]"
-
+
+                yield from self._load_attachment(content=response.content,
+                                                 file_name=filename,
+                                                 attachment_metadata=attachment_metadata)
+
             except requests.RequestException as req_e:
                 logger.error(f"Unable to download attachment {filename} with existing token: {req_e}")

@@ -560,23 +523,13 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
                     fresh_headers = {'Authorization': f'Bearer {fresh_token}'}
                     response = requests.get(download_link, headers=fresh_headers, timeout=60)
                     response.raise_for_status()
-
-
-
-
-                        file_name=filename,
-                        llm=self.llm,
-                        is_capture_image=True
-                    ) if ext == '.pdf' else load_content_from_bytes(response.content, ext, llm=self.llm)
-
-                    if content:
-                        return f"filename: {filename}\ncontent: {content}"
-                    else:
-                        return f"filename: {filename}\ncontent: [Content extraction failed after re-auth]"
+
+                    yield from self._load_attachment(content=response.content,
+                                                     file_name=filename,
+                                                     attachment_metadata=attachment_metadata)

                 except Exception as reauth_e:
                     logger.error(f"Re-authentication and retry failed for {filename}: {reauth_e}")
-                    return f"Attachment: {filename} (download failed: {str(req_e)}, re-auth failed: {str(reauth_e)})"
             else:
                 try:
                     auth_token = self._ensure_auth_token()
@@ -587,34 +540,29 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
                     }
                     response = requests.get(download_link, headers=fallback_headers, timeout=60)
                     response.raise_for_status()
-
-
-
-
-                        file_name=filename,
-                        llm=self.llm,
-                        is_capture_image=True
-                    ) if ext == '.pdf' else load_content_from_bytes(response.content, ext, llm=self.llm)
-
-                    if content:
-                        return f"filename: {filename}\ncontent: {content}"
-                    else:
-                        return f"filename: {filename}\ncontent: [Content extraction failed after fallback]"
+
+                    yield from self._load_attachment(content=response.content,
+                                                     file_name=filename,
+                                                     attachment_metadata=attachment_metadata)

                 except Exception as fallback_e:
                     logger.error(f"Fallback download also failed for {filename}: {fallback_e}")
-                    return f"Attachment: {filename} (download failed: {str(req_e)}, fallback failed: {str(fallback_e)})"

         except Exception as parse_e:
             logger.error(f"Unable to parse attachment {filename}: {parse_e}")
-            return f"Attachment: {filename} (parsing failed: {str(parse_e)})"

     except Exception as e:
         logger.error(f"Error processing attachment: {e}")
-
+
+    def _load_attachment(self, content, file_name, attachment_metadata) -> Generator[Document, None, None]:
+        attachment_metadata[IndexerKeywords.CONTENT_IN_BYTES.value] = content
+        attachment_metadata[IndexerKeywords.CONTENT_FILE_NAME.value] = file_name
+        yield Document(page_content='', metadata=attachment_metadata)

     def _index_tool_params(self, **kwargs) -> dict[str, tuple[type, Field]]:
         return {
+            'chunking_tool': (Literal['json', ''],
+                              Field(description="Name of chunking tool for base document", default='json')),
             'jql': (Optional[str], Field(description="""JQL query for searching test cases in Xray.

 Standard JQL query syntax for filtering Xray test cases. Examples:
@@ -684,9 +632,9 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
         except Exception as e:
             raise ToolException(f"Unable to execute GraphQL due to error: {str(e)}")

-    @
+    @extend_with_parent_available_tools
     def get_available_tools(self):
-
+        return [
             {
                 "name": "get_tests",
                 "description": self.get_tests.__doc__,
@@ -711,7 +659,4 @@ class XrayApiWrapper(BaseVectorStoreToolApiWrapper):
                 "args_schema": XrayGrapql,
                 "ref": self.execute_graphql,
             }
-        ]
-
-        tools.extend(self._get_vector_search_tools())
-        return tools
+        ]
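
The Xray changes above follow one pattern: instead of parsing attachments inline (the removed parse_file_content / load_content_from_bytes calls) and returning formatted strings, the wrapper now yields Document objects whose metadata carries the raw bytes and file name under IndexerKeywords, apparently leaving parsing and chunking to the shared indexer base class. The sketch below illustrates that generator pattern in isolation; the Document and IndexerKeywords stand-ins are simplified placeholders, not the real alita_sdk or langchain classes.

from dataclasses import dataclass, field
from enum import Enum
from typing import Any, Dict, Generator

# Simplified stand-ins mirroring names used in the diff; not the real SDK classes.
class IndexerKeywords(Enum):
    CONTENT_IN_BYTES = "content_in_bytes"
    CONTENT_FILE_NAME = "content_file_name"

@dataclass
class Document:
    page_content: str
    metadata: Dict[str, Any] = field(default_factory=dict)

def load_attachment(content: bytes, file_name: str,
                    attachment_metadata: Dict[str, Any]) -> Generator[Document, None, None]:
    """Attach raw bytes and the file name to metadata and yield an 'empty' Document;
    a downstream indexer decides how to parse and chunk the bytes."""
    attachment_metadata[IndexerKeywords.CONTENT_IN_BYTES.value] = content
    attachment_metadata[IndexerKeywords.CONTENT_FILE_NAME.value] = file_name
    yield Document(page_content="", metadata=attachment_metadata)

def process_attachment(attachment: Dict[str, Any],
                       attachment_metadata: Dict[str, Any]) -> Generator[Document, None, None]:
    # Placeholder for the HTTP download step in the real wrapper.
    downloaded: bytes = attachment.get("bytes", b"")
    filename: str = attachment.get("filename", "attachment.bin")
    yield from load_attachment(downloaded, filename, attachment_metadata)

docs = list(process_attachment({"bytes": b"step evidence", "filename": "evidence.txt"},
                               {"type": "attachment", "doctype": "xray_test"}))
print(docs[0].metadata["content_file_name"])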
alita_sdk/tools/yagmail/__init__.py

@@ -6,6 +6,7 @@ from pydantic import create_model, BaseModel, Field, SecretStr

 from .yagmail_wrapper import YagmailWrapper, SMTP_SERVER
 from ..base.tool import BaseAction
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META

 name = "yagmail"

@@ -34,7 +35,7 @@ class AlitaYagmailToolkit(BaseToolkit):
         )

     @classmethod
-    def get_toolkit(cls, selected_tools: list[str] | None = None, **kwargs):
+    def get_toolkit(cls, selected_tools: list[str] | None = None, toolkit_name: Optional[str] = None, **kwargs):
         if selected_tools is None:
             selected_tools = []
         yagmail_wrapper = YagmailWrapper(**kwargs)
@@ -44,11 +45,16 @@ class AlitaYagmailToolkit(BaseToolkit):
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=yagmail_wrapper,
                 name=tool["name"],
-                description=
-                args_schema=tool["args_schema"]
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)

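
The yagmail change above (repeated in the Zephyr toolkits below) drops the old name-prefixing in favour of a description prefix and a metadata dictionary on each BaseAction: the description is prefixed with the toolkit name, capped at 1000 characters, and the tool is tagged with toolkit/tool metadata. A minimal sketch of that per-tool assembly, with the *_META constants as stand-ins for the values imported from alita_sdk.runtime.utils.constants:

from typing import Optional

# Stand-in values; the real constants live in alita_sdk.runtime.utils.constants.
TOOLKIT_NAME_META = "toolkit_name"
TOOLKIT_TYPE_META = "toolkit_type"
TOOL_NAME_META = "tool_name"

def describe_tool(tool: dict, toolkit_name: Optional[str], toolkit_type: str) -> tuple[str, dict]:
    """Mirror the per-tool loop in get_toolkit(): prefix the description with the
    toolkit name, cap it at 1000 characters, and build the metadata tag."""
    description = tool["description"]
    if toolkit_name:
        description = f"Toolkit: {toolkit_name}\n{description}"
    description = description[:1000]
    metadata = (
        {TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: toolkit_type, TOOL_NAME_META: tool["name"]}
        if toolkit_name
        else {TOOL_NAME_META: tool["name"]}
    )
    return description, metadata

desc, meta = describe_tool({"name": "send_email", "description": "Send an email via yagmail."},
                           toolkit_name="team_mail", toolkit_type="yagmail")
print(desc.splitlines()[0])  # -> Toolkit: team_mail
print(meta)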
alita_sdk/tools/zephyr/__init__.py

@@ -7,7 +7,9 @@ from pydantic import create_model, BaseModel, Field, SecretStr

 from ..base.tool import BaseAction
 from .api_wrapper import ZephyrV1ApiWrapper
-from ..
+from ..elitea_base import filter_missconfigured_index_tools
+from ..utils import clean_string, get_max_toolkit_length
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META

 name = "zephyr"

@@ -22,15 +24,13 @@ def get_tools(tool):

 class ZephyrToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0

     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in ZephyrV1ApiWrapper.model_construct().get_available_tools()}
-        ZephyrToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            base_url=(str, Field(description="Base URL"
+            base_url=(str, Field(description="Base URL")),
             username=(str, Field(description="Username")),
             password=(SecretStr, Field(description="Password", json_schema_extra={'secret': True})),
             selected_tools=(List[Literal[tuple(selected_tools)]], Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
@@ -45,20 +45,25 @@ class ZephyrToolkit(BaseToolkit):
         )

     @classmethod
+    @filter_missconfigured_index_tools
     def get_toolkit(cls, selected_tools: list[str] | None = None, toolkit_name: Optional[str] = None, **kwargs):
         zephyr_api_wrapper = ZephyrV1ApiWrapper(**kwargs)
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = zephyr_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=zephyr_api_wrapper,
                 name=tool["name"],
-                description=
-                args_schema=tool["args_schema"]
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)

alita_sdk/tools/zephyr_enterprise/__init__.py

@@ -4,9 +4,11 @@ from typing import List, Literal, Optional

 from .api_wrapper import ZephyrApiWrapper
 from ..base.tool import BaseAction
-from ..
+from ..elitea_base import filter_missconfigured_index_tools
+from ..utils import clean_string, get_max_toolkit_length
 from ...configurations.pgvector import PgVectorConfiguration
 from ...configurations.zephyr_enterprise import ZephyrEnterpriseConfiguration
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META

 name = "zephyr_enterprise"

@@ -27,23 +29,22 @@ def get_tools(tool):

 class ZephyrEnterpriseToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0

     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in
                           ZephyrApiWrapper.model_construct().get_available_tools()}
-        ZephyrEnterpriseToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            zephyr_configuration=(
+            zephyr_configuration=(ZephyrEnterpriseConfiguration, Field(description="Zephyr Configuration", json_schema_extra={'configuration_types': ['zephyr_enterprise']})),
             pgvector_configuration=(Optional[PgVectorConfiguration], Field(description="PgVector Configuration",
                                                                            json_schema_extra={
                                                                                'configuration_types': ['pgvector']},
                                                                            default=None)),
             # embedder settings
             embedding_model=(Optional[str], Field(default=None, description="Embedding configuration.", json_schema_extra={'configuration_model': 'embedding'})),
-            selected_tools=(List[Literal[tuple(selected_tools)]],
+            selected_tools=(List[Literal[tuple(selected_tools)]],
+                            Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
             __config__=ConfigDict(json_schema_extra={
                 'metadata': {
                     "label": "Zephyr Enterprise", "icon_url": "zephyr.svg",
@@ -53,6 +54,7 @@ class ZephyrEnterpriseToolkit(BaseToolkit):
         )

     @classmethod
+    @filter_missconfigured_index_tools
     def get_toolkit(cls, selected_tools: list[str] | None = None, toolkit_name: Optional[str] = None, **kwargs):
         if selected_tools is None:
             selected_tools = []
@@ -64,17 +66,22 @@ class ZephyrEnterpriseToolkit(BaseToolkit):
             **(kwargs.get('embedding_configuration') or {}),
         }
         zephyr_api_wrapper = ZephyrApiWrapper(**wrapper_payload)
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = zephyr_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools and tool["name"] not in selected_tools:
                 continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description + "\nZephyr Enterprise instance: " + zephyr_api_wrapper.base_url
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=zephyr_api_wrapper,
-                name=
-                description=
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)

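
In the Zephyr Enterprise toolkit above, the connection settings move into a zephyr_configuration field whose type is a dedicated configuration model, with UI hints passed through json_schema_extra. The sketch below shows how such a schema is declared with pydantic's create_model; the ZephyrEnterpriseConfiguration here is a simplified stand-in for the SDK's real configuration class, and the tool names are illustrative.

from typing import List, Literal
from pydantic import BaseModel, ConfigDict, Field, create_model

# Simplified stand-in for alita_sdk.configurations.zephyr_enterprise.ZephyrEnterpriseConfiguration.
class ZephyrEnterpriseConfiguration(BaseModel):
    base_url: str
    token: str

selected_tools = {"get_test_case": {}, "search_by_zql": {}}  # illustrative tool names

# Mirrors the shape of the create_model(...) call in toolkit_config_schema() above.
ZephyrEnterpriseToolkitConfig = create_model(
    "zephyr_enterprise",
    zephyr_configuration=(
        ZephyrEnterpriseConfiguration,
        Field(description="Zephyr Configuration",
              json_schema_extra={"configuration_types": ["zephyr_enterprise"]}),
    ),
    selected_tools=(
        List[Literal[tuple(selected_tools)]],
        Field(default=[], json_schema_extra={"args_schemas": selected_tools}),
    ),
    __config__=ConfigDict(json_schema_extra={"metadata": {"label": "Zephyr Enterprise"}}),
)

print(list(ZephyrEnterpriseToolkitConfig.model_json_schema()["properties"]))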
alita_sdk/tools/zephyr_enterprise/api_wrapper.py

@@ -1,14 +1,16 @@
+import json
 import logging
-from
-from operator import ifloordiv
-from typing import Optional, List, Generator
+from typing import Optional, List, Generator, Literal

 from langchain_core.tools import ToolException
 from pydantic import create_model, model_validator, PrivateAttr, Field, SecretStr

 from langchain_core.documents import Document
 from .zephyr_enterprise import ZephyrClient
-from ..
+from ..non_code_indexer_toolkit import NonCodeIndexerToolkit
+from ..utils.available_tools_decorator import extend_with_parent_available_tools
+from ..utils.content_parser import file_extension_by_chunker
+from ...runtime.utils.utils import IndexerKeywords

 logger = logging.getLogger(__name__)

@@ -23,7 +25,7 @@ zql_description = """
     "folder=\"TestToolkit\"", "name~\"TestToolkit5\"
 """

-class ZephyrApiWrapper(
+class ZephyrApiWrapper(NonCodeIndexerToolkit):
     base_url: str
     token: SecretStr
     _client: Optional[ZephyrClient] = PrivateAttr()
@@ -34,7 +36,7 @@ class ZephyrApiWrapper(BaseVectorStoreToolApiWrapper):
         base_url = values.get('base_url')
         token = values.get('token')
         cls._client = ZephyrClient(base_url=base_url, token=token)
-        return values
+        return super().validate_toolkit(values)

     def get_test_case(self, testcase_id: str):

@@ -153,25 +155,38 @@ class ZephyrApiWrapper(BaseVectorStoreToolApiWrapper):
         Returns a list of fields for index_data args schema.
         """
         return {
+            'chunking_tool': (Literal['json', ''], Field(description="Name of chunking tool", default='json')),
             "zql": (str, Field(description=zql_description, examples=["folder=\"TestToolkit\"", "name~\"TestToolkit5\""]))
+
         }

     def _base_loader(self, zql: str, **kwargs) -> Generator[Document, None, None]:
-
+        self._chunking_tool = kwargs.get('chunking_tool', None)
+        test_cases = self.get_testcases_by_zql(zql=zql, return_as_list=True)
         for test_case in test_cases:
             metadata = {
-
-
-
+                "updated_on": str(test_case.get("lastModifiedOn")),
+                "id": str(test_case.get("id")),
+                "name": test_case.get("name"),
+                "testcaseId": str(test_case.get("testcaseId")),
+                "projectId": test_case.get("projectId"),
+                "projectName": test_case.get("projectName"),
+                "testcaseType": test_case.get("testcaseType"),
             }
             yield Document(page_content='', metadata=metadata)

-    def
-
-
-
+    def _extend_data(self, documents: Generator[Document, None, None]) -> Generator[Document, None, None]:
+        for document in documents:
+            try:
+                id = document.metadata['id']
+                test_case_content = self.get_test_case_steps(id)
+                page_content = json.dumps(test_case_content)
+                document.metadata[IndexerKeywords.CONTENT_IN_BYTES.value] = page_content.encode('utf-8')
+            except Exception as e:
+                logging.error(f"Failed to process document: {e}")
+            yield document

-    @
+    @extend_with_parent_available_tools
     def get_available_tools(self):
         return [
             {
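
The wrapper's indexing path above (cut off at the end of the published diff) is split into two generator stages: _base_loader yields lightweight Documents that carry only test-case metadata, and _extend_data then fetches the full step content and attaches it as bytes for the downstream indexer. A self-contained sketch of that two-stage pipeline, using simplified stand-in types rather than the SDK's real classes:

import json
from dataclasses import dataclass, field
from typing import Any, Callable, Dict, Generator, List

CONTENT_IN_BYTES = "content_in_bytes"  # stand-in for IndexerKeywords.CONTENT_IN_BYTES.value

@dataclass
class Document:
    page_content: str
    metadata: Dict[str, Any] = field(default_factory=dict)

def base_loader(test_cases: List[dict]) -> Generator[Document, None, None]:
    """Stage 1: one lightweight Document per test case, metadata only."""
    for tc in test_cases:
        yield Document(page_content="", metadata={
            "id": str(tc.get("id")),
            "name": tc.get("name"),
            "updated_on": str(tc.get("lastModifiedOn")),
        })

def extend_data(documents: Generator[Document, None, None],
                fetch_steps: Callable[[str], dict]) -> Generator[Document, None, None]:
    """Stage 2: fetch full content per document and attach it as raw bytes;
    parsing and chunking happen further downstream."""
    for document in documents:
        try:
            steps = fetch_steps(document.metadata["id"])
            document.metadata[CONTENT_IN_BYTES] = json.dumps(steps).encode("utf-8")
        except Exception as exc:  # keep the pipeline going if one item fails
            print(f"Failed to process document: {exc}")
        yield document

cases = [{"id": 7, "name": "Login works", "lastModifiedOn": "2024-01-01"}]
pipeline = extend_data(base_loader(cases), fetch_steps=lambda tc_id: {"steps": ["open app", "log in"]})
for doc in pipeline:
    print(doc.metadata["name"], doc.metadata[CONTENT_IN_BYTES])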