alita-sdk 0.3.351__py3-none-any.whl → 0.3.499__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (206)
  1. alita_sdk/cli/__init__.py +10 -0
  2. alita_sdk/cli/__main__.py +17 -0
  3. alita_sdk/cli/agent/__init__.py +5 -0
  4. alita_sdk/cli/agent/default.py +258 -0
  5. alita_sdk/cli/agent_executor.py +155 -0
  6. alita_sdk/cli/agent_loader.py +215 -0
  7. alita_sdk/cli/agent_ui.py +228 -0
  8. alita_sdk/cli/agents.py +3601 -0
  9. alita_sdk/cli/callbacks.py +647 -0
  10. alita_sdk/cli/cli.py +168 -0
  11. alita_sdk/cli/config.py +306 -0
  12. alita_sdk/cli/context/__init__.py +30 -0
  13. alita_sdk/cli/context/cleanup.py +198 -0
  14. alita_sdk/cli/context/manager.py +731 -0
  15. alita_sdk/cli/context/message.py +285 -0
  16. alita_sdk/cli/context/strategies.py +289 -0
  17. alita_sdk/cli/context/token_estimation.py +127 -0
  18. alita_sdk/cli/formatting.py +182 -0
  19. alita_sdk/cli/input_handler.py +419 -0
  20. alita_sdk/cli/inventory.py +1256 -0
  21. alita_sdk/cli/mcp_loader.py +315 -0
  22. alita_sdk/cli/toolkit.py +327 -0
  23. alita_sdk/cli/toolkit_loader.py +85 -0
  24. alita_sdk/cli/tools/__init__.py +43 -0
  25. alita_sdk/cli/tools/approval.py +224 -0
  26. alita_sdk/cli/tools/filesystem.py +1751 -0
  27. alita_sdk/cli/tools/planning.py +389 -0
  28. alita_sdk/cli/tools/terminal.py +414 -0
  29. alita_sdk/community/__init__.py +64 -8
  30. alita_sdk/community/inventory/__init__.py +224 -0
  31. alita_sdk/community/inventory/config.py +257 -0
  32. alita_sdk/community/inventory/enrichment.py +2137 -0
  33. alita_sdk/community/inventory/extractors.py +1469 -0
  34. alita_sdk/community/inventory/ingestion.py +3172 -0
  35. alita_sdk/community/inventory/knowledge_graph.py +1457 -0
  36. alita_sdk/community/inventory/parsers/__init__.py +218 -0
  37. alita_sdk/community/inventory/parsers/base.py +295 -0
  38. alita_sdk/community/inventory/parsers/csharp_parser.py +907 -0
  39. alita_sdk/community/inventory/parsers/go_parser.py +851 -0
  40. alita_sdk/community/inventory/parsers/html_parser.py +389 -0
  41. alita_sdk/community/inventory/parsers/java_parser.py +593 -0
  42. alita_sdk/community/inventory/parsers/javascript_parser.py +629 -0
  43. alita_sdk/community/inventory/parsers/kotlin_parser.py +768 -0
  44. alita_sdk/community/inventory/parsers/markdown_parser.py +362 -0
  45. alita_sdk/community/inventory/parsers/python_parser.py +604 -0
  46. alita_sdk/community/inventory/parsers/rust_parser.py +858 -0
  47. alita_sdk/community/inventory/parsers/swift_parser.py +832 -0
  48. alita_sdk/community/inventory/parsers/text_parser.py +322 -0
  49. alita_sdk/community/inventory/parsers/yaml_parser.py +370 -0
  50. alita_sdk/community/inventory/patterns/__init__.py +61 -0
  51. alita_sdk/community/inventory/patterns/ast_adapter.py +380 -0
  52. alita_sdk/community/inventory/patterns/loader.py +348 -0
  53. alita_sdk/community/inventory/patterns/registry.py +198 -0
  54. alita_sdk/community/inventory/presets.py +535 -0
  55. alita_sdk/community/inventory/retrieval.py +1403 -0
  56. alita_sdk/community/inventory/toolkit.py +173 -0
  57. alita_sdk/community/inventory/visualize.py +1370 -0
  58. alita_sdk/configurations/bitbucket.py +94 -2
  59. alita_sdk/configurations/confluence.py +96 -1
  60. alita_sdk/configurations/gitlab.py +79 -0
  61. alita_sdk/configurations/jira.py +103 -0
  62. alita_sdk/configurations/testrail.py +88 -0
  63. alita_sdk/configurations/xray.py +93 -0
  64. alita_sdk/configurations/zephyr_enterprise.py +93 -0
  65. alita_sdk/configurations/zephyr_essential.py +75 -0
  66. alita_sdk/runtime/clients/artifact.py +1 -1
  67. alita_sdk/runtime/clients/client.py +214 -42
  68. alita_sdk/runtime/clients/mcp_discovery.py +342 -0
  69. alita_sdk/runtime/clients/mcp_manager.py +262 -0
  70. alita_sdk/runtime/clients/sandbox_client.py +373 -0
  71. alita_sdk/runtime/langchain/assistant.py +118 -30
  72. alita_sdk/runtime/langchain/constants.py +8 -1
  73. alita_sdk/runtime/langchain/document_loaders/AlitaDocxMammothLoader.py +315 -3
  74. alita_sdk/runtime/langchain/document_loaders/AlitaExcelLoader.py +103 -60
  75. alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +4 -1
  76. alita_sdk/runtime/langchain/document_loaders/AlitaPowerPointLoader.py +41 -12
  77. alita_sdk/runtime/langchain/document_loaders/AlitaTableLoader.py +1 -1
  78. alita_sdk/runtime/langchain/document_loaders/constants.py +116 -99
  79. alita_sdk/runtime/langchain/interfaces/llm_processor.py +2 -2
  80. alita_sdk/runtime/langchain/langraph_agent.py +307 -71
  81. alita_sdk/runtime/langchain/utils.py +48 -8
  82. alita_sdk/runtime/llms/preloaded.py +2 -6
  83. alita_sdk/runtime/models/mcp_models.py +61 -0
  84. alita_sdk/runtime/toolkits/__init__.py +26 -0
  85. alita_sdk/runtime/toolkits/application.py +9 -2
  86. alita_sdk/runtime/toolkits/artifact.py +18 -6
  87. alita_sdk/runtime/toolkits/datasource.py +13 -6
  88. alita_sdk/runtime/toolkits/mcp.py +780 -0
  89. alita_sdk/runtime/toolkits/planning.py +178 -0
  90. alita_sdk/runtime/toolkits/tools.py +205 -55
  91. alita_sdk/runtime/toolkits/vectorstore.py +9 -4
  92. alita_sdk/runtime/tools/__init__.py +11 -3
  93. alita_sdk/runtime/tools/application.py +7 -0
  94. alita_sdk/runtime/tools/artifact.py +225 -12
  95. alita_sdk/runtime/tools/function.py +95 -5
  96. alita_sdk/runtime/tools/graph.py +10 -4
  97. alita_sdk/runtime/tools/image_generation.py +212 -0
  98. alita_sdk/runtime/tools/llm.py +494 -102
  99. alita_sdk/runtime/tools/mcp_inspect_tool.py +284 -0
  100. alita_sdk/runtime/tools/mcp_remote_tool.py +181 -0
  101. alita_sdk/runtime/tools/mcp_server_tool.py +4 -4
  102. alita_sdk/runtime/tools/planning/__init__.py +36 -0
  103. alita_sdk/runtime/tools/planning/models.py +246 -0
  104. alita_sdk/runtime/tools/planning/wrapper.py +607 -0
  105. alita_sdk/runtime/tools/router.py +2 -1
  106. alita_sdk/runtime/tools/sandbox.py +180 -79
  107. alita_sdk/runtime/tools/vectorstore.py +22 -21
  108. alita_sdk/runtime/tools/vectorstore_base.py +125 -52
  109. alita_sdk/runtime/utils/AlitaCallback.py +106 -20
  110. alita_sdk/runtime/utils/mcp_client.py +465 -0
  111. alita_sdk/runtime/utils/mcp_oauth.py +244 -0
  112. alita_sdk/runtime/utils/mcp_sse_client.py +405 -0
  113. alita_sdk/runtime/utils/mcp_tools_discovery.py +124 -0
  114. alita_sdk/runtime/utils/streamlit.py +40 -13
  115. alita_sdk/runtime/utils/toolkit_utils.py +28 -9
  116. alita_sdk/runtime/utils/utils.py +12 -0
  117. alita_sdk/tools/__init__.py +77 -33
  118. alita_sdk/tools/ado/repos/__init__.py +7 -6
  119. alita_sdk/tools/ado/repos/repos_wrapper.py +11 -11
  120. alita_sdk/tools/ado/test_plan/__init__.py +7 -7
  121. alita_sdk/tools/ado/wiki/__init__.py +7 -11
  122. alita_sdk/tools/ado/wiki/ado_wrapper.py +89 -15
  123. alita_sdk/tools/ado/work_item/__init__.py +7 -11
  124. alita_sdk/tools/ado/work_item/ado_wrapper.py +17 -8
  125. alita_sdk/tools/advanced_jira_mining/__init__.py +8 -7
  126. alita_sdk/tools/aws/delta_lake/__init__.py +11 -9
  127. alita_sdk/tools/azure_ai/search/__init__.py +7 -6
  128. alita_sdk/tools/base_indexer_toolkit.py +345 -70
  129. alita_sdk/tools/bitbucket/__init__.py +9 -8
  130. alita_sdk/tools/bitbucket/api_wrapper.py +50 -6
  131. alita_sdk/tools/browser/__init__.py +4 -4
  132. alita_sdk/tools/carrier/__init__.py +4 -6
  133. alita_sdk/tools/chunkers/__init__.py +3 -1
  134. alita_sdk/tools/chunkers/sematic/json_chunker.py +1 -0
  135. alita_sdk/tools/chunkers/sematic/markdown_chunker.py +97 -6
  136. alita_sdk/tools/chunkers/sematic/proposal_chunker.py +1 -1
  137. alita_sdk/tools/chunkers/universal_chunker.py +270 -0
  138. alita_sdk/tools/cloud/aws/__init__.py +7 -6
  139. alita_sdk/tools/cloud/azure/__init__.py +7 -6
  140. alita_sdk/tools/cloud/gcp/__init__.py +7 -6
  141. alita_sdk/tools/cloud/k8s/__init__.py +7 -6
  142. alita_sdk/tools/code/linter/__init__.py +7 -7
  143. alita_sdk/tools/code/loaders/codesearcher.py +3 -2
  144. alita_sdk/tools/code/sonar/__init__.py +8 -7
  145. alita_sdk/tools/code_indexer_toolkit.py +199 -0
  146. alita_sdk/tools/confluence/__init__.py +9 -8
  147. alita_sdk/tools/confluence/api_wrapper.py +171 -75
  148. alita_sdk/tools/confluence/loader.py +10 -0
  149. alita_sdk/tools/custom_open_api/__init__.py +9 -4
  150. alita_sdk/tools/elastic/__init__.py +8 -7
  151. alita_sdk/tools/elitea_base.py +492 -52
  152. alita_sdk/tools/figma/__init__.py +7 -7
  153. alita_sdk/tools/figma/api_wrapper.py +2 -1
  154. alita_sdk/tools/github/__init__.py +9 -9
  155. alita_sdk/tools/github/api_wrapper.py +9 -26
  156. alita_sdk/tools/github/github_client.py +62 -2
  157. alita_sdk/tools/gitlab/__init__.py +8 -8
  158. alita_sdk/tools/gitlab/api_wrapper.py +135 -33
  159. alita_sdk/tools/gitlab_org/__init__.py +7 -8
  160. alita_sdk/tools/google/bigquery/__init__.py +11 -12
  161. alita_sdk/tools/google_places/__init__.py +8 -7
  162. alita_sdk/tools/jira/__init__.py +9 -7
  163. alita_sdk/tools/jira/api_wrapper.py +100 -52
  164. alita_sdk/tools/keycloak/__init__.py +8 -7
  165. alita_sdk/tools/localgit/local_git.py +56 -54
  166. alita_sdk/tools/memory/__init__.py +1 -1
  167. alita_sdk/tools/non_code_indexer_toolkit.py +3 -2
  168. alita_sdk/tools/ocr/__init__.py +8 -7
  169. alita_sdk/tools/openapi/__init__.py +10 -1
  170. alita_sdk/tools/pandas/__init__.py +8 -7
  171. alita_sdk/tools/postman/__init__.py +7 -8
  172. alita_sdk/tools/postman/api_wrapper.py +19 -8
  173. alita_sdk/tools/postman/postman_analysis.py +8 -1
  174. alita_sdk/tools/pptx/__init__.py +8 -9
  175. alita_sdk/tools/qtest/__init__.py +16 -11
  176. alita_sdk/tools/qtest/api_wrapper.py +1784 -88
  177. alita_sdk/tools/rally/__init__.py +7 -8
  178. alita_sdk/tools/report_portal/__init__.py +9 -7
  179. alita_sdk/tools/salesforce/__init__.py +7 -7
  180. alita_sdk/tools/servicenow/__init__.py +10 -10
  181. alita_sdk/tools/sharepoint/__init__.py +7 -6
  182. alita_sdk/tools/sharepoint/api_wrapper.py +127 -36
  183. alita_sdk/tools/sharepoint/authorization_helper.py +191 -1
  184. alita_sdk/tools/sharepoint/utils.py +8 -2
  185. alita_sdk/tools/slack/__init__.py +7 -6
  186. alita_sdk/tools/sql/__init__.py +8 -7
  187. alita_sdk/tools/sql/api_wrapper.py +71 -23
  188. alita_sdk/tools/testio/__init__.py +7 -6
  189. alita_sdk/tools/testrail/__init__.py +8 -9
  190. alita_sdk/tools/utils/__init__.py +26 -4
  191. alita_sdk/tools/utils/content_parser.py +88 -60
  192. alita_sdk/tools/utils/text_operations.py +254 -0
  193. alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +76 -26
  194. alita_sdk/tools/xray/__init__.py +9 -7
  195. alita_sdk/tools/zephyr/__init__.py +7 -6
  196. alita_sdk/tools/zephyr_enterprise/__init__.py +8 -6
  197. alita_sdk/tools/zephyr_essential/__init__.py +7 -6
  198. alita_sdk/tools/zephyr_essential/api_wrapper.py +12 -13
  199. alita_sdk/tools/zephyr_scale/__init__.py +7 -6
  200. alita_sdk/tools/zephyr_squad/__init__.py +7 -6
  201. {alita_sdk-0.3.351.dist-info → alita_sdk-0.3.499.dist-info}/METADATA +147 -2
  202. {alita_sdk-0.3.351.dist-info → alita_sdk-0.3.499.dist-info}/RECORD +206 -130
  203. alita_sdk-0.3.499.dist-info/entry_points.txt +2 -0
  204. {alita_sdk-0.3.351.dist-info → alita_sdk-0.3.499.dist-info}/WHEEL +0 -0
  205. {alita_sdk-0.3.351.dist-info → alita_sdk-0.3.499.dist-info}/licenses/LICENSE +0 -0
  206. {alita_sdk-0.3.351.dist-info → alita_sdk-0.3.499.dist-info}/top_level.txt +0 -0
alita_sdk/tools/gitlab_org/__init__.py
@@ -6,7 +6,7 @@ from ..base.tool import BaseAction
 from pydantic import create_model, BaseModel, ConfigDict, Field, SecretStr
 
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length
+from ..utils import clean_string, get_max_toolkit_length
 from ...configurations.gitlab import GitlabConfiguration
 
 name = "gitlab_org"
@@ -22,16 +22,12 @@ def get_tools(tool):
 
 class AlitaGitlabSpaceToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in GitLabWorkspaceAPIWrapper.model_construct().get_available_tools()}
-        AlitaGitlabSpaceToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            name=(str, Field(description="Toolkit name", json_schema_extra={'toolkit_name': True,
-                                                                            'max_toolkit_length': AlitaGitlabSpaceToolkit.toolkit_max_length})),
             gitlab_configuration=(GitlabConfiguration, Field(description="GitLab configuration",
                                                              json_schema_extra={
                                                                  'configuration_types': ['gitlab']})),
@@ -63,17 +59,20 @@ class AlitaGitlabSpaceToolkit(BaseToolkit):
             **kwargs['gitlab_configuration'],
         }
         gitlab_wrapper = GitLabWorkspaceAPIWrapper(**wrapper_payload)
-        prefix = clean_string(toolkit_name, AlitaGitlabSpaceToolkit.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = gitlab_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=gitlab_wrapper,
-                name=prefix + tool['name'],
-                description=tool["description"],
+                name=tool['name'],
+                description=description,
                 args_schema=tool["args_schema"]
             ))
         return cls(tools=tools)
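
A pattern that repeats across the toolkit `__init__` modules in this release: the `clean_string(...) + TOOLKIT_SPLITTER` name prefix is dropped, tool names stay bare, and the toolkit identity is prepended to the tool description instead, capped at 1000 characters. A minimal sketch of that logic, assuming nothing beyond what the hunks show (the helper name below is illustrative; the SDK inlines this in each `get_toolkit`):

# Illustrative helper mirroring the inlined description-building logic above.
def build_tool_description(tool_description: str, toolkit_name: str | None = None,
                           max_len: int = 1000) -> str:
    description = tool_description
    if toolkit_name:
        # Toolkit identity moves into the description instead of the tool name
        description = f"Toolkit: {toolkit_name}\n{description}"
    return description[:max_len]  # keep within the 1000-char cap


print(build_tool_description("Create a merge request in the project.", "my_gitlab"))
# Toolkit: my_gitlab
# Create a merge request in the project.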
alita_sdk/tools/google/bigquery/__init__.py
@@ -5,7 +5,7 @@ from langchain_core.tools import BaseTool, BaseToolkit
 from pydantic import BaseModel, Field, computed_field, field_validator
 
 from ....configurations.bigquery import BigQueryConfiguration
-from ...utils import TOOLKIT_SPLITTER, clean_string, get_max_toolkit_length
+from ...utils import clean_string, get_max_toolkit_length
 from .api_wrapper import BigQueryApiWrapper
 from .tool import BigQueryAction
 
@@ -22,11 +22,6 @@ def get_available_tools() -> dict[str, dict]:
     return available_tools
 
 
-toolkit_max_length = lru_cache(maxsize=1)(
-    lambda: get_max_toolkit_length(get_available_tools())
-)
-
-
 class BigQueryToolkitConfig(BaseModel):
     class Config:
         title = name
@@ -86,9 +81,10 @@ class BigQueryToolkit(BaseToolkit):
 
     @computed_field
    @property
-    def tool_prefix(self) -> str:
+    def toolkit_context(self) -> str:
+        """Returns toolkit context for descriptions (max 1000 chars)."""
         return (
-            clean_string(self.toolkit_name, toolkit_max_length()) + TOOLKIT_SPLITTER
+            f" [Toolkit: {clean_string(self.toolkit_name, 0)}]"
             if self.toolkit_name
             else ""
         )
@@ -122,13 +118,16 @@ class BigQueryToolkit(BaseToolkit):
         selected_tools = set(selected_tools)
         for t in instance.available_tools:
             if t["name"] in selected_tools:
+                description = t["description"]
+                if toolkit_name:
+                    description = f"Toolkit: {toolkit_name}\n{description}"
+                description = f"Project: {getattr(instance.api_wrapper, 'project', '')}\n{description}"
+                description = description[:1000]
                 instance.tools.append(
                     BigQueryAction(
                         api_wrapper=instance.api_wrapper,
-                        name=instance.tool_prefix + t["name"],
-                        # set unique description for declared tools to differentiate the same methods for different toolkits
-                        description=f"Project: {getattr(instance.api_wrapper, 'project', '')}\n"
-                        + t["description"],
+                        name=t["name"],
+                        description=description,
                         args_schema=t["args_schema"],
                     )
                 )
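
The BigQuery hunk above renames the `tool_prefix` computed field to `toolkit_context`, keeping it a pydantic v2 `@computed_field` property. A small standalone sketch of that construct, assuming pydantic v2 (the model and field values here are made up, not SDK code):

from typing import Optional
from pydantic import BaseModel, computed_field


class ExampleToolkit(BaseModel):
    toolkit_name: Optional[str] = None

    @computed_field
    @property
    def toolkit_context(self) -> str:
        # Derived value; model_dump() serializes it alongside regular fields
        return f" [Toolkit: {self.toolkit_name}]" if self.toolkit_name else ""


print(ExampleToolkit(toolkit_name="analytics").model_dump())
# {'toolkit_name': 'analytics', 'toolkit_context': ' [Toolkit: analytics]'}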
alita_sdk/tools/google_places/__init__.py
@@ -6,7 +6,7 @@ from pydantic.fields import Field
 from .api_wrapper import GooglePlacesAPIWrapper
 from ..base.tool import BaseAction
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length
+from ..utils import clean_string, get_max_toolkit_length
 from ...configurations.google_places import GooglePlacesConfiguration
 
 name = "google_places"
@@ -22,15 +22,13 @@ def get_tools(tool):
 
 class GooglePlacesToolkit(BaseToolkit):
     tools: list[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in GooglePlacesAPIWrapper.model_construct().get_available_tools()}
-        GooglePlacesToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            results_count=(Optional[int], Field(description="Results number to show", default=None, json_schema_extra={'toolkit_name': True, 'max_toolkit_length': GooglePlacesToolkit.toolkit_max_length})),
+            results_count=(Optional[int], Field(description="Results number to show", default=None)),
             google_places_configuration=(GooglePlacesConfiguration, Field(description="Google Places Configuration", json_schema_extra={'configuration_types': ['google_places']})),
             selected_tools=(List[Literal[tuple(selected_tools)]], Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
             __config__=ConfigDict(json_schema_extra=
@@ -55,16 +53,19 @@ class GooglePlacesToolkit(BaseToolkit):
             **kwargs.get('google_places_configuration', {}),
         }
         google_places_api_wrapper = GooglePlacesAPIWrapper(**wrapper_payload)
-        prefix = clean_string(toolkit_name, GooglePlacesToolkit.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = google_places_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools and tool["name"] not in selected_tools:
                 continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=google_places_api_wrapper,
-                name=prefix + tool["name"],
-                description=tool["description"],
+                name=tool["name"],
+                description=description,
                 args_schema=tool["args_schema"]
             ))
         return cls(tools=tools)
alita_sdk/tools/jira/__init__.py
@@ -6,7 +6,7 @@ from pydantic import create_model, BaseModel, ConfigDict, Field
 import requests
 
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length, parse_list, check_connection_response
+from ..utils import clean_string, get_max_toolkit_length, parse_list, check_connection_response
 from ...configurations.jira import JiraConfiguration
 from ...configurations.pgvector import PgVectorConfiguration
 
@@ -37,12 +37,10 @@ def get_tools(tool):
 
 class JiraToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in JiraApiWrapper.model_construct().get_available_tools()}
-        JiraToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
 
         @check_connection_response
         def check_connection(self):
@@ -68,7 +66,7 @@ class JiraToolkit(BaseToolkit):
             name,
             cloud=(bool, Field(description="Hosting Option", json_schema_extra={'configuration': True})),
             limit=(int, Field(description="Limit issues. Default is 5", gt=0, default=5)),
-            api_version=(Optional[str], Field(description="Rest API version: optional. Default is 2", default="2")),
+            api_version=(Literal['2', '3'], Field(description="Rest API version: optional. Default is 2", default="3")),
             labels=(Optional[str], Field(
                 description="List of comma separated labels used for labeling of agent's created or updated entities",
                 default=None,
@@ -109,17 +107,21 @@ class JiraToolkit(BaseToolkit):
             **(kwargs.get('pgvector_configuration') or {}),
         }
         jira_api_wrapper = JiraApiWrapper(**wrapper_payload)
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = jira_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool["name"] not in selected_tools:
                     continue
+            description = tool["description"]
+            if toolkit_name:
+                description = f"Toolkit: {toolkit_name}\n{description}"
+            description = f"Jira instance: {jira_api_wrapper.url}\n{description}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=jira_api_wrapper,
-                name=prefix + tool["name"],
-                description=f"Tool for Jira: '{jira_api_wrapper.base_url}'\n{tool['description']}",
+                name=tool["name"],
+                description=description,
                 args_schema=tool["args_schema"]
             ))
         return cls(tools=tools)
alita_sdk/tools/jira/api_wrapper.py
@@ -453,41 +453,63 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
         return super().validate_toolkit(values)
 
     def _parse_issues(self, issues: Dict) -> List[dict]:
-        parsed = []
-        for issue in issues["issues"]:
-            if len(parsed) >= self.limit:
+        parsed: List[dict] = []
+        issues_list = issues.get("issues") if isinstance(issues, dict) else None
+        if not isinstance(issues_list, list):
+            return parsed
+
+        for issue in issues_list:
+            if self.limit and len(parsed) >= self.limit:
                 break
-            issue_fields = issue["fields"]
-            key = issue["key"]
-            id = issue["id"]
-            summary = issue_fields["summary"]
-            description = issue_fields["description"]
-            created = issue_fields["created"][0:10]
-            updated = issue_fields["updated"]
-            duedate = issue_fields["duedate"]
-            priority = issue_fields["priority"]["name"]
-            status = issue_fields["status"]["name"]
-            project_id = issue_fields["project"]["id"]
-            issue_url = f"{self._client.url}browse/{key}"
-            try:
-                assignee = issue_fields["assignee"]["displayName"]
-            except Exception:
-                assignee = "None"
+
+            issue_fields = issue.get("fields") or {}
+            key = issue.get("key", "")
+            issue_id = issue.get("id", "")
+
+            summary = issue_fields.get("summary") or ""
+            description = issue_fields.get("description") or ""
+            created_raw = issue_fields.get("created") or ""
+            created = created_raw[:10] if created_raw else ""
+            updated = issue_fields.get("updated") or ""
+            duedate = issue_fields.get("duedate")
+
+            priority_info = issue_fields.get("priority") or {}
+            priority = priority_info.get("name") or "None"
+
+            status_info = issue_fields.get("status") or {}
+            status = status_info.get("name") or "Unknown"
+
+            project_info = issue_fields.get("project") or {}
+            project_id = project_info.get("id") or ""
+
+            issue_url = f"{self._client.url}browse/{key}" if key else self._client.url
+
+            assignee_info = issue_fields.get("assignee") or {}
+            assignee = assignee_info.get("displayName") or "None"
+
             rel_issues = {}
-            for related_issue in issue_fields["issuelinks"]:
-                if "inwardIssue" in related_issue.keys():
-                    rel_type = related_issue["type"]["inward"]
-                    rel_key = related_issue["inwardIssue"]["key"]
+            for related_issue in issue_fields.get("issuelinks") or []:
+                rel_type = None
+                rel_key = None
+                if related_issue.get("inwardIssue"):
+                    rel_type = related_issue.get("type", {}).get("inward")
+                    rel_key = related_issue["inwardIssue"].get("key")
                     # rel_summary = related_issue["inwardIssue"]["fields"]["summary"]
-                if "outwardIssue" in related_issue.keys():
-                    rel_type = related_issue["type"]["outward"]
-                    rel_key = related_issue["outwardIssue"]["key"]
+                elif related_issue.get("outwardIssue"):
+                    rel_type = related_issue.get("type", {}).get("outward")
+                    rel_key = related_issue["outwardIssue"].get("key")
                     # rel_summary = related_issue["outwardIssue"]["fields"]["summary"]
-                rel_issues = {"type": rel_type, "key": rel_key, "url": f"{self._client.url}browse/{rel_key}"}
+
+                if rel_type and rel_key:
+                    rel_issues = {
+                        "type": rel_type,
+                        "key": rel_key,
+                        "url": f"{self._client.url}browse/{rel_key}",
+                    }
 
             parsed_issue = {
                 "key": key,
-                "id": id,
+                "id": issue_id,
                 "projectId": project_id,
                 "summary": summary,
                 "description": description,
@@ -500,10 +522,13 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
                 "url": issue_url,
                 "related_issues": rel_issues,
             }
-            for field in self.additional_fields:
-                field_value = issue_fields.get(field, None)
+
+            for field in (self.additional_fields or []):
+                field_value = issue_fields.get(field)
                 parsed_issue[field] = field_value
+
            parsed.append(parsed_issue)
+
        return parsed
 
     @staticmethod
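
The rewritten `_parse_issues` above replaces direct key access with `.get()` chains and explicit fallbacks, so issues missing `assignee`, `priority`, or `status` no longer raise `KeyError`. A standalone rehearsal of that lookup idiom on a minimal payload (this is not the wrapper itself, just the same pattern applied to made-up data):

issue_fields = {
    "summary": "Sample issue",
    # no assignee, priority, or status in this payload
}

# Each lookup degrades to a placeholder instead of raising KeyError
assignee = (issue_fields.get("assignee") or {}).get("displayName") or "None"
priority = (issue_fields.get("priority") or {}).get("name") or "None"
status = (issue_fields.get("status") or {}).get("name") or "Unknown"

print(assignee, priority, status)  # None None Unknown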
@@ -563,7 +588,7 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
         Use the appropriate issue link type (e.g., "Test", "Relates", "Blocks").
         If we use "Test" linktype, the test is inward issue, the story/other issue is outward issue.."""
 
-        comment = "This test is linked to the story."
+        comment = f"Issue {inward_issue_key} was linked to {outward_issue_key}."
         comment_body = {"content": [{"content": [{"text": comment,"type": "text"}],"type": "paragraph"}],"type": "doc","version": 1} if self.api_version == "3" else comment
         link_data = {
             "type": {"name": f"{linktype}"},
@@ -749,23 +774,31 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
 
         attachment_data = []
         attachments = self._client.get_attachments_ids_from_issue(issue=jira_issue_key)
+        api_version = str(getattr(self._client, "api_version", "2"))
         for attachment in attachments:
             if attachment_pattern and not re.search(attachment_pattern, attachment['filename']):
                 logger.info(f"Skipping attachment {attachment['filename']} as it does not match pattern {attachment_pattern}")
                 continue
             logger.info(f"Processing attachment {attachment['filename']} with ID {attachment['attachment_id']}")
-            if self.api_version == "3":
-                attachment_data.append(self._client.get_attachment_content(attachment['attachment_id']))
-            else:
-                try:
+            try:
+                attachment_content = None
+
+                # Cloud (REST v3) attachments require signed URLs returned from metadata
+                if api_version in {"3", "latest"} or self.cloud:
+                    attachment_content = self._download_attachment_v3(
+                        attachment['attachment_id'],
+                        attachment['filename']
+                    )
+
+                if attachment_content is None:
                     attachment_content = self._client.get_attachment_content(attachment['attachment_id'])
-                except Exception as e:
-                    logger.error(
-                        f"Failed to download attachment {attachment['filename']} for issue {jira_issue_key}: {str(e)}")
-                    attachment_content = self._client.get(
-                        path=f"secure/attachment/{attachment['attachment_id']}/{attachment['filename']}", not_json_response=True)
-                content_docs = process_content_by_type(attachment_content, attachment['filename'], llm=self.llm)
-                attachment_data.append("filename: " + attachment['filename'] + "\ncontent: " + str([doc.page_content for doc in content_docs]))
+            except Exception as e:
+                logger.error(
+                    f"Failed to download attachment {attachment['filename']} for issue {jira_issue_key}: {str(e)}")
+                attachment_content = self._client.get(
+                    path=f"secure/attachment/{attachment['attachment_id']}/{attachment['filename']}", not_json_response=True)
+            content_docs = process_content_by_type(attachment_content, attachment['filename'], llm=self.llm, fallback_extensions=[".txt", ".png"])
+            attachment_data.append("filename: " + attachment['filename'] + "\ncontent: " + str([doc.page_content for doc in content_docs]))
 
         return "\n\n".join(attachment_data)
 
@@ -800,15 +833,6 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
         logger.debug(response_string)
         return response_string
 
-    def _extract_attachment_content(self, attachment):
-        """Extract attachment's content if possible (used for api v.2)"""
-
-        try:
-            content = self._client.get(attachment['content'].replace(self.base_url, ''))
-        except Exception as e:
-            content = f"Unable to parse content of '{attachment['filename']}' due to: {str(e)}"
-        return f"filename: {attachment['filename']}\ncontent: {content}"
-
     # Helper functions for image processing
     @staticmethod
     def _collect_context_for_image(content: str, image_marker: str, context_radius: int = 500) -> str:
@@ -1041,6 +1065,30 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
             logger.error(f"Error downloading attachment: {str(e)}")
             return None
 
+    def _download_attachment_v3(self, attachment_id: str, filename: str | None = None) -> Optional[bytes]:
+        """Download Jira attachment using metadata content URL (required for REST v3 / Cloud)."""
+        try:
+            metadata = self._client.get_attachment(attachment_id)
+        except Exception as e:
+            logger.error(f"Failed to retrieve metadata for attachment {attachment_id}: {str(e)}")
+            return None
+
+        download_url = metadata.get('content') or metadata.get('_links', {}).get('content')
+
+        if not download_url:
+            logger.warning(
+                f"Attachment {attachment_id} ({filename}) metadata does not include a content URL; falling back.")
+            return None
+
+        logger.info(f"Downloading attachment {attachment_id} via metadata content URL (v3).")
+        content = self._download_attachment(download_url)
+
+        if content is None:
+            logger.error(
+                f"Failed to download attachment {attachment_id} ({filename}) from v3 content URL: {download_url}")
+
+        return content
+
     def _extract_image_data(self, field_data):
         """
         Extracts image data from general JSON response.
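
Taken together, the attachment hunks above establish an order of preference: on Cloud / REST v3, resolve the attachment's content URL from its metadata via `_download_attachment_v3`; if that yields nothing, fall back to `get_attachment_content`; and on any exception, fetch the raw `secure/attachment/<id>/<filename>` path. A condensed sketch of that chain with stand-in callables (the real code uses the wrapper's private methods shown above, not these parameters):

from typing import Callable, Optional

def fetch_attachment(attachment_id: str, filename: str, *, is_cloud: bool,
                     via_metadata_url: Callable[[str, str], Optional[bytes]],
                     via_client: Callable[[str], bytes],
                     via_secure_path: Callable[[str, str], bytes]) -> bytes:
    """Order of preference mirrored from the hunks above; the callables are stand-ins."""
    try:
        content: Optional[bytes] = None
        if is_cloud:                      # REST v3 / Cloud: signed URL from metadata
            content = via_metadata_url(attachment_id, filename)
        if content is None:               # Server/DC, or metadata lookup returned nothing
            content = via_client(attachment_id)
        return content
    except Exception:                     # last resort: raw secure/attachment path
        return via_secure_path(attachment_id, filename)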
@@ -1330,7 +1378,7 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
 
         # Use provided JQL query or default to all issues
         if not jql:
-            jql_query = "ORDER BY updated DESC"  # Default to get all issues ordered by update time
+            jql_query = "created >= \"1970-01-01\" ORDER BY updated DESC"  # Default to get all issues ordered by update time
         else:
             jql_query = jql
 
alita_sdk/tools/keycloak/__init__.py
@@ -5,7 +5,7 @@ from pydantic import BaseModel, ConfigDict, create_model, Field, SecretStr
 
 from .api_wrapper import KeycloakApiWrapper
 from ..base.tool import BaseAction
-from ..utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length
+from ..utils import clean_string, get_max_toolkit_length
 
 name = "keycloak"
 
@@ -21,15 +21,13 @@ def get_tools(tool):
 
 class KeycloakToolkit(BaseToolkit):
     tools: list[BaseTool] = []
-    toolkit_max_length: int = 0
 
     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in KeycloakApiWrapper.model_construct().get_available_tools()}
-        KeycloakToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            base_url=(str, Field(default="", title="Server URL", description="Keycloak server URL", json_schema_extra={'toolkit_name': True, 'max_toolkit_length': KeycloakToolkit.toolkit_max_length})),
+            base_url=(str, Field(default="", title="Server URL", description="Keycloak server URL", json_schema_extra={'toolkit_name': True})),
             realm=(str, Field(default="", title="Realm", description="Keycloak realm")),
             client_id=(str, Field(default="", title="Client ID", description="Keycloak client ID")),
             client_secret=(SecretStr, Field(default="", title="Client sercet", description="Keycloak client secret", json_schema_extra={'secret': True})),
@@ -42,16 +40,19 @@ class KeycloakToolkit(BaseToolkit):
         if selected_tools is None:
             selected_tools = []
         keycloak_api_wrapper = KeycloakApiWrapper(**kwargs)
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         available_tools = keycloak_api_wrapper.get_available_tools()
         tools = []
         for tool in available_tools:
             if selected_tools and tool["name"] not in selected_tools:
                 continue
+            description = f"{tool['description']}\nUrl: {keycloak_api_wrapper.base_url}"
+            if toolkit_name:
+                description = f"{description}\nToolkit: {toolkit_name}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=keycloak_api_wrapper,
-                name=prefix + tool["name"],
-                description=f"{tool['description']}\nUrl: {keycloak_api_wrapper.base_url}",
+                name=tool["name"],
+                description=description,
                 args_schema=tool["args_schema"]
             ))
         return cls(tools=tools)
alita_sdk/tools/localgit/local_git.py
@@ -8,7 +8,8 @@ from git import Repo
 from pydantic import BaseModel, Field, create_model, model_validator
 from langchain_core.tools import ToolException
 
-from ..elitea_base import BaseToolApiWrapper
+from ..elitea_base import BaseToolApiWrapper, extend_with_file_operations
+from ..utils.text_operations import parse_old_new_markers
 
 logger = logging.getLogger(__name__)
 CREATE_FILE_PROMPT = """Create new file in your local repository."""
@@ -128,58 +129,6 @@ class LocalGit(BaseToolApiWrapper):
         repo.head.reset(commit=commit_sha, working_tree=True)
         return values
 
-    def extract_old_new_pairs(self, file_query):
-        # Split the file content by lines
-        code_lines = file_query.split("\n")
-
-        # Initialize lists to hold the contents of OLD and NEW sections
-        old_contents = []
-        new_contents = []
-
-        # Initialize variables to track whether the current line is within an OLD or NEW section
-        in_old_section = False
-        in_new_section = False
-
-        # Temporary storage for the current section's content
-        current_section_content = []
-
-        # Iterate through each line in the file content
-        for line in code_lines:
-            # Check for OLD section start
-            if "OLD <<<" in line:
-                in_old_section = True
-                current_section_content = []  # Reset current section content
-                continue  # Skip the line with the marker
-
-            # Check for OLD section end
-            if ">>>> OLD" in line:
-                in_old_section = False
-                old_contents.append("\n".join(current_section_content).strip())  # Add the captured content
-                current_section_content = []  # Reset current section content
-                continue  # Skip the line with the marker
-
-            # Check for NEW section start
-            if "NEW <<<" in line:
-                in_new_section = True
-                current_section_content = []  # Reset current section content
-                continue  # Skip the line with the marker
-
-            # Check for NEW section end
-            if ">>>> NEW" in line:
-                in_new_section = False
-                new_contents.append("\n".join(current_section_content).strip())  # Add the captured content
-                current_section_content = []  # Reset current section content
-                continue  # Skip the line with the marker
-
-            # If currently in an OLD or NEW section, add the line to the current section content
-            if in_old_section or in_new_section:
-                current_section_content.append(line)
-
-        # Pair the OLD and NEW contents
-        paired_contents = list(zip(old_contents, new_contents))
-
-        return paired_contents
-
     def checkout_commit(self, commit_sha: str) -> str:
         """ Checkout specific commit from repository """
         try:
@@ -233,6 +182,58 @@ class LocalGit(BaseToolApiWrapper):
                 return f.read()
         else:
             return "File '{}' cannot be read because it is not existed".format(file_path)
+
+    def _read_file(self, file_path: str, branch: str = None, **kwargs) -> str:
+        """
+        Read a file from the repository with optional partial read support.
+
+        Parameters:
+            file_path: the file path (relative to repo root)
+            branch: branch name (not used for local git, always reads from working dir)
+            **kwargs: Additional parameters (offset, limit, head, tail) - currently ignored,
+                      partial read handled client-side by base class methods
+
+        Returns:
+            File content as string
+        """
+        return self.read_file(file_path)
+
+    def _write_file(
+        self,
+        file_path: str,
+        content: str,
+        branch: str = None,
+        commit_message: str = None
+    ) -> str:
+        """
+        Write content to a file (create or update).
+
+        Parameters:
+            file_path: Path to the file (relative to repo root)
+            content: New file content
+            branch: Branch name (not used for local git)
+            commit_message: Commit message (not used - files are written without commit)
+
+        Returns:
+            Success message
+        """
+        try:
+            full_path = os.path.normpath(os.path.join(self.repo.working_dir, file_path))
+
+            # Ensure directory exists
+            os.makedirs(os.path.dirname(full_path), exist_ok=True)
+
+            # Write the file
+            with open(full_path, 'w') as f:
+                f.write(content)
+
+            # Determine if file was created or updated
+            if os.path.exists(full_path):
+                return f"Updated file {file_path}"
+            else:
+                return f"Created file {file_path}"
+        except Exception as e:
+            raise ToolException(f"Unable to write file {file_path}: {str(e)}")
 
     def update_file_content_by_lines(self, file_path: str, start_line_index: int, end_line_index: int,
                                      new_content: str) -> str:
@@ -314,7 +315,7 @@ class LocalGit(BaseToolApiWrapper):
         file_path = os.path.normpath(os.path.join(self.repo.working_dir, file_path))
         file_content = self.read_file(file_path)
         updated_file_content = file_content
-        for old, new in self.extract_old_new_pairs(file_query):
+        for old, new in parse_old_new_markers(file_query):  # Use shared utility
             if not old.strip():
                 continue
             updated_file_content = updated_file_content.replace(old, new)
@@ -332,6 +333,7 @@ class LocalGit(BaseToolApiWrapper):
         except Exception as e:
             return "Unable to update file due to error:\n" + str(e)
 
+    @extend_with_file_operations
     def get_available_tools(self):
         return [
             {
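
LocalGit's inline `extract_old_new_pairs` is removed in favor of the shared `parse_old_new_markers` from `alita_sdk/tools/utils/text_operations.py`. Based on the removed implementation, the edit format wraps original text in `OLD <<< ... >>>> OLD` blocks and replacement text in `NEW <<< ... >>>> NEW` blocks. The shared utility's exact signature is not shown in this diff, so the sketch below is a minimal local re-implementation that only illustrates the marker format:

def parse_old_new_pairs(file_query: str) -> list[tuple[str, str]]:
    """Collect (old, new) pairs from OLD <<< ... >>>> OLD / NEW <<< ... >>>> NEW blocks."""
    old_contents, new_contents, current = [], [], []
    in_old = in_new = False
    for line in file_query.split("\n"):
        if "OLD <<<" in line:
            in_old, current = True, []
        elif ">>>> OLD" in line:
            in_old = False
            old_contents.append("\n".join(current).strip())
        elif "NEW <<<" in line:
            in_new, current = True, []
        elif ">>>> NEW" in line:
            in_new = False
            new_contents.append("\n".join(current).strip())
        elif in_old or in_new:
            current.append(line)
    return list(zip(old_contents, new_contents))


query = "OLD <<<\nprint('hi')\n>>>> OLD\nNEW <<<\nprint('hello')\n>>>> NEW"
print(parse_old_new_pairs(query))  # [("print('hi')", "print('hello')")]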
alita_sdk/tools/memory/__init__.py
@@ -61,7 +61,7 @@ class MemoryToolkit(BaseToolkit):
 
         return create_model(
             'memory',
-            namespace=(str, Field(description="Memory namespace", json_schema_extra={'toolkit_name': True})),
+            namespace=(str, Field(description="Memory namespace")),
             pgvector_configuration=(PgVectorConfiguration, Field(description="PgVector Configuration",
                                                                  json_schema_extra={
                                                                      'configuration_types': ['pgvector']})),
alita_sdk/tools/non_code_indexer_toolkit.py
@@ -6,11 +6,12 @@ from alita_sdk.tools.base_indexer_toolkit import BaseIndexerToolkit
 
 
 class NonCodeIndexerToolkit(BaseIndexerToolkit):
-    def _get_indexed_data(self, collection_suffix: str):
+    def _get_indexed_data(self, index_name: str):
+        self._ensure_vectorstore_initialized()
         if not self.vector_adapter:
             raise ToolException("Vector adapter is not initialized. "
                                 "Check your configuration: embedding_model and vectorstore_type.")
-        return self.vector_adapter.get_indexed_data(self, collection_suffix)
+        return self.vector_adapter.get_indexed_data(self, index_name)
 
     def key_fn(self, document: Document):
         return document.metadata.get('id')