alita-sdk 0.3.379__py3-none-any.whl → 0.3.627__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (278)
  1. alita_sdk/cli/__init__.py +10 -0
  2. alita_sdk/cli/__main__.py +17 -0
  3. alita_sdk/cli/agent/__init__.py +5 -0
  4. alita_sdk/cli/agent/default.py +258 -0
  5. alita_sdk/cli/agent_executor.py +156 -0
  6. alita_sdk/cli/agent_loader.py +245 -0
  7. alita_sdk/cli/agent_ui.py +228 -0
  8. alita_sdk/cli/agents.py +3113 -0
  9. alita_sdk/cli/callbacks.py +647 -0
  10. alita_sdk/cli/cli.py +168 -0
  11. alita_sdk/cli/config.py +306 -0
  12. alita_sdk/cli/context/__init__.py +30 -0
  13. alita_sdk/cli/context/cleanup.py +198 -0
  14. alita_sdk/cli/context/manager.py +731 -0
  15. alita_sdk/cli/context/message.py +285 -0
  16. alita_sdk/cli/context/strategies.py +289 -0
  17. alita_sdk/cli/context/token_estimation.py +127 -0
  18. alita_sdk/cli/formatting.py +182 -0
  19. alita_sdk/cli/input_handler.py +419 -0
  20. alita_sdk/cli/inventory.py +1073 -0
  21. alita_sdk/cli/mcp_loader.py +315 -0
  22. alita_sdk/cli/testcases/__init__.py +94 -0
  23. alita_sdk/cli/testcases/data_generation.py +119 -0
  24. alita_sdk/cli/testcases/discovery.py +96 -0
  25. alita_sdk/cli/testcases/executor.py +84 -0
  26. alita_sdk/cli/testcases/logger.py +85 -0
  27. alita_sdk/cli/testcases/parser.py +172 -0
  28. alita_sdk/cli/testcases/prompts.py +91 -0
  29. alita_sdk/cli/testcases/reporting.py +125 -0
  30. alita_sdk/cli/testcases/setup.py +108 -0
  31. alita_sdk/cli/testcases/test_runner.py +282 -0
  32. alita_sdk/cli/testcases/utils.py +39 -0
  33. alita_sdk/cli/testcases/validation.py +90 -0
  34. alita_sdk/cli/testcases/workflow.py +196 -0
  35. alita_sdk/cli/toolkit.py +327 -0
  36. alita_sdk/cli/toolkit_loader.py +85 -0
  37. alita_sdk/cli/tools/__init__.py +43 -0
  38. alita_sdk/cli/tools/approval.py +224 -0
  39. alita_sdk/cli/tools/filesystem.py +1751 -0
  40. alita_sdk/cli/tools/planning.py +389 -0
  41. alita_sdk/cli/tools/terminal.py +414 -0
  42. alita_sdk/community/__init__.py +72 -12
  43. alita_sdk/community/inventory/__init__.py +236 -0
  44. alita_sdk/community/inventory/config.py +257 -0
  45. alita_sdk/community/inventory/enrichment.py +2137 -0
  46. alita_sdk/community/inventory/extractors.py +1469 -0
  47. alita_sdk/community/inventory/ingestion.py +3172 -0
  48. alita_sdk/community/inventory/knowledge_graph.py +1457 -0
  49. alita_sdk/community/inventory/parsers/__init__.py +218 -0
  50. alita_sdk/community/inventory/parsers/base.py +295 -0
  51. alita_sdk/community/inventory/parsers/csharp_parser.py +907 -0
  52. alita_sdk/community/inventory/parsers/go_parser.py +851 -0
  53. alita_sdk/community/inventory/parsers/html_parser.py +389 -0
  54. alita_sdk/community/inventory/parsers/java_parser.py +593 -0
  55. alita_sdk/community/inventory/parsers/javascript_parser.py +629 -0
  56. alita_sdk/community/inventory/parsers/kotlin_parser.py +768 -0
  57. alita_sdk/community/inventory/parsers/markdown_parser.py +362 -0
  58. alita_sdk/community/inventory/parsers/python_parser.py +604 -0
  59. alita_sdk/community/inventory/parsers/rust_parser.py +858 -0
  60. alita_sdk/community/inventory/parsers/swift_parser.py +832 -0
  61. alita_sdk/community/inventory/parsers/text_parser.py +322 -0
  62. alita_sdk/community/inventory/parsers/yaml_parser.py +370 -0
  63. alita_sdk/community/inventory/patterns/__init__.py +61 -0
  64. alita_sdk/community/inventory/patterns/ast_adapter.py +380 -0
  65. alita_sdk/community/inventory/patterns/loader.py +348 -0
  66. alita_sdk/community/inventory/patterns/registry.py +198 -0
  67. alita_sdk/community/inventory/presets.py +535 -0
  68. alita_sdk/community/inventory/retrieval.py +1403 -0
  69. alita_sdk/community/inventory/toolkit.py +173 -0
  70. alita_sdk/community/inventory/toolkit_utils.py +176 -0
  71. alita_sdk/community/inventory/visualize.py +1370 -0
  72. alita_sdk/configurations/__init__.py +1 -1
  73. alita_sdk/configurations/ado.py +141 -20
  74. alita_sdk/configurations/bitbucket.py +94 -2
  75. alita_sdk/configurations/confluence.py +130 -1
  76. alita_sdk/configurations/figma.py +76 -0
  77. alita_sdk/configurations/gitlab.py +91 -0
  78. alita_sdk/configurations/jira.py +103 -0
  79. alita_sdk/configurations/openapi.py +329 -0
  80. alita_sdk/configurations/qtest.py +72 -1
  81. alita_sdk/configurations/report_portal.py +96 -0
  82. alita_sdk/configurations/sharepoint.py +148 -0
  83. alita_sdk/configurations/testio.py +83 -0
  84. alita_sdk/configurations/testrail.py +88 -0
  85. alita_sdk/configurations/xray.py +93 -0
  86. alita_sdk/configurations/zephyr_enterprise.py +93 -0
  87. alita_sdk/configurations/zephyr_essential.py +75 -0
  88. alita_sdk/runtime/clients/artifact.py +3 -3
  89. alita_sdk/runtime/clients/client.py +388 -46
  90. alita_sdk/runtime/clients/mcp_discovery.py +342 -0
  91. alita_sdk/runtime/clients/mcp_manager.py +262 -0
  92. alita_sdk/runtime/clients/sandbox_client.py +8 -21
  93. alita_sdk/runtime/langchain/_constants_bkup.py +1318 -0
  94. alita_sdk/runtime/langchain/assistant.py +157 -39
  95. alita_sdk/runtime/langchain/constants.py +647 -1
  96. alita_sdk/runtime/langchain/document_loaders/AlitaDocxMammothLoader.py +315 -3
  97. alita_sdk/runtime/langchain/document_loaders/AlitaExcelLoader.py +103 -60
  98. alita_sdk/runtime/langchain/document_loaders/AlitaJSONLinesLoader.py +77 -0
  99. alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +10 -4
  100. alita_sdk/runtime/langchain/document_loaders/AlitaPowerPointLoader.py +226 -7
  101. alita_sdk/runtime/langchain/document_loaders/AlitaTextLoader.py +5 -2
  102. alita_sdk/runtime/langchain/document_loaders/constants.py +40 -19
  103. alita_sdk/runtime/langchain/langraph_agent.py +405 -84
  104. alita_sdk/runtime/langchain/utils.py +106 -7
  105. alita_sdk/runtime/llms/preloaded.py +2 -6
  106. alita_sdk/runtime/models/mcp_models.py +61 -0
  107. alita_sdk/runtime/skills/__init__.py +91 -0
  108. alita_sdk/runtime/skills/callbacks.py +498 -0
  109. alita_sdk/runtime/skills/discovery.py +540 -0
  110. alita_sdk/runtime/skills/executor.py +610 -0
  111. alita_sdk/runtime/skills/input_builder.py +371 -0
  112. alita_sdk/runtime/skills/models.py +330 -0
  113. alita_sdk/runtime/skills/registry.py +355 -0
  114. alita_sdk/runtime/skills/skill_runner.py +330 -0
  115. alita_sdk/runtime/toolkits/__init__.py +31 -0
  116. alita_sdk/runtime/toolkits/application.py +29 -10
  117. alita_sdk/runtime/toolkits/artifact.py +20 -11
  118. alita_sdk/runtime/toolkits/datasource.py +13 -6
  119. alita_sdk/runtime/toolkits/mcp.py +783 -0
  120. alita_sdk/runtime/toolkits/mcp_config.py +1048 -0
  121. alita_sdk/runtime/toolkits/planning.py +178 -0
  122. alita_sdk/runtime/toolkits/skill_router.py +238 -0
  123. alita_sdk/runtime/toolkits/subgraph.py +251 -6
  124. alita_sdk/runtime/toolkits/tools.py +356 -69
  125. alita_sdk/runtime/toolkits/vectorstore.py +11 -5
  126. alita_sdk/runtime/tools/__init__.py +10 -3
  127. alita_sdk/runtime/tools/application.py +27 -6
  128. alita_sdk/runtime/tools/artifact.py +511 -28
  129. alita_sdk/runtime/tools/data_analysis.py +183 -0
  130. alita_sdk/runtime/tools/function.py +67 -35
  131. alita_sdk/runtime/tools/graph.py +10 -4
  132. alita_sdk/runtime/tools/image_generation.py +148 -46
  133. alita_sdk/runtime/tools/llm.py +1003 -128
  134. alita_sdk/runtime/tools/loop.py +3 -1
  135. alita_sdk/runtime/tools/loop_output.py +3 -1
  136. alita_sdk/runtime/tools/mcp_inspect_tool.py +284 -0
  137. alita_sdk/runtime/tools/mcp_remote_tool.py +181 -0
  138. alita_sdk/runtime/tools/mcp_server_tool.py +8 -5
  139. alita_sdk/runtime/tools/planning/__init__.py +36 -0
  140. alita_sdk/runtime/tools/planning/models.py +246 -0
  141. alita_sdk/runtime/tools/planning/wrapper.py +607 -0
  142. alita_sdk/runtime/tools/router.py +2 -4
  143. alita_sdk/runtime/tools/sandbox.py +65 -48
  144. alita_sdk/runtime/tools/skill_router.py +776 -0
  145. alita_sdk/runtime/tools/tool.py +3 -1
  146. alita_sdk/runtime/tools/vectorstore.py +9 -3
  147. alita_sdk/runtime/tools/vectorstore_base.py +70 -14
  148. alita_sdk/runtime/utils/AlitaCallback.py +137 -21
  149. alita_sdk/runtime/utils/constants.py +5 -1
  150. alita_sdk/runtime/utils/mcp_client.py +492 -0
  151. alita_sdk/runtime/utils/mcp_oauth.py +361 -0
  152. alita_sdk/runtime/utils/mcp_sse_client.py +434 -0
  153. alita_sdk/runtime/utils/mcp_tools_discovery.py +124 -0
  154. alita_sdk/runtime/utils/serialization.py +155 -0
  155. alita_sdk/runtime/utils/streamlit.py +40 -13
  156. alita_sdk/runtime/utils/toolkit_utils.py +30 -9
  157. alita_sdk/runtime/utils/utils.py +36 -0
  158. alita_sdk/tools/__init__.py +134 -35
  159. alita_sdk/tools/ado/repos/__init__.py +51 -32
  160. alita_sdk/tools/ado/repos/repos_wrapper.py +148 -89
  161. alita_sdk/tools/ado/test_plan/__init__.py +25 -9
  162. alita_sdk/tools/ado/test_plan/test_plan_wrapper.py +23 -1
  163. alita_sdk/tools/ado/utils.py +1 -18
  164. alita_sdk/tools/ado/wiki/__init__.py +25 -12
  165. alita_sdk/tools/ado/wiki/ado_wrapper.py +291 -22
  166. alita_sdk/tools/ado/work_item/__init__.py +26 -13
  167. alita_sdk/tools/ado/work_item/ado_wrapper.py +73 -11
  168. alita_sdk/tools/advanced_jira_mining/__init__.py +11 -8
  169. alita_sdk/tools/aws/delta_lake/__init__.py +13 -9
  170. alita_sdk/tools/aws/delta_lake/tool.py +5 -1
  171. alita_sdk/tools/azure_ai/search/__init__.py +11 -8
  172. alita_sdk/tools/azure_ai/search/api_wrapper.py +1 -1
  173. alita_sdk/tools/base/tool.py +5 -1
  174. alita_sdk/tools/base_indexer_toolkit.py +271 -84
  175. alita_sdk/tools/bitbucket/__init__.py +17 -11
  176. alita_sdk/tools/bitbucket/api_wrapper.py +59 -11
  177. alita_sdk/tools/bitbucket/cloud_api_wrapper.py +49 -35
  178. alita_sdk/tools/browser/__init__.py +5 -4
  179. alita_sdk/tools/carrier/__init__.py +5 -6
  180. alita_sdk/tools/carrier/backend_reports_tool.py +6 -6
  181. alita_sdk/tools/carrier/run_ui_test_tool.py +6 -6
  182. alita_sdk/tools/carrier/ui_reports_tool.py +5 -5
  183. alita_sdk/tools/chunkers/__init__.py +3 -1
  184. alita_sdk/tools/chunkers/code/treesitter/treesitter.py +37 -13
  185. alita_sdk/tools/chunkers/sematic/json_chunker.py +1 -0
  186. alita_sdk/tools/chunkers/sematic/markdown_chunker.py +97 -6
  187. alita_sdk/tools/chunkers/sematic/proposal_chunker.py +1 -1
  188. alita_sdk/tools/chunkers/universal_chunker.py +270 -0
  189. alita_sdk/tools/cloud/aws/__init__.py +10 -7
  190. alita_sdk/tools/cloud/azure/__init__.py +10 -7
  191. alita_sdk/tools/cloud/gcp/__init__.py +10 -7
  192. alita_sdk/tools/cloud/k8s/__init__.py +10 -7
  193. alita_sdk/tools/code/linter/__init__.py +10 -8
  194. alita_sdk/tools/code/loaders/codesearcher.py +3 -2
  195. alita_sdk/tools/code/sonar/__init__.py +11 -8
  196. alita_sdk/tools/code_indexer_toolkit.py +82 -22
  197. alita_sdk/tools/confluence/__init__.py +22 -16
  198. alita_sdk/tools/confluence/api_wrapper.py +107 -30
  199. alita_sdk/tools/confluence/loader.py +14 -2
  200. alita_sdk/tools/custom_open_api/__init__.py +12 -5
  201. alita_sdk/tools/elastic/__init__.py +11 -8
  202. alita_sdk/tools/elitea_base.py +493 -30
  203. alita_sdk/tools/figma/__init__.py +58 -11
  204. alita_sdk/tools/figma/api_wrapper.py +1235 -143
  205. alita_sdk/tools/figma/figma_client.py +73 -0
  206. alita_sdk/tools/figma/toon_tools.py +2748 -0
  207. alita_sdk/tools/github/__init__.py +14 -15
  208. alita_sdk/tools/github/github_client.py +224 -100
  209. alita_sdk/tools/github/graphql_client_wrapper.py +119 -33
  210. alita_sdk/tools/github/schemas.py +14 -5
  211. alita_sdk/tools/github/tool.py +5 -1
  212. alita_sdk/tools/github/tool_prompts.py +9 -22
  213. alita_sdk/tools/gitlab/__init__.py +16 -11
  214. alita_sdk/tools/gitlab/api_wrapper.py +218 -48
  215. alita_sdk/tools/gitlab_org/__init__.py +10 -9
  216. alita_sdk/tools/gitlab_org/api_wrapper.py +63 -64
  217. alita_sdk/tools/google/bigquery/__init__.py +13 -12
  218. alita_sdk/tools/google/bigquery/tool.py +5 -1
  219. alita_sdk/tools/google_places/__init__.py +11 -8
  220. alita_sdk/tools/google_places/api_wrapper.py +1 -1
  221. alita_sdk/tools/jira/__init__.py +17 -10
  222. alita_sdk/tools/jira/api_wrapper.py +92 -41
  223. alita_sdk/tools/keycloak/__init__.py +11 -8
  224. alita_sdk/tools/localgit/__init__.py +9 -3
  225. alita_sdk/tools/localgit/local_git.py +62 -54
  226. alita_sdk/tools/localgit/tool.py +5 -1
  227. alita_sdk/tools/memory/__init__.py +12 -4
  228. alita_sdk/tools/non_code_indexer_toolkit.py +1 -0
  229. alita_sdk/tools/ocr/__init__.py +11 -8
  230. alita_sdk/tools/openapi/__init__.py +491 -106
  231. alita_sdk/tools/openapi/api_wrapper.py +1368 -0
  232. alita_sdk/tools/openapi/tool.py +20 -0
  233. alita_sdk/tools/pandas/__init__.py +20 -12
  234. alita_sdk/tools/pandas/api_wrapper.py +38 -25
  235. alita_sdk/tools/pandas/dataframe/generator/base.py +3 -1
  236. alita_sdk/tools/postman/__init__.py +10 -9
  237. alita_sdk/tools/pptx/__init__.py +11 -10
  238. alita_sdk/tools/pptx/pptx_wrapper.py +1 -1
  239. alita_sdk/tools/qtest/__init__.py +31 -11
  240. alita_sdk/tools/qtest/api_wrapper.py +2135 -86
  241. alita_sdk/tools/rally/__init__.py +10 -9
  242. alita_sdk/tools/rally/api_wrapper.py +1 -1
  243. alita_sdk/tools/report_portal/__init__.py +12 -8
  244. alita_sdk/tools/salesforce/__init__.py +10 -8
  245. alita_sdk/tools/servicenow/__init__.py +17 -15
  246. alita_sdk/tools/servicenow/api_wrapper.py +1 -1
  247. alita_sdk/tools/sharepoint/__init__.py +10 -7
  248. alita_sdk/tools/sharepoint/api_wrapper.py +129 -38
  249. alita_sdk/tools/sharepoint/authorization_helper.py +191 -1
  250. alita_sdk/tools/sharepoint/utils.py +8 -2
  251. alita_sdk/tools/slack/__init__.py +10 -7
  252. alita_sdk/tools/slack/api_wrapper.py +2 -2
  253. alita_sdk/tools/sql/__init__.py +12 -9
  254. alita_sdk/tools/testio/__init__.py +10 -7
  255. alita_sdk/tools/testrail/__init__.py +11 -10
  256. alita_sdk/tools/testrail/api_wrapper.py +1 -1
  257. alita_sdk/tools/utils/__init__.py +9 -4
  258. alita_sdk/tools/utils/content_parser.py +103 -18
  259. alita_sdk/tools/utils/text_operations.py +410 -0
  260. alita_sdk/tools/utils/tool_prompts.py +79 -0
  261. alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +30 -13
  262. alita_sdk/tools/xray/__init__.py +13 -9
  263. alita_sdk/tools/yagmail/__init__.py +9 -3
  264. alita_sdk/tools/zephyr/__init__.py +10 -7
  265. alita_sdk/tools/zephyr_enterprise/__init__.py +11 -7
  266. alita_sdk/tools/zephyr_essential/__init__.py +10 -7
  267. alita_sdk/tools/zephyr_essential/api_wrapper.py +30 -13
  268. alita_sdk/tools/zephyr_essential/client.py +2 -2
  269. alita_sdk/tools/zephyr_scale/__init__.py +11 -8
  270. alita_sdk/tools/zephyr_scale/api_wrapper.py +2 -2
  271. alita_sdk/tools/zephyr_squad/__init__.py +10 -7
  272. {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/METADATA +154 -8
  273. alita_sdk-0.3.627.dist-info/RECORD +468 -0
  274. alita_sdk-0.3.627.dist-info/entry_points.txt +2 -0
  275. alita_sdk-0.3.379.dist-info/RECORD +0 -360
  276. {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/WHEEL +0 -0
  277. {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/licenses/LICENSE +0 -0
  278. {alita_sdk-0.3.379.dist-info → alita_sdk-0.3.627.dist-info}/top_level.txt +0 -0
alita_sdk/tools/base_indexer_toolkit.py

@@ -2,12 +2,13 @@ import copy
 import json
 import logging
 import time
+from enum import Enum
 from typing import Any, Optional, List, Dict, Generator

+from langchain_core.callbacks import dispatch_custom_event
 from langchain_core.documents import Document
 from pydantic import create_model, Field, SecretStr

-from .utils import make_json_serializable
 from .utils.content_parser import file_extension_by_chunker, process_document_by_type
 from .vector_adapters.VectorStoreAdapter import VectorStoreAdapterFactory
 from ..runtime.langchain.document_loaders.constants import loaders_allowed_to_override
@@ -16,11 +17,17 @@ from ..runtime.utils.utils import IndexerKeywords

 logger = logging.getLogger(__name__)

-# Base Vector Store Schema Models
-BaseIndexParams = create_model(
-    "BaseIndexParams",
-    index_name=(str, Field(description="Index name (max 7 characters)", min_length=1, max_length=7)),
-)
+DEFAULT_CUT_OFF = 0.1
+INDEX_META_UPDATE_INTERVAL = 600.0
+
+class IndexTools(str, Enum):
+    """Enum for index-related tool names."""
+    INDEX_DATA = "index_data"
+    SEARCH_INDEX = "search_index"
+    STEPBACK_SEARCH_INDEX = "stepback_search_index"
+    STEPBACK_SUMMARY_INDEX = "stepback_summary_index"
+    REMOVE_INDEX = "remove_index"
+    LIST_COLLECTIONS = "list_collections"

 RemoveIndexParams = create_model(
     "RemoveIndexParams",
@@ -38,8 +45,8 @@ BaseSearchParams = create_model(
         default={},
         examples=["{\"key\": \"value\"}", "{\"status\": \"active\"}"]
     )),
-    cut_off=(Optional[float], Field(description="Cut-off score for search results", default=0.5, ge=0, le=1)),
-    search_top=(Optional[int], Field(description="Number of top results to return", default=10)),
+    cut_off=(Optional[float], Field(description="Cut-off score for search results", default=DEFAULT_CUT_OFF, ge=0, le=1)),
+    search_top=(Optional[int], Field(description="Number of top results to return", default=10, gt=0)),
     full_text_search=(Optional[Dict[str, Any]], Field(
         description="Full text search parameters. Can be a dictionary with search options.",
         default=None
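
Note: the two hunks above replace hard-coded literals with module-level constants (DEFAULT_CUT_OFF = 0.1, INDEX_META_UPDATE_INTERVAL = 600.0) and introduce the IndexTools string enum for tool names. A minimal standalone sketch (not the package's code) of why a str-based enum keeps the previously hard-coded tool-name strings working unchanged later in this diff:

from enum import Enum

class IndexTools(str, Enum):
    INDEX_DATA = "index_data"
    SEARCH_INDEX = "search_index"

# Members of a str-subclassing Enum compare equal to the plain literals,
# so callers that still match on "index_data" are unaffected, while the
# tool definitions can reference IndexTools.INDEX_DATA.value explicitly.
assert IndexTools.INDEX_DATA == "index_data"
assert IndexTools.INDEX_DATA.value == "index_data"
assert IndexTools.SEARCH_INDEX.value == "search_index"
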
@@ -68,8 +75,8 @@ BaseStepbackSearchParams = create_model(
         default={},
         examples=["{\"key\": \"value\"}", "{\"status\": \"active\"}"]
     )),
-    cut_off=(Optional[float], Field(description="Cut-off score for search results", default=0.5, ge=0, le=1)),
-    search_top=(Optional[int], Field(description="Number of top results to return", default=10)),
+    cut_off=(Optional[float], Field(description="Cut-off score for search results", default=DEFAULT_CUT_OFF, ge=0, le=1)),
+    search_top=(Optional[int], Field(description="Number of top results to return", default=10, gt=0)),
     full_text_search=(Optional[Dict[str, Any]], Field(
         description="Full text search parameters. Can be a dictionary with search options.",
         default=None
@@ -88,16 +95,6 @@ BaseStepbackSearchParams = create_model(
     )),
 )

-BaseIndexDataParams = create_model(
-    "indexData",
-    __base__=BaseIndexParams,
-    clean_index=(Optional[bool], Field(default=False,
-                                       description="Optional flag to enforce clean existing index before indexing new data")),
-    progress_step=(Optional[int], Field(default=10, ge=0, le=100,
-                                        description="Optional step size for progress reporting during indexing")),
-    chunking_config=(Optional[dict], Field(description="Chunking tool configuration", default=loaders_allowed_to_override)),
-)
-

 class BaseIndexerToolkit(VectorStoreWrapperBase):
     """Base class for tool API wrappers that support vector store functionality."""
@@ -111,7 +108,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
     def __init__(self, **kwargs):
         conn = kwargs.get('connection_string', None)
         connection_string = conn.get_secret_value() if isinstance(conn, SecretStr) else conn
-        collection_name = kwargs.get('collection_name')
+        collection_name = kwargs.get('collection_schema')

         if 'vectorstore_type' not in kwargs:
             kwargs['vectorstore_type'] = 'PGVector'
@@ -155,6 +152,16 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
         clean_index = kwargs.get("clean_index")
         chunking_tool = kwargs.get("chunking_tool")
         chunking_config = kwargs.get("chunking_config")
+
+        # Store the interval in a private dict to avoid Pydantic field errors
+        if not hasattr(self, "_index_meta_config"):
+            self._index_meta_config: Dict[str, Any] = {}
+
+        self._index_meta_config["update_interval"] = kwargs.get(
+            "meta_update_interval",
+            INDEX_META_UPDATE_INTERVAL,
+        )
+
         result = {"count": 0}
         #
         try:
@@ -162,6 +169,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
             self._clean_index(index_name)
             #
             self.index_meta_init(index_name, kwargs)
+            self._emit_index_event(index_name)
             #
             self._log_tool_event(f"Indexing data into collection with suffix '{index_name}'. It can take some time...")
             self._log_tool_event(f"Loading the documents to index...{kwargs}")
@@ -176,15 +184,27 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
                                  f"Processing documents to collect dependencies and prepare them for indexing...")
             self._save_index_generator(documents, documents_count, chunking_tool, chunking_config, index_name=index_name, result=result)
             #
-            self.index_meta_update(index_name, IndexerKeywords.INDEX_META_COMPLETED.value, result["count"])
+            results_count = result["count"]
+            # Final update should always be forced
+            self.index_meta_update(index_name, IndexerKeywords.INDEX_META_COMPLETED.value, results_count, update_force=True, error=None)
+            self._emit_index_event(index_name)
             #
-            return {"status": "ok", "message": f"successfully indexed {result["count"]} documents"}
+            return {"status": "ok", "message": f"successfully indexed {results_count} documents" if results_count > 0
+                    else "no new documents to index"}
         except Exception as e:
-            self.index_meta_update(index_name, IndexerKeywords.INDEX_META_FAILED.value, result["count"])
+            # Do maximum effort at least send custom event for supposed changed status
+            msg = str(e)
+            try:
+                # Error update should also be forced and include the error message
+                self.index_meta_update(index_name, IndexerKeywords.INDEX_META_FAILED.value, result["count"], update_force=True, error=msg)
+            except Exception as ie:
+                logger.error(f"Failed to update index meta status to FAILED for index '{index_name}': {ie}")
+                msg = f"{msg}; additionally failed to update index meta status to FAILED: {ie}"
+            self._emit_index_event(index_name, error=msg)
             raise e
-

     def _save_index_generator(self, base_documents: Generator[Document, None, None], base_total: int, chunking_tool, chunking_config, result, index_name: Optional[str] = None):
+        self._ensure_vectorstore_initialized()
         self._log_tool_event(f"Base documents are ready for indexing. {base_total} base documents in total to index.")
         from ..runtime.langchain.interfaces.llm_processor import add_documents
         #
@@ -200,7 +220,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
             self._log_tool_event(f"Dependent documents were processed. "
                                  f"Applying chunking tool '{chunking_tool}' if specified and preparing documents for indexing...")
             documents = self._apply_loaders_chunkers(documents, chunking_tool, chunking_config)
-            self._clean_metadata(documents)
+            documents = self._clean_metadata(documents)

             logger.debug(f"Indexing base document #{base_doc_counter}: {base_doc} and all dependent documents: {documents}")

@@ -237,6 +257,11 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
             logger.debug(msg)
             self._log_tool_event(msg)
             result["count"] += dependent_docs_counter
+            # After each base document, try a non-forced meta update; throttling handled inside index_meta_update
+            try:
+                self.index_meta_update(index_name, IndexerKeywords.INDEX_META_IN_PROGRESS.value, result["count"], update_force=False)
+            except Exception as exc:  # best-effort, do not break indexing
+                logger.warning(f"Failed to update index meta during indexing process for index '{index_name}': {exc}")
             if pg_vector_add_docs_chunk:
                 add_documents(vectorstore=self.vectorstore, documents=pg_vector_add_docs_chunk)

@@ -302,6 +327,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
                          log_msg: str = "Verification of documents to index started"
                          ) -> Generator[Document, None, None]:
         """Generic duplicate reduction logic for documents."""
+        self._ensure_vectorstore_initialized()
         self._log_tool_event(log_msg, tool_name="index_documents")
         indexed_data = self._get_indexed_data(index_name)
         indexed_keys = set(indexed_data.keys())
@@ -344,7 +370,8 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):

     def remove_index(self, index_name: str = ""):
         """Cleans the indexed data in the collection."""
-        super()._clean_collection(index_name=index_name)
+        super()._clean_collection(index_name=index_name, including_index_meta=True)
+        self._emit_index_data_removed_event(index_name)
         return (f"Collection '{index_name}' has been removed from the vector store.\n"
                 f"Available collections: {self.list_collections()}") if index_name \
             else "All collections have been removed from the vector store."
@@ -379,7 +406,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
     def search_index(self,
                      query: str,
                      index_name: str = "",
-                     filter: dict | str = {}, cut_off: float = 0.5,
+                     filter: dict | str = {}, cut_off: float = DEFAULT_CUT_OFF,
                      search_top: int = 10, reranker: dict = {},
                      full_text_search: Optional[Dict[str, Any]] = None,
                      reranking_config: Optional[Dict[str, Dict[str, Any]]] = None,
@@ -410,7 +437,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
                               query: str,
                               messages: List[Dict[str, Any]] = [],
                               index_name: str = "",
-                              filter: dict | str = {}, cut_off: float = 0.5,
+                              filter: dict | str = {}, cut_off: float = DEFAULT_CUT_OFF,
                               search_top: int = 10, reranker: dict = {},
                               full_text_search: Optional[Dict[str, Any]] = None,
                               reranking_config: Optional[Dict[str, Dict[str, Any]]] = None,
@@ -435,7 +462,7 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
                               query: str,
                               messages: List[Dict[str, Any]] = [],
                               index_name: str = "",
-                              filter: dict | str = {}, cut_off: float = 0.5,
+                              filter: dict | str = {}, cut_off: float = DEFAULT_CUT_OFF,
                               search_top: int = 10, reranker: dict = {},
                               full_text_search: Optional[Dict[str, Any]] = None,
                               reranking_config: Optional[Dict[str, Dict[str, Any]]] = None,
@@ -457,103 +484,263 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
         )

     def index_meta_init(self, index_name: str, index_configuration: dict[str, Any]):
-        index_meta_raw = super().get_index_meta(index_name)
-        from ..runtime.langchain.interfaces.llm_processor import add_documents
-        created_on = time.time()
-        metadata = {
-            "collection": index_name,
-            "type": IndexerKeywords.INDEX_META_TYPE.value,
-            "indexed": 0,
-            "state": IndexerKeywords.INDEX_META_IN_PROGRESS.value,
-            "index_configuration": index_configuration,
-            "created_on": created_on,
-            "updated_on": created_on,
-            "history": "[]",
-        }
-        index_meta_ids = None
-        #
-        if index_meta_raw:
-            history_raw = index_meta_raw.get("metadata", {}).get("history", "[]")
-            if isinstance(history_raw, str) and history_raw.strip():
-                try:
-                    history = json.loads(history_raw)
-                except (json.JSONDecodeError, TypeError):
-                    history = []
-            else:
-                history = []
-            new_history_item = {k: v for k, v in index_meta_raw.get("metadata", {}).items() if k != "history"}
-            history.append(new_history_item)
-            metadata["history"] = json.dumps(history)
-            index_meta_ids = [index_meta_raw.get("id")]
-        #
-        index_meta_doc = Document(page_content=f"{IndexerKeywords.INDEX_META_TYPE.value}_{index_name}", metadata=metadata)
-        add_documents(vectorstore=self.vectorstore, documents=[index_meta_doc], ids=index_meta_ids)
+        self._ensure_vectorstore_initialized()
+        index_meta = super().get_index_meta(index_name)
+        if not index_meta:
+            self._log_tool_event(
+                f"There is no existing index_meta for collection '{index_name}'. Initializing it.",
+                tool_name="index_data"
+            )
+            from ..runtime.langchain.interfaces.llm_processor import add_documents
+            created_on = time.time()
+            metadata = {
+                "collection": index_name,
+                "type": IndexerKeywords.INDEX_META_TYPE.value,
+                "indexed": 0,
+                "updated": 0,
+                "state": IndexerKeywords.INDEX_META_IN_PROGRESS.value,
+                "index_configuration": index_configuration,
+                "created_on": created_on,
+                "updated_on": created_on,
+                "task_id": None,
+                "conversation_id": None,
+                "toolkit_id": self.toolkit_id,
+                # Initialize error field to keep track of the latest failure reason if any
+                "error": None,
+            }
+            metadata["history"] = json.dumps([metadata])
+            index_meta_doc = Document(page_content=f"{IndexerKeywords.INDEX_META_TYPE.value}_{index_name}", metadata=metadata)
+            add_documents(vectorstore=self.vectorstore, documents=[index_meta_doc])
+
+    def index_meta_update(self, index_name: str, state: str, result: int, update_force: bool = True, interval: Optional[float] = None, error: Optional[str] = None):
+        """Update `index_meta` document with optional time-based throttling.
+
+        Args:
+            index_name: Index name to update meta for.
+            state: New state value for the `index_meta` record.
+            result: Number of processed documents to store in the `updated` field.
+            update_force: If `True`, perform the update unconditionally, ignoring throttling.
+                If `False`, perform the update only when the effective time interval has passed.
+            interval: Optional custom interval (in seconds) for this call when `update_force` is `False`.
+                If `None`, falls back to the value stored in `self._index_meta_config["update_interval"]`
+                if present, otherwise uses `INDEX_META_UPDATE_INTERVAL`.
+            error: Optional error message to record when the state represents a failed index.
+        """
+        self._ensure_vectorstore_initialized()
+        if not hasattr(self, "_index_meta_last_update_time"):
+            self._index_meta_last_update_time: Dict[str, float] = {}
+
+        if not update_force:
+            # Resolve effective interval:
+            # 1) explicit arg
+            # 2) value from `_index_meta_config`
+            # 3) default constant
+            cfg_interval = None
+            if hasattr(self, "_index_meta_config"):
+                cfg_interval = self._index_meta_config.get("update_interval")
+
+            eff_interval = (
+                interval
+                if interval is not None
+                else (cfg_interval if cfg_interval is not None else INDEX_META_UPDATE_INTERVAL)
+            )
+
+            last_time = self._index_meta_last_update_time.get(index_name)
+            now = time.time()
+            if last_time is not None and (now - last_time) < eff_interval:
+                return
+            self._index_meta_last_update_time[index_name] = now
+        else:
+            # For forced updates, always refresh last update time
+            self._index_meta_last_update_time[index_name] = time.time()

-    def index_meta_update(self, index_name: str, state: str, result: int):
         index_meta_raw = super().get_index_meta(index_name)
         from ..runtime.langchain.interfaces.llm_processor import add_documents
         #
         if index_meta_raw:
             metadata = copy.deepcopy(index_meta_raw.get("metadata", {}))
-            metadata["indexed"] = result
+            metadata["indexed"] = self.get_indexed_count(index_name)
+            metadata["updated"] = result
             metadata["state"] = state
             metadata["updated_on"] = time.time()
+            # Attach error if provided, else clear on success
+            if error is not None:
+                metadata["error"] = error
+            elif state == IndexerKeywords.INDEX_META_COMPLETED.value:
+                # Clear previous error on successful completion
+                metadata["error"] = None
+            #
+            history_raw = metadata.pop("history", "[]")
+            try:
+                history = json.loads(history_raw) if history_raw.strip() else []
+                # replace the last history item with updated metadata
+                if history and isinstance(history, list):
+                    history[-1] = metadata
+                else:
+                    history = [metadata]
+            except (json.JSONDecodeError, TypeError):
+                logger.warning(f"Failed to load index history: {history_raw}. Create new with only current item.")
+                history = [metadata]
+            #
+            metadata["history"] = json.dumps(history)
             index_meta_doc = Document(page_content=index_meta_raw.get("content", ""), metadata=metadata)
             add_documents(vectorstore=self.vectorstore, documents=[index_meta_doc], ids=[index_meta_raw.get("id")])

+    def _emit_index_event(self, index_name: str, error: Optional[str] = None):
+        """
+        Emit custom event for index data operation.
+
+        Args:
+            index_name: The name of the index
+            error: Error message if the operation failed, None otherwise
+        """
+        index_meta = super().get_index_meta(index_name)
+
+        if not index_meta:
+            logger.warning(
+                f"No index_meta found for index '{index_name}'. "
+                "Cannot emit index event."
+            )
+            return
+
+        metadata = index_meta.get("metadata", {})
+
+        # Determine if this is a reindex operation
+        history_raw = metadata.get("history", "[]")
+        try:
+            history = json.loads(history_raw) if history_raw.strip() else []
+            is_reindex = len(history) > 1
+        except (json.JSONDecodeError, TypeError):
+            is_reindex = False
+
+        # Build event message
+        event_data = {
+            "id": index_meta.get("id"),
+            "index_name": index_name,
+            "state": "failed" if error is not None else metadata.get("state"),
+            "error": error,
+            "reindex": is_reindex,
+            "indexed": metadata.get("indexed", 0),
+            "updated": metadata.get("updated", 0),
+            "toolkit_id": metadata.get("toolkit_id"),
+        }
+
+        # Emit the event
+        try:
+            dispatch_custom_event("index_data_status", event_data)
+            logger.debug(
+                f"Emitted index_data_status event for index "
+                f"'{index_name}': {event_data}"
+            )
+        except Exception as e:
+            logger.warning(f"Failed to emit index_data_status event: {e}")
+
+    def _emit_index_data_removed_event(self, index_name: str):
+        """
+        Emit custom event for index data removing.
+
+        Args:
+            index_name: The name of the index
+            toolkit_id: The toolkit identifier
+        """
+        # Build event message
+        event_data = {
+            "index_name": index_name,
+            "toolkit_id": self.toolkit_id,
+            "project_id": self.alita.project_id,
+        }
+        # Emit the event
+        try:
+            dispatch_custom_event("index_data_removed", event_data)
+            logger.debug(
+                f"Emitted index_data_removed event for index "
+                f"'{index_name}': {event_data}"
+            )
+        except Exception as e:
+            logger.warning(f"Failed to emit index_data_removed event: {e}")
+
     def get_available_tools(self):
         """
         Returns the standardized vector search tools (search operations only).
         Index operations are toolkit-specific and should be added manually to each toolkit.
-
+
+        This method constructs the argument schemas for each tool, merging base parameters with any extra parameters
+        defined in the subclass. It also handles the special case for chunking tools and their configuration.
+
         Returns:
-            List of tool dictionaries with name, ref, description, and args_schema
+            list: List of tool dictionaries with name, ref, description, and args_schema.
         """
+        index_params = {
+            "index_name": (
+                str,
+                Field(description="Index name (max 7 characters)", min_length=1, max_length=7)
+            ),
+            "clean_index": (
+                Optional[bool],
+                Field(default=False, description="Optional flag to enforce clean existing index before indexing new data")
+            ),
+            "progress_step": (
+                Optional[int],
+                Field(default=10, ge=0, le=100, description="Optional step size for progress reporting during indexing")
+            ),
+        }
+        chunking_config = (
+            Optional[dict],
+            Field(description="Chunking tool configuration", default=loaders_allowed_to_override)
+        )
+
+        index_extra_params = self._index_tool_params() or {}
+        chunking_tool = index_extra_params.pop("chunking_tool", None)
+        if chunking_tool:
+            index_params = {
+                **index_params,
+                "chunking_tool": chunking_tool,
+            }
+            index_params["chunking_config"] = chunking_config
+        index_args_schema = create_model("IndexData", **index_params, **index_extra_params)
+
         return [
             {
-                "name": "index_data",
-                "mode": "index_data",
+                "name": IndexTools.INDEX_DATA.value,
+                "mode": IndexTools.INDEX_DATA.value,
                 "ref": self.index_data,
                 "description": "Loads data to index.",
-                "args_schema": create_model(
-                    "IndexData",
-                    __base__=BaseIndexDataParams,
-                    **self._index_tool_params() if self._index_tool_params() else {}
-                )
+                "args_schema": index_args_schema,
             },
             {
-                "name": "search_index",
-                "mode": "search_index",
+                "name": IndexTools.SEARCH_INDEX.value,
+                "mode": IndexTools.SEARCH_INDEX.value,
                 "ref": self.search_index,
                 "description": self.search_index.__doc__,
                 "args_schema": BaseSearchParams
             },
             {
-                "name": "stepback_search_index",
-                "mode": "stepback_search_index",
+                "name": IndexTools.STEPBACK_SEARCH_INDEX.value,
+                "mode": IndexTools.STEPBACK_SEARCH_INDEX.value,
                 "ref": self.stepback_search_index,
                 "description": self.stepback_search_index.__doc__,
                 "args_schema": BaseStepbackSearchParams
             },
             {
-                "name": "stepback_summary_index",
-                "mode": "stepback_summary_index",
+                "name": IndexTools.STEPBACK_SUMMARY_INDEX.value,
+                "mode": IndexTools.STEPBACK_SUMMARY_INDEX.value,
                 "ref": self.stepback_summary_index,
                 "description": self.stepback_summary_index.__doc__,
                 "args_schema": BaseStepbackSearchParams
             },
             {
-                "name": "remove_index",
-                "mode": "remove_index",
+                "name": IndexTools.REMOVE_INDEX.value,
+                "mode": IndexTools.REMOVE_INDEX.value,
                 "ref": self.remove_index,
                 "description": self.remove_index.__doc__,
                 "args_schema": RemoveIndexParams
             },
             {
-                "name": "list_collections",
-                "mode": "list_collections",
+                "name": IndexTools.LIST_COLLECTIONS.value,
+                "mode": IndexTools.LIST_COLLECTIONS.value,
                 "ref": self.list_collections,
                 "description": self.list_collections.__doc__,
-                "args_schema": create_model("ListCollectionsParams") # No parameters
+                # No parameters
+                "args_schema": create_model("ListCollectionsParams")
             },
-        ]
+        ]
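
Note: the docstring of the new index_meta_update above describes optional time-based throttling of index_meta writes. A minimal standalone sketch of that rule, assuming (as the hunk does) that per-index timestamps live in a plain dict and that the default interval is 600 seconds; this is an illustration, not the package's code:

import time
from typing import Dict, Optional

INDEX_META_UPDATE_INTERVAL = 600.0  # default interval from the diff (10 minutes)

_last_update: Dict[str, float] = {}

def should_write_meta(index_name: str, update_force: bool = True,
                      interval: Optional[float] = None) -> bool:
    """Return True when an index_meta write should actually be performed."""
    if update_force:
        # Forced updates (completion, failure) always go through and refresh the timestamp.
        _last_update[index_name] = time.time()
        return True
    eff_interval = interval if interval is not None else INDEX_META_UPDATE_INTERVAL
    last = _last_update.get(index_name)
    now = time.time()
    if last is not None and (now - last) < eff_interval:
        return False  # throttled: the previous write was too recent
    _last_update[index_name] = now
    return True

During indexing the wrapper issues non-forced updates after each base document (see the @@ -237,6 +257,11 @@ hunk), so at most one progress write lands per interval, while the final COMPLETED/FAILED updates pass update_force=True and are never skipped.
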
alita_sdk/tools/bitbucket/__init__.py

@@ -9,16 +9,17 @@ from pydantic import BaseModel, Field, ConfigDict, create_model

 from ..base.tool import BaseAction
 from ..elitea_base import filter_missconfigured_index_tools
-from ..utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length, check_connection_response
+from ..utils import clean_string, get_max_toolkit_length, check_connection_response
 from ...configurations.bitbucket import BitbucketConfiguration
 from ...configurations.pgvector import PgVectorConfiguration
 import requests
+from ...runtime.utils.constants import TOOLKIT_NAME_META, TOOL_NAME_META, TOOLKIT_TYPE_META


 name = "bitbucket"


-def get_tools(tool):
+def get_toolkit(tool):
     return AlitaBitbucketToolkit.get_toolkit(
         selected_tools=tool['settings'].get('selected_tools', []),
         project=tool['settings']['project'],
@@ -33,22 +34,23 @@ def get_tools(tool):
         doctype='code',
         embedding_model=tool['settings'].get('embedding_model'),
         toolkit_name=tool.get('toolkit_name')
-    ).get_tools()
+    )
+
+def get_tools(tool):
+    return get_toolkit(tool).get_tools()


 class AlitaBitbucketToolkit(BaseToolkit):
     tools: List[BaseTool] = []
-    toolkit_max_length: int = 0

     @staticmethod
     def toolkit_config_schema() -> BaseModel:
         selected_tools = {x['name']: x['args_schema'].schema() for x in
                           BitbucketAPIWrapper.model_construct().get_available_tools()}
-        AlitaBitbucketToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         m = create_model(
             name,
-            project=(str, Field(description="Project/Workspace", json_schema_extra={'configuration': True})),
-            repository=(str, Field(description="Repository", json_schema_extra={'max_toolkit_length': AlitaBitbucketToolkit.toolkit_max_length, 'configuration': True})),
+            project=(str, Field(description="Project/Workspace")),
+            repository=(str, Field(description="Repository")),
             branch=(str, Field(description="Main branch", default="main")),
             cloud=(Optional[bool], Field(description="Hosting Option", default=None)),
             bitbucket_configuration=(BitbucketConfiguration, Field(description="Bitbucket Configuration", json_schema_extra={'configuration_types': ['bitbucket']})),
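
Note: the two hunks above split the module-level entry point: get_toolkit(tool) now returns the AlitaBitbucketToolkit instance and get_tools(tool) becomes a thin wrapper around it. A hedged usage sketch; the settings dict below is hypothetical and incomplete (real configurations also carry the Bitbucket/PgVector configuration objects the schema requires):

from alita_sdk.tools.bitbucket import get_toolkit, get_tools

tool_config = {
    "toolkit_name": "my_bitbucket",          # hypothetical toolkit instance name
    "settings": {
        "selected_tools": [],                # empty list selects every available tool
        "project": "PROJ",                   # hypothetical workspace/project
        "repository": "demo-repo",           # hypothetical repository
        "branch": "main",
        # ... credentials / configuration entries omitted ...
    },
}

toolkit = get_toolkit(tool_config)   # new: returns the toolkit object itself
tools = get_tools(tool_config)       # same signature as before, now just get_toolkit(tool).get_tools()

The next hunk shows the companion change on the tool side: names are no longer prefixed with the toolkit name; instead the toolkit name and type travel in each BaseAction's metadata (TOOLKIT_NAME_META / TOOLKIT_TYPE_META / TOOL_NAME_META) and are appended to the description, which is truncated to 1000 characters.
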
@@ -99,17 +101,21 @@ class AlitaBitbucketToolkit(BaseToolkit):
         }
         bitbucket_api_wrapper = BitbucketAPIWrapper(**wrapper_payload)
         available_tools: List[Dict] = bitbucket_api_wrapper.get_available_tools()
-        prefix = clean_string(toolkit_name, cls.toolkit_max_length) + TOOLKIT_SPLITTER if toolkit_name else ''
         tools = []
         for tool in available_tools:
             if selected_tools:
                 if tool['name'] not in selected_tools:
                     continue
+            description = tool["description"] + f"\nrepo: {bitbucket_api_wrapper.repository}"
+            if toolkit_name:
+                description = f"{description}\nToolkit: {toolkit_name}"
+            description = description[:1000]
             tools.append(BaseAction(
                 api_wrapper=bitbucket_api_wrapper,
-                name=prefix + tool["name"],
-                description=tool["description"] + f"\nrepo: {bitbucket_api_wrapper.repository}",
-                args_schema=tool["args_schema"]
+                name=tool["name"],
+                description=description,
+                args_schema=tool["args_schema"],
+                metadata={TOOLKIT_NAME_META: toolkit_name, TOOLKIT_TYPE_META: name, TOOL_NAME_META: tool["name"]} if toolkit_name else {TOOL_NAME_META: tool["name"]}
             ))
         return cls(tools=tools)
