alita-sdk 0.3.365__py3-none-any.whl → 0.3.462__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/cli/__init__.py +10 -0
- alita_sdk/cli/__main__.py +17 -0
- alita_sdk/cli/agent_executor.py +144 -0
- alita_sdk/cli/agent_loader.py +197 -0
- alita_sdk/cli/agent_ui.py +166 -0
- alita_sdk/cli/agents.py +1069 -0
- alita_sdk/cli/callbacks.py +576 -0
- alita_sdk/cli/cli.py +159 -0
- alita_sdk/cli/config.py +153 -0
- alita_sdk/cli/formatting.py +182 -0
- alita_sdk/cli/mcp_loader.py +315 -0
- alita_sdk/cli/toolkit.py +330 -0
- alita_sdk/cli/toolkit_loader.py +55 -0
- alita_sdk/cli/tools/__init__.py +9 -0
- alita_sdk/cli/tools/filesystem.py +905 -0
- alita_sdk/configurations/bitbucket.py +95 -0
- alita_sdk/configurations/confluence.py +96 -1
- alita_sdk/configurations/gitlab.py +79 -0
- alita_sdk/configurations/jira.py +103 -0
- alita_sdk/configurations/testrail.py +88 -0
- alita_sdk/configurations/xray.py +93 -0
- alita_sdk/configurations/zephyr_enterprise.py +93 -0
- alita_sdk/configurations/zephyr_essential.py +75 -0
- alita_sdk/runtime/clients/artifact.py +1 -1
- alita_sdk/runtime/clients/client.py +47 -10
- alita_sdk/runtime/clients/mcp_discovery.py +342 -0
- alita_sdk/runtime/clients/mcp_manager.py +262 -0
- alita_sdk/runtime/clients/sandbox_client.py +373 -0
- alita_sdk/runtime/langchain/assistant.py +70 -41
- alita_sdk/runtime/langchain/constants.py +6 -1
- alita_sdk/runtime/langchain/document_loaders/AlitaDocxMammothLoader.py +315 -3
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +4 -1
- alita_sdk/runtime/langchain/document_loaders/constants.py +73 -100
- alita_sdk/runtime/langchain/langraph_agent.py +164 -38
- alita_sdk/runtime/langchain/utils.py +43 -7
- alita_sdk/runtime/models/mcp_models.py +61 -0
- alita_sdk/runtime/toolkits/__init__.py +24 -0
- alita_sdk/runtime/toolkits/application.py +8 -1
- alita_sdk/runtime/toolkits/artifact.py +5 -6
- alita_sdk/runtime/toolkits/mcp.py +895 -0
- alita_sdk/runtime/toolkits/tools.py +140 -50
- alita_sdk/runtime/tools/__init__.py +7 -2
- alita_sdk/runtime/tools/application.py +7 -0
- alita_sdk/runtime/tools/function.py +94 -5
- alita_sdk/runtime/tools/graph.py +10 -4
- alita_sdk/runtime/tools/image_generation.py +104 -8
- alita_sdk/runtime/tools/llm.py +204 -114
- alita_sdk/runtime/tools/mcp_inspect_tool.py +284 -0
- alita_sdk/runtime/tools/mcp_remote_tool.py +166 -0
- alita_sdk/runtime/tools/mcp_server_tool.py +3 -1
- alita_sdk/runtime/tools/sandbox.py +180 -79
- alita_sdk/runtime/tools/vectorstore.py +22 -21
- alita_sdk/runtime/tools/vectorstore_base.py +79 -26
- alita_sdk/runtime/utils/mcp_oauth.py +164 -0
- alita_sdk/runtime/utils/mcp_sse_client.py +405 -0
- alita_sdk/runtime/utils/streamlit.py +34 -3
- alita_sdk/runtime/utils/toolkit_utils.py +14 -4
- alita_sdk/runtime/utils/utils.py +1 -0
- alita_sdk/tools/__init__.py +48 -31
- alita_sdk/tools/ado/repos/__init__.py +1 -0
- alita_sdk/tools/ado/test_plan/__init__.py +1 -1
- alita_sdk/tools/ado/wiki/__init__.py +1 -5
- alita_sdk/tools/ado/work_item/__init__.py +1 -5
- alita_sdk/tools/ado/work_item/ado_wrapper.py +17 -8
- alita_sdk/tools/base_indexer_toolkit.py +194 -112
- alita_sdk/tools/bitbucket/__init__.py +1 -0
- alita_sdk/tools/chunkers/sematic/proposal_chunker.py +1 -1
- alita_sdk/tools/code/sonar/__init__.py +1 -1
- alita_sdk/tools/code_indexer_toolkit.py +15 -5
- alita_sdk/tools/confluence/__init__.py +2 -2
- alita_sdk/tools/confluence/api_wrapper.py +110 -63
- alita_sdk/tools/confluence/loader.py +10 -0
- alita_sdk/tools/elitea_base.py +22 -22
- alita_sdk/tools/github/__init__.py +2 -2
- alita_sdk/tools/gitlab/__init__.py +2 -1
- alita_sdk/tools/gitlab/api_wrapper.py +11 -7
- alita_sdk/tools/gitlab_org/__init__.py +1 -2
- alita_sdk/tools/google_places/__init__.py +2 -1
- alita_sdk/tools/jira/__init__.py +1 -0
- alita_sdk/tools/jira/api_wrapper.py +1 -1
- alita_sdk/tools/memory/__init__.py +1 -1
- alita_sdk/tools/non_code_indexer_toolkit.py +2 -2
- alita_sdk/tools/openapi/__init__.py +10 -1
- alita_sdk/tools/pandas/__init__.py +1 -1
- alita_sdk/tools/postman/__init__.py +2 -1
- alita_sdk/tools/postman/api_wrapper.py +18 -8
- alita_sdk/tools/postman/postman_analysis.py +8 -1
- alita_sdk/tools/pptx/__init__.py +2 -2
- alita_sdk/tools/qtest/__init__.py +3 -3
- alita_sdk/tools/qtest/api_wrapper.py +1708 -76
- alita_sdk/tools/rally/__init__.py +1 -2
- alita_sdk/tools/report_portal/__init__.py +1 -0
- alita_sdk/tools/salesforce/__init__.py +1 -0
- alita_sdk/tools/servicenow/__init__.py +2 -3
- alita_sdk/tools/sharepoint/__init__.py +1 -0
- alita_sdk/tools/sharepoint/api_wrapper.py +125 -34
- alita_sdk/tools/sharepoint/authorization_helper.py +191 -1
- alita_sdk/tools/sharepoint/utils.py +8 -2
- alita_sdk/tools/slack/__init__.py +1 -0
- alita_sdk/tools/sql/__init__.py +2 -1
- alita_sdk/tools/sql/api_wrapper.py +71 -23
- alita_sdk/tools/testio/__init__.py +1 -0
- alita_sdk/tools/testrail/__init__.py +1 -3
- alita_sdk/tools/utils/__init__.py +17 -0
- alita_sdk/tools/utils/content_parser.py +35 -24
- alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +67 -21
- alita_sdk/tools/xray/__init__.py +2 -1
- alita_sdk/tools/zephyr/__init__.py +2 -1
- alita_sdk/tools/zephyr_enterprise/__init__.py +1 -0
- alita_sdk/tools/zephyr_essential/__init__.py +1 -0
- alita_sdk/tools/zephyr_scale/__init__.py +1 -0
- alita_sdk/tools/zephyr_squad/__init__.py +1 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/METADATA +8 -2
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/RECORD +118 -93
- alita_sdk-0.3.462.dist-info/entry_points.txt +2 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/top_level.txt +0 -0
alita_sdk/tools/rally/__init__.py
CHANGED

```diff
@@ -29,8 +29,6 @@ class RallyToolkit(BaseToolkit):
         RallyToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            name=(str, Field(description="Toolkit name", json_schema_extra={'toolkit_name': True,
-                                                                            'max_toolkit_length': RallyToolkit.toolkit_max_length})),
             rally_configuration=(RallyConfiguration, Field(description="Rally configuration", json_schema_extra={'configuration_types': ['rally']})),
             workspace=(Optional[str], Field(default=None, description="Rally workspace")),
             project=(Optional[str], Field(default=None, description="Rally project")),
@@ -39,6 +37,7 @@ class RallyToolkit(BaseToolkit):
                 'metadata': {
                     "label": "Rally",
                     "icon_url": "rally.svg",
+                    "max_length": RallyToolkit.toolkit_max_length,
                     "categories": ["project management"],
                     "extra_categories": ["agile management", "test management", "scrum", "kanban"]
                 }
```
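The same pattern repeats in the Report Portal, Salesforce, ServiceNow, Sharepoint, Slack, and SQL toolkits below: the per-field `max_toolkit_length` constraint is dropped from the `name` field's `json_schema_extra` and the limit is exposed once as `metadata["max_length"]`. A minimal sketch of the new shape, assuming Pydantic v2 semantics for model-level `json_schema_extra`; the consumer-side read at the end is a hypothetical illustration, not SDK code:

```python
from typing import Optional
from pydantic import ConfigDict, Field, create_model

def build_toolkit_config_model(model_name: str, max_length: int):
    # The name-length limit now lives in the model-level metadata rather than in a
    # json_schema_extra constraint attached to the `name` field itself.
    return create_model(
        model_name,
        name=(str, Field(description="Toolkit name")),
        workspace=(Optional[str], Field(default=None, description="Workspace")),
        __config__=ConfigDict(json_schema_extra={
            'metadata': {
                "label": "Example",
                "icon_url": "example.svg",
                "max_length": max_length,  # single place a UI can read the limit from
                "categories": ["project management"],
            }
        }),
    )

# Hypothetical consumer: Pydantic merges dict-valued json_schema_extra into the
# generated schema, so the limit can be read back from model_json_schema().
Model = build_toolkit_config_model("ExampleToolkit", max_length=25)
limit = Model.model_json_schema().get("metadata", {}).get("max_length")
```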
alita_sdk/tools/report_portal/__init__.py
CHANGED

```diff
@@ -33,6 +33,7 @@ class ReportPortalToolkit(BaseToolkit):
             report_portal_configuration=(ReportPortalConfiguration, Field(description="Report Portal Configuration", json_schema_extra={'configuration_types': ['report_portal']})),
             selected_tools=(List[Literal[tuple(selected_tools)]], Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
             __config__=ConfigDict(json_schema_extra={'metadata': {"label": "Report Portal", "icon_url": "reportportal-icon.svg",
+                                                                  "max_length": ReportPortalToolkit.toolkit_max_length,
                                                                   "categories": ["testing"],
                                                                   "extra_categories": ["test reporting", "test automation"]}})
         )
```
alita_sdk/tools/salesforce/__init__.py
CHANGED

```diff
@@ -31,6 +31,7 @@ class SalesforceToolkit(BaseToolkit):
             selected_tools=(List[Literal[tuple(available_tools)]], Field(default=[], json_schema_extra={'args_schemas': available_tools})),
             __config__=ConfigDict(json_schema_extra={'metadata': {
                 "label": "Salesforce", "icon_url": "salesforce-icon.svg",
+                "max_length": SalesforceToolkit.toolkit_max_length,
                 "categories": ["other"],
                 "extra_categories": ["customer relationship management", "cloud computing", "marketing automation", "salesforce"]
             }})
```
alita_sdk/tools/servicenow/__init__.py
CHANGED

```diff
@@ -35,9 +35,7 @@ class ServiceNowToolkit(BaseToolkit):
         ServiceNowToolkit.toolkit_max_length = get_max_toolkit_length(selected_tools)
         return create_model(
             name,
-            name=(str, Field(description="Toolkit name",
-                             json_schema_extra={
-                                 'toolkit_name': True, 'max_toolkit_length': ServiceNowToolkit.toolkit_max_length})),
+            name=(str, Field(description="Toolkit name")),
             response_fields=(Optional[str], Field(description="Response fields", default=None)),
             servicenow_configuration=(ServiceNowConfiguration, Field(description="ServiceNow Configuration",
                                                                      json_schema_extra={
@@ -49,6 +47,7 @@ class ServiceNowToolkit(BaseToolkit):
                 'metadata': {
                     "label": "ServiceNow",
                     "icon_url": "service-now.svg",
+                    "max_length": ServiceNowToolkit.toolkit_max_length,
                     "hidden": False,
                     "sections": {
                         "auth": {
```
alita_sdk/tools/sharepoint/__init__.py
CHANGED

```diff
@@ -48,6 +48,7 @@ class SharepointToolkit(BaseToolkit):
             __config__=ConfigDict(json_schema_extra={
                 'metadata': {
                     "label": "Sharepoint", "icon_url": "sharepoint.svg",
+                    "max_length": SharepointToolkit.toolkit_max_length,
                     "categories": ["office"],
                     "extra_categories": ["microsoft", "cloud storage", "team collaboration", "content management"]
                 }})
```
alita_sdk/tools/sharepoint/api_wrapper.py
CHANGED

```diff
@@ -8,6 +8,7 @@ from office365.runtime.auth.client_credential import ClientCredential
 from office365.sharepoint.client_context import ClientContext
 from pydantic import Field, PrivateAttr, create_model, model_validator, SecretStr
 
+from .utils import decode_sharepoint_string
 from ..non_code_indexer_toolkit import NonCodeIndexerToolkit
 from ..utils.content_parser import parse_file_content
 from ...runtime.utils.utils import IndexerKeywords
@@ -91,44 +92,100 @@ class SharepointApiWrapper(NonCodeIndexerToolkit):
             target_list = self._client.web.lists.get_by_title(list_title)
             self._client.load(target_list)
             self._client.execute_query()
-            items = target_list.items.
-            logging.info("{0} items from sharepoint loaded successfully.".format(len(items)))
+            items = target_list.items.top(limit).get().execute_query()
+            logging.info("{0} items from sharepoint loaded successfully via SharePoint REST API.".format(len(items)))
             result = []
             for item in items:
                 result.append(item.properties)
             return result
-        except Exception as
-            logging.
-
+        except Exception as base_e:
+            logging.warning(f"Primary SharePoint REST list read failed: {base_e}. Attempting Graph API fallback.")
+            # Attempt Graph API fallback
+            try:
+                from .authorization_helper import SharepointAuthorizationHelper
+                auth_helper = SharepointAuthorizationHelper(
+                    client_id=self.client_id,
+                    client_secret=self.client_secret.get_secret_value() if self.client_secret else None,
+                    tenant="",  # optional for graph api (derived inside helper)
+                    scope="",  # optional for graph api
+                    token_json="",  # not needed for client credentials flow here
+                )
+                graph_items = auth_helper.get_list_items(self.site_url, list_title, limit)
+                if graph_items:
+                    logging.info(f"{len(graph_items)} items from sharepoint loaded successfully via Graph API fallback.")
+                    return graph_items
+                else:
+                    return ToolException("List appears empty or inaccessible via both REST and Graph APIs.")
+            except Exception as graph_e:
+                logging.error(f"Graph API fallback failed: {graph_e}")
+                return ToolException(f"Cannot read list '{list_title}'. Check list name and permissions: {base_e} | {graph_e}")
 
 
     def get_files_list(self, folder_name: str = None, limit_files: int = 100):
         """ If folder name is specified, lists all files in this folder under Shared Documents path. If folder name is empty, lists all files under root catalog (Shared Documents). Number of files is limited by limit_files (default is 100)."""
         try:
+            # exclude default system libraries like 'Form Templates', 'Site Assets', 'Style Library'
+            all_libraries = self._client.web.lists.filter("BaseTemplate eq 101 and Title ne 'Form Templates' and Title ne 'Site Assets' and Title ne 'Style Library'").get().execute_query()
             result = []
             if not limit_files:
                 limit_files = 100
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            #
+            site_segments = [seg for seg in self.site_url.strip('/').split('/') if seg][-2:]
+            full_path_prefix = '/'.join(site_segments)
+            #
+            for lib in all_libraries:
+                library_type = decode_sharepoint_string(lib.properties["EntityTypeName"])
+                target_folder_url = library_type
+                if folder_name:
+                    folder_path = folder_name.strip('/')
+                    expected_prefix = f'{full_path_prefix}/{library_type}'
+                    if folder_path.startswith(full_path_prefix):
+                        if folder_path.startswith(expected_prefix):
+                            target_folder_url = folder_path.removeprefix(f'{full_path_prefix}/')
+                        else:
+                            # ignore full path folder which is not targeted to current library
+                            continue
+                    else:
+                        target_folder_url = f"{library_type}/{folder_name}"
+                #
+                files = (self._client.web.get_folder_by_server_relative_path(target_folder_url)
+                         .get_files(True)
+                         .execute_query())
+                #
+                for file in files:
+                    if f"{library_type}/Forms" in file.properties['ServerRelativeUrl']:
+                        # skip files from system folder "Forms"
+                        continue
+                    if len(result) >= limit_files:
+                        break
+                    temp_props = {
+                        'Name': file.properties['Name'],
+                        'Path': file.properties['ServerRelativeUrl'],
+                        'Created': file.properties['TimeCreated'],
+                        'Modified': file.properties['TimeLastModified'],
+                        'Link': file.properties['LinkingUrl'],
+                        'id': file.properties['UniqueId']
+                    }
+                    result.append(temp_props)
             return result if result else ToolException("Can not get files or folder is empty. Please, double check folder name and read permissions.")
         except Exception as e:
-
-
+            # attempt to get via graph api
+            try:
+                # attempt to get files via graph api
+                from .authorization_helper import SharepointAuthorizationHelper
+                auth_helper = SharepointAuthorizationHelper(
+                    client_id=self.client_id,
+                    client_secret=self.client_secret.get_secret_value(),
+                    tenant="",  # optional for graph api
+                    scope="",  # optional for graph api
+                    token_json="",  # optional for graph api
+                )
+                files = auth_helper.get_files_list(self.site_url, folder_name, limit_files)
+                return files
+            except Exception as graph_e:
+                logging.error(f"Failed to load files from sharepoint via base api: {e}")
+                logging.error(f"Failed to load files from sharepoint via graph api: {graph_e}")
+                return ToolException(f"Can not get files. Please, double check folder name and read permissions: {e} and {graph_e}")
 
     def read_file(self, path,
                   is_capture_image: bool = False,
@@ -141,11 +198,28 @@ class SharepointApiWrapper(NonCodeIndexerToolkit):
             self._client.load(file).execute_query()
 
             file_content = file.read()
+            file_name = file.name
             self._client.execute_query()
         except Exception as e:
-
-
-
+            # attempt to get via graph api
+            try:
+                # attempt to get files via graph api
+                from .authorization_helper import SharepointAuthorizationHelper
+                auth_helper = SharepointAuthorizationHelper(
+                    client_id=self.client_id,
+                    client_secret=self.client_secret.get_secret_value(),
+                    tenant="",  # optional for graph api
+                    scope="",  # optional for graph api
+                    token_json="",  # optional for graph api
+                )
+                file_content = auth_helper.get_file_content(self.site_url, path)
+                file_name = path.split('/')[-1]
+            except Exception as graph_e:
+                logging.error(f"Failed to load file from SharePoint via base api: {e}. Path: {path}. Please, double check file name and path.")
+                logging.error(f"Failed to load file from SharePoint via graph api: {graph_e}. Path: {path}. Please, double check file name and path.")
+                return ToolException(f"File not found. Please, check file name and path: {e} and {graph_e}")
+        #
+        return parse_file_content(file_name=file_name,
                                   file_content=file_content,
                                   is_capture_image=is_capture_image,
                                   page_number=page_number,
@@ -167,13 +241,18 @@ class SharepointApiWrapper(NonCodeIndexerToolkit):
             'skip_extensions': (Optional[List[str]], Field(
                 description="List of file extensions to skip when processing: i.e. ['*.png', '*.jpg']",
                 default=[])),
+            'path': (Optional[str], Field(
+                description="Folder path. "
+                            "Accepts either a full server-relative path (e.g., '/sites/SiteName/...') or a relative path. "
+                            "If a relative path is provided, the search will be performed recursively under 'Shared Documents' and other private libraries.",
+                default=None)),
         }
 
     def _base_loader(self, **kwargs) -> Generator[Document, None, None]:
 
         self._log_tool_event(message="Starting SharePoint files extraction", tool_name="loader")
         try:
-            all_files = self.get_files_list(
+            all_files = self.get_files_list(kwargs.get('path'), kwargs.get('limit_files', 10000))
             self._log_tool_event(message="List of the files has been extracted", tool_name="loader")
         except Exception as e:
             raise ToolException(f"Unable to extract files: {e}")
@@ -219,12 +298,24 @@ class SharepointApiWrapper(NonCodeIndexerToolkit):
             yield document
 
     def _load_file_content_in_bytes(self, path):
-
-
-
-
-
-
+        try:
+            file = self._client.web.get_file_by_server_relative_path(path)
+            self._client.load(file).execute_query()
+            file_content = file.read()
+            self._client.execute_query()
+            #
+            return file_content
+        except Exception as e:
+            # attempt to get via graph api
+            from .authorization_helper import SharepointAuthorizationHelper
+            auth_helper = SharepointAuthorizationHelper(
+                client_id=self.client_id,
+                client_secret=self.client_secret.get_secret_value(),
+                tenant="",  # optional for graph api
+                scope="",  # optional for graph api
+                token_json="",  # optional for graph api
+            )
+            return auth_helper.get_file_content(self.site_url, path)
 
     def get_available_tools(self):
         return super().get_available_tools() + [
```
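The recurring change in this wrapper is a REST-first, Graph-fallback read path: list reads, file listing, and file content first go through the office365 REST client, and a `SharepointAuthorizationHelper` is only constructed for Microsoft Graph when that fails. A condensed sketch of the control flow, with `rest_read` and `graph_read` as hypothetical stand-ins for the concrete calls shown in the diff:

```python
import logging
from typing import Any, Callable, List

def read_with_fallback(rest_read: Callable[[], List[Any]],
                       graph_read: Callable[[], List[Any]]) -> List[Any]:
    """Try the SharePoint REST client first; fall back to the Graph API on any error."""
    try:
        items = rest_read()
        logging.info("%d items loaded via SharePoint REST API.", len(items))
        return items
    except Exception as rest_error:
        logging.warning("REST read failed: %s. Attempting Graph API fallback.", rest_error)
        try:
            items = graph_read()
            logging.info("%d items loaded via Graph API fallback.", len(items))
            return items
        except Exception as graph_error:
            # Both transports failed; surface both errors, as the ToolException messages above do.
            raise RuntimeError(f"REST and Graph reads both failed: {rest_error} | {graph_error}")
```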
alita_sdk/tools/sharepoint/authorization_helper.py
CHANGED

```diff
@@ -1,7 +1,10 @@
 from datetime import datetime, timezone
+from urllib.parse import unquote, urlparse, quote
 
 import jwt
 import requests
+from botocore.response import get_response
+
 
 class SharepointAuthorizationHelper:
 
@@ -54,4 +57,191 @@ class SharepointAuthorizationHelper:
         except jwt.ExpiredSignatureError:
             return False
         except jwt.InvalidTokenError:
-            return False
+            return False
+
+    def _validate_response(self, response, required_field, error_prefix=None):
+        if response.status_code != 200:
+            raise RuntimeError(f"{error_prefix or 'Request'} failed: {response.status_code} {response.text}")
+        json_data = response.json()
+        if required_field not in json_data:
+            raise KeyError(f"'{required_field}' missing in response")
+        return json_data[required_field]
+
+    def generate_token_and_site_id(self, site_url: str) -> tuple[str, str]:
+        try:
+            parsed = urlparse(site_url)
+            domain = parsed.hostname
+            site_path = parsed.path.strip('/')
+            if not domain or not site_path:
+                raise ValueError(f"site_url missing domain or site path: {site_url}")
+            app_name = domain.split('.')[0]
+            openid_config_url = f"https://login.microsoftonline.com/{app_name}.onmicrosoft.com/v2.0/.well-known/openid-configuration"
+            response = requests.get(openid_config_url)
+            token_url = self._validate_response(response, required_field="token_endpoint", error_prefix="OpenID config")
+            token_data = {
+                "grant_type": "client_credentials",
+                "client_id": self.client_id,
+                "client_secret": self.client_secret,
+                "scope": "https://graph.microsoft.com/.default"
+            }
+            token_response = requests.post(token_url, data=token_data)
+            access_token = self._validate_response(token_response, required_field="access_token", error_prefix="Token request")
+            graph_site_url = f"https://graph.microsoft.com/v1.0/sites/{domain}:/{site_path}"
+            headers = {"Authorization": f"Bearer {access_token}"}
+            site_response = requests.get(graph_site_url, headers=headers)
+            site_id = self._validate_response(site_response, required_field="id", error_prefix="Site info")
+            return access_token, site_id
+        except Exception as e:
+            raise RuntimeError(f"Error while obtaining access_token and site_id: {e}")
+
+    def get_files_list(self, site_url: str, folder_name: str = None, limit_files: int = 100):
+        if not site_url or not site_url.startswith("https://"):
+            raise ValueError(f"Invalid site_url format: {site_url}")
+        if limit_files is not None and (not isinstance(limit_files, int) or limit_files <= 0):
+            raise ValueError(f"limit_files must be a positive integer, got: {limit_files}")
+        try:
+            access_token, site_id = self.generate_token_and_site_id(site_url)
+            headers = {"Authorization": f"Bearer {access_token}"}
+            drives_url = f"https://graph.microsoft.com/v1.0/sites/{site_id}/drives"
+            drives_response = requests.get(drives_url, headers=headers)
+            drives = self._validate_response(drives_response, required_field="value", error_prefix="Drives request")
+            result = []
+            def _recurse_drive(drive_id, drive_path, parent_folder, limit_files):
+                # Escape folder_name for URL safety if present
+                if parent_folder:
+                    safe_folder_name = quote(parent_folder.strip('/'), safe="/")
+                    url = f"https://graph.microsoft.com/v1.0/sites/{site_id}/drives/{drive_id}/root:/{safe_folder_name}:/children?$top={limit_files}"
+                else:
+                    url = f"https://graph.microsoft.com/v1.0/sites/{site_id}/drives/{drive_id}/root/children?$top={limit_files}"
+                response = requests.get(url, headers=headers)
+                if response.status_code != 200:
+                    return []
+                files_json = response.json()
+                if "value" not in files_json:
+                    return []
+                files = []
+                for file in files_json["value"]:
+                    file_name = file.get('name', '')
+                    # Build full path reflecting nested folders
+                    if parent_folder:
+                        full_path = '/' + '/'.join([drive_path.strip('/'), parent_folder.strip('/'), file_name.strip('/')])
+                    else:
+                        full_path = '/' + '/'.join([drive_path.strip('/'), file_name.strip('/')])
+                    temp_props = {
+                        'Name': file_name,
+                        'Path': full_path,
+                        'Created': file.get('createdDateTime'),
+                        'Modified': file.get('lastModifiedDateTime'),
+                        'Link': file.get('webUrl'),
+                        'id': file.get('id')
+                    }
+                    if not all([temp_props['Name'], temp_props['Path'], temp_props['id']]):
+                        continue  # skip files with missing required fields
+                    if 'folder' in file:
+                        # Recursively extract files from this folder
+                        inner_folder = parent_folder + '/' + file_name if parent_folder else file_name
+                        inner_files = _recurse_drive(drive_id, drive_path, inner_folder, limit_files)
+                        files.extend(inner_files)
+                    else:
+                        files.append(temp_props)
+                    if limit_files is not None and len(result) + len(files) >= limit_files:
+                        return files[:limit_files - len(result)]
+                return files
+            #
+            site_segments = [seg for seg in site_url.strip('/').split('/') if seg][-2:]
+            full_path_prefix = '/'.join(site_segments)
+            #
+            for drive in drives:
+                drive_id = drive.get("id")
+                drive_path = unquote(urlparse(drive.get("webUrl")).path) if drive.get("webUrl") else ""
+                if not drive_id:
+                    continue  # skip drives without id
+                #
+                sub_folder = folder_name
+                if folder_name:
+                    folder_path = folder_name.strip('/')
+                    expected_prefix = drive_path.strip('/')  # f'{full_path_prefix}/{library_type}'
+                    if folder_path.startswith(full_path_prefix):
+                        if folder_path.startswith(expected_prefix):
+                            sub_folder = folder_path.removeprefix(f'{expected_prefix}').strip('/')  # target_folder_url = folder_path.removeprefix(f'{full_path_prefix}/')
+                        else:
+                            # ignore full path folder which is not targeted to current drive
+                            continue
+                #
+                files = _recurse_drive(drive_id, drive_path, sub_folder, limit_files)
+                result.extend(files)
+                if limit_files is not None and len(result) >= limit_files:
+                    return result[:limit_files]
+            return result
+        except Exception as e:
+            raise RuntimeError(f"Error in get_files_list: {e}")
+
+    def get_file_content(self, site_url: str, path: str):
+        try:
+            access_token, site_id = self.generate_token_and_site_id(site_url)
+            headers = {"Authorization": f"Bearer {access_token}"}
+            drives_url = f"https://graph.microsoft.com/v1.0/sites/{site_id}/drives"
+            drives_response = requests.get(drives_url, headers=headers)
+            drives = self._validate_response(drives_response, required_field="value", error_prefix="Drives request")
+            path = path.strip('/')
+            #
+            for drive in drives:
+                drive_path = unquote(urlparse(drive.get("webUrl")).path).strip('/')
+                if not drive_path or not path.startswith(drive_path):
+                    continue
+                drive_id = drive.get("id")
+                if not drive_id:
+                    continue
+                path = path.replace(drive_path, '').strip('/')
+                safe_path = quote(path, safe="")
+                url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/root:/{safe_path}:/content"
+                response = requests.get(url, headers=headers)
+                if response.status_code == 200:
+                    return response.content
+            raise RuntimeError(f"File '{path}' not found in any private or shared documents.")
+        except Exception as e:
+            raise RuntimeError(f"Error in get_file_content: {e}")
+
+    def get_list_items(self, site_url: str, list_title: str, limit: int = 1000):
+        """Fallback Graph API method to read SharePoint list items by list title.
+
+        Returns a list of dictionaries representing list item fields.
+        """
+        if not site_url or not site_url.startswith("https://"):
+            raise ValueError(f"Invalid site_url format: {site_url}")
+        try:
+            access_token, site_id = self.generate_token_and_site_id(site_url)
+            headers = {"Authorization": f"Bearer {access_token}"}
+            lists_url = f"https://graph.microsoft.com/v1.0/sites/{site_id}/lists"
+            response = requests.get(lists_url, headers=headers)
+            if response.status_code != 200:
+                raise RuntimeError(f"Lists request failed: {response.status_code} {response.text}")
+            lists_json = response.json()
+            lists = lists_json.get("value", [])
+            target_list = None
+            normalized_title = list_title.strip().lower()
+            for lst in lists:
+                # displayName is the user-visible title. name can differ (internal name)
+                display_name = (lst.get("displayName") or lst.get("name") or '').strip().lower()
+                if display_name == normalized_title:
+                    target_list = lst
+                    break
+            if not target_list:
+                raise RuntimeError(f"List '{list_title}' not found via Graph API.")
+            list_id = target_list.get('id')
+            if not list_id:
+                raise RuntimeError(f"List '{list_title}' missing id field.")
+            items_url = f"https://graph.microsoft.com/v1.0/sites/{site_id}/lists/{list_id}/items?expand=fields&$top={limit}"
+            items_response = requests.get(items_url, headers=headers)
+            if items_response.status_code != 200:
+                raise RuntimeError(f"List items request failed: {items_response.status_code} {items_response.text}")
+            items_json = items_response.json()
+            values = items_json.get('value', [])
+            result = []
+            for item in values:
+                fields = item.get('fields', {})
+                if fields:
+                    result.append(fields)
+            return result
+        except Exception as e:
+            raise RuntimeError(f"Error in get_list_items: {e}")
```
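The new helper methods authenticate with the client-credentials flow, resolve the Graph site id, then walk the site's drives. A hypothetical usage sketch; the constructor keywords mirror the call sites in api_wrapper.py above, and the site URL, folder, and list names are placeholders:

```python
from alita_sdk.tools.sharepoint.authorization_helper import SharepointAuthorizationHelper

# Placeholder credentials; tenant/scope/token_json are passed empty at the call sites in the diff.
helper = SharepointAuthorizationHelper(
    client_id="<azure-app-client-id>",
    client_secret="<azure-app-client-secret>",
    tenant="",
    scope="",
    token_json="",
)

site_url = "https://contoso.sharepoint.com/sites/TeamSite"  # hypothetical site

# List up to 50 files under a folder, then fetch the first file's raw bytes.
files = helper.get_files_list(site_url, folder_name="Shared Documents/Reports", limit_files=50)
content = helper.get_file_content(site_url, files[0]["Path"]) if files else None

# Read list items (dictionaries of item fields) from a SharePoint list by display name.
rows = helper.get_list_items(site_url, list_title="Issues", limit=200)
```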
alita_sdk/tools/sharepoint/utils.py
CHANGED

```diff
@@ -1,5 +1,7 @@
-
+import re
 from io import BytesIO
+from docx import Document
+
 
 def read_docx_from_bytes(file_content):
     """Read and return content from a .docx file using a byte stream."""
@@ -11,4 +13,8 @@ def read_docx_from_bytes(file_content):
         return '\n'.join(text)
     except Exception as e:
         print(f"Error reading .docx from bytes: {e}")
-        return ""
+        return ""
+
+
+def decode_sharepoint_string(s):
+    return re.sub(r'_x([0-9A-Fa-f]{4})_', lambda m: chr(int(m.group(1), 16)), s)
```
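The new `decode_sharepoint_string` helper reverses SharePoint's `_xHHHH_` escaping of special characters in internal entity names, for example `_x0020_` decodes to a space. A quick illustration:

```python
from alita_sdk.tools.sharepoint.utils import decode_sharepoint_string

# SharePoint encodes special characters in internal names as _xHHHH_ (hex code points).
print(decode_sharepoint_string("Shared_x0020_Documents"))  # -> "Shared Documents"
print(decode_sharepoint_string("Team_x002d_Site"))         # -> "Team-Site"
```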
alita_sdk/tools/slack/__init__.py
CHANGED

```diff
@@ -59,6 +59,7 @@ class SlackToolkit(BaseToolkit):
                 'metadata': {
                     "label": "Slack",
                     "icon_url": "slack-icon.svg",
+                    "max_length": SlackToolkit.toolkit_max_length,
                     "categories": ["communication"],
                     "extra_categories": ["slack", "chat", "messaging", "collaboration"],
                 }
```
alita_sdk/tools/sql/__init__.py
CHANGED

```diff
@@ -34,7 +34,7 @@ class SQLToolkit(BaseToolkit):
         return create_model(
             name,
             dialect=(Literal[tuple(supported_dialects)], Field(description="Database dialect (mysql or postgres)")),
-            database_name=(str, Field(description="Database name"
+            database_name=(str, Field(description="Database name")),
             sql_configuration=(SqlConfiguration, Field(description="SQL Configuration", json_schema_extra={'configuration_types': ['sql']})),
             selected_tools=(List[Literal[tuple(selected_tools)]], Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
             __config__=ConfigDict(json_schema_extra=
@@ -42,6 +42,7 @@ class SQLToolkit(BaseToolkit):
                 'metadata':
                     {
                         "label": "SQL", "icon_url": "sql-icon.svg",
+                        "max_length": SQLToolkit.toolkit_max_length,
                         "categories": ["development"],
                         "extra_categories": ["sql", "data management", "data analysis"]}})
         )
```