alita-sdk 0.3.130__py3-none-any.whl → 0.3.132__py3-none-any.whl

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
@@ -8,6 +8,7 @@ from langchain_core.messages import (
     AIMessage, HumanMessage,
     SystemMessage, BaseMessage,
 )
+from langchain_core.tools import ToolException
 
 from ..langchain.assistant import Assistant as LangChainAssistant
 # from ..llamaindex.assistant import Assistant as LLamaAssistant
@@ -174,7 +175,12 @@ class AlitaClient:
         tools = []
         if chat_history is None:
             chat_history = []
-        data = self.get_app_version_details(application_id, application_version_id)
+        try:
+            data = self.get_app_version_details(application_id, application_version_id)
+        except ApiDetailsRequestError as e:
+            error_msg = f"Failed to fetch application version details for {application_id}/{application_version_id}\nDetails: {e}"
+            logger.error(error_msg)
+            raise ToolException(error_msg)
 
         if application_variables:
             for var in data.get('variables', {}):
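
For orientation, here is a minimal caller-side sketch of what this change means in practice: failures of `get_app_version_details` now surface as a LangChain `ToolException` rather than an unhandled `ApiDetailsRequestError`. The `client.application(...)` call and the IDs below are illustrative assumptions, not the SDK's confirmed public signature:

    from langchain_core.tools import ToolException

    try:
        # hypothetical entry point for the code path patched above
        app = client.application(application_id=42, application_version_id=7)
    except ToolException as e:
        # the message already names the application/version pair and the API details
        print(f"Could not load application: {e}")
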
@@ -0,0 +1,100 @@
+from typing import List, Optional, Literal
+from elitea_analyse.ado.azure_search import AzureSearch
+from pydantic import SecretStr, create_model, BaseModel, ConfigDict, Field
+
+from langchain_core.tools import BaseTool, BaseToolkit
+
+from alita_tools.utils import get_max_toolkit_length
+from alita_tools.base.tool import BaseAction
+
+from ....tools.artifact import ArtifactWrapper
+from .api_wrapper import AdoAnalyseWrapper
+
+from ...utils import check_schema
+
+
+name = "Analyse_Ado"
+
+
+class AnalyseAdo(BaseToolkit):
+    tools: List[BaseTool] = []
+    toolkit_max_length: int = 0
+
+    @staticmethod
+    def toolkit_config_schema() -> type[BaseModel]:
+        selected_tools = {
+            x["name"]: x["args_schema"].schema()
+            for x in AdoAnalyseWrapper.model_construct().get_available_tools()
+        }
+        AnalyseAdo.toolkit_max_length = get_max_toolkit_length(selected_tools)
+
+        return create_model(
+            "analyse_ado",
+            organization=(str, Field(description="Azure DevOps organization name",
+                json_schema_extra={"toolkit_name": True, "max_toolkit_length": AnalyseAdo.toolkit_max_length})),
+            username=(str, Field(description="Azure DevOps username (e.g., 'john.doe@domain.com')")),
+            token=(SecretStr, Field(description="Azure DevOps Access Token", json_schema_extra={"secret": True})),
+            project_keys=(Optional[str], Field(description="Azure DevOps project keys separated by comma", default=None)),
+            default_branch_name=(Optional[str], Field(description="Default branch name", default="main")),
+            area=(Optional[str], Field(description="Area path filter", default="")),
+            artifact_bucket_path=(Optional[str], Field(description="Artifact Bucket Path", default="analyse-ado")),
+            selected_tools=(List[Literal[tuple(selected_tools)]], Field(default=[], json_schema_extra={"args_schemas": selected_tools})),
+            __config__=ConfigDict(json_schema_extra={"metadata": {
+                "label": "Analyse_Ado",
+                "icon_url": "ado-icon.svg",  # TODO: confirm this icon asset exists
+                "hidden": True,
+                "sections": {
+                    "auth": {
+                        "required": True,
+                        "subsections": [{"name": "Token", "fields": ["token"]}],
+                    }
+                },
+            }})
+        )
+
+    @classmethod
+    def get_toolkit(cls, client: "AlitaClient", selected_tools: list[str], **kwargs):
+        bucket_path = kwargs.get("artifact_bucket_path") or "analyse-ado"
+        artifact_wrapper = ArtifactWrapper(client=client, bucket=bucket_path)
+        check_schema(artifact_wrapper)
+
+        project_keys = kwargs.get("project_keys") or ""
+        area = kwargs.get("area", "")
+
+        organization = kwargs.get("organization")
+        username = kwargs.get("username")
+        token = kwargs.get("token")
+
+        if not organization or not username or not token:
+            raise ValueError("Organization, username, and token must be provided.")
+
+        ado_search = AzureSearch(organization=organization, user=username, token=token)
+
+        ado_analyse_wrapper = AdoAnalyseWrapper(
+            artifacts_wrapper=artifact_wrapper,
+            project_keys=project_keys,
+            default_branch_name=kwargs.get("default_branch_name", "main"),
+            area=area,
+            ado_search=ado_search,
+        )
+
+        selected_tools = selected_tools or []
+        available_tools = ado_analyse_wrapper.get_available_tools()
+
+        tools = []
+        for tool in available_tools:
+            if selected_tools and tool["name"] not in selected_tools:
+                continue
+            tools.append(
+                BaseAction(
+                    api_wrapper=ado_analyse_wrapper,
+                    name=tool["name"],
+                    description=tool["description"],
+                    args_schema=tool["args_schema"],
+                )
+            )
+
+        return cls(tools=tools)
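
A short usage sketch for this new toolkit, assuming an authenticated `AlitaClient` instance named `client`; the credential and project values are placeholders:

    toolkit = AnalyseAdo.get_toolkit(
        client=client,
        selected_tools=["get_projects_list", "get_work_items"],
        organization="my-org",
        username="john.doe@domain.com",
        token="<ado-access-token>",
        project_keys="ProjA,ProjB",
    )
    for tool in toolkit.tools:
        print(tool.name)  # each entry is a BaseAction bound to AdoAnalyseWrapper
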
@@ -0,0 +1,259 @@
+import logging
+
+from typing import Optional, Dict, Any
+from pydantic import BaseModel, Field
+
+from elitea_analyse.utils.constants import OUTPUT_WORK_ITEMS_FILE
+from elitea_analyse.ado.azure_search import AzureSearch
+from elitea_analyse.ado.main import (
+    get_work_items_several_projects,
+    get_commits_several_projects,
+    get_merge_requests_several_projects,
+    get_pipelines_runs_several_projects,
+)
+
+from alita_tools.elitea_base import BaseToolApiWrapper
+
+from ....utils.save_dataframe import save_dataframe_to_artifact
+from ....tools.artifact import ArtifactWrapper
+from ....utils.logging import with_streamlit_logs
+
+
+logger = logging.getLogger(__name__)
+
+
+class GetAdoWorkItemsArgs(BaseModel):
+    resolved_after: str = Field(description="Resolved after date (i.e. 2023-01-01)")
+    updated_after: str = Field(description="Updated after date (i.e. 2023-01-01)")
+    created_after: str = Field(description="Created after date (i.e. 2023-01-01)")
+    area: Optional[str] = Field(description="Area path filter.", default="")
+    project_keys: Optional[str] = Field(
+        description="One or more project keys separated by comma.", default=""
+    )
+
+
+class AdoCommitsArgs(BaseModel):
+    project_keys: Optional[str] = Field(
+        description="One or more project keys separated by comma.", default=""
+    )
+    since_date: str = Field(description="Get commits after this date 'YYYY-MM-DD'")
+
+
+class AdoPipelinesArgs(BaseModel):
+    project_keys: Optional[str] = Field(
+        description="One or more project keys separated by comma.", default=""
+    )
+
+
+class AdoAnalyseWrapper(BaseToolApiWrapper):
+    artifacts_wrapper: ArtifactWrapper
+    project_keys: str  # Comma-separated list of Azure DevOps project names
+    default_branch_name: str = "main"
+    area: str = ""
+    ado_search: AzureSearch  # Azure DevOps search client
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    def get_projects_list(self):
+        """
+        Get all projects in the organization that the authenticated user has access to.
+        Details on a page: https://docs.microsoft.com/en-us/rest/api/azure/devops/core/projects/list
+        """
+        result = self.ado_search.get_projects_list()
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper,
+            result,
+            "projects_info.csv",
+            csv_options={"index": False},
+        )
+
+        return (
+            f"You have access to {len(result)} projects. "
+            f"Data has been downloaded to the bucket as 'projects_info.csv'."
+        )
+
+    @with_streamlit_logs(tool_name="get_work_items")
+    def get_work_items(
+        self,
+        resolved_after: str,
+        updated_after: str,
+        created_after: str,
+        area: str = "",
+        project_keys: Optional[str] = None,
+    ) -> str:
+        """
+        Get work items from multiple Azure DevOps projects.
+
+        project_keys: str
+            Comma-separated project names.
+        resolved_after: str
+            Date filter for resolved items 'YYYY-MM-DD'.
+        updated_after: str
+            Date filter for updated items 'YYYY-MM-DD'.
+        created_after: str
+            Date filter for created items 'YYYY-MM-DD'.
+        area: str
+            Area path filter (optional).
+        """
+        project_keys = project_keys or self.project_keys
+        area = area or self.area
+
+        df_work_items = get_work_items_several_projects(
+            project_keys,
+            resolved_after,
+            updated_after,
+            created_after,
+            area=area,
+            ado_search=self.ado_search,
+        )
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper,
+            df_work_items,
+            f"{OUTPUT_WORK_ITEMS_FILE}{project_keys}.csv",
+            csv_options={"index_label": "id"},
+        )
+
+        return (
+            f"Work items for {project_keys} have been successfully retrieved "
+            f"and saved to the bucket as '{OUTPUT_WORK_ITEMS_FILE}{project_keys}.csv'."
+        )
+
+    async def get_commits(
+        self,
+        since_date: str,
+        project_keys: Optional[str] = None,
+        new_version: bool = True,
+        with_commit_size: bool = True,
+    ) -> str:
+        """
+        Get commits from multiple Azure DevOps projects.
+
+        since_date: str
+            Get commits after this date 'YYYY-MM-DD'.
+        project_keys: str
+            Comma-separated project names.
+        new_version: bool
+            Use new API version.
+        with_commit_size: bool
+            Include commit size info.
+        """
+        project_keys = project_keys or self.project_keys
+
+        # Await the coroutine to get commits
+        df_commits = await get_commits_several_projects(
+            project_keys,
+            since_date,
+            new_version=new_version,
+            with_commit_size=with_commit_size,
+            ado_search=self.ado_search,
+        )
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper,
+            df_commits,
+            f"commits_details_{project_keys}.csv",
+            csv_options={"index_label": "id"},
+        )
+
+        return (
+            f"Commits for {project_keys} have been successfully retrieved "
+            f"and saved to the bucket as 'commits_details_{project_keys}.csv'."
+        )
+
+    def get_merge_requests(
+        self, since_date: str, project_keys: Optional[str] = None
+    ) -> str:
+        """
+        Get pull requests from multiple Azure DevOps projects.
+
+        project_keys: str
+            Comma-separated project names.
+        since_date: str
+            Get PRs after this date 'YYYY-MM-DD'.
+        """
+        project_keys = project_keys or self.project_keys
+
+        df_prs = get_merge_requests_several_projects(
+            project_keys, since_date, ado_search=self.ado_search
+        )
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper,
+            df_prs,
+            f"merge_requests_details_{project_keys}.csv",
+            csv_options={"index": False},
+        )
+
+        return (
+            f"Pull requests for {project_keys} have been successfully retrieved "
+            f"and saved to the bucket as 'merge_requests_details_{project_keys}.csv'."
+        )
+
+    def get_pipelines_runs(
+        self,
+        project_keys: Optional[str] = None,
+    ) -> str:
+        """
+        Get pipeline runs from multiple Azure DevOps projects.
+
+        project_keys: str
+            Comma-separated project names.
+        """
+        project_keys = project_keys or self.project_keys
+        pipelines_df = get_pipelines_runs_several_projects(project_keys, ado_search=self.ado_search)
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper, pipelines_df, f"pipelines_runs_{project_keys}.csv", csv_options={"index": False}
+        )
+
+        return (
+            f"Pipeline runs for {project_keys} have been successfully retrieved "
+            f"and saved to the bucket as 'pipelines_runs_{project_keys}.csv'."
+        )
+
+    def get_available_tools(self) -> list[Dict[str, Any]]:
+        """Get a list of available tools."""
+        return [
+            {
+                "name": "get_projects_list",
+                "description": self.get_projects_list.__doc__,
+                "ref": self.get_projects_list,
+                "args_schema": {},
+            },
+            {
+                "name": "get_work_items",
+                "description": self.get_work_items.__doc__,
+                "ref": self.get_work_items,
+                "args_schema": GetAdoWorkItemsArgs,
+            },
+            {
+                "name": "get_commits",
+                "description": self.get_commits.__doc__,
+                "ref": self.get_commits,
+                "args_schema": AdoCommitsArgs,
+            },
+            {
+                "name": "get_merge_requests",
+                "description": self.get_merge_requests.__doc__,
+                "ref": self.get_merge_requests,
+                "args_schema": AdoCommitsArgs,
+            },
+            {
+                "name": "get_pipelines_runs",
+                "description": self.get_pipelines_runs.__doc__,
+                "ref": self.get_pipelines_runs,
+                "args_schema": AdoPipelinesArgs,
+            },
+        ]
+
+    def run(self, mode: str, *args: Any, **kwargs: Any):
+        for tool in self.get_available_tools():
+            if tool["name"] == mode:
+                return tool["ref"](*args, **kwargs)
+        raise ValueError(f"Unknown mode: {mode}")
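
A dispatch sketch, assuming an `AdoAnalyseWrapper` instance named `wrapper` built as in the toolkit above. Note that `get_commits` is declared `async`, so `run("get_commits", ...)` hands back a coroutine that the caller must drive:

    import asyncio

    # synchronous tools return their summary string directly
    print(wrapper.run("get_work_items",
                      resolved_after="2024-01-01",
                      updated_after="2024-01-01",
                      created_after="2024-01-01"))

    # get_commits is async, so run() returns a coroutine here
    print(asyncio.run(wrapper.run("get_commits", since_date="2024-01-01")))
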
@@ -0,0 +1,94 @@
+from typing import List, Optional, Literal
+from pydantic import SecretStr, create_model, BaseModel, ConfigDict, Field
+
+from langchain_core.tools import BaseTool, BaseToolkit
+
+from elitea_analyse.github.github_org import GitHubGetOrgLvl
+from alita_tools.utils import get_max_toolkit_length
+from alita_tools.base.tool import BaseAction
+
+from ....tools.artifact import ArtifactWrapper
+from .api_wrapper import GitHubAnalyseWrapper
+
+from ...utils import check_schema
+
+
+name = "Analyse_Github"
+
+
+class AnalyseGithub(BaseToolkit):
+    tools: List[BaseTool] = []
+    toolkit_max_length: int = 0
+
+    @staticmethod
+    def toolkit_config_schema() -> type[BaseModel]:
+        selected_tools = {
+            x["name"]: x["args_schema"].schema()
+            for x in GitHubAnalyseWrapper.model_construct().get_available_tools()
+        }
+        AnalyseGithub.toolkit_max_length = get_max_toolkit_length(selected_tools)
+
+        return create_model(
+            "analyse_github",
+            owner=(str, Field(description="GitHub owner name",
+                json_schema_extra={"toolkit_name": True, "max_toolkit_length": AnalyseGithub.toolkit_max_length})),
+            token=(SecretStr, Field(description="GitHub Access Token", json_schema_extra={"secret": True})),
+            repos=(Optional[str],
+                Field(description="Comma-separated list of GitHub repository names, e.g. 'repo1,repo2'", default=None)),
+            artifact_bucket_path=(Optional[str],
+                Field(description="Artifact Bucket Path", default="analyse-github")),
+            selected_tools=(
+                List[Literal[tuple(selected_tools)]], Field(default=[],
+                    json_schema_extra={"args_schemas": selected_tools})
+            ),
+            __config__=ConfigDict(json_schema_extra={"metadata": {
+                "label": "Analyse_Github",
+                "icon_url": None,  # TODO: add an icon if one exists
+                "hidden": True,
+                "sections": {
+                    "auth": {
+                        "required": True,
+                        "subsections": [{"name": "Token", "fields": ["token"]}],
+                    }
+                },
+            }})
+        )
+
+    @classmethod
+    def get_toolkit(cls, client: "AlitaClient", selected_tools: list[str], **kwargs):
+        bucket_path = kwargs.get("artifact_bucket_path") or "analyse-github"
+        artifact_wrapper = ArtifactWrapper(client=client, bucket=bucket_path)
+        check_schema(artifact_wrapper)
+
+        owner = kwargs.get("owner")
+        token = kwargs.get("token")
+
+        if not owner or not token:
+            raise ValueError("GitHub owner and token must be provided.")
+
+        git = GitHubGetOrgLvl(owner=owner, token=token)
+
+        github_analyse_wrapper = GitHubAnalyseWrapper(
+            artifacts_wrapper=artifact_wrapper,
+            repos=kwargs.get("repos") or "",
+            git=git,
+        )
+
+        selected_tools = selected_tools or []
+        available_tools = github_analyse_wrapper.get_available_tools()
+
+        tools = []
+        for tool in available_tools:
+            if selected_tools and tool["name"] not in selected_tools:
+                continue
+            tools.append(
+                BaseAction(
+                    api_wrapper=github_analyse_wrapper,
+                    name=tool["name"],
+                    description=tool["description"],
+                    args_schema=tool["args_schema"],
+                )
+            )
+
+        return cls(tools=tools)
@@ -0,0 +1,166 @@
+import logging
+from typing import Optional, Any
+from langchain_core.callbacks import dispatch_custom_event
+from pydantic import BaseModel, Field
+
+from elitea_analyse.github.github_org import GitHubGetOrgLvl
+from elitea_analyse.github.main_github import (
+    extract_commits_from_multiple_repos,
+    extract_pull_requests_from_multiple_repos,
+    extract_repositories_list,
+    extract_repositories_extended_data,
+)
+
+from alita_tools.elitea_base import BaseToolApiWrapper
+from ....utils.save_dataframe import save_dataframe_to_artifact
+from ....tools.artifact import ArtifactWrapper
+from ....utils.logging import with_streamlit_logs
+
+
+logger = logging.getLogger(__name__)
+
+
+class GetGithubCommitsFromReposArgs(BaseModel):
+    since_after: str = Field(description="Date to filter commits from, in 'YYYY-MM-DD' format.")
+    repos: Optional[str] = Field(
+        description="Comma-separated list of repositories to extract commits from.",
+        default="",
+    )
+
+
+class GetGithubRepositoriesListArgs(BaseModel):
+    pushed_after: str = Field(description="Date to filter repositories by, in 'YYYY-MM-DD' format.")
+
+
+class GitHubAnalyseWrapper(BaseToolApiWrapper):
+    artifacts_wrapper: ArtifactWrapper
+    repos: str  # Comma-separated list of GitHub repository names, e.g. 'repo1,repo2'
+    git: GitHubGetOrgLvl  # GitHub client
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    def get_commits_from_repos(self, since_after: str, repos: Optional[str] = None) -> str:
+        """
+        Extracts commit data from multiple GitHub repositories since the specified date.
+        Saves the result to a CSV file.
+
+        repos: str
+            Comma-separated list of repository names to extract data from.
+        since_after: str
+            The date to start extracting commits from, in 'YYYY-MM-DD' format.
+        """
+        repos = repos or self.repos
+        df_commits = extract_commits_from_multiple_repos(repos, since_after, git=self.git)
+
+        if df_commits is None or df_commits.empty:
+            return f"No commits found for repositories: {repos} since {since_after}"
+
+        output_filename = f"commits_{repos.replace(',', '_')}.csv"
+        save_dataframe_to_artifact(self.artifacts_wrapper, df_commits, output_filename, csv_options={"index": False})
+
+        return f"GitHub commits data for {repos} saved to {output_filename}"
+
+    def get_pull_requests_from_repos(self, since_after: str, repos: Optional[str] = None) -> str:
+        """
+        Extracts pull request data from multiple GitHub repositories since the specified date.
+        Saves the result to a CSV file.
+
+        repos: str
+            Comma-separated list of repository names to extract data from.
+        since_after: str
+            The date to start extracting pull requests from, in 'YYYY-MM-DD' format.
+        """
+        repos = repos or self.repos
+        df_pull_requests = extract_pull_requests_from_multiple_repos(repos, since_after, git=self.git)
+
+        output_filename = f"pull_requests_details_{repos.replace(',', '_')}.csv"
+        save_dataframe_to_artifact(self.artifacts_wrapper, df_pull_requests, output_filename, csv_options={"index": False})
+
+        return f"GitHub pull requests data saved to {output_filename}"
+
+    def get_repositories_list(self, pushed_after: str) -> str:
+        """
+        Extracts a list of GitHub repositories that were pushed after the specified date.
+        Saves the result to a CSV file.
+
+        pushed_after: str
+            The date to filter repositories by, in 'YYYY-MM-DD' format.
+        """
+        df_repos = extract_repositories_list(pushed_after, git=self.git)
+
+        output_filename = "github_repos_list.csv"
+        save_dataframe_to_artifact(self.artifacts_wrapper, df_repos, output_filename, csv_options={"index": False})
+        dispatch_custom_event(
+            "thinking_step",
+            data={
+                "message": f"Extracted {len(df_repos)} repositories pushed after {pushed_after}.",
+                "tool_name": "github_repositories_list_extraction",
+                "toolkit": "analyse_github",
+            },
+        )
+
+        return f"GitHub repositories list saved to {output_filename}"
+
+    @with_streamlit_logs(tool_name="get_github_repositories_extended_data")
+    def get_repositories_extended_data(self, pushed_after: str) -> str:
+        """
+        Extracts extended information about GitHub repositories that were pushed after the specified date.
+        Saves the result to a CSV file.
+
+        pushed_after: str
+            The date to filter repositories by, in 'YYYY-MM-DD' format.
+        """
+        df_repos_extended = extract_repositories_extended_data(pushed_after, git=self.git)
+
+        output_filename = "github_repos_extended_info.csv"
+        save_dataframe_to_artifact(self.artifacts_wrapper, df_repos_extended, output_filename, csv_options={"index": False})
+
+        dispatch_custom_event(
+            "thinking_step",
+            data={
+                "message": (
+                    f"Extracted extended data for {len(df_repos_extended)} repositories "
+                    f"pushed after {pushed_after}."
+                ),
+                "tool_name": "github_repositories_extended_data_extraction",
+                "toolkit": "analyse_github",
+            },
+        )
+
+        return f"Extended info for the repositories you have access to saved to {output_filename}"
+
+    def get_available_tools(self):
+        """Get a list of available tools."""
+        return [
+            {
+                "name": "get_commits_from_repos",
+                "description": self.get_commits_from_repos.__doc__,
+                "args_schema": GetGithubCommitsFromReposArgs,
+                "ref": self.get_commits_from_repos,
+            },
+            {
+                "name": "get_pull_requests_from_repos",
+                "description": self.get_pull_requests_from_repos.__doc__,
+                "args_schema": GetGithubCommitsFromReposArgs,
+                "ref": self.get_pull_requests_from_repos,
+            },
+            {
+                "name": "get_repositories_list",
+                "description": self.get_repositories_list.__doc__,
+                "args_schema": GetGithubRepositoriesListArgs,
+                "ref": self.get_repositories_list,
+            },
+            {
+                "name": "get_repositories_extended_data",
+                "description": self.get_repositories_extended_data.__doc__,
+                "args_schema": GetGithubRepositoriesListArgs,
+                "ref": self.get_repositories_extended_data,
+            },
+        ]
+
+    def run(self, mode: str, *args: Any, **kwargs: Any):
+        for tool in self.get_available_tools():
+            if tool["name"] == mode:
+                return tool["ref"](*args, **kwargs)
+
+        raise ValueError(f"Unknown mode: {mode}")
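
Finally, the GitHub wrapper dispatches the same way; a short sketch assuming a configured `GitHubAnalyseWrapper` instance named `gh` and placeholder repository names:

    print(gh.run("get_repositories_list", pushed_after="2024-01-01"))
    print(gh.run("get_commits_from_repos", since_after="2024-06-01", repos="repo1,repo2"))
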