alita-sdk 0.3.205__py3-none-any.whl → 0.3.207__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. alita_sdk/runtime/clients/client.py +314 -11
  2. alita_sdk/runtime/langchain/assistant.py +22 -21
  3. alita_sdk/runtime/langchain/interfaces/llm_processor.py +1 -4
  4. alita_sdk/runtime/langchain/langraph_agent.py +6 -1
  5. alita_sdk/runtime/langchain/store_manager.py +4 -4
  6. alita_sdk/runtime/toolkits/application.py +5 -10
  7. alita_sdk/runtime/toolkits/tools.py +11 -21
  8. alita_sdk/runtime/tools/vectorstore.py +25 -11
  9. alita_sdk/runtime/utils/streamlit.py +505 -222
  10. alita_sdk/runtime/utils/toolkit_runtime.py +147 -0
  11. alita_sdk/runtime/utils/toolkit_utils.py +157 -0
  12. alita_sdk/runtime/utils/utils.py +5 -0
  13. alita_sdk/tools/__init__.py +2 -0
  14. alita_sdk/tools/ado/repos/repos_wrapper.py +20 -13
  15. alita_sdk/tools/bitbucket/api_wrapper.py +5 -5
  16. alita_sdk/tools/bitbucket/cloud_api_wrapper.py +54 -29
  17. alita_sdk/tools/elitea_base.py +9 -4
  18. alita_sdk/tools/gitlab/__init__.py +22 -10
  19. alita_sdk/tools/gitlab/api_wrapper.py +278 -253
  20. alita_sdk/tools/gitlab/tools.py +354 -376
  21. alita_sdk/tools/llm/llm_utils.py +0 -6
  22. alita_sdk/tools/memory/__init__.py +54 -10
  23. alita_sdk/tools/openapi/__init__.py +14 -3
  24. alita_sdk/tools/sharepoint/__init__.py +2 -1
  25. alita_sdk/tools/sharepoint/api_wrapper.py +11 -3
  26. alita_sdk/tools/testrail/api_wrapper.py +39 -16
  27. alita_sdk/tools/utils/content_parser.py +77 -13
  28. {alita_sdk-0.3.205.dist-info → alita_sdk-0.3.207.dist-info}/METADATA +1 -1
  29. {alita_sdk-0.3.205.dist-info → alita_sdk-0.3.207.dist-info}/RECORD +32 -40
  30. alita_sdk/community/analysis/__init__.py +0 -0
  31. alita_sdk/community/analysis/ado_analyse/__init__.py +0 -103
  32. alita_sdk/community/analysis/ado_analyse/api_wrapper.py +0 -261
  33. alita_sdk/community/analysis/github_analyse/__init__.py +0 -98
  34. alita_sdk/community/analysis/github_analyse/api_wrapper.py +0 -166
  35. alita_sdk/community/analysis/gitlab_analyse/__init__.py +0 -110
  36. alita_sdk/community/analysis/gitlab_analyse/api_wrapper.py +0 -172
  37. alita_sdk/community/analysis/jira_analyse/__init__.py +0 -141
  38. alita_sdk/community/analysis/jira_analyse/api_wrapper.py +0 -252
  39. alita_sdk/runtime/llms/alita.py +0 -259
  40. {alita_sdk-0.3.205.dist-info → alita_sdk-0.3.207.dist-info}/WHEEL +0 -0
  41. {alita_sdk-0.3.205.dist-info → alita_sdk-0.3.207.dist-info}/licenses/LICENSE +0 -0
  42. {alita_sdk-0.3.205.dist-info → alita_sdk-0.3.207.dist-info}/top_level.txt +0 -0
@@ -1,166 +0,0 @@
1
- import logging
2
- from typing import Optional, Any
3
- from langchain_core.callbacks import dispatch_custom_event
4
- from pydantic import BaseModel, Field
5
-
6
- from elitea_analyse.github.github_org import GitHubGetOrgLvl
7
- from elitea_analyse.github.main_github import (
8
- extract_commits_from_multiple_repos,
9
- extract_pull_requests_from_multiple_repos,
10
- extract_repositories_list,
11
- extract_repositories_extended_data,
12
- )
13
-
14
- from alita_sdk.tools.elitea_base import BaseToolApiWrapper
15
- from alita_sdk.runtime.utils.save_dataframe import save_dataframe_to_artifact
16
- from alita_sdk.runtime.tools.artifact import ArtifactWrapper
17
- from alita_sdk.runtime.utils.logging import with_streamlit_logs
18
-
19
-
20
- logger = logging.getLogger(__name__)
21
-
22
-
23
- class GetGithubCommitsFromReposArgs(BaseModel):
24
- since_after: str = Field( description="Date to filter commits from, in 'YYYY-MM-DD' format." )
25
- repos: Optional[str] = Field(
26
- description="Comma-separated list of repositories to extract commits from.",
27
- default="",
28
- )
29
-
30
-
31
- class GetGithubRepositoriesListArgs(BaseModel):
32
- pushed_after: str = Field( description="Date to filter repositories by, in 'YYYY-MM-DD' format." )
33
-
34
-
35
- class GitHubAnalyseWrapper(BaseToolApiWrapper):
36
- artifacts_wrapper: ArtifactWrapper
37
- repos: str # Comma-separated list of GitHub repository names e.g. 'repo1,repo2'
38
- git: GitHubGetOrgLvl # GitHub client
39
-
40
- class Config:
41
- arbitrary_types_allowed = True
42
-
43
- def get_commits_from_repos(self, since_after: str, repos: Optional[str] = None) -> str:
44
- """
45
- Extracts commit data from multiple GitHub repositories since the specified date. Saves the result to a CSV file.
46
-
47
- repos : str
48
- The string containing repositories names to extract data from, separated by commas.
49
- since_date : str
50
- The date to start extracting commits from, in 'YYYY-MM-DD' format.
51
- """
52
- repos = repos or self.repos
53
- df_commits = extract_commits_from_multiple_repos(repos, since_after, git=self.git)
54
-
55
- if df_commits is None or df_commits.empty:
56
- return f"No commits found for repositories: {repos} since {since_after}"
57
-
58
- output_filename = f"commits_{repos.replace(',', '_')}.csv"
59
- save_dataframe_to_artifact( self.artifacts_wrapper, df_commits, output_filename, {"index": False} )
60
-
61
- return f"GitHub commits data for {repos} saved to {output_filename}"
62
-
63
- def get_pull_requests_from_repos(self, since_after: str, repos: Optional[str] = None) -> str:
64
- """
65
- Extracts pull request data from multiple GitHub repositories since the specified date.
66
- Saves the result to a CSV file.
67
-
68
- repos: str
69
- The string containing repositories names to extract data from, separated by commas.
70
- since_date: str
71
- The date to start extracting pull requests from, in 'YYYY-MM-DD' format.
72
- """
73
- repos = repos or self.repos
74
- df_pull_requests = extract_pull_requests_from_multiple_repos(repos, since_after, git=self.git)
75
-
76
- output_filename = f"pull_requests_details_{repos.replace(',', '_')}.csv"
77
- save_dataframe_to_artifact( self.artifacts_wrapper, df_pull_requests, output_filename, {"index": False} )
78
-
79
- return f"GitHub pull requests data saved to {output_filename}"
80
-
81
- def get_repositories_list(self, pushed_after: str) -> str:
82
- """
83
- Extracts a list of GitHub repositories that were pushed after the specified date.
84
- Saves the result to a CSV file.
85
-
86
- pushed_after : str
87
- The date to filter repositories by, in 'YYYY-MM-DD' format.
88
- """
89
- df_repos = extract_repositories_list(pushed_after, git=self.git)
90
-
91
- output_filename = "github_repos_list.csv"
92
- save_dataframe_to_artifact( self.artifacts_wrapper, df_repos, output_filename, {"index": False} )
93
- dispatch_custom_event(
94
- "thinking_step",
95
- data={
96
- "message": f"Extracted {len(df_repos)} repositories pushed after {pushed_after}.",
97
- "tool_name": "github_repositories_list_extraction",
98
- "toolkit": "analyse_github",
99
- },
100
- )
101
-
102
- return f"GitHub repositories list saved to {output_filename}"
103
-
104
- @with_streamlit_logs(tool_name="get_github_repositories_extended_data")
105
- def get_repositories_extended_data(self, pushed_after: str) -> str:
106
- """
107
- Extracts extended information about GitHub repositories that were pushed after the specified date.
108
- Saves the result to a CSV file.
109
-
110
- pushed_after : str
111
- The date to filter repositories by, in 'YYYY-MM-DD' format.
112
- """
113
- df_repos_extended = extract_repositories_extended_data(pushed_after, git=self.git)
114
-
115
- output_filename = "github_repos_extended_info.csv"
116
- save_dataframe_to_artifact( self.artifacts_wrapper, df_repos_extended, output_filename, {"index": False} )
117
-
118
- dispatch_custom_event(
119
- "thinking_step",
120
- data={
121
- "message": (
122
- f"Extracted extended data for {len(df_repos_extended)} repositories "
123
- f"pushed after {pushed_after}."
124
- ),
125
- "tool_name": "github_repositories_extended_data_extraction",
126
- "toolkit": "analyse_github",
127
- },
128
- )
129
-
130
- return f"Extended repository info that you have access saved to {output_filename}"
131
-
132
- def get_available_tools(self):
133
- """Get a list of available tools."""
134
- return [
135
- {
136
- "name": "get_commits_from_repos",
137
- "description": self.get_commits_from_repos.__doc__,
138
- "args_schema": GetGithubCommitsFromReposArgs,
139
- "ref": self.get_commits_from_repos,
140
- },
141
- {
142
- "name": "get_pull_requests_from_repos",
143
- "description": self.get_pull_requests_from_repos.__doc__,
144
- "args_schema": GetGithubCommitsFromReposArgs,
145
- "ref": self.get_pull_requests_from_repos,
146
- },
147
- {
148
- "name": "get_repositories_list",
149
- "description": self.get_repositories_list.__doc__,
150
- "args_schema": GetGithubRepositoriesListArgs,
151
- "ref": self.get_repositories_list,
152
- },
153
- {
154
- "name": "get_repositories_extended_data",
155
- "description": self.get_repositories_extended_data.__doc__,
156
- "args_schema": GetGithubRepositoriesListArgs,
157
- "ref": self.get_repositories_extended_data,
158
- },
159
- ]
160
-
161
- def run(self, mode: str, *args: Any, **kwargs: Any):
162
- for tool in self.get_available_tools():
163
- if tool["name"] == mode:
164
- return tool["ref"](*args, **kwargs)
165
-
166
- raise ValueError(f"Unknown mode: {mode}")
@@ -1,110 +0,0 @@
1
- from typing import List, Optional, Literal
2
- from elitea_analyse.git.git_search import GitLabV4Search
3
- from pydantic import SecretStr, create_model, BaseModel, ConfigDict, Field
4
-
5
- from langchain_core.tools import BaseTool, BaseToolkit
6
-
7
- from alita_sdk.tools.utils import get_max_toolkit_length
8
- from alita_sdk.tools.base.tool import BaseAction
9
- from alita_sdk.runtime.clients.client import AlitaClient
10
- from alita_sdk.runtime.tools.artifact import ArtifactWrapper
11
- from .api_wrapper import GitLabAnalyseWrapper
12
-
13
- from ...utils import check_schema
14
-
15
-
16
- name = "Analyse_GitLab"
17
-
18
-
19
- class AnalyseGitLab(BaseToolkit):
20
- tools: List[BaseTool] = []
21
- toolkit_max_length: int = 0
22
-
23
- @staticmethod
24
- def toolkit_config_schema() -> type[BaseModel]:
25
- selected_tools = {
26
- x["name"]: x["args_schema"].schema()
27
- for x in GitLabAnalyseWrapper.model_construct().get_available_tools()
28
- }
29
- AnalyseGitLab.toolkit_max_length = get_max_toolkit_length(selected_tools)
30
-
31
- return create_model(
32
- "analyse_gitlab",
33
- url=(
34
- str,
35
- Field(
36
- description="GitLab URL (e.g., git.epam.com)",
37
- json_schema_extra={"toolkit_name": True, "max_toolkit_length": AnalyseGitLab.toolkit_max_length}
38
- )
39
- ),
40
- project_ids=(Optional[str], Field(description="GitLab project ids separated by comma", default=None)),
41
- jira_project_keys=(Optional[str],
42
- Field(description="GitLab project Jira keys separated by comma", default=None)),
43
- token=(SecretStr, Field(description="GitLab Personal Access Token", json_schema_extra={"secret": True})),
44
- default_branch_name=(Optional[str], Field(description="Default branch name", default="master")),
45
- artifact_bucket_path=(Optional[str], Field(description="Artifact Bucket Path", default="analyse-gitlab")),
46
- selected_tools=(
47
- List[Literal[tuple(selected_tools)]],
48
- Field(default=[], json_schema_extra={"args_schemas": selected_tools})
49
- ),
50
- __config__=ConfigDict(json_schema_extra={"metadata": {
51
- "label": "Analyse_GitLab",
52
- "icon_url": "gitlab-icon.svg", # if exists
53
- "hidden": True,
54
- "sections": {
55
- "auth": {
56
- "required": True,
57
- "subsections": [{"name": "Token", "fields": ["token"]}],
58
- }
59
- },
60
- }})
61
- )
62
-
63
- @classmethod
64
- def get_toolkit(cls, client: "AlitaClient", selected_tools: list[str], **kwargs):
65
- bucket_path = kwargs.get("artifact_bucket_path") or "analyse-gitlab"
66
- artifact_wrapper = ArtifactWrapper(client=client, bucket=bucket_path)
67
- check_schema(artifact_wrapper)
68
-
69
- jira_project_keys = kwargs.get("jira_project_keys") or ""
70
- project_ids = kwargs.get("project_ids") or ""
71
- url = kwargs.get("url")
72
- token = kwargs.get("token")
73
-
74
- if not url or not token:
75
- raise ValueError("GitLab URL and token are required.")
76
-
77
- gitlab_search = GitLabV4Search(
78
- url=url,
79
- default_branch_name=kwargs.get("default_branch_name", "master"),
80
- token=token,
81
- )
82
-
83
- gitlab_analyse_wrapper = GitLabAnalyseWrapper(
84
- artifacts_wrapper=artifact_wrapper,
85
- project_ids=project_ids,
86
- jira_project_keys=jira_project_keys,
87
- gitlab_search=gitlab_search,
88
- )
89
-
90
- selected_tools = selected_tools or []
91
- available_tools = gitlab_analyse_wrapper.get_available_tools()
92
-
93
- tools = []
94
- for tool in available_tools:
95
- if selected_tools:
96
- if tool["name"] not in selected_tools:
97
- continue
98
- tools.append(
99
- BaseAction(
100
- api_wrapper=gitlab_analyse_wrapper,
101
- name=tool["name"],
102
- description=tool["description"],
103
- args_schema=tool["args_schema"],
104
- )
105
- )
106
-
107
- return cls(tools=tools)
108
-
109
- def get_tools(self):
110
- return self.tools
@@ -1,172 +0,0 @@
1
- import logging
2
- from typing import Any
3
- from pydantic import BaseModel, Field
4
- from typing import Optional
5
-
6
- from elitea_analyse.git.main import (
7
- get_git_projects_list,
8
- get_git_projects_that_in_jira,
9
- get_git_commits,
10
- get_git_merge_requests,
11
- )
12
- from elitea_analyse.git.git_search import GitLabV4Search
13
-
14
-
15
- from alita_sdk.tools.elitea_base import BaseToolApiWrapper
16
- from alita_sdk.runtime.utils.save_dataframe import save_dataframe_to_artifact
17
- from alita_sdk.runtime.tools.artifact import ArtifactWrapper
18
-
19
-
20
- logger = logging.getLogger(__name__)
21
-
22
-
23
- class GitLabProjectsListArgs(BaseModel):
24
- date: str = Field(
25
- description="Filter projects by last activity date in 'YYYY-MM-DD' format."
26
- )
27
-
28
- class GitLabProjectsListInJiraArgs(BaseModel):
29
- jira_project_keys: Optional[str] = Field(description="Comma-separated Jira project keys.", default=None)
30
-
31
- class GitLabCommitsArgs(BaseModel):
32
- project_ids: Optional[str] = Field(description="GitLab project ID.", default=None)
33
- since_date:str = Field(description="Date filter in 'YYYY-MM-DD' format.")
34
-
35
-
36
- class GitLabAnalyseWrapper(BaseToolApiWrapper):
37
- artifacts_wrapper: ArtifactWrapper
38
- project_ids: str # Comma-separated list of GitLab project IDs
39
- jira_project_keys: str # Comma-separated list of Jira projects' keys
40
- gitlab_search: GitLabV4Search # GitLab search client
41
-
42
- class Config:
43
- arbitrary_types_allowed = True
44
-
45
- def get_gitlab_projects_list(self, date: str) -> str:
46
- """
47
- Get projects list that user has access to in GitLab.
48
-
49
- date: str
50
- Filter projects by last activity date.
51
- Date in 'YYYY-MM-DD' format.
52
- """
53
-
54
- df_project_list = get_git_projects_list(date, git=self.gitlab_search)
55
-
56
- save_dataframe_to_artifact(
57
- self.artifacts_wrapper, df_project_list, "gitlab_projects_info.csv", csv_options={"index": False}
58
- )
59
-
60
- return (
61
- f"You have access to {len(df_project_list)}. "
62
- f"Data has been downloaded to the bucket as 'gitlab_projects_info.csv'"
63
- )
64
-
65
- def get_gitlab_projects_that_in_jira(self, jira_project_keys: Optional[str] = None) -> str:
66
- """
67
- Find GitLab projects that correspond to Jira projects by matching names.
68
-
69
- jira_project_keys: str
70
- Comma-separated Jira project keys.
71
- """
72
- jira_project_keys = jira_project_keys or self.jira_project_keys
73
- df_projects = get_git_projects_that_in_jira(
74
- jira_project_keys, git=self.gitlab_search)
75
-
76
- if df_projects is None or df_projects.empty:
77
- return "No GitLab projects found that match the provided Jira project keys."
78
-
79
- save_dataframe_to_artifact(
80
- self.artifacts_wrapper, df_projects, "gitlab_projects_that_in_Jira.csv", csv_options={"index": False},
81
- )
82
-
83
- return (
84
- f"Found {len(df_projects)} GitLab projects that match Jira project names. "
85
- f"Data has been downloaded to the bucket as 'gitlab_projects_that_in_Jira.csv'."
86
- )
87
-
88
- def get_gitlab_commits(self, since_date: str, project_ids: Optional[str] = None) -> str:
89
- """
90
- Get commit data for specified GitLab project.
91
-
92
- project_id: str
93
- GitLab project ID.
94
- since_date: str
95
- Date filter in 'YYYY-MM-DD' format.
96
- """
97
- project_ids = project_ids or self.project_ids
98
- df_commits = get_git_commits(
99
- project_ids, since_date, git_search=self.gitlab_search
100
- )
101
-
102
- if df_commits is None or df_commits.empty:
103
- return f'There are no commits in the project {project_ids} created after {since_date}'
104
-
105
- save_dataframe_to_artifact(
106
- self.artifacts_wrapper, df_commits, f"commits_details_{project_ids}.csv", csv_options={"index": False},
107
- )
108
-
109
- return (
110
- f"Commits data for project {project_ids} has been saved. "
111
- f"Data has been downloaded to the bucket as 'commits_details_{project_ids}.csv'."
112
- )
113
-
114
- def get_gitlab_merge_requests(self, since_date: str, project_ids: Optional[str] = None) -> str:
115
- """
116
- Get merge requests for specified GitLab project.
117
-
118
- project_ids: str
119
- GitLab project ID.
120
- since_date: str
121
- Date filter in 'YYYY-MM-DD' format.
122
- """
123
- project_ids = project_ids or self.project_ids
124
- df_mrs = get_git_merge_requests(
125
- project_ids, since_date, git_search=self.gitlab_search)
126
-
127
- if df_mrs is None or df_mrs.empty:
128
- return f'There are no merge requests in the project {project_ids} created after {since_date}'
129
-
130
- save_dataframe_to_artifact(
131
- self.artifacts_wrapper, df_mrs, f"merge_requests_details_{project_ids}.csv", csv_options={"index": False},
132
- )
133
-
134
- return (
135
- f"Merge requests data for project {project_ids} has been saved. "
136
- f"Data has been downloaded to the bucket as 'merge_requests_details_{project_ids}.csv'."
137
- )
138
-
139
-
140
- def get_available_tools(self):
141
- return [
142
- {
143
- "name": "get_gitlab_projects_list",
144
- "description": self.get_gitlab_projects_list.__doc__,
145
- "args_schema": GitLabProjectsListArgs ,
146
- "ref": self.get_gitlab_projects_list
147
- },
148
- {
149
- "name": "get_gitlab_projects_that_in_jira",
150
- "description": self.get_gitlab_projects_that_in_jira.__doc__,
151
- "args_schema": GitLabProjectsListInJiraArgs,
152
- "ref": self.get_gitlab_projects_that_in_jira
153
- },
154
- {
155
- "name": "get_gitlab_commits",
156
- "description": self.get_gitlab_commits.__doc__,
157
- "args_schema": GitLabCommitsArgs,
158
- "ref": self.get_gitlab_commits
159
- },
160
- {
161
- "name": "get_gitlab_merge_requests",
162
- "description": self.get_gitlab_merge_requests.__doc__,
163
- "args_schema": GitLabCommitsArgs,
164
- "ref": self.get_gitlab_merge_requests
165
- }
166
- ]
167
-
168
- def run(self, mode: str, *args: Any, **kwargs: Any):
169
- for tool in self.get_available_tools():
170
- if tool["name"] == mode:
171
- return tool["ref"](*args, **kwargs)
172
- raise ValueError(f"Unknown mode: {mode}")
@@ -1,141 +0,0 @@
1
- import json
2
- from typing import List, Optional, Literal
3
- from pydantic import create_model, BaseModel, ConfigDict, Field
4
-
5
- from langchain_core.tools import BaseTool, BaseToolkit
6
-
7
- from elitea_analyse.jira.jira_connect import connect_to_jira
8
- from alita_sdk.tools.utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length
9
- from alita_sdk.tools.base.tool import BaseAction
10
- from alita_sdk.runtime.clients.client import AlitaClient
11
- from alita_sdk.runtime.tools.artifact import ArtifactWrapper
12
- from .api_wrapper import JiraAnalyseWrapper
13
-
14
- from ...utils import check_schema
15
-
16
- name = "Analyse_Jira"
17
-
18
- class AnalyseJira(BaseToolkit):
19
- tools: List[BaseTool] = []
20
- toolkit_max_length: int = 0
21
-
22
- @staticmethod
23
- def toolkit_config_schema() -> type[BaseModel]:
24
- selected_tools = {x['name']: x['args_schema'].schema() for x in
25
- JiraAnalyseWrapper.model_construct().get_available_tools()}
26
- AnalyseJira.toolkit_max_length = get_max_toolkit_length(selected_tools)
27
- return create_model(
28
- "analyse_jira",
29
- jira_base_url=(str, Field(
30
- description="Jira URL",
31
- json_schema_extra={
32
- 'toolkit_name': True,
33
- 'max_toolkit_length': AnalyseJira.toolkit_max_length
34
- })
35
- ),
36
- jira_cloud=(bool, Field(description="Hosting Option")),
37
- jira_username=(str, Field(description="Jira Username")),
38
- jira_api_key=(Optional[str], Field(description="API key", json_schema_extra={'secret': True}, default="")),
39
- jira_token=(Optional[str], Field(description="Jira token", json_schema_extra={'secret': True}, default="")),
40
- # TODO: Add these fields to the schema as custom fields comma-separated if required
41
- project_keys=(Optional[str], Field(description="Jira project keys separated by comma", default=None)),
42
- team_field=(Optional[str], Field(description="Jira field used as identifier for team", default="")),
43
- environment_field=(Optional[str], Field(description="Jira field used as identifier for environment", default="")),
44
- defects_name=(Optional[str], Field(description="Jira defects type", default="")),
45
- closed_status=(Optional[str], Field(description="Jira closed status", default="")),
46
- jira_verify_ssl=(bool, Field(description="Verify SSL")),
47
- jira_custom_fields=(Optional[dict], Field(description="Additional fields, split by comma", default={})),
48
- artifact_bucket_path=(Optional[str], Field(description="Artifact Bucket Path", default="")),
49
- selected_tools=(List[Literal[tuple(selected_tools)]],
50
- Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
51
- __config__=ConfigDict(json_schema_extra={'metadata':
52
- {
53
- "label": "Analyse_Jira",
54
- "icon_url": "jira-icon.svg",
55
- "hidden": False,
56
- "sections": {
57
- "auth": {
58
- "required": True,
59
- "subsections": [
60
- {
61
- "name": "Api key",
62
- "fields": ["jira_api_key"]
63
- },
64
- {
65
- "name": "Token",
66
- "fields": ["jira_token"]
67
- }
68
- ]
69
- }
70
- }
71
- }
72
- })
73
- )
74
-
75
- @classmethod
76
- def get_toolkit(cls, client: "AlitaClient", selected_tools: list[str], **kwargs):
77
- if selected_tools is None:
78
- selected_tools = []
79
-
80
- bucket_path = kwargs.get('artifact_bucket_path') or 'analyse-jira'
81
- artifact_wrapper = ArtifactWrapper(
82
- client=client, bucket=bucket_path
83
- )
84
- check_schema(artifact_wrapper)
85
-
86
- project_keys = kwargs.get('project_keys') or ''
87
-
88
- jira_base_url = kwargs.get('jira_base_url')
89
- jira_verify_ssl = kwargs.get('jira_verify_ssl')
90
- jira_username = kwargs.get('jira_username')
91
- jira_token = kwargs.get('jira_token')
92
- jira_api_key = kwargs.get('jira_api_key')
93
-
94
- jira_custom_fields = kwargs.get('jira_custom_fields', {})
95
- jira_custom_fields['team'] = kwargs.get('team_field', '')
96
- jira_custom_fields['environment'] = kwargs.get('environment_field', '')
97
- closed_status = kwargs.get('closed_status', '')
98
- defects_name = kwargs.get('defects_name', '')
99
-
100
- jira_credentials = {
101
- "username": jira_username,
102
- "base_url": jira_base_url,
103
- "token": jira_token,
104
- "api_key": jira_api_key,
105
- "verify_ssl": jira_verify_ssl
106
- }
107
-
108
- jira = connect_to_jira(credentials=jira_credentials)
109
- if not jira:
110
- raise ValueError(
111
- "Failed to connect to Jira. Please check your credentials."
112
- )
113
-
114
- api_wrapper = JiraAnalyseWrapper(
115
- artifacts_wrapper=artifact_wrapper,
116
- jira=jira,
117
- project_keys=project_keys,
118
- closed_status=closed_status,
119
- defects_name=defects_name,
120
- custom_fields=jira_custom_fields,
121
- )
122
-
123
- tools = []
124
- available_tools = api_wrapper.get_available_tools()
125
- for tool in available_tools:
126
- if selected_tools:
127
- if tool["name"] not in selected_tools:
128
- continue
129
- tools.append(
130
- BaseAction(
131
- api_wrapper=api_wrapper,
132
- name=tool["name"],
133
- description=tool["description"],
134
- args_schema=tool["args_schema"],
135
- )
136
- )
137
-
138
- return cls(tools=tools)
139
-
140
- def get_tools(self):
141
- return self.tools