alita-sdk 0.3.130__py3-none-any.whl → 0.3.131__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,107 @@
+from typing import List, Optional, Literal
+from elitea_analyse.git.git_search import GitLabV4Search
+from pydantic import SecretStr, create_model, BaseModel, ConfigDict, Field
+
+from langchain_core.tools import BaseTool, BaseToolkit
+
+from alita_tools.utils import get_max_toolkit_length
+from alita_tools.base.tool import BaseAction
+
+from ....tools.artifact import ArtifactWrapper
+from .api_wrapper import GitLabAnalyseWrapper
+
+from ...utils import check_schema
+
+
+name = "Analyse_GitLab"
+
+
+class AnalyseGitLab(BaseToolkit):
+    tools: List[BaseTool] = []
+    toolkit_max_length: int = 0
+
+    @staticmethod
+    def toolkit_config_schema() -> type[BaseModel]:
+        selected_tools = {
+            x["name"]: x["args_schema"].schema()
+            for x in GitLabAnalyseWrapper.model_construct().get_available_tools()
+        }
+        AnalyseGitLab.toolkit_max_length = get_max_toolkit_length(selected_tools)
+
+        return create_model(
+            "analyse_gitlab",
+            url=(
+                str,
+                Field(
+                    description="GitLab URL (e.g., git.epam.com)",
+                    json_schema_extra={"toolkit_name": True, "max_toolkit_length": AnalyseGitLab.toolkit_max_length}
+                )
+            ),
+            project_ids=(Optional[str], Field(description="GitLab project ids separated by comma", default=None)),
+            jira_project_keys=(Optional[str],
+                               Field(description="GitLab project Jira keys separated by comma", default=None)),
+            token=(SecretStr, Field(description="GitLab Personal Access Token", json_schema_extra={"secret": True})),
+            default_branch_name=(Optional[str], Field(description="Default branch name", default="master")),
+            artifact_bucket_path=(Optional[str], Field(description="Artifact Bucket Path", default="analyse-gitlab")),
+            selected_tools=(
+                List[Literal[tuple(selected_tools)]],
+                Field(default=[], json_schema_extra={"args_schemas": selected_tools})
+            ),
+            __config__=ConfigDict(json_schema_extra={"metadata": {
+                "label": "Analyse_GitLab",
+                "icon_url": "gitlab-icon.svg",  # if exists
+                "hidden": True,
+                "sections": {
+                    "auth": {
+                        "required": True,
+                        "subsections": [{"name": "Token", "fields": ["token"]}],
+                    }
+                },
+            }})
+        )
+
+    @classmethod
+    def get_toolkit(cls, client: "AlitaClient", selected_tools: list[str], **kwargs):
+        bucket_path = kwargs.get("artifact_bucket_path") or "analyse-gitlab"
+        artifact_wrapper = ArtifactWrapper(client=client, bucket=bucket_path)
+        check_schema(artifact_wrapper)
+
+        jira_project_keys = kwargs.get("jira_project_keys") or ""
+        project_ids = kwargs.get("project_ids") or ""
+        url = kwargs.get("url")
+        token = kwargs.get("token")
+
+        if not url or not token:
+            raise ValueError("GitLab URL and token are required.")
+
+        gitlab_search = GitLabV4Search(
+            url=url,
+            default_branch_name=kwargs.get("default_branch_name", "master"),
+            token=token.get_secret_value(),
+        )
+
+        gitlab_analyse_wrapper = GitLabAnalyseWrapper(
+            artifacts_wrapper=artifact_wrapper,
+            project_ids=project_ids,
+            jira_project_keys=jira_project_keys,
+            gitlab_search=gitlab_search,
+        )
+
+        selected_tools = selected_tools or []
+        available_tools = gitlab_analyse_wrapper.get_available_tools()
+
+        tools = []
+        for tool in available_tools:
+            if selected_tools:
+                if tool["name"] not in selected_tools:
+                    continue
+            tools.append(
+                BaseAction(
+                    api_wrapper=gitlab_analyse_wrapper,
+                    name=tool["name"],
+                    description=tool["description"],
+                    args_schema=tool["args_schema"],
+                )
+            )
+
+        return cls(tools=tools)
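For orientation, a minimal wiring sketch for the new toolkit (not part of the diff). The `AlitaClient` arguments mirror those used in the bundled tests further below; the URL, token, and project IDs are placeholder values.

```python
# A minimal sketch, assuming AlitaClient accepts the constructor arguments
# used in tests/test_ado_analysis.py below. All credential values are placeholders.
from pydantic import SecretStr

from alita_sdk.clients.client import AlitaClient
from alita_sdk.community.analysis.gitlab_analyse import AnalyseGitLab

client = AlitaClient(
    base_url="https://example.alita.host",  # placeholder deployment URL
    project_id=1,
    auth_token="<api-key>",
)

toolkit = AnalyseGitLab.get_toolkit(
    client=client,
    selected_tools=[],                      # an empty list keeps all available tools
    url="git.example.com",
    token=SecretStr("<gitlab-pat>"),
    project_ids="123,456",
    default_branch_name="main",
)
print([tool.name for tool in toolkit.tools])
```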
@@ -0,0 +1,173 @@
+import logging
+from typing import Any
+from pydantic import BaseModel, Field
+from typing import Optional
+
+from elitea_analyse.git.main import (
+    get_git_projects_list,
+    get_git_projects_that_in_jira,
+    get_git_commits,
+    get_git_merge_requests,
+)
+from elitea_analyse.git.git_search import GitLabV4Search
+
+
+from alita_tools.elitea_base import BaseToolApiWrapper
+from ....utils.save_dataframe import save_dataframe_to_artifact
+from ....tools.artifact import ArtifactWrapper
+
+
+logger = logging.getLogger(__name__)
+
+
+class GitLabProjectsListArgs(BaseModel):
+    date: str = Field(
+        description="Filter projects by last activity date in 'YYYY-MM-DD' format."
+    )
+
+class GitLabProjectsListInJiraArgs(BaseModel):
+    jira_project_keys: Optional[str] = Field(description="Comma-separated Jira project keys.", default=None)
+
+class GitLabCommitsArgs(BaseModel):
+    project_ids: Optional[str] = Field(description="Comma-separated GitLab project IDs.", default=None)
+    since_date: str = Field(description="Date filter in 'YYYY-MM-DD' format.")
+
+
+class GitLabAnalyseWrapper(BaseToolApiWrapper):
+    artifacts_wrapper: ArtifactWrapper
+    project_ids: str  # Comma-separated list of GitLab project IDs
+    jira_project_keys: str  # Comma-separated list of Jira projects' keys
+    gitlab_search: GitLabV4Search  # GitLab search client
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    def get_gitlab_projects_list(self, date: str) -> str:
+        """
+        Get the list of projects the user has access to in GitLab.
+
+        date: str
+            Filter projects by last activity date.
+            Date in 'YYYY-MM-DD' format.
+        """
+
+        df_project_list = get_git_projects_list(date, git=self.gitlab_search)
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper, df_project_list, "gitlab_projects_info.csv", csv_options={"index": False}
+        )
+
+        return (
+            f"You have access to {len(df_project_list)} projects. "
+            f"Data has been downloaded to the bucket as 'gitlab_projects_info.csv'."
+        )
+
+    def get_gitlab_projects_that_in_jira(self, jira_project_keys: Optional[str] = None) -> str:
+        """
+        Find GitLab projects that correspond to Jira projects by matching names.
+
+        jira_project_keys: str
+            Comma-separated Jira project keys.
+        """
+        jira_project_keys = jira_project_keys or self.jira_project_keys
+        df_projects = get_git_projects_that_in_jira(
+            jira_project_keys, git=self.gitlab_search)
+
+        if df_projects is None or df_projects.empty:
+            return "No GitLab projects found that match the provided Jira project keys."
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper, df_projects, "gitlab_projects_that_in_Jira.csv", csv_options={"index": False},
+        )
+
+        return (
+            f"Found {len(df_projects)} GitLab projects that match Jira project names. "
+            f"Data has been downloaded to the bucket as 'gitlab_projects_that_in_Jira.csv'."
+        )
+
+    def get_gitlab_commits(self, project_ids: Optional[str], since_date: str,
+                           ) -> str:
+        """
+        Get commit data for the specified GitLab projects.
+
+        project_ids: str
+            Comma-separated GitLab project IDs.
+        since_date: str
+            Date filter in 'YYYY-MM-DD' format.
+        """
+        project_ids = project_ids or self.project_ids
+        df_commits = get_git_commits(
+            project_ids, since_date, git_search=self.gitlab_search
+        )
+
+        if df_commits is None or df_commits.empty:
+            return f"There are no commits in the project {project_ids} created after {since_date}"
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper, df_commits, f"commits_details_{project_ids}.csv", csv_options={"index": False},
+        )
+
+        return (
+            f"Commits data for project {project_ids} has been saved. "
+            f"Data has been downloaded to the bucket as 'commits_details_{project_ids}.csv'."
+        )
+
+    def get_gitlab_merge_requests(self, project_ids: Optional[str], since_date: str) -> str:
+        """
+        Get merge requests for the specified GitLab projects.
+
+        project_ids: str
+            Comma-separated GitLab project IDs.
+        since_date: str
+            Date filter in 'YYYY-MM-DD' format.
+        """
+        project_ids = project_ids or self.project_ids
+        df_mrs = get_git_merge_requests(
+            project_ids, since_date, git_search=self.gitlab_search)
+
+        if df_mrs is None or df_mrs.empty:
+            return f"There are no merge requests in the project {project_ids} created after {since_date}"
+
+        save_dataframe_to_artifact(
+            self.artifacts_wrapper, df_mrs, f"merge_requests_details_{project_ids}.csv", csv_options={"index": False},
+        )
+
+        return (
+            f"Merge requests data for project {project_ids} has been saved. "
+            f"Data has been downloaded to the bucket as 'merge_requests_details_{project_ids}.csv'."
+        )
+
+
+    def get_available_tools(self):
+        return [
+            {
+                "name": "get_gitlab_projects_list",
+                "description": self.get_gitlab_projects_list.__doc__,
+                "args_schema": GitLabProjectsListArgs,
+                "ref": self.get_gitlab_projects_list
+            },
+            {
+                "name": "get_gitlab_projects_that_in_jira",
+                "description": self.get_gitlab_projects_that_in_jira.__doc__,
+                "args_schema": GitLabProjectsListInJiraArgs,
+                "ref": self.get_gitlab_projects_that_in_jira
+            },
+            {
+                "name": "get_gitlab_commits",
+                "description": self.get_gitlab_commits.__doc__,
+                "args_schema": GitLabCommitsArgs,
+                "ref": self.get_gitlab_commits
+            },
+            {
+                "name": "get_gitlab_merge_requests",
+                "description": self.get_gitlab_merge_requests.__doc__,
+                "args_schema": GitLabCommitsArgs,
+                "ref": self.get_gitlab_merge_requests
+            }
+        ]
+
+    def run(self, mode: str, *args: Any, **kwargs: Any):
+        for tool in self.get_available_tools():
+            if tool["name"] == mode:
+                return tool["ref"](*args, **kwargs)
+        raise ValueError(f"Unknown mode: {mode}")
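Since `run` dispatches purely on the tool name registered in `get_available_tools()`, the wrapper can also be driven directly; a hedged call sketch with placeholder values:

```python
# Call sketch continuing the toolkit example above; `gitlab_analyse_wrapper`
# is the wrapper built in get_toolkit(). Project ID and date are placeholders.
summary = gitlab_analyse_wrapper.run(
    "get_gitlab_commits",
    project_ids="123",       # pass None to fall back to wrapper.project_ids
    since_date="2024-01-01",
)
print(summary)               # "Commits data for project 123 has been saved. ..."
```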
@@ -33,13 +33,15 @@ class GetJiraIssuesArgs(BaseModel):
         description="One or more project keys separated with comma.", default=''
     )
     closed_issues_based_on: int = Field(
-        description="Define whether issues can be thought as closed based on their status (1) or not empty resolved date (2)."
+        description=("Define whether issues are considered closed based on their status (1) "
+                     "or a non-empty resolved date (2).")
     )
     resolved_after: str = Field(description="Resolved after date (i.e. 2023-01-01).")
     updated_after: str = Field(description="Updated after date (i.e. 2023-01-01).")
     created_after: str = Field(description="Created after date (i.e. 2023-01-01).")
     add_filter: Optional[str] = Field(
-        description="Additional filter for Jira issues in JQL format like 'customfield_10000 = 'value' AND customfield_10001 = 'value'"
+        description=("Additional filter for Jira issues in JQL format like "
+                     "'customfield_10000 = 'value' AND customfield_10001 = 'value'")
     )
 
 
@@ -111,7 +113,8 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
         created_after: str
             created after date (i.e. 2023-01-01)
         add_filter: str
-            additional filter for Jira issues in JQL format like "customfield_10000 = 'value' AND customfield_10001 = 'value'"
+            additional filter for Jira issues in JQL format
+            like "customfield_10000 = 'value' AND customfield_10001 = 'value'"
         project_keys: str
             one or more project keys separated with comma
         """
@@ -124,7 +127,8 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
             or closed_issues_based_on == 2
         ):
             return (
-                "ERROR: Check input parameters closed_issues_based_on and closed_status"
+                f"ERROR: Check input parameters: closed_issues_based_on ({closed_issues_based_on}) "
+                f"or closed_status ({self.closed_status}) is not in the Jira statuses list."
             )
 
         project_keys = project_keys or self.project_keys
@@ -244,5 +248,5 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
         for tool in self.get_available_tools():
             if tool["name"] == mode:
                 return tool["ref"](*args, **kwargs)
-            else:
-                raise ValueError(f"Unknown mode: {mode}")
+
+        raise ValueError(f"Unknown mode: {mode}")
@@ -676,7 +676,7 @@ def merge_subgraphs(parent_yaml: str, registry: Dict[str, Dict[str, Any]]) -> Di
     regular_nodes = []
 
     for node in parent_def.get('nodes', []):
-        if node.get('type') == 'subgraph':
+        if node.get('type') == 'subgraph' or node.get('type') == 'pipeline':
            subgraph_nodes.append(node)
         else:
             regular_nodes.append(node)
@@ -846,7 +846,7 @@ def detect_and_flatten_subgraphs(yaml_schema: str) -> tuple[str, list]:
     schema_dict = yaml.safe_load(yaml_schema)
     subgraph_nodes = [
         node for node in schema_dict.get('nodes', [])
-        if node.get('type') == 'subgraph'
+        if node.get('type') == 'subgraph' or node.get('type') == 'pipeline'
     ]
 
     if not subgraph_nodes:
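Both hunks extend the same predicate; for reference, a membership test is an equivalent, slightly tighter formulation (the release ships the explicit `or` chain shown above):

```python
# Equivalent form of the predicate added in both hunks; shown for
# illustration only, not what the released code uses.
if node.get('type') in ('subgraph', 'pipeline'):
    subgraph_nodes.append(node)
else:
    regular_nodes.append(node)
```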
@@ -11,6 +11,10 @@ from .subgraph import SubgraphToolkit
 from .vectorstore import VectorStoreToolkit
 ## Community tools and toolkits
 from ..community.analysis.jira_analyse import AnalyseJira
+from ..community.analysis.ado_analyse import AnalyseAdo
+from ..community.analysis.gitlab_analyse import AnalyseGitLab
+from ..community.analysis.github_analyse import AnalyseGithub
+
 from ..community.browseruse import BrowserUseToolkit
 
 from ..tools.mcp_server_tool import McpServerTool
@@ -26,12 +30,15 @@ def get_toolkits():
         ArtifactToolkit.toolkit_config_schema(),
         VectorStoreToolkit.toolkit_config_schema()
     ]
-
-    community_toolkits = [
+
+    community_toolkits = [
         AnalyseJira.toolkit_config_schema(),
-        BrowserUseToolkit.toolkit_config_schema()
+        AnalyseAdo.toolkit_config_schema(),
+        AnalyseGitLab.toolkit_config_schema(),
+        AnalyseGithub.toolkit_config_schema(),
+        BrowserUseToolkit.toolkit_config_schema(),
     ]
-
+
     return core_toolkits + community_toolkits + alita_toolkits()
 
 
@@ -81,6 +88,18 @@ def get_tools(tools_list: list, alita_client, llm) -> list:
             tools.extend(AnalyseJira.get_toolkit(
                 client=alita_client,
                 **tool['settings']).get_tools())
+        if tool['type'] == 'analyse_ado':
+            tools.extend(AnalyseAdo.get_toolkit(
+                client=alita_client,
+                **tool['settings']).get_tools())
+        if tool['type'] == 'analyse_gitlab':
+            tools.extend(AnalyseGitLab.get_toolkit(
+                client=alita_client,
+                **tool['settings']).get_tools())
+        if tool['type'] == 'analyse_github':
+            tools.extend(AnalyseGithub.get_toolkit(
+                client=alita_client,
+                **tool['settings']).get_tools())
         if tool['type'] == 'browser_use':
             tools.extend(BrowserUseToolkit.get_toolkit(
                 client=alita_client,
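The dispatcher keys on `tool['type']`, so the new toolkits are enabled purely through configuration. A hypothetical `tools_list` entry for the `analyse_gitlab` branch above; the settings keys mirror `toolkit_config_schema()`, while `alita_client` and `llm` come from the host application:

```python
# Hypothetical configuration sketch; all values are placeholders, and passing
# the token as SecretStr is an assumption based on get_toolkit() calling
# token.get_secret_value().
from pydantic import SecretStr

tools = get_tools(
    [{
        "type": "analyse_gitlab",
        "settings": {
            "url": "git.example.com",
            "token": SecretStr("<gitlab-pat>"),
            "project_ids": "123,456",
        },
    }],
    alita_client,
    llm,
)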
@@ -0,0 +1,47 @@
+import logging
+
+from io import StringIO
+from typing import Any, Dict, Optional
+
+from langchain_core.tools import ToolException
+import pandas as pd
+
+from ..tools.artifact import ArtifactWrapper
+
+logger = logging.getLogger(__name__)
+
+
+def save_dataframe_to_artifact(
+    artifacts_wrapper: ArtifactWrapper,
+    df: pd.DataFrame,
+    target_file: str,
+    csv_options: Optional[Dict[str, Any]] = None,
+):
+    """
+    Save a pandas DataFrame as a CSV file in the artifact repository using the ArtifactWrapper.
+
+    Args:
+        df (pd.DataFrame): The DataFrame to save.
+        target_file (str): The target file name in the storage (e.g., "file.csv").
+        csv_options: Dictionary of options to pass to DataFrame.to_csv().
+
+    Returns:
+        ToolException: Returned (rather than raised) if saving to the
+            artifact repository fails.
+    """
+
+    csv_options = csv_options or {}
+
+    # Serialize the DataFrame to an in-memory CSV string via StringIO
+    try:
+        buffer = StringIO()
+        df.to_csv(buffer, **csv_options)
+        artifacts_wrapper.create_file(target_file, buffer.getvalue())
+        logger.info(
+            f"Successfully saved dataframe to {target_file} in bucket {artifacts_wrapper.bucket}"
+        )
+    except Exception as e:
+        logger.exception("Failed to save DataFrame to artifact repository")
+        return ToolException(
+            f"Failed to save DataFrame to artifact repository: {str(e)}"
+        )
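A minimal usage sketch for the helper (not part of the diff); it assumes an already-constructed `ArtifactWrapper` instance such as the ones built elsewhere in this release:

```python
# Usage sketch under the assumption that `artifacts_wrapper` already exists;
# the file name and data are placeholders.
import pandas as pd

df = pd.DataFrame({"project": ["alpha", "beta"], "commits": [12, 7]})
err = save_dataframe_to_artifact(
    artifacts_wrapper,
    df,
    "projects_summary.csv",
    csv_options={"index": False},
)
if err is not None:   # a ToolException is returned on failure, not raised
    print(err)
```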
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: alita_sdk
-Version: 0.3.130
+Version: 0.3.131
 Summary: SDK for building langchain agents using resources from Alita
 Author-email: Artem Rozumenko <artyom.rozumenko@gmail.com>, Mikalai Biazruchka <mikalai_biazruchka@epam.com>, Roman Mitusov <roman_mitusov@epam.com>, Ivan Krakhmaliuk <lifedjik@gmail.com>
 Project-URL: Homepage, https://projectalita.ai
@@ -10,8 +10,14 @@ alita_sdk/clients/prompt.py,sha256=li1RG9eBwgNK_Qf0qUaZ8QNTmsncFrAL2pv3kbxZRZg,1
 alita_sdk/community/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 alita_sdk/community/utils.py,sha256=lvuCJaNqVPHOORJV6kIPcXJcdprVW_TJvERtYAEgpjM,249
 alita_sdk/community/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+alita_sdk/community/analysis/ado_analyse/__init__.py,sha256=JU-WsNdLp6GyhgftwW0COE37bAH3mciOFYYw58FawwQ,4056
+alita_sdk/community/analysis/ado_analyse/api_wrapper.py,sha256=qLkpm4XPmhuDC0nJwXAaDOcuI3XURVjYLufpKZ9NJnY,8649
+alita_sdk/community/analysis/github_analyse/__init__.py,sha256=QHVuYoTw6r89smShN3Iurhz_XLicPk1G8s2HeOMEk1s,3550
+alita_sdk/community/analysis/github_analyse/api_wrapper.py,sha256=pDZlpufTWSCtD0m-TI9aIgctFe62uJ7diX3LTsPdWjY,6836
+alita_sdk/community/analysis/gitlab_analyse/__init__.py,sha256=J_HwgZGfiQPxEqFD0IOulgettTI_fvpuqYdR_ciKRFw,4131
+alita_sdk/community/analysis/gitlab_analyse/api_wrapper.py,sha256=GDP_cW8Faa695357M9YXPsQ12WBP33JZQy5CGcCAS1A,6343
 alita_sdk/community/analysis/jira_analyse/__init__.py,sha256=Rm-HKEi_HIxrgHdq9mZ-XzxMKLXm8-81eJwJT2lar-c,5945
-alita_sdk/community/analysis/jira_analyse/api_wrapper.py,sha256=JqGSxg_3x0ErzII31UZkY3V7jo9i8Gb5d_pW7lPIOSA,9522
+alita_sdk/community/analysis/jira_analyse/api_wrapper.py,sha256=naEgBSdwNonNleUtHCb1UOkWiYdM64ZJ9dfsyszmeX8,9668
 alita_sdk/community/browseruse/__init__.py,sha256=uAxPZEX7ihpt8HtcGDFrzTNv9WcklT1wG1ItTwUO8y4,3601
 alita_sdk/community/browseruse/api_wrapper.py,sha256=Y05NKWfTROPmBxe8ZFIELSGBX5v3RTNP30OTO2Tj8uI,10838
 alita_sdk/langchain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -19,7 +25,7 @@ alita_sdk/langchain/assistant.py,sha256=J_xhwbNl934BgDKSpAMC9a1u6v03DZQcTYaamCzt
 alita_sdk/langchain/chat_message_template.py,sha256=kPz8W2BG6IMyITFDA5oeb5BxVRkHEVZhuiGl4MBZKdc,2176
 alita_sdk/langchain/constants.py,sha256=eHVJ_beJNTf1WJo4yq7KMK64fxsRvs3lKc34QCXSbpk,3319
 alita_sdk/langchain/indexer.py,sha256=0ENHy5EOhThnAiYFc7QAsaTNp9rr8hDV_hTK8ahbatk,37592
-alita_sdk/langchain/langraph_agent.py,sha256=mPoyC2fJWf8F29lEZfhjh6_Cx_TQNXqmikBy6zQxqVI,39849
+alita_sdk/langchain/langraph_agent.py,sha256=BkrbYMy4BPAvy9uANH3s9ffBzaGewKFK97evN90L5kY,39917
 alita_sdk/langchain/mixedAgentParser.py,sha256=M256lvtsL3YtYflBCEp-rWKrKtcY1dJIyRGVv7KW9ME,2611
 alita_sdk/langchain/mixedAgentRenderes.py,sha256=asBtKqm88QhZRILditjYICwFVKF5KfO38hu2O-WrSWE,5964
 alita_sdk/langchain/utils.py,sha256=Npferkn10dvdksnKzLJLBI5bNGQyVWTBwqp3vQtUqmY,6631
@@ -69,7 +75,7 @@ alita_sdk/toolkits/artifact.py,sha256=7zb17vhJ3CigeTqvzQ4VNBsU5UOCJqAwz7fOJGMYqX
 alita_sdk/toolkits/datasource.py,sha256=v3FQu8Gmvq7gAGAnFEbA8qofyUhh98rxgIjY6GHBfyI,2494
 alita_sdk/toolkits/prompt.py,sha256=WIpTkkVYWqIqOWR_LlSWz3ug8uO9tm5jJ7aZYdiGRn0,1192
 alita_sdk/toolkits/subgraph.py,sha256=ZYqI4yVLbEPAjCR8dpXbjbL2ipX598Hk3fL6AgaqFD4,1758
-alita_sdk/toolkits/tools.py,sha256=VU4KG05BCW7keiyrORrRdU-VYkp2unU_L1_ZUWM0D4I,5936
+alita_sdk/toolkits/tools.py,sha256=WYR__mieQ-9ouL7ofI8IyRLllnyYbh0N1S6YX9IZNlU,6790
 alita_sdk/toolkits/vectorstore.py,sha256=di08-CRl0KJ9xSZ8_24VVnPZy58iLqHtXW8vuF29P64,2893
 alita_sdk/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 alita_sdk/tools/agent.py,sha256=m98QxOHwnCRTT9j18Olbb5UPS8-ZGeQaGiUyZJSyFck,3162
@@ -92,12 +98,16 @@ alita_sdk/utils/AlitaCallback.py,sha256=cvpDhR4QLVCNQci6CO6TEUrUVDZU9_CRSwzcHGm3
 alita_sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 alita_sdk/utils/evaluate.py,sha256=iM1P8gzBLHTuSCe85_Ng_h30m52hFuGuhNXJ7kB1tgI,1872
 alita_sdk/utils/logging.py,sha256=hBE3qAzmcLMdamMp2YRXwOOK9P4lmNaNhM76kntVljs,3124
+alita_sdk/utils/save_dataframe.py,sha256=6aHFr28Ssw2jmsnrCTVfZIE2MCpVkG6GV58gzMN-Eao,1477
 alita_sdk/utils/streamlit.py,sha256=zp8owZwHI3HZplhcExJf6R3-APtWx-z6s5jznT2hY_k,29124
 alita_sdk/utils/utils.py,sha256=dM8whOJAuFJFe19qJ69-FLzrUp6d2G-G6L7d4ss2XqM,346
-alita_sdk-0.3.130.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+alita_sdk-0.3.131.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-tests/test_jira_analysis.py,sha256=I0cErH5R_dHVyutpXrM1QEo7jfBuKWTmDQvJBPjx18I,3281
-alita_sdk-0.3.130.dist-info/METADATA,sha256=1qKeRMLCtu2rapTuSnVRysP4tNq4hGU7L4ILieHI-PU,7076
-alita_sdk-0.3.130.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-alita_sdk-0.3.130.dist-info/top_level.txt,sha256=SWRhxB7Et3cOy3RkE5hR7OIRnHoo3K8EXzoiNlkfOmc,25
-alita_sdk-0.3.130.dist-info/RECORD,,
+tests/test_ado_analysis.py,sha256=wsxB4B2Ycxoiykthh6YbPQ9hqsDbPFie8D9ZK1i_6kg,3311
+tests/test_github_analysis.py,sha256=ulR4CEGmiMRPydJuX7aQcglzvhC7kFOAtZRLLBB9F_M,3148
+tests/test_gitlab_analysis.py,sha256=J7Y2mNi5Sj8-rH2PMRmVbT3uwZ17YeR9pcs0MDIyNW4,3352
+tests/test_jira_analysis.py,sha256=6F3Elikt02L28N6sS_AKDy9lgqgD81_hr979NcdZeg4,3359
+alita_sdk-0.3.131.dist-info/METADATA,sha256=u6rbcQEd9_oXtzErr6KKmk6ZUxEq9XoUmH6Y5Utfjzk,7076
+alita_sdk-0.3.131.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+alita_sdk-0.3.131.dist-info/top_level.txt,sha256=SWRhxB7Et3cOy3RkE5hR7OIRnHoo3K8EXzoiNlkfOmc,25
+alita_sdk-0.3.131.dist-info/RECORD,,
@@ -0,0 +1,99 @@
+import os
+import pytest
+from dotenv import load_dotenv
+
+from elitea_analyse.ado.azure_search import AzureSearch
+
+from ..alita_sdk.community.analysis.ado_analyse.api_wrapper import (
+    GetAdoWorkItemsArgs, AdoCommitsArgs, AdoAnalyseWrapper)
+
+from ..alita_sdk.clients.client import AlitaClient
+from ..alita_sdk.tools.artifact import ArtifactWrapper
+from ..alita_sdk.community.utils import check_schema
+
+# Load environment variables from .env file
+load_dotenv()
+
+
+@pytest.fixture
+def ado_api_wrapper():
+    base_url = os.getenv("DEPLOYMENT_URL")
+    project_id = os.getenv("PROJECT_ID")
+    api_key = os.getenv("API_KEY")
+
+    if not base_url or not project_id or not api_key:
+        raise ValueError("Environment variables DEPLOYMENT_URL, PROJECT_ID, and API_KEY must be set.")
+
+    client = AlitaClient(
+        base_url=base_url,
+        project_id=int(project_id),
+        auth_token=api_key,
+    )
+
+    artifacts_wrapper = ArtifactWrapper(
+        client=client, bucket=os.getenv("ARTIFACT_BUCKET_PATH", "analyse-ado")
+    )
+    check_schema(artifacts_wrapper)
+    ado_search = AzureSearch(
+        organization=os.getenv("ADO_ORGANIZATION", "john.doe@epam.com"),
+        user=os.getenv("ADO_USER", "ado_user"),
+        token=os.getenv("ADO_TOKEN", "1111"),
+    )
+
+    ado_wrapper = AdoAnalyseWrapper(
+        artifacts_wrapper=artifacts_wrapper,
+        project_keys=os.getenv("ADO_PROJECTS", "project1,project2"),
+        default_branch_name=os.getenv("ADO_DEFAULT_BRANCH", "main"),
+        area=os.getenv("ADO_AREA", ""),
+        ado_search=ado_search,
+    )
+    check_schema(ado_wrapper)
+
+    return ado_wrapper
+
+def test_get_projects_list(ado_api_wrapper):
+    result = ado_api_wrapper.get_projects_list()
+    assert isinstance(result, str)
+    assert "You have access to" in result
+
+def test_get_work_items(ado_api_wrapper):
+    args = GetAdoWorkItemsArgs(
+        resolved_after="2023-01-01",
+        updated_after="2023-01-01",
+        created_after="2023-01-01",
+    )
+    result = ado_api_wrapper.get_work_items(
+        resolved_after=args.resolved_after,
+        updated_after=args.updated_after,
+        created_after=args.created_after,
+        area=args.area,
+        project_keys=args.project_keys
+    )
+    assert isinstance(result, str)
+    assert f"Work items for {ado_api_wrapper.project_keys} have been successfully retrieved and saved to the bucket" in result
+
+@pytest.mark.asyncio
+async def test_get_commits(ado_api_wrapper):
+    args = AdoCommitsArgs(
+        since_date="2023-01-01",
+    )
+
+    result = await ado_api_wrapper.get_commits(since_date=args.since_date)
+    assert isinstance(result, str)
+    assert f"Commits for {ado_api_wrapper.project_keys} have been successfully" in result
+
+
+def test_get_merge_requests(ado_api_wrapper):
+    args = AdoCommitsArgs(
+        since_date="2023-01-01",
+    )
+
+    result = ado_api_wrapper.get_merge_requests(since_date=args.since_date)
+    assert isinstance(result, str)
+    assert f"Pull requests for {ado_api_wrapper.project_keys} have been successfully retrieved" in result
+
+def test_get_pipelines_runs(ado_api_wrapper):
+    result = ado_api_wrapper.get_pipelines_runs()
+    assert isinstance(result, str)
+    assert f"Pipeline runs for {ado_api_wrapper.project_keys} have been successfully retrieved " in result