alita-sdk 0.3.206__py3-none-any.whl → 0.3.208__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. alita_sdk/runtime/clients/client.py +369 -6
  2. alita_sdk/runtime/langchain/langraph_agent.py +6 -1
  3. alita_sdk/runtime/langchain/store_manager.py +4 -4
  4. alita_sdk/runtime/toolkits/tools.py +11 -20
  5. alita_sdk/runtime/utils/streamlit.py +472 -192
  6. alita_sdk/runtime/utils/toolkit_runtime.py +147 -0
  7. alita_sdk/runtime/utils/toolkit_utils.py +157 -0
  8. alita_sdk/tools/ado/test_plan/test_plan_wrapper.py +82 -11
  9. alita_sdk/tools/ado/wiki/ado_wrapper.py +62 -2
  10. alita_sdk/tools/chunkers/sematic/markdown_chunker.py +2 -1
  11. alita_sdk/tools/memory/__init__.py +54 -10
  12. alita_sdk/tools/sharepoint/api_wrapper.py +13 -4
  13. {alita_sdk-0.3.206.dist-info → alita_sdk-0.3.208.dist-info}/METADATA +1 -1
  14. {alita_sdk-0.3.206.dist-info → alita_sdk-0.3.208.dist-info}/RECORD +17 -24
  15. alita_sdk/community/analysis/__init__.py +0 -0
  16. alita_sdk/community/analysis/ado_analyse/__init__.py +0 -103
  17. alita_sdk/community/analysis/ado_analyse/api_wrapper.py +0 -261
  18. alita_sdk/community/analysis/github_analyse/__init__.py +0 -98
  19. alita_sdk/community/analysis/github_analyse/api_wrapper.py +0 -166
  20. alita_sdk/community/analysis/gitlab_analyse/__init__.py +0 -110
  21. alita_sdk/community/analysis/gitlab_analyse/api_wrapper.py +0 -172
  22. alita_sdk/community/analysis/jira_analyse/__init__.py +0 -141
  23. alita_sdk/community/analysis/jira_analyse/api_wrapper.py +0 -252
  24. {alita_sdk-0.3.206.dist-info → alita_sdk-0.3.208.dist-info}/WHEEL +0 -0
  25. {alita_sdk-0.3.206.dist-info → alita_sdk-0.3.208.dist-info}/licenses/LICENSE +0 -0
  26. {alita_sdk-0.3.206.dist-info → alita_sdk-0.3.208.dist-info}/top_level.txt +0 -0
alita_sdk/community/analysis/jira_analyse/api_wrapper.py (deleted)
@@ -1,252 +0,0 @@
- import logging
- from io import StringIO
- from typing import Optional, List, Dict, Any
- from langchain_core.callbacks import dispatch_custom_event
- from langchain_core.tools import ToolException
- from pydantic import BaseModel, Field
- from jira import JIRA
- import pandas as pd
-
-
- from elitea_analyse.utils.constants import OUTPUT_MAPPING_FILE, OUTPUT_WORK_ITEMS_FILE
- from elitea_analyse.jira.jira_projects_overview import jira_projects_overview
- from elitea_analyse.jira.jira_statuses import get_all_statuses_list
- from elitea_analyse.jira.jira_issues import JiraIssues
-
- from alita_sdk.tools.elitea_base import BaseToolApiWrapper
- from alita_sdk.runtime.tools.artifact import ArtifactWrapper
- from alita_sdk.runtime.utils.logging import with_streamlit_logs
-
- logger = logging.getLogger(__name__)
-
-
- class GetJiraFieldsArgs(BaseModel):
-     project_keys: Optional[str] = Field(
-         description="One or more projects keys separated with comma.",
-         default=''
-     )
-     after_date: str = Field(description="Date after which issues are considered.")
-
-
- class GetJiraIssuesArgs(BaseModel):
-     project_keys: Optional[str] = Field(
-         description="One or more projects keys separated with comma.", default=''
-     )
-     closed_issues_based_on: int = Field(
-         description=("Define whether issues can be thought as closed based on their status (1) "
-                      "or not empty resolved date (2).")
-     )
-     resolved_after: str = Field(description="Resolved after date (i.e. 2023-01-01).")
-     updated_after: str = Field(description="Updated after date (i.e. 2023-01-01).")
-     created_after: str = Field(description="Created after date (i.e. 2023-01-01).")
-     add_filter: Optional[str] = Field(
-         description=("Additional filter for Jira issues in JQL format like "
-                      "'customfield_10000 = 'value' AND customfield_10001 = 'value'")
-     )
-
-
- class JiraAnalyseWrapper(BaseToolApiWrapper):
-     artifacts_wrapper: ArtifactWrapper
-     jira: JIRA
-     project_keys: str  # Jira project keys
-     closed_status: str  # Jira ticket closed statuses
-     defects_name: str  # Jira ticket defects name
-     custom_fields: dict  # Jira ticket custom fields
-
-     class Config:
-         arbitrary_types_allowed = True
-
-     def get_number_off_all_issues(self, after_date: str, project_keys: Optional[str] = None):
-         """
-         Get projects a user has access to and merge them with issues count.
-         after_date: str
-             date after which issues are considered
-         project_keys: str
-             one or more projects keys separated with comma
-         """
-         project_keys = project_keys or self.project_keys
-
-         dispatch_custom_event(
-             name="thinking_step",
-             data={
-                 "message": f"I am extracting number of all issues with initial parameters:\
-                 project keys: {project_keys}, after date: {after_date}",
-                 "tool_name": "get_number_off_all_issues",
-                 "toolkit": "analyse_jira",
-             },
-         )
-
-         project_df = jira_projects_overview(
-             after_date, project_keys=project_keys, jira=self.jira
-         )
-
-         # Save project_df DataFrame into the bucket
-         self.save_dataframe(
-             project_df,
-             f"projects_overview_{project_keys}.csv",
-             csv_options={"index": False},
-         )
-         return {
-             "projects": project_df["key"].tolist(),
-             "projects_summary": project_df.to_string(),
-         }
-
-     @with_streamlit_logs(tool_name="get_jira_issues")
-     def get_jira_issues(
-         self,
-         closed_issues_based_on: int,
-         resolved_after: str,
-         updated_after: str,
-         created_after: str,
-         add_filter: str = "",
-         project_keys: Optional[str] = None,
-     ):
-         """
-         Extract Jira issues for the specified projects.
-         closed_issues_based_on: int
-             define whether issues can be thought as
-             closed based on their status (1) or not empty resolved date (2)
-         resolved_after: str
-             resolved after date (i.e. 2023-01-01)
-         updated_after: str
-             updated after date (i.e. 2023-01-01)
-         created_after: str
-             created after date (i.e. 2023-01-01)
-         add_filter: str
-             additional filter for Jira issues in JQL format
-             like "customfield_10000 = 'value' AND customfield_10001 = 'value'"
-         project_keys: str
-             one or more projects keys separated with comma
-         """
-
-         if not (
-             (
-                 closed_issues_based_on == 1
-                 and self.closed_status in get_all_statuses_list(jira=self.jira)
-             )
-             or closed_issues_based_on == 2
-         ):
-             return (
-                 f"ERROR: Check input parameters closed_issues_based_on ({closed_issues_based_on}) "
-                 f"and closed_status ({self.closed_status}) not in Jira statuses list."
-             )
-
-         project_keys = project_keys or self.project_keys
-
-         dispatch_custom_event(
-             name="thinking_step",
-             data={
-                 "message": f"I am extracting Jira issues with initial parameters:\
-                 project keys: {project_keys}, closed status: {self.closed_status},\
-                 defects name: {self.defects_name}, custom fields: {self.custom_fields}, \
-                 closed status based on: {closed_issues_based_on}, resolved after: {resolved_after}, \
-                 updated after: {updated_after}, created after: {created_after}, additional filter:{add_filter}",
-                 "tool_name": "jira_issues_extraction_start",
-                 "toolkit": "analyse_jira",
-             },
-         )
-
-         jira_issues = JiraIssues(
-             self.jira,
-             project_keys,
-             (closed_issues_based_on, self.closed_status),
-             self.defects_name,
-             add_filter="",
-         )
-
-         df_issues, df_map = jira_issues.extract_issues_from_jira_and_transform(
-             self.custom_fields, (resolved_after, updated_after, created_after)
-         )
-
-         dispatch_custom_event(
-             name="thinking_step",
-             data={
-                 "message": f"I am saving the extracted Jira issues to the artifact repository. \
-                 issues count: {len(df_issues)}, mapping rows: {len(df_map)}, \
-                 output file: {OUTPUT_MAPPING_FILE}{jira_issues.projects}.csv",
-                 "tool_name": "get_jira_issues",
-                 "toolkit": "analyse_jira",
-             },
-         )
-         self.save_dataframe(
-             df_map,
-             f"{OUTPUT_MAPPING_FILE}{jira_issues.projects}.csv",
-             csv_options={"index_label": "id"},
-         )
-
-         if not df_issues.empty:
-             self.save_dataframe(
-                 df_issues,
-                 f"{OUTPUT_WORK_ITEMS_FILE}{jira_issues.projects}.csv",
-                 csv_options={"index_label": "id"},
-             )
-             dispatch_custom_event(
-                 name="thinking_step",
-                 data={
-                     "message": f"Saving Jira issues to the file . \
-                     output file: {OUTPUT_WORK_ITEMS_FILE}{jira_issues.projects}.csv,\
-                     row count: {len(df_issues)}",
-                     "tool_name": "get_jira_issues",
-                     "toolkit": "analyse_jira",
-                 },
-             )
-
-         return f"{jira_issues.projects} Data has been extracted successfully."
-
-     def get_available_tools(self) -> List[Dict[str, Any]]:
-         """Get a list of available tools."""
-         return [
-             {
-                 "name": "get_number_off_all_issues",
-                 "description": self.get_number_off_all_issues.__doc__,
-                 "args_schema": GetJiraFieldsArgs,
-                 "ref": self.get_number_off_all_issues,
-             },
-             {
-                 "name": "get_jira_issues",
-                 "description": self.get_jira_issues.__doc__,
-                 "args_schema": GetJiraIssuesArgs,
-                 "ref": self.get_jira_issues,
-             },
-         ]
-
-     def save_dataframe(
-         self,
-         df: pd.DataFrame,
-         target_file: str,
-         csv_options: Optional[Dict[str, Any]] = None,
-     ):
-         """
-         Save a pandas DataFrame as a CSV file in the artifact repository using the ArtifactWrapper.
-
-         Args:
-             df (pd.DataFrame): The DataFrame to save.
-             target_file (str): The target file name in the storage (e.g., "file.csv").
-             csv_options: Dictionary of options to pass to Dataframe.to_csv()
-
-         Raises:
-             ValueError: If the DataFrame is empty or the file name is invalid.
-             Exception: If saving to the artifact repository fails.
-         """
-         csv_options = csv_options or {}
-
-         # Use StringIO to save the DataFrame as a string
-         try:
-             buffer = StringIO()
-             df.to_csv(buffer, **csv_options)
-             self.artifacts_wrapper.create_file(target_file, buffer.getvalue())
-             logger.info(
-                 f"Successfully saved dataframe to {target_file} in bucket {self.artifacts_wrapper.bucket}"
-             )
-         except Exception as e:
-             logger.exception("Failed to save DataFrame to artifact repository")
-             return ToolException(
-                 f"Failed to save DataFrame to artifact repository: {str(e)}"
-             )
-
-     def run(self, mode: str, *args: Any, **kwargs: Any):
-         for tool in self.get_available_tools():
-             if tool["name"] == mode:
-                 return tool["ref"](*args, **kwargs)
-
-         raise ValueError(f"Unknown mode: {mode}")
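
For context on the removed module: JiraAnalyseWrapper (still shipped in 0.3.206) exposes its tools through the run() dispatcher, which matches the requested tool name against get_available_tools() and forwards the call to the bound method. The following is a minimal usage sketch against 0.3.206 only, not part of the diff; the artifacts_wrapper instance, Jira server URL, credentials, and field values are placeholders and assumptions.

from jira import JIRA

from alita_sdk.community.analysis.jira_analyse.api_wrapper import JiraAnalyseWrapper

# Illustrative connection only; server URL and credentials are placeholders.
jira_client = JIRA(server="https://example.atlassian.net",
                   basic_auth=("user@example.com", "api-token"))

wrapper = JiraAnalyseWrapper(
    artifacts_wrapper=artifacts_wrapper,  # assumed: an already-configured ArtifactWrapper instance
    jira=jira_client,
    project_keys="PROJ",
    closed_status="Done",   # placeholder closed status
    defects_name="Bug",     # placeholder defect issue type name
    custom_fields={},
)

# run() looks up the tool by name and forwards the keyword arguments.
overview = wrapper.run("get_number_off_all_issues", after_date="2023-01-01")
summary = wrapper.run(
    "get_jira_issues",
    closed_issues_based_on=2,  # 2 = treat issues with a non-empty resolved date as closed
    resolved_after="2023-01-01",
    updated_after="2023-01-01",
    created_after="2023-01-01",
)

Results are written as CSV artifacts via save_dataframe(), as shown in the removed code above.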