alita-sdk 0.3.114__tar.gz → 0.3.116__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/PKG-INFO +1 -1
  2. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/pyproject.toml +1 -1
  3. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/analysis/jira_analyse/__init__.py +29 -7
  4. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/analysis/jira_analyse/api_wrapper.py +38 -85
  5. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/langraph_agent.py +79 -0
  6. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/utils.py +1 -1
  7. alita_sdk-0.3.116/src/alita_sdk/tools/router.py +35 -0
  8. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/utils/AlitaCallback.py +32 -2
  9. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk.egg-info/PKG-INFO +1 -1
  10. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk.egg-info/SOURCES.txt +1 -0
  11. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/LICENSE +0 -0
  12. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/README.md +0 -0
  13. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/requirements.txt +0 -0
  14. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/setup.cfg +0 -0
  15. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/__init__.py +0 -0
  16. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/__init__.py +0 -0
  17. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/agents/__init__.py +0 -0
  18. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/agents/llamaAgentParser.py +0 -0
  19. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/clients/__init__.py +0 -0
  20. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/clients/artifact.py +0 -0
  21. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/clients/client.py +0 -0
  22. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/clients/datasource.py +0 -0
  23. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/clients/prompt.py +0 -0
  24. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/__init__.py +0 -0
  25. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/analysis/__init__.py +0 -0
  26. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/browseruse/__init__.py +0 -0
  27. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/browseruse/api_wrapper.py +0 -0
  28. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/utils.py +0 -0
  29. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/__init__.py +0 -0
  30. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/agents/__init__.py +0 -0
  31. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/agents/xml_chat.py +0 -0
  32. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/assistant.py +0 -0
  33. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/chat_message_template.py +0 -0
  34. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/constants.py +0 -0
  35. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaBDDScenariosLoader.py +0 -0
  36. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaCSVLoader.py +0 -0
  37. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaConfluenceLoader.py +0 -0
  38. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaDirectoryLoader.py +0 -0
  39. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaDocxMammothLoader.py +0 -0
  40. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaExcelLoader.py +0 -0
  41. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaGitRepoLoader.py +0 -0
  42. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaImageLoader.py +0 -0
  43. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaJiraLoader.py +0 -0
  44. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaQtestLoader.py +0 -0
  45. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/AlitaTableLoader.py +0 -0
  46. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/__init__.py +0 -0
  47. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/constants.py +0 -0
  48. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/document_loaders/utils.py +0 -0
  49. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/indexer.py +0 -0
  50. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/interfaces/__init__.py +0 -0
  51. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/interfaces/kwextractor.py +0 -0
  52. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/interfaces/llm_processor.py +0 -0
  53. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/interfaces/loaders.py +0 -0
  54. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/interfaces/splitters.py +0 -0
  55. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/mixedAgentParser.py +0 -0
  56. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/mixedAgentRenderes.py +0 -0
  57. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/retrievers/AlitaRetriever.py +0 -0
  58. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/retrievers/VectorstoreRetriever.py +0 -0
  59. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/retrievers/__init__.py +0 -0
  60. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/__init__.py +0 -0
  61. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/bdd_parser/__init__.py +0 -0
  62. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/bdd_parser/bdd_exceptions.py +0 -0
  63. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/bdd_parser/bdd_parser.py +0 -0
  64. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/bdd_parser/feature_types.py +0 -0
  65. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/bdd_parser/parser.py +0 -0
  66. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/git.py +0 -0
  67. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/log.py +0 -0
  68. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/quota.py +0 -0
  69. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/state.py +0 -0
  70. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/utils.py +0 -0
  71. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/tools/vector.py +0 -0
  72. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/llamaindex/assistant.py +0 -0
  73. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/llms/__init__.py +0 -0
  74. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/llms/alita.py +0 -0
  75. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/llms/preloaded.py +0 -0
  76. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/toolkits/__init__.py +0 -0
  77. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/toolkits/application.py +0 -0
  78. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/toolkits/artifact.py +0 -0
  79. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/toolkits/datasource.py +0 -0
  80. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/toolkits/prompt.py +0 -0
  81. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/toolkits/tools.py +0 -0
  82. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/toolkits/vectorstore.py +0 -0
  83. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/__init__.py +0 -0
  84. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/application.py +0 -0
  85. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/artifact.py +0 -0
  86. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/datasource.py +0 -0
  87. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/echo.py +0 -0
  88. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/function.py +0 -0
  89. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/indexer_tool.py +0 -0
  90. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/llm.py +0 -0
  91. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/loop.py +0 -0
  92. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/loop_output.py +0 -0
  93. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/pgvector_search.py +0 -0
  94. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/prompt.py +0 -0
  95. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/tool.py +0 -0
  96. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/tools/vectorstore.py +0 -0
  97. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/utils/__init__.py +0 -0
  98. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/utils/evaluate.py +0 -0
  99. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/utils/streamlit.py +0 -0
  100. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/utils/utils.py +0 -0
  101. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk.egg-info/dependency_links.txt +0 -0
  102. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk.egg-info/requires.txt +0 -0
  103. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk.egg-info/top_level.txt +0 -0
  104. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/tests/__init__.py +0 -0
  105. {alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/tests/test_jira_analysis.py +0 -0
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: alita_sdk
- Version: 0.3.114
+ Version: 0.3.116
  Summary: SDK for building langchain agents using resouces from Alita
  Author-email: Artem Rozumenko <artyom.rozumenko@gmail.com>, Mikalai Biazruchka <mikalai_biazruchka@epam.com>, Roman Mitusov <roman_mitusov@epam.com>, Ivan Krakhmaliuk <lifedjik@gmail.com>
  Project-URL: Homepage, https://projectalita.ai
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "alita_sdk"
- version = "0.3.114"
+ version = "0.3.116"
  description = "SDK for building langchain agents using resouces from Alita"
  readme = "README.md"
  requires-python = ">=3.10"
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/analysis/jira_analyse/__init__.py
@@ -5,6 +5,7 @@ from pydantic import create_model, BaseModel, ConfigDict, Field
  from langchain_core.tools import BaseTool, BaseToolkit

  from elitea_analyse.jira.jira_connect import connect_to_jira
+ from alita_tools.utils import clean_string, TOOLKIT_SPLITTER, get_max_toolkit_length
  from alita_tools.base.tool import BaseAction

  from ....tools.artifact import ArtifactWrapper
@@ -16,24 +17,37 @@ name = "Analyse_Jira"

  class AnalyseJira(BaseToolkit):
      tools: List[BaseTool] = []
+     toolkit_max_length: int = 0

      @staticmethod
      def toolkit_config_schema() -> type[BaseModel]:
+         selected_tools = {x['name']: x['args_schema'].schema() for x in
+                           JiraAnalyseWrapper.model_construct().get_available_tools()}
+         AnalyseJira.toolkit_max_length = get_max_toolkit_length(selected_tools)
          return create_model(
              "analyse_jira",
-             jira_base_url=(str, Field(description="Jira URL")),
+             jira_base_url=(str, Field(
+                 description="Jira URL",
+                 json_schema_extra={
+                     'toolkit_name': True,
+                     'max_toolkit_length': AnalyseJira.toolkit_max_length
+                 })
+             ),
              jira_cloud=(bool, Field(description="Hosting Option")),
              jira_username=(str, Field(description="Jira Username")),
              jira_api_key=(Optional[str], Field(description="API key", json_schema_extra={'secret': True}, default="")),
              jira_token=(Optional[str], Field(description="Jira token", json_schema_extra={'secret': True}, default="")),
              # TODO: Add these fields to the schema as custom fields comma-separated if required
+             project_keys=(Optional[str], Field(description="Jira project keys separated by comma", default=None)),
              team_field=(Optional[str], Field(description="Jira field used as identifier for team", default="")),
              environment_field=(Optional[str], Field(description="Jira field used as identifier for environment", default="")),
              defects_name=(Optional[str], Field(description="Jira defects type", default="")),
              closed_status=(Optional[str], Field(description="Jira closed status", default="")),
              jira_verify_ssl=(bool, Field(description="Verify SSL")),
-             jira_custom_fields=(Optional[str], Field(description="Additional fields, split by comma", default="")),
+             jira_custom_fields=(Optional[dict], Field(description="Additional fields, split by comma", default={})),
              artifact_bucket_path=(Optional[str], Field(description="Artifact Bucket Path", default="")),
+             selected_tools=(List[Literal[tuple(selected_tools)]],
+                             Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
              __config__=ConfigDict(json_schema_extra={'metadata':
                  {
                      "label": "Analyse_Jira",
@@ -59,22 +73,25 @@ class AnalyseJira(BaseToolkit):
          )

      @classmethod
-     def get_toolkit(cls, client: 'AlitaClient', **kwargs):
+     def get_toolkit(cls, client: 'AlitaClient', selected_tools: list[str], **kwargs):
+         if selected_tools is None:
+             selected_tools = []
+
          bucket_path = kwargs.get('artifact_bucket_path') or 'analyse-jira'
          artifact_wrapper = ArtifactWrapper(
              client=client, bucket=bucket_path
          )
          check_schema(artifact_wrapper)

+         project_keys = kwargs.get('project_keys') or ''
+
          jira_base_url = kwargs.get('jira_base_url')
          jira_verify_ssl = kwargs.get('jira_verify_ssl')
          jira_username = kwargs.get('jira_username')
          jira_token = kwargs.get('jira_token')
          jira_api_key = kwargs.get('jira_api_key')
-         try:
-             jira_custom_fields = json.loads(kwargs.get('jira_custom_fields', '{}'))
-         except:
-             jira_custom_fields = {}
+
+         jira_custom_fields = kwargs.get('jira_custom_fields', {})
          jira_custom_fields['team'] = kwargs.get('team_field', '')
          jira_custom_fields['environment'] = kwargs.get('environment_field', '')
          closed_status = kwargs.get('closed_status', '')
@@ -97,13 +114,18 @@ class AnalyseJira(BaseToolkit):
          api_wrapper = JiraAnalyseWrapper(
              artifacts_wrapper=artifact_wrapper,
              jira=jira,
+             project_keys=project_keys,
              closed_status=closed_status,
              defects_name=defects_name,
              custom_fields=jira_custom_fields,
          )
+
          tools = []
          available_tools = api_wrapper.get_available_tools()
          for tool in available_tools:
+             if selected_tools:
+                 if tool["name"] not in selected_tools:
+                     continue
              tools.append(
                  BaseAction(
                      api_wrapper=api_wrapper,
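
To make the schema change concrete, here is a minimal sketch of inspecting the regenerated toolkit config. The import path follows the package layout shown in the file list; treating json_schema_extra keys as top-level properties of the generated JSON schema is a pydantic v2 assumption, not something stated in the diff.

```python
# Sketch: inspect the config schema produced by the 0.3.116 toolkit_config_schema().
# Import path mirrors the file location in this diff; model_json_schema() is the
# standard pydantic v2 accessor and is assumed to apply to the created model.
from alita_sdk.community.analysis.jira_analyse import AnalyseJira

schema = AnalyseJira.toolkit_config_schema().model_json_schema()

# New in 0.3.116: project_keys and selected_tools fields, plus toolkit-name hints
# attached to jira_base_url via json_schema_extra.
print(sorted(schema["properties"].keys()))
print(schema["properties"]["jira_base_url"].get("toolkit_name"))
```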
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/community/analysis/jira_analyse/api_wrapper.py
@@ -10,7 +10,6 @@ import pandas as pd

  from elitea_analyse.utils.constants import OUTPUT_MAPPING_FILE, OUTPUT_WORK_ITEMS_FILE
  from elitea_analyse.jira.jira_projects_overview import jira_projects_overview
- from elitea_analyse.jira.jira_all_fields_overview import jira_all_fields_overview
  from elitea_analyse.jira.jira_statuses import get_all_statuses_list
  from elitea_analyse.jira.jira_issues import JiraIssues

@@ -22,15 +21,16 @@ logger = logging.getLogger(__name__)


  class GetJiraFieldsArgs(BaseModel):
-     project_keys: str = Field(
-         description="One or more projects keys separated with comma."
+     project_keys: Optional[str] = Field(
+         description="One or more projects keys separated with comma.",
+         default=''
      )
      after_date: str = Field(description="Date after which issues are considered.")


  class GetJiraIssuesArgs(BaseModel):
-     project_keys: str = Field(
-         description="One or more projects keys separated with comma."
+     project_keys: Optional[str] = Field(
+         description="One or more projects keys separated with comma.", default=''
      )
      closed_issues_based_on: int = Field(
          description="Define whether issues can be thought as closed based on their status (1) or not empty resolved date (2)."
@@ -46,6 +46,7 @@ class GetJiraIssuesArgs(BaseModel):
  class JiraAnalyseWrapper(BaseToolApiWrapper):
      artifacts_wrapper: ArtifactWrapper
      jira: JIRA
+     project_keys: str  # Jira project keys
      closed_status: str  # Jira ticket closed statuses
      defects_name: str  # Jira ticket defects name
      custom_fields: dict  # Jira ticket custom fields
@@ -53,12 +54,14 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
      class Config:
          arbitrary_types_allowed = True

-     def get_number_off_all_issues(self, project_keys: str, after_date: str):
+     def get_number_off_all_issues(self, after_date: str, project_keys: Optional[str] = None):
          """
          Get projects a user has access to and merge them with issues count.
          after_date: str
              date after which issues are considered
          """
+         project_keys = project_keys or self.project_keys
+
          project_df = jira_projects_overview(
              after_date, project_keys=project_keys, jira=self.jira
          )
@@ -85,69 +88,17 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
              "projects_summary": project_df.to_string(),
          }

-     def get_all_jira_fields(self, project_keys: str, after_date: str):
-         """
-         Get all Jira fields for the specified projects.
-         projects: str
-             one or more projects keys separated with comma
-         after_date: str
-             date after which issues are considered
-         """
-         dispatch_custom_event(
-             name="jira_all_fields_overview",
-             data={
-                 "project_keys": project_keys,
-                 "after_date": after_date,
-             },
-         )
-         overall_stat, issue_types_stat = jira_all_fields_overview(
-             project_keys, after_date, jira=self.jira
-         )
-
-         dispatch_custom_event(
-             name="jira_fields_saving",
-             data={
-                 "project_keys": project_keys,
-                 "after_date": after_date,
-                 "overall_stat": len(overall_stat),
-                 "issue_types_stat": len(issue_types_stat),
-                 "files": [
-                     "fields_count.csv",
-                     f"fields_count_issues_{project_keys}.csv",
-                 ],
-             },
-         )
-
-         self.save_dataframe(
-             overall_stat,
-             "fields_count.csv",
-             csv_options={"index": False},
-         )
-
-         self.save_dataframe(
-             issue_types_stat,
-             f"fields_count_issues_{project_keys}.csv",
-             csv_options={"index": False},
-         )
-
-         return {
-             "overall_stat": overall_stat.to_string(),
-             "issue_types_stat": issue_types_stat.to_string(),
-         }
-
      def get_jira_issues(
          self,
-         project_keys: str,
          closed_issues_based_on: int,
          resolved_after: str,
          updated_after: str,
          created_after: str,
          add_filter: str = "",
+         project_keys: Optional[str] = None,
      ):
          """
          Extract Jira issues for the specified projects.
-         projects: str
-             one or more projects keys separated with comma
          closed_issues_based_on: int
              define whether issues can be thought as closed based on their status (1) or not empty resolved date (2)
          resolved_after: str
@@ -158,8 +109,9 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
              created after date (i.e. 2023-01-01)
          add_filter: str
              additional filter for Jira issues in JQL format like "customfield_10000 = 'value' AND customfield_10001 = 'value'"
+         project_keys: str
+             one or more projects keys separated with comma
          """
-
          if not (
              (
                  closed_issues_based_on == 1
@@ -170,14 +122,22 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
              return (
                  "ERROR: Check input parameters closed_issues_based_on and closed_status"
              )
+
+         project_keys = project_keys or self.project_keys

          dispatch_custom_event(
-             name="jira_issues_extraction_start",
+             name="thinking_step",
              data={
-                 "closed_issues_based_on": closed_issues_based_on,
-                 "closed_status": self.closed_status,
-             }
+                 "message": f"I am extracting Jira issues with initial parameters:\
+                     project keys: {project_keys}, closed status: {self.closed_status},\
+                     defects name: {self.defects_name}, custom fields: {self.custom_fields}, \
+                     closed status based on: {closed_issues_based_on}, resolved after: {resolved_after}, \
+                     updated after: {updated_after}, created after: {created_after}, additional filter:{add_filter}",
+                 "tool_name": "jira_issues_extraction_start",
+                 "toolkit": "analyse_jira",
+             },
          )
+
          jira_issues = JiraIssues(
              self.jira,
              project_keys,
@@ -189,12 +149,15 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
          df_issues, df_map = jira_issues.extract_issues_from_jira_and_transform(
              self.custom_fields, (resolved_after, updated_after, created_after)
          )
+
          dispatch_custom_event(
-             name="jira_issues_extracted",
+             name="thinking_step",
              data={
-                 "project_keys": jira_issues.projects,
-                 "issue_count": len(df_issues),
-                 "map_rows": len(df_map),
+                 "message": f"I am saving the extracted Jira issues to the artifact repository. \
+                     issues count: {len(df_issues)}, mapping rows: {len(df_map)}, \
+                     output file: {OUTPUT_MAPPING_FILE}{jira_issues.projects}.csv",
+                 "tool_name": "get_jira_issues",
+                 "toolkit": "analyse_jira",
              },
          )
          self.save_dataframe(
@@ -202,13 +165,6 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
              f"{OUTPUT_MAPPING_FILE}{jira_issues.projects}.csv",
              csv_options={"index_label": "id"},
          )
-         dispatch_custom_event(
-             name="jira_map_statuces_saved",
-             data={
-                 "output_file": f"{OUTPUT_MAPPING_FILE}{jira_issues.projects}.csv",
-                 "row_count": len(df_map),
-             }
-         )

          if not df_issues.empty:
              self.save_dataframe(
@@ -217,11 +173,14 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
                  csv_options={"index_label": "id"},
              )
              dispatch_custom_event(
-                 name="jira_issues_saved",
+                 name="thinking_step",
                  data={
-                     "output_file": f"{OUTPUT_WORK_ITEMS_FILE}{jira_issues.projects}.csv",
-                     "row_count": len(df_issues),
-                 }
+                     "message": f"Saving Jira issues to the file . \
+                         output file: {OUTPUT_WORK_ITEMS_FILE}{jira_issues.projects}.csv,\
+                         row count: {len(df_issues)}",
+                     "tool_name": "get_jira_issues",
+                     "toolkit": "analyse_jira",
+                     },
              )

          return f"{jira_issues.projects} Data has been extracted successfully."
@@ -235,12 +194,6 @@ class JiraAnalyseWrapper(BaseToolApiWrapper):
                  "args_schema": GetJiraFieldsArgs,
                  "ref": self.get_number_off_all_issues,
              },
-             {
-                 "name": "get_all_jira_fields",
-                 "description": self.get_all_jira_fields.__doc__,
-                 "args_schema": GetJiraFieldsArgs,
-                 "ref": self.get_all_jira_fields,
-             },
              {
                  "name": "get_jira_issues",
                  "description": self.get_jira_issues.__doc__,
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/langraph_agent.py
@@ -26,6 +26,7 @@ from ..tools.loop_output import LoopToolNode
  from ..tools.tool import ToolNode
  from ..utils.evaluate import EvaluateTemplate
  from ..utils.utils import clean_string, TOOLKIT_SPLITTER
+ from ..tools.router import RouterNode

  logger = logging.getLogger(__name__)

@@ -124,6 +125,58 @@ class TransitionalEdge(Runnable):
          return self.next_step if self.next_step != 'END' else END


+ class StateModifierNode(Runnable):
+     name = "StateModifierNode"
+
+     def __init__(self, template: str, variables_to_clean: Optional[list[str]] = None,
+                  input_variables: Optional[list[str]] = None,
+                  output_variables: Optional[list[str]] = None):
+         self.template = template
+         self.variables_to_clean = variables_to_clean or []
+         self.input_variables = input_variables or ["messages"]
+         self.output_variables = output_variables or []
+
+     def invoke(self, state: Annotated[BaseStore, InjectedStore()], config: Optional[RunnableConfig] = None) -> dict:
+         logger.info(f"Modifying state with template: {self.template}")
+
+         # Collect input variables from state
+         input_data = {}
+         for var in self.input_variables:
+             if var in state:
+                 input_data[var] = state.get(var)
+
+         # Render the template using Jinja
+         from jinja2 import Template
+         rendered_message = Template(self.template).render(**input_data)
+         result = {}
+         # Store the rendered message in the state or messages
+         if len(self.output_variables) > 0:
+             # Use the first output variable to store the rendered content
+             output_var = self.output_variables[0]
+             result[output_var] = rendered_message
+
+         # Clean up specified variables (make them empty, not delete)
+
+         for var in self.variables_to_clean:
+             if var in state:
+                 # Empty the variable based on its type
+                 if isinstance(state[var], list):
+                     result[var] = []
+                 elif isinstance(state[var], dict):
+                     result[var] = {}
+                 elif isinstance(state[var], str):
+                     result[var] = ""
+                 elif isinstance(state[var], (int, float)):
+                     result[var] = 0
+                 elif state[var] is None:
+                     pass
+                 else:
+                     # For other types, set to None
+                     result[var] = None
+         logger.info(f"State modifier result: {result}")
+         return result
+
+
  def prepare_output_schema(lg_builder, memory, store, debug=False, interrupt_before=[], interrupt_after=[]):
      # prepare output channels
      output_channels = (
@@ -283,6 +336,32 @@ def create_graph(
                  output_variables=node.get('output', []),
                  input_variables=node.get('input', ['messages']),
                  structured_output=node.get('structured_output', False)))
+         elif node_type == 'router':
+             # Add a RouterNode as an independent node
+             lg_builder.add_node(node_id, RouterNode(
+                 name=node['id'],
+                 condition=node.get('condition', ''),
+                 routes=node.get('routes', []),
+                 default_output=node.get('default_output', 'END'),
+                 input_variables=node.get('input', ['messages'])
+             ))
+             # Add a single conditional edge for all routes
+             lg_builder.add_conditional_edges(
+                 node_id,
+                 ConditionalEdge(
+                     condition="{{router_output}}",  # router node returns the route key in 'router_output'
+                     condition_inputs=["router_output"],
+                     conditional_outputs=node.get('routes', []),
+                     default_output=node.get('default_output', 'END')
+                 )
+             )
+         elif node_type == 'state_modifier':
+             lg_builder.add_node(node_id, StateModifierNode(
+                 template=node.get('template', ''),
+                 variables_to_clean=node.get('variables_to_clean', []),
+                 input_variables=node.get('input', ['messages']),
+                 output_variables=node.get('output', [])
+             ))
          if node.get('transition'):
              next_step = clean_string(node['transition'])
              logger.info(f'Adding transition: {next_step}')
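
Both new node types are configured entirely from the node definition, so a graph schema can now declare them declaratively. A hedged sketch of what such entries might look like: only the key names mirrored from the node.get(...) calls above are grounded in the diff; the overall schema shape, the "type" key, and the condition syntax are assumptions for illustration.

```python
# Hypothetical node entries for a schema consumed by create_graph(); key names
# follow node.get(...) in the diff, everything else is illustrative.
router_node = {
    "id": "route_by_intent",
    "type": "router",
    "condition": "{{ 'issues' if need_issues else 'overview' }}",  # assumed Jinja-style EvaluateTemplate input
    "routes": ["issues", "overview"],      # route keys are downstream node ids
    "default_output": "END",
    "input": ["need_issues"],
}

state_modifier_node = {
    "id": "summarize_and_reset",
    "type": "state_modifier",
    "template": "Conversation so far had {{ messages | length }} messages.",
    "variables_to_clean": ["messages"],    # emptied (not deleted) after rendering
    "input": ["messages"],
    "output": ["summary"],                 # first output variable receives the rendered text
}
```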
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/langchain/utils.py
@@ -128,7 +128,7 @@ def parse_type(type_str):


  def create_state(data: Optional[dict] = None):
-     state_dict = {'input': str,}
+     state_dict = {'input': str, 'router_output': str}  # Always include router_output
      if not data:
          data = {'messages': 'list[str]'}
      for key, value in data.items():
alita_sdk-0.3.116/src/alita_sdk/tools/router.py
@@ -0,0 +1,35 @@
+ import logging
+ from typing import Any, Optional, Union, List
+ from langchain_core.runnables import RunnableConfig
+ from langchain_core.tools import BaseTool
+ from ..utils.evaluate import EvaluateTemplate
+ from ..utils.utils import clean_string
+
+ logger = logging.getLogger(__name__)
+
+ class RouterNode(BaseTool):
+     name: str = 'RouterNode'
+     description: str = 'A router node that evaluates a condition and routes accordingly.'
+     condition: str = ''
+     routes: List[str] = []  # List of possible output node keys
+     default_output: str = 'END'
+     input_variables: Optional[list[str]] = None
+
+     def invoke(self, state: Union[str, dict], config: Optional[RunnableConfig] = None, **kwargs: Any) -> dict:
+         input_data = {}
+         for field in self.input_variables or []:
+             input_data[field] = state.get(field, "")
+         template = EvaluateTemplate(self.condition, input_data)
+         result = template.evaluate()
+         logger.info(f"RouterNode evaluated condition '{self.condition}' with input {input_data} => {result}")
+         result = clean_string(str(result))
+         if result in self.routes:
+             # If the result is one of the routes, return it
+             return {"router_output": result}
+         elif result == self.default_output:
+             # If the result is the default output, return it
+             return {"router_output": clean_string(self.default_output)}
+         return {"router_output": 'END'}
+
+     def _run(self, *args, **kwargs):
+         return self.invoke(**kwargs)
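
In isolation the node behaves like a small function over the graph state: it evaluates the condition, normalises the result with clean_string, and reports the chosen route under router_output, which the conditional edge added in langraph_agent.py then follows. A minimal sketch; the condition syntax is an assumption about EvaluateTemplate, so the expected outputs depend on how that template actually evaluates.

```python
# Sketch: direct invocation of RouterNode outside a graph, for illustration only.
router = RouterNode(
    name="route_by_flag",
    condition="{{ 'analyse' if run_analysis else 'skip' }}",  # assumed Jinja-style condition
    routes=["analyse", "skip"],
    default_output="END",
    input_variables=["run_analysis"],
)

print(router.invoke({"run_analysis": True}))   # expected: {'router_output': 'analyse'}
print(router.invoke({"run_analysis": False}))  # expected: {'router_output': 'skip'}
# Any result that matches neither routes nor default_output falls back to 'END'.
```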
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk/utils/AlitaCallback.py
@@ -1,6 +1,7 @@
  import logging
  import json
  import traceback
+ from datetime import datetime, timezone
  from uuid import UUID, uuid4
  from typing import Any, Dict, List, Optional
  from collections import defaultdict
@@ -51,6 +52,35 @@ class AlitaStreamlitCallback(BaseCallbackHandler):
      # Tool
      #

+     def on_custom_event(
+         self,
+         name: str,
+         data: Any,
+         *,
+         run_id: UUID,
+         tags: Optional[List[str]] = None,
+         metadata: Optional[Dict[str, Any]] = None,
+         **kwargs: Any,
+     ) -> None:
+         """Callback containing a group of custom events"""
+
+         payload = {
+             "name": name,
+             "run_id": str(run_id),
+             "tool_run_id": str(run_id),  # compatibility
+             "metadata": metadata,
+             "datetime": str(datetime.now(tz=timezone.utc)),
+             **data,
+         }
+         payload = json.loads(
+             json.dumps(payload, ensure_ascii=False, default=lambda o: str(o))
+         )
+
+         self.callback_state[str(run_id)] = self.st.status(
+             f"Running {payload.get("tool_name")}...", expanded=True
+         )
+         self.callback_state[str(run_id)].write(f"Tool inputs: {payload}")
+
      def on_tool_start(self, *args, run_id: UUID, **kwargs):
          """ Callback """
          if self.debug:
@@ -125,7 +155,7 @@ class AlitaStreamlitCallback(BaseCallbackHandler):

          self.current_model_name = metadata.get('ls_model_name', self.current_model_name)
          llm_run_id = str(run_id)
-
+
          self.callback_state[llm_run_id] = self.st.status(f"Running LLM ...", expanded=True)
          self.callback_state[llm_run_id].write(f"LLM inputs: {messages}")

@@ -176,4 +206,4 @@ class AlitaStreamlitCallback(BaseCallbackHandler):
          log.debug("on_llm_end(%s, %s)", response, kwargs)
          llm_run_id = str(run_id)
          self.callback_state[llm_run_id].update(label=f"Completed LLM call", state="complete", expanded=False)
-         self.callback_state.pop(llm_run_id, None)
+         self.callback_state.pop(llm_run_id, None)
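
Taken together with the api_wrapper.py changes, this closes the loop: the Jira wrapper now emits generic thinking_step events and the Streamlit callback renders each one as a status box. A hedged sketch of the payload flow, calling the handler directly only for illustration (it is normally driven by LangChain's callback manager, and callback stands in for a configured AlitaStreamlitCallback instance):

```python
# Sketch: the shape of a "thinking_step" event as dispatched by JiraAnalyseWrapper
# and received by on_custom_event. 'callback' is assumed to exist already.
from uuid import uuid4

callback.on_custom_event(
    name="thinking_step",
    data={
        "message": "I am extracting Jira issues with initial parameters: ...",
        "tool_name": "jira_issues_extraction_start",
        "toolkit": "analyse_jira",
    },
    run_id=uuid4(),
)
# The handler stamps run_id/datetime, round-trips the payload through json to make it
# serialisable, then opens a st.status box labelled "Running jira_issues_extraction_start...".
```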
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: alita_sdk
- Version: 0.3.114
+ Version: 0.3.116
  Summary: SDK for building langchain agents using resouces from Alita
  Author-email: Artem Rozumenko <artyom.rozumenko@gmail.com>, Mikalai Biazruchka <mikalai_biazruchka@epam.com>, Roman Mitusov <roman_mitusov@epam.com>, Ivan Krakhmaliuk <lifedjik@gmail.com>
  Project-URL: Homepage, https://projectalita.ai
{alita_sdk-0.3.114 → alita_sdk-0.3.116}/src/alita_sdk.egg-info/SOURCES.txt
@@ -91,6 +91,7 @@ src/alita_sdk/tools/loop.py
  src/alita_sdk/tools/loop_output.py
  src/alita_sdk/tools/pgvector_search.py
  src/alita_sdk/tools/prompt.py
+ src/alita_sdk/tools/router.py
  src/alita_sdk/tools/tool.py
  src/alita_sdk/tools/vectorstore.py
  src/alita_sdk/utils/AlitaCallback.py