alita-sdk 0.3.365__py3-none-any.whl → 0.3.462__py3-none-any.whl
This diff reflects the content of publicly available package versions as released to a supported registry and is provided for informational purposes only.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/cli/__init__.py +10 -0
- alita_sdk/cli/__main__.py +17 -0
- alita_sdk/cli/agent_executor.py +144 -0
- alita_sdk/cli/agent_loader.py +197 -0
- alita_sdk/cli/agent_ui.py +166 -0
- alita_sdk/cli/agents.py +1069 -0
- alita_sdk/cli/callbacks.py +576 -0
- alita_sdk/cli/cli.py +159 -0
- alita_sdk/cli/config.py +153 -0
- alita_sdk/cli/formatting.py +182 -0
- alita_sdk/cli/mcp_loader.py +315 -0
- alita_sdk/cli/toolkit.py +330 -0
- alita_sdk/cli/toolkit_loader.py +55 -0
- alita_sdk/cli/tools/__init__.py +9 -0
- alita_sdk/cli/tools/filesystem.py +905 -0
- alita_sdk/configurations/bitbucket.py +95 -0
- alita_sdk/configurations/confluence.py +96 -1
- alita_sdk/configurations/gitlab.py +79 -0
- alita_sdk/configurations/jira.py +103 -0
- alita_sdk/configurations/testrail.py +88 -0
- alita_sdk/configurations/xray.py +93 -0
- alita_sdk/configurations/zephyr_enterprise.py +93 -0
- alita_sdk/configurations/zephyr_essential.py +75 -0
- alita_sdk/runtime/clients/artifact.py +1 -1
- alita_sdk/runtime/clients/client.py +47 -10
- alita_sdk/runtime/clients/mcp_discovery.py +342 -0
- alita_sdk/runtime/clients/mcp_manager.py +262 -0
- alita_sdk/runtime/clients/sandbox_client.py +373 -0
- alita_sdk/runtime/langchain/assistant.py +70 -41
- alita_sdk/runtime/langchain/constants.py +6 -1
- alita_sdk/runtime/langchain/document_loaders/AlitaDocxMammothLoader.py +315 -3
- alita_sdk/runtime/langchain/document_loaders/AlitaJSONLoader.py +4 -1
- alita_sdk/runtime/langchain/document_loaders/constants.py +73 -100
- alita_sdk/runtime/langchain/langraph_agent.py +164 -38
- alita_sdk/runtime/langchain/utils.py +43 -7
- alita_sdk/runtime/models/mcp_models.py +61 -0
- alita_sdk/runtime/toolkits/__init__.py +24 -0
- alita_sdk/runtime/toolkits/application.py +8 -1
- alita_sdk/runtime/toolkits/artifact.py +5 -6
- alita_sdk/runtime/toolkits/mcp.py +895 -0
- alita_sdk/runtime/toolkits/tools.py +140 -50
- alita_sdk/runtime/tools/__init__.py +7 -2
- alita_sdk/runtime/tools/application.py +7 -0
- alita_sdk/runtime/tools/function.py +94 -5
- alita_sdk/runtime/tools/graph.py +10 -4
- alita_sdk/runtime/tools/image_generation.py +104 -8
- alita_sdk/runtime/tools/llm.py +204 -114
- alita_sdk/runtime/tools/mcp_inspect_tool.py +284 -0
- alita_sdk/runtime/tools/mcp_remote_tool.py +166 -0
- alita_sdk/runtime/tools/mcp_server_tool.py +3 -1
- alita_sdk/runtime/tools/sandbox.py +180 -79
- alita_sdk/runtime/tools/vectorstore.py +22 -21
- alita_sdk/runtime/tools/vectorstore_base.py +79 -26
- alita_sdk/runtime/utils/mcp_oauth.py +164 -0
- alita_sdk/runtime/utils/mcp_sse_client.py +405 -0
- alita_sdk/runtime/utils/streamlit.py +34 -3
- alita_sdk/runtime/utils/toolkit_utils.py +14 -4
- alita_sdk/runtime/utils/utils.py +1 -0
- alita_sdk/tools/__init__.py +48 -31
- alita_sdk/tools/ado/repos/__init__.py +1 -0
- alita_sdk/tools/ado/test_plan/__init__.py +1 -1
- alita_sdk/tools/ado/wiki/__init__.py +1 -5
- alita_sdk/tools/ado/work_item/__init__.py +1 -5
- alita_sdk/tools/ado/work_item/ado_wrapper.py +17 -8
- alita_sdk/tools/base_indexer_toolkit.py +194 -112
- alita_sdk/tools/bitbucket/__init__.py +1 -0
- alita_sdk/tools/chunkers/sematic/proposal_chunker.py +1 -1
- alita_sdk/tools/code/sonar/__init__.py +1 -1
- alita_sdk/tools/code_indexer_toolkit.py +15 -5
- alita_sdk/tools/confluence/__init__.py +2 -2
- alita_sdk/tools/confluence/api_wrapper.py +110 -63
- alita_sdk/tools/confluence/loader.py +10 -0
- alita_sdk/tools/elitea_base.py +22 -22
- alita_sdk/tools/github/__init__.py +2 -2
- alita_sdk/tools/gitlab/__init__.py +2 -1
- alita_sdk/tools/gitlab/api_wrapper.py +11 -7
- alita_sdk/tools/gitlab_org/__init__.py +1 -2
- alita_sdk/tools/google_places/__init__.py +2 -1
- alita_sdk/tools/jira/__init__.py +1 -0
- alita_sdk/tools/jira/api_wrapper.py +1 -1
- alita_sdk/tools/memory/__init__.py +1 -1
- alita_sdk/tools/non_code_indexer_toolkit.py +2 -2
- alita_sdk/tools/openapi/__init__.py +10 -1
- alita_sdk/tools/pandas/__init__.py +1 -1
- alita_sdk/tools/postman/__init__.py +2 -1
- alita_sdk/tools/postman/api_wrapper.py +18 -8
- alita_sdk/tools/postman/postman_analysis.py +8 -1
- alita_sdk/tools/pptx/__init__.py +2 -2
- alita_sdk/tools/qtest/__init__.py +3 -3
- alita_sdk/tools/qtest/api_wrapper.py +1708 -76
- alita_sdk/tools/rally/__init__.py +1 -2
- alita_sdk/tools/report_portal/__init__.py +1 -0
- alita_sdk/tools/salesforce/__init__.py +1 -0
- alita_sdk/tools/servicenow/__init__.py +2 -3
- alita_sdk/tools/sharepoint/__init__.py +1 -0
- alita_sdk/tools/sharepoint/api_wrapper.py +125 -34
- alita_sdk/tools/sharepoint/authorization_helper.py +191 -1
- alita_sdk/tools/sharepoint/utils.py +8 -2
- alita_sdk/tools/slack/__init__.py +1 -0
- alita_sdk/tools/sql/__init__.py +2 -1
- alita_sdk/tools/sql/api_wrapper.py +71 -23
- alita_sdk/tools/testio/__init__.py +1 -0
- alita_sdk/tools/testrail/__init__.py +1 -3
- alita_sdk/tools/utils/__init__.py +17 -0
- alita_sdk/tools/utils/content_parser.py +35 -24
- alita_sdk/tools/vector_adapters/VectorStoreAdapter.py +67 -21
- alita_sdk/tools/xray/__init__.py +2 -1
- alita_sdk/tools/zephyr/__init__.py +2 -1
- alita_sdk/tools/zephyr_enterprise/__init__.py +1 -0
- alita_sdk/tools/zephyr_essential/__init__.py +1 -0
- alita_sdk/tools/zephyr_scale/__init__.py +1 -0
- alita_sdk/tools/zephyr_squad/__init__.py +1 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/METADATA +8 -2
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/RECORD +118 -93
- alita_sdk-0.3.462.dist-info/entry_points.txt +2 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.365.dist-info → alita_sdk-0.3.462.dist-info}/top_level.txt +0 -0
alita_sdk/runtime/langchain/document_loaders/constants.py:

```diff
@@ -30,78 +30,73 @@ from enum import Enum
 
 
 class LoaderProperties(Enum):
-    LLM = '
+    LLM = 'use_llm'
+    PROMPT_DEFAULT = 'use_default_prompt'
     PROMPT = 'prompt'
-    PROMPT_DEFAULT = 'prompt_default'
 
+DEFAULT_ALLOWED_BASE = {'max_tokens': 512}
 
-
+DEFAULT_ALLOWED_WITH_LLM = {
+    **DEFAULT_ALLOWED_BASE,
+    LoaderProperties.LLM.value: False,
+    LoaderProperties.PROMPT_DEFAULT.value: False,
+    LoaderProperties.PROMPT.value: "",
+}
+
+# Image file loaders mapping - directly supported by LLM with image_url
 image_loaders_map = {
     '.png': {
         'class': AlitaImageLoader,
         'mime_type': 'image/png',
         'is_multimodal_processing': True,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ],
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM,
     },
     '.jpg': {
         'class': AlitaImageLoader,
         'mime_type': 'image/jpeg',
         'is_multimodal_processing': True,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.jpeg': {
         'class': AlitaImageLoader,
         'mime_type': 'image/jpeg',
         'is_multimodal_processing': True,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.gif': {
         'class': AlitaImageLoader,
         'mime_type': 'image/gif',
         'is_multimodal_processing': True,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
+    '.webp': {
+        'class': AlitaImageLoader,
+        'mime_type': 'image/webp',
+        'is_multimodal_processing': True,
+        'kwargs': {},
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
+    }
+}
+
+# Image file loaders mapping - require conversion before sending to LLM
+image_loaders_map_converted = {
     '.bmp': {
         'class': AlitaImageLoader,
         'mime_type': 'image/bmp',
         'is_multimodal_processing': True,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.svg': {
         'class': AlitaImageLoader,
         'mime_type': 'image/svg+xml',
         'is_multimodal_processing': True,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     }
 }
 
```
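For reference, the two module-level defaults introduced in this hunk carry both the whitelist of overridable loader settings and their default values. A minimal sketch of what they expand to, using only the enum values shown above:

```python
from enum import Enum

class LoaderProperties(Enum):
    LLM = 'use_llm'
    PROMPT_DEFAULT = 'use_default_prompt'
    PROMPT = 'prompt'

DEFAULT_ALLOWED_BASE = {'max_tokens': 512}
DEFAULT_ALLOWED_WITH_LLM = {
    **DEFAULT_ALLOWED_BASE,
    LoaderProperties.LLM.value: False,
    LoaderProperties.PROMPT_DEFAULT.value: False,
    LoaderProperties.PROMPT.value: "",
}

# The dict unpacking above resolves to plain string keys with their defaults:
assert DEFAULT_ALLOWED_WITH_LLM == {
    'max_tokens': 512,
    'use_llm': False,
    'use_default_prompt': False,
    'prompt': "",
}
```

The remaining constants.py hunks, below, replace each per-extension `allowed_to_override` list with one of these shared dicts.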
```diff
@@ -114,25 +109,25 @@ document_loaders_map = {
         'kwargs': {
             'autodetect_encoding': True
         },
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.yml': {
         'class': AlitaTextLoader,
-        'mime_type': 'application/
+        'mime_type': 'application/yaml',
         'is_multimodal_processing': False,
         'kwargs': {
             'autodetect_encoding': True
         },
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.yaml': {
         'class': AlitaTextLoader,
-        'mime_type': 'application/
+        'mime_type': 'application/yaml',
         'is_multimodal_processing': False,
         'kwargs': {
             'autodetect_encoding': True
         },
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.groovy': {
         'class': AlitaTextLoader,
@@ -141,14 +136,14 @@ document_loaders_map = {
         'kwargs': {
             'autodetect_encoding': True
         },
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.md': {
         'class': AlitaMarkdownLoader,
         'mime_type': 'text/markdown',
         'is_multimodal_processing': False,
         'kwargs': {},
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.csv': {
         'class': AlitaCSVLoader,
@@ -159,7 +154,7 @@ document_loaders_map = {
             'raw_content': True,
             'cleanse': False
         },
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.xlsx': {
         'class': AlitaExcelLoader,
@@ -171,11 +166,7 @@ document_loaders_map = {
             'raw_content': True,
             'cleanse': False
         },
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.xls': {
         'class': AlitaExcelLoader,
@@ -186,22 +177,14 @@ document_loaders_map = {
             'raw_content': True,
             'cleanse': False
         },
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.pdf': {
         'class': AlitaPDFLoader,
         'mime_type': 'application/pdf',
         'is_multimodal_processing': False,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.docx': {
         'class': AlitaDocxMammothLoader,
@@ -211,58 +194,42 @@ document_loaders_map = {
         'kwargs': {
             'extract_images': True
         },
-        'allowed_to_override': [
-            'max_tokens', 'mode', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': {**DEFAULT_ALLOWED_WITH_LLM, 'mode': 'paged'}
     },
     '.json': {
         'class': AlitaJSONLoader,
         'mime_type': 'application/json',
         'is_multimodal_processing': False,
         'kwargs': {},
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.jsonl': {
         'class': AirbyteJSONLoader,
         'mime_type': 'application/jsonl',
         'is_multimodal_processing': False,
         'kwargs': {},
-        'allowed_to_override':
+        'allowed_to_override': DEFAULT_ALLOWED_BASE
     },
     '.htm': {
         'class': UnstructuredHTMLLoader,
         'mime_type': 'text/html',
         'is_multimodal_processing': False,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.html': {
         'class': UnstructuredHTMLLoader,
         'mime_type': 'text/html',
         'is_multimodal_processing': False,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.xml': {
         'class': UnstructuredXMLLoader,
         'mime_type': 'text/xml',
         'is_multimodal_processing': False,
         'kwargs': {},
-        'allowed_to_override': [
-            'max_tokens', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': DEFAULT_ALLOWED_WITH_LLM
     },
     '.ppt': {
         'class': AlitaPowerPointLoader,
@@ -271,11 +238,7 @@ document_loaders_map = {
         'kwargs': {
             'mode': 'paged'
         },
-        'allowed_to_override': [
-            'max_tokens', 'mode', LoaderProperties.LLM.value,
-            LoaderProperties.PROMPT.value,
-            LoaderProperties.PROMPT_DEFAULT.value
-        ]
+        'allowed_to_override': {**DEFAULT_ALLOWED_WITH_LLM, 'mode': 'paged'}
     },
     '.pptx': {
         'class': AlitaPowerPointLoader,
@@ -285,25 +248,24 @@ document_loaders_map = {
         'kwargs': {
             'mode': 'paged'
         },
-        'allowed_to_override':
-
-
-
-
-
-        ]
+        'allowed_to_override': {
+            **DEFAULT_ALLOWED_WITH_LLM,
+            'mode': 'paged',
+            'pages_per_chunk': 5,
+            'extract_images': False,
+        }
     },
-    '.py': {
-
-
-
-
-
-    }
+    # '.py': {
+    #     'class': AlitaPythonLoader,
+    #     'mime_type': 'text/x-python',
+    #     'is_multimodal_processing': False,
+    #     'kwargs': {},
+    #     'allowed_to_override': DEFAULT_ALLOWED_BASE
+    # }
 }
 
 code_extensions = [
-
+    '.py', # Python
     '.js', # JavaScript
     '.ts', # TypeScript
     '.java', # Java
@@ -335,10 +297,21 @@ default_loader_config = {
     'mime_type': 'text/plain',
     'is_multimodal_processing': False,
     'kwargs': {},
-    'allowed_to_override':
+    'allowed_to_override': DEFAULT_ALLOWED_BASE
 }
 
 code_loaders_map = {ext: default_loader_config for ext in code_extensions}
 
 # Combined mapping for backward compatibility
-loaders_map = {
+loaders_map = {
+    **image_loaders_map,
+    **image_loaders_map_converted,
+    **document_loaders_map,
+    **code_loaders_map
+}
+
+loaders_allowed_to_override = {
+    extension: config.get('allowed_to_override')
+    for extension, config in loaders_map.items()
+    if 'allowed_to_override' in config
+}
```
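The last hunk also derives a `loaders_allowed_to_override` mapping from the combined `loaders_map`. A hedged sketch of how downstream code might consume it; `resolve_loader_kwargs` is illustrative and not an SDK function:

```python
from alita_sdk.runtime.langchain.document_loaders.constants import loaders_allowed_to_override

def resolve_loader_kwargs(extension: str, user_overrides: dict) -> dict:
    """Keep only whitelisted overrides for a file extension and start from the
    defaults carried by its allowed_to_override dict (hypothetical helper)."""
    allowed = loaders_allowed_to_override.get(extension, {})
    resolved = dict(allowed)                 # defaults, e.g. {'max_tokens': 512, ...}
    for key, value in user_overrides.items():
        if key in allowed:                   # silently drop non-whitelisted keys
            resolved[key] = value
    return resolved

# With 0.3.462, '.png' maps to DEFAULT_ALLOWED_WITH_LLM, so 'chunk_size' is dropped:
print(resolve_loader_kwargs('.png', {'max_tokens': 1024, 'chunk_size': 500}))
# {'max_tokens': 1024, 'use_llm': False, 'use_default_prompt': False, 'prompt': ''}
```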
alita_sdk/runtime/langchain/langraph_agent.py:

```diff
@@ -19,8 +19,9 @@ from langgraph.managed.base import is_managed_value
 from langgraph.prebuilt import InjectedStore
 from langgraph.store.base import BaseStore
 
+from .constants import PRINTER_NODE_RS, PRINTER, PRINTER_COMPLETED_STATE
 from .mixedAgentRenderes import convert_message_to_json
-from .utils import create_state, propagate_the_input_mapping
+from .utils import create_state, propagate_the_input_mapping, safe_format
 from ..tools.function import FunctionTool
 from ..tools.indexer_tool import IndexerNode
 from ..tools.llm import LLMNode
@@ -232,6 +233,32 @@ class StateDefaultNode(Runnable):
                 result[key] = temp_value
         return result
 
+class PrinterNode(Runnable):
+    name = "PrinterNode"
+
+    def __init__(self, input_mapping: Optional[dict[str, dict]]):
+        self.input_mapping = input_mapping
+
+    def invoke(self, state: BaseStore, config: Optional[RunnableConfig] = None) -> dict:
+        logger.info(f"Printer Node - Current state variables: {state}")
+        result = {}
+        logger.debug(f"Initial text pattern: {self.input_mapping}")
+        mapping = propagate_the_input_mapping(self.input_mapping, [], state)
+        # for printer node we expect that all the lists will be joined into strings already
+        # Join any lists that haven't been converted yet
+        for key, value in mapping.items():
+            if isinstance(value, list):
+                mapping[key] = ', '.join(str(item) for item in value)
+        if mapping.get(PRINTER) is None:
+            raise ToolException(f"PrinterNode requires '{PRINTER}' field in input mapping")
+        formatted_output = mapping[PRINTER]
+        # add info label to the printer's output
+        if not formatted_output == PRINTER_COMPLETED_STATE:
+            formatted_output += f"\n\n-----\n*How to proceed?*\n* *to resume the pipeline - type anything...*"
+        logger.debug(f"Formatted output: {formatted_output}")
+        result[PRINTER_NODE_RS] = formatted_output
+        return result
+
 
 class StateModifierNode(Runnable):
     name = "StateModifierNode"
```
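The new `PrinterNode` reads a single `printer` field from its resolved input mapping and publishes the formatted text under `PRINTER_NODE_RS`. A hedged sketch of what a printer node definition might look like in a pipeline schema; the field names follow the `create_graph` changes further below, while the concrete ids and values are made up:

```python
# Illustrative only: node ids, the state variable and the transition target are hypothetical.
printer_node = {
    'id': 'show_progress',
    'type': 'printer',
    # PrinterNode reads the 'printer' key from its resolved input mapping;
    # here it is bound to a state variable so the current value is echoed to the user.
    'input_mapping': {
        'printer': {'type': 'variable', 'value': 'summary'},
    },
    # control resumes here after the user replies to the printed message
    'transition': 'next_step',
}
```

The langraph_agent.py diff continues below with the widened `prepare_output_schema` signature and the node types handled by `create_graph`.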
```diff
@@ -348,8 +375,8 @@ class StateModifierNode(Runnable):
         return result
 
 
-
-
+def prepare_output_schema(lg_builder, memory, store, debug=False, interrupt_before=None, interrupt_after=None,
+                          state_class=None, output_variables=None):
     # prepare output channels
     if interrupt_after is None:
         interrupt_after = []
@@ -453,10 +480,14 @@ def create_graph(
             if toolkit_name:
                 tool_name = f"{clean_string(toolkit_name)}{TOOLKIT_SPLITTER}{tool_name}"
             logger.info(f"Node: {node_id} : {node_type} - {tool_name}")
-            if node_type in ['function', 'tool', 'loop', 'loop_from_tool', 'indexer', 'subgraph', 'pipeline', 'agent']:
+            if node_type in ['function', 'toolkit', 'mcp', 'tool', 'loop', 'loop_from_tool', 'indexer', 'subgraph', 'pipeline', 'agent']:
+                if node_type == 'mcp' and tool_name not in [tool.name for tool in tools]:
+                    # MCP is not connected and node cannot be added
+                    raise ToolException(f"MCP tool '{tool_name}' not found in the provided tools. "
+                                        f"Make sure it is connected properly. Available tools: {[tool.name for tool in tools]}")
                 for tool in tools:
                     if tool.name == tool_name:
-                        if node_type
+                        if node_type in ['function', 'toolkit', 'mcp']:
                             lg_builder.add_node(node_id, FunctionTool(
                                 tool=tool, name=node_id, return_type='dict',
                                 output_variables=node.get('output', []),
```
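The hunk above also adds an availability guard for `mcp` nodes. Restated as a standalone check for readability (the SDK raises it inline inside `create_graph`):

```python
from langchain_core.tools import ToolException

def ensure_mcp_tool_available(tool_name: str, tools: list) -> None:
    """Fail fast when an 'mcp' node references a tool that was never registered,
    e.g. because the MCP server is not connected. Mirrors the guard in the diff."""
    available = [tool.name for tool in tools]
    if tool_name not in available:
        raise ToolException(
            f"MCP tool '{tool_name}' not found in the provided tools. "
            f"Make sure it is connected properly. Available tools: {available}"
        )
```

The `create_graph` changes continue below.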
```diff
@@ -466,11 +497,12 @@ def create_graph(
                         elif node_type == 'agent':
                             input_params = node.get('input', ['messages'])
                             input_mapping = node.get('input_mapping',
-
+                                                     {'messages': {'type': 'variable', 'value': 'messages'}})
+                            output_vars = node.get('output', [])
                             lg_builder.add_node(node_id, FunctionTool(
                                 client=client, tool=tool,
                                 name=node_id, return_type='str',
-                                output_variables=
+                                output_variables=output_vars + ['messages'] if 'messages' not in output_vars else output_vars,
                                 input_variables=input_params,
                                 input_mapping= input_mapping
                             ))
@@ -481,7 +513,8 @@ def create_graph(
                             # wrap with mappings
                             pipeline_name = node.get('tool', None)
                             if not pipeline_name:
-                                raise ValueError(
+                                raise ValueError(
+                                    "Subgraph must have a 'tool' node: add required tool to the subgraph node")
                             node_fn = SubgraphRunnable(
                                 inner=tool.graph,
                                 name=pipeline_name,
@@ -499,15 +532,6 @@ def create_graph(
                                 structured_output=node.get('structured_output', False),
                                 task=node.get('task')
                             ))
-                        # TODO: decide on struct output for agent nodes
-                        # elif node_type == 'agent':
-                        #     lg_builder.add_node(node_id, AgentNode(
-                        #         client=client, tool=tool,
-                        #         name=node['id'], return_type='dict',
-                        #         output_variables=node.get('output', []),
-                        #         input_variables=node.get('input', ['messages']),
-                        #         task=node.get('task')
-                        #     ))
                         elif node_type == 'loop':
                             lg_builder.add_node(node_id, LoopNode(
                                 client=client, tool=tool,
@@ -520,7 +544,8 @@ def create_graph(
                             loop_toolkit_name = node.get('loop_toolkit_name')
                             loop_tool_name = node.get('loop_tool')
                             if (loop_toolkit_name and loop_tool_name) or loop_tool_name:
-                                loop_tool_name = f"{clean_string(loop_toolkit_name)}{TOOLKIT_SPLITTER}{loop_tool_name}" if loop_toolkit_name else clean_string(loop_tool_name)
+                                loop_tool_name = f"{clean_string(loop_toolkit_name)}{TOOLKIT_SPLITTER}{loop_tool_name}" if loop_toolkit_name else clean_string(
+                                    loop_tool_name)
                                 for t in tools:
                                     if t.name == loop_tool_name:
                                         logger.debug(f"Loop tool discovered: {t}")
@@ -553,6 +578,19 @@ def create_graph(
                                         input_variables=node.get('input', ['messages']),
                                         structured_output=node.get('structured_output', False)))
                                     break
+                        elif node_type == 'code':
+                            from ..tools.sandbox import create_sandbox_tool
+                            sandbox_tool = create_sandbox_tool(stateful=False, allow_net=True,
+                                                               alita_client=kwargs.get('alita_client', None))
+                            code_data = node.get('code', {'type': 'fixed', 'value': "return 'Code block is empty'"})
+                            lg_builder.add_node(node_id, FunctionTool(
+                                tool=sandbox_tool, name=node['id'], return_type='dict',
+                                output_variables=node.get('output', []),
+                                input_mapping={'code': code_data},
+                                input_variables=node.get('input', ['messages']),
+                                structured_output=node.get('structured_output', False),
+                                alita_client=kwargs.get('alita_client', None)
+                            ))
                         elif node_type == 'llm':
                             output_vars = node.get('output', [])
                             output_vars_dict = {
@@ -581,7 +619,7 @@ def create_graph(
                             else:
                                 # Use all available tools
                                 available_tools = [tool for tool in tools if isinstance(tool, BaseTool)]
-
+
                             lg_builder.add_node(node_id, LLMNode(
                                 client=client,
                                 input_mapping=node.get('input_mapping', {'messages': {'type': 'variable', 'value': 'messages'}}),
@@ -592,7 +630,9 @@ def create_graph(
                                 input_variables=node.get('input', ['messages']),
                                 structured_output=node.get('structured_output', False),
                                 available_tools=available_tools,
-                                tool_names=tool_names))
+                                tool_names=tool_names,
+                                steps_limit=kwargs.get('steps_limit', 25)
+                            ))
                         elif node_type == 'router':
                             # Add a RouterNode as an independent node
                             lg_builder.add_node(node_id, RouterNode(
@@ -612,6 +652,7 @@ def create_graph(
                                 default_output=node.get('default_output', 'END')
                                 )
                             )
+                            continue
                         elif node_type == 'state_modifier':
                             lg_builder.add_node(node_id, StateModifierNode(
                                 template=node.get('template', ''),
@@ -619,6 +660,22 @@ def create_graph(
                                 input_variables=node.get('input', ['messages']),
                                 output_variables=node.get('output', [])
                             ))
+                        elif node_type == 'printer':
+                            lg_builder.add_node(node_id, PrinterNode(
+                                input_mapping=node.get('input_mapping', {'printer': {'type': 'fixed', 'value': ''}}),
+                            ))
+
+                            # add interrupts after printer node if specified
+                            interrupt_after.append(clean_string(node_id))
+
+                            # reset printer output variable to avoid carrying over
+                            reset_node_id = f"{node_id}_reset"
+                            lg_builder.add_node(reset_node_id, PrinterNode(
+                                input_mapping={'printer': {'type': 'fixed', 'value': PRINTER_COMPLETED_STATE}}
+                            ))
+                            lg_builder.add_conditional_edges(node_id, TransitionalEdge(reset_node_id))
+                            lg_builder.add_conditional_edges(reset_node_id, TransitionalEdge(clean_string(node['transition'])))
+                            continue
             if node.get('transition'):
                 next_step = clean_string(node['transition'])
                 logger.info(f'Adding transition: {next_step}')
```
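Among the node types added above, `code` routes a snippet to a sandbox tool built with `create_sandbox_tool(stateful=False, allow_net=True)`. A hedged sketch of a `code` node definition; the field names come from the branch above, while the id, snippet and transition are made up:

```python
code_node = {
    'id': 'compute_totals',                 # hypothetical node id
    'type': 'code',
    'code': {
        'type': 'fixed',
        'value': "return sum(range(10))",   # hypothetical snippet run in the sandbox
    },
    'input': ['messages'],
    'output': ['totals'],
    'structured_output': False,
    'transition': 'report',                 # hypothetical next node
}
```

The final hunk shown in this excerpt, below, reworks input handling in `LangGraphAgentRunnable.invoke`.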
```diff
@@ -765,45 +822,114 @@ class LangGraphAgentRunnable(CompiledStateGraph):
             # Convert chat history dict messages to LangChain message objects
             chat_history = input.pop('chat_history')
             input['messages'] = [convert_dict_to_message(msg) for msg in chat_history]
-
+
+        # handler for LLM node: if no input (Chat perspective), then take last human message
+        # Track if input came from messages to handle content extraction properly
+        input_from_messages = False
+        if not input.get('input'):
+            if input.get('messages'):
+                input['input'] = [next((msg for msg in reversed(input['messages']) if isinstance(msg, HumanMessage)),
+                                       None)]
+                if input['input'] is not None:
+                    input_from_messages = True
+
         # Append current input to existing messages instead of overwriting
         if input.get('input'):
-
+            if isinstance(input['input'], str):
+                current_message = input['input']
+            else:
+                # input can be a list of messages or a single message object
+                current_message = input.get('input')[-1]
+
             # TODO: add handler after we add 2+ inputs (filterByType, etc.)
-
+            if isinstance(current_message, HumanMessage):
+                current_content = current_message.content
+                if isinstance(current_content, list):
+                    # Extract text parts and keep non-text parts (images, etc.)
+                    text_contents = []
+                    non_text_parts = []
+
+                    for item in current_content:
+                        if isinstance(item, dict) and item.get('type') == 'text':
+                            text_contents.append(item['text'])
+                        elif isinstance(item, str):
+                            text_contents.append(item)
+                        else:
+                            # Keep image_url and other non-text content
+                            non_text_parts.append(item)
+
+                    # Set input to the joined text
+                    input['input'] = ". ".join(text_contents) if text_contents else ""
+
+                    # If this message came from input['messages'], update or remove it
+                    if input_from_messages:
+                        if non_text_parts:
+                            # Keep the message but only with non-text content (images, etc.)
+                            current_message.content = non_text_parts
+                        else:
+                            # All content was text, remove this message from the list
+                            input['messages'] = [msg for msg in input['messages'] if msg is not current_message]
+
+                elif isinstance(current_content, str):
+                    # on regenerate case
+                    input['input'] = current_content
+                    # If from messages and all content is text, remove the message
+                    if input_from_messages:
+                        input['messages'] = [msg for msg in input['messages'] if msg is not current_message]
+                else:
+                    input['input'] = str(current_content)
+                    # If from messages, remove since we extracted the content
+                    if input_from_messages:
+                        input['messages'] = [msg for msg in input['messages'] if msg is not current_message]
+            elif isinstance(current_message, str):
+                input['input'] = current_message
+            else:
+                input['input'] = str(current_message)
             if input.get('messages'):
                 # Ensure existing messages are LangChain objects
                 input['messages'] = [convert_dict_to_message(msg) for msg in input['messages']]
                 # Append to existing messages
-                input['messages'].append(current_message)
-            else:
-                #
-                input['messages'] = [current_message]
+                # input['messages'].append(current_message)
+            # else:
+                # NOTE: Commented out to prevent duplicates with input['input']
+                # input['messages'] = [current_message]
+
+        # Validate that input is not empty after all processing
+        if not input.get('input'):
+            raise RuntimeError(
+                "Empty input after processing. Cannot send empty string to LLM. "
+                "This likely means the message contained only non-text content "
+                "with no accompanying text."
+            )
+
         logging.info(f"Input: {thread_id} - {input}")
         if self.checkpointer and self.checkpointer.get_tuple(config):
             self.update_state(config, input)
-
+            if config.pop("should_continue", False):
+                invoke_input = input
+            else:
+                invoke_input = None
+            result = super().invoke(invoke_input, config=config, *args, **kwargs)
         else:
             result = super().invoke(input, config=config, *args, **kwargs)
         try:
-            if
-
-
-            if not output:
-                output = result['messages'][-1].content
+            if result.get(PRINTER_NODE_RS) == PRINTER_COMPLETED_STATE:
+                output = next((msg.content for msg in reversed(result['messages']) if not isinstance(msg, HumanMessage)),
+                              result['messages'][-1].content)
             else:
-                output
+                # used for printer node output - it will be reset by next `reset` node
+                output = result.get(PRINTER_NODE_RS)
         except:
             output = list(result.values())[-1]
-        thread_id = None
         config_state = self.get_state(config)
-
-
+        is_execution_finished = not config_state.next
+        if is_execution_finished:
+            thread_id = None
 
         result_with_state = {
             "output": output,
             "thread_id": thread_id,
-            "execution_finished":
+            "execution_finished": is_execution_finished
         }
 
         # Include all state values in the result
```
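The invoke changes above split multimodal `HumanMessage` content into a joined text string (used as `input['input']`) and the remaining non-text parts. A standalone restatement of that splitting logic, for illustration only:

```python
from langchain_core.messages import HumanMessage

def split_text_and_attachments(message: HumanMessage) -> tuple[str, list]:
    """Join the text parts of a (possibly multimodal) HumanMessage into one string
    and return the non-text parts (image_url blocks, etc.) separately."""
    content = message.content
    if isinstance(content, str):
        return content, []
    text_parts, non_text_parts = [], []
    for item in content:
        if isinstance(item, dict) and item.get('type') == 'text':
            text_parts.append(item['text'])
        elif isinstance(item, str):
            text_parts.append(item)
        else:
            non_text_parts.append(item)      # keep image_url and other content
    return ". ".join(text_parts) if text_parts else "", non_text_parts

# Example with a hypothetical image URL:
msg = HumanMessage(content=[
    {'type': 'text', 'text': 'Describe this image'},
    {'type': 'image_url', 'image_url': {'url': 'https://example.com/cat.png'}},
])
text, attachments = split_text_and_attachments(msg)
assert text == 'Describe this image' and len(attachments) == 1
```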