alita-sdk 0.3.360__py3-none-any.whl → 0.3.362__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/runtime/clients/client.py +1 -12
- alita_sdk/runtime/langchain/langraph_agent.py +25 -10
- alita_sdk/runtime/langchain/utils.py +4 -0
- alita_sdk/runtime/toolkits/tools.py +1 -1
- alita_sdk/runtime/tools/artifact.py +60 -12
- alita_sdk/runtime/tools/router.py +2 -6
- alita_sdk/runtime/utils/utils.py +6 -0
- alita_sdk/tools/jira/api_wrapper.py +9 -12
- alita_sdk/tools/utils/content_parser.py +56 -42
- {alita_sdk-0.3.360.dist-info → alita_sdk-0.3.362.dist-info}/METADATA +1 -1
- {alita_sdk-0.3.360.dist-info → alita_sdk-0.3.362.dist-info}/RECORD +14 -14
- {alita_sdk-0.3.360.dist-info → alita_sdk-0.3.362.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.360.dist-info → alita_sdk-0.3.362.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.360.dist-info → alita_sdk-0.3.362.dist-info}/top_level.txt +0 -0
alita_sdk/runtime/clients/client.py CHANGED

@@ -231,21 +231,17 @@ class AlitaClient:
         if not self.model_image_generation:
             raise ValueError("Image generation model is not configured for this client")
 
-        # Prepare the request data - only include non-"auto" parameters
-        # This prevents LiteLLM from adding unsupported parameters to extra_body
         image_generation_data = {
             "prompt": prompt,
             "model": self.model_image_generation,
             "n": n,
             "response_format": response_format,
-            # Enable drop_params to prevent LiteLLM from adding unsupported params to extra_body
-            "drop_params": True
         }
 
         # Only add optional parameters if they have meaningful values
         if size and size.lower() != "auto":
             image_generation_data["size"] = size
-
+
         if quality and quality.lower() != "auto":
             image_generation_data["quality"] = quality
 
@@ -259,7 +255,6 @@ class AlitaClient:
         })
 
         logger.info(f"Generating image with model: {self.model_image_generation}, prompt: {prompt[:50]}...")
-        logger.debug(f"Image generation request data: {image_generation_data}")
 
         try:
             response = requests.post(
@@ -274,12 +269,6 @@ class AlitaClient:
 
         except requests.exceptions.HTTPError as e:
             logger.error(f"Image generation failed: {e.response.status_code} - {e.response.text}")
-            # Try to parse the error response for more details
-            try:
-                error_details = e.response.json()
-                logger.error(f"Error details: {error_details}")
-            except:
-                pass
             raise
         except requests.exceptions.RequestException as e:
             logger.error(f"Image generation request failed: {e}")
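In short, the image-generation request body no longer carries drop_params, and optional parameters set to "auto" are simply omitted. A minimal sketch of that payload logic (illustrative only; build_image_request is not a function in the package):

# Illustrative sketch only; mirrors the payload logic shown in the diff above.
def build_image_request(prompt: str, model: str, n: int = 1,
                        response_format: str = "b64_json",
                        size: str = "auto", quality: str = "auto") -> dict:
    data = {
        "prompt": prompt,
        "model": model,
        "n": n,
        "response_format": response_format,
        # note: no "drop_params" key is embedded in the request body anymore
    }
    # Optional parameters are added only when they carry a concrete value.
    if size and size.lower() != "auto":
        data["size"] = size
    if quality and quality.lower() != "auto":
        data["quality"] = quality
    return data


print(build_image_request("a red fox in watercolor", "dall-e-3", size="1024x1024"))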
alita_sdk/runtime/langchain/langraph_agent.py CHANGED

@@ -458,7 +458,7 @@ def create_graph(
         if tool.name == tool_name:
             if node_type == 'function':
                 lg_builder.add_node(node_id, FunctionTool(
-                    tool=tool, name=
+                    tool=tool, name=node_id, return_type='dict',
                     output_variables=node.get('output', []),
                     input_mapping=node.get('input_mapping',
                                            {'messages': {'type': 'variable', 'value': 'messages'}}),
@@ -469,7 +469,7 @@ def create_graph(
                                            {'messages': {'type': 'variable', 'value': 'messages'}})
                 lg_builder.add_node(node_id, FunctionTool(
                     client=client, tool=tool,
-                    name=
+                    name=node_id, return_type='str',
                     output_variables=node.get('output', []),
                     input_variables=input_params,
                     input_mapping= input_mapping
@@ -493,7 +493,7 @@ def create_graph(
             elif node_type == 'tool':
                 lg_builder.add_node(node_id, ToolNode(
                     client=client, tool=tool,
-                    name=
+                    name=node_id, return_type='dict',
                     output_variables=node.get('output', []),
                     input_variables=node.get('input', ['messages']),
                     structured_output=node.get('structured_output', False),
@@ -511,7 +511,7 @@ def create_graph(
             elif node_type == 'loop':
                 lg_builder.add_node(node_id, LoopNode(
                     client=client, tool=tool,
-                    name=
+                    name=node_id, return_type='dict',
                     output_variables=node.get('output', []),
                     input_variables=node.get('input', ['messages']),
                     task=node.get('task', '')
@@ -526,7 +526,7 @@ def create_graph(
                     logger.debug(f"Loop tool discovered: {t}")
                 lg_builder.add_node(node_id, LoopToolNode(
                     client=client,
-                    name=
+                    name=node_id, return_type='dict',
                     tool=tool, loop_tool=t,
                     variables_mapping=node.get('variables_mapping', {}),
                     output_variables=node.get('output', []),
@@ -546,7 +546,7 @@ def create_graph(
                     client=client, tool=tool,
                     index_tool=indexer_tool,
                     input_mapping=node.get('input_mapping', {}),
-                    name=
+                    name=node_id, return_type='dict',
                     chunking_tool=node.get('chunking_tool', None),
                     chunking_config=node.get('chunking_config', {}),
                     output_variables=node.get('output', []),
@@ -585,7 +585,7 @@ def create_graph(
                 lg_builder.add_node(node_id, LLMNode(
                     client=client,
                     input_mapping=node.get('input_mapping', {'messages': {'type': 'variable', 'value': 'messages'}}),
-                    name=
+                    name=node_id,
                     return_type='dict',
                     structured_output_dict=output_vars_dict,
                     output_variables=output_vars,
@@ -596,7 +596,7 @@ def create_graph(
             elif node_type == 'router':
                 # Add a RouterNode as an independent node
                 lg_builder.add_node(node_id, RouterNode(
-                    name=
+                    name=node_id,
                     condition=node.get('condition', ''),
                     routes=node.get('routes', []),
                     default_output=node.get('default_output', 'END'),
@@ -701,6 +701,9 @@ def set_defaults(d):
         'bool': False,
         # add more types as needed
     }
+    # Build state_types mapping with STRING type names (not actual type objects)
+    state_types = {}
+
    for k, v in d.items():
        # Skip 'input' key as it is not a state initial variable
        if k == 'input':
@@ -708,6 +711,16 @@ def set_defaults(d):
        # set value or default if type is defined
        if 'value' not in v:
            v['value'] = type_defaults.get(v['type'], None)
+
+        # Also build the state_types mapping with STRING type names
+        var_type = v['type'] if isinstance(v, dict) else v
+        if var_type in ['str', 'int', 'float', 'bool', 'list', 'dict', 'number']:
+            # Store the string type name, not the actual type object
+            state_types[k] = var_type if var_type != 'number' else 'int'
+
+    # Add state_types as a default value that will be set at initialization
+    # Use string type names to avoid serialization issues
+    d['state_types'] = {'type': 'dict', 'value': state_types}
    return d
 
 def convert_dict_to_message(msg_dict):
@@ -741,7 +754,7 @@ class LangGraphAgentRunnable(CompiledStateGraph):
     def invoke(self, input: Union[dict[str, Any], Any],
                config: Optional[RunnableConfig] = None,
                *args, **kwargs):
-        logger.info(f"
+        logger.info(f"Incoming Input: {input}")
        if config is None:
            config = RunnableConfig()
        if not config.get("configurable", {}).get("thread_id", ""):
@@ -775,7 +788,9 @@ class LangGraphAgentRunnable(CompiledStateGraph):
        try:
            if self.output_variables and self.output_variables[0] != "messages":
                # If output_variables are specified, use the value of first one or use the last messages as default
-                output = result.get(self.output_variables[0]
+                output = result.get(self.output_variables[0])
+                if not output:
+                    output = result['messages'][-1].content
            else:
                output = result['messages'][-1].content
        except:
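The set_defaults change above attaches a state_types entry that records each state variable's type as a string name ('number' is normalized to 'int') so it can be serialized alongside the state. A standalone sketch of that mapping (build_state_types is a hypothetical helper, not part of the SDK):

# Illustrative sketch of the state_types construction shown in the diff above.
def build_state_types(state_def: dict) -> dict:
    state_types = {}
    for key, spec in state_def.items():
        if key == 'input':          # 'input' is not a state initial variable
            continue
        var_type = spec['type'] if isinstance(spec, dict) else spec
        if var_type in ['str', 'int', 'float', 'bool', 'list', 'dict', 'number']:
            # keep the STRING name to avoid serialization issues; 'number' -> 'int'
            state_types[key] = var_type if var_type != 'number' else 'int'
    return state_types


print(build_state_types({'input': {'type': 'str'}, 'count': {'type': 'number'}, 'items': 'list'}))
# -> {'count': 'int', 'items': 'list'}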
alita_sdk/runtime/langchain/utils.py CHANGED

@@ -131,6 +131,7 @@ def parse_type(type_str):
 
 def create_state(data: Optional[dict] = None):
     state_dict = {'input': str, 'router_output': str} # Always include router_output
+    types_dict = {}
     if not data:
         data = {'messages': 'list[str]'}
     for key, value in data.items():
@@ -140,6 +141,9 @@ def create_state(data: Optional[dict] = None):
            state_dict[key] = Annotated[list[AnyMessage], add_messages]
        elif value in ['str', 'int', 'float', 'bool', 'list', 'dict', 'number', 'dict']:
            state_dict[key] = parse_type(value)
+
+    state_dict["state_types"] = types_dict # Default value for state_types
+    types_dict["state_types"] = dict
    logger.debug(f"Created state: {state_dict}")
    return TypedDict('State', state_dict)
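create_state now reserves a dict-typed state_types slot alongside the usual typed fields. A rough sketch, assuming langchain_core and langgraph are available, of how such a state TypedDict can be assembled (make_state and _TYPE_MAP are illustrative names, not the SDK's):

from typing import Annotated
from typing_extensions import TypedDict

from langchain_core.messages import AnyMessage
from langgraph.graph.message import add_messages

# Illustrative only: map string type names to Python types, then build the state class.
_TYPE_MAP = {'str': str, 'int': int, 'float': float, 'bool': bool,
             'list': list, 'dict': dict, 'number': int}


def make_state(data: dict):
    state_dict = {'input': str, 'router_output': str}
    for key, value in data.items():
        if value == 'list[str]':
            # message history gets the add_messages reducer
            state_dict[key] = Annotated[list[AnyMessage], add_messages]
        elif value in _TYPE_MAP:
            state_dict[key] = _TYPE_MAP[value]
    state_dict['state_types'] = dict  # default slot for per-key type names
    return TypedDict('State', state_dict)


State = make_state({'messages': 'list[str]', 'score': 'number'})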
alita_sdk/runtime/toolkits/tools.py CHANGED

@@ -165,7 +165,7 @@ def _mcp_tools(tools_list, alita):
             if not selected_tools or tool_name in selected_tools:
                 if server_tool := _init_single_mcp_tool(server_toolkit_name,
                                                         # selected_toolkit["name"] is None for toolkit_test
-                                                        selected_toolkit["
+                                                        selected_toolkit["toolkit_name"] if selected_toolkit.get("toolkit_name")
                                                         else server_toolkit_name,
                                                         available_tool, alita, selected_toolkit['settings']):
                     tools.append(server_tool)
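The single-line change above prefers the configured toolkit_name and falls back to the server toolkit name (toolkit_name is None during toolkit_test). A tiny illustration with made-up data:

# Illustrative sketch of the name-selection fallback used for MCP tools.
selected_toolkit = {"toolkit_name": None, "settings": {}}
server_toolkit_name = "github_mcp"

name = (selected_toolkit["toolkit_name"]
        if selected_toolkit.get("toolkit_name")
        else server_toolkit_name)
print(name)  # -> "github_mcp" when toolkit_name is unset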
alita_sdk/runtime/tools/artifact.py CHANGED

@@ -1,4 +1,6 @@
 import hashlib
+import io
+import json
 import logging
 import re
 from typing import Any, Optional, Generator, List
@@ -6,6 +8,7 @@ from typing import Any, Optional, Generator, List
 from langchain_core.callbacks import dispatch_custom_event
 from langchain_core.documents import Document
 from langchain_core.tools import ToolException
+from openpyxl.workbook.workbook import Workbook
 from pydantic import create_model, Field, model_validator
 
 from ...tools.non_code_indexer_toolkit import NonCodeIndexerToolkit
@@ -31,22 +34,57 @@ class ArtifactWrapper(NonCodeIndexerToolkit):
         return self.artifact.list(bucket_name, return_as_string)
 
     def create_file(self, filename: str, filedata: str, bucket_name = None):
+        if filename.endswith(".xlsx"):
+            data = json.loads(filedata)
+            filedata = self.create_xlsx_filedata(data)
+
         result = self.artifact.create(filename, filedata, bucket_name)
 
         # Dispatch custom event for file creation
-        (11 removed lines are not rendered in the diff source)
+        self._log_tool_event(
+            tool_name="file_modified",
+            message="""
+            {
+                "message": f"File '{filename}' created successfully",
+                "filename": filename,
+                "tool_name": "createFile",
+                "toolkit": "artifact",
+                "operation_type": "create",
+                "meta": {
+                    "bucket": bucket_name or self.bucket
+                }
+            }""")
+
         return result
 
+    def create_xlsx_filedata(self, data: dict[str, list[list]]) -> bytes:
+        try:
+            workbook = Workbook()
+
+            first_sheet = True
+            for sheet_name, sheet_data in data.items():
+                if first_sheet:
+                    sheet = workbook.active
+                    sheet.title = sheet_name
+                    first_sheet = False
+                else:
+                    sheet = workbook.create_sheet(title=sheet_name)
+
+                for row in sheet_data:
+                    sheet.append(row)
+
+            file_buffer = io.BytesIO()
+            workbook.save(file_buffer)
+            file_buffer.seek(0)
+
+            return file_buffer.read()
+
+        except json.JSONDecodeError:
+            raise ValueError("Invalid JSON format for .xlsx file data.")
+        except Exception as e:
+            raise ValueError(f"Error processing .xlsx file data: {e}")
+
+
     def read_file(self,
                   filename: str,
                   bucket_name = None,
@@ -190,7 +228,17 @@ class ArtifactWrapper(NonCodeIndexerToolkit):
                 "args_schema": create_model(
                     "createFile",
                     filename=(str, Field(description="Filename")),
-                    filedata=(str, Field(description="Stringified content of the file
+                    filedata=(str, Field(description="""Stringified content of the file.
+                    Example for .xlsx filedata format:
+                    {
+                        "Sheet1":[
+                            ["Name", "Age", "City"],
+                            ["Alice", 25, "New York"],
+                            ["Bob", 30, "San Francisco"],
+                            ["Charlie", 35, "Los Angeles"]
+                        ]
+                    }
+                    """)),
                     bucket_name=bucket_name
                 )
            },
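create_file now accepts a JSON description of sheets for .xlsx targets and converts it to workbook bytes via openpyxl. A standalone sketch of that conversion (xlsx_bytes_from_json is an illustrative name; openpyxl must be installed):

import io
import json

from openpyxl.workbook.workbook import Workbook

# Illustrative sketch of the JSON-to-xlsx conversion introduced above.
def xlsx_bytes_from_json(filedata: str) -> bytes:
    data = json.loads(filedata)              # {"Sheet1": [["Name", "Age"], ["Alice", 25]], ...}
    workbook = Workbook()
    first_sheet = True
    for sheet_name, rows in data.items():
        if first_sheet:
            sheet = workbook.active
            sheet.title = sheet_name
            first_sheet = False
        else:
            sheet = workbook.create_sheet(title=sheet_name)
        for row in rows:
            sheet.append(row)
    buffer = io.BytesIO()
    workbook.save(buffer)
    return buffer.getvalue()


payload = '{"Sheet1": [["Name", "Age", "City"], ["Alice", 25, "New York"]]}'
print(len(xlsx_bytes_from_json(payload)), "bytes")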
alita_sdk/runtime/tools/router.py CHANGED

@@ -8,10 +8,6 @@ from ..utils.utils import clean_string
 
 logger = logging.getLogger(__name__)
 
-def clean_node_str(s: str)-> str:
-    cleaned_string = re.sub(r'[^\w\s]', '', s)
-    return cleaned_string
-
 class RouterNode(BaseTool):
     name: str = 'RouterNode'
     description: str = 'A router node that evaluates a condition and routes accordingly.'
@@ -27,8 +23,8 @@ class RouterNode(BaseTool):
         template = EvaluateTemplate(self.condition, input_data)
         result = template.evaluate()
         logger.info(f"RouterNode evaluated condition '{self.condition}' with input {input_data} => {result}")
-        result =
-        if result in self.routes:
+        result = clean_string(str(result))
+        if result in [clean_string(formatted_result) for formatted_result in self.routes]:
             # If the result is one of the routes, return it
             return {"router_output": result}
         elif result == self.default_output:
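The router now normalizes both the evaluated result and the configured routes before matching. A sketch under the assumption that clean_string strips punctuation-like characters; the real clean_string_pattern lives in alita_sdk/runtime/utils/utils.py and is not shown in this diff:

import re


# Assumption: clean_string removes non-word, non-space characters; the real
# pattern is defined in alita_sdk/runtime/utils/utils.py and may differ.
def clean_string(s: str) -> str:
    return re.sub(r'[^\w\s]', '', s)


routes = ["approve", "reject!"]
result = clean_string("reject!")          # normalized evaluation result

if result in [clean_string(route) for route in routes]:
    print({"router_output": result})      # both sides normalized -> match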
alita_sdk/runtime/utils/utils.py CHANGED

@@ -20,3 +20,9 @@ def clean_string(s: str) -> str:
     # Replace these characters with an empty string
     cleaned_string = re.sub(clean_string_pattern, '', s)
     return cleaned_string
+
+
+def clean_node_str(s: str) -> str:
+    """Cleans a node string by removing all non-alphanumeric characters except underscores and spaces."""
+    cleaned_string = re.sub(r'[^\w\s]', '', s)
+    return cleaned_string
alita_sdk/tools/jira/api_wrapper.py CHANGED

@@ -754,18 +754,15 @@ class JiraApiWrapper(NonCodeIndexerToolkit):
                 logger.info(f"Skipping attachment {attachment['filename']} as it does not match pattern {attachment_pattern}")
                 continue
             logger.info(f"Processing attachment {attachment['filename']} with ID {attachment['attachment_id']}")
-            (9 removed lines are not rendered in the diff source)
-                path=f"secure/attachment/{attachment['attachment_id']}/{attachment['filename']}", not_json_response=True)
-            content_docs = process_content_by_type(attachment_content, attachment['filename'], llm=self.llm)
-            attachment_data.append("filename: " + attachment['filename'] + "\ncontent: " + str([doc.page_content for doc in content_docs]))
+            try:
+                attachment_content = self._client.get_attachment_content(attachment['attachment_id'])
+            except Exception as e:
+                logger.error(
+                    f"Failed to download attachment {attachment['filename']} for issue {jira_issue_key}: {str(e)}")
+                attachment_content = self._client.get(
+                    path=f"secure/attachment/{attachment['attachment_id']}/{attachment['filename']}", not_json_response=True)
+            content_docs = process_content_by_type(attachment_content, attachment['filename'], llm=self.llm, fallback_extensions=[".txt", ".png"])
+            attachment_data.append("filename: " + attachment['filename'] + "\ncontent: " + str([doc.page_content for doc in content_docs]))
 
         return "\n\n".join(attachment_data)
 
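Attachment download now tries a dedicated attachment-content call first and falls back to a raw GET of the secure attachment path. A sketch of that flow (fetch_attachment is an illustrative wrapper; the client methods shown are the ones used in the diff):

import logging

logger = logging.getLogger(__name__)


# Illustrative: `client` stands in for the wrapper's Jira client (self._client in the diff).
def fetch_attachment(client, attachment: dict, jira_issue_key: str) -> bytes:
    try:
        # Preferred path: dedicated attachment-content API.
        return client.get_attachment_content(attachment['attachment_id'])
    except Exception as e:
        logger.error(
            f"Failed to download attachment {attachment['filename']} "
            f"for issue {jira_issue_key}: {e}")
        # Fallback: raw download via the secure attachment URL.
        return client.get(
            path=f"secure/attachment/{attachment['attachment_id']}/{attachment['filename']}",
            not_json_response=True)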
alita_sdk/tools/utils/content_parser.py CHANGED

@@ -229,51 +229,65 @@ def process_document_by_type(content, extension_source: str, document: Document
     )
 
 
-def process_content_by_type(content, filename: str, llm=None, chunking_config=None) -> \
+def process_content_by_type(content, filename: str, llm=None, chunking_config=None, fallback_extensions=None) -> \
         Generator[Document, None, None]:
     """Process the content of a file based on its type using a configured loader."""
     temp_file_path = None
-    (41 removed lines are not rendered in the diff source)
+    extensions = fallback_extensions if fallback_extensions else []
+    match = re.search(r'\.([^.]+)$', filename)
+
+    if match:
+        extensions.insert(0, f".{match.group(1).lower()}")
+    elif not extensions:
+        extensions = [".txt"]
+
+    for extension in extensions:
+        try:
+            with tempfile.NamedTemporaryFile(mode='w+b', suffix=extension, delete=False) as temp_file:
+                temp_file_path = temp_file.name
+                if content is None:
+                    logger.warning(
+                        f"'{IndexerKeywords.CONTENT_IN_BYTES.value}' ie expected but not found in document metadata.")
+                    return []
+
+                temp_file.write(content)
+                temp_file.flush()
+
+                loader_config = loaders_map.get(extension)
+                if not loader_config:
+                    logger.warning(f"No loader found for file extension: {extension}. File: {temp_file_path}")
+                    return []
+
+                loader_cls = loader_config['class']
+                loader_kwargs = loader_config['kwargs']
+                # Determine which loader configuration keys are allowed to be overridden by user input.
+                # If 'allowed_to_override' is specified in the loader configuration, use it; otherwise, allow all keys in loader_kwargs.
+                allowed_to_override = loader_config.get('allowed_to_override', list(loader_kwargs.keys()))
+                # If a chunking_config is provided and contains custom configuration for the current file extension,
+                # update loader_kwargs with user-supplied values, but only for keys explicitly permitted in allowed_to_override.
+                # This ensures that only safe and intended parameters can be customized, preventing accidental or unauthorized changes
+                # to critical loader settings.
+                if chunking_config and (users_config_for_extension := chunking_config.get(extension, {})):
+                    for key in set(users_config_for_extension.keys()) & set(allowed_to_override):
+                        loader_kwargs[key] = users_config_for_extension[key]
+                if LoaderProperties.LLM.value in loader_kwargs:
+                    loader_kwargs[LoaderProperties.LLM.value] = llm
+                if LoaderProperties.PROMPT_DEFAULT.value in loader_kwargs:
+                    loader_kwargs.pop(LoaderProperties.PROMPT_DEFAULT.value)
+                    loader_kwargs[LoaderProperties.PROMPT.value] = image_processing_prompt
+                loader = loader_cls(file_path=temp_file_path, **loader_kwargs)
+                yield from loader.load()
+                break
+        except Exception as e:
+            if fallback_extensions:
+                logger.warning(f"Error loading attachment: {str(e)} for file {temp_file_path} (extension: {extension})")
+                logger.warning(f"Continuing with fallback extensions: {fallback_extensions}.")
+                continue
+            else:
+                raise e
+        finally:
+            if temp_file_path and os.path.exists(temp_file_path):
+                os.remove(temp_file_path)
 
 # FIXME copied from langchain_core/utils/strings.py of 0.3.74 version
 # https://github.com/langchain-ai/langchain/pull/32157
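The new fallback_extensions parameter controls which loader suffixes are attempted, with the file's own extension tried first. A small standalone sketch of that ordering (candidate_extensions is a hypothetical helper):

import re


# Illustrative only: reproduces the extension-ordering logic from the diff above.
def candidate_extensions(filename: str, fallback_extensions=None) -> list:
    extensions = list(fallback_extensions) if fallback_extensions else []
    match = re.search(r'\.([^.]+)$', filename)
    if match:
        # The file's own extension is tried first, then any fallbacks.
        extensions.insert(0, f".{match.group(1).lower()}")
    elif not extensions:
        extensions = [".txt"]
    return extensions


print(candidate_extensions("report.DOCX", fallback_extensions=[".txt", ".png"]))  # ['.docx', '.txt', '.png']
print(candidate_extensions("noextension"))                                        # ['.txt']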
{alita_sdk-0.3.360.dist-info → alita_sdk-0.3.362.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: alita_sdk
-Version: 0.3.360
+Version: 0.3.362
 Summary: SDK for building langchain agents using resources from Alita
 Author-email: Artem Rozumenko <artyom.rozumenko@gmail.com>, Mikalai Biazruchka <mikalai_biazruchka@epam.com>, Roman Mitusov <roman_mitusov@epam.com>, Ivan Krakhmaliuk <lifedj27@gmail.com>, Artem Dubrovskiy <ad13box@gmail.com>
 License-Expression: Apache-2.0
{alita_sdk-0.3.360.dist-info → alita_sdk-0.3.362.dist-info}/RECORD CHANGED

@@ -36,7 +36,7 @@ alita_sdk/configurations/zephyr_essential.py,sha256=tUIrh-PRNvdrLBj6rJXqlF-h6oaM
 alita_sdk/runtime/__init__.py,sha256=4W0UF-nl3QF2bvET5lnah4o24CoTwSoKXhuN0YnwvEE,828
 alita_sdk/runtime/clients/__init__.py,sha256=BdehU5GBztN1Qi1Wul0cqlU46FxUfMnI6Vq2Zd_oq1M,296
 alita_sdk/runtime/clients/artifact.py,sha256=Tt3aWcxu20bVW6EX7s_iX5CTmcItKhUnkk8Q2gv2vw0,4036
-alita_sdk/runtime/clients/client.py,sha256=
+alita_sdk/runtime/clients/client.py,sha256=K36HB_7Br1naXCbOnRqIdUcc-5Vlrl5l5OUJCyHbkSU,45782
 alita_sdk/runtime/clients/datasource.py,sha256=HAZovoQN9jBg0_-lIlGBQzb4FJdczPhkHehAiVG3Wx0,1020
 alita_sdk/runtime/clients/prompt.py,sha256=li1RG9eBwgNK_Qf0qUaZ8QNTmsncFrAL2pv3kbxZRZg,1447
 alita_sdk/runtime/langchain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -44,11 +44,11 @@ alita_sdk/runtime/langchain/assistant.py,sha256=2tH8je9uKegIIIZUuiGU4zqRVg7jyQas
 alita_sdk/runtime/langchain/chat_message_template.py,sha256=kPz8W2BG6IMyITFDA5oeb5BxVRkHEVZhuiGl4MBZKdc,2176
 alita_sdk/runtime/langchain/constants.py,sha256=eHVJ_beJNTf1WJo4yq7KMK64fxsRvs3lKc34QCXSbpk,3319
 alita_sdk/runtime/langchain/indexer.py,sha256=0ENHy5EOhThnAiYFc7QAsaTNp9rr8hDV_hTK8ahbatk,37592
-alita_sdk/runtime/langchain/langraph_agent.py,sha256=
+alita_sdk/runtime/langchain/langraph_agent.py,sha256=Nrhhcr7fIGaMFwps2XV0ZPL72Ek4wHo7Zml-JaE3AvU,48039
 alita_sdk/runtime/langchain/mixedAgentParser.py,sha256=M256lvtsL3YtYflBCEp-rWKrKtcY1dJIyRGVv7KW9ME,2611
 alita_sdk/runtime/langchain/mixedAgentRenderes.py,sha256=asBtKqm88QhZRILditjYICwFVKF5KfO38hu2O-WrSWE,5964
 alita_sdk/runtime/langchain/store_manager.py,sha256=i8Fl11IXJhrBXq1F1ukEVln57B1IBe-tqSUvfUmBV4A,2218
-alita_sdk/runtime/langchain/utils.py,sha256=
+alita_sdk/runtime/langchain/utils.py,sha256=tzoNe8V8B7nMgDFfQhQrjQehZDIQGH0zYU1uOvz2Sio,6884
 alita_sdk/runtime/langchain/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 alita_sdk/runtime/langchain/agents/xml_chat.py,sha256=Mx7PK5T97_GrFCwHHZ3JZP42S7MwtUzV0W-_8j6Amt8,6212
 alita_sdk/runtime/langchain/document_loaders/AlitaBDDScenariosLoader.py,sha256=4kFU1ijrM1Jw7cywQv8mUiBHlE6w-uqfzSZP4hUV5P4,3771
@@ -101,12 +101,12 @@ alita_sdk/runtime/toolkits/configurations.py,sha256=kIDAlnryPQfbZyFxV-9SzN2-Vefz
 alita_sdk/runtime/toolkits/datasource.py,sha256=qk78OdPoReYPCWwahfkKLbKc4pfsu-061oXRryFLP6I,2498
 alita_sdk/runtime/toolkits/prompt.py,sha256=WIpTkkVYWqIqOWR_LlSWz3ug8uO9tm5jJ7aZYdiGRn0,1192
 alita_sdk/runtime/toolkits/subgraph.py,sha256=wwUK8JjPXkGzyVZ3tAukmvST6eGbqx_U11rpnmbrvtg,2105
-alita_sdk/runtime/toolkits/tools.py,sha256=
+alita_sdk/runtime/toolkits/tools.py,sha256=WBTU-ou5u0R9QLses5N_JHP2TryrsWUr_gY8uG5xY3E,8319
 alita_sdk/runtime/toolkits/vectorstore.py,sha256=BGppQADa1ZiLO17fC0uCACTTEvPHlodEDYEzUcBRbAA,2901
 alita_sdk/runtime/tools/__init__.py,sha256=TbHPnDtCdQvNzK1YQnk_ufkuI7FgHfvY1-JWUgycZhQ,497
 alita_sdk/runtime/tools/agent.py,sha256=m98QxOHwnCRTT9j18Olbb5UPS8-ZGeQaGiUyZJSyFck,3162
 alita_sdk/runtime/tools/application.py,sha256=z3vLZODs-_xEEnZFmGF0fKz1j3VtNJxqsAmg5ovExpQ,3129
-alita_sdk/runtime/tools/artifact.py,sha256=
+alita_sdk/runtime/tools/artifact.py,sha256=u3szFwZqguHrPZ3tZJ7S_TiZl7cxlT3oHYd6zbdpRDE,13842
 alita_sdk/runtime/tools/datasource.py,sha256=pvbaSfI-ThQQnjHG-QhYNSTYRnZB0rYtZFpjCfpzxYI,2443
 alita_sdk/runtime/tools/echo.py,sha256=spw9eCweXzixJqHnZofHE1yWiSUa04L4VKycf3KCEaM,486
 alita_sdk/runtime/tools/function.py,sha256=0iZJ-UxaPbtcXAVX9G5Vsn7vmD7lrz3cBG1qylto1gs,2844
@@ -119,7 +119,7 @@ alita_sdk/runtime/tools/loop_output.py,sha256=U4hO9PCQgWlXwOq6jdmCGbegtAxGAPXObS
 alita_sdk/runtime/tools/mcp_server_tool.py,sha256=MhLxZJ44LYrB_0GrojmkyqKoDRaqIHkEQAsg718ipog,4277
 alita_sdk/runtime/tools/pgvector_search.py,sha256=NN2BGAnq4SsDHIhUcFZ8d_dbEOM8QwB0UwpsWCYruXU,11692
 alita_sdk/runtime/tools/prompt.py,sha256=nJafb_e5aOM1Rr3qGFCR-SKziU9uCsiP2okIMs9PppM,741
-alita_sdk/runtime/tools/router.py,sha256=
+alita_sdk/runtime/tools/router.py,sha256=p7e0tX6YAWw2M2Nq0A_xqw1E2P-Xz1DaJvhUstfoZn4,1584
 alita_sdk/runtime/tools/sandbox.py,sha256=WNz-aUMtkGCPg84dDy_0BPkyp-6YjoYB-xjIEFFrtKw,11601
 alita_sdk/runtime/tools/tool.py,sha256=lE1hGi6qOAXG7qxtqxarD_XMQqTghdywf261DZawwno,5631
 alita_sdk/runtime/tools/vectorstore.py,sha256=8vRhi1lGFEs3unvnflEi2p59U2MfV32lStpEizpDms0,34467
@@ -133,7 +133,7 @@ alita_sdk/runtime/utils/save_dataframe.py,sha256=i-E1wp-t4wb17Zq3nA3xYwgSILjoXNi
 alita_sdk/runtime/utils/streamlit.py,sha256=GQ69CsjfRMcGXcCrslL0Uoj24Cl07Jeji0rZxELaKTQ,104930
 alita_sdk/runtime/utils/toolkit_runtime.py,sha256=MU63Fpxj0b5_r1IUUc0Q3-PN9VwL7rUxp2MRR4tmYR8,5136
 alita_sdk/runtime/utils/toolkit_utils.py,sha256=I9QFqnaqfVgN26LUr6s3XlBlG6y0CoHURnCzG7XcwVs,5311
-alita_sdk/runtime/utils/utils.py,sha256=
+alita_sdk/runtime/utils/utils.py,sha256=iuCcyVZoBpXrHh0zQa8M-Gg_tIaznc7T9kEEUJ8a0l4,891
 alita_sdk/tools/__init__.py,sha256=jUj1ztC2FbkIUB-YYmiqaz_rqW7Il5kWzDPn1mJmj5w,10545
 alita_sdk/tools/base_indexer_toolkit.py,sha256=PyT3BDSn6gNJPXdbZw21tvTbE9WkhJD3m_pFWZJlYbU,23825
 alita_sdk/tools/code_indexer_toolkit.py,sha256=6QvI1by0OFdnKTx5TfNoDJjnMrvnTi9T56xaDxzeleU,7306
@@ -263,7 +263,7 @@ alita_sdk/tools/google/bigquery/tool.py,sha256=Esf9Hsp8I0e7-5EdkFqQ-bid0cfrg-bfS
 alita_sdk/tools/google_places/__init__.py,sha256=QtmBCI0bHDK79u4hsCSWFcUihu-h4EmPSh9Yll7zz3w,3590
 alita_sdk/tools/google_places/api_wrapper.py,sha256=7nZly6nk4f4Tm7s2MVdnnwlb-1_WHRrDhyjDiqoyPjA,4674
 alita_sdk/tools/jira/__init__.py,sha256=G-9qnOYKFWM_adG0QFexh5-2pj_WaxIxxZanB3ARFqI,6339
-alita_sdk/tools/jira/api_wrapper.py,sha256=
+alita_sdk/tools/jira/api_wrapper.py,sha256=N-aPpzV1CZaB5uU56sqXO4t2FEdmK2lVQt3VU9EYY0g,82584
 alita_sdk/tools/keycloak/__init__.py,sha256=0WB9yXMUUAHQRni1ghDEmd7GYa7aJPsTVlZgMCM9cQ0,3050
 alita_sdk/tools/keycloak/api_wrapper.py,sha256=cOGr0f3S3-c6tRDBWI8wMnetjoNSxiV5rvC_0VHb8uw,3100
 alita_sdk/tools/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -330,7 +330,7 @@ alita_sdk/tools/testrail/__init__.py,sha256=Xg4nVjULL_D8JpIXLYXppnwUfGF4-lguFwKH
 alita_sdk/tools/testrail/api_wrapper.py,sha256=tQcGlFJmftvs5ZiO4tsP19fCo4CrJeq_UEvQR1liVfE,39891
 alita_sdk/tools/utils/__init__.py,sha256=W9rCCUPtHCP5nGAbWp0n5jaNA84572aiRoqKneBnaS4,3330
 alita_sdk/tools/utils/available_tools_decorator.py,sha256=IbrdfeQkswxUFgvvN7-dyLMZMyXLiwvX7kgi3phciCk,273
-alita_sdk/tools/utils/content_parser.py,sha256=
+alita_sdk/tools/utils/content_parser.py,sha256=TuKAPUzIZx9F-pzHiVyrCFpI5emrGaOF8DgWHJP2cM4,15235
 alita_sdk/tools/vector_adapters/VectorStoreAdapter.py,sha256=ypBEAkFRGHv5edW0N9rdo1yKurNGQ4pRVEWtrN_7SeA,17656
 alita_sdk/tools/vector_adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 alita_sdk/tools/xray/__init__.py,sha256=eOMWP8VamFbbJgt1xrGpGPqB9ByOTA0Cd3LCaETzGk4,4376
@@ -352,8 +352,8 @@ alita_sdk/tools/zephyr_scale/api_wrapper.py,sha256=kT0TbmMvuKhDUZc0i7KO18O38JM9S
 alita_sdk/tools/zephyr_squad/__init__.py,sha256=0ne8XLJEQSLOWfzd2HdnqOYmQlUliKHbBED5kW_Vias,2895
 alita_sdk/tools/zephyr_squad/api_wrapper.py,sha256=kmw_xol8YIYFplBLWTqP_VKPRhL_1ItDD0_vXTe_UuI,14906
 alita_sdk/tools/zephyr_squad/zephyr_squad_cloud_client.py,sha256=R371waHsms4sllHCbijKYs90C-9Yu0sSR3N4SUfQOgU,5066
-alita_sdk-0.3.
-alita_sdk-0.3.
-alita_sdk-0.3.
-alita_sdk-0.3.
-alita_sdk-0.3.
+alita_sdk-0.3.362.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+alita_sdk-0.3.362.dist-info/METADATA,sha256=-dQUAdfEQUBXMeIDx9i7d9eNDss9eUsj7_dWUT-pTO8,19071
+alita_sdk-0.3.362.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+alita_sdk-0.3.362.dist-info/top_level.txt,sha256=0vJYy5p_jK6AwVb1aqXr7Kgqgk3WDtQ6t5C-XI9zkmg,10
+alita_sdk-0.3.362.dist-info/RECORD,,