alita-sdk 0.3.377__py3-none-any.whl → 0.3.379__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alita-sdk might be problematic.
- alita_sdk/runtime/langchain/assistant.py +13 -23
- alita_sdk/runtime/langchain/langraph_agent.py +2 -2
- alita_sdk/runtime/langchain/utils.py +5 -1
- alita_sdk/runtime/toolkits/tools.py +10 -1
- alita_sdk/runtime/tools/function.py +5 -4
- alita_sdk/runtime/tools/sandbox.py +121 -32
- alita_sdk/runtime/utils/utils.py +1 -0
- alita_sdk/tools/__init__.py +3 -1
- alita_sdk/tools/base_indexer_toolkit.py +29 -26
- alita_sdk/tools/elitea_base.py +1 -1
- {alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/METADATA +1 -1
- {alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/RECORD +15 -15
- {alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/WHEEL +0 -0
- {alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/licenses/LICENSE +0 -0
- {alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/top_level.txt +0 -0
alita_sdk/runtime/langchain/assistant.py CHANGED

@@ -77,11 +77,17 @@ class Assistant:
         else:
             # For predict agents, initialize memory store to None since they don't use memory
             self.store = None
-
+
         # Lazy import to avoid circular dependency
         from ..toolkits.tools import get_tools
-        (two removed lines; their original content is not shown in this diff view)
+        version_tools = data['tools']
+        # Handle internal tools
+        meta = data.get('meta', {})
+        if meta.get("internal_tools"):
+            for internal_tool_name in meta.get("internal_tools"):
+                version_tools.append({"type": "internal_tool", "name": internal_tool_name})
+
+        self.tools = get_tools(version_tools, alita_client=alita, llm=self.client, memory_store=self.store)
         if tools:
             self.tools += tools
         # Handle prompt setup
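For context, a minimal sketch (payload invented; only the 'tools' and 'meta' keys mirror the hunk above) of how a version that declares internal tools now reaches get_tools:

data = {
    "tools": [{"type": "artifact", "settings": {"bucket": "demo"}}],   # hypothetical entry
    "meta": {"internal_tools": ["pyodide"]},
}

version_tools = data['tools']
meta = data.get('meta', {})
if meta.get("internal_tools"):
    for internal_tool_name in meta.get("internal_tools"):
        version_tools.append({"type": "internal_tool", "name": internal_tool_name})

print(version_tools)
# [{'type': 'artifact', 'settings': {'bucket': 'demo'}}, {'type': 'internal_tool', 'name': 'pyodide'}]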
@@ -118,9 +124,11 @@ class Assistant:
         if variables:
             self.prompt.partial_variables = variables
         try:
-            logger.info(
+            logger.info(
+                f"Client was created with client setting: temperature - {self.client._get_model_default_parameters}")
         except Exception as e:
-            logger.info(
+            logger.info(
+                f"Client was created with client setting: temperature - {self.client.temperature} : {self.client.max_tokens}")

     def _configure_store(self, memory_tool: dict | None) -> None:
         """
@@ -157,7 +165,6 @@ class Assistant:
         agent = create_json_chat_agent(llm=self.client, tools=simple_tools, prompt=self.prompt)
         return self._agent_executor(agent)

-
     def getXMLAgentExecutor(self):
         # Exclude compiled graph runnables from simple tool agents
         simple_tools = [t for t in self.tools if isinstance(t, (BaseTool, CompiledStateGraph))]
@@ -177,23 +184,6 @@ class Assistant:
         """
         # Exclude compiled graph runnables from simple tool agents
         simple_tools = [t for t in self.tools if isinstance(t, (BaseTool, CompiledStateGraph))]
-
-        # Add sandbox tool by default for react agents
-        try:
-            from ..tools.sandbox import create_sandbox_tool
-            sandbox_tool = create_sandbox_tool(stateful=False, allow_net=True)
-            simple_tools.append(sandbox_tool)
-            logger.info("Added PyodideSandboxTool to react agent")
-        except ImportError as e:
-            logger.warning(f"Failed to add PyodideSandboxTool: {e}. Install langchain-sandbox to enable this feature.")
-        except RuntimeError as e:
-            if "Deno" in str(e):
-                logger.warning("Failed to add PyodideSandboxTool: Deno is required. Install from https://docs.deno.com/runtime/getting_started/installation/")
-            else:
-                logger.warning(f"Failed to add PyodideSandboxTool: {e}")
-        except Exception as e:
-            logger.error(f"Error adding PyodideSandboxTool: {e}")
-
         # Add image generation tool if model is configured
         if self.alita_client.model_image_generation is not None:
             try:
alita_sdk/runtime/langchain/langraph_agent.py CHANGED

@@ -556,11 +556,11 @@ def create_graph(
         elif node_type == 'code':
             from ..tools.sandbox import create_sandbox_tool
             sandbox_tool = create_sandbox_tool(stateful=False, allow_net=True)
-
+            code_data = node.get('code', {'type': 'fixed', 'value': "return 'Code block is empty'"})
             lg_builder.add_node(node_id, FunctionTool(
                 tool=sandbox_tool, name=node['id'], return_type='dict',
                 output_variables=node.get('output', []),
-                input_mapping={'code':
+                input_mapping={'code': code_data},
                 input_variables=node.get('input', ['messages']),
                 structured_output=node.get('structured_output', False),
                 alita_client=kwargs.get('alita_client', None)
alita_sdk/runtime/langchain/utils.py CHANGED

@@ -177,7 +177,11 @@ def propagate_the_input_mapping(input_mapping: dict[str, dict], input_variables:
         var_dict = create_params(input_variables, source)

         if value['type'] == 'fstring':
-
+            try:
+                input_data[key] = value['value'].format(**var_dict)
+            except KeyError as e:
+                logger.error(f"KeyError in fstring formatting for key '{key}'. Attempt to find proper data in state.\n{e}")
+                input_data[key] = value['value'].format(**state)
         elif value['type'] == 'fixed':
             input_data[key] = value['value']
         else:
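A small illustration (values invented) of the fallback added above: when the fstring template references a key that is missing from the node's declared input variables, formatting is retried against the full graph state:

value = {"type": "fstring", "value": "Summarize {topic} in a {tone} tone"}
var_dict = {"tone": "neutral"}                      # built from the declared input variables
state = {"topic": "Q3 results", "tone": "neutral"}  # hypothetical LangGraph state

try:
    rendered = value["value"].format(**var_dict)
except KeyError:
    rendered = value["value"].format(**state)       # new fallback path

print(rendered)  # Summarize Q3 results in a neutral tone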
alita_sdk/runtime/toolkits/tools.py CHANGED

@@ -12,6 +12,7 @@ from .prompt import PromptToolkit
 from .subgraph import SubgraphToolkit
 from .vectorstore import VectorStoreToolkit
 from ..tools.mcp_server_tool import McpServerTool
+from ..tools.sandbox import SandboxToolkit
 # Import community tools
 from ...community import get_toolkits as community_toolkits, get_tools as community_tools
 from ...tools.memory import MemoryToolkit

@@ -24,7 +25,8 @@ def get_toolkits():
     core_toolkits = [
         ArtifactToolkit.toolkit_config_schema(),
         MemoryToolkit.toolkit_config_schema(),
-        VectorStoreToolkit.toolkit_config_schema()
+        VectorStoreToolkit.toolkit_config_schema(),
+        SandboxToolkit.toolkit_config_schema()
     ]

     return core_toolkits + community_toolkits() + alita_toolkits()
@@ -65,6 +67,13 @@ def get_tools(tools_list: list, alita_client, llm, memory_store: BaseStore = Non
                 pgvector_configuration=tool['settings'].get('pgvector_configuration', {}),
                 store=memory_store,
             ).get_tools()
+        # TODO: update configuration of internal tools
+        elif tool['type'] == 'internal_tool':
+            if tool['name'] == 'pyodide':
+                tools += SandboxToolkit.get_toolkit(
+                    stateful=False,
+                    allow_net=True,
+                ).get_tools()
         elif tool['type'] == 'artifact':
             tools.extend(ArtifactToolkit.get_toolkit(
                 client=alita_client,
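Taken together with the assistant change above, an appended internal-tool entry is resolved roughly like this (sketch under the assumption that langchain-sandbox and Deno are installed; only the branch shown in the hunk is reproduced):

from alita_sdk.runtime.tools.sandbox import SandboxToolkit

tool = {"type": "internal_tool", "name": "pyodide"}
tools = []
if tool["type"] == "internal_tool" and tool["name"] == "pyodide":
    # stateless sandbox with network access, as hard-coded in the new branch
    tools += SandboxToolkit.get_toolkit(stateful=False, allow_net=True).get_tools()
print([t.name for t in tools])  # expected: ['pyodide_sandbox']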
alita_sdk/runtime/tools/function.py CHANGED

@@ -31,13 +31,10 @@ class FunctionTool(BaseTool):
         """Prepare input for PyodideSandboxTool by injecting state into the code block."""
         # add state into the code block here since it might be changed during the execution of the code
         state_copy = deepcopy(state)
-        # pickle state
-        import pickle

         del state_copy['messages']  # remove messages to avoid issues with pickling without langchain-core
-        serialized_state = pickle.dumps(state_copy)
         # inject state into the code block as alita_state variable
-        pyodide_predata = f"
+        pyodide_predata = f"alita_state = {state_copy}"
         # add classes related to sandbox client
         # read the content of alita_sdk/runtime/cliens/sandbox_client.py
         try:
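Roughly what the new preamble looks like at runtime (state values invented; the exact way FunctionTool concatenates the preamble with the user code is not shown in this hunk):

from copy import deepcopy

state = {"messages": ["..."], "plan": "collect metrics", "iteration": 2}
state_copy = deepcopy(state)
del state_copy['messages']

pyodide_predata = f"alita_state = {state_copy}"
user_code = "print(alita_state['plan'])"
print(pyodide_predata + "\n" + user_code)
# alita_state = {'plan': 'collect metrics', 'iteration': 2}
# print(alita_state['plan'])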
@@ -64,6 +61,10 @@ class FunctionTool(BaseTool):

         if self.output_variables:
             for var in self.output_variables:
+                if var == "messages":
+                    tool_result_converted.update(
+                        {"messages": [{"role": "assistant", "content": dumps(tool_result)}]})
+                    continue
                 if isinstance(tool_result, dict) and var in tool_result:
                     tool_result_converted[var] = tool_result[var]
                 else:
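Illustration of the new output handling (assuming dumps is a JSON-style serializer, as used in the hunk): a node that lists "messages" among its output variables gets the whole tool result wrapped into a single assistant message:

from json import dumps  # assumption: FunctionTool uses a JSON-compatible dumps

output_variables = ["messages", "stdout"]
tool_result = {"stdout": "4\n", "success": True}
tool_result_converted = {}

for var in output_variables:
    if var == "messages":
        tool_result_converted.update(
            {"messages": [{"role": "assistant", "content": dumps(tool_result)}]})
        continue
    if isinstance(tool_result, dict) and var in tool_result:
        tool_result_converted[var] = tool_result[var]

print(tool_result_converted["messages"][0]["role"])  # assistant
print(tool_result_converted["stdout"])               # 4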
alita_sdk/runtime/tools/sandbox.py CHANGED

@@ -2,21 +2,56 @@ import asyncio
 import logging
 import subprocess
 import os
-from typing import Any, Type, Optional, Dict
-
-from
+from typing import Any, Type, Optional, Dict, List, Literal
+
+from langchain_core.tools import BaseTool, BaseToolkit
+from pydantic import BaseModel, create_model, ConfigDict, Field
 from pydantic.fields import FieldInfo

 logger = logging.getLogger(__name__)

+name = "pyodide"
+
+
+def get_tools(tools_list: list, alita_client=None, llm=None, memory_store=None):
+    """
+    Get sandbox tools for the provided tool configurations.
+
+    Args:
+        tools_list: List of tool configurations
+        alita_client: Alita client instance (unused for sandbox)
+        llm: LLM client instance (unused for sandbox)
+        memory_store: Optional memory store instance (unused for sandbox)
+
+    Returns:
+        List of sandbox tools
+    """
+    all_tools = []
+
+    for tool in tools_list:
+        if tool.get('type') == 'sandbox' or tool.get('toolkit_name') == 'sandbox':
+            try:
+                toolkit_instance = SandboxToolkit.get_toolkit(
+                    stateful=tool['settings'].get('stateful', False),
+                    allow_net=tool['settings'].get('allow_net', True),
+                    toolkit_name=tool.get('toolkit_name', '')
+                )
+                all_tools.extend(toolkit_instance.get_tools())
+            except Exception as e:
+                logger.error(f"Error in sandbox toolkit get_tools: {e}")
+                logger.error(f"Tool config: {tool}")
+                raise
+
+    return all_tools
+

 def _is_deno_available() -> bool:
     """Check if Deno is available in the PATH"""
     try:
         result = subprocess.run(
-            ["deno", "--version"],
-            capture_output=True,
-            text=True,
+            ["deno", "--version"],
+            capture_output=True,
+            text=True,
             timeout=10
         )
         return result.returncode == 0
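A hedged usage sketch of the new module-level get_tools (the configuration keys mirror those read above; actually running it requires langchain-sandbox and a Deno installation):

from alita_sdk.runtime.tools import sandbox

tool_configs = [
    {
        "type": "sandbox",
        "toolkit_name": "sandbox",
        "settings": {"stateful": False, "allow_net": True},
    }
]

tools = sandbox.get_tools(tool_configs)
print([t.name for t in tools])  # expected: ['pyodide_sandbox']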
The following hunks in alita_sdk/runtime/tools/sandbox.py contain only whitespace changes (blank lines rewritten without trailing whitespace and identically re-indented lines); the surrounding code is otherwise unchanged:

@@ -42,26 +77,26 @@ def _setup_pyodide_cache_env() -> None:
@@ -72,7 +107,7 @@ class PyodideSandboxTool(BaseTool):
@@ -81,7 +116,7 @@ class PyodideSandboxTool(BaseTool):
@@ -91,14 +126,14 @@ class PyodideSandboxTool(BaseTool):
@@ -110,9 +145,9 @@ class PyodideSandboxTool(BaseTool):
@@ -135,7 +170,7 @@ class PyodideSandboxTool(BaseTool):
@@ -144,7 +179,7 @@ class PyodideSandboxTool(BaseTool):
@@ -169,7 +204,7 @@ class PyodideSandboxTool(BaseTool):
@@ -177,19 +212,19 @@ class PyodideSandboxTool(BaseTool):
@@ -212,7 +247,7 @@ class PyodideSandboxTool(BaseTool):
@@ -223,7 +258,7 @@ class StatefulPyodideSandboxTool(PyodideSandboxTool):
@@ -232,11 +267,11 @@ class StatefulPyodideSandboxTool(PyodideSandboxTool):
@@ -246,21 +281,21 @@ class StatefulPyodideSandboxTool(PyodideSandboxTool):
@@ -269,4 +304,58 @@ def create_sandbox_tool(stateful: bool = False, allow_net: bool = True) -> BaseTool:
     if stateful:
         return StatefulPyodideSandboxTool(allow_net=allow_net)
     else:
-        return PyodideSandboxTool(stateful=False, allow_net=allow_net)
+        return PyodideSandboxTool(stateful=False, allow_net=allow_net)
+
+
+class SandboxToolkit(BaseToolkit):
+    tools: List[BaseTool] = []
+
+    @staticmethod
+    def toolkit_config_schema() -> BaseModel:
+        # Create sample tools to get their schemas
+        sample_tools = [
+            PyodideSandboxTool(),
+            StatefulPyodideSandboxTool()
+        ]
+        selected_tools = {x.name: x.args_schema.schema() for x in sample_tools}
+
+        return create_model(
+            'sandbox',
+            stateful=(bool, Field(default=False, description="Whether to maintain state between executions")),
+            allow_net=(bool, Field(default=True, description="Whether to allow network access for package installation")),
+            selected_tools=(List[Literal[tuple(selected_tools)]],
+                            Field(default=[], json_schema_extra={'args_schemas': selected_tools})),
+
+            __config__=ConfigDict(json_schema_extra={
+                'metadata': {
+                    "label": "Python Sandbox",
+                    "icon_url": "sandbox.svg",
+                    "hidden": False,
+                    "categories": ["code", "execution", "internal_tool"],
+                    "extra_categories": ["python", "pyodide", "sandbox", "code execution"],
+                }
+            })
+        )
+
+    @classmethod
+    def get_toolkit(cls, stateful: bool = False, allow_net: bool = True, **kwargs):
+        """
+        Get toolkit with sandbox tools.
+
+        Args:
+            stateful: Whether to maintain state between executions
+            allow_net: Whether to allow network access
+            **kwargs: Additional arguments
+        """
+        tools = []
+
+        if stateful:
+            tools.append(StatefulPyodideSandboxTool(allow_net=allow_net))
+        else:
+            tools.append(PyodideSandboxTool(stateful=False, allow_net=allow_net))
+
+        return cls(tools=tools)
+
+    def get_tools(self):
+        return self.tools
+
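Usage sketch for the factory function and the new toolkit class (the .invoke call follows the standard LangChain BaseTool interface; langchain-sandbox and Deno must be installed for this to run):

from alita_sdk.runtime.tools.sandbox import SandboxToolkit, create_sandbox_tool

# Single stateless sandbox tool via the existing factory
tool = create_sandbox_tool(stateful=False, allow_net=True)
result = tool.invoke({"code": "print(2 + 2)"})

# The same tool obtained through the toolkit added in this release
toolkit = SandboxToolkit.get_toolkit(stateful=False, allow_net=True)
print([t.name for t in toolkit.get_tools()])  # ['pyodide_sandbox']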
alita_sdk/runtime/utils/utils.py CHANGED

@@ -14,6 +14,7 @@ class IndexerKeywords(Enum):
     INDEX_META_TYPE = 'index_meta'
     INDEX_META_IN_PROGRESS = 'in_progress'
     INDEX_META_COMPLETED = 'completed'
+    INDEX_META_FAILED = 'failed'

 # This pattern matches characters that are NOT alphanumeric, underscores, or hyphens
 clean_string_pattern = re.compile(r'[^a-zA-Z0-9_.-]')
alita_sdk/tools/__init__.py CHANGED

@@ -97,7 +97,9 @@ def get_tools(tools_list, alita, llm, store: Optional[BaseStore] = None, *args,
         for tool_name in tool.get('settings', {}).get('selected_tools', []):
             if isinstance(tool_name, str) and tool_name.startswith('_'):
                 raise ValueError(f"Tool name '{tool_name}' from toolkit '{tool.get('type', '')}' cannot start with '_'")
-
+        if not tool.get('settings'):
+            logger.warning(f"Tool '{tool.get('type', '')}' has no settings, skipping...")
+            continue
         tool['settings']['alita'] = alita
         tool['settings']['llm'] = llm
         tool['settings']['store'] = store
alita_sdk/tools/base_indexer_toolkit.py CHANGED

@@ -152,39 +152,43 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):

     def index_data(self, **kwargs):
         index_name = kwargs.get("index_name")
-        progress_step = kwargs.get("progress_step")
         clean_index = kwargs.get("clean_index")
         chunking_tool = kwargs.get("chunking_tool")
         chunking_config = kwargs.get("chunking_config")
+        result = {"count": 0}
         #
-        (21 removed lines; the previous body of index_data is not shown in this diff view)
+        try:
+            if clean_index:
+                self._clean_index(index_name)
+            #
+            self.index_meta_init(index_name, kwargs)
+            #
+            self._log_tool_event(f"Indexing data into collection with suffix '{index_name}'. It can take some time...")
+            self._log_tool_event(f"Loading the documents to index...{kwargs}")
+            documents = self._base_loader(**kwargs)
+            documents = list(documents) # consume/exhaust generator to count items
+            documents_count = len(documents)
+            documents = (doc for doc in documents)
+            self._log_tool_event(f"Base documents were pre-loaded. "
+                                 f"Search for possible document duplicates and remove them from the indexing list...")
+            documents = self._reduce_duplicates(documents, index_name)
+            self._log_tool_event(f"Duplicates were removed. "
+                                 f"Processing documents to collect dependencies and prepare them for indexing...")
+            self._save_index_generator(documents, documents_count, chunking_tool, chunking_config, index_name=index_name, result=result)
+            #
+            self.index_meta_update(index_name, IndexerKeywords.INDEX_META_COMPLETED.value, result["count"])
+            #
+            return {"status": "ok", "message": f"successfully indexed {result["count"]} documents"}
+        except Exception as e:
+            self.index_meta_update(index_name, IndexerKeywords.INDEX_META_FAILED.value, result["count"])
+            raise e
+

-    def _save_index_generator(self, base_documents: Generator[Document, None, None], base_total: int, chunking_tool, chunking_config, index_name: Optional[str] = None
+    def _save_index_generator(self, base_documents: Generator[Document, None, None], base_total: int, chunking_tool, chunking_config, result, index_name: Optional[str] = None):
         self._log_tool_event(f"Base documents are ready for indexing. {base_total} base documents in total to index.")
         from ..runtime.langchain.interfaces.llm_processor import add_documents
         #
         base_doc_counter = 0
-        total_counter = 0
         pg_vector_add_docs_chunk = []
         for base_doc in base_documents:
             base_doc_counter += 1

@@ -232,10 +236,9 @@ class BaseIndexerToolkit(VectorStoreWrapperBase):
             msg = f"Indexed base document #{base_doc_counter} out of {base_total} (with {dependent_docs_counter} dependencies)."
             logger.debug(msg)
             self._log_tool_event(msg)
-
+            result["count"] += dependent_docs_counter
         if pg_vector_add_docs_chunk:
             add_documents(vectorstore=self.vectorstore, documents=pg_vector_add_docs_chunk)
-        return total_counter

     def _apply_loaders_chunkers(self, documents: Generator[Document, None, None], chunking_tool: str=None, chunking_config=None) -> Generator[Document, None, None]:
         from ..tools.chunkers import __all__ as chunkers
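Minimal sketch of the new success/failure bookkeeping in index_data (names follow the hunks above; the bodies are stand-ins, not the real implementation):

class DemoIndexer:
    def index_meta_update(self, index_name, status, count):
        print(f"index meta for '{index_name}' -> {status} ({count} documents)")

    def index_data(self, index_name):
        result = {"count": 0}
        try:
            result["count"] += 42   # stands in for _save_index_generator(...), which now increments result["count"]
            self.index_meta_update(index_name, "completed", result["count"])
            return {"status": "ok", "message": f"successfully indexed {result['count']} documents"}
        except Exception:
            self.index_meta_update(index_name, "failed", result["count"])
            raise

print(DemoIndexer().index_data("docs"))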
alita_sdk/tools/elitea_base.py CHANGED

@@ -537,7 +537,7 @@ class BaseVectorStoreToolApiWrapper(BaseToolApiWrapper):
                 "args_schema": RemoveIndexParams
             },
             {
-                "name": "
+                "name": "list_collections",
                 "mode": "list_collections",
                 "ref": self.list_collections,
                 "description": self.list_collections.__doc__,
{alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: alita_sdk
-Version: 0.3.
+Version: 0.3.379
 Summary: SDK for building langchain agents using resources from Alita
 Author-email: Artem Rozumenko <artyom.rozumenko@gmail.com>, Mikalai Biazruchka <mikalai_biazruchka@epam.com>, Roman Mitusov <roman_mitusov@epam.com>, Ivan Krakhmaliuk <lifedj27@gmail.com>, Artem Dubrovskiy <ad13box@gmail.com>
 License-Expression: Apache-2.0
{alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/RECORD CHANGED

@@ -41,15 +41,15 @@ alita_sdk/runtime/clients/datasource.py,sha256=HAZovoQN9jBg0_-lIlGBQzb4FJdczPhkH
 alita_sdk/runtime/clients/prompt.py,sha256=li1RG9eBwgNK_Qf0qUaZ8QNTmsncFrAL2pv3kbxZRZg,1447
 alita_sdk/runtime/clients/sandbox_client.py,sha256=OhEasE0MxBBDw4o76xkxVCpNpr3xJ8spQsrsVxMrjUA,16192
 alita_sdk/runtime/langchain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-alita_sdk/runtime/langchain/assistant.py,sha256=
+alita_sdk/runtime/langchain/assistant.py,sha256=HqFQ8nWrSdbXRx-huf0srA_zYCxcMjEndBCJVVriRMQ,15607
 alita_sdk/runtime/langchain/chat_message_template.py,sha256=kPz8W2BG6IMyITFDA5oeb5BxVRkHEVZhuiGl4MBZKdc,2176
 alita_sdk/runtime/langchain/constants.py,sha256=eHVJ_beJNTf1WJo4yq7KMK64fxsRvs3lKc34QCXSbpk,3319
 alita_sdk/runtime/langchain/indexer.py,sha256=0ENHy5EOhThnAiYFc7QAsaTNp9rr8hDV_hTK8ahbatk,37592
-alita_sdk/runtime/langchain/langraph_agent.py,sha256=
+alita_sdk/runtime/langchain/langraph_agent.py,sha256=2AjPJQk1TpBzAKsJAPsETtjUF1p9GH5Q-GC51knX1es,48659
 alita_sdk/runtime/langchain/mixedAgentParser.py,sha256=M256lvtsL3YtYflBCEp-rWKrKtcY1dJIyRGVv7KW9ME,2611
 alita_sdk/runtime/langchain/mixedAgentRenderes.py,sha256=asBtKqm88QhZRILditjYICwFVKF5KfO38hu2O-WrSWE,5964
 alita_sdk/runtime/langchain/store_manager.py,sha256=i8Fl11IXJhrBXq1F1ukEVln57B1IBe-tqSUvfUmBV4A,2218
-alita_sdk/runtime/langchain/utils.py,sha256=
+alita_sdk/runtime/langchain/utils.py,sha256=hWWIRKov7X54lr43zH8m214gOul8KLT5YOShdFZjFjs,7128
 alita_sdk/runtime/langchain/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 alita_sdk/runtime/langchain/agents/xml_chat.py,sha256=Mx7PK5T97_GrFCwHHZ3JZP42S7MwtUzV0W-_8j6Amt8,6212
 alita_sdk/runtime/langchain/document_loaders/AlitaBDDScenariosLoader.py,sha256=4kFU1ijrM1Jw7cywQv8mUiBHlE6w-uqfzSZP4hUV5P4,3771

@@ -102,7 +102,7 @@ alita_sdk/runtime/toolkits/configurations.py,sha256=kIDAlnryPQfbZyFxV-9SzN2-Vefz
 alita_sdk/runtime/toolkits/datasource.py,sha256=qk78OdPoReYPCWwahfkKLbKc4pfsu-061oXRryFLP6I,2498
 alita_sdk/runtime/toolkits/prompt.py,sha256=WIpTkkVYWqIqOWR_LlSWz3ug8uO9tm5jJ7aZYdiGRn0,1192
 alita_sdk/runtime/toolkits/subgraph.py,sha256=wwUK8JjPXkGzyVZ3tAukmvST6eGbqx_U11rpnmbrvtg,2105
-alita_sdk/runtime/toolkits/tools.py,sha256=
+alita_sdk/runtime/toolkits/tools.py,sha256=Lo90zr6G_Mi378hpMNTRFmk05A7dDJDMbtVHEeFd5cM,8708
 alita_sdk/runtime/toolkits/vectorstore.py,sha256=BGppQADa1ZiLO17fC0uCACTTEvPHlodEDYEzUcBRbAA,2901
 alita_sdk/runtime/tools/__init__.py,sha256=TbHPnDtCdQvNzK1YQnk_ufkuI7FgHfvY1-JWUgycZhQ,497
 alita_sdk/runtime/tools/agent.py,sha256=m98QxOHwnCRTT9j18Olbb5UPS8-ZGeQaGiUyZJSyFck,3162

@@ -110,7 +110,7 @@ alita_sdk/runtime/tools/application.py,sha256=z3vLZODs-_xEEnZFmGF0fKz1j3VtNJxqsA
 alita_sdk/runtime/tools/artifact.py,sha256=u3szFwZqguHrPZ3tZJ7S_TiZl7cxlT3oHYd6zbdpRDE,13842
 alita_sdk/runtime/tools/datasource.py,sha256=pvbaSfI-ThQQnjHG-QhYNSTYRnZB0rYtZFpjCfpzxYI,2443
 alita_sdk/runtime/tools/echo.py,sha256=spw9eCweXzixJqHnZofHE1yWiSUa04L4VKycf3KCEaM,486
-alita_sdk/runtime/tools/function.py,sha256=
+alita_sdk/runtime/tools/function.py,sha256=4r-VbGtm8gN_RTU2I-1iUNVs_MWNgRqn-aQrr__lCTc,7001
 alita_sdk/runtime/tools/graph.py,sha256=MbnZYqdmvZY7SGDp43lOVVIjUt5ARHSgj43mdtBjSjQ,3092
 alita_sdk/runtime/tools/image_generation.py,sha256=8ZH4SoRrbS4EzmtF6cpNMRvuFephCYD2S8uqNC9KGE4,4274
 alita_sdk/runtime/tools/indexer_tool.py,sha256=whSLPevB4WD6dhh2JDXEivDmTvbjiMV1MrPl9cz5eLA,4375

@@ -121,7 +121,7 @@ alita_sdk/runtime/tools/mcp_server_tool.py,sha256=MhLxZJ44LYrB_0GrojmkyqKoDRaqIH
 alita_sdk/runtime/tools/pgvector_search.py,sha256=NN2BGAnq4SsDHIhUcFZ8d_dbEOM8QwB0UwpsWCYruXU,11692
 alita_sdk/runtime/tools/prompt.py,sha256=nJafb_e5aOM1Rr3qGFCR-SKziU9uCsiP2okIMs9PppM,741
 alita_sdk/runtime/tools/router.py,sha256=p7e0tX6YAWw2M2Nq0A_xqw1E2P-Xz1DaJvhUstfoZn4,1584
-alita_sdk/runtime/tools/sandbox.py,sha256=
+alita_sdk/runtime/tools/sandbox.py,sha256=CBPaHwGv74FiHbELuwLBEvB2C-HhbptUnSMipzJUSZA,14282
 alita_sdk/runtime/tools/tool.py,sha256=lE1hGi6qOAXG7qxtqxarD_XMQqTghdywf261DZawwno,5631
 alita_sdk/runtime/tools/vectorstore.py,sha256=FsnxdnvMK5bUEFxz0eeSHeNpVOk2gxOeXjoSlvCo8rs,34327
 alita_sdk/runtime/tools/vectorstore_base.py,sha256=lNz6bOMpHOY8JiHT7BkoDbyj3kLykcKlCx4zOu_IgPE,28252

@@ -134,11 +134,11 @@ alita_sdk/runtime/utils/save_dataframe.py,sha256=i-E1wp-t4wb17Zq3nA3xYwgSILjoXNi
 alita_sdk/runtime/utils/streamlit.py,sha256=GQ69CsjfRMcGXcCrslL0Uoj24Cl07Jeji0rZxELaKTQ,104930
 alita_sdk/runtime/utils/toolkit_runtime.py,sha256=MU63Fpxj0b5_r1IUUc0Q3-PN9VwL7rUxp2MRR4tmYR8,5136
 alita_sdk/runtime/utils/toolkit_utils.py,sha256=I9QFqnaqfVgN26LUr6s3XlBlG6y0CoHURnCzG7XcwVs,5311
-alita_sdk/runtime/utils/utils.py,sha256=
-alita_sdk/tools/__init__.py,sha256=
-alita_sdk/tools/base_indexer_toolkit.py,sha256=
+alita_sdk/runtime/utils/utils.py,sha256=PJK8A-JVIzY1IowOjGG8DIqsIiEFe65qDKvFcjJCKWA,1041
+alita_sdk/tools/__init__.py,sha256=NrZyTEdEhmO1NnAR9RFMQ05Mb-kgu68mAQz3n5r0HYs,10692
+alita_sdk/tools/base_indexer_toolkit.py,sha256=i0S3tIdXrWoRx5B5v0cQMArnmOTsinC9SMLihlEcdxM,26801
 alita_sdk/tools/code_indexer_toolkit.py,sha256=p3zVnCnQTUf7JUGra9Rl6GEK2W1-hvvz0Xsgz0v0muM,7292
-alita_sdk/tools/elitea_base.py,sha256=
+alita_sdk/tools/elitea_base.py,sha256=34fmVdYgd2YXifU5LFNjMQysr4OOIZ6AOZjq4GxLgSw,34417
 alita_sdk/tools/non_code_indexer_toolkit.py,sha256=6Lrqor1VeSLbPLDHAfg_7UAUqKFy1r_n6bdsc4-ak98,1315
 alita_sdk/tools/ado/__init__.py,sha256=NnNYpNFW0_N_v1td_iekYOoQRRB7PIunbpT2f9ZFJM4,1201
 alita_sdk/tools/ado/utils.py,sha256=PTCludvaQmPLakF2EbCGy66Mro4-rjDtavVP-xcB2Wc,1252

@@ -353,8 +353,8 @@ alita_sdk/tools/zephyr_scale/api_wrapper.py,sha256=kT0TbmMvuKhDUZc0i7KO18O38JM9S
 alita_sdk/tools/zephyr_squad/__init__.py,sha256=0ne8XLJEQSLOWfzd2HdnqOYmQlUliKHbBED5kW_Vias,2895
 alita_sdk/tools/zephyr_squad/api_wrapper.py,sha256=kmw_xol8YIYFplBLWTqP_VKPRhL_1ItDD0_vXTe_UuI,14906
 alita_sdk/tools/zephyr_squad/zephyr_squad_cloud_client.py,sha256=R371waHsms4sllHCbijKYs90C-9Yu0sSR3N4SUfQOgU,5066
-alita_sdk-0.3.
-alita_sdk-0.3.
-alita_sdk-0.3.
-alita_sdk-0.3.
-alita_sdk-0.3.
+alita_sdk-0.3.379.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+alita_sdk-0.3.379.dist-info/METADATA,sha256=3ZAbyIgfi1p802VDdfvzPvuPiZ5o8RyiTGIAbkIR1Yk,19071
+alita_sdk-0.3.379.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+alita_sdk-0.3.379.dist-info/top_level.txt,sha256=0vJYy5p_jK6AwVb1aqXr7Kgqgk3WDtQ6t5C-XI9zkmg,10
+alita_sdk-0.3.379.dist-info/RECORD,,

{alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/WHEEL — file without changes
{alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/licenses/LICENSE — file without changes
{alita_sdk-0.3.377.dist-info → alita_sdk-0.3.379.dist-info}/top_level.txt — file without changes