alita-sdk 0.3.149__py3-none-any.whl → 0.3.151__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
alita_sdk/__init__.py CHANGED
@@ -9,13 +9,13 @@ This package contains three main modules:
 
 __version__ = "0.3.142"
 
-# Import key components
-from .runtime import *
-from .tools import *
-from .community import *
+__all__ = ["runtime", "tools", "community"]
 
-__all__ = [
-    "runtime",
-    "tools",
-    "community",
-]
+import importlib
+
+def __getattr__(name):
+    if name in __all__:
+        module = importlib.import_module(f".{name}", __name__)
+        globals()[name] = module
+        return module
+    raise AttributeError(f"module {__name__} has no attribute {name}")
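The replacement is PEP 562 lazy loading: a module-level `__getattr__` imports each subpackage on first attribute access and caches it in `globals()`, so the hook fires at most once per name. A minimal sketch of the resulting behavior (the session itself is illustrative):

```python
import alita_sdk

# No subpackage has been imported yet; first access triggers the hook.
runtime = alita_sdk.runtime            # importlib.import_module(".runtime", "alita_sdk")
assert alita_sdk.runtime is runtime    # cached in globals(); __getattr__ not called again

# Unknown names still fail loudly.
try:
    alita_sdk.no_such_module
except AttributeError as exc:
    print(exc)  # module alita_sdk has no attribute no_such_module
```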
@@ -7,23 +7,27 @@ Includes agents, clients, language models, and utilities.
 
 import importlib
 
-# Import available runtime modules
-__all__ = []
+_modules = [
+    "agents",
+    "clients",
+    "langchain",
+    "llamaindex",
+    "llms",
+    "toolkits",
+    "tools",
+    "utils",
+]
 
-# Standard imports with fallback
-_modules = ['agents', 'clients', 'langchain', 'llamaindex', 'llms', 'toolkits', 'tools', 'utils']
+__all__ = _modules + ["get_tools", "get_toolkits"]
 
-for module_name in _modules:
-    try:
-        module = importlib.import_module(f'.{module_name}', package=__name__)
-        globals()[module_name] = module
-        __all__.append(module_name)
-    except ImportError:
-        pass
-
-# Always try to export core functions from toolkits
-try:
-    from .toolkits.tools import get_tools, get_toolkits
-    __all__.extend(["get_tools", "get_toolkits"])
-except ImportError:
-    pass
+def __getattr__(name):
+    if name in _modules:
+        module = importlib.import_module(f".{name}", __name__)
+        globals()[name] = module
+        return module
+    if name in {"get_tools", "get_toolkits"}:
+        toolkits = importlib.import_module(".toolkits.tools", __name__)
+        value = getattr(toolkits, name)
+        globals()[name] = value
+        return value
+    raise AttributeError(f"module {__name__} has no attribute {name}")
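Note the behavioral shift here: the old loop silently dropped any submodule that failed to import, so `__all__` reflected only what was importable, whereas now every name is always advertised and a missing optional dependency surfaces on first access instead of being swallowed at import time. A hedged sketch:

```python
import alita_sdk.runtime as rt

print("llamaindex" in rt.__all__)   # True even if llama-index isn't installed

try:
    rt.llamaindex                   # the lazy import happens here
except ImportError as exc:          # raised only when the dependency is absent
    print(f"optional module unavailable: {exc}")
```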
@@ -1 +1,12 @@
-from .client import AlitaClient
+"""
+Runtime clients package.
+"""
+
+try:
+    from .client import AlitaClient
+    __all__ = ['AlitaClient']
+except ImportError as e:
+    # Handle case where dependencies are not available
+    import logging
+    logging.getLogger(__name__).debug(f"Failed to import AlitaClient: {e}")
+    __all__ = []
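Since the guarded import only logs at DEBUG level, callers that want to degrade gracefully can test `__all__` instead of wrapping the import themselves; a small illustrative sketch:

```python
from alita_sdk.runtime import clients

if "AlitaClient" in clients.__all__:
    client_cls = clients.AlitaClient
else:
    client_cls = None   # dependencies missing; the failure was logged at DEBUG
```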
@@ -9,6 +9,7 @@ from langchain_core.messages import (
     SystemMessage, BaseMessage,
 )
 from langchain_core.tools import ToolException
+from langgraph.store.base import BaseStore
 
 from ..langchain.assistant import Assistant as LangChainAssistant
 # from ..llamaindex.assistant import Assistant as LLamaAssistant
@@ -178,7 +179,7 @@ class AlitaClient:
                     tools: Optional[list] = None, chat_history: Optional[List[Any]] = None,
                     app_type=None, memory=None, runtime='langchain',
                     application_variables: Optional[dict] = None,
-                    version_details: Optional[dict] = None):
+                    version_details: Optional[dict] = None, store: Optional[BaseStore] = None):
         if tools is None:
             tools = []
         if chat_history is None:
@@ -210,11 +211,11 @@ class AlitaClient:
             app_type = "openai"
         if runtime == 'nonrunnable':
             return LangChainAssistant(self, data, client, chat_history, app_type,
-                                      tools=tools, memory=memory)
+                                      tools=tools, memory=memory, store=store)
         if runtime == 'langchain':
             return LangChainAssistant(self, data, client,
                                       chat_history, app_type,
-                                      tools=tools, memory=memory).runnable()
+                                      tools=tools, memory=memory, store=store).runnable()
         elif runtime == 'llama':
            raise NotImplementedError("LLama runtime is not supported")
 
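Together, these client changes thread an optional LangGraph `BaseStore` from `application()` into the assistant. A hedged wiring sketch; `alita_client`, `llm`, and the ids are placeholders, and `InMemoryStore` stands in for the Postgres-backed store introduced later in this diff:

```python
from langgraph.store.memory import InMemoryStore

store = InMemoryStore()
assistant = alita_client.application(
    llm, application_id, application_version_id,
    store=store,   # forwarded to LangChainAssistant(..., store=store)
)
```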
@@ -5,8 +5,9 @@ from typing import Any, Optional
 from langchain.agents import (
     AgentExecutor, create_openai_tools_agent,
     create_json_chat_agent)
+from langgraph.store.base import BaseStore
+
 from .agents.xml_chat import create_xml_chat_agent
-# from ..langchain.mixedAgentRenderes import render_react_text_description_and_args
 from .langraph_agent import create_graph
 from langchain_core.messages import (
     BaseMessage, SystemMessage, HumanMessage
@@ -28,7 +29,8 @@ class Assistant:
                  chat_history: list[BaseMessage] = [],
                  app_type: str = "openai",
                  tools: Optional[list] = [],
-                 memory: Optional[Any] = None):
+                 memory: Optional[Any] = None,
+                 store: Optional[BaseStore] = None):
 
         self.client = copy(client)
         self.client.max_tokens = data['llm_settings']['max_tokens']
@@ -40,6 +42,7 @@ class Assistant:
 
         self.app_type = app_type
         self.memory = memory
+        self.store = store
 
         logger.debug("Data for agent creation: %s", data)
         logger.info("App type: %s", app_type)
@@ -58,7 +61,10 @@ class Assistant:
             raise ToolException("Non-pipeline agents cannot have pipelines as a toolkits. "
                                 "Review toolkits configuration or use pipeline as master agent.")
 
-        self.tools = get_tools(data['tools'], alita_client=alita, llm=self.client)
+        # configure memory store if memory tool is defined
+        memory_tool = next((tool for tool in data['tools'] if tool['type'] == 'memory'), None)
+        self._configure_store(memory_tool)
+        self.tools = get_tools(data['tools'], alita_client=alita, llm=self.client, memory_store=self.store)
         if app_type == "pipeline":
             self.prompt = data['instructions']
         else:
@@ -94,6 +100,18 @@ class Assistant:
         except Exception as e:
             logger.info(f"Client was created with client setting: temperature - {self.client.temperature} : {self.client.max_tokens}")
 
+    def _configure_store(self, memory_tool: dict | None) -> None:
+        """
+        Configure the memory store based on a memory_tool definition.
+        Only creates a new store if one does not already exist.
+        """
+        if not memory_tool or self.store is not None:
+            return
+        from .store_manager import get_manager
+        conn_str = memory_tool.get('settings', {}).get('connection_string', '')
+        store = get_manager().get_store(conn_str)
+        self.store = store
+
     def runnable(self):
         if self.app_type == 'pipeline':
             return self.pipeline()
@@ -0,0 +1,46 @@
+import threading
+import atexit
+import logging
+from psycopg import Connection
+from langgraph.store.postgres import PostgresStore
+
+logger = logging.getLogger(__name__)
+
+class StoreManager:
+    _instance = None
+    _lock = threading.Lock()
+
+    def __new__(cls):
+        if cls._instance is None:
+            with cls._lock:
+                if cls._instance is None:
+                    cls._instance = super().__new__(cls)
+                    cls._instance._stores = {}
+        return cls._instance
+
+    def get_store(self, conn_str: str) -> PostgresStore:
+        store = self._stores.get(conn_str)
+        if store is None:
+            logger.info(f"Creating new PostgresStore for connection: {conn_str}")
+            conn = Connection.connect(conn_str, autocommit=True, prepare_threshold=0)
+            store = PostgresStore(conn)
+            store.setup()
+            self._stores[conn_str] = store
+        return store
+
+    def shutdown(self) -> None:
+        logger.info("Shutting down StoreManager and closing all stores")
+        for store in list(self._stores.values()):
+            try:
+                conn = getattr(store, 'conn', None)
+                if conn:
+                    conn.close()
+            except Exception:
+                pass
+        self._stores.clear()
+
+_store_manager = StoreManager()
+atexit.register(_store_manager.shutdown)
+
+def get_manager() -> StoreManager:
+    return _store_manager
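`StoreManager` is a double-checked-locking singleton that caches one `PostgresStore` per connection string and closes the underlying psycopg connections at interpreter exit. A usage sketch, assuming the new module lands at `alita_sdk/runtime/langchain/store_manager.py` (inferred from the relative import in the assistant above) and using a placeholder DSN:

```python
from alita_sdk.runtime.langchain.store_manager import get_manager

dsn = "postgresql://user:pass@localhost:5432/memory"
store = get_manager().get_store(dsn)

# LangGraph BaseStore API: tuple namespaces, dict values.
store.put(("agent-42", "facts"), "greeting", {"text": "hello"})
print(store.get(("agent-42", "facts"), "greeting").value)  # {'text': 'hello'}

# The same DSN returns the cached store and reuses its connection.
assert get_manager().get_store(dsn) is store
```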
@@ -30,7 +30,11 @@ from langchain_core.language_models import BaseChatModel # pylint: disable=E0401
 from langchain_core.messages import AIMessage, AIMessageChunk # pylint: disable=E0401
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult # pylint: disable=E0401
 
-from ..langchain.tools import log
+try:
+    from ..langchain.tools import log
+except ImportError:
+    import logging as _logging
+    log = _logging.getLogger(__name__)
 
 
 class PreloadedEmbeddings(Embeddings):
@@ -1,4 +1,6 @@
 from typing import List, Any, Optional
+
+from langgraph.store.base import BaseStore
 from pydantic import create_model, BaseModel, Field
 from langchain_community.agent_toolkits.base import BaseToolkit
 from langchain_core.tools import BaseTool
@@ -19,8 +21,8 @@ class ApplicationToolkit(BaseToolkit):
     )
 
     @classmethod
-    def get_toolkit(cls, client: Any, application_id: int, application_version_id: int, app_api_key: str,
-                    selected_tools: list[str] = []):
+    def get_toolkit(cls, client: Any, application_id: int, application_version_id: int, app_api_key: str,
+                    selected_tools: list[str] = [], store: Optional[BaseStore] = None):
         from ..llms.alita import AlitaChatModel
 
         app_details = client.get_app_details(application_id)
@@ -37,7 +39,7 @@ class ApplicationToolkit(BaseToolkit):
             "temperature": version_details['llm_settings']['temperature'],
         }
 
-        app = client.application(AlitaChatModel(**settings), application_id, application_version_id)
+        app = client.application(AlitaChatModel(**settings), application_id, application_version_id, store=store)
         return cls(tools=[Application(name=app_details.get("name"),
                                       description=app_details.get("description"),
                                       application=app,
@@ -1,19 +1,20 @@
 import logging
 
+from langchain_core.tools import ToolException
+from langgraph.store.base import BaseStore
+
 from alita_sdk.tools import get_toolkits as alita_toolkits
 from alita_sdk.tools import get_tools as alita_tools
-
 from .application import ApplicationToolkit
 from .artifact import ArtifactToolkit
 from .datasource import DatasourcesToolkit
 from .prompt import PromptToolkit
 from .subgraph import SubgraphToolkit
 from .vectorstore import VectorStoreToolkit
-
+from ..tools.mcp_server_tool import McpServerTool
 # Import community tools
 from ...community import get_toolkits as community_toolkits, get_tools as community_tools
-
-from ..tools.mcp_server_tool import McpServerTool
+from ...tools.memory import MemoryToolkit
 
 logger = logging.getLogger(__name__)
 
@@ -24,13 +25,14 @@ def get_toolkits():
         # DatasourcesToolkit.toolkit_config_schema(),
         # ApplicationToolkit.toolkit_config_schema(),
         ArtifactToolkit.toolkit_config_schema(),
+        MemoryToolkit.toolkit_config_schema(),
         VectorStoreToolkit.toolkit_config_schema()
     ]
 
     return core_toolkits + community_toolkits() + alita_toolkits()
 
 
-def get_tools(tools_list: list, alita_client, llm) -> list:
+def get_tools(tools_list: list, alita_client, llm, memory_store: BaseStore = None) -> list:
     prompts = []
     tools = []
 
@@ -65,6 +67,14 @@ def get_tools(tools_list: list, alita_client, llm) -> list:
                 selected_tools=[],
                 llm=llm
             ))
+        elif tool['type'] == 'memory':
+            if memory_store is None:
+                raise ToolException(f"Memory store is not provided for memory tool: {tool['name']}")
+            tools += MemoryToolkit.get_toolkit(
+                namespace=tool['settings'].get('namespace', str(tool['id'])),
+                store=memory_store,
+                toolkit_name=tool.get('toolkit_name', '')
+            ).get_tools()
         elif tool['type'] == 'artifact':
             tools.extend(ArtifactToolkit.get_toolkit(
                 client=alita_client,
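The new branch expects each memory tool entry to provide a namespace (falling back to the tool id) plus, per `_configure_store` above, a connection string in its settings, and it raises `ToolException` when no store was configured. An illustrative entry; every value below is made up:

```python
memory_tool = {
    "id": 42,
    "type": "memory",
    "name": "memory",
    "toolkit_name": "memory",
    "settings": {
        "namespace": "agent-42",
        "connection_string": "postgresql://user:pass@localhost:5432/memory",
    },
}

# Raises ToolException if memory_store is None for an entry of type "memory".
tools = get_tools([memory_tool], alita_client=alita_client, llm=llm, memory_store=store)
```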
@@ -1,7 +1,12 @@
 import logging
 from functools import wraps
 
-from langchain_core.callbacks import dispatch_custom_event
+try:
+    from langchain_core.callbacks import dispatch_custom_event
+except ImportError:
+    # Fallback stub if langchain_core is unavailable
+    def dispatch_custom_event(name: str, data: dict):  # pragma: no cover
+        pass
 
 
 class StreamlitCallbackHandler(logging.Handler):
@@ -42,6 +47,7 @@ def setup_streamlit_logging(
     formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
     handler.setFormatter(formatter)
     logger.addHandler(handler)
+    logger.setLevel(logging.INFO)
 
     return handler
 
@@ -6,13 +6,16 @@ from typing import Any, Dict, Optional
 from langchain_core.tools import ToolException
 import pandas as pd
 
-from ..tools.artifact import ArtifactWrapper
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:  # pragma: no cover - avoid heavy imports at runtime
+    from ..tools.artifact import ArtifactWrapper
 
 logger = logging.getLogger(__name__)
 
 
 def save_dataframe_to_artifact(
-    artifacts_wrapper: ArtifactWrapper,
+    artifacts_wrapper: 'ArtifactWrapper',
     df: pd.DataFrame,
     target_file: str,
     csv_options: Optional[Dict[str, Any]] = None,