alita-sdk 0.3.321__py3-none-any.whl → 0.3.321b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of alita-sdk might be problematic. See the registry's release details for more information.

@@ -584,11 +584,19 @@ def create_graph(
584
584
  entry_point = clean_string(schema['entry_point'])
585
585
  except KeyError:
586
586
  raise ToolException("Entry point is not defined in the schema. Please define 'entry_point' in the schema.")
587
- if state.items():
588
- state_default_node = StateDefaultNode(default_vars=set_defaults(state))
589
- lg_builder.add_node(state_default_node.name, state_default_node)
590
- lg_builder.set_entry_point(state_default_node.name)
591
- lg_builder.add_conditional_edges(state_default_node.name, TransitionalEdge(entry_point))
587
+ # if state.items():
588
+ # state_default_node = StateDefaultNode(default_vars=set_defaults(state))
589
+ # lg_builder.add_node(state_default_node.name, state_default_node)
590
+ # lg_builder.set_entry_point(state_default_node.name)
591
+ # lg_builder.add_conditional_edges(state_default_node.name, TransitionalEdge(entry_point))
592
+ for key, value in state.items():
593
+ if 'type' in value and 'value' in value:
594
+ # set default value for state variable if it is defined in the schema
595
+ state_default_node = StateDefaultNode(default_vars=state)
596
+ lg_builder.add_node(state_default_node.name, state_default_node)
597
+ lg_builder.set_entry_point(state_default_node.name)
598
+ lg_builder.add_conditional_edges(state_default_node.name, TransitionalEdge(entry_point))
599
+ break
592
600
  else:
593
601
  # if no state variables are defined, set the entry point directly
594
602
  lg_builder.set_entry_point(entry_point)
@@ -710,7 +718,7 @@ class LangGraphAgentRunnable(CompiledStateGraph):
710
718
  else:
711
719
  result = super().invoke(input, config=config, *args, **kwargs)
712
720
  try:
713
- if self.output_variables and self.output_variables[0] != "messages":
721
+ if self.output_variables and self.output_variables[0] in result and self.output_variables[0] != "messages":
714
722
  # If output_variables are specified, use the value of first one or use the last messages as default
715
723
  output = result.get(self.output_variables[0], result['messages'][-1].content)
716
724
  else:
@@ -15,7 +15,6 @@ logger = logging.getLogger(__name__)
15
15
 
16
16
  def create_llm_input_with_messages(
17
17
  prompt: Dict[str, str],
18
- messages: List[BaseMessage],
19
18
  params: Dict[str, Any]
20
19
  ) -> List[BaseMessage]:
21
20
  """
@@ -23,13 +22,12 @@ def create_llm_input_with_messages(
23
22
 
24
23
  Args:
25
24
  prompt: The prompt configuration with template
26
- messages: List of chat history messages
27
25
  params: Additional parameters for prompt formatting
28
26
 
29
27
  Returns:
30
28
  List of messages to send to LLM
31
29
  """
32
- logger.info(f"Creating LLM input with messages: {len(messages)} messages, params: {params}")
30
+ logger.info(f"Creating LLM input with params: {params}")
33
31
 
34
32
  # Build the input messages
35
33
  input_messages = []
@@ -47,9 +45,13 @@ def create_llm_input_with_messages(
47
45
  raise ToolException(error_msg)
48
46
 
49
47
  # Add the chat history messages
48
+ messages = params.get('messages', [])
50
49
  if messages:
51
50
  input_messages.extend(messages)
52
-
51
+ else:
52
+ # conditionally add a default human message if no chat history
53
+ input_messages.extend([HumanMessage(content="")])
54
+
53
55
  return input_messages
54
56
 
55
57
 
@@ -124,12 +126,13 @@ class LLMNode(BaseTool):
124
126
  # Create parameters for prompt formatting from state
125
127
  params = {}
126
128
  if isinstance(state, dict):
127
- for var in self.input_variables or []:
128
- if var != "messages" and var in state:
129
- params[var] = state[var]
129
+ params = {var: state[var] for var in (self.input_variables or []) if var != "messages" and var in state}
130
+ # message as a part of chat history added ONLY if "messages" is in input_variables
131
+ if "messages" in (self.input_variables or []):
132
+ params["messages"] = messages
130
133
 
131
134
  # Create LLM input with proper message handling
132
- llm_input = create_llm_input_with_messages(self.prompt, messages, params)
135
+ llm_input = create_llm_input_with_messages(self.prompt, params)
133
136
 
134
137
  # Get the LLM client, potentially with tools bound
135
138
  llm_client = self.client
@@ -268,17 +271,14 @@ class LLMNode(BaseTool):
268
271
  if json_output_vars:
269
272
  try:
270
273
  response = _extract_json(content) or {}
271
- response_data = {key: response.get(key) for key in json_output_vars if key in response}
272
-
273
- # Always add the messages to the response
274
- new_messages = messages + [AIMessage(content=content)]
275
- response_data['messages'] = new_messages
276
-
277
- return response_data
274
+ response_data = {key: response.get(key, content) for key in json_output_vars}
278
275
  except (ValueError, json.JSONDecodeError) as e:
279
- # LLM returned non-JSON content, treat as plain text
280
- logger.warning(f"Expected JSON output but got plain text. Output variables specified: {json_output_vars}. Error: {e}")
281
- # Fall through to plain text handling
276
+ logger.warning(
277
+ f"Expected JSON output but got plain text. Output variables specified: {json_output_vars}. Error: {e}")
278
+ response_data = {var: content for var in json_output_vars}
279
+ new_messages = messages + [AIMessage(content=content)]
280
+ response_data['messages'] = new_messages
281
+ return response_data
282
282
 
283
283
  # Simple text response (either no output variables or JSON parsing failed)
284
284
  new_messages = messages + [AIMessage(content=content)]
@@ -1,4 +1,5 @@
1
1
  from langchain_core.documents import Document
2
+ from langchain_core.tools import ToolException
2
3
 
3
4
  from alita_sdk.runtime.utils.utils import IndexerKeywords
4
5
  from alita_sdk.tools.base_indexer_toolkit import BaseIndexerToolkit
@@ -6,6 +7,9 @@ from alita_sdk.tools.base_indexer_toolkit import BaseIndexerToolkit
6
7
 
7
8
  class NonCodeIndexerToolkit(BaseIndexerToolkit):
8
9
  def _get_indexed_data(self, collection_suffix: str):
10
+ if not self.vector_adapter:
11
+ raise ToolException("Vector adapter is not initialized. "
12
+ "Check your configuration: embedding_model and vectorstore_type.")
9
13
  return self.vector_adapter.get_indexed_data(self, collection_suffix)
10
14
 
11
15
  def key_fn(self, document: Document):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: alita_sdk
3
- Version: 0.3.321
3
+ Version: 0.3.321b2
4
4
  Summary: SDK for building langchain agents using resources from Alita
5
5
  Author-email: Artem Rozumenko <artyom.rozumenko@gmail.com>, Mikalai Biazruchka <mikalai_biazruchka@epam.com>, Roman Mitusov <roman_mitusov@epam.com>, Ivan Krakhmaliuk <lifedj27@gmail.com>, Artem Dubrovskiy <ad13box@gmail.com>
6
6
  License-Expression: Apache-2.0
@@ -44,7 +44,7 @@ alita_sdk/runtime/langchain/assistant.py,sha256=0Cv30SJULxlrvD3hzWmTnxZyzqPEaSgp
44
44
  alita_sdk/runtime/langchain/chat_message_template.py,sha256=kPz8W2BG6IMyITFDA5oeb5BxVRkHEVZhuiGl4MBZKdc,2176
45
45
  alita_sdk/runtime/langchain/constants.py,sha256=eHVJ_beJNTf1WJo4yq7KMK64fxsRvs3lKc34QCXSbpk,3319
46
46
  alita_sdk/runtime/langchain/indexer.py,sha256=0ENHy5EOhThnAiYFc7QAsaTNp9rr8hDV_hTK8ahbatk,37592
47
- alita_sdk/runtime/langchain/langraph_agent.py,sha256=5fTT2FyRFSLIbQxm8KkxT_-bkoABmZzZA_V3-9nPse0,44296
47
+ alita_sdk/runtime/langchain/langraph_agent.py,sha256=gT6f8I0mOdCc4l4xWYaZ-fYqkkPGhM6hwOIgpr1mXA8,44875
48
48
  alita_sdk/runtime/langchain/mixedAgentParser.py,sha256=M256lvtsL3YtYflBCEp-rWKrKtcY1dJIyRGVv7KW9ME,2611
49
49
  alita_sdk/runtime/langchain/mixedAgentRenderes.py,sha256=asBtKqm88QhZRILditjYICwFVKF5KfO38hu2O-WrSWE,5964
50
50
  alita_sdk/runtime/langchain/store_manager.py,sha256=i8Fl11IXJhrBXq1F1ukEVln57B1IBe-tqSUvfUmBV4A,2218
@@ -112,7 +112,7 @@ alita_sdk/runtime/tools/echo.py,sha256=spw9eCweXzixJqHnZofHE1yWiSUa04L4VKycf3KCE
112
112
  alita_sdk/runtime/tools/function.py,sha256=ZFpd7TGwIawze2e7BHlKwP0NHwNw42wwrmmnXyJQJhk,2600
113
113
  alita_sdk/runtime/tools/graph.py,sha256=MbnZYqdmvZY7SGDp43lOVVIjUt5ARHSgj43mdtBjSjQ,3092
114
114
  alita_sdk/runtime/tools/indexer_tool.py,sha256=whSLPevB4WD6dhh2JDXEivDmTvbjiMV1MrPl9cz5eLA,4375
115
- alita_sdk/runtime/tools/llm.py,sha256=NsrsP-SblyxDdzgMCn9_OBUL0sUGDVS5yqer49V7ciE,15069
115
+ alita_sdk/runtime/tools/llm.py,sha256=SwRzC9n_rBsljnbhS8FPMgKc9b_DTSleCK40MqWnGzc,15107
116
116
  alita_sdk/runtime/tools/loop.py,sha256=uds0WhZvwMxDVFI6MZHrcmMle637cQfBNg682iLxoJA,8335
117
117
  alita_sdk/runtime/tools/loop_output.py,sha256=U4hO9PCQgWlXwOq6jdmCGbegtAxGAPXObSxZQ3z38uk,8069
118
118
  alita_sdk/runtime/tools/mcp_server_tool.py,sha256=trGraI8-AwdbNmTKMjfmlBxgTDMTE4-21heCVtd_lz0,4156
@@ -135,7 +135,7 @@ alita_sdk/runtime/utils/utils.py,sha256=VXNLsdeTmf6snn9EtUyobv4yL-xzLhUcH8P_ORMi
135
135
  alita_sdk/tools/__init__.py,sha256=jUj1ztC2FbkIUB-YYmiqaz_rqW7Il5kWzDPn1mJmj5w,10545
136
136
  alita_sdk/tools/base_indexer_toolkit.py,sha256=IKtnJVX27yPu8bBWgbl-5YfUQy4pJPnBoRBFLkqagoc,20228
137
137
  alita_sdk/tools/elitea_base.py,sha256=up3HshASSDfjlHV_HPrs1aD4JIwwX0Ug26WGTzgIYvY,34724
138
- alita_sdk/tools/non_code_indexer_toolkit.py,sha256=v9uq1POE1fQKCd152mbqDtF-HSe0qoDj83k4E5LAkMI,1080
138
+ alita_sdk/tools/non_code_indexer_toolkit.py,sha256=B3QvhpT1F9QidkCcsOi3J_QrTOaNlTxqWFwe90VivQQ,1329
139
139
  alita_sdk/tools/ado/__init__.py,sha256=NnNYpNFW0_N_v1td_iekYOoQRRB7PIunbpT2f9ZFJM4,1201
140
140
  alita_sdk/tools/ado/utils.py,sha256=PTCludvaQmPLakF2EbCGy66Mro4-rjDtavVP-xcB2Wc,1252
141
141
  alita_sdk/tools/ado/repos/__init__.py,sha256=rR-c40Pw_WpQeOXtEuS-COvgRUs1_cTkcJfHlK09N88,5339
@@ -349,8 +349,8 @@ alita_sdk/tools/zephyr_scale/api_wrapper.py,sha256=kT0TbmMvuKhDUZc0i7KO18O38JM9S
349
349
  alita_sdk/tools/zephyr_squad/__init__.py,sha256=0ne8XLJEQSLOWfzd2HdnqOYmQlUliKHbBED5kW_Vias,2895
350
350
  alita_sdk/tools/zephyr_squad/api_wrapper.py,sha256=kmw_xol8YIYFplBLWTqP_VKPRhL_1ItDD0_vXTe_UuI,14906
351
351
  alita_sdk/tools/zephyr_squad/zephyr_squad_cloud_client.py,sha256=R371waHsms4sllHCbijKYs90C-9Yu0sSR3N4SUfQOgU,5066
352
- alita_sdk-0.3.321.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
353
- alita_sdk-0.3.321.dist-info/METADATA,sha256=NYftgHbUR29AEwXHSYD4vDsVk94-TYbo4ijPAl7FeJQ,18897
354
- alita_sdk-0.3.321.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
355
- alita_sdk-0.3.321.dist-info/top_level.txt,sha256=0vJYy5p_jK6AwVb1aqXr7Kgqgk3WDtQ6t5C-XI9zkmg,10
356
- alita_sdk-0.3.321.dist-info/RECORD,,
352
+ alita_sdk-0.3.321b2.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
353
+ alita_sdk-0.3.321b2.dist-info/METADATA,sha256=DibZvPqa7lDNcR3Guti57bcNqcAE4dddj5xoArHUKuo,18899
354
+ alita_sdk-0.3.321b2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
355
+ alita_sdk-0.3.321b2.dist-info/top_level.txt,sha256=0vJYy5p_jK6AwVb1aqXr7Kgqgk3WDtQ6t5C-XI9zkmg,10
356
+ alita_sdk-0.3.321b2.dist-info/RECORD,,