lfx-nightly 0.1.13.dev0__py3-none-any.whl → 0.2.0.dev26__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- lfx/_assets/component_index.json +1 -1
- lfx/base/agents/agent.py +121 -29
- lfx/base/agents/altk_base_agent.py +380 -0
- lfx/base/agents/altk_tool_wrappers.py +565 -0
- lfx/base/agents/events.py +103 -35
- lfx/base/agents/utils.py +15 -2
- lfx/base/composio/composio_base.py +183 -233
- lfx/base/data/base_file.py +88 -21
- lfx/base/data/storage_utils.py +192 -0
- lfx/base/data/utils.py +178 -14
- lfx/base/datastax/__init__.py +5 -0
- lfx/{components/vectorstores/astradb.py → base/datastax/astradb_base.py} +84 -473
- lfx/base/embeddings/embeddings_class.py +113 -0
- lfx/base/io/chat.py +5 -4
- lfx/base/mcp/util.py +101 -15
- lfx/base/models/groq_constants.py +74 -58
- lfx/base/models/groq_model_discovery.py +265 -0
- lfx/base/models/model.py +1 -1
- lfx/base/models/model_input_constants.py +74 -7
- lfx/base/models/model_utils.py +100 -0
- lfx/base/models/ollama_constants.py +3 -0
- lfx/base/models/openai_constants.py +7 -0
- lfx/base/models/watsonx_constants.py +36 -0
- lfx/base/tools/run_flow.py +601 -129
- lfx/cli/commands.py +7 -4
- lfx/cli/common.py +2 -2
- lfx/cli/run.py +1 -1
- lfx/cli/script_loader.py +53 -11
- lfx/components/Notion/create_page.py +1 -1
- lfx/components/Notion/list_database_properties.py +1 -1
- lfx/components/Notion/list_pages.py +1 -1
- lfx/components/Notion/list_users.py +1 -1
- lfx/components/Notion/page_content_viewer.py +1 -1
- lfx/components/Notion/search.py +1 -1
- lfx/components/Notion/update_page_property.py +1 -1
- lfx/components/__init__.py +19 -5
- lfx/components/altk/__init__.py +34 -0
- lfx/components/altk/altk_agent.py +193 -0
- lfx/components/amazon/amazon_bedrock_converse.py +1 -1
- lfx/components/apify/apify_actor.py +4 -4
- lfx/components/composio/__init__.py +70 -18
- lfx/components/composio/apollo_composio.py +11 -0
- lfx/components/composio/bitbucket_composio.py +11 -0
- lfx/components/composio/canva_composio.py +11 -0
- lfx/components/composio/coda_composio.py +11 -0
- lfx/components/composio/composio_api.py +10 -0
- lfx/components/composio/discord_composio.py +1 -1
- lfx/components/composio/elevenlabs_composio.py +11 -0
- lfx/components/composio/exa_composio.py +11 -0
- lfx/components/composio/firecrawl_composio.py +11 -0
- lfx/components/composio/fireflies_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +1 -1
- lfx/components/composio/googlebigquery_composio.py +11 -0
- lfx/components/composio/googlecalendar_composio.py +1 -1
- lfx/components/composio/googledocs_composio.py +1 -1
- lfx/components/composio/googlemeet_composio.py +1 -1
- lfx/components/composio/googlesheets_composio.py +1 -1
- lfx/components/composio/googletasks_composio.py +1 -1
- lfx/components/composio/heygen_composio.py +11 -0
- lfx/components/composio/mem0_composio.py +11 -0
- lfx/components/composio/peopledatalabs_composio.py +11 -0
- lfx/components/composio/perplexityai_composio.py +11 -0
- lfx/components/composio/serpapi_composio.py +11 -0
- lfx/components/composio/slack_composio.py +3 -574
- lfx/components/composio/slackbot_composio.py +1 -1
- lfx/components/composio/snowflake_composio.py +11 -0
- lfx/components/composio/tavily_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +2 -2
- lfx/components/{agents → cuga}/__init__.py +5 -7
- lfx/components/cuga/cuga_agent.py +730 -0
- lfx/components/data/__init__.py +78 -28
- lfx/components/data_source/__init__.py +58 -0
- lfx/components/{data → data_source}/api_request.py +26 -3
- lfx/components/{data → data_source}/csv_to_data.py +15 -10
- lfx/components/{data → data_source}/json_to_data.py +15 -8
- lfx/components/{data → data_source}/news_search.py +1 -1
- lfx/components/{data → data_source}/rss.py +1 -1
- lfx/components/{data → data_source}/sql_executor.py +1 -1
- lfx/components/{data → data_source}/url.py +1 -1
- lfx/components/{data → data_source}/web_search.py +1 -1
- lfx/components/datastax/__init__.py +12 -6
- lfx/components/datastax/{astra_assistant_manager.py → astradb_assistant_manager.py} +1 -0
- lfx/components/datastax/astradb_chatmemory.py +40 -0
- lfx/components/datastax/astradb_cql.py +6 -32
- lfx/components/datastax/astradb_graph.py +10 -124
- lfx/components/datastax/astradb_tool.py +13 -53
- lfx/components/datastax/astradb_vectorstore.py +134 -977
- lfx/components/datastax/create_assistant.py +1 -0
- lfx/components/datastax/create_thread.py +1 -0
- lfx/components/datastax/dotenv.py +1 -0
- lfx/components/datastax/get_assistant.py +1 -0
- lfx/components/datastax/getenvvar.py +1 -0
- lfx/components/datastax/graph_rag.py +1 -1
- lfx/components/datastax/hcd.py +1 -1
- lfx/components/datastax/list_assistants.py +1 -0
- lfx/components/datastax/run.py +1 -0
- lfx/components/deactivated/json_document_builder.py +1 -1
- lfx/components/elastic/elasticsearch.py +1 -1
- lfx/components/elastic/opensearch_multimodal.py +1575 -0
- lfx/components/files_and_knowledge/__init__.py +47 -0
- lfx/components/{data → files_and_knowledge}/directory.py +1 -1
- lfx/components/{data → files_and_knowledge}/file.py +246 -18
- lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +17 -9
- lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +18 -10
- lfx/components/{data → files_and_knowledge}/save_file.py +142 -22
- lfx/components/flow_controls/__init__.py +58 -0
- lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
- lfx/components/{logic → flow_controls}/loop.py +47 -9
- lfx/components/flow_controls/run_flow.py +108 -0
- lfx/components/glean/glean_search_api.py +1 -1
- lfx/components/groq/groq.py +35 -28
- lfx/components/helpers/__init__.py +102 -0
- lfx/components/ibm/watsonx.py +25 -21
- lfx/components/input_output/__init__.py +3 -1
- lfx/components/input_output/chat.py +12 -3
- lfx/components/input_output/chat_output.py +12 -4
- lfx/components/input_output/text.py +1 -1
- lfx/components/input_output/text_output.py +1 -1
- lfx/components/{data → input_output}/webhook.py +1 -1
- lfx/components/knowledge_bases/__init__.py +59 -4
- lfx/components/langchain_utilities/character.py +1 -1
- lfx/components/langchain_utilities/csv_agent.py +84 -16
- lfx/components/langchain_utilities/json_agent.py +67 -12
- lfx/components/langchain_utilities/language_recursive.py +1 -1
- lfx/components/llm_operations/__init__.py +46 -0
- lfx/components/{processing → llm_operations}/batch_run.py +1 -1
- lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
- lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
- lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
- lfx/components/{processing → llm_operations}/structured_output.py +56 -18
- lfx/components/logic/__init__.py +126 -0
- lfx/components/mem0/mem0_chat_memory.py +11 -0
- lfx/components/mistral/mistral_embeddings.py +1 -1
- lfx/components/models/__init__.py +64 -9
- lfx/components/models_and_agents/__init__.py +49 -0
- lfx/components/{agents → models_and_agents}/agent.py +49 -6
- lfx/components/models_and_agents/embedding_model.py +423 -0
- lfx/components/models_and_agents/language_model.py +398 -0
- lfx/components/{agents → models_and_agents}/mcp_component.py +84 -45
- lfx/components/{helpers → models_and_agents}/memory.py +1 -1
- lfx/components/nvidia/system_assist.py +1 -1
- lfx/components/olivya/olivya.py +1 -1
- lfx/components/ollama/ollama.py +235 -14
- lfx/components/openrouter/openrouter.py +49 -147
- lfx/components/processing/__init__.py +9 -57
- lfx/components/processing/converter.py +1 -1
- lfx/components/processing/dataframe_operations.py +1 -1
- lfx/components/processing/parse_json_data.py +2 -2
- lfx/components/processing/parser.py +7 -2
- lfx/components/processing/split_text.py +1 -1
- lfx/components/qdrant/qdrant.py +1 -1
- lfx/components/redis/redis.py +1 -1
- lfx/components/twelvelabs/split_video.py +10 -0
- lfx/components/twelvelabs/video_file.py +12 -0
- lfx/components/utilities/__init__.py +43 -0
- lfx/components/{helpers → utilities}/calculator_core.py +1 -1
- lfx/components/{helpers → utilities}/current_date.py +1 -1
- lfx/components/{processing → utilities}/python_repl_core.py +1 -1
- lfx/components/vectorstores/__init__.py +0 -6
- lfx/components/vectorstores/local_db.py +9 -0
- lfx/components/youtube/youtube_transcripts.py +118 -30
- lfx/custom/custom_component/component.py +60 -3
- lfx/custom/custom_component/custom_component.py +68 -6
- lfx/field_typing/constants.py +1 -0
- lfx/graph/edge/base.py +45 -22
- lfx/graph/graph/base.py +5 -2
- lfx/graph/graph/schema.py +3 -2
- lfx/graph/state/model.py +15 -2
- lfx/graph/utils.py +6 -0
- lfx/graph/vertex/base.py +4 -1
- lfx/graph/vertex/param_handler.py +10 -7
- lfx/graph/vertex/vertex_types.py +1 -1
- lfx/helpers/__init__.py +12 -0
- lfx/helpers/flow.py +117 -0
- lfx/inputs/input_mixin.py +24 -1
- lfx/inputs/inputs.py +13 -1
- lfx/interface/components.py +161 -83
- lfx/io/schema.py +6 -0
- lfx/log/logger.py +5 -3
- lfx/schema/schema.py +5 -0
- lfx/services/database/__init__.py +5 -0
- lfx/services/database/service.py +25 -0
- lfx/services/deps.py +87 -22
- lfx/services/manager.py +19 -6
- lfx/services/mcp_composer/service.py +998 -157
- lfx/services/session.py +5 -0
- lfx/services/settings/base.py +51 -7
- lfx/services/settings/constants.py +8 -0
- lfx/services/storage/local.py +76 -46
- lfx/services/storage/service.py +152 -29
- lfx/template/field/base.py +3 -0
- lfx/utils/ssrf_protection.py +384 -0
- lfx/utils/validate_cloud.py +26 -0
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/METADATA +38 -22
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/RECORD +210 -196
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/WHEEL +1 -1
- lfx/components/agents/cuga_agent.py +0 -1013
- lfx/components/datastax/astra_db.py +0 -77
- lfx/components/datastax/cassandra.py +0 -92
- lfx/components/logic/run_flow.py +0 -71
- lfx/components/models/embedding_model.py +0 -114
- lfx/components/models/language_model.py +0 -144
- lfx/components/vectorstores/astradb_graph.py +0 -326
- lfx/components/vectorstores/cassandra.py +0 -264
- lfx/components/vectorstores/cassandra_graph.py +0 -238
- lfx/components/vectorstores/chroma.py +0 -167
- lfx/components/vectorstores/clickhouse.py +0 -135
- lfx/components/vectorstores/couchbase.py +0 -102
- lfx/components/vectorstores/elasticsearch.py +0 -267
- lfx/components/vectorstores/faiss.py +0 -111
- lfx/components/vectorstores/graph_rag.py +0 -141
- lfx/components/vectorstores/hcd.py +0 -314
- lfx/components/vectorstores/milvus.py +0 -115
- lfx/components/vectorstores/mongodb_atlas.py +0 -213
- lfx/components/vectorstores/opensearch.py +0 -243
- lfx/components/vectorstores/pgvector.py +0 -72
- lfx/components/vectorstores/pinecone.py +0 -134
- lfx/components/vectorstores/qdrant.py +0 -109
- lfx/components/vectorstores/supabase.py +0 -76
- lfx/components/vectorstores/upstash.py +0 -124
- lfx/components/vectorstores/vectara.py +0 -97
- lfx/components/vectorstores/vectara_rag.py +0 -164
- lfx/components/vectorstores/weaviate.py +0 -89
- /lfx/components/{data → data_source}/mock_data.py +0 -0
- /lfx/components/datastax/{astra_vectorize.py → astradb_vectorize.py} +0 -0
- /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
- /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
- /lfx/components/{logic → flow_controls}/listen.py +0 -0
- /lfx/components/{logic → flow_controls}/notify.py +0 -0
- /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
- /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
- /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
- /lfx/components/{helpers → processing}/create_list.py +0 -0
- /lfx/components/{helpers → processing}/output_parser.py +0 -0
- /lfx/components/{helpers → processing}/store_message.py +0 -0
- /lfx/components/{helpers → utilities}/id_generator.py +0 -0
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/entry_points.txt +0 -0
lfx/base/agents/events.py
CHANGED

```diff
@@ -1,4 +1,5 @@
 # Add helper functions for each event type
+import asyncio
 from collections.abc import AsyncIterator
 from time import perf_counter
 from typing import Any, Protocol
@@ -9,7 +10,7 @@ from typing_extensions import TypedDict
 
 from lfx.schema.content_block import ContentBlock
 from lfx.schema.content_types import TextContent, ToolContent
-from lfx.schema.log import SendMessageFunctionType
+from lfx.schema.log import OnTokenFunctionType, SendMessageFunctionType
 from lfx.schema.message import Message
 
 
@@ -53,7 +54,14 @@ def _calculate_duration(start_time: float) -> int:
 
 
 async def handle_on_chain_start(
-    event: dict[str, Any],
+    event: dict[str, Any],
+    agent_message: Message,
+    send_message_callback: SendMessageFunctionType,
+    send_token_callback: OnTokenFunctionType | None,  # noqa: ARG001
+    start_time: float,
+    *,
+    had_streaming: bool = False,  # noqa: ARG001
+    message_id: str | None = None,  # noqa: ARG001
 ) -> tuple[Message, float]:
     # Create content blocks if they don't exist
     if not agent_message.content_blocks:
@@ -80,7 +88,7 @@ async def handle_on_chain_start(
                 header={"title": "Input", "icon": "MessageSquare"},
             )
             agent_message.content_blocks[0].contents.append(text_content)
-        agent_message = await
+        agent_message = await send_message_callback(message=agent_message, skip_db_update=True)
         start_time = perf_counter()
     return agent_message, start_time
 
@@ -101,15 +109,23 @@ def _extract_output_text(output: str | list) -> str:
         if isinstance(item, dict):
             if "text" in item:
                 return item["text"] or ""
-
-
-
-
-
-
-
-
+            if "content" in item:
+                return str(item["content"])
+            if "message" in item:
+                return str(item["message"])
+
+            # Special case handling for non-text-like dicts
+            if (
+                item.get("type") == "tool_use"  # Handle tool use items
+                or ("index" in item and len(item) == 1)  # Handle index-only items
+                or "partial_json" in item  # Handle partial json items
+                # Handle index-only items
+                or ("index" in item and not any(k in item for k in ("text", "content", "message")))
+                # Handle other metadata-only chunks that don't contain meaningful text
+                or not any(key in item for key in ["text", "content", "message"])
+            ):
                 return ""
+
             # For any other dict format, return empty string
             return ""
         # For any other single item type (not str or dict), return empty string
@@ -133,7 +149,14 @@ def _extract_output_text(output: str | list) -> str:
 
 
 async def handle_on_chain_end(
-    event: dict[str, Any],
+    event: dict[str, Any],
+    agent_message: Message,
+    send_message_callback: SendMessageFunctionType,
+    send_token_callback: OnTokenFunctionType | None,  # noqa: ARG001
+    start_time: float,
+    *,
+    had_streaming: bool = False,
+    message_id: str | None = None,  # noqa: ARG001
 ) -> tuple[Message, float]:
     data_output = event["data"].get("output")
     if data_output and isinstance(data_output, AgentFinish) and data_output.return_values.get("output"):
@@ -151,7 +174,11 @@ async def handle_on_chain_end(
                 header={"title": "Output", "icon": "MessageSquare"},
             )
             agent_message.content_blocks[0].contents.append(text_content)
-
+
+        # Only send final message if we didn't have streaming chunks
+        # If we had streaming, frontend already accumulated the chunks
+        if not had_streaming:
+            agent_message = await send_message_callback(message=agent_message)
         start_time = perf_counter()
     return agent_message, start_time
 
@@ -160,7 +187,7 @@ async def handle_on_tool_start(
     event: dict[str, Any],
     agent_message: Message,
     tool_blocks_map: dict[str, ToolContent],
-
+    send_message_callback: SendMessageFunctionType,
     start_time: float,
 ) -> tuple[Message, float]:
     tool_name = event["name"]
@@ -190,7 +217,7 @@ async def handle_on_tool_start(
     tool_blocks_map[tool_key] = tool_content
     agent_message.content_blocks[0].contents.append(tool_content)
 
-    agent_message = await
+    agent_message = await send_message_callback(message=agent_message, skip_db_update=True)
     if agent_message.content_blocks and agent_message.content_blocks[0].contents:
         tool_blocks_map[tool_key] = agent_message.content_blocks[0].contents[-1]
     return agent_message, new_start_time
@@ -200,7 +227,7 @@ async def handle_on_tool_end(
     event: dict[str, Any],
     agent_message: Message,
     tool_blocks_map: dict[str, ToolContent],
-
+    send_message_callback: SendMessageFunctionType,
    start_time: float,
 ) -> tuple[Message, float]:
     run_id = event.get("run_id", "")
@@ -209,8 +236,8 @@ async def handle_on_tool_end(
     tool_content = tool_blocks_map.get(tool_key)
 
     if tool_content and isinstance(tool_content, ToolContent):
-        # Call
-        agent_message = await
+        # Call send_message_callback first to get the updated message structure
+        agent_message = await send_message_callback(message=agent_message, skip_db_update=True)
         new_start_time = perf_counter()
 
         # Now find and update the tool content in the current message
@@ -246,7 +273,7 @@ async def handle_on_tool_error(
     event: dict[str, Any],
     agent_message: Message,
     tool_blocks_map: dict[str, ToolContent],
-
+    send_message_callback: SendMessageFunctionType,
     start_time: float,
 ) -> tuple[Message, float]:
     run_id = event.get("run_id", "")
@@ -258,7 +285,7 @@ async def handle_on_tool_error(
         tool_content.error = event["data"].get("error", "Unknown error")
         tool_content.duration = _calculate_duration(start_time)
         tool_content.header = {"title": f"Error using **{tool_content.name}**", "icon": "Hammer"}
-        agent_message = await
+        agent_message = await send_message_callback(message=agent_message, skip_db_update=True)
     start_time = perf_counter()
     return agent_message, start_time
 
@@ -266,8 +293,12 @@ async def handle_on_tool_error(
 async def handle_on_chain_stream(
     event: dict[str, Any],
     agent_message: Message,
-
+    send_message_callback: SendMessageFunctionType,  # noqa: ARG001
+    send_token_callback: OnTokenFunctionType | None,
     start_time: float,
+    *,
+    had_streaming: bool = False,  # noqa: ARG001
+    message_id: str | None = None,
 ) -> tuple[Message, float]:
     data_chunk = event["data"].get("chunk", {})
     if isinstance(data_chunk, dict) and data_chunk.get("output"):
@@ -275,15 +306,26 @@ async def handle_on_chain_stream(
         if output and isinstance(output, str | list):
             agent_message.text = _extract_output_text(output)
             agent_message.properties.state = "complete"
-
+            # Don't call send_message_callback here - we must update in place
+            # in order to keep the message id consistent throughout the stream.
+            # The final message will be sent after the loop completes
         start_time = perf_counter()
     elif isinstance(data_chunk, AIMessageChunk):
         output_text = _extract_output_text(data_chunk.content)
-        if
-
-
-
+        # For streaming, send token event if callback is available
+        # Note: we should expect the callback, but we keep it optional for backwards compatibility
+        # as of v1.6.5
+        if output_text and output_text.strip() and send_token_callback and message_id:
+            await asyncio.to_thread(
+                send_token_callback,
+                data={
+                    "chunk": output_text,
+                    "id": str(message_id),
+                },
+            )
+
         if not agent_message.text:
+            # Starts the timer when the first message is starting to be generated
             start_time = perf_counter()
     return agent_message, start_time
 
@@ -294,7 +336,7 @@ class ToolEventHandler(Protocol):
         event: dict[str, Any],
         agent_message: Message,
         tool_blocks_map: dict[str, ContentBlock],
-
+        send_message_callback: SendMessageFunctionType,
         start_time: float,
     ) -> tuple[Message, float]: ...
 
@@ -304,8 +346,12 @@ class ChainEventHandler(Protocol):
         self,
         event: dict[str, Any],
         agent_message: Message,
-
+        send_message_callback: SendMessageFunctionType,
+        send_token_callback: OnTokenFunctionType | None,
         start_time: float,
+        *,
+        had_streaming: bool = False,
+        message_id: str | None = None,
     ) -> tuple[Message, float]: ...
 
 
@@ -329,7 +375,8 @@ TOOL_EVENT_HANDLERS: dict[str, ToolEventHandler] = {
 async def process_agent_events(
     agent_executor: AsyncIterator[dict[str, Any]],
     agent_message: Message,
-
+    send_message_callback: SendMessageFunctionType,
+    send_token_callback: OnTokenFunctionType | None = None,
 ) -> Message:
     """Process agent events and return the final output."""
     if isinstance(agent_message.properties, dict):
@@ -337,26 +384,47 @@ async def process_agent_events(
     else:
         agent_message.properties.icon = "Bot"
     agent_message.properties.state = "partial"
-    # Store the initial message
-    agent_message = await
+    # Store the initial message and capture the message id
+    agent_message = await send_message_callback(message=agent_message)
+    # Capture the original message id - this must stay consistent throughout if streaming
+    # Message may not contain id if the Agent is not connected to a Chat Output (_should_skip_message is True)
+    initial_message_id = agent_message.id if hasattr(agent_message, "id") else None
     try:
        # Create a mapping of run_ids to tool contents
        tool_blocks_map: dict[str, ToolContent] = {}
+        had_streaming = False
        start_time = perf_counter()
+
        async for event in agent_executor:
            if event["event"] in TOOL_EVENT_HANDLERS:
                tool_handler = TOOL_EVENT_HANDLERS[event["event"]]
                # Use skip_db_update=True during streaming to avoid DB round-trips
                agent_message, start_time = await tool_handler(
-                    event, agent_message, tool_blocks_map,
+                    event, agent_message, tool_blocks_map, send_message_callback, start_time
                )
            elif event["event"] in CHAIN_EVENT_HANDLERS:
                chain_handler = CHAIN_EVENT_HANDLERS[event["event"]]
-
-
+
+                # Check if this is a streaming event
+                if event["event"] in ("on_chain_stream", "on_chat_model_stream"):
+                    had_streaming = True
+                    agent_message, start_time = await chain_handler(
+                        event,
+                        agent_message,
+                        send_message_callback,
+                        send_token_callback,
+                        start_time,
+                        had_streaming=had_streaming,
+                        message_id=initial_message_id,
+                    )
+                else:
+                    agent_message, start_time = await chain_handler(
+                        event, agent_message, send_message_callback, None, start_time, had_streaming=had_streaming
+                    )
+
        agent_message.properties.state = "complete"
        # Final DB update with the complete message (skip_db_update=False by default)
-        agent_message = await
+        agent_message = await send_message_callback(message=agent_message)
    except Exception as e:
        raise ExceptionWithMessageError(agent_message, str(e)) from e
    return await Message.create(**agent_message.model_dump())
```
lfx/base/agents/utils.py
CHANGED

```diff
@@ -47,9 +47,22 @@ def data_to_messages(data: list[Data | Message]) -> list[BaseMessage]:
         data (List[Data | Message]): The data to convert.
 
     Returns:
-        List[BaseMessage]: The data as messages.
+        List[BaseMessage]: The data as messages, filtering out any with empty content.
     """
-
+    messages = []
+    for value in data:
+        try:
+            lc_message = value.to_lc_message()
+            # Only add messages with non-empty content (prevents Anthropic API errors)
+            content = lc_message.content
+            if content and ((isinstance(content, str) and content.strip()) or (isinstance(content, list) and content)):
+                messages.append(lc_message)
+            else:
+                logger.warning("Skipping message with empty content in chat history")
+        except (ValueError, AttributeError) as e:
+            logger.warning(f"Failed to convert message to BaseMessage: {e}")
+            continue
+    return messages
 
 
 def validate_and_create_xml_agent(
```
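The new `data_to_messages` body drops history entries whose LangChain content is empty and skips entries that fail conversion. A small illustration follows; it assumes `Message(text=..., sender=...)` is a valid construction in lfx's schema and that the two non-empty entries convert cleanly, so treat it as a sketch rather than a verified snippet.

```python
# Sketch only: the Message(text=..., sender=...) construction is an assumption
# about lfx's schema; the filtering behaviour shown is what the diff adds.
from lfx.base.agents.utils import data_to_messages
from lfx.schema.message import Message

history = [
    Message(text="What's the weather in Paris?", sender="User"),
    Message(text="   ", sender="Machine"),  # whitespace-only: now skipped instead of reaching the model
    Message(text="It is sunny.", sender="Machine"),
]

# Entries whose LangChain content is empty (or that raise during conversion) are dropped,
# which avoids the Anthropic "empty content" API error noted in the diff comment.
lc_messages = data_to_messages(history)
print(len(lc_messages))  # expected: 2
```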
|