lfx_nightly-0.1.11.dev0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/__init__.py +0 -0
- lfx/__main__.py +25 -0
- lfx/base/__init__.py +0 -0
- lfx/base/agents/__init__.py +0 -0
- lfx/base/agents/agent.py +268 -0
- lfx/base/agents/callback.py +130 -0
- lfx/base/agents/context.py +109 -0
- lfx/base/agents/crewai/__init__.py +0 -0
- lfx/base/agents/crewai/crew.py +231 -0
- lfx/base/agents/crewai/tasks.py +12 -0
- lfx/base/agents/default_prompts.py +23 -0
- lfx/base/agents/errors.py +15 -0
- lfx/base/agents/events.py +346 -0
- lfx/base/agents/utils.py +205 -0
- lfx/base/astra_assistants/__init__.py +0 -0
- lfx/base/astra_assistants/util.py +171 -0
- lfx/base/chains/__init__.py +0 -0
- lfx/base/chains/model.py +19 -0
- lfx/base/composio/__init__.py +0 -0
- lfx/base/composio/composio_base.py +1291 -0
- lfx/base/compressors/__init__.py +0 -0
- lfx/base/compressors/model.py +60 -0
- lfx/base/constants.py +46 -0
- lfx/base/curl/__init__.py +0 -0
- lfx/base/curl/parse.py +188 -0
- lfx/base/data/__init__.py +5 -0
- lfx/base/data/base_file.py +685 -0
- lfx/base/data/docling_utils.py +245 -0
- lfx/base/data/utils.py +198 -0
- lfx/base/document_transformers/__init__.py +0 -0
- lfx/base/document_transformers/model.py +43 -0
- lfx/base/embeddings/__init__.py +0 -0
- lfx/base/embeddings/aiml_embeddings.py +62 -0
- lfx/base/embeddings/model.py +26 -0
- lfx/base/flow_processing/__init__.py +0 -0
- lfx/base/flow_processing/utils.py +86 -0
- lfx/base/huggingface/__init__.py +0 -0
- lfx/base/huggingface/model_bridge.py +133 -0
- lfx/base/io/__init__.py +0 -0
- lfx/base/io/chat.py +20 -0
- lfx/base/io/text.py +22 -0
- lfx/base/langchain_utilities/__init__.py +0 -0
- lfx/base/langchain_utilities/model.py +35 -0
- lfx/base/langchain_utilities/spider_constants.py +1 -0
- lfx/base/langwatch/__init__.py +0 -0
- lfx/base/langwatch/utils.py +18 -0
- lfx/base/mcp/__init__.py +0 -0
- lfx/base/mcp/constants.py +2 -0
- lfx/base/mcp/util.py +1398 -0
- lfx/base/memory/__init__.py +0 -0
- lfx/base/memory/memory.py +49 -0
- lfx/base/memory/model.py +38 -0
- lfx/base/models/__init__.py +3 -0
- lfx/base/models/aiml_constants.py +51 -0
- lfx/base/models/anthropic_constants.py +47 -0
- lfx/base/models/aws_constants.py +151 -0
- lfx/base/models/chat_result.py +76 -0
- lfx/base/models/google_generative_ai_constants.py +70 -0
- lfx/base/models/groq_constants.py +134 -0
- lfx/base/models/model.py +375 -0
- lfx/base/models/model_input_constants.py +307 -0
- lfx/base/models/model_metadata.py +41 -0
- lfx/base/models/model_utils.py +8 -0
- lfx/base/models/novita_constants.py +35 -0
- lfx/base/models/ollama_constants.py +49 -0
- lfx/base/models/openai_constants.py +122 -0
- lfx/base/models/sambanova_constants.py +18 -0
- lfx/base/processing/__init__.py +0 -0
- lfx/base/prompts/__init__.py +0 -0
- lfx/base/prompts/api_utils.py +224 -0
- lfx/base/prompts/utils.py +61 -0
- lfx/base/textsplitters/__init__.py +0 -0
- lfx/base/textsplitters/model.py +28 -0
- lfx/base/tools/__init__.py +0 -0
- lfx/base/tools/base.py +26 -0
- lfx/base/tools/component_tool.py +325 -0
- lfx/base/tools/constants.py +49 -0
- lfx/base/tools/flow_tool.py +132 -0
- lfx/base/tools/run_flow.py +224 -0
- lfx/base/vectorstores/__init__.py +0 -0
- lfx/base/vectorstores/model.py +193 -0
- lfx/base/vectorstores/utils.py +22 -0
- lfx/base/vectorstores/vector_store_connection_decorator.py +52 -0
- lfx/cli/__init__.py +5 -0
- lfx/cli/commands.py +319 -0
- lfx/cli/common.py +650 -0
- lfx/cli/run.py +441 -0
- lfx/cli/script_loader.py +247 -0
- lfx/cli/serve_app.py +546 -0
- lfx/cli/validation.py +69 -0
- lfx/components/FAISS/__init__.py +34 -0
- lfx/components/FAISS/faiss.py +111 -0
- lfx/components/Notion/__init__.py +19 -0
- lfx/components/Notion/add_content_to_page.py +269 -0
- lfx/components/Notion/create_page.py +94 -0
- lfx/components/Notion/list_database_properties.py +68 -0
- lfx/components/Notion/list_pages.py +122 -0
- lfx/components/Notion/list_users.py +77 -0
- lfx/components/Notion/page_content_viewer.py +93 -0
- lfx/components/Notion/search.py +111 -0
- lfx/components/Notion/update_page_property.py +114 -0
- lfx/components/__init__.py +411 -0
- lfx/components/_importing.py +42 -0
- lfx/components/agentql/__init__.py +3 -0
- lfx/components/agentql/agentql_api.py +151 -0
- lfx/components/agents/__init__.py +34 -0
- lfx/components/agents/agent.py +558 -0
- lfx/components/agents/mcp_component.py +501 -0
- lfx/components/aiml/__init__.py +37 -0
- lfx/components/aiml/aiml.py +112 -0
- lfx/components/aiml/aiml_embeddings.py +37 -0
- lfx/components/amazon/__init__.py +36 -0
- lfx/components/amazon/amazon_bedrock_embedding.py +109 -0
- lfx/components/amazon/amazon_bedrock_model.py +124 -0
- lfx/components/amazon/s3_bucket_uploader.py +211 -0
- lfx/components/anthropic/__init__.py +34 -0
- lfx/components/anthropic/anthropic.py +187 -0
- lfx/components/apify/__init__.py +5 -0
- lfx/components/apify/apify_actor.py +325 -0
- lfx/components/arxiv/__init__.py +3 -0
- lfx/components/arxiv/arxiv.py +163 -0
- lfx/components/assemblyai/__init__.py +46 -0
- lfx/components/assemblyai/assemblyai_get_subtitles.py +83 -0
- lfx/components/assemblyai/assemblyai_lemur.py +183 -0
- lfx/components/assemblyai/assemblyai_list_transcripts.py +95 -0
- lfx/components/assemblyai/assemblyai_poll_transcript.py +72 -0
- lfx/components/assemblyai/assemblyai_start_transcript.py +188 -0
- lfx/components/azure/__init__.py +37 -0
- lfx/components/azure/azure_openai.py +95 -0
- lfx/components/azure/azure_openai_embeddings.py +83 -0
- lfx/components/baidu/__init__.py +32 -0
- lfx/components/baidu/baidu_qianfan_chat.py +113 -0
- lfx/components/bing/__init__.py +3 -0
- lfx/components/bing/bing_search_api.py +61 -0
- lfx/components/cassandra/__init__.py +40 -0
- lfx/components/cassandra/cassandra.py +264 -0
- lfx/components/cassandra/cassandra_chat.py +92 -0
- lfx/components/cassandra/cassandra_graph.py +238 -0
- lfx/components/chains/__init__.py +3 -0
- lfx/components/chroma/__init__.py +34 -0
- lfx/components/chroma/chroma.py +167 -0
- lfx/components/cleanlab/__init__.py +40 -0
- lfx/components/cleanlab/cleanlab_evaluator.py +155 -0
- lfx/components/cleanlab/cleanlab_rag_evaluator.py +254 -0
- lfx/components/cleanlab/cleanlab_remediator.py +131 -0
- lfx/components/clickhouse/__init__.py +34 -0
- lfx/components/clickhouse/clickhouse.py +135 -0
- lfx/components/cloudflare/__init__.py +32 -0
- lfx/components/cloudflare/cloudflare.py +81 -0
- lfx/components/cohere/__init__.py +40 -0
- lfx/components/cohere/cohere_embeddings.py +81 -0
- lfx/components/cohere/cohere_models.py +46 -0
- lfx/components/cohere/cohere_rerank.py +51 -0
- lfx/components/composio/__init__.py +74 -0
- lfx/components/composio/composio_api.py +268 -0
- lfx/components/composio/dropbox_compnent.py +11 -0
- lfx/components/composio/github_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +38 -0
- lfx/components/composio/googlecalendar_composio.py +11 -0
- lfx/components/composio/googlemeet_composio.py +11 -0
- lfx/components/composio/googletasks_composio.py +8 -0
- lfx/components/composio/linear_composio.py +11 -0
- lfx/components/composio/outlook_composio.py +11 -0
- lfx/components/composio/reddit_composio.py +11 -0
- lfx/components/composio/slack_composio.py +582 -0
- lfx/components/composio/slackbot_composio.py +11 -0
- lfx/components/composio/supabase_composio.py +11 -0
- lfx/components/composio/todoist_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +11 -0
- lfx/components/confluence/__init__.py +3 -0
- lfx/components/confluence/confluence.py +84 -0
- lfx/components/couchbase/__init__.py +34 -0
- lfx/components/couchbase/couchbase.py +102 -0
- lfx/components/crewai/__init__.py +49 -0
- lfx/components/crewai/crewai.py +107 -0
- lfx/components/crewai/hierarchical_crew.py +46 -0
- lfx/components/crewai/hierarchical_task.py +44 -0
- lfx/components/crewai/sequential_crew.py +52 -0
- lfx/components/crewai/sequential_task.py +73 -0
- lfx/components/crewai/sequential_task_agent.py +143 -0
- lfx/components/custom_component/__init__.py +34 -0
- lfx/components/custom_component/custom_component.py +31 -0
- lfx/components/data/__init__.py +64 -0
- lfx/components/data/api_request.py +544 -0
- lfx/components/data/csv_to_data.py +95 -0
- lfx/components/data/directory.py +113 -0
- lfx/components/data/file.py +577 -0
- lfx/components/data/json_to_data.py +98 -0
- lfx/components/data/news_search.py +164 -0
- lfx/components/data/rss.py +69 -0
- lfx/components/data/sql_executor.py +101 -0
- lfx/components/data/url.py +311 -0
- lfx/components/data/web_search.py +112 -0
- lfx/components/data/webhook.py +56 -0
- lfx/components/datastax/__init__.py +70 -0
- lfx/components/datastax/astra_assistant_manager.py +306 -0
- lfx/components/datastax/astra_db.py +75 -0
- lfx/components/datastax/astra_vectorize.py +124 -0
- lfx/components/datastax/astradb.py +1285 -0
- lfx/components/datastax/astradb_cql.py +314 -0
- lfx/components/datastax/astradb_graph.py +330 -0
- lfx/components/datastax/astradb_tool.py +414 -0
- lfx/components/datastax/astradb_vectorstore.py +1285 -0
- lfx/components/datastax/cassandra.py +92 -0
- lfx/components/datastax/create_assistant.py +58 -0
- lfx/components/datastax/create_thread.py +32 -0
- lfx/components/datastax/dotenv.py +35 -0
- lfx/components/datastax/get_assistant.py +37 -0
- lfx/components/datastax/getenvvar.py +30 -0
- lfx/components/datastax/graph_rag.py +141 -0
- lfx/components/datastax/hcd.py +314 -0
- lfx/components/datastax/list_assistants.py +25 -0
- lfx/components/datastax/run.py +89 -0
- lfx/components/deactivated/__init__.py +15 -0
- lfx/components/deactivated/amazon_kendra.py +66 -0
- lfx/components/deactivated/chat_litellm_model.py +158 -0
- lfx/components/deactivated/code_block_extractor.py +26 -0
- lfx/components/deactivated/documents_to_data.py +22 -0
- lfx/components/deactivated/embed.py +16 -0
- lfx/components/deactivated/extract_key_from_data.py +46 -0
- lfx/components/deactivated/json_document_builder.py +57 -0
- lfx/components/deactivated/list_flows.py +20 -0
- lfx/components/deactivated/mcp_sse.py +61 -0
- lfx/components/deactivated/mcp_stdio.py +62 -0
- lfx/components/deactivated/merge_data.py +93 -0
- lfx/components/deactivated/message.py +37 -0
- lfx/components/deactivated/metal.py +54 -0
- lfx/components/deactivated/multi_query.py +59 -0
- lfx/components/deactivated/retriever.py +43 -0
- lfx/components/deactivated/selective_passthrough.py +77 -0
- lfx/components/deactivated/should_run_next.py +40 -0
- lfx/components/deactivated/split_text.py +63 -0
- lfx/components/deactivated/store_message.py +24 -0
- lfx/components/deactivated/sub_flow.py +124 -0
- lfx/components/deactivated/vectara_self_query.py +76 -0
- lfx/components/deactivated/vector_store.py +24 -0
- lfx/components/deepseek/__init__.py +34 -0
- lfx/components/deepseek/deepseek.py +136 -0
- lfx/components/docling/__init__.py +43 -0
- lfx/components/docling/chunk_docling_document.py +186 -0
- lfx/components/docling/docling_inline.py +231 -0
- lfx/components/docling/docling_remote.py +193 -0
- lfx/components/docling/export_docling_document.py +117 -0
- lfx/components/documentloaders/__init__.py +3 -0
- lfx/components/duckduckgo/__init__.py +3 -0
- lfx/components/duckduckgo/duck_duck_go_search_run.py +92 -0
- lfx/components/elastic/__init__.py +37 -0
- lfx/components/elastic/elasticsearch.py +267 -0
- lfx/components/elastic/opensearch.py +243 -0
- lfx/components/embeddings/__init__.py +37 -0
- lfx/components/embeddings/similarity.py +76 -0
- lfx/components/embeddings/text_embedder.py +64 -0
- lfx/components/exa/__init__.py +3 -0
- lfx/components/exa/exa_search.py +68 -0
- lfx/components/firecrawl/__init__.py +43 -0
- lfx/components/firecrawl/firecrawl_crawl_api.py +88 -0
- lfx/components/firecrawl/firecrawl_extract_api.py +136 -0
- lfx/components/firecrawl/firecrawl_map_api.py +89 -0
- lfx/components/firecrawl/firecrawl_scrape_api.py +73 -0
- lfx/components/git/__init__.py +4 -0
- lfx/components/git/git.py +262 -0
- lfx/components/git/gitextractor.py +196 -0
- lfx/components/glean/__init__.py +3 -0
- lfx/components/glean/glean_search_api.py +173 -0
- lfx/components/google/__init__.py +17 -0
- lfx/components/google/gmail.py +192 -0
- lfx/components/google/google_bq_sql_executor.py +157 -0
- lfx/components/google/google_drive.py +92 -0
- lfx/components/google/google_drive_search.py +152 -0
- lfx/components/google/google_generative_ai.py +147 -0
- lfx/components/google/google_generative_ai_embeddings.py +141 -0
- lfx/components/google/google_oauth_token.py +89 -0
- lfx/components/google/google_search_api_core.py +68 -0
- lfx/components/google/google_serper_api_core.py +74 -0
- lfx/components/groq/__init__.py +34 -0
- lfx/components/groq/groq.py +136 -0
- lfx/components/helpers/__init__.py +52 -0
- lfx/components/helpers/calculator_core.py +89 -0
- lfx/components/helpers/create_list.py +40 -0
- lfx/components/helpers/current_date.py +42 -0
- lfx/components/helpers/id_generator.py +42 -0
- lfx/components/helpers/memory.py +251 -0
- lfx/components/helpers/output_parser.py +45 -0
- lfx/components/helpers/store_message.py +90 -0
- lfx/components/homeassistant/__init__.py +7 -0
- lfx/components/homeassistant/home_assistant_control.py +152 -0
- lfx/components/homeassistant/list_home_assistant_states.py +137 -0
- lfx/components/huggingface/__init__.py +37 -0
- lfx/components/huggingface/huggingface.py +197 -0
- lfx/components/huggingface/huggingface_inference_api.py +106 -0
- lfx/components/ibm/__init__.py +34 -0
- lfx/components/ibm/watsonx.py +203 -0
- lfx/components/ibm/watsonx_embeddings.py +135 -0
- lfx/components/icosacomputing/__init__.py +5 -0
- lfx/components/icosacomputing/combinatorial_reasoner.py +84 -0
- lfx/components/input_output/__init__.py +38 -0
- lfx/components/input_output/chat.py +120 -0
- lfx/components/input_output/chat_output.py +200 -0
- lfx/components/input_output/text.py +27 -0
- lfx/components/input_output/text_output.py +29 -0
- lfx/components/jigsawstack/__init__.py +23 -0
- lfx/components/jigsawstack/ai_scrape.py +126 -0
- lfx/components/jigsawstack/ai_web_search.py +136 -0
- lfx/components/jigsawstack/file_read.py +115 -0
- lfx/components/jigsawstack/file_upload.py +94 -0
- lfx/components/jigsawstack/image_generation.py +205 -0
- lfx/components/jigsawstack/nsfw.py +60 -0
- lfx/components/jigsawstack/object_detection.py +124 -0
- lfx/components/jigsawstack/sentiment.py +112 -0
- lfx/components/jigsawstack/text_to_sql.py +90 -0
- lfx/components/jigsawstack/text_translate.py +77 -0
- lfx/components/jigsawstack/vocr.py +107 -0
- lfx/components/langchain_utilities/__init__.py +109 -0
- lfx/components/langchain_utilities/character.py +53 -0
- lfx/components/langchain_utilities/conversation.py +59 -0
- lfx/components/langchain_utilities/csv_agent.py +107 -0
- lfx/components/langchain_utilities/fake_embeddings.py +26 -0
- lfx/components/langchain_utilities/html_link_extractor.py +35 -0
- lfx/components/langchain_utilities/json_agent.py +45 -0
- lfx/components/langchain_utilities/langchain_hub.py +126 -0
- lfx/components/langchain_utilities/language_recursive.py +49 -0
- lfx/components/langchain_utilities/language_semantic.py +138 -0
- lfx/components/langchain_utilities/llm_checker.py +39 -0
- lfx/components/langchain_utilities/llm_math.py +42 -0
- lfx/components/langchain_utilities/natural_language.py +61 -0
- lfx/components/langchain_utilities/openai_tools.py +53 -0
- lfx/components/langchain_utilities/openapi.py +48 -0
- lfx/components/langchain_utilities/recursive_character.py +60 -0
- lfx/components/langchain_utilities/retrieval_qa.py +83 -0
- lfx/components/langchain_utilities/runnable_executor.py +137 -0
- lfx/components/langchain_utilities/self_query.py +80 -0
- lfx/components/langchain_utilities/spider.py +142 -0
- lfx/components/langchain_utilities/sql.py +40 -0
- lfx/components/langchain_utilities/sql_database.py +35 -0
- lfx/components/langchain_utilities/sql_generator.py +78 -0
- lfx/components/langchain_utilities/tool_calling.py +59 -0
- lfx/components/langchain_utilities/vector_store_info.py +49 -0
- lfx/components/langchain_utilities/vector_store_router.py +33 -0
- lfx/components/langchain_utilities/xml_agent.py +71 -0
- lfx/components/langwatch/__init__.py +3 -0
- lfx/components/langwatch/langwatch.py +278 -0
- lfx/components/link_extractors/__init__.py +3 -0
- lfx/components/lmstudio/__init__.py +34 -0
- lfx/components/lmstudio/lmstudioembeddings.py +89 -0
- lfx/components/lmstudio/lmstudiomodel.py +129 -0
- lfx/components/logic/__init__.py +52 -0
- lfx/components/logic/conditional_router.py +171 -0
- lfx/components/logic/data_conditional_router.py +125 -0
- lfx/components/logic/flow_tool.py +110 -0
- lfx/components/logic/listen.py +29 -0
- lfx/components/logic/loop.py +125 -0
- lfx/components/logic/notify.py +88 -0
- lfx/components/logic/pass_message.py +35 -0
- lfx/components/logic/run_flow.py +71 -0
- lfx/components/logic/sub_flow.py +114 -0
- lfx/components/maritalk/__init__.py +32 -0
- lfx/components/maritalk/maritalk.py +52 -0
- lfx/components/mem0/__init__.py +3 -0
- lfx/components/mem0/mem0_chat_memory.py +136 -0
- lfx/components/milvus/__init__.py +34 -0
- lfx/components/milvus/milvus.py +115 -0
- lfx/components/mistral/__init__.py +37 -0
- lfx/components/mistral/mistral.py +114 -0
- lfx/components/mistral/mistral_embeddings.py +58 -0
- lfx/components/models/__init__.py +34 -0
- lfx/components/models/embedding_model.py +114 -0
- lfx/components/models/language_model.py +144 -0
- lfx/components/mongodb/__init__.py +34 -0
- lfx/components/mongodb/mongodb_atlas.py +213 -0
- lfx/components/needle/__init__.py +3 -0
- lfx/components/needle/needle.py +104 -0
- lfx/components/notdiamond/__init__.py +34 -0
- lfx/components/notdiamond/notdiamond.py +228 -0
- lfx/components/novita/__init__.py +32 -0
- lfx/components/novita/novita.py +130 -0
- lfx/components/nvidia/__init__.py +57 -0
- lfx/components/nvidia/nvidia.py +157 -0
- lfx/components/nvidia/nvidia_embedding.py +77 -0
- lfx/components/nvidia/nvidia_ingest.py +317 -0
- lfx/components/nvidia/nvidia_rerank.py +63 -0
- lfx/components/nvidia/system_assist.py +65 -0
- lfx/components/olivya/__init__.py +3 -0
- lfx/components/olivya/olivya.py +116 -0
- lfx/components/ollama/__init__.py +37 -0
- lfx/components/ollama/ollama.py +330 -0
- lfx/components/ollama/ollama_embeddings.py +106 -0
- lfx/components/openai/__init__.py +37 -0
- lfx/components/openai/openai.py +100 -0
- lfx/components/openai/openai_chat_model.py +176 -0
- lfx/components/openrouter/__init__.py +32 -0
- lfx/components/openrouter/openrouter.py +202 -0
- lfx/components/output_parsers/__init__.py +3 -0
- lfx/components/perplexity/__init__.py +34 -0
- lfx/components/perplexity/perplexity.py +75 -0
- lfx/components/pgvector/__init__.py +34 -0
- lfx/components/pgvector/pgvector.py +72 -0
- lfx/components/pinecone/__init__.py +34 -0
- lfx/components/pinecone/pinecone.py +134 -0
- lfx/components/processing/__init__.py +117 -0
- lfx/components/processing/alter_metadata.py +108 -0
- lfx/components/processing/batch_run.py +205 -0
- lfx/components/processing/combine_text.py +39 -0
- lfx/components/processing/converter.py +159 -0
- lfx/components/processing/create_data.py +110 -0
- lfx/components/processing/data_operations.py +438 -0
- lfx/components/processing/data_to_dataframe.py +70 -0
- lfx/components/processing/dataframe_operations.py +313 -0
- lfx/components/processing/extract_key.py +53 -0
- lfx/components/processing/filter_data.py +42 -0
- lfx/components/processing/filter_data_values.py +88 -0
- lfx/components/processing/json_cleaner.py +103 -0
- lfx/components/processing/lambda_filter.py +154 -0
- lfx/components/processing/llm_router.py +499 -0
- lfx/components/processing/merge_data.py +90 -0
- lfx/components/processing/message_to_data.py +36 -0
- lfx/components/processing/parse_data.py +70 -0
- lfx/components/processing/parse_dataframe.py +68 -0
- lfx/components/processing/parse_json_data.py +90 -0
- lfx/components/processing/parser.py +143 -0
- lfx/components/processing/prompt.py +67 -0
- lfx/components/processing/python_repl_core.py +98 -0
- lfx/components/processing/regex.py +82 -0
- lfx/components/processing/save_file.py +225 -0
- lfx/components/processing/select_data.py +48 -0
- lfx/components/processing/split_text.py +141 -0
- lfx/components/processing/structured_output.py +202 -0
- lfx/components/processing/update_data.py +160 -0
- lfx/components/prototypes/__init__.py +34 -0
- lfx/components/prototypes/python_function.py +73 -0
- lfx/components/qdrant/__init__.py +34 -0
- lfx/components/qdrant/qdrant.py +109 -0
- lfx/components/redis/__init__.py +37 -0
- lfx/components/redis/redis.py +89 -0
- lfx/components/redis/redis_chat.py +43 -0
- lfx/components/sambanova/__init__.py +32 -0
- lfx/components/sambanova/sambanova.py +84 -0
- lfx/components/scrapegraph/__init__.py +40 -0
- lfx/components/scrapegraph/scrapegraph_markdownify_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_search_api.py +64 -0
- lfx/components/scrapegraph/scrapegraph_smart_scraper_api.py +71 -0
- lfx/components/searchapi/__init__.py +34 -0
- lfx/components/searchapi/search.py +79 -0
- lfx/components/serpapi/__init__.py +3 -0
- lfx/components/serpapi/serp.py +115 -0
- lfx/components/supabase/__init__.py +34 -0
- lfx/components/supabase/supabase.py +76 -0
- lfx/components/tavily/__init__.py +4 -0
- lfx/components/tavily/tavily_extract.py +117 -0
- lfx/components/tavily/tavily_search.py +212 -0
- lfx/components/textsplitters/__init__.py +3 -0
- lfx/components/toolkits/__init__.py +3 -0
- lfx/components/tools/__init__.py +72 -0
- lfx/components/tools/calculator.py +108 -0
- lfx/components/tools/google_search_api.py +45 -0
- lfx/components/tools/google_serper_api.py +115 -0
- lfx/components/tools/python_code_structured_tool.py +327 -0
- lfx/components/tools/python_repl.py +97 -0
- lfx/components/tools/search_api.py +87 -0
- lfx/components/tools/searxng.py +145 -0
- lfx/components/tools/serp_api.py +119 -0
- lfx/components/tools/tavily_search_tool.py +344 -0
- lfx/components/tools/wikidata_api.py +102 -0
- lfx/components/tools/wikipedia_api.py +49 -0
- lfx/components/tools/yahoo_finance.py +129 -0
- lfx/components/twelvelabs/__init__.py +52 -0
- lfx/components/twelvelabs/convert_astra_results.py +84 -0
- lfx/components/twelvelabs/pegasus_index.py +311 -0
- lfx/components/twelvelabs/split_video.py +291 -0
- lfx/components/twelvelabs/text_embeddings.py +57 -0
- lfx/components/twelvelabs/twelvelabs_pegasus.py +408 -0
- lfx/components/twelvelabs/video_embeddings.py +100 -0
- lfx/components/twelvelabs/video_file.py +179 -0
- lfx/components/unstructured/__init__.py +3 -0
- lfx/components/unstructured/unstructured.py +121 -0
- lfx/components/upstash/__init__.py +34 -0
- lfx/components/upstash/upstash.py +124 -0
- lfx/components/vectara/__init__.py +37 -0
- lfx/components/vectara/vectara.py +97 -0
- lfx/components/vectara/vectara_rag.py +164 -0
- lfx/components/vectorstores/__init__.py +40 -0
- lfx/components/vectorstores/astradb.py +1285 -0
- lfx/components/vectorstores/astradb_graph.py +319 -0
- lfx/components/vectorstores/cassandra.py +264 -0
- lfx/components/vectorstores/cassandra_graph.py +238 -0
- lfx/components/vectorstores/chroma.py +167 -0
- lfx/components/vectorstores/clickhouse.py +135 -0
- lfx/components/vectorstores/couchbase.py +102 -0
- lfx/components/vectorstores/elasticsearch.py +267 -0
- lfx/components/vectorstores/faiss.py +111 -0
- lfx/components/vectorstores/graph_rag.py +141 -0
- lfx/components/vectorstores/hcd.py +314 -0
- lfx/components/vectorstores/local_db.py +261 -0
- lfx/components/vectorstores/milvus.py +115 -0
- lfx/components/vectorstores/mongodb_atlas.py +213 -0
- lfx/components/vectorstores/opensearch.py +243 -0
- lfx/components/vectorstores/pgvector.py +72 -0
- lfx/components/vectorstores/pinecone.py +134 -0
- lfx/components/vectorstores/qdrant.py +109 -0
- lfx/components/vectorstores/supabase.py +76 -0
- lfx/components/vectorstores/upstash.py +124 -0
- lfx/components/vectorstores/vectara.py +97 -0
- lfx/components/vectorstores/vectara_rag.py +164 -0
- lfx/components/vectorstores/weaviate.py +89 -0
- lfx/components/vertexai/__init__.py +37 -0
- lfx/components/vertexai/vertexai.py +71 -0
- lfx/components/vertexai/vertexai_embeddings.py +67 -0
- lfx/components/weaviate/__init__.py +34 -0
- lfx/components/weaviate/weaviate.py +89 -0
- lfx/components/wikipedia/__init__.py +4 -0
- lfx/components/wikipedia/wikidata.py +86 -0
- lfx/components/wikipedia/wikipedia.py +53 -0
- lfx/components/wolframalpha/__init__.py +3 -0
- lfx/components/wolframalpha/wolfram_alpha_api.py +54 -0
- lfx/components/xai/__init__.py +32 -0
- lfx/components/xai/xai.py +167 -0
- lfx/components/yahoosearch/__init__.py +3 -0
- lfx/components/yahoosearch/yahoo.py +137 -0
- lfx/components/youtube/__init__.py +52 -0
- lfx/components/youtube/channel.py +227 -0
- lfx/components/youtube/comments.py +231 -0
- lfx/components/youtube/playlist.py +33 -0
- lfx/components/youtube/search.py +120 -0
- lfx/components/youtube/trending.py +285 -0
- lfx/components/youtube/video_details.py +263 -0
- lfx/components/youtube/youtube_transcripts.py +118 -0
- lfx/components/zep/__init__.py +3 -0
- lfx/components/zep/zep.py +44 -0
- lfx/constants.py +6 -0
- lfx/custom/__init__.py +7 -0
- lfx/custom/attributes.py +86 -0
- lfx/custom/code_parser/__init__.py +3 -0
- lfx/custom/code_parser/code_parser.py +361 -0
- lfx/custom/custom_component/__init__.py +0 -0
- lfx/custom/custom_component/base_component.py +128 -0
- lfx/custom/custom_component/component.py +1808 -0
- lfx/custom/custom_component/component_with_cache.py +8 -0
- lfx/custom/custom_component/custom_component.py +588 -0
- lfx/custom/dependency_analyzer.py +165 -0
- lfx/custom/directory_reader/__init__.py +3 -0
- lfx/custom/directory_reader/directory_reader.py +359 -0
- lfx/custom/directory_reader/utils.py +171 -0
- lfx/custom/eval.py +12 -0
- lfx/custom/schema.py +32 -0
- lfx/custom/tree_visitor.py +21 -0
- lfx/custom/utils.py +877 -0
- lfx/custom/validate.py +488 -0
- lfx/events/__init__.py +1 -0
- lfx/events/event_manager.py +110 -0
- lfx/exceptions/__init__.py +0 -0
- lfx/exceptions/component.py +15 -0
- lfx/field_typing/__init__.py +91 -0
- lfx/field_typing/constants.py +215 -0
- lfx/field_typing/range_spec.py +35 -0
- lfx/graph/__init__.py +6 -0
- lfx/graph/edge/__init__.py +0 -0
- lfx/graph/edge/base.py +277 -0
- lfx/graph/edge/schema.py +119 -0
- lfx/graph/edge/utils.py +0 -0
- lfx/graph/graph/__init__.py +0 -0
- lfx/graph/graph/ascii.py +202 -0
- lfx/graph/graph/base.py +2238 -0
- lfx/graph/graph/constants.py +63 -0
- lfx/graph/graph/runnable_vertices_manager.py +133 -0
- lfx/graph/graph/schema.py +52 -0
- lfx/graph/graph/state_model.py +66 -0
- lfx/graph/graph/utils.py +1024 -0
- lfx/graph/schema.py +75 -0
- lfx/graph/state/__init__.py +0 -0
- lfx/graph/state/model.py +237 -0
- lfx/graph/utils.py +200 -0
- lfx/graph/vertex/__init__.py +0 -0
- lfx/graph/vertex/base.py +823 -0
- lfx/graph/vertex/constants.py +0 -0
- lfx/graph/vertex/exceptions.py +4 -0
- lfx/graph/vertex/param_handler.py +264 -0
- lfx/graph/vertex/schema.py +26 -0
- lfx/graph/vertex/utils.py +19 -0
- lfx/graph/vertex/vertex_types.py +489 -0
- lfx/helpers/__init__.py +1 -0
- lfx/helpers/base_model.py +71 -0
- lfx/helpers/custom.py +13 -0
- lfx/helpers/data.py +167 -0
- lfx/helpers/flow.py +194 -0
- lfx/inputs/__init__.py +68 -0
- lfx/inputs/constants.py +2 -0
- lfx/inputs/input_mixin.py +328 -0
- lfx/inputs/inputs.py +714 -0
- lfx/inputs/validators.py +19 -0
- lfx/interface/__init__.py +6 -0
- lfx/interface/components.py +489 -0
- lfx/interface/importing/__init__.py +5 -0
- lfx/interface/importing/utils.py +39 -0
- lfx/interface/initialize/__init__.py +3 -0
- lfx/interface/initialize/loading.py +224 -0
- lfx/interface/listing.py +26 -0
- lfx/interface/run.py +16 -0
- lfx/interface/utils.py +111 -0
- lfx/io/__init__.py +63 -0
- lfx/io/schema.py +289 -0
- lfx/load/__init__.py +8 -0
- lfx/load/load.py +256 -0
- lfx/load/utils.py +99 -0
- lfx/log/__init__.py +5 -0
- lfx/log/logger.py +385 -0
- lfx/memory/__init__.py +90 -0
- lfx/memory/stubs.py +283 -0
- lfx/processing/__init__.py +1 -0
- lfx/processing/process.py +238 -0
- lfx/processing/utils.py +25 -0
- lfx/py.typed +0 -0
- lfx/schema/__init__.py +66 -0
- lfx/schema/artifact.py +83 -0
- lfx/schema/content_block.py +62 -0
- lfx/schema/content_types.py +91 -0
- lfx/schema/data.py +308 -0
- lfx/schema/dataframe.py +210 -0
- lfx/schema/dotdict.py +74 -0
- lfx/schema/encoders.py +13 -0
- lfx/schema/graph.py +47 -0
- lfx/schema/image.py +131 -0
- lfx/schema/json_schema.py +141 -0
- lfx/schema/log.py +61 -0
- lfx/schema/message.py +473 -0
- lfx/schema/openai_responses_schemas.py +74 -0
- lfx/schema/properties.py +41 -0
- lfx/schema/schema.py +171 -0
- lfx/schema/serialize.py +13 -0
- lfx/schema/table.py +140 -0
- lfx/schema/validators.py +114 -0
- lfx/serialization/__init__.py +5 -0
- lfx/serialization/constants.py +2 -0
- lfx/serialization/serialization.py +314 -0
- lfx/services/__init__.py +23 -0
- lfx/services/base.py +28 -0
- lfx/services/cache/__init__.py +6 -0
- lfx/services/cache/base.py +183 -0
- lfx/services/cache/service.py +166 -0
- lfx/services/cache/utils.py +169 -0
- lfx/services/chat/__init__.py +1 -0
- lfx/services/chat/config.py +2 -0
- lfx/services/chat/schema.py +10 -0
- lfx/services/deps.py +129 -0
- lfx/services/factory.py +19 -0
- lfx/services/initialize.py +19 -0
- lfx/services/interfaces.py +103 -0
- lfx/services/manager.py +172 -0
- lfx/services/schema.py +20 -0
- lfx/services/session.py +82 -0
- lfx/services/settings/__init__.py +3 -0
- lfx/services/settings/auth.py +130 -0
- lfx/services/settings/base.py +539 -0
- lfx/services/settings/constants.py +31 -0
- lfx/services/settings/factory.py +23 -0
- lfx/services/settings/feature_flags.py +12 -0
- lfx/services/settings/service.py +35 -0
- lfx/services/settings/utils.py +40 -0
- lfx/services/shared_component_cache/__init__.py +1 -0
- lfx/services/shared_component_cache/factory.py +30 -0
- lfx/services/shared_component_cache/service.py +9 -0
- lfx/services/storage/__init__.py +5 -0
- lfx/services/storage/local.py +155 -0
- lfx/services/storage/service.py +54 -0
- lfx/services/tracing/__init__.py +1 -0
- lfx/services/tracing/service.py +21 -0
- lfx/settings.py +6 -0
- lfx/template/__init__.py +6 -0
- lfx/template/field/__init__.py +0 -0
- lfx/template/field/base.py +257 -0
- lfx/template/field/prompt.py +15 -0
- lfx/template/frontend_node/__init__.py +6 -0
- lfx/template/frontend_node/base.py +212 -0
- lfx/template/frontend_node/constants.py +65 -0
- lfx/template/frontend_node/custom_components.py +79 -0
- lfx/template/template/__init__.py +0 -0
- lfx/template/template/base.py +100 -0
- lfx/template/utils.py +217 -0
- lfx/type_extraction/__init__.py +19 -0
- lfx/type_extraction/type_extraction.py +75 -0
- lfx/type_extraction.py +80 -0
- lfx/utils/__init__.py +1 -0
- lfx/utils/async_helpers.py +42 -0
- lfx/utils/component_utils.py +154 -0
- lfx/utils/concurrency.py +60 -0
- lfx/utils/connection_string_parser.py +11 -0
- lfx/utils/constants.py +205 -0
- lfx/utils/data_structure.py +212 -0
- lfx/utils/exceptions.py +22 -0
- lfx/utils/helpers.py +28 -0
- lfx/utils/image.py +73 -0
- lfx/utils/lazy_load.py +15 -0
- lfx/utils/request_utils.py +18 -0
- lfx/utils/schemas.py +139 -0
- lfx/utils/util.py +481 -0
- lfx/utils/util_strings.py +56 -0
- lfx/utils/version.py +24 -0
- lfx_nightly-0.1.11.dev0.dist-info/METADATA +293 -0
- lfx_nightly-0.1.11.dev0.dist-info/RECORD +699 -0
- lfx_nightly-0.1.11.dev0.dist-info/WHEEL +4 -0
- lfx_nightly-0.1.11.dev0.dist-info/entry_points.txt +2 -0
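
The hunk below is the new file `lfx/custom/custom_component/component.py` (+1808 lines), which defines the `Component` base class that the component modules listed above build on. For orientation only: a subclass declares `inputs` and `outputs` at class level and exposes each output through a method. The sketch below is hypothetical and not part of the package diff; the import paths follow the listing and the imports visible in the hunk, while `MessageTextInput` and the `Message(text=...)` constructor are assumptions based on Langflow's public API rather than anything shown here.

```python
# Hypothetical usage sketch -- illustrative only, not part of the packaged code.
# Import paths mirror the files listed above; MessageTextInput and Message(text=...)
# are assumed to behave as in Langflow's public API.
from lfx.custom.custom_component.component import Component
from lfx.inputs.inputs import MessageTextInput  # assumed input type
from lfx.schema.message import Message
from lfx.template.field.base import Output


class EchoComponent(Component):
    display_name = "Echo"
    description = "Returns the text it receives."

    # Class-level declarations that Component.__init__ maps into
    # self._inputs / self._outputs_map via map_inputs() / map_outputs().
    inputs = [MessageTextInput(name="text", display_name="Text")]
    outputs = [Output(name="message", display_name="Message", method="build_message")]

    def build_message(self) -> Message:
        # Values passed to the constructor (or via .set(...)) are stored with
        # set_attributes() and exposed as instance attributes.
        return Message(text=self.text)


echo = EchoComponent(text="hello")  # non-underscore kwargs are routed to inputs
```

Per `__init__` in the diff, constructor keyword arguments that do not start with an underscore are treated as input values, which is why `text="hello"` above becomes `self.text`.
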
@@ -0,0 +1,1808 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import ast
|
4
|
+
import asyncio
|
5
|
+
import inspect
|
6
|
+
from collections.abc import AsyncIterator, Iterator
|
7
|
+
from copy import deepcopy
|
8
|
+
from textwrap import dedent
|
9
|
+
from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, get_type_hints
|
10
|
+
from uuid import UUID
|
11
|
+
|
12
|
+
import nanoid
|
13
|
+
import pandas as pd
|
14
|
+
import yaml
|
15
|
+
from langchain_core.tools import StructuredTool
|
16
|
+
from pydantic import BaseModel, ValidationError
|
17
|
+
|
18
|
+
from lfx.base.tools.constants import (
|
19
|
+
TOOL_OUTPUT_DISPLAY_NAME,
|
20
|
+
TOOL_OUTPUT_NAME,
|
21
|
+
TOOLS_METADATA_INFO,
|
22
|
+
TOOLS_METADATA_INPUT_NAME,
|
23
|
+
)
|
24
|
+
from lfx.custom.tree_visitor import RequiredInputsVisitor
|
25
|
+
from lfx.exceptions.component import StreamingError
|
26
|
+
from lfx.field_typing import Tool # noqa: TC001
|
27
|
+
|
28
|
+
# Lazy import to avoid circular dependency
|
29
|
+
# from lfx.graph.state.model import create_state_model
|
30
|
+
# Lazy import to avoid circular dependency
|
31
|
+
# from lfx.graph.utils import has_chat_output
|
32
|
+
from lfx.helpers.custom import format_type
|
33
|
+
from lfx.memory import astore_message, aupdate_messages, delete_message
|
34
|
+
from lfx.schema.artifact import get_artifact_type, post_process_raw
|
35
|
+
from lfx.schema.data import Data
|
36
|
+
from lfx.schema.log import Log
|
37
|
+
from lfx.schema.message import ErrorMessage, Message
|
38
|
+
from lfx.schema.properties import Source
|
39
|
+
from lfx.template.field.base import UNDEFINED, Input, Output
|
40
|
+
from lfx.template.frontend_node.custom_components import ComponentFrontendNode
|
41
|
+
from lfx.utils.async_helpers import run_until_complete
|
42
|
+
from lfx.utils.util import find_closest_match
|
43
|
+
|
44
|
+
from .custom_component import CustomComponent
|
45
|
+
|
46
|
+
if TYPE_CHECKING:
|
47
|
+
from collections.abc import Callable
|
48
|
+
|
49
|
+
from lfx.base.tools.component_tool import ComponentToolkit
|
50
|
+
from lfx.events.event_manager import EventManager
|
51
|
+
from lfx.graph.edge.schema import EdgeData
|
52
|
+
from lfx.graph.vertex.base import Vertex
|
53
|
+
from lfx.inputs.inputs import InputTypes
|
54
|
+
from lfx.schema.dataframe import DataFrame
|
55
|
+
from lfx.schema.log import LoggableType
|
56
|
+
|
57
|
+
|
58
|
+
_ComponentToolkit = None
|
59
|
+
|
60
|
+
|
61
|
+
def get_component_toolkit():
|
62
|
+
global _ComponentToolkit # noqa: PLW0603
|
63
|
+
if _ComponentToolkit is None:
|
64
|
+
from lfx.base.tools.component_tool import ComponentToolkit
|
65
|
+
|
66
|
+
_ComponentToolkit = ComponentToolkit
|
67
|
+
return _ComponentToolkit
|
68
|
+
|
69
|
+
|
70
|
+
BACKWARDS_COMPATIBLE_ATTRIBUTES = ["user_id", "vertex", "tracing_service"]
|
71
|
+
CONFIG_ATTRIBUTES = ["_display_name", "_description", "_icon", "_name", "_metadata"]
|
72
|
+
|
73
|
+
|
74
|
+
class PlaceholderGraph(NamedTuple):
|
75
|
+
"""A placeholder graph structure for components, providing backwards compatibility.
|
76
|
+
|
77
|
+
and enabling component execution without a full graph object.
|
78
|
+
|
79
|
+
This lightweight structure contains essential information typically found in a complete graph,
|
80
|
+
allowing components to function in isolation or in simplified contexts.
|
81
|
+
|
82
|
+
Attributes:
|
83
|
+
flow_id (str | None): Unique identifier for the flow, if applicable.
|
84
|
+
user_id (str | None): Identifier of the user associated with the flow, if any.
|
85
|
+
session_id (str | None): Identifier for the current session, if applicable.
|
86
|
+
context (dict): Additional contextual information for the component's execution.
|
87
|
+
flow_name (str | None): Name of the flow, if available.
|
88
|
+
"""
|
89
|
+
|
90
|
+
flow_id: str | None
|
91
|
+
user_id: str | None
|
92
|
+
session_id: str | None
|
93
|
+
context: dict
|
94
|
+
flow_name: str | None
|
95
|
+
|
96
|
+
|
97
|
+
class Component(CustomComponent):
|
98
|
+
inputs: list[InputTypes] = []
|
99
|
+
outputs: list[Output] = []
|
100
|
+
selected_output: str | None = None
|
101
|
+
code_class_base_inheritance: ClassVar[str] = "Component"
|
102
|
+
|
103
|
+
def __init__(self, **kwargs) -> None:
|
104
|
+
# Initialize instance-specific attributes first
|
105
|
+
if overlap := self._there_is_overlap_in_inputs_and_outputs():
|
106
|
+
msg = f"Inputs and outputs have overlapping names: {overlap}"
|
107
|
+
raise ValueError(msg)
|
108
|
+
self._output_logs: dict[str, list[Log]] = {}
|
109
|
+
self._current_output: str = ""
|
110
|
+
self._metadata: dict = {}
|
111
|
+
self._ctx: dict = {}
|
112
|
+
self._code: str | None = None
|
113
|
+
self._logs: list[Log] = []
|
114
|
+
|
115
|
+
# Initialize component-specific collections
|
116
|
+
self._inputs: dict[str, InputTypes] = {}
|
117
|
+
self._outputs_map: dict[str, Output] = {}
|
118
|
+
self._results: dict[str, Any] = {}
|
119
|
+
self._attributes: dict[str, Any] = {}
|
120
|
+
self._edges: list[EdgeData] = []
|
121
|
+
self._components: list[Component] = []
|
122
|
+
self._event_manager: EventManager | None = None
|
123
|
+
self._state_model = None
|
124
|
+
|
125
|
+
# Process input kwargs
|
126
|
+
inputs = {}
|
127
|
+
config = {}
|
128
|
+
for key, value in kwargs.items():
|
129
|
+
if key.startswith("_"):
|
130
|
+
config[key] = value
|
131
|
+
elif key in CONFIG_ATTRIBUTES:
|
132
|
+
config[key[1:]] = value
|
133
|
+
else:
|
134
|
+
inputs[key] = value
|
135
|
+
|
136
|
+
self._parameters = inputs or {}
|
137
|
+
self.set_attributes(self._parameters)
|
138
|
+
|
139
|
+
# Store original inputs and config for reference
|
140
|
+
self.__inputs = inputs
|
141
|
+
self.__config = config or {}
|
142
|
+
|
143
|
+
# Add unique ID if not provided
|
144
|
+
if "_id" not in self.__config:
|
145
|
+
self.__config |= {"_id": f"{self.__class__.__name__}-{nanoid.generate(size=5)}"}
|
146
|
+
|
147
|
+
# Initialize base class
|
148
|
+
super().__init__(**self.__config)
|
149
|
+
|
150
|
+
# Post-initialization setup
|
151
|
+
if hasattr(self, "_trace_type"):
|
152
|
+
self.trace_type = self._trace_type
|
153
|
+
if not hasattr(self, "trace_type"):
|
154
|
+
self.trace_type = "chain"
|
155
|
+
|
156
|
+
# Setup inputs and outputs
|
157
|
+
self._reset_all_output_values()
|
158
|
+
if self.inputs is not None:
|
159
|
+
self.map_inputs(self.inputs)
|
160
|
+
self.map_outputs()
|
161
|
+
|
162
|
+
# Final setup
|
163
|
+
self._set_output_types(list(self._outputs_map.values()))
|
164
|
+
self.set_class_code()
|
165
|
+
|
166
|
+
@classmethod
|
167
|
+
def get_base_inputs(cls):
|
168
|
+
if not hasattr(cls, "_base_inputs"):
|
169
|
+
return []
|
170
|
+
return cls._base_inputs
|
171
|
+
|
172
|
+
@classmethod
|
173
|
+
def get_base_outputs(cls):
|
174
|
+
if not hasattr(cls, "_base_outputs"):
|
175
|
+
return []
|
176
|
+
return cls._base_outputs
|
177
|
+
|
178
|
+
def get_results(self) -> dict[str, Any]:
|
179
|
+
return self._results
|
180
|
+
|
181
|
+
def get_artifacts(self) -> dict[str, Any]:
|
182
|
+
return self._artifacts
|
183
|
+
|
184
|
+
def get_event_manager(self) -> EventManager | None:
|
185
|
+
return self._event_manager
|
186
|
+
|
187
|
+
def get_undesrcore_inputs(self) -> dict[str, InputTypes]:
|
188
|
+
return self._inputs
|
189
|
+
|
190
|
+
def get_id(self) -> str:
|
191
|
+
return self._id
|
192
|
+
|
193
|
+
def set_id(self, id_: str) -> None:
|
194
|
+
self._id = id_
|
195
|
+
|
196
|
+
def get_edges(self) -> list[EdgeData]:
|
197
|
+
return self._edges
|
198
|
+
|
199
|
+
def get_components(self) -> list[Component]:
|
200
|
+
return self._components
|
201
|
+
|
202
|
+
def get_outputs_map(self) -> dict[str, Output]:
|
203
|
+
return self._outputs_map
|
204
|
+
|
205
|
+
def get_output_logs(self) -> dict[str, Any]:
|
206
|
+
return self._output_logs
|
207
|
+
|
208
|
+
def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:
|
209
|
+
source_dict = {}
|
210
|
+
if id_:
|
211
|
+
source_dict["id"] = id_
|
212
|
+
if display_name:
|
213
|
+
source_dict["display_name"] = display_name
|
214
|
+
if source:
|
215
|
+
# Handle case where source is a ChatOpenAI and other models objects
|
216
|
+
if hasattr(source, "model_name"):
|
217
|
+
source_dict["source"] = source.model_name
|
218
|
+
elif hasattr(source, "model"):
|
219
|
+
source_dict["source"] = str(source.model)
|
220
|
+
else:
|
221
|
+
source_dict["source"] = str(source)
|
222
|
+
return Source(**source_dict)
|
223
|
+
|
224
|
+
def get_incoming_edge_by_target_param(self, target_param: str) -> str | None:
|
225
|
+
"""Get the source vertex ID for an incoming edge that targets a specific parameter.
|
226
|
+
|
227
|
+
This method delegates to the underlying vertex to find an incoming edge that connects
|
228
|
+
to the specified target parameter.
|
229
|
+
|
230
|
+
Args:
|
231
|
+
target_param (str): The name of the target parameter to find an incoming edge for
|
232
|
+
|
233
|
+
Returns:
|
234
|
+
str | None: The ID of the source vertex if an incoming edge is found, None otherwise
|
235
|
+
"""
|
236
|
+
if self._vertex is None:
|
237
|
+
msg = "Vertex not found. Please build the graph first."
|
238
|
+
raise ValueError(msg)
|
239
|
+
return self._vertex.get_incoming_edge_by_target_param(target_param)
|
240
|
+
|
241
|
+
@property
|
242
|
+
def enabled_tools(self) -> list[str] | None:
|
243
|
+
"""Dynamically determine which tools should be enabled.
|
244
|
+
|
245
|
+
This property can be overridden by subclasses to provide custom tool filtering.
|
246
|
+
By default, it returns None, which means all tools are enabled.
|
247
|
+
|
248
|
+
Returns:
|
249
|
+
list[str] | None: List of tool names or tags to enable, or None to enable all tools.
|
250
|
+
"""
|
251
|
+
# Default implementation returns None (all tools enabled)
|
252
|
+
# Subclasses can override this to provide custom filtering
|
253
|
+
return None
|
254
|
+
|
255
|
+
def _there_is_overlap_in_inputs_and_outputs(self) -> set[str]:
|
256
|
+
"""Check the `.name` of inputs and outputs to see if there is overlap.
|
257
|
+
|
258
|
+
Returns:
|
259
|
+
set[str]: Set of names that overlap between inputs and outputs.
|
260
|
+
"""
|
261
|
+
# Create sets of input and output names for O(1) lookup
|
262
|
+
input_names = {input_.name for input_ in self.inputs if input_.name is not None}
|
263
|
+
output_names = {output.name for output in self.outputs}
|
264
|
+
|
265
|
+
# Return the intersection of the sets
|
266
|
+
return input_names & output_names
|
267
|
+
|
268
|
+
def get_base_args(self):
|
269
|
+
"""Get the base arguments required for component initialization.
|
270
|
+
|
271
|
+
Returns:
|
272
|
+
dict: A dictionary containing the base arguments:
|
273
|
+
- _user_id: The ID of the current user
|
274
|
+
- _session_id: The ID of the current session
|
275
|
+
- _tracing_service: The tracing service instance for logging/monitoring
|
276
|
+
"""
|
277
|
+
return {
|
278
|
+
"_user_id": self.user_id,
|
279
|
+
"_session_id": self.graph.session_id,
|
280
|
+
"_tracing_service": self.tracing_service,
|
281
|
+
}
|
282
|
+
|
283
|
+
@property
|
284
|
+
def ctx(self):
|
285
|
+
if not hasattr(self, "graph") or self.graph is None:
|
286
|
+
msg = "Graph not found. Please build the graph first."
|
287
|
+
raise ValueError(msg)
|
288
|
+
return self.graph.context
|
289
|
+
|
290
|
+
def add_to_ctx(self, key: str, value: Any, *, overwrite: bool = False) -> None:
|
291
|
+
"""Add a key-value pair to the context.
|
292
|
+
|
293
|
+
Args:
|
294
|
+
key (str): The key to add.
|
295
|
+
value (Any): The value to associate with the key.
|
296
|
+
overwrite (bool, optional): Whether to overwrite the existing value. Defaults to False.
|
297
|
+
|
298
|
+
Raises:
|
299
|
+
ValueError: If the graph is not built.
|
300
|
+
"""
|
301
|
+
if not hasattr(self, "graph") or self.graph is None:
|
302
|
+
msg = "Graph not found. Please build the graph first."
|
303
|
+
raise ValueError(msg)
|
304
|
+
if key in self.graph.context and not overwrite:
|
305
|
+
msg = f"Key {key} already exists in context. Set overwrite=True to overwrite."
|
306
|
+
raise ValueError(msg)
|
307
|
+
self.graph.context.update({key: value})
|
308
|
+
|
309
|
+
def update_ctx(self, value_dict: dict[str, Any]) -> None:
|
310
|
+
"""Update the context with a dictionary of values.
|
311
|
+
|
312
|
+
Args:
|
313
|
+
value_dict (dict[str, Any]): The dictionary of values to update.
|
314
|
+
|
315
|
+
Raises:
|
316
|
+
ValueError: If the graph is not built.
|
317
|
+
"""
|
318
|
+
if not hasattr(self, "graph") or self.graph is None:
|
319
|
+
msg = "Graph not found. Please build the graph first."
|
320
|
+
raise ValueError(msg)
|
321
|
+
if not isinstance(value_dict, dict):
|
322
|
+
msg = "Value dict must be a dictionary"
|
323
|
+
raise TypeError(msg)
|
324
|
+
|
325
|
+
self.graph.context.update(value_dict)
|
326
|
+
|
327
|
+
def _pre_run_setup(self):
|
328
|
+
pass
|
329
|
+
|
330
|
+
def set_event_manager(self, event_manager: EventManager | None = None) -> None:
|
331
|
+
self._event_manager = event_manager
|
332
|
+
|
333
|
+
def _reset_all_output_values(self) -> None:
|
334
|
+
if isinstance(self._outputs_map, dict):
|
335
|
+
for output in self._outputs_map.values():
|
336
|
+
output.value = UNDEFINED
|
337
|
+
|
338
|
+
def _build_state_model(self):
|
339
|
+
if self._state_model:
|
340
|
+
return self._state_model
|
341
|
+
name = self.name or self.__class__.__name__
|
342
|
+
model_name = f"{name}StateModel"
|
343
|
+
fields = {}
|
344
|
+
for output in self._outputs_map.values():
|
345
|
+
fields[output.name] = getattr(self, output.method)
|
346
|
+
# Lazy import to avoid circular dependency
|
347
|
+
from lfx.graph.state.model import create_state_model
|
348
|
+
|
349
|
+
self._state_model = create_state_model(model_name=model_name, **fields)
|
350
|
+
return self._state_model
|
351
|
+
|
352
|
+
def get_state_model_instance_getter(self):
|
353
|
+
state_model = self._build_state_model()
|
354
|
+
|
355
|
+
def _instance_getter(_):
|
356
|
+
return state_model()
|
357
|
+
|
358
|
+
_instance_getter.__annotations__["return"] = state_model
|
359
|
+
return _instance_getter
|
360
|
+
|
361
|
+
def __deepcopy__(self, memo: dict) -> Component:
|
362
|
+
if id(self) in memo:
|
363
|
+
return memo[id(self)]
|
364
|
+
kwargs = deepcopy(self.__config, memo)
|
365
|
+
kwargs["inputs"] = deepcopy(self.__inputs, memo)
|
366
|
+
new_component = type(self)(**kwargs)
|
367
|
+
new_component._code = self._code
|
368
|
+
new_component._outputs_map = self._outputs_map
|
369
|
+
new_component._inputs = self._inputs
|
370
|
+
new_component._edges = self._edges
|
371
|
+
new_component._components = self._components
|
372
|
+
new_component._parameters = self._parameters
|
373
|
+
new_component._attributes = self._attributes
|
374
|
+
new_component._output_logs = self._output_logs
|
375
|
+
new_component._logs = self._logs # type: ignore[attr-defined]
|
376
|
+
memo[id(self)] = new_component
|
377
|
+
return new_component
|
378
|
+
|
379
|
+
def set_class_code(self) -> None:
|
380
|
+
# Get the source code of the calling class
|
381
|
+
if self._code:
|
382
|
+
return
|
383
|
+
try:
|
384
|
+
module = inspect.getmodule(self.__class__)
|
385
|
+
if module is None:
|
386
|
+
msg = "Could not find module for class"
|
387
|
+
raise ValueError(msg)
|
388
|
+
|
389
|
+
class_code = inspect.getsource(module)
|
390
|
+
self._code = class_code
|
391
|
+
except (OSError, TypeError) as e:
|
392
|
+
msg = f"Could not find source code for {self.__class__.__name__}"
|
393
|
+
raise ValueError(msg) from e
|
394
|
+
|
395
|
+
def set(self, **kwargs):
|
396
|
+
"""Connects the component to other components or sets parameters and attributes.
|
397
|
+
|
398
|
+
Args:
|
399
|
+
**kwargs: Keyword arguments representing the connections, parameters, and attributes.
|
400
|
+
|
401
|
+
Returns:
|
402
|
+
None
|
403
|
+
|
404
|
+
Raises:
|
405
|
+
KeyError: If the specified input name does not exist.
|
406
|
+
"""
|
407
|
+
for key, value in kwargs.items():
|
408
|
+
self._process_connection_or_parameters(key, value)
|
409
|
+
return self
|
410
|
+
|
411
|
+
def list_inputs(self):
|
412
|
+
"""Returns a list of input names."""
|
413
|
+
return [_input.name for _input in self.inputs]
|
414
|
+
|
415
|
+
def list_outputs(self):
|
416
|
+
"""Returns a list of output names."""
|
417
|
+
return [_output.name for _output in self._outputs_map.values()]
|
418
|
+
|
419
|
+
async def run(self):
|
420
|
+
"""Executes the component's logic and returns the result.
|
421
|
+
|
422
|
+
Returns:
|
423
|
+
The result of executing the component's logic.
|
424
|
+
"""
|
425
|
+
return await self._run()
|
426
|
+
|
427
|
+
def set_vertex(self, vertex: Vertex) -> None:
|
428
|
+
"""Sets the vertex for the component.
|
429
|
+
|
430
|
+
Args:
|
431
|
+
vertex (Vertex): The vertex to set.
|
432
|
+
|
433
|
+
Returns:
|
434
|
+
None
|
435
|
+
"""
|
436
|
+
self._vertex = vertex
|
437
|
+
|
438
|
+
def get_input(self, name: str) -> Any:
|
439
|
+
"""Retrieves the value of the input with the specified name.
|
440
|
+
|
441
|
+
Args:
|
442
|
+
name (str): The name of the input.
|
443
|
+
|
444
|
+
Returns:
|
445
|
+
Any: The value of the input.
|
446
|
+
|
447
|
+
Raises:
|
448
|
+
ValueError: If the input with the specified name is not found.
|
449
|
+
"""
|
450
|
+
if name in self._inputs:
|
451
|
+
return self._inputs[name]
|
452
|
+
msg = f"Input {name} not found in {self.__class__.__name__}"
|
453
|
+
raise ValueError(msg)
|
454
|
+
|
455
|
+
def get_output(self, name: str) -> Any:
|
456
|
+
"""Retrieves the output with the specified name.
|
457
|
+
|
458
|
+
Args:
|
459
|
+
name (str): The name of the output to retrieve.
|
460
|
+
|
461
|
+
Returns:
|
462
|
+
Any: The output value.
|
463
|
+
|
464
|
+
Raises:
|
465
|
+
ValueError: If the output with the specified name is not found.
|
466
|
+
"""
|
467
|
+
if name in self._outputs_map:
|
468
|
+
return self._outputs_map[name]
|
469
|
+
msg = f"Output {name} not found in {self.__class__.__name__}"
|
470
|
+
raise ValueError(msg)
|
471
|
+
|
472
|
+
def set_on_output(self, name: str, **kwargs) -> None:
|
473
|
+
output = self.get_output(name)
|
474
|
+
for key, value in kwargs.items():
|
475
|
+
if not hasattr(output, key):
|
476
|
+
msg = f"Output {name} does not have a method {key}"
|
477
|
+
raise ValueError(msg)
|
478
|
+
setattr(output, key, value)
|
479
|
+
|
480
|
+
def set_output_value(self, name: str, value: Any) -> None:
|
481
|
+
if name in self._outputs_map:
|
482
|
+
self._outputs_map[name].value = value
|
483
|
+
else:
|
484
|
+
msg = f"Output {name} not found in {self.__class__.__name__}"
|
485
|
+
raise ValueError(msg)
|
486
|
+
|
487
|
+
def map_outputs(self) -> None:
|
488
|
+
"""Maps the given list of outputs to the component.
|
489
|
+
|
490
|
+
Args:
|
491
|
+
outputs (List[Output]): The list of outputs to be mapped.
|
492
|
+
|
493
|
+
Raises:
|
494
|
+
ValueError: If the output name is None.
|
495
|
+
|
496
|
+
Returns:
|
497
|
+
None
|
498
|
+
"""
|
499
|
+
# override outputs (generated from the class code) with vertex outputs
|
500
|
+
# if they exist (generated from the frontend)
|
501
|
+
outputs = []
|
502
|
+
if self._vertex and self._vertex.outputs:
|
503
|
+
for output in self._vertex.outputs:
|
504
|
+
try:
|
505
|
+
output_ = Output(**output)
|
506
|
+
outputs.append(output_)
|
507
|
+
except ValidationError as e:
|
508
|
+
msg = f"Invalid output: {e}"
|
509
|
+
raise ValueError(msg) from e
|
510
|
+
else:
|
511
|
+
outputs = self.outputs
|
512
|
+
for output in outputs:
|
513
|
+
if output.name is None:
|
514
|
+
msg = "Output name cannot be None."
|
515
|
+
raise ValueError(msg)
|
516
|
+
# Deepcopy is required to avoid modifying the original component;
|
517
|
+
# allows each instance of each component to modify its own output
|
518
|
+
self._outputs_map[output.name] = deepcopy(output)
|
519
|
+
|
520
|
+
def map_inputs(self, inputs: list[InputTypes]) -> None:
|
521
|
+
"""Maps the given inputs to the component.
|
522
|
+
|
523
|
+
Args:
|
524
|
+
inputs (List[InputTypes]): A list of InputTypes objects representing the inputs.
|
525
|
+
|
526
|
+
Raises:
|
527
|
+
ValueError: If the input name is None.
|
528
|
+
|
529
|
+
"""
|
530
|
+
for input_ in inputs:
|
531
|
+
if input_.name is None:
|
532
|
+
msg = self.build_component_error_message("Input name cannot be None")
|
533
|
+
raise ValueError(msg)
|
534
|
+
try:
|
535
|
+
self._inputs[input_.name] = deepcopy(input_)
|
536
|
+
except TypeError:
|
537
|
+
self._inputs[input_.name] = input_
|
538
|
+
|
539
|
+
def validate(self, params: dict) -> None:
|
540
|
+
"""Validates the component parameters.
|
541
|
+
|
542
|
+
Args:
|
543
|
+
params (dict): A dictionary containing the component parameters.
|
544
|
+
|
545
|
+
Raises:
|
546
|
+
ValueError: If the inputs are not valid.
|
547
|
+
ValueError: If the outputs are not valid.
|
548
|
+
"""
|
549
|
+
self._validate_inputs(params)
|
550
|
+
self._validate_outputs()
|
551
|
+
|
552
|
+
async def run_and_validate_update_outputs(self, frontend_node: dict, field_name: str, field_value: Any):
|
553
|
+
frontend_node = self.update_outputs(frontend_node, field_name, field_value)
|
554
|
+
if field_name == "tool_mode" or frontend_node.get("tool_mode"):
|
555
|
+
is_tool_mode = field_value or frontend_node.get("tool_mode")
|
556
|
+
frontend_node["outputs"] = [self._build_tool_output()] if is_tool_mode else frontend_node["outputs"]
|
557
|
+
if is_tool_mode:
|
558
|
+
frontend_node.setdefault("template", {})
|
559
|
+
frontend_node["tool_mode"] = True
|
560
|
+
tools_metadata_input = await self._build_tools_metadata_input()
|
561
|
+
frontend_node["template"][TOOLS_METADATA_INPUT_NAME] = tools_metadata_input.to_dict()
|
562
|
+
self._append_tool_to_outputs_map()
|
563
|
+
elif "template" in frontend_node:
|
564
|
+
frontend_node["template"].pop(TOOLS_METADATA_INPUT_NAME, None)
|
565
|
+
self.tools_metadata = frontend_node.get("template", {}).get(TOOLS_METADATA_INPUT_NAME, {}).get("value")
|
566
|
+
return self._validate_frontend_node(frontend_node)
|
567
|
+
|
568
|
+
def _validate_frontend_node(self, frontend_node: dict):
|
569
|
+
# Check if all outputs are either Output or a valid Output model
|
570
|
+
for index, output in enumerate(frontend_node["outputs"]):
|
571
|
+
if isinstance(output, dict):
|
572
|
+
try:
|
573
|
+
output_ = Output(**output)
|
574
|
+
self._set_output_return_type(output_)
|
575
|
+
output_dict = output_.model_dump()
|
576
|
+
except ValidationError as e:
|
577
|
+
msg = f"Invalid output: {e}"
|
578
|
+
raise ValueError(msg) from e
|
579
|
+
elif isinstance(output, Output):
|
580
|
+
# we need to serialize it
|
581
|
+
self._set_output_return_type(output)
|
582
|
+
output_dict = output.model_dump()
|
583
|
+
else:
|
584
|
+
msg = f"Invalid output type: {type(output)}"
|
585
|
+
raise TypeError(msg)
|
586
|
+
frontend_node["outputs"][index] = output_dict
|
587
|
+
return frontend_node
|
588
|
+
|
589
|
+
def update_outputs(self, frontend_node: dict, field_name: str, field_value: Any) -> dict: # noqa: ARG002
|
590
|
+
"""Default implementation for updating outputs based on field changes.
|
591
|
+
|
592
|
+
Subclasses can override this to modify outputs based on field_name and field_value.
|
593
|
+
"""
|
594
|
+
return frontend_node

    def _set_output_types(self, outputs: list[Output]) -> None:
        for output in outputs:
            self._set_output_return_type(output)

    def _set_output_return_type(self, output: Output) -> None:
        if output.method is None:
            msg = f"Output {output.name} does not have a method"
            raise ValueError(msg)
        return_types = self._get_method_return_type(output.method)
        output.add_types(return_types)

    def _set_output_required_inputs(self) -> None:
        for output in self.outputs:
            if not output.method:
                continue
            method = getattr(self, output.method, None)
            if not method or not callable(method):
                continue
            try:
                source_code = inspect.getsource(method)
                ast_tree = ast.parse(dedent(source_code))
            except Exception:  # noqa: BLE001
                ast_tree = ast.parse(dedent(self._code or ""))

            visitor = RequiredInputsVisitor(self._inputs)
            visitor.visit(ast_tree)
            output.required_inputs = sorted(visitor.required_inputs)

    def get_output_by_method(self, method: Callable):
        # method is a callable and output.method is a string
        # we need to find the output that has the same method
        output = next((output for output in self._outputs_map.values() if output.method == method.__name__), None)
        if output is None:
            method_name = method.__name__ if hasattr(method, "__name__") else str(method)
            msg = f"Output with method {method_name} not found"
            raise ValueError(msg)
        return output

    def _inherits_from_component(self, method: Callable):
        # check if the method is a method from a class that inherits from Component
        # and that it is an output of that class
        return hasattr(method, "__self__") and isinstance(method.__self__, Component)

    def _method_is_valid_output(self, method: Callable):
        # check if the method is a method from a class that inherits from Component
        # and that it is an output of that class
        return (
            hasattr(method, "__self__")
            and isinstance(method.__self__, Component)
            and method.__self__.get_output_by_method(method)
        )

    def _build_error_string_from_matching_pairs(self, matching_pairs: list[tuple[Output, Input]]):
        text = ""
        for output, input_ in matching_pairs:
            text += f"{output.name}[{','.join(output.types)}]->{input_.name}[{','.join(input_.input_types or [])}]\n"
        return text

    def _find_matching_output_method(self, input_name: str, value: Component):
        """Find the output method from the given component and input name.

        Find the output method from the given component (`value`) that matches the specified input (`input_name`)
        in the current component.
        This method searches through all outputs of the provided component to find outputs whose types match
        the input types of the specified input in the current component. If exactly one matching output is found,
        it returns the corresponding method. If multiple matching outputs are found, it raises an error indicating
        ambiguity. If no matching outputs are found, it raises an error indicating that no suitable output was found.

        Args:
            input_name (str): The name of the input in the current component to match.
            value (Component): The component whose outputs are to be considered.

        Returns:
            Callable: The method corresponding to the matching output.

        Raises:
            ValueError: If multiple matching outputs are found, if no matching outputs are found,
                or if the output method is invalid.
        """
        # Retrieve all outputs from the given component
        outputs = value._outputs_map.values()
        # Prepare to collect matching output-input pairs
        matching_pairs = []
        # Get the input object from the current component
        input_ = self._inputs[input_name]
        # Iterate over outputs to find matches based on types
        matching_pairs = [
            (output, input_)
            for output in outputs
            for output_type in output.types
            # Check if the output type matches the input's accepted types
            if input_.input_types and output_type in input_.input_types
        ]
        # If multiple matches are found, raise an error indicating ambiguity
        if len(matching_pairs) > 1:
            matching_pairs_str = self._build_error_string_from_matching_pairs(matching_pairs)
            msg = self.build_component_error_message(
                f"There are multiple outputs from {value.display_name} that can connect to inputs: {matching_pairs_str}"
            )
            raise ValueError(msg)
        # If no matches are found, raise an error indicating no suitable output
        if not matching_pairs:
            msg = self.build_input_error_message(input_name, f"No matching output from {value.display_name} found")
            raise ValueError(msg)
        # Get the matching output and input pair
        output, input_ = matching_pairs[0]
        # Ensure that the output method is a valid method name (string)
        if not isinstance(output.method, str):
            msg = self.build_component_error_message(
                f"Method {output.method} is not a valid output of {value.display_name}"
            )
            raise TypeError(msg)
        return getattr(value, output.method)

    def _process_connection_or_parameter(self, key, value) -> None:
        # Special handling for Loop components: check if we're setting a loop-enabled output
        if self._is_loop_connection(key, value):
            self._process_loop_connection(key, value)
            return

        input_ = self._get_or_create_input(key)
        # We need to check if callable AND if it is a method from a class that inherits from Component
        if isinstance(value, Component):
            # We need to find the Output that can connect to an input of the current component
            # if there's more than one output that matches, we need to raise an error
            # because we don't know which one to connect to
            value = self._find_matching_output_method(key, value)
        if callable(value) and self._inherits_from_component(value):
            try:
                self._method_is_valid_output(value)
            except ValueError as e:
                msg = f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}"
                raise ValueError(msg) from e
            self._connect_to_component(key, value, input_)
        else:
            self._set_parameter_or_attribute(key, value)

    def _is_loop_connection(self, key: str, value) -> bool:
        """Check if this is a loop feedback connection.

        A loop connection occurs when:
        1. The key matches an output name of this component
        2. That output has allows_loop=True
        3. The value is a callable method from another component
        """
        # Check if key matches a loop-enabled output
        if key not in self._outputs_map:
            return False

        output = self._outputs_map[key]
        if not getattr(output, "allows_loop", False):
            return False

        # Check if value is a callable method from a Component
        return callable(value) and self._inherits_from_component(value)

    def _process_loop_connection(self, key: str, value) -> None:
        """Process a loop feedback connection.

        Creates a special edge that connects the source component's output
        to this Loop component's loop-enabled output (not an input).
        """
        try:
            self._method_is_valid_output(value)
        except ValueError as e:
            msg = f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}"
            raise ValueError(msg) from e

        source_component = value.__self__
        self._components.append(source_component)
        source_output = source_component.get_output_by_method(value)
        target_output = self._outputs_map[key]

        # Create special loop feedback edge
        self._add_loop_edge(source_component, source_output, target_output)

    def _add_loop_edge(self, source_component, source_output, target_output) -> None:
        """Add a special loop feedback edge that targets an output instead of an input."""
        self._edges.append(
            {
                "source": source_component._id,
                "target": self._id,
                "data": {
                    "sourceHandle": {
                        "dataType": source_component.name or source_component.__class__.__name__,
                        "id": source_component._id,
                        "name": source_output.name,
                        "output_types": source_output.types,
                    },
                    "targetHandle": {
                        # Special loop edge structure - targets an output, not an input
                        "dataType": self.name or self.__class__.__name__,
                        "id": self._id,
                        "name": target_output.name,
                        "output_types": target_output.types,
                    },
                },
            }
        )

    def _process_connection_or_parameters(self, key, value) -> None:
        # If value is a list of components, we need to process each component.
        # Note: this check makes sure the list is not a list of plain values
        # (str | int | float | bool | None | Message | Data | StructuredTool).
        if isinstance(value, list) and not any(
            isinstance(val, str | int | float | bool | type(None) | Message | Data | StructuredTool) for val in value
        ):
            for val in value:
                self._process_connection_or_parameter(key, val)
        else:
            self._process_connection_or_parameter(key, value)

    def _get_or_create_input(self, key):
        try:
            return self._inputs[key]
        except KeyError:
            input_ = self._get_fallback_input(name=key, display_name=key)
            self._inputs[key] = input_
            self.inputs.append(input_)
            return input_

    def _connect_to_component(self, key, value, input_) -> None:
        component = value.__self__
        self._components.append(component)
        output = component.get_output_by_method(value)
        self._add_edge(component, key, output, input_)

    def _add_edge(self, component, key, output, input_) -> None:
        self._edges.append(
            {
                "source": component._id,
                "target": self._id,
                "data": {
                    "sourceHandle": {
                        "dataType": component.name or component.__class__.__name__,
                        "id": component._id,
                        "name": output.name,
                        "output_types": output.types,
                    },
                    "targetHandle": {
                        "fieldName": key,
                        "id": self._id,
                        "inputTypes": input_.input_types,
                        "type": input_.field_type,
                    },
                },
            }
        )

    def _set_parameter_or_attribute(self, key, value) -> None:
        if isinstance(value, Component):
            methods = ", ".join([f"'{output.method}'" for output in value.outputs])
            msg = f"You set {value.display_name} as value for `{key}`. You should pass one of the following: {methods}"
            raise TypeError(msg)
        self.set_input_value(key, value)
        self._parameters[key] = value
        self._attributes[key] = value

    def __call__(self, **kwargs):
        self.set(**kwargs)

        return run_until_complete(self.run())
    async def _run(self):
        # Resolve callable inputs
        for key, _input in self._inputs.items():
            if asyncio.iscoroutinefunction(_input.value):
                self._inputs[key].value = await _input.value()
            elif callable(_input.value):
                self._inputs[key].value = await asyncio.to_thread(_input.value)

        self.set_attributes({})

        return await self.build_results()

    def __getattr__(self, name: str) -> Any:
        if "_attributes" in self.__dict__ and name in self.__dict__["_attributes"]:
            # _attributes is a dict of raw attributes that are not inputs or outputs;
            # it should also hold the loop input.
            return self.__dict__["_attributes"][name]
        if "_inputs" in self.__dict__ and name in self.__dict__["_inputs"]:
            return self.__dict__["_inputs"][name].value
        if "_outputs_map" in self.__dict__ and name in self.__dict__["_outputs_map"]:
            return self.__dict__["_outputs_map"][name]
        if name in BACKWARDS_COMPATIBLE_ATTRIBUTES:
            return self.__dict__[f"_{name}"]
        if name.startswith("_") and name[1:] in BACKWARDS_COMPATIBLE_ATTRIBUTES:
            return self.__dict__[name]
        if name == "graph":
            # If execution reaches this point, the lookup was about to fail,
            # so return a placeholder graph instead of raising.
            session_id = self._session_id if hasattr(self, "_session_id") else None
            user_id = self._user_id if hasattr(self, "_user_id") else None
            flow_name = self._flow_name if hasattr(self, "_flow_name") else None
            flow_id = self._flow_id if hasattr(self, "_flow_id") else None
            return PlaceholderGraph(
                flow_id=flow_id, user_id=str(user_id), session_id=session_id, context={}, flow_name=flow_name
            )
        msg = f"Attribute {name} not found in {self.__class__.__name__}"
        raise AttributeError(msg)
    def set_input_value(self, name: str, value: Any) -> None:
        if name in self._inputs:
            input_value = self._inputs[name].value
            if isinstance(input_value, Component):
                methods = ", ".join([f"'{output.method}'" for output in input_value.outputs])
                msg = self.build_input_error_message(
                    name,
                    f"You set {input_value.display_name} as value. You should pass one of the following: {methods}",
                )
                raise ValueError(msg)
            if callable(input_value) and hasattr(input_value, "__self__"):
                msg = self.build_input_error_message(
                    name, f"Input is connected to {input_value.__self__.display_name}.{input_value.__name__}"
                )
                raise ValueError(msg)
            try:
                self._inputs[name].value = value
            except Exception as e:
                msg = f"Error setting input value for {name}: {e}"
                raise ValueError(msg) from e
            if hasattr(self._inputs[name], "load_from_db"):
                self._inputs[name].load_from_db = False
        else:
            msg = self.build_component_error_message(f"Input {name} not found")
            raise ValueError(msg)

    def _validate_outputs(self) -> None:
        # Raise an error if any output rule isn't met
        if self.selected_output is not None and self.selected_output not in self._outputs_map:
            output_names = ", ".join(list(self._outputs_map.keys()))
            msg = f"selected_output '{self.selected_output}' is not valid. Must be one of: {output_names}"
            raise ValueError(msg)
    def _map_parameters_on_frontend_node(self, frontend_node: ComponentFrontendNode) -> None:
        for name, value in self._parameters.items():
            frontend_node.set_field_value_in_template(name, value)

    def _map_parameters_on_template(self, template: dict) -> None:
        for name, value in self._parameters.items():
            try:
                template[name]["value"] = value
            except KeyError as e:
                close_match = find_closest_match(name, list(template.keys()))
                if close_match:
                    msg = f"Parameter '{name}' not found in {self.__class__.__name__}. Did you mean '{close_match}'?"
                    raise ValueError(msg) from e
                msg = f"Parameter {name} not found in {self.__class__.__name__}. "
                raise ValueError(msg) from e

    def _get_method_return_type(self, method_name: str) -> list[str]:
        method = getattr(self, method_name)
        return_type = get_type_hints(method).get("return")
        if return_type is None:
            return []
        extracted_return_types = self._extract_return_type(return_type)
        return [format_type(extracted_return_type) for extracted_return_type in extracted_return_types]

    def _update_template(self, frontend_node: dict):
        return frontend_node
    def to_frontend_node(self):
        # ! This part here is clunky but we need it like this for
        # ! backwards compatibility. We can change how prompt component
        # ! works and then update this later
        field_config = self.get_template_config(self)
        frontend_node = ComponentFrontendNode.from_inputs(**field_config)
        # for key in self._inputs:
        #     frontend_node.set_field_load_from_db_in_template(key, value=False)
        self._map_parameters_on_frontend_node(frontend_node)

        frontend_node_dict = frontend_node.to_dict(keep_name=False)
        frontend_node_dict = self._update_template(frontend_node_dict)
        self._map_parameters_on_template(frontend_node_dict["template"])

        frontend_node = ComponentFrontendNode.from_dict(frontend_node_dict)
        if not self._code:
            self.set_class_code()
        code_field = Input(
            dynamic=True,
            required=True,
            placeholder="",
            multiline=True,
            value=self._code,
            password=False,
            name="code",
            advanced=True,
            field_type="code",
            is_list=False,
        )
        frontend_node.template.add_field(code_field)

        for output in frontend_node.outputs:
            if output.types:
                continue
            return_types = self._get_method_return_type(output.method)
            output.add_types(return_types)

        frontend_node.validate_component()
        frontend_node.set_base_classes_from_outputs()

        # Get the node dictionary and add selected_output if specified
        node_dict = frontend_node.to_dict(keep_name=False)
        if self.selected_output is not None:
            node_dict["selected_output"] = self.selected_output

        return {
            "data": {
                "node": node_dict,
                "type": self.name or self.__class__.__name__,
                "id": self._id,
            },
            "id": self._id,
        }
    def _validate_inputs(self, params: dict) -> None:
        # Params keys are the `name` attribute of the Input objects
        """Validates and assigns input values from the provided parameters dictionary.

        For each parameter matching a defined input, sets the input's value and updates the parameter
        dictionary with the validated value.
        """
        for key, value in params.copy().items():
            if key not in self._inputs:
                continue
            input_ = self._inputs[key]
            # BaseInputMixin has a `validate_assignment=True`

            input_.value = value
            params[input_.name] = input_.value

    def set_attributes(self, params: dict) -> None:
        """Sets component attributes from the given parameters, preventing conflicts with reserved attribute names.

        Raises:
            ValueError: If a parameter name matches a reserved attribute not managed in _attributes and its
                value differs from the current attribute value.
        """
        self._validate_inputs(params)
        attributes = {}
        for key, value in params.items():
            if key in self.__dict__ and key not in self._attributes and value != getattr(self, key):
                msg = (
                    f"{self.__class__.__name__} defines an input parameter named '{key}' "
                    f"that is a reserved word and cannot be used."
                )
                raise ValueError(msg)
            attributes[key] = value
        for key, input_obj in self._inputs.items():
            if key not in attributes and key not in self._attributes:
                attributes[key] = input_obj.value or None

        self._attributes.update(attributes)

    def _set_outputs(self, outputs: list[dict]) -> None:
        self.outputs = [Output(**output) for output in outputs]
        for output in self.outputs:
            setattr(self, output.name, output)
            self._outputs_map[output.name] = output

    def get_trace_as_inputs(self):
        predefined_inputs = {
            input_.name: input_.value
            for input_ in self.inputs
            if hasattr(input_, "trace_as_input") and input_.trace_as_input
        }
        # Runtime inputs
        runtime_inputs = {name: input_.value for name, input_ in self._inputs.items() if hasattr(input_, "value")}
        return {**predefined_inputs, **runtime_inputs}

    def get_trace_as_metadata(self):
        return {
            input_.name: input_.value
            for input_ in self.inputs
            if hasattr(input_, "trace_as_metadata") and input_.trace_as_metadata
        }

    async def _build_with_tracing(self):
        inputs = self.get_trace_as_inputs()
        metadata = self.get_trace_as_metadata()
        async with self.tracing_service.trace_component(self, self.trace_name, inputs, metadata):
            results, artifacts = await self._build_results()
            self.tracing_service.set_outputs(self.trace_name, results)

        return results, artifacts

    async def _build_without_tracing(self):
        return await self._build_results()
    async def build_results(self):
        """Build the results of the component."""
        if hasattr(self, "graph"):
            session_id = self.graph.session_id
        elif hasattr(self, "_session_id"):
            session_id = self._session_id
        else:
            session_id = None
        try:
            if self.tracing_service:
                return await self._build_with_tracing()
            return await self._build_without_tracing()
        except StreamingError as e:
            await self.send_error(
                exception=e.cause,
                session_id=session_id,
                trace_name=getattr(self, "trace_name", None),
                source=e.source,
            )
            raise e.cause  # noqa: B904
        except Exception as e:
            await self.send_error(
                exception=e,
                session_id=session_id,
                source=Source(id=self._id, display_name=self.display_name, source=self.display_name),
                trace_name=getattr(self, "trace_name", None),
            )
            raise

    async def _build_results(self) -> tuple[dict, dict]:
        results, artifacts = {}, {}

        self._pre_run_setup_if_needed()
        self._handle_tool_mode()

        for output in self._get_outputs_to_process():
            self._current_output = output.name
            result = await self._get_output_result(output)
            results[output.name] = result
            artifacts[output.name] = self._build_artifact(result)
            self._log_output(output)

        self._finalize_results(results, artifacts)
        return results, artifacts
    def _pre_run_setup_if_needed(self):
        if hasattr(self, "_pre_run_setup"):
            self._pre_run_setup()

    def _handle_tool_mode(self):
        if (
            hasattr(self, "outputs") and any(getattr(_input, "tool_mode", False) for _input in self.inputs)
        ) or self.add_tool_output:
            self._append_tool_to_outputs_map()

    def _should_process_output(self, output):
        """Determines whether a given output should be processed based on vertex edge configuration.

        Returns True if the component has no vertex or outgoing edges, or if the output's name is among
        the vertex's source edge names.
        """
        if not self._vertex or not self._vertex.outgoing_edges:
            return True
        return output.name in self._vertex.edges_source_names

    def _get_outputs_to_process(self):
        """Returns a list of outputs to process, ordered according to self.outputs.

        Outputs are included only if they should be processed, as determined by _should_process_output.
        First processes outputs in the order defined by self.outputs, then processes any remaining outputs
        from _outputs_map that weren't in self.outputs.

        Returns:
            list: Outputs to be processed in the defined order.

        Raises:
            ValueError: If an output name in self.outputs is not present in _outputs_map.
        """
        result = []
        processed_names = set()

        # First process outputs in the order defined by self.outputs
        for output in self.outputs:
            output_obj = self._outputs_map.get(output.name, deepcopy(output))
            if self._should_process_output(output_obj):
                result.append(output_obj)
                processed_names.add(output_obj.name)

        # Then process any remaining outputs from _outputs_map
        for name, output_obj in self._outputs_map.items():
            if name not in processed_names and self._should_process_output(output_obj):
                result.append(output_obj)

        return result

    async def _get_output_result(self, output):
        """Computes and returns the result for a given output, applying caching and output options.

        If the output is cached and a value is already defined, returns the cached value. Otherwise,
        invokes the associated output method asynchronously, applies output options, updates the cache,
        and returns the result. Raises a ValueError if the output method is not defined, or a TypeError
        if the method invocation fails.
        """
        if output.cache and output.value != UNDEFINED:
            return output.value

        if output.method is None:
            msg = f'Output "{output.name}" does not have a method defined.'
            raise ValueError(msg)

        method = getattr(self, output.method)
        try:
            result = await method() if inspect.iscoroutinefunction(method) else await asyncio.to_thread(method)
        except TypeError as e:
            msg = f'Error running method "{output.method}": {e}'
            raise TypeError(msg) from e

        if (
            self._vertex is not None
            and isinstance(result, Message)
            and result.flow_id is None
            and self._vertex.graph.flow_id is not None
        ):
            result.set_flow_id(self._vertex.graph.flow_id)
        result = output.apply_options(result)
        output.value = result

        return result

    async def resolve_output(self, output_name: str) -> Any:
        """Resolves and returns the value for a specified output by name.

        If output caching is enabled and a value is already available, returns the cached value;
        otherwise, computes and returns the output result. Raises a KeyError if the output name
        does not exist.
        """
        output = self._outputs_map.get(output_name)
        if output is None:
            msg = (
                f"Sorry, an output named '{output_name}' could not be found. "
                "Please ensure that the output is correctly configured and try again."
            )
            raise KeyError(msg)
        if output.cache and output.value != UNDEFINED:
            return output.value
        return await self._get_output_result(output)
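    # Illustrative usage sketch (assumptions: "MyComponent" and its "text" output are
    # hypothetical, and the call happens inside a running event loop):
    #
    #     component = MyComponent(_id="my-component")
    #     component.set(input_value="hello")
    #     text = await component.resolve_output("text")
    #     # Repeat calls return the cached value while output.cache is True.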

    def _build_artifact(self, result):
        """Builds an artifact dictionary containing a string representation, raw data, and type for a result.

        The artifact includes a human-readable representation, the processed raw result, and its determined type.
        """
        custom_repr = self.custom_repr()
        if custom_repr is None and isinstance(result, dict | Data | str):
            custom_repr = result
        if not isinstance(custom_repr, str):
            custom_repr = str(custom_repr)

        raw = self._process_raw_result(result)
        artifact_type = get_artifact_type(self.status or raw, result)
        raw, artifact_type = post_process_raw(raw, artifact_type)
        return {"repr": custom_repr, "raw": raw, "type": artifact_type}

    def _process_raw_result(self, result):
        return self.extract_data(result)

    def extract_data(self, result):
        """Extract the data from the result. This is where self.status is set."""
        if isinstance(result, Message):
            self.status = result.get_text()
            return (
                self.status if self.status is not None else "No text available"
            )  # Provide a default message if .text_key is missing
        if hasattr(result, "data"):
            return result.data
        if hasattr(result, "model_dump"):
            return result.model_dump()
        if isinstance(result, Data | dict | str):
            return result.data if isinstance(result, Data) else result

        if self.status:
            return self.status
        return result

    def _log_output(self, output):
        self._output_logs[output.name] = self._logs
        self._logs = []
        self._current_output = ""

    def _finalize_results(self, results, artifacts):
        self._artifacts = artifacts
        self._results = results
        if self.tracing_service:
            self.tracing_service.set_outputs(self.trace_name, results)

    def custom_repr(self):
        if self.repr_value == "":
            self.repr_value = self.status
        if isinstance(self.repr_value, dict):
            return yaml.dump(self.repr_value)
        if isinstance(self.repr_value, str):
            return self.repr_value
        if isinstance(self.repr_value, BaseModel) and not isinstance(self.repr_value, Data):
            return str(self.repr_value)
        return self.repr_value

    def build_inputs(self):
        """Builds the inputs for the custom component.

        Returns:
            List[Input]: The list of inputs.
        """
        # This function is similar to build_config, but it will process the inputs
        # and return them as a dict with keys being the Input.name and values being the Input.model_dump()
        self.inputs = self.template_config.get("inputs", [])
        if not self.inputs:
            return {}
        return {_input.name: _input.model_dump(by_alias=True, exclude_none=True) for _input in self.inputs}

    def _get_field_order(self):
        try:
            inputs = self.template_config["inputs"]
            return [field.name for field in inputs]
        except KeyError:
            return []

    def build(self, **kwargs) -> None:
        self.set_attributes(kwargs)

    def _get_fallback_input(self, **kwargs):
        return Input(**kwargs)

    def to_toolkit(self) -> list[Tool]:
        """Convert component to a list of tools.

        This is a template method that defines the skeleton of the toolkit creation
        algorithm. Subclasses can override _get_tools() to provide custom tool
        implementations while maintaining the metadata update functionality.

        Returns:
            list[Tool]: A list of tools with updated metadata. Each tool contains:
                - name: The name of the tool
                - description: A description of what the tool does
                - tags: List of tags associated with the tool
        """
        # Get tools from subclass implementation
        tools = self._get_tools()

        if hasattr(self, TOOLS_METADATA_INPUT_NAME):
            tools = self._filter_tools_by_status(tools=tools, metadata=self.tools_metadata)
            return self._update_tools_with_metadata(tools=tools, metadata=self.tools_metadata)

        # If no metadata exists yet, filter based on enabled_tools
        return self._filter_tools_by_status(tools=tools, metadata=None)
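    # Illustrative sketch of the template-method hook described above (assumptions:
    # "my_search" is a hypothetical function; StructuredTool is the LangChain class
    # already referenced in this module):
    #
    #     def _get_tools(self) -> list[Tool]:
    #         return [
    #             StructuredTool.from_function(
    #                 func=my_search,
    #                 name="my_search",
    #                 description="Search the project docs.",
    #             )
    #         ]
    #
    # to_toolkit() would still apply the status filtering and metadata updates around
    # whatever this hook returns.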

    def _get_tools(self) -> list[Tool]:
        """Get the list of tools for this component.

        This method can be overridden by subclasses to provide custom tool implementations.
        The default implementation uses ComponentToolkit.

        Returns:
            list[Tool]: List of tools provided by this component.
        """
        component_toolkit: type[ComponentToolkit] = get_component_toolkit()
        return component_toolkit(component=self).get_tools(callbacks=self.get_langchain_callbacks())

    def _extract_tools_tags(self, tools_metadata: list[dict]) -> list[str]:
        """Extract the first tag from each tool's metadata."""
        return [tool["tags"][0] for tool in tools_metadata if tool["tags"]]

    def _update_tools_with_metadata(self, tools: list[Tool], metadata: DataFrame | None) -> list[Tool]:
        """Update tools with provided metadata."""
        component_toolkit: type[ComponentToolkit] = get_component_toolkit()
        return component_toolkit(component=self, metadata=metadata).update_tools_metadata(tools=tools)

    def check_for_tool_tag_change(self, old_tags: list[str], new_tags: list[str]) -> bool:
        # First check length - if different lengths, they can't be equal
        if len(old_tags) != len(new_tags):
            return True
        # Use set comparison for O(n) average-case complexity; the earlier
        # old_tags.sort() != new_tags.sort() check always compared None to None.
        return set(old_tags) != set(new_tags)
    def _filter_tools_by_status(self, tools: list[Tool], metadata: pd.DataFrame | None) -> list[Tool]:
        """Filter tools based on their status in metadata.

        Args:
            tools (list[Tool]): List of tools to filter.
            metadata (pd.DataFrame | None): Tools metadata containing status information.

        Returns:
            list[Tool]: Filtered list of tools.
        """
        # Convert metadata to a list of dicts if it's a DataFrame
        metadata_dict = None  # Initialize as None to avoid lint issues with empty dict
        if isinstance(metadata, pd.DataFrame):
            metadata_dict = metadata.to_dict(orient="records")

        # If metadata is None or empty, use enabled_tools
        if not metadata_dict:
            enabled = self.enabled_tools
            return (
                tools
                if enabled is None
                else [
                    tool for tool in tools if any(enabled_name in [tool.name, *tool.tags] for enabled_name in enabled)
                ]
            )

        # Ensure metadata is a list of dicts
        if not isinstance(metadata_dict, list):
            return tools

        # Create a mapping of tool names to their status
        tool_status = {item["name"]: item.get("status", True) for item in metadata_dict}
        return [tool for tool in tools if tool_status.get(tool.name, True)]
    def _build_tool_data(self, tool: Tool) -> dict:
        if tool.metadata is None:
            tool.metadata = {}
        return {
            "name": tool.name,
            "description": tool.description,
            "tags": tool.tags if hasattr(tool, "tags") and tool.tags else [tool.name],
            "status": True,  # Initialize all tools with status True
            "display_name": tool.metadata.get("display_name", tool.name),
            "display_description": tool.metadata.get("display_description", tool.description),
            "readonly": tool.metadata.get("readonly", False),
            "args": tool.args,
            # "args_schema": tool.args_schema,
        }

    async def _build_tools_metadata_input(self):
        try:
            from lfx.inputs.inputs import ToolsInput
        except ImportError as e:
            msg = "Failed to import ToolsInput from lfx.inputs.inputs"
            raise ImportError(msg) from e
        placeholder = None
        tools = []
        try:
            # Handle both sync and async _get_tools methods
            if asyncio.iscoroutinefunction(self._get_tools):
                tools = await self._get_tools()
            else:
                tools = self._get_tools()

            placeholder = "Loading actions..." if len(tools) == 0 else ""
        except (TimeoutError, asyncio.TimeoutError):
            placeholder = "Timeout loading actions"
        except (ConnectionError, OSError, ValueError):
            placeholder = "Error loading actions"
        # Always use the latest tool data
        tool_data = [self._build_tool_data(tool) for tool in tools]
        # print(tool_data)
        if hasattr(self, TOOLS_METADATA_INPUT_NAME):
            old_tags = self._extract_tools_tags(self.tools_metadata)
            new_tags = self._extract_tools_tags(tool_data)
            if self.check_for_tool_tag_change(old_tags, new_tags):
                # If enabled tools are set, update status based on them
                enabled = self.enabled_tools
                if enabled is not None:
                    for item in tool_data:
                        item["status"] = any(enabled_name in [item["name"], *item["tags"]] for enabled_name in enabled)
                self.tools_metadata = tool_data
            else:
                # Preserve existing status values
                existing_status = {item["name"]: item.get("status", True) for item in self.tools_metadata}
                for item in tool_data:
                    item["status"] = existing_status.get(item["name"], True)
                tool_data = self.tools_metadata
        else:
            # If enabled tools are set, update status based on them
            enabled = self.enabled_tools
            if enabled is not None:
                for item in tool_data:
                    item["status"] = any(enabled_name in [item["name"], *item["tags"]] for enabled_name in enabled)
            self.tools_metadata = tool_data

        return ToolsInput(
            name=TOOLS_METADATA_INPUT_NAME,
            placeholder=placeholder,
            display_name="Actions",
            info=TOOLS_METADATA_INFO,
            value=tool_data,
        )

    def get_project_name(self):
        if hasattr(self, "_tracing_service") and self.tracing_service:
            return self.tracing_service.project_name
        return "Langflow"

    def log(self, message: LoggableType | list[LoggableType], name: str | None = None) -> None:
        """Logs a message.

        Args:
            message (LoggableType | list[LoggableType]): The message to log.
            name (str, optional): The name of the log. Defaults to None.
        """
        if name is None:
            name = f"Log {len(self._logs) + 1}"
        log = Log(message=message, type=get_artifact_type(message), name=name)
        self._logs.append(log)
        if self.tracing_service and self._vertex:
            self.tracing_service.add_log(trace_name=self.trace_name, log=log)
        if self._event_manager is not None and self._current_output:
            data = log.model_dump()
            data["output"] = self._current_output
            data["component_id"] = self._id
            self._event_manager.on_log(data=data)
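    # Illustrative usage sketch (the output method and payload below are assumptions):
    #
    #     def build_output(self) -> Data:
    #         self.log({"rows": 3}, name="row_count")  # forwarded to tracing and, when set, the event manager
    #         return Data(data={"rows": 3})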

    def _append_tool_output(self) -> None:
        if next((output for output in self.outputs if output.name == TOOL_OUTPUT_NAME), None) is None:
            self.outputs.append(
                Output(
                    name=TOOL_OUTPUT_NAME,
                    display_name=TOOL_OUTPUT_DISPLAY_NAME,
                    method="to_toolkit",
                    types=["Tool"],
                )
            )

    def is_connected_to_chat_output(self) -> bool:
        # Lazy import to avoid circular dependency
        from lfx.graph.utils import has_chat_output

        return has_chat_output(self.graph.get_vertex_neighbors(self._vertex))

    def _should_skip_message(self, message: Message) -> bool:
        """Check if the message should be skipped based on vertex configuration and message type."""
        return (
            self._vertex is not None
            and not (self._vertex.is_output or self._vertex.is_input)
            and not self.is_connected_to_chat_output()
            and not isinstance(message, ErrorMessage)
        )

    def _ensure_message_required_fields(self, message: Message) -> None:
        """Ensure message has required fields for storage (session_id, sender, sender_name).

        Only sets default values if the fields are not already provided.
        """
        from lfx.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI

        # Set default session_id from graph if not already set
        if (
            not message.session_id
            and hasattr(self, "graph")
            and hasattr(self.graph, "session_id")
            and self.graph.session_id
        ):
            session_id = (
                UUID(self.graph.session_id) if isinstance(self.graph.session_id, str) else self.graph.session_id
            )
            message.session_id = session_id

        # Set default sender if not set (preserves existing values)
        if not message.sender:
            message.sender = MESSAGE_SENDER_AI

        # Set default sender_name if not set (preserves existing values)
        if not message.sender_name:
            message.sender_name = MESSAGE_SENDER_NAME_AI

    async def send_message(self, message: Message, id_: str | None = None):
        if self._should_skip_message(message):
            return message

        if hasattr(message, "flow_id") and isinstance(message.flow_id, str):
            message.flow_id = UUID(message.flow_id)

        # Ensure required fields for message storage are set
        self._ensure_message_required_fields(message)

        stored_message = await self._store_message(message)

        self._stored_message_id = stored_message.id
        try:
            complete_message = ""
            if (
                self._should_stream_message(stored_message, message)
                and message is not None
                and isinstance(message.text, AsyncIterator | Iterator)
            ):
                complete_message = await self._stream_message(message.text, stored_message)
                stored_message.text = complete_message
                stored_message = await self._update_stored_message(stored_message)
            else:
                # Only send message event for non-streaming messages
                await self._send_message_event(stored_message, id_=id_)
        except Exception:
            # remove the message from the database
            await delete_message(stored_message.id)
            raise
        self.status = stored_message
        return stored_message

    async def _store_message(self, message: Message) -> Message:
        flow_id: str | None = None
        if hasattr(self, "graph"):
            # Convert UUID to str if needed
            flow_id = str(self.graph.flow_id) if self.graph.flow_id else None
        stored_messages = await astore_message(message, flow_id=flow_id)
        if len(stored_messages) != 1:
            msg = "Only one message can be stored at a time."
            raise ValueError(msg)
        stored_message = stored_messages[0]
        return await Message.create(**stored_message.model_dump())

    async def _send_message_event(self, message: Message, id_: str | None = None, category: str | None = None) -> None:
        if hasattr(self, "_event_manager") and self._event_manager:
            data_dict = message.model_dump()["data"] if hasattr(message, "data") else message.model_dump()
            if id_ and not data_dict.get("id"):
                data_dict["id"] = id_
            category = category or data_dict.get("category", None)

            def _send_event():
                match category:
                    case "error":
                        self._event_manager.on_error(data=data_dict)
                    case "remove_message":
                        # Check if id exists in data_dict before accessing it
                        if "id" in data_dict:
                            self._event_manager.on_remove_message(data={"id": data_dict["id"]})
                        else:
                            # If no id, try to get it from the message object or id_ parameter
                            message_id = getattr(message, "id", None) or id_
                            if message_id:
                                self._event_manager.on_remove_message(data={"id": message_id})
                    case _:
                        self._event_manager.on_message(data=data_dict)

            await asyncio.to_thread(_send_event)

    def _should_stream_message(self, stored_message: Message, original_message: Message) -> bool:
        return bool(
            hasattr(self, "_event_manager")
            and self._event_manager
            and stored_message.id
            and not isinstance(original_message.text, str)
        )

    async def _update_stored_message(self, message: Message) -> Message:
        """Update the stored message."""
        if hasattr(self, "_vertex") and self._vertex is not None and hasattr(self._vertex, "graph"):
            flow_id = (
                UUID(self._vertex.graph.flow_id)
                if isinstance(self._vertex.graph.flow_id, str)
                else self._vertex.graph.flow_id
            )

            message.flow_id = flow_id

        message_tables = await aupdate_messages(message)
        if not message_tables:
            msg = "Failed to update message"
            raise ValueError(msg)
        message_table = message_tables[0]
        return await Message.create(**message_table.model_dump())

    async def _stream_message(self, iterator: AsyncIterator | Iterator, message: Message) -> str:
        if not isinstance(iterator, AsyncIterator | Iterator):
            msg = "The message must be an iterator or an async iterator."
            raise TypeError(msg)

        if isinstance(iterator, AsyncIterator):
            return await self._handle_async_iterator(iterator, message.id, message)
        try:
            complete_message = ""
            first_chunk = True
            for chunk in iterator:
                complete_message = await self._process_chunk(
                    chunk.content, complete_message, message.id, message, first_chunk=first_chunk
                )
                first_chunk = False
        except Exception as e:
            raise StreamingError(cause=e, source=message.properties.source) from e
        else:
            return complete_message

    async def _handle_async_iterator(self, iterator: AsyncIterator, message_id: str, message: Message) -> str:
        complete_message = ""
        first_chunk = True
        async for chunk in iterator:
            complete_message = await self._process_chunk(
                chunk.content, complete_message, message_id, message, first_chunk=first_chunk
            )
            first_chunk = False
        return complete_message

    async def _process_chunk(
        self, chunk: str, complete_message: str, message_id: str, message: Message, *, first_chunk: bool = False
    ) -> str:
        complete_message += chunk
        if self._event_manager:
            if first_chunk:
                # Send the initial message only on the first chunk
                msg_copy = message.model_copy()
                msg_copy.text = complete_message
                await self._send_message_event(msg_copy, id_=message_id)
            await asyncio.to_thread(
                self._event_manager.on_token,
                data={
                    "chunk": chunk,
                    "id": str(message_id),
                },
            )
        return complete_message

    async def send_error(
        self,
        exception: Exception,
        session_id: str,
        trace_name: str,
        source: Source,
    ) -> Message | None:
        """Send an error message to the frontend."""
        flow_id = self.graph.flow_id if hasattr(self, "graph") else None
        if not session_id:
            return None
        error_message = ErrorMessage(
            flow_id=flow_id,
            exception=exception,
            session_id=session_id,
            trace_name=trace_name,
            source=source,
        )
        await self.send_message(error_message)
        return error_message

    def _append_tool_to_outputs_map(self):
        self._outputs_map[TOOL_OUTPUT_NAME] = self._build_tool_output()
        # add a new input for the tool schema
        # self.inputs.append(self._build_tool_schema())

    def _build_tool_output(self) -> Output:
        return Output(name=TOOL_OUTPUT_NAME, display_name=TOOL_OUTPUT_DISPLAY_NAME, method="to_toolkit", types=["Tool"])

    def get_input_display_name(self, input_name: str) -> str:
        """Get the display name of an input.

        This is a public utility method that subclasses can use to get user-friendly
        display names for inputs when building error messages or UI elements.

        Usage:
            msg = f"Input {self.get_input_display_name(input_name)} not found"

        Args:
            input_name (str): The name of the input.

        Returns:
            str: The display name of the input, or the input name if not found.
        """
        if input_name in self._inputs:
            return getattr(self._inputs[input_name], "display_name", input_name)
        return input_name

    def get_output_display_name(self, output_name: str) -> str:
        """Get the display name of an output.

        This is a public utility method that subclasses can use to get user-friendly
        display names for outputs when building error messages or UI elements.

        Args:
            output_name (str): The name of the output.

        Returns:
            str: The display name of the output, or the output name if not found.
        """
        if output_name in self._outputs_map:
            return getattr(self._outputs_map[output_name], "display_name", output_name)
        return output_name

    def build_input_error_message(self, input_name: str, message: str) -> str:
        """Build an error message for an input.

        This is a public utility method that subclasses can use to create consistent,
        user-friendly error messages that reference inputs by their display names.
        The input name is placed at the beginning to ensure it's visible even if the message is truncated.

        Args:
            input_name (str): The name of the input.
            message (str): The error message.

        Returns:
            str: The formatted error message with display name.
        """
        display_name = self.get_input_display_name(input_name)
        return f"[Input: {display_name}] {message}"

    def build_output_error_message(self, output_name: str, message: str) -> str:
        """Build an error message for an output.

        This is a public utility method that subclasses can use to create consistent,
        user-friendly error messages that reference outputs by their display names.
        The output name is placed at the beginning to ensure it's visible even if the message is truncated.

        Args:
            output_name (str): The name of the output.
            message (str): The error message.

        Returns:
            str: The formatted error message with display name.
        """
        display_name = self.get_output_display_name(output_name)
        return f"[Output: {display_name}] {message}"

    def build_component_error_message(self, message: str) -> str:
        """Build an error message for the component.

        This is a public utility method that subclasses can use to create consistent,
        user-friendly error messages that reference the component by its display name.
        The component name is placed at the beginning to ensure it's visible even if the message is truncated.

        Args:
            message (str): The error message.

        Returns:
            str: The formatted error message with component display name.
        """
        return f"[Component: {self.display_name or self.__class__.__name__}] {message}"
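    # Illustrative sketch of the resulting formats (assuming an input named "url"
    # whose display name is "URL" and a component displayed as "My Component"):
    #
    #     self.build_input_error_message("url", "must start with https://")
    #     # -> "[Input: URL] must start with https://"
    #     self.build_component_error_message("no outputs defined")
    #     # -> "[Component: My Component] no outputs defined"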


def _get_component_toolkit():
    from lfx.base.tools.component_tool import ComponentToolkit

    return ComponentToolkit